1 /// The tokenizer for the query interpreter
3 use crate::error
::{Error, Result}
;
/// One element of a parsed query, forming a singly-linked chain of tokens.
///
/// A token is either a map-key identifier or an array index; `next` points
/// to the following element of the query chain, if any.
#[derive(Debug, PartialEq, Eq)]
pub enum Token {
    /// A key identifier, e.g. `a` in the query `a.[0]`.
    Identifier {
        ident: String,
        next: Option<Box<Token>>,
    },

    /// An array index, e.g. `[0]` in the query `a.[0]`.
    Index {
        idx: usize,
        next: Option<Box<Token>>,
    },
}
20 pub fn next(&self) -> Option
<&Box
<Token
>> {
21 trace
!("Matching token (self): {:?}", self);
23 &Token
::Identifier { ref next, .. }
=> next
.as_ref(),
24 &Token
::Index { ref next, .. }
=> next
.as_ref(),
28 /// Convenience function for `token.next().is_some()`
29 pub fn has_next(&self) -> bool
{
30 trace
!("self.has_next(): {:?}", self.next().is_some());
34 pub fn set_next(&mut self, token
: Token
) {
35 trace
!("self.set_next({:?})", token
);
37 &mut Token
::Identifier { ref mut next, .. }
=> *next
= Some(Box
::new(token
)),
38 &mut Token
::Index { ref mut next, .. }
=> *next
= Some(Box
::new(token
)),
42 /// Pop the last token from the chain of tokens
44 /// Returns None if the current Token has no next token
45 pub fn pop_last(&mut self) -> Option
<Box
<Token
>> {
46 trace
!("self.pop_last()");
48 trace
!("self.pop_last(): No next");
51 trace
!("self.pop_last(): Having next");
53 &mut Token
::Identifier { ref mut next, .. }
=> {
54 trace
!("self.pop_last(): self is Identifier");
56 trace
!("self.pop_last(): next is Some(_)");
57 let mut n
= next
.take().unwrap();
59 trace
!("self.pop_last(): next also has a next");
61 trace
!("self.pop_last(): Recursing now");
62 let result
= n
.pop_last();
66 trace
!("self.pop_last(): Returning Result");
69 trace
!("self.pop_last(): next itself has no next, returning Some");
73 trace
!("self.pop_last(): next is none, returning None");
78 &mut Token
::Index { ref mut next, .. }
=> {
79 trace
!("self.pop_last(): self is Index");
81 trace
!("self.pop_last(): next is Some(_)");
83 let mut n
= next
.take().unwrap();
85 trace
!("self.pop_last(): next also has a next");
87 trace
!("self.pop_last(): Recursing now");
88 let result
= n
.pop_last();
92 trace
!("self.pop_last(): Returning Result");
95 trace
!("self.pop_last(): next itself has no next, returning Some");
99 trace
!("self.pop_last(): next is none, returning None");
108 pub fn identifier(&self) -> &String
{
109 trace
!("self.identifier()");
111 &Token
::Identifier { ref ident, .. }
=> &ident
,
117 pub fn idx(&self) -> usize {
118 trace
!("self.idx()");
120 &Token
::Index { idx: i, .. }
=> i
,
127 pub fn tokenize_with_seperator(query
: &str, seperator
: char) -> Result
<Token
> {
129 trace
!("tokenize_with_seperator(query: {:?}, seperator: {:?})", query
, seperator
);
131 /// Creates a Token object from a string
135 /// * If the internal regex does not compile (should never happen)
136 /// * If the token is non-valid (that is, a array index with a non-i64)
137 /// * If the regex does not find anything
138 /// * If the integer in the brackets (`[]`) cannot be parsed to a valid i64
140 /// # Incorrect behaviour
142 /// * If the regex finds multiple captures
146 /// The `Token` object with the correct identifier/index for this token and no next token.
148 fn mk_token_object(s
: &str) -> Result
<Token
> {
150 use std
::str::FromStr
;
152 trace
!("mk_token_object(s: {:?})", s
);
155 static ref RE
: Regex
= Regex
::new(r
"^\[\d+\]$").unwrap();
158 if !has_array_brackets(s
) {
159 trace
!("returning Ok(Identifier(ident: {:?}, next: None))", s
);
160 return Ok(Token
::Identifier { ident: String::from(s), next: None }
);
163 match RE
.captures(s
) {
164 None
=> return Err(Error
::ArrayAccessWithoutIndex
),
166 trace
!("Captured: {:?}", captures
);
167 match captures
.get(0) {
168 None
=> Ok(Token
::Identifier { ident: String::from(s), next: None }
),
170 trace
!("First capture: {:?}", mtch
);
172 let mtch
= without_array_brackets(mtch
.as_str());
173 trace
!(".. without array brackets: {:?}", mtch
);
175 let i
: usize = FromStr
::from_str(&mtch
).unwrap(); // save because regex
177 trace
!("returning Ok(Index(idx: {}, next: None)", i
);
188 /// Check whether a str begins with '[' and ends with ']'
189 fn has_array_brackets(s
: &str) -> bool
{
190 trace
!("has_array_brackets({:?})", s
);
191 s
.as_bytes()[0] == b'
['
&& s
.as_bytes()[s
.len() - 1] == b'
]'
194 /// Remove '[' and ']' from a str
195 fn without_array_brackets(s
: &str) -> String
{
196 trace
!("without_array_brackets({:?})", s
);
197 s
.replace("[","").replace("]","")
200 fn build_token_tree(split
: &mut Split
<char>, last
: &mut Token
) -> Result
<()> {
201 trace
!("build_token_tree(split: {:?}, last: {:?})", split
, last
);
203 None
=> { /* No more tokens */ }
205 trace
!("build_token_tree(...): next from split: {:?}", token
);
207 if token
.len() == 0 {
208 trace
!("build_token_tree(...): Empty identifier... returning Error");
209 return Err(Error
::EmptyIdentifier
)
212 let mut token
= r
#try!(mk_token_object(token));
213 r
#try!(build_token_tree(split, &mut token));
214 last
.set_next(token
);
218 trace
!("build_token_tree(...): returning Ok(())");
222 if query
.is_empty() {
223 trace
!("Query is empty. Returning error");
224 return Err(Error
::EmptyQueryError
)
227 let mut tokens
= query
.split(seperator
);
228 trace
!("Tokens splitted: {:?}", tokens
);
230 match tokens
.next() {
231 None
=> Err(Error
::EmptyQueryError
),
233 trace
!("next Token: {:?}", token
);
235 if token
.len() == 0 {
236 trace
!("Empty token. Returning Error");
237 return Err(Error
::EmptyIdentifier
);
240 let mut tok
= r
#try!(mk_token_object(token));
241 let _
= r
#try!(build_token_tree(&mut tokens, &mut tok));
243 trace
!("Returning Ok({:?})", tok
);
251 use crate::error
::Error
;
/// Tokenizing the empty query must fail with `EmptyQueryError`.
#[test]
fn test_tokenize_empty_query_to_error() {
    let tokens = tokenize_with_seperator(&String::from(""), '.');
    assert!(tokens.is_err());
    let tokens = tokens.unwrap_err();

    assert!(is_match!(tokens, Error::EmptyQueryError { .. }));
}

/// A lone seperator splits into empty identifiers and must be rejected.
#[test]
fn test_tokenize_seperator_only() {
    let tokens = tokenize_with_seperator(&String::from("."), '.');
    assert!(tokens.is_err());
    let tokens = tokens.unwrap_err();

    assert!(is_match!(tokens, Error::EmptyIdentifier { .. }));
}

/// `[]` is an array access without an index.
#[test]
fn test_tokenize_array_brackets_only() {
    let tokens = tokenize_with_seperator(&String::from("[]"), '.');
    assert!(tokens.is_err());
    let tokens = tokens.unwrap_err();

    assert!(is_match!(tokens, Error::ArrayAccessWithoutIndex { .. }));
}

/// An index-less array access at the end of a chain is also rejected.
#[test]
fn test_tokenize_identifiers_with_array_brackets_only() {
    let tokens = tokenize_with_seperator(&String::from("a.b.c.[]"), '.');
    assert!(tokens.is_err());
    let tokens = tokens.unwrap_err();

    assert!(is_match!(tokens, Error::ArrayAccessWithoutIndex { .. }));
}

/// Non-numeric content inside brackets is not a valid index.
#[test]
fn test_tokenize_identifiers_in_array_brackets() {
    let tokens = tokenize_with_seperator(&String::from("[a]"), '.');
    assert!(tokens.is_err());
    let tokens = tokens.unwrap_err();

    assert!(is_match!(tokens, Error::ArrayAccessWithoutIndex { .. }));
}
/// A single identifier becomes one `Identifier` token with no tail.
#[test]
fn test_tokenize_single_token_query() {
    let tokens = tokenize_with_seperator(&String::from("example"), '.');
    assert!(tokens.is_ok());
    let tokens = tokens.unwrap();

    assert!(match tokens {
        Token::Identifier { ref ident, next: None } => {
            assert_eq!("example", ident);
            true
        },
        _ => false,
    });
}

/// `a.b` becomes a two-element identifier chain.
#[test]
fn test_tokenize_double_token_query() {
    let tokens = tokenize_with_seperator(&String::from("a.b"), '.');
    assert!(tokens.is_ok());
    let tokens = tokens.unwrap();

    assert!(match tokens {
        Token::Identifier { next: Some(ref next), .. } => {
            assert_eq!("b", next.deref().identifier());
            match next.deref() {
                &Token::Identifier { next: None, .. } => true,
                _ => false,
            }
        },
        _ => false,
    });
    assert_eq!("a", tokens.identifier());
}

/// `a.[0]` becomes an identifier followed by an index token.
#[test]
fn test_tokenize_ident_then_array_query() {
    let tokens = tokenize_with_seperator(&String::from("a.[0]"), '.');
    assert!(tokens.is_ok());
    let tokens = tokens.unwrap();

    assert_eq!("a", tokens.identifier());
    assert!(match tokens {
        Token::Identifier { next: Some(ref next), .. } => {
            match next.deref() {
                &Token::Index { idx: 0, next: None } => true,
                _ => false,
            }
        },
        _ => false,
    });
}
/// A longer mixed query must produce exactly the expected token chain.
#[test]
fn test_tokenize_many_idents_then_array_query() {
    let tokens = tokenize_with_seperator(&String::from("a.b.c.[1000]"), '.');
    assert!(tokens.is_ok());
    let tokens = tokens.unwrap();

    assert_eq!("a", tokens.identifier());

    let expected = Token::Identifier {
        ident: String::from("a"),
        next: Some(Box::new(Token::Identifier {
            ident: String::from("b"),
            next: Some(Box::new(Token::Identifier {
                ident: String::from("c"),
                next: Some(Box::new(Token::Index {
                    idx: 1000,
                    next: None,
                })),
            })),
        })),
    };

    assert_eq!(expected, tokens);
}

/// An empty token in the middle of the query (`a..b`) is an error.
#[test]
fn test_tokenize_empty_token_after_good_token() {
    let tokens = tokenize_with_seperator(&String::from("a..b"), '.');
    assert!(tokens.is_err());
    let tokens = tokens.unwrap_err();

    assert!(is_match!(tokens, Error::EmptyIdentifier { .. }));
}
389 fn test_array_index(i
: usize) -> bool
{
390 match tokenize_with_seperator(&format
!("[{}]", i
), '
.'
) {
391 Ok(Token
::Index { next: None, .. }
) => true,
/// Popping from a single identifier token yields nothing.
#[test]
fn test_pop_last_token_from_single_identifier_token_is_none() {
    let mut token = Token::Identifier {
        ident: String::from("something"),
        next: None,
    };

    let last = token.pop_last();
    assert!(last.is_none());
}

/// Popping from a single index token yields nothing.
/// NOTE(review): the `idx` value is not visible in the mangled source;
/// it is irrelevant to the assertion, 0 chosen arbitrarily.
#[test]
fn test_pop_last_token_from_single_index_token_is_none() {
    let mut token = Token::Index {
        idx: 0,
        next: None,
    };

    let last = token.pop_last();
    assert!(last.is_none());
}

/// Popping from a two-element identifier chain detaches the tail element.
#[test]
fn test_pop_last_token_from_single_identifier_token_is_one() {
    let mut token = Token::Identifier {
        ident: String::from("some"),
        next: Some(Box::new(Token::Identifier {
            ident: String::from("thing"),
            next: None,
        })),
    };

    let last = token.pop_last();

    assert!(last.is_some());
    let last = last.unwrap();

    assert!(is_match!(*last, Token::Identifier { .. }));
    match *last {
        Token::Identifier { ident, .. } => {
            assert_eq!("thing", ident);
        },
        _ => panic!("What just happened?"),
    }
}

/// Popping from a two-element index chain detaches the tail element.
#[test]
fn test_pop_last_token_from_single_index_token_is_one() {
    let mut token = Token::Index {
        idx: 0,
        next: Some(Box::new(Token::Index {
            idx: 1,
            next: None,
        })),
    };

    let last = token.pop_last();

    assert!(last.is_some());
    let last = last.unwrap();

    assert!(is_match!(*last, Token::Index { idx: 1, .. }));
}
/// Popping a tokenized identifier chain returns its final identifier.
#[test]
fn test_pop_last_token_from_identifier_chain() {
    let tokens = tokenize_with_seperator(&String::from("a.b.c.d.e.f"), '.');
    assert!(tokens.is_ok());
    let mut tokens = tokens.unwrap();

    let last = tokens.pop_last();
    assert!(last.is_some());
    assert_eq!("f", last.unwrap().identifier());
}

/// Popping a mixed identifier/index chain returns its final identifier.
#[test]
fn test_pop_last_token_from_mixed_chain() {
    let tokens = tokenize_with_seperator(&String::from("a.[100].c.[3].e.f"), '.');
    assert!(tokens.is_ok());
    let mut tokens = tokens.unwrap();

    let last = tokens.pop_last();
    assert!(last.is_some());
    assert_eq!("f", last.unwrap().identifier());
}

/// Popping an identifier chain ending in an index returns that index.
#[test]
fn test_pop_last_token_from_identifier_chain_is_array() {
    let tokens = tokenize_with_seperator(&String::from("a.b.c.d.e.f.[1000]"), '.');
    assert!(tokens.is_ok());
    let mut tokens = tokens.unwrap();

    let last = tokens.pop_last();
    assert!(last.is_some());
    assert_eq!(1000, last.unwrap().idx());
}

/// Popping a mixed chain ending in an index returns that index.
#[test]
fn test_pop_last_token_from_mixed_chain_is_array() {
    let tokens = tokenize_with_seperator(&String::from("a.[100].c.[3].e.f.[1000]"), '.');
    assert!(tokens.is_ok());
    let mut tokens = tokens.unwrap();

    let last = tokens.pop_last();
    assert!(last.is_some());
    assert_eq!(1000, last.unwrap().idx());
}

/// A one-token query has no tail to pop.
#[test]
fn test_pop_last_token_from_one_token() {
    let tokens = tokenize_with_seperator(&String::from("a"), '.');
    assert!(tokens.is_ok());
    let mut tokens = tokens.unwrap();

    let last = tokens.pop_last();
    assert!(last.is_none());
}
/// Repeated pops walk the whole chain back-to-front, proving that
/// `pop_last` re-attaches the shortened tail after each pop.
#[test]
fn test_pop_last_chain() {
    let tokens = tokenize_with_seperator(&String::from("a.[100].c.[3].e.f.[1000]"), '.');
    assert!(tokens.is_ok());
    let mut tokens = tokens.unwrap();

    let last = tokens.pop_last();
    assert!(last.is_some());
    assert_eq!(1000, last.unwrap().idx());

    let last = tokens.pop_last();
    assert!(last.is_some());
    assert_eq!("f", last.unwrap().identifier());

    let last = tokens.pop_last();
    assert!(last.is_some());
    assert_eq!("e", last.unwrap().identifier());

    let last = tokens.pop_last();
    assert!(last.is_some());
    assert_eq!(3, last.unwrap().idx());

    let last = tokens.pop_last();
    assert!(last.is_some());
    assert_eq!("c", last.unwrap().identifier());

    let last = tokens.pop_last();
    assert!(last.is_some());
    assert_eq!(100, last.unwrap().idx());

    let last = tokens.pop_last();
    assert!(last.is_none());
}