2018-05-22 18:02:44 -05:00
|
|
|
use super::*;
|
2018-11-05 21:34:12 -06:00
|
|
|
|
|
|
|
use abortable_parser::{Result, SliceIter};
|
|
|
|
|
2018-12-06 12:23:52 -06:00
|
|
|
use crate::iter::OffsetStrIter;
|
2018-05-22 18:02:44 -05:00
|
|
|
|
|
|
|
#[test]
fn test_empty_token() {
    // `emptytok` should consume the NULL keyword, leaving trailing whitespace.
    let result = emptytok(OffsetStrIter::new("NULL "));
    // Pass format args directly: `assert!(cond, format!(..))` is deprecated
    // (non_fmt_panics) and rejected in edition 2021.
    assert!(result.is_complete(), "result {:?} is not done", result);
    if let Result::Complete(_, tok) = result {
        assert_eq!(tok.fragment, "NULL");
        assert_eq!(tok.typ, TokenType::EMPTY);
    }
}
|
|
|
|
|
2018-05-30 23:00:50 -05:00
|
|
|
#[test]
fn test_assert_token() {
    // `asserttok` should match the `assert` keyword as a BAREWORD token.
    let result = asserttok(OffsetStrIter::new("assert "));
    // Direct format args instead of a format!() message (non_fmt_panics).
    assert!(result.is_complete(), "result {:?} is not done", result);
    if let Result::Complete(_, tok) = result {
        assert_eq!(tok.fragment, "assert");
        assert_eq!(tok.typ, TokenType::BAREWORD);
    }
}
|
|
|
|
|
2018-08-13 20:37:58 -05:00
|
|
|
#[test]
fn test_out_token() {
    // `outtok` should match the `out` keyword followed by whitespace.
    let result = outtok(OffsetStrIter::new("out "));
    // Direct format args instead of a format!() message (non_fmt_panics).
    assert!(result.is_complete(), "result {:?} is not done", result);
    if let Result::Complete(_, tok) = result {
        assert_eq!(tok.fragment, "out");
        assert_eq!(tok.typ, TokenType::BAREWORD);
    }
}
|
|
|
|
|
2018-11-05 21:34:12 -06:00
|
|
|
#[test]
fn test_out_token_with_comment() {
    // A comment immediately after `out` should still terminate the keyword.
    let result = outtok(OffsetStrIter::new("out//comment"));
    // Direct format args instead of a format!() message (non_fmt_panics).
    assert!(result.is_complete(), "result {:?} is not done", result);
    if let Result::Complete(_, tok) = result {
        assert_eq!(tok.fragment, "out");
        assert_eq!(tok.typ, TokenType::BAREWORD);
    }
}
|
|
|
|
|
|
|
|
#[test]
fn test_not_out_token() {
    // `outtok` must not match when `out` is merely a prefix of a longer word.
    let result = outtok(OffsetStrIter::new("output"));
    // Direct format args instead of a format!() message (non_fmt_panics).
    assert!(result.is_fail(), "result {:?} is not fail", result);
}
|
|
|
|
|
2018-05-22 18:02:44 -05:00
|
|
|
#[test]
fn test_escape_quoted() {
    // Escaped quotes inside the input are unescaped in the parsed fragment,
    // and parsing stops at the first unescaped closing quote.
    let result = escapequoted(OffsetStrIter::new("foo \\\"bar\""));
    // Direct format args instead of a format!() message (non_fmt_panics).
    assert!(result.is_complete(), "result {:?} is not ok", result);
    if let Result::Complete(_rest, frag) = result {
        assert_eq!(frag, "foo \"bar");
    }
}
|
|
|
|
|
|
|
|
#[test]
fn test_string_with_escaping() {
    // Escaped backslashes and escaped quotes both survive into the token
    // fragment in their unescaped form.
    let result = strtok(OffsetStrIter::new("\"foo \\\\ \\\"bar\""));
    // Direct format args instead of a format!() message (non_fmt_panics).
    assert!(result.is_complete(), "result {:?} is not ok", result);
    if let Result::Complete(_, tok) = result {
        assert_eq!(tok.fragment, "foo \\ \"bar".to_string());
    }
}
|
|
|
|
|
|
|
|
#[test]
fn test_tokenize_bareword_with_dash() {
    // Barewords may contain dashes; expect the word plus the trailing END token.
    let input = OffsetStrIter::new("foo-bar ");
    let result = tokenize(input.clone());
    // Direct format args instead of a format!() message (non_fmt_panics).
    assert!(result.is_ok(), "result {:?} is not ok", result);
    if let Ok(toks) = result {
        assert_eq!(toks.len(), 2);
        assert_eq!(toks[0].fragment, "foo-bar");
    }
}
|
|
|
|
|
|
|
|
/// Asserts that `$input` lexes to a single complete token of type `$typ`
/// whose fragment is the whole input. `$msg` names the expected kind in
/// the failure message.
macro_rules! assert_token {
    ($input:expr, $typ:expr, $msg:expr) => {
        let result = token(OffsetStrIter::new($input));
        // Direct format args instead of a format!() message (non_fmt_panics).
        assert!(
            result.is_complete(),
            "result {:?} is not a {}",
            result,
            $msg
        );
        if let Result::Complete(_, tok) = result {
            assert_eq!(tok.typ, $typ);
            assert_eq!(tok.fragment, $input);
        }
    };
}
|
|
|
|
|
2018-11-05 21:34:12 -06:00
|
|
|
#[test]
fn test_digittok() {
    // A single digit lexes as a DIGIT token.
    assert_token!("1", TokenType::DIGIT, "1");
}
|
|
|
|
|
2018-05-22 18:02:44 -05:00
|
|
|
#[test]
fn test_boolean() {
    // The `true` literal lexes as a BOOLEAN token.
    assert_token!("true", TokenType::BOOLEAN, "boolean");
}
|
|
|
|
|
|
|
|
#[test]
fn test_eqeqtok() {
    // The equality operator lexes as a single PUNCT token.
    assert_token!("==", TokenType::PUNCT, "==");
}
|
|
|
|
|
|
|
|
#[test]
fn test_notequaltok() {
    // The inequality operator lexes as a single PUNCT token.
    assert_token!("!=", TokenType::PUNCT, "!=");
}
|
|
|
|
|
|
|
|
#[test]
fn test_gttok() {
    // The greater-than operator lexes as a PUNCT token.
    assert_token!(">", TokenType::PUNCT, ">");
}
|
|
|
|
|
|
|
|
#[test]
fn test_lttok() {
    // The less-than operator lexes as a PUNCT token.
    assert_token!("<", TokenType::PUNCT, "<");
}
|
|
|
|
|
|
|
|
#[test]
fn test_gteqtok() {
    // The greater-or-equal operator lexes as a single PUNCT token.
    assert_token!(">=", TokenType::PUNCT, ">=");
}
|
|
|
|
|
|
|
|
#[test]
fn test_lteqtok() {
    // The less-or-equal operator lexes as a single PUNCT token.
    assert_token!("<=", TokenType::PUNCT, "<=");
}
|
|
|
|
|
|
|
|
#[test]
fn test_tokenize_one_of_each() {
    // Exercise (roughly) one of every token kind in a single input stream
    // and check the total count plus the trailing END token.
    let input = OffsetStrIter::new(
        "map out filter assert let import macro select as => [ ] { } ; = % / * \
         + - . ( ) , 1 . foo \"bar\" // comment\n ; true false == < > <= >= !=",
    );
    let result = tokenize(input.clone());
    // Direct format args instead of a format!() message (non_fmt_panics).
    assert!(result.is_ok(), "result {:?} is not ok", result);
    let v = result.unwrap();
    for (i, t) in v.iter().enumerate() {
        println!("{}: {:?}", i, t);
    }
    assert_eq!(v.len(), 39);
    assert_eq!(v[38].typ, TokenType::END);
}
|
|
|
|
|
|
|
|
#[test]
fn test_parse_has_end() {
    // Every successful tokenize run is terminated by an END token.
    let result = tokenize(OffsetStrIter::new("foo").clone());
    assert!(result.is_ok());
    let toks = result.unwrap();
    assert_eq!(toks.len(), 2);
    assert_eq!(toks[1].typ, TokenType::END);
}
|
|
|
|
|
2018-11-05 21:34:12 -06:00
|
|
|
#[test]
fn test_whitespace() {
    // A run of spaces lexes to one WS token and advances the iterator
    // past all of the consumed whitespace.
    assert!(whitespace(OffsetStrIter::new("  ")).is_complete());
    let result = whitespace(OffsetStrIter::new("  "));
    match result {
        Result::Complete(rest, o) => {
            // Two spaces consumed, so the offset advanced by 2.
            assert_eq!(rest.get_offset(), 2);
            assert_eq!(o.typ, TokenType::WS);
        }
        // `panic!` instead of `assert!(false, ..)` (clippy: assertions_on_constants).
        _ => panic!("Not complete"),
    }
}
|
|
|
|
|
2018-05-22 18:02:44 -05:00
|
|
|
#[test]
fn test_parse_comment() {
    // Line comments terminate at `\n`, `\r\n`, or end-of-input; in every
    // case the resulting token is identical: COMMENT, fragment " comment",
    // positioned at line 1 / column 1 / offset 0.
    assert!(comment(OffsetStrIter::new("// comment\n")).is_complete());
    assert!(comment(OffsetStrIter::new("// comment")).is_complete());
    let expected = Token {
        typ: TokenType::COMMENT,
        fragment: " comment".to_string(),
        pos: Position {
            file: None,
            line: 1,
            column: 1,
            offset: 0,
        },
    };
    // Same expectation for every line-terminator variant.
    for input in &["// comment\n", "// comment\r\n", "// comment\r\n "] {
        assert!(comment(OffsetStrIter::new(input)).is_complete());
        let parsed = comment(OffsetStrIter::new(input));
        assert!(parsed.is_complete());
        if let Result::Complete(_rest, cmt) = parsed {
            assert_eq!(cmt, expected);
        }
    }
    assert!(comment(OffsetStrIter::new("// comment")).is_complete());
}
|
|
|
|
|
|
|
|
#[test]
fn test_match_word() {
    // word! matches a BAREWORD token whose fragment equals the requested word.
    let input = vec![Token {
        fragment: "foo".to_string(),
        typ: TokenType::BAREWORD,
        pos: Position {
            file: None,
            line: 1,
            column: 1,
            offset: 0,
        },
    }];
    let result = word!(SliceIter::new(input.as_slice()), "foo");
    match result {
        Result::Complete(_, tok) => assert_eq!(tok, input[0]),
        // `panic!` instead of `assert!(false, format!(..))` (non_fmt_panics).
        res => panic!("Fail: {:?}", res),
    }
}
|
|
|
|
|
|
|
|
#[test]
fn test_match_word_empty_input() {
    // word! must produce Fail (not Complete, Incomplete, or Abort) when the
    // stream holds only the END sentinel token.
    let input = vec![Token {
        fragment: "".to_string(),
        typ: TokenType::END,
        pos: Position {
            file: None,
            line: 1,
            column: 1,
            offset: 0,
        },
    }];
    let result = word!(SliceIter::new(input.as_slice()), "foo");
    match result {
        // `panic!` instead of `assert!(false, ..)` (clippy: assertions_on_constants).
        Result::Complete(_, _) => panic!("Should have been an error but was Done"),
        Result::Incomplete(_) => panic!("Should have been a Fail but was Incomplete"),
        Result::Fail(_) => {
            // Expected outcome; nothing to check.
        }
        Result::Abort(_) => panic!("Should have been a Fail but was Abort"),
    }
}
|
|
|
|
|
|
|
|
#[test]
fn test_match_punct() {
    // punct! matches a PUNCT token whose fragment equals the requested symbol.
    let input = vec![Token {
        fragment: "!".to_string(),
        typ: TokenType::PUNCT,
        pos: Position {
            file: None,
            line: 1,
            column: 1,
            offset: 0,
        },
    }];
    let result = punct!(SliceIter::new(input.as_slice()), "!");
    match result {
        Result::Complete(_, tok) => assert_eq!(tok, input[0]),
        // `panic!` instead of `assert!(false, format!(..))` (non_fmt_panics).
        res => panic!("Fail: {:?}", res),
    }
}
|
|
|
|
|
|
|
|
#[test]
|
|
|
|
fn test_match_type() {
|
2018-05-29 20:48:57 -05:00
|
|
|
let input = vec![Token {
|
|
|
|
fragment: "foo".to_string(),
|
|
|
|
typ: TokenType::BAREWORD,
|
2018-11-05 21:34:12 -06:00
|
|
|
pos: Position {
|
2019-01-23 20:02:35 -06:00
|
|
|
file: None,
|
2018-11-05 21:34:12 -06:00
|
|
|
line: 1,
|
|
|
|
column: 1,
|
|
|
|
offset: 0,
|
2018-05-22 18:02:44 -05:00
|
|
|
},
|
2018-11-05 21:34:12 -06:00
|
|
|
}];
|
|
|
|
let result = match_type!(SliceIter::new(input.as_slice()), BAREWORD);
|
2018-05-22 18:02:44 -05:00
|
|
|
match result {
|
2018-11-05 21:34:12 -06:00
|
|
|
Result::Complete(_, tok) => assert_eq!(tok, input[0]),
|
2018-05-22 18:02:44 -05:00
|
|
|
res => assert!(false, format!("Fail: {:?}", res)),
|
|
|
|
}
|
|
|
|
}
|