Set Rust edition to 2021

This commit is contained in:
Author: Jeremy Wall — 2023-01-16 15:41:09 -05:00
Parent commit: c59d25974a
This commit: ca7347e164
3 changed files with 20 additions and 36 deletions

View File

@ -1,5 +1,5 @@
[package] [package]
edition = "2018" edition = "2021"
name = "ucg" name = "ucg"
version = "0.7.2" version = "0.7.2"
authors = ["Jeremy Wall <jeremy@marzhillstudios.com>"] authors = ["Jeremy Wall <jeremy@marzhillstudios.com>"]

View File

@ -33,7 +33,7 @@ fn assert_build(input: &str) {
b.eval_string(input).unwrap(); b.eval_string(input).unwrap();
let env = b.environment.borrow(); let env = b.environment.borrow();
if !env.assert_results.success { if !env.assert_results.success {
assert!(false, env.assert_results.failures.clone()); assert!(false, "{}", env.assert_results.failures.clone());
} }
} }

View File

@ -9,10 +9,7 @@ use crate::iter::OffsetStrIter;
#[test] #[test]
fn test_empty_token() { fn test_empty_token() {
let result = emptytok(OffsetStrIter::new("NULL ")); let result = emptytok(OffsetStrIter::new("NULL "));
assert!( assert!(result.is_complete(), "result {:?} is not done", result);
result.is_complete(),
format!("result {:?} is not done", result)
);
if let Result::Complete(_, tok) = result { if let Result::Complete(_, tok) = result {
assert_eq!(tok.fragment, "NULL"); assert_eq!(tok.fragment, "NULL");
assert_eq!(tok.typ, TokenType::EMPTY); assert_eq!(tok.typ, TokenType::EMPTY);
@ -22,10 +19,7 @@ fn test_empty_token() {
#[test] #[test]
fn test_assert_token() { fn test_assert_token() {
let result = asserttok(OffsetStrIter::new("assert ")); let result = asserttok(OffsetStrIter::new("assert "));
assert!( assert!(result.is_complete(), "result {:?} is not done", result);
result.is_complete(),
format!("result {:?} is not done", result)
);
if let Result::Complete(_, tok) = result { if let Result::Complete(_, tok) = result {
assert_eq!(tok.fragment, "assert"); assert_eq!(tok.fragment, "assert");
assert_eq!(tok.typ, TokenType::BAREWORD); assert_eq!(tok.typ, TokenType::BAREWORD);
@ -35,10 +29,7 @@ fn test_assert_token() {
#[test] #[test]
fn test_out_token() { fn test_out_token() {
let result = outtok(OffsetStrIter::new("out ")); let result = outtok(OffsetStrIter::new("out "));
assert!( assert!(result.is_complete(), "result {:?} is not done", result);
result.is_complete(),
format!("result {:?} is not done", result)
);
if let Result::Complete(_, tok) = result { if let Result::Complete(_, tok) = result {
assert_eq!(tok.fragment, "out"); assert_eq!(tok.fragment, "out");
assert_eq!(tok.typ, TokenType::BAREWORD); assert_eq!(tok.typ, TokenType::BAREWORD);
@ -48,10 +39,7 @@ fn test_out_token() {
#[test] #[test]
fn test_out_token_with_comment() { fn test_out_token_with_comment() {
let result = outtok(OffsetStrIter::new("out//comment")); let result = outtok(OffsetStrIter::new("out//comment"));
assert!( assert!(result.is_complete(), "result {:?} is not done", result);
result.is_complete(),
format!("result {:?} is not done", result)
);
if let Result::Complete(_, tok) = result { if let Result::Complete(_, tok) = result {
assert_eq!(tok.fragment, "out"); assert_eq!(tok.fragment, "out");
assert_eq!(tok.typ, TokenType::BAREWORD); assert_eq!(tok.typ, TokenType::BAREWORD);
@ -61,16 +49,13 @@ fn test_out_token_with_comment() {
#[test] #[test]
fn test_not_out_token() { fn test_not_out_token() {
let result = outtok(OffsetStrIter::new("output")); let result = outtok(OffsetStrIter::new("output"));
assert!(result.is_fail(), format!("result {:?} is not fail", result)); assert!(result.is_fail(), "result {:?} is not fail", result);
} }
#[test] #[test]
fn test_escape_quoted() { fn test_escape_quoted() {
let result = escapequoted(OffsetStrIter::new("foo \\\"bar\"")); let result = escapequoted(OffsetStrIter::new("foo \\\"bar\""));
assert!( assert!(result.is_complete(), "result {:?} is not ok", result);
result.is_complete(),
format!("result {:?} is not ok", result)
);
if let Result::Complete(_rest, frag) = result { if let Result::Complete(_rest, frag) = result {
assert_eq!(frag, "foo \"bar"); assert_eq!(frag, "foo \"bar");
} }
@ -79,10 +64,7 @@ fn test_escape_quoted() {
#[test] #[test]
fn test_string_with_escaping() { fn test_string_with_escaping() {
let result = strtok(OffsetStrIter::new("\"foo \\\\ \\\"bar\"")); let result = strtok(OffsetStrIter::new("\"foo \\\\ \\\"bar\""));
assert!( assert!(result.is_complete(), "result {:?} is not ok", result);
result.is_complete(),
format!("result {:?} is not ok", result)
);
if let Result::Complete(_, tok) = result { if let Result::Complete(_, tok) = result {
assert_eq!(tok.fragment, "foo \\ \"bar".to_string()); assert_eq!(tok.fragment, "foo \\ \"bar".to_string());
} }
@ -92,7 +74,7 @@ fn test_string_with_escaping() {
fn test_tokenize_bareword_with_dash() { fn test_tokenize_bareword_with_dash() {
let input = OffsetStrIter::new("foo-bar "); let input = OffsetStrIter::new("foo-bar ");
let result = tokenize(input.clone(), None); let result = tokenize(input.clone(), None);
assert!(result.is_ok(), format!("result {:?} is not ok", result)); assert!(result.is_ok(), "result {:?} is not ok", result);
if let Ok(toks) = result { if let Ok(toks) = result {
assert_eq!(toks.len(), 2); assert_eq!(toks.len(), 2);
assert_eq!(toks[0].fragment, "foo-bar"); assert_eq!(toks[0].fragment, "foo-bar");
@ -104,7 +86,9 @@ macro_rules! assert_token {
let result = token(OffsetStrIter::new($input)); let result = token(OffsetStrIter::new($input));
assert!( assert!(
result.is_complete(), result.is_complete(),
format!("result {:?} is not a {}", result, $msg) "result {:?} is not a {}",
result,
$msg
); );
if let Result::Complete(_, tok) = result { if let Result::Complete(_, tok) = result {
assert_eq!(tok.typ, $typ); assert_eq!(tok.typ, $typ);
@ -160,7 +144,7 @@ fn test_tokenize_one_of_each() {
+ - . ( ) , 1 . foo \"bar\" // comment\n ; true false == < > <= >= !=", + - . ( ) , 1 . foo \"bar\" // comment\n ; true false == < > <= >= !=",
); );
let result = tokenize(input.clone(), None); let result = tokenize(input.clone(), None);
assert!(result.is_ok(), format!("result {:?} is not ok", result)); assert!(result.is_ok(), "result {:?} is not ok", result);
let v = result.unwrap(); let v = result.unwrap();
for (i, t) in v.iter().enumerate() { for (i, t) in v.iter().enumerate() {
println!("{}: {:?}", i, t); println!("{}: {:?}", i, t);
@ -177,7 +161,7 @@ fn test_tokenize_one_of_each_comment_map_path() {
); );
let mut comment_map = BTreeMap::new(); let mut comment_map = BTreeMap::new();
let result = tokenize(input.clone(), Some(&mut comment_map)); let result = tokenize(input.clone(), Some(&mut comment_map));
assert!(result.is_ok(), format!("result {:?} is not ok", result)); assert!(result.is_ok(), "result {:?} is not ok", result);
let v = result.unwrap(); let v = result.unwrap();
for (i, t) in v.iter().enumerate() { for (i, t) in v.iter().enumerate() {
println!("{}: {:?}", i, t); println!("{}: {:?}", i, t);
@ -282,7 +266,7 @@ fn test_match_word() {
let result = word!(SliceIter::new(input.as_slice()), "foo"); let result = word!(SliceIter::new(input.as_slice()), "foo");
match result { match result {
Result::Complete(_, tok) => assert_eq!(tok, input[0]), Result::Complete(_, tok) => assert_eq!(tok, input[0]),
res => assert!(false, format!("Fail: {:?}", res)), res => assert!(false, "Fail: {:?}", res),
} }
} }
@ -324,7 +308,7 @@ fn test_match_punct() {
let result = punct!(SliceIter::new(input.as_slice()), "!"); let result = punct!(SliceIter::new(input.as_slice()), "!");
match result { match result {
Result::Complete(_, tok) => assert_eq!(tok, input[0]), Result::Complete(_, tok) => assert_eq!(tok, input[0]),
res => assert!(false, format!("Fail: {:?}", res)), res => assert!(false, "Fail: {:?}", res),
} }
} }
@ -343,7 +327,7 @@ fn test_match_type() {
let result = match_type!(SliceIter::new(input.as_slice()), BAREWORD); let result = match_type!(SliceIter::new(input.as_slice()), BAREWORD);
match result { match result {
Result::Complete(_, tok) => assert_eq!(tok, input[0]), Result::Complete(_, tok) => assert_eq!(tok, input[0]),
res => assert!(false, format!("Fail: {:?}", res)), res => assert!(false, "Fail: {:?}", res),
} }
} }
@ -352,7 +336,7 @@ fn test_tokenize_builds_comment_map() {
let input = OffsetStrIter::new("// comment 1\n\n//comment 2"); let input = OffsetStrIter::new("// comment 1\n\n//comment 2");
let mut comment_map = BTreeMap::new(); let mut comment_map = BTreeMap::new();
let result = tokenize(input.clone(), Some(&mut comment_map)); let result = tokenize(input.clone(), Some(&mut comment_map));
assert!(result.is_ok(), format!("result {:?} is not ok", result)); assert!(result.is_ok(), "result {:?} is not ok", result);
assert_eq!(comment_map.len(), 2); assert_eq!(comment_map.len(), 2);
} }
@ -362,7 +346,7 @@ fn test_tokenize_builds_comment_map_groups() {
let input = OffsetStrIter::new("// first part\n// comment 1\n\n//comment 2"); let input = OffsetStrIter::new("// first part\n// comment 1\n\n//comment 2");
let mut comment_map = BTreeMap::new(); let mut comment_map = BTreeMap::new();
let result = tokenize(input.clone(), Some(&mut comment_map)); let result = tokenize(input.clone(), Some(&mut comment_map));
assert!(result.is_ok(), format!("result {:?} is not ok", result)); assert!(result.is_ok(), "result {:?} is not ok", result);
assert_eq!(comment_map.len(), 2); assert_eq!(comment_map.len(), 2);
assert_eq!(comment_map[&2].len(), 2); assert_eq!(comment_map[&2].len(), 2);