FEATURE: Use context for our error reporting.

Jeremy Wall 2018-10-17 19:00:29 -05:00
parent c22d397545
commit 24b97c1037
8 changed files with 194 additions and 124 deletions
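The pattern running through all the hunks below: abortable_parser 0.2.0 makes its error type generic over the input being parsed (Error<SliceIter<Token>> here), so every construction site now hands the error a boxed clone of the iterator at the point of failure instead of a bare offset or a fabricated Position. A minimal sketch of the two call shapes this commit converges on, with names taken from the diff itself (the abortable_parser signatures are inferred from these call sites, not quoted from its docs):

// Plain failure: a message plus the cloned, boxed context iterator
// marking where parsing stopped.
return Err(Error::new(
    format!("Not an integer! {}", pref),
    Box::new(input.clone()),
));

// Wrapped failure: a message, the boxed underlying error, and then the
// boxed context it happened in.
return Result::Fail(Error::caused_by(
    "Invalid Macro syntax",
    Box::new(e),
    Box::new(rest.clone()),
));

Because the context is an iterator clone, a Position can still be recovered from it later with (&input).into(), which is how the pos: fields of the AST nodes get filled in below.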

Cargo.lock (generated, 6 lines changed)
View File

@@ -1,6 +1,6 @@
[[package]]
name = "abortable_parser"
version = "0.1.0"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
@@ -228,7 +228,7 @@ dependencies = [
name = "ucg"
version = "0.2.0"
dependencies = [
"abortable_parser 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"abortable_parser 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"bencher 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"clap 2.26.2 (registry+https://github.com/rust-lang/crates.io-index)",
"cpuprofiler 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -285,7 +285,7 @@ dependencies = [
]
[metadata]
"checksum abortable_parser 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a628e31269165eeea62b71b2555c6379d4fbadb3e34656b6e1445b0235247c0d"
"checksum abortable_parser 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bd280856ce341823c6aa6fddb3bafae236c23223824f47aef3443deb0b8d900c"
"checksum ansi_term 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "23ac7c30002a5accbf7e8987d0632fa6de155b7c3d39d0067317a391e00a2ef6"
"checksum atty 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "9a7d5b8723950951411ee34d271d99dddcc2035a16ab25310ea2c8cfd4369652"
"checksum backtrace 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "346d7644f0b5f9bc73082d3b2236b69a05fd35cce0cfa3724e184e6a5c9e2a2f"

View File

@@ -10,7 +10,7 @@ keywords = ["compiler", "config"]
license = "Apache-2.0"
[dependencies]
abortable_parser = "~0.1.0"
abortable_parser = "~0.2.0"
clap = "~2.26.0"
serde_json = "~1.0.9"
simple-error = "0.1"

View File

@@ -81,7 +81,6 @@ impl Error {
cause: Box<error::Error>,
pos: Position,
) -> Self {
// FIXME(jwall): This should take a real position instead of this fake one.
let mut e = Self::new(msg, t, pos);
e.cause = Some(cause);
return e;

View File

@@ -28,6 +28,7 @@ use error;
use iter::OffsetStrIter;
use tokenizer::*;
// TODO(jwall): Rename this to something better.
type NomResult<'a, O> = Result<SliceIter<'a, Token>, O>;
#[cfg(feature = "tracing")]
@@ -35,7 +36,7 @@ const ENABLE_TRACE: bool = true;
#[cfg(not(feature = "tracing"))]
const ENABLE_TRACE: bool = false;
type ParseResult<O> = std::result::Result<O, abortable_parser::Error>;
type ParseResult<'a, O> = std::result::Result<O, abortable_parser::Error<SliceIter<'a, Token>>>;
macro_rules! trace_nom {
($i:expr, $rule:ident!( $($args:tt)* )) => {
@@ -94,7 +95,10 @@ make_fn!(
);
// Helper function to make the return types work for down below.
fn triple_to_number(v: (Option<Token>, Option<Token>, Option<Token>)) -> ParseResult<Value> {
fn triple_to_number<'a>(
input: SliceIter<'a, Token>,
v: (Option<Token>, Option<Token>, Option<Token>),
) -> ParseResult<'a, Value> {
let (pref, mut pref_pos) = match v.0 {
None => ("", Position::new(0, 0, 0)),
Some(ref bs) => (bs.fragment.borrow(), bs.pos.clone()),
@@ -106,7 +110,10 @@ fn triple_to_number(v: (Option<Token>, Option<Token>, Option<Token>)) -> ParseRe
let i = match FromStr::from_str(pref) {
Ok(i) => i,
Err(_) => {
return Err(Error::new(format!("Not an integer! {}", pref), &0));
return Err(Error::new(
format!("Not an integer! {}", pref),
Box::new(input.clone()),
));
}
};
return Ok(Value::Int(value_node!(i, pref_pos)));
@@ -116,9 +123,9 @@ fn triple_to_number(v: (Option<Token>, Option<Token>, Option<Token>)) -> ParseRe
pref_pos = v.1.unwrap().pos;
}
let (suf, pos) = match v.2 {
None => ("".to_string(), Position::new(0, 0, 0)),
Some(bs) => (bs.fragment, bs.pos),
let suf = match v.2 {
None => "".to_string(),
Some(bs) => bs.fragment,
};
let to_parse = pref.to_string() + "." + &suf;
@@ -127,7 +134,7 @@ fn triple_to_number(v: (Option<Token>, Option<Token>, Option<Token>)) -> ParseRe
Err(_) => {
return Err(Error::new(
format!("Not a float! {}", to_parse),
&pos.offset,
Box::new(input.clone()),
));
}
};
@@ -212,7 +219,6 @@ macro_rules! alt_peek {
// This is our default termination case.
// If there is no fallback then we return an Error.
(__inner $i:expr, __end) => {
// FIXME(jwall): Should we make this a compile error instead?
compile_error!("alt_peek! requirs a fallback case");
};
@@ -270,10 +276,13 @@ fn number(input: SliceIter<Token>) -> Result<SliceIter<Token>, Value> {
Result::Abort(e) => Result::Abort(e),
Result::Fail(e) => Result::Fail(e),
Result::Incomplete(offset) => Result::Incomplete(offset),
Result::Complete(rest, triple) => match triple_to_number(triple) {
Result::Complete(rest, triple) => {
let num = triple_to_number(rest.clone(), triple);
match num {
Ok(val) => Result::Complete(rest, val),
Err(e) => Result::Fail(e),
},
}
}
}
}
// trace_macros!(false);
@@ -415,7 +424,7 @@ fn symbol_or_expression(input: SliceIter<Token>) -> NomResult<Expression> {
} else {
return Result::Fail(Error::new(
"Expected (.) but no dot found".to_string(),
&rest,
Box::new(rest.clone()),
));
}
}
@@ -425,7 +434,7 @@ fn symbol_or_expression(input: SliceIter<Token>) -> NomResult<Expression> {
} else {
return Result::Fail(Error::new(
"Expected (.) but no dot found".to_string(),
&rest,
Box::new(rest.clone()),
));
}
}
@@ -471,7 +480,7 @@ fn selector_list(input: SliceIter<Token>) -> NomResult<SelectorList> {
if list.is_empty() {
return Result::Fail(Error::new(
"(.) with no selector fields after".to_string(),
&rest,
Box::new(rest.clone()),
));
} else {
(rest, Some(list))
@@ -514,7 +523,12 @@ make_fn!(
)
);
fn tuple_to_macro(pos: Position, vals: Option<Vec<Value>>, val: Value) -> ParseResult<Expression> {
fn tuple_to_macro<'a>(
input: SliceIter<'a, Token>,
pos: Position,
vals: Option<Vec<Value>>,
val: Value,
) -> ParseResult<'a, Expression> {
let mut default_args = match vals {
None => Vec::new(),
Some(vals) => vals,
@@ -533,7 +547,7 @@ fn tuple_to_macro(pos: Position, vals: Option<Vec<Value>>, val: Value) -> ParseR
})),
val => Err(Error::new(
format!("Expected Tuple Got {:?}", val),
&val.pos().offset,
Box::new(input.clone()),
)),
}
}
@@ -558,34 +572,42 @@ fn macro_expression(input: SliceIter<Token>) -> Result<SliceIter<Token>, Express
Result::Abort(e) => Result::Abort(e),
Result::Fail(e) => Result::Fail(e),
Result::Incomplete(offset) => Result::Incomplete(offset),
Result::Complete(rest, (pos, arglist, map)) => match tuple_to_macro(pos, arglist, map) {
Result::Complete(rest, (pos, arglist, map)) => {
match tuple_to_macro(rest.clone(), pos, arglist, map) {
Ok(expr) => Result::Complete(rest, expr),
Err(e) => Result::Fail(Error::caused_by("Invalid Macro syntax", &rest, Box::new(e))),
},
Err(e) => Result::Fail(Error::caused_by(
"Invalid Macro syntax",
Box::new(e),
Box::new(rest.clone()),
)),
}
}
}
}
// FIXME(jwall): need to make this into a proper parse function.
fn tuple_to_select(
pos: Position,
fn tuple_to_select<'a>(
input: SliceIter<'a, Token>,
e1: Expression,
e2: Expression,
val: Value,
) -> ParseResult<Expression> {
) -> ParseResult<'a, Expression> {
match val {
Value::Tuple(v) => Ok(Expression::Select(SelectDef {
val: Box::new(e1),
default: Box::new(e2),
tuple: v.val,
pos: pos,
pos: (&input).into(),
})),
val => Err(Error::new(format!("Expected Tuple Got {:?}", val), &0)),
val => Err(Error::new(
format!("Expected Tuple Got {:?}", val),
Box::new(input.clone()),
)),
}
}
fn select_expression(input: SliceIter<Token>) -> Result<SliceIter<Token>, Expression> {
let parsed = do_each!(input,
start => word!("select"),
_ => word!("select"),
val => do_each!(
expr => trace_nom!(expression),
_ => punct!(","),
@@ -597,19 +619,19 @@ fn select_expression(input: SliceIter<Token>) -> Result<SliceIter<Token>, Expres
(expr)
),
map => trace_nom!(tuple),
(start.pos.clone(), val, default, map)
(val, default, map)
);
match parsed {
Result::Abort(e) => Result::Abort(e),
Result::Fail(e) => Result::Fail(e),
Result::Incomplete(offset) => Result::Incomplete(offset),
Result::Complete(rest, (pos, val, default, map)) => {
match tuple_to_select(pos, val, default, map) {
Result::Complete(rest, (val, default, map)) => {
match tuple_to_select(input.clone(), val, default, map) {
Ok(expr) => Result::Complete(rest, expr),
Err(e) => Result::Fail(Error::caused_by(
"Invalid Select Expression",
&rest,
Box::new(e),
Box::new(rest.clone()),
)),
}
}
@@ -636,17 +658,21 @@ make_fn!(
)
);
fn tuple_to_call(pos: Position, val: Value, exprs: Vec<Expression>) -> ParseResult<Expression> {
fn tuple_to_call<'a>(
input: SliceIter<'a, Token>,
val: Value,
exprs: Vec<Expression>,
) -> ParseResult<'a, Expression> {
if let Value::Selector(def) = val {
Ok(Expression::Call(CallDef {
macroref: def,
arglist: exprs,
pos: pos,
pos: (&input).into(),
}))
} else {
Err(Error::new(
format!("Expected Selector Got {:?}", val),
&val.pos().offset,
Box::new(input.clone()),
))
}
}
@@ -664,30 +690,34 @@ make_fn!(
);
fn call_expression(input: SliceIter<Token>) -> Result<SliceIter<Token>, Expression> {
let parsed = do_each!(input,
let parsed = do_each!(input.clone(),
macroname => trace_nom!(selector_value),
_ => punct!("("),
args => separated!(punct!(","), trace_nom!(expression)),
_ => punct!(")"),
(macroname.pos().clone(), macroname, args)
(macroname, args)
);
match parsed {
Result::Abort(e) => Result::Abort(e),
Result::Fail(e) => Result::Fail(e),
Result::Incomplete(offset) => Result::Incomplete(offset),
Result::Complete(rest, (pos, name, args)) => match tuple_to_call(pos, name, args) {
Result::Complete(rest, (name, args)) => match tuple_to_call(input.clone(), name, args) {
Ok(expr) => Result::Complete(rest, expr),
Err(e) => Result::Fail(Error::caused_by("Invalid Call Syntax", &rest, Box::new(e))),
Err(e) => Result::Fail(Error::caused_by(
"Invalid Call Syntax",
Box::new(e),
Box::new(rest),
)),
},
}
}
fn tuple_to_list_op(
pos: Position,
fn tuple_to_list_op<'a>(
input: &'a SliceIter<Token>,
kind: ListOpType,
macroname: Value,
list: Expression,
) -> ParseResult<Expression> {
) -> ParseResult<'a, Expression> {
if let Value::Selector(mut def) = macroname {
// First of all we need to assert that this is a selector of at least
// two sections.
@@ -701,7 +731,7 @@ fn tuple_to_list_op(
}
return Err(Error::new(
format!("Missing a result field for the macro"),
&def.pos.offset,
Box::new(input.clone()),
));
}
&mut Some(ref mut tl) => {
@@ -714,7 +744,7 @@ fn tuple_to_list_op(
}
return Err(Error::new(
format!("Missing a result field for the macro"),
&def.pos.offset,
Box::new(input.clone()),
));
}
let fname = tl.pop();
@@ -726,7 +756,7 @@ fn tuple_to_list_op(
mac: def,
field: fieldname,
target: Box::new(list),
pos: pos,
pos: input.into(),
}));
}
if ENABLE_TRACE {
@@ -737,21 +767,21 @@ fn tuple_to_list_op(
}
return Err(Error::new(
format!("Expected a selector but got {}", macroname.type_name()),
&pos.offset,
Box::new(input.clone()),
));
}
make_fn!(
list_op_expression<SliceIter<Token>, Expression>,
do_each!(
pos => pos,
input => input!(),
optype => either!(
do_each!(_ => word!("map"), (ListOpType::Map)),
do_each!(_ => word!("filter"), (ListOpType::Filter))
),
macroname => trace_nom!(selector_value),
list => trace_nom!(non_op_expression),
(tuple_to_list_op(pos, optype, macroname, list).unwrap())
(tuple_to_list_op(&input, optype, macroname, list).unwrap())
)
);
@@ -893,7 +923,7 @@ fn statement(i: SliceIter<Token>) -> Result<SliceIter<Token>, Statement> {
/// Parses a LocatedSpan into a list of Statements or an `error::Error`.
pub fn parse(input: OffsetStrIter) -> std::result::Result<Vec<Statement>, error::Error> {
match tokenize(input.clone()) {
match tokenize(&input) {
Ok(tokenized) => {
let mut out = Vec::new();
let mut i_ = SliceIter::from(&tokenized);
@@ -907,20 +937,18 @@ pub fn parse(input: OffsetStrIter) -> std::result::Result<Vec<Statement>, error:
match statement(i.clone()) {
Result::Abort(e) => {
let pos: Position = (&i).into();
let err = error::Error::new_with_boxed_cause(
"Statement Parse Error",
let err = error::Error::new(
format!("Statement Parse Error {}", e),
error::ErrorType::ParseError,
Box::new(e),
pos,
);
return Err(err);
}
Result::Fail(e) => {
let pos: Position = (&i).into();
let err = error::Error::new_with_boxed_cause(
"Statement Parse Error",
let err = error::Error::new(
format!("Statement Parse Error {}", e),
error::ErrorType::ParseError,
Box::new(e),
pos,
);
return Err(err);
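Worth noting before moving on to the next file: the top-level converter in parse (and in tokenize later in this commit) stops boxing the parser error as a cause via error::Error::new_with_boxed_cause and instead folds the error's own Display output, which now carries its context, into the message, while keeping the Position taken from the iterator. Condensed into one sketch (both the Abort and Fail arms in the hunk above have the same shape):

Result::Fail(e) | Result::Abort(e) => {
    // The position still comes from the token iterator at the failure point...
    let pos: Position = (&i).into();
    // ...while the abortable_parser error formats its own context into the
    // message, so a separate boxed cause is no longer needed.
    return Err(error::Error::new(
        format!("Statement Parse Error {}", e),
        error::ErrorType::ParseError,
        pos,
    ));
}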

View File

@@ -49,7 +49,10 @@ make_fn!(
fn parse_expression(i: SliceIter<Element>) -> Result<SliceIter<Element>, Expression> {
let mut i_ = i.clone();
if eoi(i_.clone()).is_complete() {
return Result::Abort(Error::new("Expected Expression found End Of Input", &i_));
return Result::Abort(Error::new(
"Expected Expression found End Of Input",
Box::new(i_),
));
}
let el = i_.next();
if let Some(&Element::Expr(ref expr)) = el {
@@ -60,7 +63,7 @@ fn parse_expression(i: SliceIter<Element>) -> Result<SliceIter<Element>, Express
"Error while parsing Binary Expression Expected Expression got {:?}",
el
),
&i_,
Box::new(i_),
));
}
@@ -69,7 +72,7 @@ fn parse_sum_operator(i: SliceIter<Element>) -> Result<SliceIter<Element>, Binar
if eoi(i_.clone()).is_complete() {
return Result::Fail(Error::new(
format!("Expected Expression found End Of Input"),
&i_,
Box::new(i_),
));
}
let el = i_.next();
@@ -91,7 +94,7 @@ fn parse_sum_operator(i: SliceIter<Element>) -> Result<SliceIter<Element>, Binar
"Error while parsing Binary Expression Unexpected Operator {:?}",
el
),
&i_,
Box::new(i_),
));
}
@@ -114,7 +117,7 @@ fn parse_product_operator(i: SliceIter<Element>) -> Result<SliceIter<Element>, B
if eoi(i_.clone()).is_complete() {
return Result::Fail(Error::new(
format!("Expected Expression found End Of Input"),
&i_,
Box::new(i_),
));
}
let el = i_.next();
@@ -136,7 +139,7 @@ fn parse_product_operator(i: SliceIter<Element>) -> Result<SliceIter<Element>, B
"Error while parsing Binary Expression Unexpected Operator {:?}",
el
),
&i_,
Box::new(i_),
));
}
@@ -224,7 +227,7 @@ fn parse_compare_operator(i: SliceIter<Element>) -> Result<SliceIter<Element>, C
if eoi(i_.clone()).is_complete() {
return Result::Fail(Error::new(
format!("Expected Expression found End Of Input"),
&i_,
Box::new(i_),
));
}
let el = i_.next();
@@ -236,7 +239,7 @@ fn parse_compare_operator(i: SliceIter<Element>) -> Result<SliceIter<Element>, C
"Error while parsing Binary Expression Unexpected Operator {:?}",
el
),
&i,
Box::new(i),
));
}
@@ -309,24 +312,51 @@ fn parse_operand_list<'a>(i: SliceIter<'a, Token>) -> NomResult<'a, Vec<Element>
}
/// Parse a binary operator expression.
pub fn op_expression<'a>(i: SliceIter<'a, Token>) -> NomResult<'a, Expression> {
pub fn op_expression<'a>(i: SliceIter<'a, Token>) -> Result<SliceIter<Token>, Expression> {
let preparse = parse_operand_list(i.clone());
match preparse {
Result::Fail(e) => Result::Fail(e),
Result::Abort(e) => Result::Abort(e),
Result::Fail(e) => {
let err = Error::caused_by(
"Failed while parsing operator expression",
Box::new(e),
Box::new(i),
);
Result::Fail(err)
}
Result::Abort(e) => {
let err = Error::caused_by(
"Failed while parsing operator expression",
Box::new(e),
Box::new(i),
);
Result::Fail(err)
}
Result::Incomplete(i) => Result::Incomplete(i),
Result::Complete(rest, oplist) => {
let mut i_ = SliceIter::new(&oplist);
let parse_result = either!(
i_,
i_.clone(),
trace_nom!(compare_expression),
trace_nom!(math_expression)
);
match parse_result {
Result::Fail(e) => Result::Fail(e),
Result::Abort(e) => Result::Abort(e),
Result::Incomplete(i) => Result::Incomplete(i),
Result::Fail(_e) => {
// TODO(jwall): It would be good to be able to use caused_by here.
let err = Error::new(
"Failed while parsing operator expression",
Box::new(rest.clone()),
);
Result::Fail(err)
}
Result::Abort(_e) => {
let err = Error::new(
"Failed while parsing operator expression",
Box::new(rest.clone()),
);
Result::Abort(err)
}
Result::Incomplete(_) => Result::Incomplete(i.clone()),
Result::Complete(_, expr) => Result::Complete(rest.clone(), expr),
}
}

View File

@@ -24,7 +24,7 @@ macro_rules! assert_parse {
};
($i:expr, $f:expr, $out:expr) => {{
let input = OffsetStrIter::new($i);
match tokenize(input) {
match tokenize(&input) {
Err(e) => assert!(false, format!("Tokenizer Error: {:?}", e)),
Ok(val) => match $f(SliceIter::new(val.as_slice())) {
Result::Complete(_, result) => assert_eq!(result, $out),
@@ -40,7 +40,7 @@ macro_rules! assert_fail {
};
($i:expr, $f:expr) => {{
let input = OffsetStrIter::new($i);
match tokenize(input) {
match tokenize(&input) {
Err(_) => assert!(true),
Ok(val) => {
let result = $f(SliceIter::new(val.as_slice()));
@@ -56,7 +56,7 @@ macro_rules! assert_abort {
};
($i:expr, $f:expr) => {{
let input = OffsetStrIter::new($i);
match tokenize(input) {
match tokenize(&input) {
Err(_) => assert!(true),
Ok(val) => {
let result = $f(SliceIter::new(val.as_slice()));

View File

@@ -27,12 +27,20 @@ fn is_symbol_char<'a>(i: OffsetStrIter<'a>) -> Result<OffsetStrIter<'a>, u8> {
let mut _i = i.clone();
let c = match _i.next() {
Some(c) => *c,
None => return Result::Fail(Error::new("Unexpected End of Input".to_string(), &_i)),
None => {
return Result::Fail(Error::new(
"Unexpected End of Input".to_string(),
Box::new(_i.clone()),
))
}
};
if (c as char).is_ascii_alphanumeric() || c == b'-' || c == b'_' {
Result::Complete(_i, c)
} else {
Result::Fail(Error::new("Not a symbol character".to_string(), &_i))
Result::Fail(Error::new(
"Not a symbol character".to_string(),
Box::new(_i.clone()),
))
}
}
@@ -60,7 +68,7 @@ fn escapequoted<'a>(input: OffsetStrIter<'a>) -> Result<OffsetStrIter<'a>, Strin
escape = false; // reset our escaping sentinel
}
}
return Result::Incomplete(_input.get_offset());
return Result::Incomplete(_input.clone());
}
make_fn!(strtok<OffsetStrIter, Token>,
@@ -304,11 +312,14 @@ fn comment(input: OffsetStrIter) -> Result<OffsetStrIter, Token> {
}
// If we didn't find a new line then we just grab everything.
_ => {
return Result::Abort(Error::new("Unparsable comment".to_string(), &rest));
return Result::Abort(Error::new(
"Unparsable comment".to_string(),
Box::new(rest.clone()),
));
}
}
}
Result::Incomplete(offset) => return Result::Incomplete(offset),
Result::Incomplete(ctx) => return Result::Incomplete(ctx),
Result::Fail(e) => return Result::Fail(e),
Result::Abort(e) => return Result::Abort(e),
}
@@ -339,8 +350,9 @@ make_fn!(end_of_input<OffsetStrIter, Token>,
)
);
make_fn!(token<OffsetStrIter, Token>,
fn token<'a>(input: OffsetStrIter<'a>) -> Result<OffsetStrIter<'a>, Token> {
either!(
input,
strtok,
pipequotetok,
emptytok, // This must come before the barewordtok
@@ -380,11 +392,12 @@ make_fn!(token<OffsetStrIter, Token>,
filtertok,
barewordtok,
whitespace,
end_of_input)
);
end_of_input
)
}
/// Consumes an input OffsetStrIter and returns either a Vec<Token> or a error::Error.
pub fn tokenize(input: OffsetStrIter) -> std::result::Result<Vec<Token>, error::Error> {
pub fn tokenize(input: &OffsetStrIter) -> std::result::Result<Vec<Token>, error::Error> {
let mut out = Vec::new();
let mut i = input.clone();
loop {
@@ -392,27 +405,24 @@ pub fn tokenize(input: OffsetStrIter) -> std::result::Result<Vec<Token>, error::
break;
}
let pos: Position = Position::from(&i);
// FIXME(jwall): We need to return a error::Error so we have position information.
match token(i.clone()) {
Result::Abort(e) => {
return Err(error::Error::new_with_boxed_cause(
"Invalid Token encountered",
return Err(error::Error::new(
format!("Invalid Token encountered {}", e),
error::ErrorType::UnexpectedToken,
Box::new(e),
pos,
))
}
Result::Fail(e) => {
return Err(error::Error::new_with_boxed_cause(
"Invalid Token encountered",
return Err(error::Error::new(
format!("Invalid Token encountered {}", e),
error::ErrorType::UnexpectedToken,
Box::new(e),
pos,
))
}
Result::Incomplete(_offset) => {
return Err(error::Error::new(
"Invalid Token encountered",
"Incomplete Token encountered",
error::ErrorType::IncompleteParsing,
pos,
))
@@ -439,7 +449,7 @@ pub fn tokenize(input: OffsetStrIter) -> std::result::Result<Vec<Token>, error::
/// Clones a token.
///
/// This is necessary to allow the match_type and match_token macros to work.
pub fn token_clone(t: &Token) -> std::result::Result<Token, Error> {
pub fn token_clone(t: &Token) -> std::result::Result<Token, Error<SliceIter<Token>>> {
Ok(t.clone())
}
@@ -517,7 +527,7 @@ macro_rules! match_type {
let mut _i = $i.clone();
if eoi(_i.clone()).is_complete() {
Result::Fail(Error::new(format!("End of Input! {}", $msg), &$i))
Result::Fail(Error::new(format!("End of Input! {}", $msg), Box::new(_i)))
} else {
match _i.next() {
Some(tok) => {
@@ -525,14 +535,14 @@ macro_rules! match_type {
match $h(tok) {
std::result::Result::Ok(v) => Result::Complete(_i.clone(), v),
std::result::Result::Err(e) => {
Result::Fail(Error::caused_by($msg, &_i, Box::new(e)))
Result::Fail(Error::caused_by($msg, Box::new(e), Box::new(_i)))
}
}
} else {
Result::Fail(Error::new($msg.to_string(), &$i))
Result::Fail(Error::new($msg.to_string(), Box::new($i)))
}
}
None => Result::Fail(Error::new($msg.to_string(), &$i)),
None => Result::Fail(Error::new($msg.to_string(), Box::new($i))),
}
}
}};
@@ -575,17 +585,17 @@ macro_rules! match_token {
match $h(tok) {
std::result::Result::Ok(v) => Result::Complete(i_.clone(), v),
std::result::Result::Err(e) => {
Result::Fail(Error::caused_by($msg, &i_, Box::new(e)))
Result::Fail(Error::caused_by($msg, Box::new(e), Box::new(i_)))
}
}
} else {
Result::Fail(Error::new(
format!("Expected {} Instead is ({})", $msg, tok.fragment),
&i_,
Box::new(i_),
))
}
} else {
Result::Fail(Error::new("Unexpected End Of Input", &i_))
Result::Fail(Error::new("Unexpected End Of Input", Box::new(i_)))
}
}};
}
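Since tokenize now only borrows its input (the &OffsetStrIter signature above), callers construct the iterator first and keep ownership of it, which is exactly what the updated tests below do. A small usage sketch (the source string is hypothetical):

let input = OffsetStrIter::new("let x = 1;");            // hypothetical ucg snippet
let tokens = tokenize(&input).expect("tokenizer error");  // input is only borrowed here
let iter = SliceIter::from(&tokens);                      // the token vec can then feed the parser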

View File

@@ -101,7 +101,8 @@ fn test_string_with_escaping() {
#[test]
fn test_tokenize_bareword_with_dash() {
let result = tokenize(OffsetStrIter::new("foo-bar "));
let input = OffsetStrIter::new("foo-bar ");
let result = tokenize(&input);
assert!(result.is_ok(), format!("result {:?} is not ok", result));
if let Ok(toks) = result {
assert_eq!(toks.len(), 2);
@@ -165,10 +166,11 @@ fn test_lteqtok() {
#[test]
fn test_tokenize_one_of_each() {
let result = tokenize(OffsetStrIter::new(
let input = OffsetStrIter::new(
"map out filter assert let import macro select as => [ ] { } ; = % / * \
+ - . ( ) , 1 . foo \"bar\" // comment\n ; true false == < > <= >= !=",
));
);
let result = tokenize(&input);
assert!(result.is_ok(), format!("result {:?} is not ok", result));
let v = result.unwrap();
for (i, t) in v.iter().enumerate() {
@@ -180,7 +182,8 @@ fn test_parse_has_end() {
#[test]
fn test_parse_has_end() {
let result = tokenize(OffsetStrIter::new("foo"));
let input = OffsetStrIter::new("foo");
let result = tokenize(&input);
assert!(result.is_ok());
let v = result.unwrap();
assert_eq!(v.len(), 2);