FEATURE: Use context for our error reporting.

Jeremy Wall 2018-10-17 19:00:29 -05:00
parent c22d397545
commit 24b97c1037
8 changed files with 194 additions and 124 deletions
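The change is mechanical but broad: everywhere the parser or tokenizer used to build an abortable_parser error from a bare offset or Position (for example `&pos.offset`, `&rest`, or `&0`), it now hands the error a boxed clone of the input iterator, so the error carries the parsing context it failed at. The conversion helpers (`triple_to_number`, `tuple_to_macro`, `tuple_to_select`, `tuple_to_call`, `tuple_to_list_op`) grow an `input` parameter to make that context available, and `tokenize` now borrows its input. Below is a minimal, hypothetical sketch of that pattern; `ContextError` and its methods are stand-ins for illustration only, not the real abortable_parser 0.2.0 API, which this diff only shows indirectly through its call sites.

    // Hypothetical sketch of context-carrying errors; NOT the abortable_parser API.
    use std::fmt::Debug;

    #[derive(Debug)]
    struct ContextError<I: Debug> {
        msg: String,
        context: Box<I>,                     // iterator cloned at the failure site
        cause: Option<Box<ContextError<I>>>, // optional wrapped cause, like Error::caused_by
    }

    impl<I: Debug> ContextError<I> {
        fn new<S: Into<String>>(msg: S, context: Box<I>) -> Self {
            ContextError { msg: msg.into(), context, cause: None }
        }

        fn caused_by<S: Into<String>>(msg: S, cause: Box<ContextError<I>>, context: Box<I>) -> Self {
            ContextError { msg: msg.into(), context, cause: Some(cause) }
        }
    }

    fn main() {
        let tokens = ["1", "x", ";"];
        let rest = tokens.iter(); // stand-in for SliceIter<'a, Token>

        // Before this commit: Error::new(format!("Not an integer! {}", pref), &0)
        // After:              Error::new(format!("Not an integer! {}", pref), Box::new(input.clone()))
        let inner = ContextError::new("Not an integer! x", Box::new(rest.clone()));
        let outer = ContextError::caused_by("Invalid Macro syntax", Box::new(inner), Box::new(rest));
        println!("{}: at {:?}", outer.msg, outer.context);
    }

The payoff shows up at the top level: instead of wrapping a boxed cause with a synthetic position, `parse` and `tokenize` can now format the inner error (which already knows where it happened) directly into the message.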

Cargo.lock generated
View File

@@ -1,6 +1,6 @@
 [[package]]
 name = "abortable_parser"
-version = "0.1.0"
+version = "0.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
@@ -228,7 +228,7 @@ dependencies = [
 name = "ucg"
 version = "0.2.0"
 dependencies = [
- "abortable_parser 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "abortable_parser 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "bencher 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
  "clap 2.26.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "cpuprofiler 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -285,7 +285,7 @@ dependencies = [
 ]
 
 [metadata]
-"checksum abortable_parser 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a628e31269165eeea62b71b2555c6379d4fbadb3e34656b6e1445b0235247c0d"
+"checksum abortable_parser 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bd280856ce341823c6aa6fddb3bafae236c23223824f47aef3443deb0b8d900c"
 "checksum ansi_term 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "23ac7c30002a5accbf7e8987d0632fa6de155b7c3d39d0067317a391e00a2ef6"
 "checksum atty 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "9a7d5b8723950951411ee34d271d99dddcc2035a16ab25310ea2c8cfd4369652"
 "checksum backtrace 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "346d7644f0b5f9bc73082d3b2236b69a05fd35cce0cfa3724e184e6a5c9e2a2f"

View File

@@ -10,7 +10,7 @@ keywords = ["compiler", "config"]
 license = "Apache-2.0"
 
 [dependencies]
-abortable_parser = "~0.1.0"
+abortable_parser = "~0.2.0"
 clap = "~2.26.0"
 serde_json = "~1.0.9"
 simple-error = "0.1"

View File

@@ -81,7 +81,6 @@ impl Error {
         cause: Box<error::Error>,
         pos: Position,
     ) -> Self {
-        // FIXME(jwall): This should take a real position instead of this fake one.
         let mut e = Self::new(msg, t, pos);
         e.cause = Some(cause);
         return e;

View File

@@ -28,6 +28,7 @@ use error;
 use iter::OffsetStrIter;
 use tokenizer::*;
 
+// TODO(jwall): Rename this to something better.
 type NomResult<'a, O> = Result<SliceIter<'a, Token>, O>;
 
 #[cfg(feature = "tracing")]
@@ -35,7 +36,7 @@ const ENABLE_TRACE: bool = true;
 #[cfg(not(feature = "tracing"))]
 const ENABLE_TRACE: bool = false;
 
-type ParseResult<O> = std::result::Result<O, abortable_parser::Error>;
+type ParseResult<'a, O> = std::result::Result<O, abortable_parser::Error<SliceIter<'a, Token>>>;
 
 macro_rules! trace_nom {
     ($i:expr, $rule:ident!( $($args:tt)* )) => {
@@ -94,7 +95,10 @@ make_fn!(
 );
 
 // Helper function to make the return types work for down below.
-fn triple_to_number(v: (Option<Token>, Option<Token>, Option<Token>)) -> ParseResult<Value> {
+fn triple_to_number<'a>(
+    input: SliceIter<'a, Token>,
+    v: (Option<Token>, Option<Token>, Option<Token>),
+) -> ParseResult<'a, Value> {
     let (pref, mut pref_pos) = match v.0 {
         None => ("", Position::new(0, 0, 0)),
         Some(ref bs) => (bs.fragment.borrow(), bs.pos.clone()),
@@ -106,7 +110,10 @@ fn triple_to_number(v: (Option<Token>, Option<Token>, Option<Token>)) -> ParseResult<Value> {
         let i = match FromStr::from_str(pref) {
             Ok(i) => i,
             Err(_) => {
-                return Err(Error::new(format!("Not an integer! {}", pref), &0));
+                return Err(Error::new(
+                    format!("Not an integer! {}", pref),
+                    Box::new(input.clone()),
+                ));
             }
         };
         return Ok(Value::Int(value_node!(i, pref_pos)));
@@ -116,9 +123,9 @@ fn triple_to_number(v: (Option<Token>, Option<Token>, Option<Token>)) -> ParseResult<Value> {
         pref_pos = v.1.unwrap().pos;
     }
 
-    let (suf, pos) = match v.2 {
-        None => ("".to_string(), Position::new(0, 0, 0)),
-        Some(bs) => (bs.fragment, bs.pos),
+    let suf = match v.2 {
+        None => "".to_string(),
+        Some(bs) => bs.fragment,
     };
 
     let to_parse = pref.to_string() + "." + &suf;
@@ -127,7 +134,7 @@ fn triple_to_number(v: (Option<Token>, Option<Token>, Option<Token>)) -> ParseResult<Value> {
         Err(_) => {
             return Err(Error::new(
                 format!("Not a float! {}", to_parse),
-                &pos.offset,
+                Box::new(input.clone()),
             ));
         }
     };
@@ -212,7 +219,6 @@ macro_rules! alt_peek {
     // This is our default termination case.
     // If there is no fallback then we return an Error.
     (__inner $i:expr, __end) => {
-        // FIXME(jwall): Should we make this a compile error instead?
         compile_error!("alt_peek! requirs a fallback case");
     };
 
@@ -270,10 +276,13 @@ fn number(input: SliceIter<Token>) -> Result<SliceIter<Token>, Value> {
         Result::Abort(e) => Result::Abort(e),
         Result::Fail(e) => Result::Fail(e),
         Result::Incomplete(offset) => Result::Incomplete(offset),
-        Result::Complete(rest, triple) => match triple_to_number(triple) {
-            Ok(val) => Result::Complete(rest, val),
-            Err(e) => Result::Fail(e),
-        },
+        Result::Complete(rest, triple) => {
+            let num = triple_to_number(rest.clone(), triple);
+            match num {
+                Ok(val) => Result::Complete(rest, val),
+                Err(e) => Result::Fail(e),
+            }
+        }
     }
 }
 // trace_macros!(false);
@@ -415,7 +424,7 @@ fn symbol_or_expression(input: SliceIter<Token>) -> NomResult<Expression> {
         } else {
             return Result::Fail(Error::new(
                 "Expected (.) but no dot found".to_string(),
-                &rest,
+                Box::new(rest.clone()),
             ));
         }
     }
@@ -425,7 +434,7 @@ fn symbol_or_expression(input: SliceIter<Token>) -> NomResult<Expression> {
         } else {
             return Result::Fail(Error::new(
                 "Expected (.) but no dot found".to_string(),
-                &rest,
+                Box::new(rest.clone()),
             ));
         }
     }
@@ -471,7 +480,7 @@ fn selector_list(input: SliceIter<Token>) -> NomResult<SelectorList> {
         if list.is_empty() {
             return Result::Fail(Error::new(
                 "(.) with no selector fields after".to_string(),
-                &rest,
+                Box::new(rest.clone()),
             ));
         } else {
             (rest, Some(list))
@@ -514,7 +523,12 @@ make_fn!(
     )
 );
 
-fn tuple_to_macro(pos: Position, vals: Option<Vec<Value>>, val: Value) -> ParseResult<Expression> {
+fn tuple_to_macro<'a>(
+    input: SliceIter<'a, Token>,
+    pos: Position,
+    vals: Option<Vec<Value>>,
+    val: Value,
+) -> ParseResult<'a, Expression> {
     let mut default_args = match vals {
         None => Vec::new(),
         Some(vals) => vals,
@@ -533,7 +547,7 @@ fn tuple_to_macro(pos: Position, vals: Option<Vec<Value>>, val: Value) -> ParseResult<Expression> {
         })),
         val => Err(Error::new(
             format!("Expected Tuple Got {:?}", val),
-            &val.pos().offset,
+            Box::new(input.clone()),
         )),
     }
 }
@@ -558,34 +572,42 @@ fn macro_expression(input: SliceIter<Token>) -> Result<SliceIter<Token>, Expression> {
         Result::Abort(e) => Result::Abort(e),
         Result::Fail(e) => Result::Fail(e),
         Result::Incomplete(offset) => Result::Incomplete(offset),
-        Result::Complete(rest, (pos, arglist, map)) => match tuple_to_macro(pos, arglist, map) {
-            Ok(expr) => Result::Complete(rest, expr),
-            Err(e) => Result::Fail(Error::caused_by("Invalid Macro syntax", &rest, Box::new(e))),
-        },
+        Result::Complete(rest, (pos, arglist, map)) => {
+            match tuple_to_macro(rest.clone(), pos, arglist, map) {
+                Ok(expr) => Result::Complete(rest, expr),
+                Err(e) => Result::Fail(Error::caused_by(
+                    "Invalid Macro syntax",
+                    Box::new(e),
+                    Box::new(rest.clone()),
+                )),
+            }
+        }
     }
 }
 
-// FIXME(jwall): need to make this into a proper parse function.
-fn tuple_to_select(
-    pos: Position,
+fn tuple_to_select<'a>(
+    input: SliceIter<'a, Token>,
     e1: Expression,
     e2: Expression,
     val: Value,
-) -> ParseResult<Expression> {
+) -> ParseResult<'a, Expression> {
     match val {
         Value::Tuple(v) => Ok(Expression::Select(SelectDef {
             val: Box::new(e1),
             default: Box::new(e2),
             tuple: v.val,
-            pos: pos,
+            pos: (&input).into(),
         })),
-        val => Err(Error::new(format!("Expected Tuple Got {:?}", val), &0)),
+        val => Err(Error::new(
+            format!("Expected Tuple Got {:?}", val),
+            Box::new(input.clone()),
+        )),
     }
 }
 
 fn select_expression(input: SliceIter<Token>) -> Result<SliceIter<Token>, Expression> {
     let parsed = do_each!(input,
-        start => word!("select"),
+        _ => word!("select"),
         val => do_each!(
             expr => trace_nom!(expression),
             _ => punct!(","),
@@ -597,19 +619,19 @@ fn select_expression(input: SliceIter<Token>) -> Result<SliceIter<Token>, Expression> {
             (expr)
         ),
         map => trace_nom!(tuple),
-        (start.pos.clone(), val, default, map)
+        (val, default, map)
     );
     match parsed {
         Result::Abort(e) => Result::Abort(e),
         Result::Fail(e) => Result::Fail(e),
         Result::Incomplete(offset) => Result::Incomplete(offset),
-        Result::Complete(rest, (pos, val, default, map)) => {
-            match tuple_to_select(pos, val, default, map) {
+        Result::Complete(rest, (val, default, map)) => {
+            match tuple_to_select(input.clone(), val, default, map) {
                 Ok(expr) => Result::Complete(rest, expr),
                 Err(e) => Result::Fail(Error::caused_by(
                     "Invalid Select Expression",
-                    &rest,
                     Box::new(e),
+                    Box::new(rest.clone()),
                 )),
             }
         }
@@ -636,17 +658,21 @@ make_fn!(
     )
 );
 
-fn tuple_to_call(pos: Position, val: Value, exprs: Vec<Expression>) -> ParseResult<Expression> {
+fn tuple_to_call<'a>(
+    input: SliceIter<'a, Token>,
+    val: Value,
+    exprs: Vec<Expression>,
+) -> ParseResult<'a, Expression> {
     if let Value::Selector(def) = val {
         Ok(Expression::Call(CallDef {
             macroref: def,
             arglist: exprs,
-            pos: pos,
+            pos: (&input).into(),
         }))
     } else {
         Err(Error::new(
             format!("Expected Selector Got {:?}", val),
-            &val.pos().offset,
+            Box::new(input.clone()),
         ))
     }
 }
@@ -664,30 +690,34 @@ make_fn!(
 );
 
 fn call_expression(input: SliceIter<Token>) -> Result<SliceIter<Token>, Expression> {
-    let parsed = do_each!(input,
+    let parsed = do_each!(input.clone(),
         macroname => trace_nom!(selector_value),
         _ => punct!("("),
         args => separated!(punct!(","), trace_nom!(expression)),
         _ => punct!(")"),
-        (macroname.pos().clone(), macroname, args)
+        (macroname, args)
     );
     match parsed {
         Result::Abort(e) => Result::Abort(e),
         Result::Fail(e) => Result::Fail(e),
         Result::Incomplete(offset) => Result::Incomplete(offset),
-        Result::Complete(rest, (pos, name, args)) => match tuple_to_call(pos, name, args) {
+        Result::Complete(rest, (name, args)) => match tuple_to_call(input.clone(), name, args) {
             Ok(expr) => Result::Complete(rest, expr),
-            Err(e) => Result::Fail(Error::caused_by("Invalid Call Syntax", &rest, Box::new(e))),
+            Err(e) => Result::Fail(Error::caused_by(
+                "Invalid Call Syntax",
+                Box::new(e),
+                Box::new(rest),
+            )),
         },
     }
 }
 
-fn tuple_to_list_op(
-    pos: Position,
+fn tuple_to_list_op<'a>(
+    input: &'a SliceIter<Token>,
     kind: ListOpType,
     macroname: Value,
     list: Expression,
-) -> ParseResult<Expression> {
+) -> ParseResult<'a, Expression> {
     if let Value::Selector(mut def) = macroname {
         // First of all we need to assert that this is a selector of at least
         // two sections.
@@ -701,7 +731,7 @@ fn tuple_to_list_op(
                 }
                 return Err(Error::new(
                     format!("Missing a result field for the macro"),
-                    &def.pos.offset,
+                    Box::new(input.clone()),
                 ));
             }
             &mut Some(ref mut tl) => {
@@ -714,7 +744,7 @@ fn tuple_to_list_op(
                     }
                     return Err(Error::new(
                         format!("Missing a result field for the macro"),
-                        &def.pos.offset,
+                        Box::new(input.clone()),
                     ));
                 }
                 let fname = tl.pop();
@@ -726,7 +756,7 @@ fn tuple_to_list_op(
                     mac: def,
                     field: fieldname,
                     target: Box::new(list),
-                    pos: pos,
+                    pos: input.into(),
                 }));
             }
         if ENABLE_TRACE {
@@ -737,21 +767,21 @@ fn tuple_to_list_op(
     }
     return Err(Error::new(
         format!("Expected a selector but got {}", macroname.type_name()),
-        &pos.offset,
+        Box::new(input.clone()),
    ));
 }
 
 make_fn!(
     list_op_expression<SliceIter<Token>, Expression>,
     do_each!(
-        pos => pos,
+        input => input!(),
         optype => either!(
             do_each!(_ => word!("map"), (ListOpType::Map)),
             do_each!(_ => word!("filter"), (ListOpType::Filter))
         ),
         macroname => trace_nom!(selector_value),
         list => trace_nom!(non_op_expression),
-        (tuple_to_list_op(pos, optype, macroname, list).unwrap())
+        (tuple_to_list_op(&input, optype, macroname, list).unwrap())
     )
 );
@@ -893,7 +923,7 @@ fn statement(i: SliceIter<Token>) -> Result<SliceIter<Token>, Statement> {
 
 /// Parses a LocatedSpan into a list of Statements or an `error::Error`.
 pub fn parse(input: OffsetStrIter) -> std::result::Result<Vec<Statement>, error::Error> {
-    match tokenize(input.clone()) {
+    match tokenize(&input) {
         Ok(tokenized) => {
             let mut out = Vec::new();
             let mut i_ = SliceIter::from(&tokenized);
@@ -907,20 +937,18 @@ pub fn parse(input: OffsetStrIter) -> std::result::Result<Vec<Statement>, error::Error> {
             match statement(i.clone()) {
                 Result::Abort(e) => {
                     let pos: Position = (&i).into();
-                    let err = error::Error::new_with_boxed_cause(
-                        "Statement Parse Error",
+                    let err = error::Error::new(
+                        format!("Statement Parse Error {}", e),
                         error::ErrorType::ParseError,
-                        Box::new(e),
                         pos,
                     );
                     return Err(err);
                 }
                 Result::Fail(e) => {
                     let pos: Position = (&i).into();
-                    let err = error::Error::new_with_boxed_cause(
-                        "Statement Parse Error",
+                    let err = error::Error::new(
+                        format!("Statement Parse Error {}", e),
                         error::ErrorType::ParseError,
-                        Box::new(e),
                         pos,
                     );
                     return Err(err);

View File

@@ -49,7 +49,10 @@ make_fn!(
 fn parse_expression(i: SliceIter<Element>) -> Result<SliceIter<Element>, Expression> {
     let mut i_ = i.clone();
     if eoi(i_.clone()).is_complete() {
-        return Result::Abort(Error::new("Expected Expression found End Of Input", &i_));
+        return Result::Abort(Error::new(
+            "Expected Expression found End Of Input",
+            Box::new(i_),
+        ));
     }
     let el = i_.next();
     if let Some(&Element::Expr(ref expr)) = el {
@@ -60,7 +63,7 @@ fn parse_expression(i: SliceIter<Element>) -> Result<SliceIter<Element>, Expression> {
             "Error while parsing Binary Expression Expected Expression got {:?}",
             el
         ),
-        &i_,
+        Box::new(i_),
     ));
 }
 
@@ -69,7 +72,7 @@ fn parse_sum_operator(i: SliceIter<Element>) -> Result<SliceIter<Element>, Binar
     if eoi(i_.clone()).is_complete() {
         return Result::Fail(Error::new(
             format!("Expected Expression found End Of Input"),
-            &i_,
+            Box::new(i_),
         ));
     }
     let el = i_.next();
@@ -91,7 +94,7 @@ fn parse_sum_operator(i: SliceIter<Element>) -> Result<SliceIter<Element>, Binar
             "Error while parsing Binary Expression Unexpected Operator {:?}",
             el
         ),
-        &i_,
+        Box::new(i_),
     ));
 }
 
@@ -114,7 +117,7 @@ fn parse_product_operator(i: SliceIter<Element>) -> Result<SliceIter<Element>, B
     if eoi(i_.clone()).is_complete() {
         return Result::Fail(Error::new(
             format!("Expected Expression found End Of Input"),
-            &i_,
+            Box::new(i_),
         ));
     }
     let el = i_.next();
@@ -136,7 +139,7 @@ fn parse_product_operator(i: SliceIter<Element>) -> Result<SliceIter<Element>, B
             "Error while parsing Binary Expression Unexpected Operator {:?}",
             el
         ),
-        &i_,
+        Box::new(i_),
     ));
 }
 
@@ -224,7 +227,7 @@ fn parse_compare_operator(i: SliceIter<Element>) -> Result<SliceIter<Element>, C
     if eoi(i_.clone()).is_complete() {
         return Result::Fail(Error::new(
             format!("Expected Expression found End Of Input"),
-            &i_,
+            Box::new(i_),
         ));
     }
     let el = i_.next();
@@ -236,7 +239,7 @@ fn parse_compare_operator(i: SliceIter<Element>) -> Result<SliceIter<Element>, C
             "Error while parsing Binary Expression Unexpected Operator {:?}",
             el
         ),
-        &i,
+        Box::new(i),
     ));
 }
 
@@ -309,24 +312,51 @@ fn parse_operand_list<'a>(i: SliceIter<'a, Token>) -> NomResult<'a, Vec<Element>
 }
 
 /// Parse a binary operator expression.
-pub fn op_expression<'a>(i: SliceIter<'a, Token>) -> NomResult<'a, Expression> {
+pub fn op_expression<'a>(i: SliceIter<'a, Token>) -> Result<SliceIter<Token>, Expression> {
     let preparse = parse_operand_list(i.clone());
     match preparse {
-        Result::Fail(e) => Result::Fail(e),
-        Result::Abort(e) => Result::Abort(e),
+        Result::Fail(e) => {
+            let err = Error::caused_by(
+                "Failed while parsing operator expression",
+                Box::new(e),
+                Box::new(i),
+            );
+            Result::Fail(err)
+        }
+        Result::Abort(e) => {
+            let err = Error::caused_by(
+                "Failed while parsing operator expression",
+                Box::new(e),
+                Box::new(i),
+            );
+            Result::Fail(err)
+        }
         Result::Incomplete(i) => Result::Incomplete(i),
         Result::Complete(rest, oplist) => {
             let mut i_ = SliceIter::new(&oplist);
             let parse_result = either!(
-                i_,
+                i_.clone(),
                 trace_nom!(compare_expression),
                 trace_nom!(math_expression)
            );
            match parse_result {
-                Result::Fail(e) => Result::Fail(e),
-                Result::Abort(e) => Result::Abort(e),
-                Result::Incomplete(i) => Result::Incomplete(i),
+                Result::Fail(_e) => {
+                    // TODO(jwall): It would be good to be able to use caused_by here.
+                    let err = Error::new(
+                        "Failed while parsing operator expression",
+                        Box::new(rest.clone()),
+                    );
+                    Result::Fail(err)
+                }
+                Result::Abort(_e) => {
+                    let err = Error::new(
+                        "Failed while parsing operator expression",
+                        Box::new(rest.clone()),
+                    );
+                    Result::Abort(err)
+                }
+                Result::Incomplete(_) => Result::Incomplete(i.clone()),
                 Result::Complete(_, expr) => Result::Complete(rest.clone(), expr),
             }
         }

View File

@@ -24,7 +24,7 @@ macro_rules! assert_parse {
     };
     ($i:expr, $f:expr, $out:expr) => {{
         let input = OffsetStrIter::new($i);
-        match tokenize(input) {
+        match tokenize(&input) {
             Err(e) => assert!(false, format!("Tokenizer Error: {:?}", e)),
             Ok(val) => match $f(SliceIter::new(val.as_slice())) {
                 Result::Complete(_, result) => assert_eq!(result, $out),
@@ -40,7 +40,7 @@ macro_rules! assert_fail {
     };
     ($i:expr, $f:expr) => {{
         let input = OffsetStrIter::new($i);
-        match tokenize(input) {
+        match tokenize(&input) {
             Err(_) => assert!(true),
             Ok(val) => {
                 let result = $f(SliceIter::new(val.as_slice()));
@@ -56,7 +56,7 @@ macro_rules! assert_abort {
     };
     ($i:expr, $f:expr) => {{
         let input = OffsetStrIter::new($i);
-        match tokenize(input) {
+        match tokenize(&input) {
             Err(_) => assert!(true),
             Ok(val) => {
                 let result = $f(SliceIter::new(val.as_slice()));

View File

@@ -27,12 +27,20 @@ fn is_symbol_char<'a>(i: OffsetStrIter<'a>) -> Result<OffsetStrIter<'a>, u8> {
     let mut _i = i.clone();
     let c = match _i.next() {
         Some(c) => *c,
-        None => return Result::Fail(Error::new("Unexpected End of Input".to_string(), &_i)),
+        None => {
+            return Result::Fail(Error::new(
+                "Unexpected End of Input".to_string(),
+                Box::new(_i.clone()),
+            ))
+        }
     };
     if (c as char).is_ascii_alphanumeric() || c == b'-' || c == b'_' {
         Result::Complete(_i, c)
     } else {
-        Result::Fail(Error::new("Not a symbol character".to_string(), &_i))
+        Result::Fail(Error::new(
+            "Not a symbol character".to_string(),
+            Box::new(_i.clone()),
+        ))
     }
 }
@@ -60,7 +68,7 @@ fn escapequoted<'a>(input: OffsetStrIter<'a>) -> Result<OffsetStrIter<'a>, String> {
             escape = false; // reset our escaping sentinel
         }
     }
-    return Result::Incomplete(_input.get_offset());
+    return Result::Incomplete(_input.clone());
 }
 
 make_fn!(strtok<OffsetStrIter, Token>,
@@ -136,27 +144,27 @@ make_fn!(booleantok<OffsetStrIter, Token>,
 macro_rules! do_text_token_tok {
     ($i:expr, $type:expr, $text_token:expr, WS) => {
         do_each!($i,
             span => input!(),
             frag => text_token!($text_token),
             _ => either!(whitespace, comment),
             (Token {
                 typ: $type,
                 pos: Position::from(&span),
                 fragment: frag.to_string(),
             })
         )
     };
 
     ($i:expr, $type:expr, $text_token:expr) => {
         do_each!($i,
             span => input!(),
             frag => text_token!($text_token),
             (Token {
                 typ: $type,
                 pos: Position::from(&span),
                 fragment: frag.to_string(),
             })
         )
     };
 }
@@ -304,11 +312,14 @@ fn comment(input: OffsetStrIter) -> Result<OffsetStrIter, Token> {
                 }
                 // If we didn't find a new line then we just grab everything.
                 _ => {
-                    return Result::Abort(Error::new("Unparsable comment".to_string(), &rest));
+                    return Result::Abort(Error::new(
+                        "Unparsable comment".to_string(),
+                        Box::new(rest.clone()),
+                    ));
                 }
             }
         }
-        Result::Incomplete(offset) => return Result::Incomplete(offset),
+        Result::Incomplete(ctx) => return Result::Incomplete(ctx),
         Result::Fail(e) => return Result::Fail(e),
         Result::Abort(e) => return Result::Abort(e),
     }
@@ -339,8 +350,9 @@ make_fn!(end_of_input<OffsetStrIter, Token>,
     )
 );
 
-make_fn!(token<OffsetStrIter, Token>,
+fn token<'a>(input: OffsetStrIter<'a>) -> Result<OffsetStrIter<'a>, Token> {
     either!(
+        input,
         strtok,
         pipequotetok,
         emptytok, // This must come before the barewordtok
@@ -380,11 +392,12 @@ make_fn!(token<OffsetStrIter, Token>,
        filtertok,
        barewordtok,
        whitespace,
-        end_of_input)
-);
+        end_of_input
+    )
+}
 
 /// Consumes an input OffsetStrIter and returns either a Vec<Token> or a error::Error.
-pub fn tokenize(input: OffsetStrIter) -> std::result::Result<Vec<Token>, error::Error> {
+pub fn tokenize(input: &OffsetStrIter) -> std::result::Result<Vec<Token>, error::Error> {
     let mut out = Vec::new();
     let mut i = input.clone();
     loop {
@@ -392,27 +405,24 @@ pub fn tokenize(input: OffsetStrIter) -> std::result::Result<Vec<Token>, error::Error> {
             break;
         }
         let pos: Position = Position::from(&i);
-        // FIXME(jwall): We need to return a error::Error so we have position information.
         match token(i.clone()) {
             Result::Abort(e) => {
-                return Err(error::Error::new_with_boxed_cause(
-                    "Invalid Token encountered",
+                return Err(error::Error::new(
+                    format!("Invalid Token encountered {}", e),
                     error::ErrorType::UnexpectedToken,
-                    Box::new(e),
                     pos,
                 ))
             }
             Result::Fail(e) => {
-                return Err(error::Error::new_with_boxed_cause(
-                    "Invalid Token encountered",
+                return Err(error::Error::new(
+                    format!("Invalid Token encountered {}", e),
                     error::ErrorType::UnexpectedToken,
-                    Box::new(e),
                     pos,
                 ))
             }
             Result::Incomplete(_offset) => {
                 return Err(error::Error::new(
-                    "Invalid Token encountered",
+                    "Incomplete Token encountered",
                     error::ErrorType::IncompleteParsing,
                     pos,
                 ))
@@ -439,7 +449,7 @@ pub fn tokenize(input: OffsetStrIter) -> std::result::Result<Vec<Token>, error::Error> {
 /// Clones a token.
 ///
 /// This is necessary to allow the match_type and match_token macros to work.
-pub fn token_clone(t: &Token) -> std::result::Result<Token, Error> {
+pub fn token_clone(t: &Token) -> std::result::Result<Token, Error<SliceIter<Token>>> {
     Ok(t.clone())
 }
 
@@ -517,7 +527,7 @@ macro_rules! match_type {
         let mut _i = $i.clone();
         if eoi(_i.clone()).is_complete() {
-            Result::Fail(Error::new(format!("End of Input! {}", $msg), &$i))
+            Result::Fail(Error::new(format!("End of Input! {}", $msg), Box::new(_i)))
         } else {
             match _i.next() {
                 Some(tok) => {
@@ -525,14 +535,14 @@ macro_rules! match_type {
                     match $h(tok) {
                         std::result::Result::Ok(v) => Result::Complete(_i.clone(), v),
                         std::result::Result::Err(e) => {
-                            Result::Fail(Error::caused_by($msg, &_i, Box::new(e)))
+                            Result::Fail(Error::caused_by($msg, Box::new(e), Box::new(_i)))
                        }
                    }
                 } else {
-                    Result::Fail(Error::new($msg.to_string(), &$i))
+                    Result::Fail(Error::new($msg.to_string(), Box::new($i)))
                 }
             }
-            None => Result::Fail(Error::new($msg.to_string(), &$i)),
+            None => Result::Fail(Error::new($msg.to_string(), Box::new($i))),
         }
     }
 }};
@@ -575,17 +585,17 @@ macro_rules! match_token {
                 match $h(tok) {
                     std::result::Result::Ok(v) => Result::Complete(i_.clone(), v),
                     std::result::Result::Err(e) => {
-                        Result::Fail(Error::caused_by($msg, &i_, Box::new(e)))
+                        Result::Fail(Error::caused_by($msg, Box::new(e), Box::new(i_)))
                     }
                 }
             } else {
                 Result::Fail(Error::new(
                     format!("Expected {} Instead is ({})", $msg, tok.fragment),
-                    &i_,
+                    Box::new(i_),
                 ))
             }
         } else {
-            Result::Fail(Error::new("Unexpected End Of Input", &i_))
+            Result::Fail(Error::new("Unexpected End Of Input", Box::new(i_)))
         }
     }};
 }

View File

@@ -101,7 +101,8 @@ fn test_string_with_escaping() {
 
 #[test]
 fn test_tokenize_bareword_with_dash() {
-    let result = tokenize(OffsetStrIter::new("foo-bar "));
+    let input = OffsetStrIter::new("foo-bar ");
+    let result = tokenize(&input);
     assert!(result.is_ok(), format!("result {:?} is not ok", result));
     if let Ok(toks) = result {
         assert_eq!(toks.len(), 2);
@@ -165,10 +166,11 @@ fn test_lteqtok() {
 
 #[test]
 fn test_tokenize_one_of_each() {
-    let result = tokenize(OffsetStrIter::new(
+    let input = OffsetStrIter::new(
         "map out filter assert let import macro select as => [ ] { } ; = % / * \
          + - . ( ) , 1 . foo \"bar\" // comment\n ; true false == < > <= >= !=",
-    ));
+    );
+    let result = tokenize(&input);
     assert!(result.is_ok(), format!("result {:?} is not ok", result));
     let v = result.unwrap();
     for (i, t) in v.iter().enumerate() {
@@ -180,7 +182,8 @@ fn test_tokenize_one_of_each() {
 
 #[test]
 fn test_parse_has_end() {
-    let result = tokenize(OffsetStrIter::new("foo"));
+    let input = OffsetStrIter::new("foo");
+    let result = tokenize(&input);
     assert!(result.is_ok());
     let v = result.unwrap();
     assert_eq!(v.len(), 2);