Mirror of https://github.com/zaphar/ucg.git (synced 2025-07-21 18:10:42 -04:00)
FEATURE: report better stacktraces for parsing.
Parent: 24b97c1037
Commit: e4c80b19f5
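The improved traces come from the StackPrinter type added to src/error.rs below: it walks the chain of abortable_parser errors and prints each frame's message with its line and column, indenting nested causes under a "Caused by:" line. As a rough illustration only (the outer "Statement Parse Error" message is taken from this diff, but the inner message and the positions are hypothetical), feeding the new example_errors/bad_file.ucg ("let x =") to the parser would report something shaped like:

    Statement Parse Error: line: 1, column: 8
    Caused by: 
    	Expected Expression: line: 1, column: 8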
Cargo.lock (generated, 6 changes)
@@ -1,6 +1,6 @@
 [[package]]
 name = "abortable_parser"
-version = "0.2.0"
+version = "0.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
@@ -228,7 +228,7 @@ dependencies = [
 name = "ucg"
 version = "0.2.0"
 dependencies = [
- "abortable_parser 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "abortable_parser 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "bencher 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
 "clap 2.26.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "cpuprofiler 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -285,7 +285,7 @@ dependencies = [
 ]
 
 [metadata]
-"checksum abortable_parser 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bd280856ce341823c6aa6fddb3bafae236c23223824f47aef3443deb0b8d900c"
+"checksum abortable_parser 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "09cdf5378b5e4a079fa886e621519fcb2502d9cb008d3f76b92f61f3890d5906"
 "checksum ansi_term 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "23ac7c30002a5accbf7e8987d0632fa6de155b7c3d39d0067317a391e00a2ef6"
 "checksum atty 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "9a7d5b8723950951411ee34d271d99dddcc2035a16ab25310ea2c8cfd4369652"
 "checksum backtrace 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "346d7644f0b5f9bc73082d3b2236b69a05fd35cce0cfa3724e184e6a5c9e2a2f"
Cargo.toml
@@ -10,7 +10,7 @@ keywords = ["compiler", "config"]
 license = "Apache-2.0"
 
 [dependencies]
-abortable_parser = "~0.2.0"
+abortable_parser = "0.2.1"
 clap = "~2.26.0"
 serde_json = "~1.0.9"
 simple-error = "0.1"
example_errors/bad_file.ucg (new file, 1 line)
@@ -0,0 +1 @@
+let x =
@@ -1,9 +0,0 @@
----
-db_conn1: "db1.prod.net:3306/testdb"
-db_conn2: "db2.prod.net:3306/testdb"
-tmpldir: "./templates"
-prefix:
-  foo: bar
-l:
-  - foo
-  - bar
@@ -25,6 +25,8 @@ use std::fmt;
 use std::hash::Hash;
 use std::hash::Hasher;
 
+use abortable_parser;
+
 macro_rules! enum_type_equality {
     ( $slf:ident, $r:expr, $( $l:pat ),* ) => {
         match $slf {
@@ -110,6 +112,15 @@ impl Token {
     }
 }
 
+impl abortable_parser::Positioned for Token {
+    fn line(&self) -> usize {
+        self.pos.line
+    }
+    fn column(&self) -> usize {
+        self.pos.column
+    }
+}
+
 impl Borrow<str> for Token {
     fn borrow(&self) -> &str {
         &self.fragment
@@ -119,7 +130,7 @@ impl Borrow<str> for Token {
 /// Helper macro for making a Positioned Value.
 macro_rules! value_node {
     ($v:expr, $p:expr) => {
-        Positioned::new_with_pos($v, $p)
+        PositionedItem::new_with_pos($v, $p)
     };
 }
 
@@ -159,11 +170,14 @@ macro_rules! make_tok {
 #[allow(unused_macros)]
 macro_rules! make_expr {
     ($e:expr, $i:expr) => {
-        Expression::Simple(Value::Symbol(Positioned::new_with_pos($e.to_string(), $i)))
+        Expression::Simple(Value::Symbol(PositionedItem::new_with_pos(
+            $e.to_string(),
+            $i,
+        )))
     };
 
     ($e:expr => int, $i:expr) => {
-        Expression::Simple(Value::Int(Positioned::new_with_pos($e, $i)))
+        Expression::Simple(Value::Int(PositionedItem::new_with_pos($e, $i)))
     };
 }
 
@@ -323,13 +337,13 @@ impl SelectorDef {
 pub enum Value {
     // Constant Values
     Empty(Position),
-    Boolean(Positioned<bool>),
-    Int(Positioned<i64>),
-    Float(Positioned<f64>),
-    Str(Positioned<String>),
-    Symbol(Positioned<String>),
+    Boolean(PositionedItem<bool>),
+    Int(PositionedItem<i64>),
+    Float(PositionedItem<f64>),
+    Str(PositionedItem<String>),
+    Symbol(PositionedItem<String>),
     // Complex Values
-    Tuple(Positioned<FieldList>),
+    Tuple(PositionedItem<FieldList>),
     List(ListDef),
     Selector(SelectorDef),
 }
@@ -434,18 +448,18 @@ pub struct SelectDef {
 
 /// Adds position information to any type `T`.
 #[derive(Debug, Clone)]
-pub struct Positioned<T> {
+pub struct PositionedItem<T> {
     pub pos: Position,
     pub val: T,
 }
 
-impl<T: std::fmt::Display> std::fmt::Display for Positioned<T> {
+impl<T: std::fmt::Display> std::fmt::Display for PositionedItem<T> {
     fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
         write!(f, "{}", self.val)
     }
 }
 
-impl<T> Positioned<T> {
+impl<T> PositionedItem<T> {
     /// Constructs a new Positioned<T> with a value, line, and column information.
     pub fn new<P: Into<Position>>(v: T, p: P) -> Self {
         Self::new_with_pos(v, p.into())
@@ -453,48 +467,48 @@ impl<T> Positioned<T> {
 
     /// Constructs a new Positioned<T> with a value and a Position.
     pub fn new_with_pos(v: T, pos: Position) -> Self {
-        Positioned { pos: pos, val: v }
+        PositionedItem { pos: pos, val: v }
     }
 }
 
-impl<T: PartialEq> PartialEq for Positioned<T> {
+impl<T: PartialEq> PartialEq for PositionedItem<T> {
     fn eq(&self, other: &Self) -> bool {
         self.val == other.val
     }
 }
 
-impl<T: Eq> Eq for Positioned<T> {}
+impl<T: Eq> Eq for PositionedItem<T> {}
 
-impl<T: Ord> Ord for Positioned<T> {
+impl<T: Ord> Ord for PositionedItem<T> {
     fn cmp(&self, other: &Self) -> Ordering {
         self.val.cmp(&other.val)
     }
 }
 
-impl<T: PartialOrd> PartialOrd for Positioned<T> {
+impl<T: PartialOrd> PartialOrd for PositionedItem<T> {
     fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
         self.val.partial_cmp(&other.val)
     }
 }
 
-impl<T: Hash> Hash for Positioned<T> {
+impl<T: Hash> Hash for PositionedItem<T> {
     fn hash<H: Hasher>(&self, state: &mut H) {
         self.val.hash(state);
     }
 }
 
-impl<'a> From<&'a Token> for Positioned<String> {
-    fn from(t: &'a Token) -> Positioned<String> {
-        Positioned {
+impl<'a> From<&'a Token> for PositionedItem<String> {
+    fn from(t: &'a Token) -> PositionedItem<String> {
+        PositionedItem {
             pos: t.pos.clone(),
             val: t.fragment.to_string(),
         }
     }
 }
 
-impl<'a> From<&'a Positioned<String>> for Positioned<String> {
-    fn from(t: &Positioned<String>) -> Positioned<String> {
-        Positioned {
+impl<'a> From<&'a PositionedItem<String>> for PositionedItem<String> {
+    fn from(t: &PositionedItem<String>) -> PositionedItem<String> {
+        PositionedItem {
             pos: t.pos.clone(),
             val: t.val.clone(),
         }
     }
@@ -508,7 +522,7 @@ impl<'a> From<&'a Positioned<String>> for Positioned<String> {
 /// any values except what is defined in their arguments.
 #[derive(PartialEq, Debug, Clone)]
 pub struct MacroDef {
-    pub argdefs: Vec<Positioned<String>>,
+    pub argdefs: Vec<PositionedItem<String>>,
     pub fields: FieldList,
     pub pos: Position,
 }
@@ -18,7 +18,7 @@ pub enum Val {
     Float(f64),
     Str(String),
     List(Vec<Rc<Val>>),
-    Tuple(Vec<(Positioned<String>, Rc<Val>)>),
+    Tuple(Vec<(PositionedItem<String>, Rc<Val>)>),
     Macro(MacroDef),
 }
 
@@ -114,7 +114,7 @@ impl Val {
     }
 
     /// Returns the fields if this Val is a tuple. None otherwise.
-    pub fn get_fields(&self) -> Option<&Vec<(Positioned<String>, Rc<Val>)>> {
+    pub fn get_fields(&self) -> Option<&Vec<(PositionedItem<String>, Rc<Val>)>> {
         if let &Val::Tuple(ref fs) = self {
             Some(fs)
         } else {
@@ -44,7 +44,7 @@ impl MacroDef {
         cache: Rc<RefCell<assets::Cache>>,
         env: Rc<Val>,
         mut args: Vec<Rc<Val>>,
-    ) -> Result<Vec<(Positioned<String>, Rc<Val>)>, Box<Error>> {
+    ) -> Result<Vec<(PositionedItem<String>, Rc<Val>)>, Box<Error>> {
         // Error conditions. If the args don't match the length and types of the argdefs then this is
         // macro call error.
         if args.len() > self.argdefs.len() {
@@ -61,12 +61,12 @@ impl MacroDef {
         // If the expressions reference Symbols not defined in the MacroDef that is also an error.
         // TODO(jwall): We should probably enforce that the Expression Symbols must be in argdefs rules
         // at Macro definition time not evaluation time.
-        let mut scope = HashMap::<Positioned<String>, Rc<Val>>::new();
+        let mut scope = HashMap::<PositionedItem<String>, Rc<Val>>::new();
         for (i, arg) in args.drain(0..).enumerate() {
             scope.entry(self.argdefs[i].clone()).or_insert(arg.clone());
         }
         let b = Builder::new_with_env_and_scope(root, cache, scope, env);
-        let mut result: Vec<(Positioned<String>, Rc<Val>)> = Vec::new();
+        let mut result: Vec<(PositionedItem<String>, Rc<Val>)> = Vec::new();
         for &(ref key, ref expr) in self.fields.iter() {
             // We clone the expressions here because this macro may be consumed
             // multiple times in the future.
@@ -81,7 +81,7 @@ impl MacroDef {
 type BuildResult = Result<(), Box<Error>>;
 
 /// Defines a set of values in a parsed file.
-type ValueMap = HashMap<Positioned<String>, Rc<Val>>;
+type ValueMap = HashMap<PositionedItem<String>, Rc<Val>>;
 
 /// AssertCollector collects the results of assertions in the UCG AST.
 pub struct AssertCollector {
@@ -136,7 +136,7 @@ macro_rules! eval_binary_expr {
 impl<'a> Builder<'a> {
     // TOOD(jwall): This needs some unit tests.
     fn tuple_to_val(&self, fields: &Vec<(Token, Expression)>) -> Result<Rc<Val>, Box<Error>> {
-        let mut new_fields = Vec::<(Positioned<String>, Rc<Val>)>::new();
+        let mut new_fields = Vec::<(PositionedItem<String>, Rc<Val>)>::new();
         for &(ref name, ref expr) in fields.iter() {
             let val = try!(self.eval_expr(expr));
             new_fields.push((name.into(), val));
@@ -190,10 +190,10 @@ impl<'a> Builder<'a> {
         cache: Rc<RefCell<assets::Cache>>,
         scope: ValueMap,
     ) -> Self {
-        let env_vars: Vec<(Positioned<String>, Rc<Val>)> = env::vars()
+        let env_vars: Vec<(PositionedItem<String>, Rc<Val>)> = env::vars()
             .map(|t| {
                 (
-                    Positioned::new(t.0, Position::new(0, 0, 0)),
+                    PositionedItem::new(t.0, Position::new(0, 0, 0)),
                     Rc::new(t.1.into()),
                 )
             }).collect();
@@ -225,7 +225,7 @@ impl<'a> Builder<'a> {
 
     /// Returns a Val by name from previously built UCG.
     pub fn get_out_by_name(&self, name: &str) -> Option<Rc<Val>> {
-        let key = Positioned {
+        let key = PositionedItem {
             pos: Position::new(0, 0, 0),
             val: name.to_string(),
         };
@@ -249,8 +249,7 @@ impl<'a> Builder<'a> {
     }
 
     fn eval_span(&mut self, input: OffsetStrIter) -> Result<Rc<Val>, Box<Error>> {
-        // TODO(jwall): This should really return a better error.
-        match parse(input) {
+        match parse(input.clone()) {
             Ok(stmts) => {
                 //panic!("Successfully parsed {}", input);
                 let mut out: Option<Rc<Val>> = None;
@@ -262,13 +261,10 @@ impl<'a> Builder<'a> {
                     Some(val) => Ok(val),
                 }
             }
-            Err(err) => Err(Box::new(error::Error::new_with_cause(
-                format!(
-                    "Error while parsing file: {}",
-                    self.curr_file.unwrap_or("<eval>")
-                ),
+            Err(err) => Err(Box::new(error::Error::new(
+                format!("{}", err,),
                 error::ErrorType::ParseError,
-                err,
+                (&input).into(),
             ))),
         }
     }
@@ -301,7 +297,8 @@ impl<'a> Builder<'a> {
                 let mut b = Self::new(normalized.clone(), self.assets.clone());
                 let filepath = normalized.to_str().unwrap().clone();
                 try!(b.build_file(filepath));
-                let fields: Vec<(Positioned<String>, Rc<Val>)> = b.build_output.drain().collect();
+                let fields: Vec<(PositionedItem<String>, Rc<Val>)> =
+                    b.build_output.drain().collect();
                 Rc::new(Val::Tuple(fields))
             }
         };
@@ -366,7 +363,7 @@ impl<'a> Builder<'a> {
         }
     }
 
-    fn lookup_sym(&self, sym: &Positioned<String>) -> Option<Rc<Val>> {
+    fn lookup_sym(&self, sym: &PositionedItem<String>) -> Option<Rc<Val>> {
         if &sym.val == "env" {
             return Some(self.env.clone());
         }
@@ -376,7 +373,10 @@ impl<'a> Builder<'a> {
         None
     }
 
-    fn find_in_fieldlist(target: &str, fs: &Vec<(Positioned<String>, Rc<Val>)>) -> Option<Rc<Val>> {
+    fn find_in_fieldlist(
+        target: &str,
+        fs: &Vec<(PositionedItem<String>, Rc<Val>)>,
+    ) -> Option<Rc<Val>> {
         for (key, val) in fs.iter().cloned() {
             if target == &key.val {
                 return Some(val.clone());
@@ -390,7 +390,7 @@ impl<'a> Builder<'a> {
         stack: &mut VecDeque<Rc<Val>>,
         sl: &SelectorList,
         next: (&Position, &str),
-        fs: &Vec<(Positioned<String>, Rc<Val>)>,
+        fs: &Vec<(PositionedItem<String>, Rc<Val>)>,
     ) -> Result<(), Box<Error>> {
         if let Some(vv) = Self::find_in_fieldlist(next.1, fs) {
             stack.push_back(vv.clone());
@@ -781,7 +781,7 @@ impl<'a> Builder<'a> {
     fn eval_copy(&self, def: &CopyDef) -> Result<Rc<Val>, Box<Error>> {
         let v = try!(self.lookup_selector(&def.selector.sel));
         if let Val::Tuple(ref src_fields) = *v {
-            let mut m = HashMap::<Positioned<String>, (i32, Rc<Val>)>::new();
+            let mut m = HashMap::<PositionedItem<String>, (i32, Rc<Val>)>::new();
             // loop through fields and build up a hashmap
             let mut count = 0;
             for &(ref key, ref val) in src_fields.iter() {
@@ -830,7 +830,7 @@ impl<'a> Builder<'a> {
                     }
                 };
             }
-            let mut new_fields: Vec<(Positioned<String>, (i32, Rc<Val>))> = m.drain().collect();
+            let mut new_fields: Vec<(PositionedItem<String>, (i32, Rc<Val>))> = m.drain().collect();
             // We want to maintain our order for the fields to make comparing tuples
             // easier in later code. So we sort by the field order before constructing a new tuple.
             new_fields.sort_by(|a, b| {
@@ -16,7 +16,7 @@
 use std::io::Write;
 use std::rc::Rc;
 
-use ast::Positioned;
+use ast::PositionedItem;
 use build::Val;
 use convert::traits::{Converter, Result};
 
@@ -29,7 +29,11 @@ impl EnvConverter {
         EnvConverter {}
     }
 
-    fn convert_tuple(&self, flds: &Vec<(Positioned<String>, Rc<Val>)>, w: &mut Write) -> Result {
+    fn convert_tuple(
+        &self,
+        flds: &Vec<(PositionedItem<String>, Rc<Val>)>,
+        w: &mut Write,
+    ) -> Result {
         for &(ref name, ref val) in flds.iter() {
             if val.is_tuple() {
                 eprintln!("Skipping embedded tuple...");
@@ -17,7 +17,7 @@ use std;
 use std::io::{Cursor, Write};
 use std::rc::Rc;
 
-use ast::{Position, Positioned};
+use ast::{Position, PositionedItem};
 use build::Val;
 use build::Val::Tuple;
 use convert;
@@ -49,7 +49,7 @@ impl ExecConverter {
                 Position::new(0, 0, 0),
             )));
         }
-        let mut env: Option<&Vec<(Positioned<String>, Rc<Val>)>> = None;
+        let mut env: Option<&Vec<(PositionedItem<String>, Rc<Val>)>> = None;
         let mut command: Option<&str> = None;
         let mut args: Option<&Vec<Rc<Val>>> = None;
         for &(ref name, ref val) in fields.iter() {
@@ -36,7 +36,7 @@ impl JsonConverter {
 
     fn convert_tuple(
         &self,
-        items: &Vec<(ast::Positioned<String>, Rc<Val>)>,
+        items: &Vec<(ast::PositionedItem<String>, Rc<Val>)>,
     ) -> std::io::Result<serde_json::Value> {
         let mut mp = serde_json::Map::new();
         for &(ref k, ref v) in items.iter() {
|
|||||||
|
|
||||||
fn convert_tuple(
|
fn convert_tuple(
|
||||||
&self,
|
&self,
|
||||||
items: &Vec<(ast::Positioned<String>, Rc<Val>)>,
|
items: &Vec<(ast::PositionedItem<String>, Rc<Val>)>,
|
||||||
) -> std::io::Result<serde_yaml::Value> {
|
) -> std::io::Result<serde_yaml::Value> {
|
||||||
let mut mapping = serde_yaml::Mapping::new();
|
let mut mapping = serde_yaml::Mapping::new();
|
||||||
for &(ref k, ref v) in items.iter() {
|
for &(ref k, ref v) in items.iter() {
|
||||||
|
src/error.rs (77 changes)
@@ -15,6 +15,9 @@
 //! Errors for use by the ucg compiler.
 use std::error;
 use std::fmt;
+use std::fmt::Debug;
 
+use abortable_parser::Positioned;
+
 use ast::*;
 
@@ -60,7 +63,6 @@ pub struct Error {
     pub err_type: ErrorType,
     pub pos: Position,
     pub msg: String,
-    pub cause: Option<Box<error::Error>>,
     _pkgonly: (),
 }
 
@@ -70,41 +72,21 @@ impl Error {
             err_type: t,
             pos: pos,
             msg: msg.into(),
-            cause: None,
             _pkgonly: (),
         }
     }
 
-    pub fn new_with_boxed_cause<S: Into<String>>(
-        msg: S,
-        t: ErrorType,
-        cause: Box<error::Error>,
-        pos: Position,
-    ) -> Self {
-        let mut e = Self::new(msg, t, pos);
-        e.cause = Some(cause);
-        return e;
-    }
-
-    pub fn new_with_cause<S: Into<String>>(msg: S, t: ErrorType, cause: Self) -> Self {
-        let pos = cause.pos.clone();
-        Self::new_with_boxed_cause(msg, t, Box::new(cause), pos)
-    }
-
     fn render(&self, w: &mut fmt::Formatter) -> fmt::Result {
         try!(write!(
             w,
-            "{}: \"{}\" at line: {} column: {}",
-            self.err_type, self.msg, self.pos.line, self.pos.column
+            "{} at line: {} column: {}\nCaused By:\n\t{} ",
+            self.err_type, self.pos.line, self.pos.column, self.msg
         ));
-        if let Some(ref cause) = self.cause {
-            try!(write!(w, "\n\tCaused By: {}", cause));
-        }
         Ok(())
     }
 }
 
-impl fmt::Debug for Error {
+impl Debug for Error {
     fn fmt(&self, w: &mut fmt::Formatter) -> fmt::Result {
         self.render(w)
     }
@@ -121,3 +103,50 @@ impl error::Error for Error {
         &self.msg
     }
 }
+
+#[derive(Debug)]
+pub struct StackPrinter<C: abortable_parser::Positioned> {
+    pub err: abortable_parser::Error<C>,
+}
+
+impl<C> StackPrinter<C>
+where
+    C: abortable_parser::Positioned,
+{
+    pub fn render(&self, w: &mut fmt::Formatter) -> fmt::Result {
+        let mut curr_err = Some(&self.err);
+        let mut tabstop = "";
+        loop {
+            match curr_err {
+                // our exit condition;
+                None => break,
+                Some(err) => {
+                    let context = err.get_context();
+                    try!(write!(
+                        w,
+                        "{}{}: line: {}, column: {}\n",
+                        tabstop,
+                        err.get_msg(),
+                        context.line(),
+                        context.column(),
+                    ));
+                    tabstop = "\t";
+                    curr_err = err.get_cause();
+                    if curr_err.is_some() {
+                        try!(write!(w, "Caused by: \n"));
+                    }
+                }
+            }
+        }
+        Ok(())
+    }
+}
+
+impl<C> fmt::Display for StackPrinter<C>
+where
+    C: Positioned,
+{
+    fn fmt(&self, w: &mut fmt::Formatter) -> fmt::Result {
+        self.render(w)
+    }
+}
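For reference, the tokenizer and parser hunks below all consume StackPrinter through the same small pattern, condensed here as a sketch (e stands for the underlying abortable_parser error and i for the input iterator at the failure point, exactly as in those hunks):

    // Attach the failing input position to the error, then render the whole
    // cause chain into the String that parse()/tokenize() now return.
    let err = abortable_parser::Error::caused_by(
        "Statement Parse Error",
        Box::new(e),
        Box::new(i.clone()),
    );
    let ctx_err = StackPrinter { err: err };
    return Err(format!("{}", ctx_err));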
@@ -2,9 +2,7 @@ use std::convert::From;
 use std::iter::Iterator;
 
 use abortable_parser::iter::{SliceIter, StrIter};
-use abortable_parser::{
-    InputIter, Offsetable, Peekable, Seekable, Span, SpanRange, TextPositionTracker,
-};
+use abortable_parser::{InputIter, Offsetable, Peekable, Positioned, Seekable, Span, SpanRange};
 
 use ast::{Position, Token};
 
@@ -81,7 +79,7 @@ impl<'a> Peekable<&'a u8> for OffsetStrIter<'a> {
     }
 }
 
-impl<'a> TextPositionTracker for OffsetStrIter<'a> {
+impl<'a> Positioned for OffsetStrIter<'a> {
     fn line(&self) -> usize {
         self.contained.line() + self.line_offset
     }
@@ -24,7 +24,7 @@ use abortable_parser::{Error, Peekable, Result};
 
 use self::precedence::op_expression;
 use ast::*;
-use error;
+use error::StackPrinter;
 use iter::OffsetStrIter;
 use tokenizer::*;
 
@@ -291,7 +291,7 @@ make_fn!(
     boolean_value<SliceIter<Token>, Value>,
     do_each!(
         b => match_type!(BOOLEAN),
-        (Value::Boolean(Positioned{
+        (Value::Boolean(PositionedItem{
             val: b.fragment == "true",
             pos: b.pos,
         }))
@@ -535,7 +535,7 @@ fn tuple_to_macro<'a>(
     };
     let arglist = default_args
         .drain(0..)
-        .map(|s| Positioned {
+        .map(|s| PositionedItem {
            pos: s.pos().clone(),
            val: s.to_string(),
        }).collect();
@@ -922,11 +922,11 @@ fn statement(i: SliceIter<Token>) -> Result<SliceIter<Token>, Statement> {
 //trace_macros!(false);
 
 /// Parses a LocatedSpan into a list of Statements or an `error::Error`.
-pub fn parse(input: OffsetStrIter) -> std::result::Result<Vec<Statement>, error::Error> {
-    match tokenize(&input) {
+pub fn parse<'a>(input: OffsetStrIter<'a>) -> std::result::Result<Vec<Statement>, String> {
+    match tokenize(input.clone()) {
         Ok(tokenized) => {
             let mut out = Vec::new();
-            let mut i_ = SliceIter::from(&tokenized);
+            let mut i_ = SliceIter::new(&tokenized);
             loop {
                 let i = i_.clone();
                 if let Some(tok) = i.peek_next() {
@@ -936,31 +936,30 @@ pub fn parse(input: OffsetStrIter) -> std::result::Result<Vec<Statement>, error:
                 }
                 match statement(i.clone()) {
                     Result::Abort(e) => {
-                        let pos: Position = (&i).into();
-                        let err = error::Error::new(
-                            format!("Statement Parse Error {}", e),
-                            error::ErrorType::ParseError,
-                            pos,
+                        let err = abortable_parser::Error::caused_by(
+                            "Statement Parse Error",
+                            Box::new(e),
+                            Box::new(i.clone()),
                         );
-                        return Err(err);
+                        let ctx_err = StackPrinter { err: err };
+                        return Err(format!("{}", ctx_err));
                     }
                     Result::Fail(e) => {
-                        let pos: Position = (&i).into();
-                        let err = error::Error::new(
-                            format!("Statement Parse Error {}", e),
-                            error::ErrorType::ParseError,
-                            pos,
+                        let err = abortable_parser::Error::caused_by(
+                            "Statement Parse Error",
+                            Box::new(e),
+                            Box::new(i.clone()),
                         );
-                        return Err(err);
+                        let ctx_err = StackPrinter { err: err };
+                        return Err(format!("{}", ctx_err));
                    }
                     Result::Incomplete(_ei) => {
-                        let pos: Position = (&i).into();
-                        let err = error::Error::new(
+                        let err = abortable_parser::Error::new(
                             "Unexpected end of parse input",
-                            error::ErrorType::IncompleteParsing,
-                            pos,
+                            Box::new(i.clone()),
                         );
-                        return Err(err);
+                        let ctx_err = StackPrinter { err: err };
+                        return Err(format!("{}", ctx_err));
                     }
                     Result::Complete(rest, stmt) => {
                         out.push(stmt);
@@ -974,12 +973,7 @@ pub fn parse(input: OffsetStrIter) -> std::result::Result<Vec<Statement>, error:
             return Ok(out);
         }
         Err(e) => {
-            let err = error::Error::new_with_cause(
-                "Tokenization Error",
-                error::ErrorType::UnexpectedToken,
-                e,
-            );
-            return Err(err);
+            return Err(e);
         }
     }
 }
@@ -24,7 +24,7 @@ macro_rules! assert_parse {
     };
     ($i:expr, $f:expr, $out:expr) => {{
         let input = OffsetStrIter::new($i);
-        match tokenize(&input) {
+        match tokenize(input.clone()) {
             Err(e) => assert!(false, format!("Tokenizer Error: {:?}", e)),
             Ok(val) => match $f(SliceIter::new(val.as_slice())) {
                 Result::Complete(_, result) => assert_eq!(result, $out),
@@ -40,7 +40,7 @@ macro_rules! assert_fail {
     };
     ($i:expr, $f:expr) => {{
         let input = OffsetStrIter::new($i);
-        match tokenize(&input) {
+        match tokenize(input.clone()) {
             Err(_) => assert!(true),
             Ok(val) => {
                 let result = $f(SliceIter::new(val.as_slice()));
@@ -56,7 +56,7 @@ macro_rules! assert_abort {
     };
     ($i:expr, $f:expr) => {{
         let input = OffsetStrIter::new($i);
-        match tokenize(&input) {
+        match tokenize(input.clone()) {
             Err(_) => assert!(true),
             Ok(val) => {
                 let result = $f(SliceIter::new(val.as_slice()));
@@ -84,11 +84,11 @@ fn test_null_parsing() {
 fn test_boolean_parsing() {
     assert_parse!(
         boolean_value("true"),
-        Value::Boolean(Positioned::new(true, Position::new(1, 1, 0)))
+        Value::Boolean(PositionedItem::new(true, Position::new(1, 1, 0)))
     );
     assert_parse!(
         boolean_value("false"),
-        Value::Boolean(Positioned::new(false, Position::new(1, 1, 0)))
+        Value::Boolean(PositionedItem::new(false, Position::new(1, 1, 0)))
     );
     assert_fail!(boolean_value("truth"));
 }
@@ -141,7 +141,7 @@ fn test_selector_parsing() {
         Value::Selector(
             make_selector!(Expression::Grouped(Box::new(Expression::Simple(
                 Value::Tuple(value_node!(
-                    vec![(make_tok!("foo", Position::new(1, 3, 2)), Expression::Simple(Value::Int(Positioned::new(1, Position::new(1, 7, 6)))))],
+                    vec![(make_tok!("foo", Position::new(1, 3, 2)), Expression::Simple(Value::Int(PositionedItem::new(1, Position::new(1, 7, 6)))))],
                     Position::new(1, 3, 3)))
             ))) => [ make_tok!("foo", Position::new(1, 11, 10)) ] => Position::new(1, 2, 1))
         )
@@ -20,7 +20,7 @@ use abortable_parser::iter::SliceIter;
 use abortable_parser::{Error, Offsetable, Result};
 
 use ast::*;
-use error;
+use error::StackPrinter;
 use iter::OffsetStrIter;
 
 fn is_symbol_char<'a>(i: OffsetStrIter<'a>) -> Result<OffsetStrIter<'a>, u8> {
@@ -144,27 +144,27 @@ make_fn!(booleantok<OffsetStrIter, Token>,
 macro_rules! do_text_token_tok {
     ($i:expr, $type:expr, $text_token:expr, WS) => {
         do_each!($i,
             span => input!(),
             frag => text_token!($text_token),
             _ => either!(whitespace, comment),
             (Token {
                 typ: $type,
                 pos: Position::from(&span),
                 fragment: frag.to_string(),
             })
         )
     };
 
     ($i:expr, $type:expr, $text_token:expr) => {
         do_each!($i,
             span => input!(),
             frag => text_token!($text_token),
             (Token {
                 typ: $type,
                 pos: Position::from(&span),
                 fragment: frag.to_string(),
             })
         )
     };
 }
 
@@ -397,35 +397,37 @@ fn token<'a>(input: OffsetStrIter<'a>) -> Result<OffsetStrIter<'a>, Token> {
 }
 
 /// Consumes an input OffsetStrIter and returns either a Vec<Token> or a error::Error.
-pub fn tokenize(input: &OffsetStrIter) -> std::result::Result<Vec<Token>, error::Error> {
+pub fn tokenize<'a>(input: OffsetStrIter<'a>) -> std::result::Result<Vec<Token>, String> {
     let mut out = Vec::new();
     let mut i = input.clone();
     loop {
         if let Result::Complete(_, _) = eoi(i.clone()) {
             break;
         }
-        let pos: Position = Position::from(&i);
         match token(i.clone()) {
             Result::Abort(e) => {
-                return Err(error::Error::new(
-                    format!("Invalid Token encountered {}", e),
-                    error::ErrorType::UnexpectedToken,
-                    pos,
-                ))
+                let err = abortable_parser::Error::caused_by(
+                    "Invalid Token encountered",
+                    Box::new(e),
+                    Box::new(i.clone()),
+                );
+                let ctx_err = StackPrinter { err: err };
+                return Err(format!("{}", ctx_err));
             }
             Result::Fail(e) => {
-                return Err(error::Error::new(
-                    format!("Invalid Token encountered {}", e),
-                    error::ErrorType::UnexpectedToken,
-                    pos,
-                ))
+                let err = abortable_parser::Error::caused_by(
+                    "Invalid Token encountered",
+                    Box::new(e),
+                    Box::new(i.clone()),
+                );
+                let ctx_err = StackPrinter { err: err };
+                return Err(format!("{}", ctx_err));
             }
             Result::Incomplete(_offset) => {
-                return Err(error::Error::new(
-                    "Incomplete Token encountered",
-                    error::ErrorType::IncompleteParsing,
-                    pos,
-                ))
+                let err =
+                    abortable_parser::Error::new("Invalid Token encountered", Box::new(i.clone()));
+                let ctx_err = StackPrinter { err: err };
+                return Err(format!("{}", ctx_err));
             }
             Result::Complete(rest, tok) => {
                 i = rest;
@@ -102,7 +102,7 @@ fn test_string_with_escaping() {
 #[test]
 fn test_tokenize_bareword_with_dash() {
     let input = OffsetStrIter::new("foo-bar ");
-    let result = tokenize(&input);
+    let result = tokenize(input.clone());
     assert!(result.is_ok(), format!("result {:?} is not ok", result));
     if let Ok(toks) = result {
         assert_eq!(toks.len(), 2);
@@ -170,7 +170,7 @@ fn test_tokenize_one_of_each() {
         "map out filter assert let import macro select as => [ ] { } ; = % / * \
         + - . ( ) , 1 . foo \"bar\" // comment\n ; true false == < > <= >= !=",
     );
-    let result = tokenize(&input);
+    let result = tokenize(input.clone());
     assert!(result.is_ok(), format!("result {:?} is not ok", result));
     let v = result.unwrap();
     for (i, t) in v.iter().enumerate() {
@@ -183,7 +183,7 @@ fn test_tokenize_one_of_each() {
 #[test]
 fn test_parse_has_end() {
     let input = OffsetStrIter::new("foo");
-    let result = tokenize(&input);
+    let result = tokenize(input.clone());
     assert!(result.is_ok());
     let v = result.unwrap();
     assert_eq!(v.len(), 2);