Merge squash of the no_more_nom branch

commit e4c80b19f5149fb7dba0a9b785e22ff5323b5470
Author: Jeremy Wall <jeremy@marzhillstudios.com>
Date:   Sun Nov 4 09:54:16 2018 -0600

    FEATURE: report better stacktraces for parsing.

commit 24b97c1037d6ddbe21f32e172b1c14cd2cfb9910
Author: Jeremy Wall <jeremy@marzhillstudios.com>
Date:   Wed Oct 17 19:00:29 2018 -0500

    FEATURE: Use context for our error reporting.

commit c22d397545e7575608e34561a274745b28be8c2c
Author: Jeremy Wall <jeremy@marzhillstudios.com>
Date:   Sun Oct 14 14:18:42 2018 -0500

    FIX: Better error reporting.

commit 7f47dc3f38bf6a3e8686c1fa3fd50ef97d4bf3af
Author: Jeremy Wall <jeremy@marzhillstudios.com>
Date:   Sat Oct 13 19:42:22 2018 -0500

    REFACTOR: Pass our offsets everywhere.

commit 91d7ed690bfd9989270ba9fa4f44c70f513d54b9
Author: Jeremy Wall <jeremy@marzhillstudios.com>
Date:   Sun Sep 23 15:08:45 2018 -0500

    REFACTOR: Use abortable_parser.
Committed by Jeremy Wall on 2018-11-05 21:34:12 -06:00
commit 6712fc9bb5 (parent 29aed2c997)
24 changed files with 2333 additions and 2513 deletions
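For orientation before the file-by-file diff: the change that threads through every file below is easiest to see at the parser entry point, where callers now hand parse() an OffsetStrIter from ucglib::iter instead of a nom_locate::LocatedSpan. A minimal before-and-after sketch, lifted from the benchmark diff further down (no API details assumed beyond what that hunk shows):

    extern crate ucglib;

    use ucglib::iter::OffsetStrIter;
    use ucglib::parse::*;

    fn do_parse(input: &str) {
        // Before this branch: parse(nom_locate::LocatedSpan::new(input));
        // After it, the input iterator itself carries line, column, and byte offset.
        parse(OffsetStrIter::new(input));
    }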

Cargo.lock (generated)

@@ -1,3 +1,8 @@
+[[package]]
+name = "abortable_parser"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
 [[package]]
 name = "ansi_term"
 version = "0.9.0"
@@ -131,31 +136,6 @@ name = "linked-hash-map"
 version = "0.5.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
-[[package]]
-name = "memchr"
-version = "1.0.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "nom"
-version = "3.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "memchr 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "nom_locate"
-version = "0.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "memchr 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "nom 3.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
 [[package]]
 name = "redox_syscall"
 version = "0.1.40"
@@ -248,11 +228,10 @@ dependencies = [
 name = "ucg"
 version = "0.2.0"
 dependencies = [
+ "abortable_parser 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "bencher 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
  "clap 2.26.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "cpuprofiler 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "nom 3.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "nom_locate 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "serde_json 1.0.26 (registry+https://github.com/rust-lang/crates.io-index)",
  "serde_yaml 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "simple-error 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -306,6 +285,7 @@ dependencies = [
 ]
 [metadata]
+"checksum abortable_parser 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "09cdf5378b5e4a079fa886e621519fcb2502d9cb008d3f76b92f61f3890d5906"
 "checksum ansi_term 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "23ac7c30002a5accbf7e8987d0632fa6de155b7c3d39d0067317a391e00a2ef6"
 "checksum atty 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "9a7d5b8723950951411ee34d271d99dddcc2035a16ab25310ea2c8cfd4369652"
 "checksum backtrace 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "346d7644f0b5f9bc73082d3b2236b69a05fd35cce0cfa3724e184e6a5c9e2a2f"
@@ -324,9 +304,6 @@ dependencies = [
 "checksum lazy_static 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "76f033c7ad61445c5b347c7382dd1237847eb1bce590fe50365dcb33d546be73"
 "checksum libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)" = "76e3a3ef172f1a0b9a9ff0dd1491ae5e6c948b94479a3021819ba7d860c8645d"
 "checksum linked-hash-map 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "70fb39025bc7cdd76305867c4eccf2f2dcf6e9a57f5b21a93e1c2d86cd03ec9e"
-"checksum memchr 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "148fab2e51b4f1cfc66da2a7c32981d1d3c083a803978268bb11fe4b86925e7a"
-"checksum nom 3.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "05aec50c70fd288702bcd93284a8444607f3292dbdf2a30de5ea5dcdbe72287b"
-"checksum nom_locate 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "49b1c61eff39ab6b91ccedfc62aff196eae066d88355b4fe3e4100c23168f0df"
 "checksum redox_syscall 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)" = "c214e91d3ecf43e9a4e41e578973adeb14b474f2bee858742d127af75a0112b1"
 "checksum redox_termios 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7e891cfe48e9100a70a3b6eb652fef28920c117d366339687bd5576160db0f76"
 "checksum rustc-demangle 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "bcfe5b13211b4d78e5c2cadfebd7769197d95c639c35a50057eb4c05de811395"

Cargo.toml

@@ -9,11 +9,8 @@ readme = "README.md"
 keywords = ["compiler", "config"]
 license = "Apache-2.0"
 
-[dependencies.nom]
-version = "^3.2"
-
 [dependencies]
-nom_locate = "^0.1.1"
+abortable_parser = "0.2.1"
 clap = "~2.26.0"
 serde_json = "~1.0.9"
 simple-error = "0.1"


@@ -9,6 +9,8 @@
 You should be able to ask the compiler to tell you any value or set of values in the
 compiled configuration.
+
+Inspect is probably the correct location for this.
 
 ## Shape equality as a form of type assertion?
 
 # Minor Fixes and Polish


@@ -0,0 +1 @@
+let x =


@@ -4,4 +4,6 @@ assert |2 * (2 + 1) == 6|;
 assert |2 * 2 + 1 > 4|;
 assert |2 * 2 + 1 < 6|;
 assert |2 * 2 + 1 >= 5|;
 assert |2 * 2 + 1 <= 5|;
+assert |2 / 2 == 1|;
+assert |2 - 1 == 1|;


@@ -21,10 +21,11 @@ use std::cmp::PartialEq;
 use std::cmp::PartialOrd;
 use std::collections::HashSet;
 use std::convert::Into;
+use std::fmt;
 use std::hash::Hash;
 use std::hash::Hasher;
-use std::fmt;
+
+use abortable_parser;
 
 macro_rules! enum_type_equality {
     ( $slf:ident, $r:expr, $( $l:pat ),* ) => {
@@ -50,18 +51,26 @@
 pub struct Position {
     pub line: usize,
     pub column: usize,
+    pub offset: usize,
 }
 
 impl Position {
     /// Construct a new Position.
-    pub fn new(line: usize, column: usize) -> Self {
+    pub fn new(line: usize, column: usize, offset: usize) -> Self {
         Position {
             line: line,
             column: column,
+            offset: offset,
         }
     }
 }
 
+impl<'a> From<&'a Position> for Position {
+    fn from(source: &'a Position) -> Self {
+        source.clone()
+    }
+}
+
 /// Defines the types of tokens in UCG syntax.
 #[derive(Debug, PartialEq, Eq, Clone, PartialOrd, Ord, Hash)]
 pub enum TokenType {
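For context on the hunk above: Position now records the byte offset alongside line and column, and the new From<&Position> impl is what lets the P: Into<Position> constructors introduced throughout this diff accept either an owned Position or a borrow. A small usage sketch, written as if it lived inside the ast module (illustrative only, not part of the diff):

    fn position_example() {
        // The third argument (byte offset) is new in this branch.
        let pos = Position::new(1, 2, 1);

        // Token::new is generic over P: Into<Position>, so an owned Position works...
        let owned = Token::new("foo", TokenType::BAREWORD, pos.clone());
        // ...and so does a reference, converted through the From<&'a Position> impl.
        let borrowed = Token::new("foo", TokenType::BAREWORD, &pos);

        assert_eq!(owned.pos.offset, borrowed.pos.offset);
    }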
@@ -89,8 +98,8 @@ pub struct Token {
 impl Token {
     /// Constructs a new Token with a type and line and column information.
-    pub fn new<S: Into<String>>(f: S, typ: TokenType, line: usize, col: usize) -> Self {
-        Self::new_with_pos(f, typ, Position::new(line, col))
+    pub fn new<S: Into<String>, P: Into<Position>>(f: S, typ: TokenType, p: P) -> Self {
+        Self::new_with_pos(f, typ, p.into())
     }
 
     // Constructs a new Token with a type and a Position.
@@ -103,6 +112,15 @@
     }
 }
 
+impl abortable_parser::Positioned for Token {
+    fn line(&self) -> usize {
+        self.pos.line
+    }
+    fn column(&self) -> usize {
+        self.pos.column
+    }
+}
+
 impl Borrow<str> for Token {
     fn borrow(&self) -> &str {
         &self.fragment
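The abortable_parser::Positioned impl above is what lets the new error reporting ask a Token where it came from. A hedged sketch of the kind of helper that bound enables, using only the two trait methods defined here; the helper itself is hypothetical and not part of the crate:

    use abortable_parser::Positioned;

    // Formats a location for any type that implements Positioned, Token included.
    fn locate<P: Positioned>(item: &P) -> String {
        format!("line: {} column: {}", item.line(), item.column())
    }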
@ -112,58 +130,54 @@ impl Borrow<str> for Token {
/// Helper macro for making a Positioned Value. /// Helper macro for making a Positioned Value.
macro_rules! value_node { macro_rules! value_node {
($v:expr, $p:expr) => { ($v:expr, $p:expr) => {
Positioned::new_with_pos($v, $p) PositionedItem::new_with_pos($v, $p)
};
($v:expr, $l:expr, $c:expr) => {
Positioned::new($v, $l, $c)
}; };
} }
/// Helper macro for making a Token. /// Helper macro for making a Token.
#[allow(unused_macros)] #[allow(unused_macros)]
macro_rules! make_tok { macro_rules! make_tok {
(EOF => $l:expr, $c:expr) => { (EOF => $i:expr) => {
Token::new("", TokenType::END, $l, $c) Token::new("", TokenType::END, &$i)
}; };
(WS => $l:expr, $c:expr) => { (WS => $i:expr) => {
Token::new("", TokenType::WS, $l, $c) Token::new("", TokenType::WS, &$i)
}; };
(CMT => $e:expr, $l:expr, $c:expr) => { (CMT => $e:expr, $i:expr) => {
Token::new($e, TokenType::COMMENT, $l, $c) Token::new($e, TokenType::COMMENT, &$i)
}; };
(QUOT => $e:expr, $l:expr, $c:expr) => { (QUOT => $e:expr, $i:expr) => {
Token::new($e, TokenType::QUOTED, $l, $c) Token::new($e, TokenType::QUOTED, &$i)
}; };
(PUNCT => $e:expr, $l:expr, $c:expr) => { (PUNCT => $e:expr, $i:expr) => {
Token::new($e, TokenType::PUNCT, $l, $c) Token::new($e, TokenType::PUNCT, &$i)
}; };
(DIGIT => $e:expr, $l:expr, $c:expr) => { (DIGIT => $e:expr, $i:expr) => {
Token::new($e, TokenType::DIGIT, $l, $c) Token::new($e, TokenType::DIGIT, &$i)
}; };
($e:expr, $l:expr, $c:expr) => { ($e:expr, $i:expr) => {
Token::new($e, TokenType::BAREWORD, $l, $c) Token::new($e, TokenType::BAREWORD, &$i)
}; };
} }
/// Helper macro for making expressions. /// Helper macro for making expressions.
#[allow(unused_macros)] #[allow(unused_macros)]
macro_rules! make_expr { macro_rules! make_expr {
($e:expr) => { ($e:expr, $i:expr) => {
make_expr!($e, 1, 1) Expression::Simple(Value::Symbol(PositionedItem::new_with_pos(
$e.to_string(),
$i,
)))
}; };
($e:expr, $l:expr, $c:expr) => { ($e:expr => int, $i:expr) => {
Expression::Simple(Value::Symbol(Positioned::new($e.to_string(), $l, $c))) Expression::Simple(Value::Int(PositionedItem::new_with_pos($e, $i)))
};
($e:expr => int, $l:expr, $c:expr) => {
Expression::Simple(Value::Int(Positioned::new($e, $l, $c)))
}; };
} }
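The pattern across these macro rewrites is the same: the trailing line and column arguments ($l:expr, $c:expr) collapse into a single $i:expr that only needs to satisfy Into<Position>. Collected here for reference, the call sites in the test diffs below now take this shape (crate-internal macros, shown out of their module purely for illustration):

    // A bareword token at line 1, column 1, byte offset 0.
    let tok = make_tok!("f1", Position::new(1, 1, 0));

    // A symbol expression and a positioned value node follow the same shape.
    let sym = make_expr!("foo", Position::new(1, 1, 0));
    let name = value_node!("foo".to_string(), Position::new(1, 0, 0));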
@ -180,30 +194,26 @@ macro_rules! make_expr {
/// ``` /// ```
#[allow(unused_macros)] #[allow(unused_macros)]
macro_rules! make_selector { macro_rules! make_selector {
( $h:expr ) => { ( $h:expr, $i:expr) => {
make_selector!($h, 1, 0)
};
( $h:expr, $l:expr, $c:expr ) => {
SelectorDef::new( SelectorDef::new(
SelectorList{head: Box::new($h), tail: None}, SelectorList{head: Box::new($h), tail: None},
$l, $c) $i)
}; };
( $h: expr, $list:expr, $l:expr, $c:expr) => { ( $h: expr, $list:expr, $i:expr) => {
SelectorDef::new( SelectorDef::new(
SelectorList{head: Box::new($h), tail: Some($list)}, SelectorList{head: Box::new($h), tail: Some($list)},
$l, $c) $i)
}; };
// Tokens // Tokens
( $h:expr => [ $( $item:expr ),* ] ) => { ( $h:expr => [ $( $item:expr ),* ], $i:expr ) => {
{ {
make_selector!($h => [ $( $item, )* ] => 1, 1) make_selector!($h => [ $( $item, )* ] => $i)
} }
}; };
( $h:expr => [ $( $item:expr ),* ] => $l:expr, $c:expr ) => { ( $h:expr => [ $( $item:expr ),* ] => $i:expr ) => {
{ {
let mut list: Vec<Token> = Vec::new(); let mut list: Vec<Token> = Vec::new();
@ -211,7 +221,7 @@ macro_rules! make_selector {
list.push($item); list.push($item);
)* )*
make_selector!($h, list, $l, $c) make_selector!($h, list, $i)
} }
}; };
@ -223,14 +233,14 @@ macro_rules! make_selector {
let mut list: Vec<Token> = Vec::new(); let mut list: Vec<Token> = Vec::new();
$( $(
list.push(make_tok!($item, 1, col)); list.push(make_tok!($item, Position::new(1, col, col)));
col += $item.len() + 1; col += $item.len() + 1;
)* )*
// Shut up the lint about unused code; // Shut up the lint about unused code;
assert!(col != 0); assert!(col != 0);
make_selector!($h, list, 1, 1) make_selector!($h, list, Position::new(1, 1, 1))
} }
}; };
@ -241,14 +251,14 @@ macro_rules! make_selector {
let mut list: Vec<Token> = Vec::new(); let mut list: Vec<Token> = Vec::new();
$( $(
list.push(make_tok!($item, $l, col)); list.push(make_tok!($item, Position::new($l, col, col)));
col += $item.len() + 1; col += $item.len() + 1;
)* )*
// Shut up the linter about unused code; // Shut up the linter about unused code;
assert!(col != 0); assert!(col != 0);
make_selector!($h, list, $l, $c) make_selector!($h, list, Position::new($l, $c, $c))
} }
}; };
} }
@ -314,9 +324,9 @@ pub struct SelectorDef {
impl SelectorDef { impl SelectorDef {
/// Constructs a new SelectorDef. /// Constructs a new SelectorDef.
pub fn new(sel: SelectorList, line: usize, col: usize) -> Self { pub fn new<P: Into<Position>>(sel: SelectorList, p: P) -> Self {
SelectorDef { SelectorDef {
pos: Position::new(line, col), pos: p.into(),
sel: sel, sel: sel,
} }
} }
@ -327,13 +337,13 @@ impl SelectorDef {
pub enum Value { pub enum Value {
// Constant Values // Constant Values
Empty(Position), Empty(Position),
Boolean(Positioned<bool>), Boolean(PositionedItem<bool>),
Int(Positioned<i64>), Int(PositionedItem<i64>),
Float(Positioned<f64>), Float(PositionedItem<f64>),
Str(Positioned<String>), Str(PositionedItem<String>),
Symbol(Positioned<String>), Symbol(PositionedItem<String>),
// Complex Values // Complex Values
Tuple(Positioned<FieldList>), Tuple(PositionedItem<FieldList>),
List(ListDef), List(ListDef),
Selector(SelectorDef), Selector(SelectorDef),
} }
@ -438,67 +448,67 @@ pub struct SelectDef {
/// Adds position information to any type `T`. /// Adds position information to any type `T`.
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct Positioned<T> { pub struct PositionedItem<T> {
pub pos: Position, pub pos: Position,
pub val: T, pub val: T,
} }
impl<T: std::fmt::Display> std::fmt::Display for Positioned<T> { impl<T: std::fmt::Display> std::fmt::Display for PositionedItem<T> {
fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> { fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
write!(f, "{}", self.val) write!(f, "{}", self.val)
} }
} }
impl<T> Positioned<T> { impl<T> PositionedItem<T> {
/// Constructs a new Positioned<T> with a value, line, and column information. /// Constructs a new Positioned<T> with a value, line, and column information.
pub fn new(v: T, l: usize, c: usize) -> Self { pub fn new<P: Into<Position>>(v: T, p: P) -> Self {
Self::new_with_pos(v, Position::new(l, c)) Self::new_with_pos(v, p.into())
} }
/// Constructs a new Positioned<T> with a value and a Position. /// Constructs a new Positioned<T> with a value and a Position.
pub fn new_with_pos(v: T, pos: Position) -> Self { pub fn new_with_pos(v: T, pos: Position) -> Self {
Positioned { pos: pos, val: v } PositionedItem { pos: pos, val: v }
} }
} }
impl<T: PartialEq> PartialEq for Positioned<T> { impl<T: PartialEq> PartialEq for PositionedItem<T> {
fn eq(&self, other: &Self) -> bool { fn eq(&self, other: &Self) -> bool {
self.val == other.val self.val == other.val
} }
} }
impl<T: Eq> Eq for Positioned<T> {} impl<T: Eq> Eq for PositionedItem<T> {}
impl<T: Ord> Ord for Positioned<T> { impl<T: Ord> Ord for PositionedItem<T> {
fn cmp(&self, other: &Self) -> Ordering { fn cmp(&self, other: &Self) -> Ordering {
self.val.cmp(&other.val) self.val.cmp(&other.val)
} }
} }
impl<T: PartialOrd> PartialOrd for Positioned<T> { impl<T: PartialOrd> PartialOrd for PositionedItem<T> {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> { fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
self.val.partial_cmp(&other.val) self.val.partial_cmp(&other.val)
} }
} }
impl<T: Hash> Hash for Positioned<T> { impl<T: Hash> Hash for PositionedItem<T> {
fn hash<H: Hasher>(&self, state: &mut H) { fn hash<H: Hasher>(&self, state: &mut H) {
self.val.hash(state); self.val.hash(state);
} }
} }
impl<'a> From<&'a Token> for Positioned<String> { impl<'a> From<&'a Token> for PositionedItem<String> {
fn from(t: &'a Token) -> Positioned<String> { fn from(t: &'a Token) -> PositionedItem<String> {
Positioned { PositionedItem {
pos: t.pos.clone(), pos: t.pos.clone(),
val: t.fragment.to_string(), val: t.fragment.to_string(),
} }
} }
} }
impl<'a> From<&'a Positioned<String>> for Positioned<String> { impl<'a> From<&'a PositionedItem<String>> for PositionedItem<String> {
fn from(t: &Positioned<String>) -> Positioned<String> { fn from(t: &PositionedItem<String>) -> PositionedItem<String> {
Positioned { PositionedItem {
pos: t.pos.clone(), pos: t.pos.clone(),
val: t.val.clone(), val: t.val.clone(),
} }
@ -512,7 +522,7 @@ impl<'a> From<&'a Positioned<String>> for Positioned<String> {
/// any values except what is defined in their arguments. /// any values except what is defined in their arguments.
#[derive(PartialEq, Debug, Clone)] #[derive(PartialEq, Debug, Clone)]
pub struct MacroDef { pub struct MacroDef {
pub argdefs: Vec<Positioned<String>>, pub argdefs: Vec<PositionedItem<String>>,
pub fields: FieldList, pub fields: FieldList,
pub pos: Position, pub pos: Position,
} }


@ -17,21 +17,23 @@ use super::*;
#[test] #[test]
pub fn test_macro_validation_happy_path() { pub fn test_macro_validation_happy_path() {
let def = MacroDef { let def = MacroDef {
argdefs: vec![value_node!("foo".to_string(), 1, 0)], argdefs: vec![value_node!("foo".to_string(), Position::new(1, 0, 0))],
fields: vec![( fields: vec![(
make_tok!("f1", 1, 1), make_tok!("f1", Position::new(1, 1, 0)),
Expression::Binary(BinaryOpDef { Expression::Binary(BinaryOpDef {
kind: BinaryExprType::Add, kind: BinaryExprType::Add,
left: Box::new(Expression::Simple(Value::Symbol(value_node!( left: Box::new(Expression::Simple(Value::Symbol(value_node!(
"foo".to_string(), "foo".to_string(),
1, Position::new(1, 1, 0)
1
)))), )))),
right: Box::new(Expression::Simple(Value::Int(value_node!(1, 1, 1)))), right: Box::new(Expression::Simple(Value::Int(value_node!(
pos: Position::new(1, 0), 1,
Position::new(1, 1, 0)
)))),
pos: Position::new(1, 0, 0),
}), }),
)], )],
pos: Position::new(1, 0), pos: Position::new(1, 0, 0),
}; };
assert!(def.validate_symbols().unwrap() == ()); assert!(def.validate_symbols().unwrap() == ());
} }
@ -39,21 +41,23 @@ pub fn test_macro_validation_happy_path() {
#[test] #[test]
pub fn test_macro_validation_fail() { pub fn test_macro_validation_fail() {
let def = MacroDef { let def = MacroDef {
argdefs: vec![value_node!("foo".to_string(), 1, 0)], argdefs: vec![value_node!("foo".to_string(), Position::new(1, 0, 0))],
fields: vec![( fields: vec![(
make_tok!("f1", 1, 1), make_tok!("f1", Position::new(1, 1, 0)),
Expression::Binary(BinaryOpDef { Expression::Binary(BinaryOpDef {
kind: BinaryExprType::Add, kind: BinaryExprType::Add,
left: Box::new(Expression::Simple(Value::Symbol(value_node!( left: Box::new(Expression::Simple(Value::Symbol(value_node!(
"bar".to_string(), "bar".to_string(),
1, Position::new(1, 1, 0)
1
)))), )))),
right: Box::new(Expression::Simple(Value::Int(value_node!(1, 1, 1)))), right: Box::new(Expression::Simple(Value::Int(value_node!(
pos: Position::new(1, 0), 1,
Position::new(1, 1, 0)
)))),
pos: Position::new(1, 0, 0),
}), }),
)], )],
pos: Position::new(1, 0), pos: Position::new(1, 0, 0),
}; };
let mut expected = HashSet::new(); let mut expected = HashSet::new();
expected.insert("bar".to_string()); expected.insert("bar".to_string());
@ -63,20 +67,23 @@ pub fn test_macro_validation_fail() {
#[test] #[test]
pub fn test_macro_validation_selector_happy_path() { pub fn test_macro_validation_selector_happy_path() {
let def = MacroDef { let def = MacroDef {
argdefs: vec![value_node!("foo".to_string(), 1, 0)], argdefs: vec![value_node!("foo".to_string(), Position::new(1, 0, 0))],
fields: vec![( fields: vec![(
make_tok!("f1", 1, 1), make_tok!("f1", Position::new(1, 1, 0)),
Expression::Binary(BinaryOpDef { Expression::Binary(BinaryOpDef {
kind: BinaryExprType::Add, kind: BinaryExprType::Add,
left: Box::new(Expression::Simple(Value::Selector( left: Box::new(Expression::Simple(Value::Selector(make_selector!(
make_selector!(make_expr!("foo", 1, 1) => [ make_expr!("foo", Position::new(1, 1, 0)) => [
make_tok!("quux", 1, 1) ] => 1, 1), make_tok!("quux", Position::new(1, 1, 0)) ]
))), => Position::new(1, 1, 0))))),
right: Box::new(Expression::Simple(Value::Int(value_node!(1, 1, 1)))), right: Box::new(Expression::Simple(Value::Int(value_node!(
pos: Position::new(1, 0), 1,
Position::new(1, 1, 0)
)))),
pos: Position::new(1, 0, 0),
}), }),
)], )],
pos: Position::new(1, 0), pos: Position::new(1, 0, 0),
}; };
assert!(def.validate_symbols().unwrap() == ()); assert!(def.validate_symbols().unwrap() == ());
} }
@ -84,20 +91,23 @@ pub fn test_macro_validation_selector_happy_path() {
#[test] #[test]
pub fn test_macro_validation_selector_fail() { pub fn test_macro_validation_selector_fail() {
let def = MacroDef { let def = MacroDef {
argdefs: vec![value_node!("foo".to_string(), 1, 0)], argdefs: vec![value_node!("foo".to_string(), Position::new(1, 0, 0))],
fields: vec![( fields: vec![(
make_tok!("f1", 1, 1), make_tok!("f1", Position::new(1, 1, 0)),
Expression::Binary(BinaryOpDef { Expression::Binary(BinaryOpDef {
kind: BinaryExprType::Add, kind: BinaryExprType::Add,
left: Box::new(Expression::Simple(Value::Selector( left: Box::new(Expression::Simple(Value::Selector(
make_selector!(make_expr!("bar", 1, 1) => [ make_selector!(make_expr!("bar", Position::new(1, 1, 0)) => [
make_tok!("quux", 1, 1) ] => 1, 1), make_tok!("quux", Position::new(1, 1, 0)) ] => Position::new(1, 1, 0)),
))), ))),
right: Box::new(Expression::Simple(Value::Int(value_node!(1, 1, 1)))), right: Box::new(Expression::Simple(Value::Int(value_node!(
pos: Position::new(1, 0), 1,
Position::new(1, 1, 0)
)))),
pos: Position::new(1, 0, 0),
}), }),
)], )],
pos: Position::new(1, 0), pos: Position::new(1, 0, 0),
}; };
let mut expected = HashSet::new(); let mut expected = HashSet::new();
expected.insert("bar".to_string()); expected.insert("bar".to_string());


@@ -16,17 +16,20 @@
 #[macro_use]
 extern crate bencher;
+extern crate abortable_parser;
 extern crate cpuprofiler;
-extern crate nom_locate;
 extern crate ucglib;
 
 use bencher::Bencher;
+use ucglib::iter::OffsetStrIter;
 //use cpuprofiler::PROFILER;
 use ucglib::parse::*;
 
 fn do_parse(i: &str) {
-    parse(nom_locate::LocatedSpan::new(i));
+    parse(OffsetStrIter::new(i));
 }
 
 fn parse_int(b: &mut Bencher) {


@@ -18,7 +18,7 @@ pub enum Val {
     Float(f64),
     Str(String),
     List(Vec<Rc<Val>>),
-    Tuple(Vec<(Positioned<String>, Rc<Val>)>),
+    Tuple(Vec<(PositionedItem<String>, Rc<Val>)>),
     Macro(MacroDef),
 }
 
@@ -114,7 +114,7 @@ impl Val {
     }
 
     /// Returns the fields if this Val is a tuple. None otherwise.
-    pub fn get_fields(&self) -> Option<&Vec<(Positioned<String>, Rc<Val>)>> {
+    pub fn get_fields(&self) -> Option<&Vec<(PositionedItem<String>, Rc<Val>)>> {
         if let &Val::Tuple(ref fs) = self {
             Some(fs)
         } else {


@@ -28,8 +28,8 @@ use std::string::ToString;
 use ast::*;
 use error;
 use format;
+use iter::OffsetStrIter;
 use parse::parse;
-use tokenizer::Span;
 
 pub mod assets;
 pub mod ir;
@ -44,7 +44,7 @@ impl MacroDef {
cache: Rc<RefCell<assets::Cache>>, cache: Rc<RefCell<assets::Cache>>,
env: Rc<Val>, env: Rc<Val>,
mut args: Vec<Rc<Val>>, mut args: Vec<Rc<Val>>,
) -> Result<Vec<(Positioned<String>, Rc<Val>)>, Box<Error>> { ) -> Result<Vec<(PositionedItem<String>, Rc<Val>)>, Box<Error>> {
// Error conditions. If the args don't match the length and types of the argdefs then this is // Error conditions. If the args don't match the length and types of the argdefs then this is
// macro call error. // macro call error.
if args.len() > self.argdefs.len() { if args.len() > self.argdefs.len() {
@ -61,12 +61,12 @@ impl MacroDef {
// If the expressions reference Symbols not defined in the MacroDef that is also an error. // If the expressions reference Symbols not defined in the MacroDef that is also an error.
// TODO(jwall): We should probably enforce that the Expression Symbols must be in argdefs rules // TODO(jwall): We should probably enforce that the Expression Symbols must be in argdefs rules
// at Macro definition time not evaluation time. // at Macro definition time not evaluation time.
let mut scope = HashMap::<Positioned<String>, Rc<Val>>::new(); let mut scope = HashMap::<PositionedItem<String>, Rc<Val>>::new();
for (i, arg) in args.drain(0..).enumerate() { for (i, arg) in args.drain(0..).enumerate() {
scope.entry(self.argdefs[i].clone()).or_insert(arg.clone()); scope.entry(self.argdefs[i].clone()).or_insert(arg.clone());
} }
let b = Builder::new_with_env_and_scope(root, cache, scope, env); let b = Builder::new_with_env_and_scope(root, cache, scope, env);
let mut result: Vec<(Positioned<String>, Rc<Val>)> = Vec::new(); let mut result: Vec<(PositionedItem<String>, Rc<Val>)> = Vec::new();
for &(ref key, ref expr) in self.fields.iter() { for &(ref key, ref expr) in self.fields.iter() {
// We clone the expressions here because this macro may be consumed // We clone the expressions here because this macro may be consumed
// multiple times in the future. // multiple times in the future.
@ -81,7 +81,7 @@ impl MacroDef {
type BuildResult = Result<(), Box<Error>>; type BuildResult = Result<(), Box<Error>>;
/// Defines a set of values in a parsed file. /// Defines a set of values in a parsed file.
type ValueMap = HashMap<Positioned<String>, Rc<Val>>; type ValueMap = HashMap<PositionedItem<String>, Rc<Val>>;
/// AssertCollector collects the results of assertions in the UCG AST. /// AssertCollector collects the results of assertions in the UCG AST.
pub struct AssertCollector { pub struct AssertCollector {
@ -136,7 +136,7 @@ macro_rules! eval_binary_expr {
impl<'a> Builder<'a> { impl<'a> Builder<'a> {
// TOOD(jwall): This needs some unit tests. // TOOD(jwall): This needs some unit tests.
fn tuple_to_val(&self, fields: &Vec<(Token, Expression)>) -> Result<Rc<Val>, Box<Error>> { fn tuple_to_val(&self, fields: &Vec<(Token, Expression)>) -> Result<Rc<Val>, Box<Error>> {
let mut new_fields = Vec::<(Positioned<String>, Rc<Val>)>::new(); let mut new_fields = Vec::<(PositionedItem<String>, Rc<Val>)>::new();
for &(ref name, ref expr) in fields.iter() { for &(ref name, ref expr) in fields.iter() {
let val = try!(self.eval_expr(expr)); let val = try!(self.eval_expr(expr));
new_fields.push((name.into(), val)); new_fields.push((name.into(), val));
@ -159,17 +159,18 @@ impl<'a> Builder<'a> {
&Value::Int(ref i) => Ok(Rc::new(Val::Int(i.val))), &Value::Int(ref i) => Ok(Rc::new(Val::Int(i.val))),
&Value::Float(ref f) => Ok(Rc::new(Val::Float(f.val))), &Value::Float(ref f) => Ok(Rc::new(Val::Float(f.val))),
&Value::Str(ref s) => Ok(Rc::new(Val::Str(s.val.to_string()))), &Value::Str(ref s) => Ok(Rc::new(Val::Str(s.val.to_string()))),
&Value::Symbol(ref s) => self.lookup_sym(&(s.into())).ok_or(Box::new( &Value::Symbol(ref s) => {
error::Error::new( self.lookup_sym(&(s.into()))
format!( .ok_or(Box::new(error::Error::new(
"Unable to find {} in file: {}", format!(
s.val, "Unable to find {} in file: {}",
self.root.to_string_lossy() s.val,
), self.root.to_string_lossy()
error::ErrorType::NoSuchSymbol, ),
v.pos().clone(), error::ErrorType::NoSuchSymbol,
), v.pos().clone(),
)), )))
}
&Value::List(ref def) => self.list_to_val(def), &Value::List(ref def) => self.list_to_val(def),
&Value::Tuple(ref tuple) => self.tuple_to_val(&tuple.val), &Value::Tuple(ref tuple) => self.tuple_to_val(&tuple.val),
&Value::Selector(ref selector_list_node) => { &Value::Selector(ref selector_list_node) => {
@ -189,9 +190,13 @@ impl<'a> Builder<'a> {
cache: Rc<RefCell<assets::Cache>>, cache: Rc<RefCell<assets::Cache>>,
scope: ValueMap, scope: ValueMap,
) -> Self { ) -> Self {
let env_vars: Vec<(Positioned<String>, Rc<Val>)> = env::vars() let env_vars: Vec<(PositionedItem<String>, Rc<Val>)> = env::vars()
.map(|t| (Positioned::new(t.0, 0, 0), Rc::new(t.1.into()))) .map(|t| {
.collect(); (
PositionedItem::new(t.0, Position::new(0, 0, 0)),
Rc::new(t.1.into()),
)
}).collect();
Self::new_with_env_and_scope(root, cache, scope, Rc::new(Val::Tuple(env_vars))) Self::new_with_env_and_scope(root, cache, scope, Rc::new(Val::Tuple(env_vars)))
} }
@ -220,8 +225,8 @@ impl<'a> Builder<'a> {
/// Returns a Val by name from previously built UCG. /// Returns a Val by name from previously built UCG.
pub fn get_out_by_name(&self, name: &str) -> Option<Rc<Val>> { pub fn get_out_by_name(&self, name: &str) -> Option<Rc<Val>> {
let key = Positioned { let key = PositionedItem {
pos: Position::new(0, 0), pos: Position::new(0, 0, 0),
val: name.to_string(), val: name.to_string(),
}; };
self.lookup_sym(&key) self.lookup_sym(&key)
@ -243,8 +248,8 @@ impl<'a> Builder<'a> {
Ok(()) Ok(())
} }
fn eval_span(&mut self, input: Span) -> Result<Rc<Val>, Box<Error>> { fn eval_span(&mut self, input: OffsetStrIter) -> Result<Rc<Val>, Box<Error>> {
match parse(input) { match parse(input.clone()) {
Ok(stmts) => { Ok(stmts) => {
//panic!("Successfully parsed {}", input); //panic!("Successfully parsed {}", input);
let mut out: Option<Rc<Val>> = None; let mut out: Option<Rc<Val>> = None;
@ -256,20 +261,17 @@ impl<'a> Builder<'a> {
Some(val) => Ok(val), Some(val) => Ok(val),
} }
} }
Err(err) => Err(Box::new(error::Error::new_with_cause( Err(err) => Err(Box::new(error::Error::new(
format!( format!("{}", err,),
"Error while parsing file: {}",
self.curr_file.unwrap_or("<eval>")
),
error::ErrorType::ParseError, error::ErrorType::ParseError,
err, (&input).into(),
))), ))),
} }
} }
/// Evaluate an input string as UCG. /// Evaluate an input string as UCG.
pub fn eval_string(&mut self, input: &str) -> Result<Rc<Val>, Box<Error>> { pub fn eval_string(&mut self, input: &str) -> Result<Rc<Val>, Box<Error>> {
self.eval_span(Span::new(input)) self.eval_span(OffsetStrIter::new(input))
} }
/// Builds a ucg file at the named path. /// Builds a ucg file at the named path.
@ -295,7 +297,8 @@ impl<'a> Builder<'a> {
let mut b = Self::new(normalized.clone(), self.assets.clone()); let mut b = Self::new(normalized.clone(), self.assets.clone());
let filepath = normalized.to_str().unwrap().clone(); let filepath = normalized.to_str().unwrap().clone();
try!(b.build_file(filepath)); try!(b.build_file(filepath));
let fields: Vec<(Positioned<String>, Rc<Val>)> = b.build_output.drain().collect(); let fields: Vec<(PositionedItem<String>, Rc<Val>)> =
b.build_output.drain().collect();
Rc::new(Val::Tuple(fields)) Rc::new(Val::Tuple(fields))
} }
}; };
@ -360,7 +363,7 @@ impl<'a> Builder<'a> {
} }
} }
fn lookup_sym(&self, sym: &Positioned<String>) -> Option<Rc<Val>> { fn lookup_sym(&self, sym: &PositionedItem<String>) -> Option<Rc<Val>> {
if &sym.val == "env" { if &sym.val == "env" {
return Some(self.env.clone()); return Some(self.env.clone());
} }
@ -370,7 +373,10 @@ impl<'a> Builder<'a> {
None None
} }
fn find_in_fieldlist(target: &str, fs: &Vec<(Positioned<String>, Rc<Val>)>) -> Option<Rc<Val>> { fn find_in_fieldlist(
target: &str,
fs: &Vec<(PositionedItem<String>, Rc<Val>)>,
) -> Option<Rc<Val>> {
for (key, val) in fs.iter().cloned() { for (key, val) in fs.iter().cloned() {
if target == &key.val { if target == &key.val {
return Some(val.clone()); return Some(val.clone());
@ -384,7 +390,7 @@ impl<'a> Builder<'a> {
stack: &mut VecDeque<Rc<Val>>, stack: &mut VecDeque<Rc<Val>>,
sl: &SelectorList, sl: &SelectorList,
next: (&Position, &str), next: (&Position, &str),
fs: &Vec<(Positioned<String>, Rc<Val>)>, fs: &Vec<(PositionedItem<String>, Rc<Val>)>,
) -> Result<(), Box<Error>> { ) -> Result<(), Box<Error>> {
if let Some(vv) = Self::find_in_fieldlist(next.1, fs) { if let Some(vv) = Self::find_in_fieldlist(next.1, fs) {
stack.push_back(vv.clone()); stack.push_back(vv.clone());
@ -775,7 +781,7 @@ impl<'a> Builder<'a> {
fn eval_copy(&self, def: &CopyDef) -> Result<Rc<Val>, Box<Error>> { fn eval_copy(&self, def: &CopyDef) -> Result<Rc<Val>, Box<Error>> {
let v = try!(self.lookup_selector(&def.selector.sel)); let v = try!(self.lookup_selector(&def.selector.sel));
if let Val::Tuple(ref src_fields) = *v { if let Val::Tuple(ref src_fields) = *v {
let mut m = HashMap::<Positioned<String>, (i32, Rc<Val>)>::new(); let mut m = HashMap::<PositionedItem<String>, (i32, Rc<Val>)>::new();
// loop through fields and build up a hashmap // loop through fields and build up a hashmap
let mut count = 0; let mut count = 0;
for &(ref key, ref val) in src_fields.iter() { for &(ref key, ref val) in src_fields.iter() {
@ -824,7 +830,7 @@ impl<'a> Builder<'a> {
} }
}; };
} }
let mut new_fields: Vec<(Positioned<String>, (i32, Rc<Val>))> = m.drain().collect(); let mut new_fields: Vec<(PositionedItem<String>, (i32, Rc<Val>))> = m.drain().collect();
// We want to maintain our order for the fields to make comparing tuples // We want to maintain our order for the fields to make comparing tuples
// easier in later code. So we sort by the field order before constructing a new tuple. // easier in later code. So we sort by the field order before constructing a new tuple.
new_fields.sort_by(|a, b| { new_fields.sort_by(|a, b| {
@ -839,8 +845,7 @@ impl<'a> Builder<'a> {
let first = a.0.clone(); let first = a.0.clone();
let t = a.1.clone(); let t = a.1.clone();
(first, t.1) (first, t.1)
}) }).collect(),
.collect(),
))); )));
} }
Err(Box::new(error::Error::new( Err(Box::new(error::Error::new(
@ -992,11 +997,8 @@ impl<'a> Builder<'a> {
let expr = &tok.fragment; let expr = &tok.fragment;
expr_as_stmt.push_str(expr); expr_as_stmt.push_str(expr);
expr_as_stmt.push_str(";"); expr_as_stmt.push_str(";");
let assert_input = Span { let assert_input =
fragment: &expr_as_stmt, OffsetStrIter::new_with_offsets(&expr_as_stmt, tok.pos.line - 1, tok.pos.column - 1);
line: tok.pos.line as u32,
offset: tok.pos.column,
};
let ok = match self.eval_span(assert_input) { let ok = match self.eval_span(assert_input) {
Ok(v) => v, Ok(v) => v,
Err(e) => { Err(e) => {


@ -25,35 +25,6 @@ fn test_expr_to_val(mut cases: Vec<(Expression, Val)>, b: Builder) {
} }
} }
#[test]
fn test_eval_div_expr() {
let cache = Rc::new(RefCell::new(MemoryCache::new()));
let b = Builder::new(std::env::current_dir().unwrap(), cache);
test_expr_to_val(
vec![
(
Expression::Binary(BinaryOpDef {
kind: BinaryExprType::Div,
left: Box::new(Expression::Simple(Value::Int(value_node!(2, 1, 1)))),
right: Box::new(Expression::Simple(Value::Int(value_node!(2, 1, 1)))),
pos: Position::new(1, 0),
}),
Val::Int(1),
),
(
Expression::Binary(BinaryOpDef {
kind: BinaryExprType::Div,
left: Box::new(Expression::Simple(Value::Float(value_node!(2.0, 1, 1)))),
right: Box::new(Expression::Simple(Value::Float(value_node!(2.0, 1, 1)))),
pos: Position::new(1, 0),
}),
Val::Float(1.0),
),
],
b,
);
}
#[test] #[test]
#[should_panic(expected = "Expected Float")] #[should_panic(expected = "Expected Float")]
fn test_eval_div_expr_fail() { fn test_eval_div_expr_fail() {
@ -63,9 +34,15 @@ fn test_eval_div_expr_fail() {
vec![( vec![(
Expression::Binary(BinaryOpDef { Expression::Binary(BinaryOpDef {
kind: BinaryExprType::Div, kind: BinaryExprType::Div,
left: Box::new(Expression::Simple(Value::Float(value_node!(2.0, 1, 1)))), left: Box::new(Expression::Simple(Value::Float(value_node!(
right: Box::new(Expression::Simple(Value::Int(value_node!(2, 1, 1)))), 2.0,
pos: Position::new(1, 0), Position::new(1, 1, 1)
)))),
right: Box::new(Expression::Simple(Value::Int(value_node!(
2,
Position::new(1, 1, 1)
)))),
pos: Position::new(1, 0, 0),
}), }),
Val::Float(1.0), Val::Float(1.0),
)], )],
@ -73,35 +50,6 @@ fn test_eval_div_expr_fail() {
); );
} }
#[test]
fn test_eval_mul_expr() {
let cache = Rc::new(RefCell::new(MemoryCache::new()));
let b = Builder::new(std::env::current_dir().unwrap(), cache);
test_expr_to_val(
vec![
(
Expression::Binary(BinaryOpDef {
kind: BinaryExprType::Mul,
left: Box::new(Expression::Simple(Value::Int(value_node!(2, 1, 1)))),
right: Box::new(Expression::Simple(Value::Int(value_node!(2, 1, 1)))),
pos: Position::new(1, 0),
}),
Val::Int(4),
),
(
Expression::Binary(BinaryOpDef {
kind: BinaryExprType::Mul,
left: Box::new(Expression::Simple(Value::Float(value_node!(2.0, 1, 1)))),
right: Box::new(Expression::Simple(Value::Float(value_node!(2.0, 1, 1)))),
pos: Position::new(1, 0),
}),
Val::Float(4.0),
),
],
b,
);
}
#[test] #[test]
#[should_panic(expected = "Expected Float")] #[should_panic(expected = "Expected Float")]
fn test_eval_mul_expr_fail() { fn test_eval_mul_expr_fail() {
@ -111,9 +59,15 @@ fn test_eval_mul_expr_fail() {
vec![( vec![(
Expression::Binary(BinaryOpDef { Expression::Binary(BinaryOpDef {
kind: BinaryExprType::Mul, kind: BinaryExprType::Mul,
left: Box::new(Expression::Simple(Value::Float(value_node!(2.0, 1, 1)))), left: Box::new(Expression::Simple(Value::Float(value_node!(
right: Box::new(Expression::Simple(Value::Int(value_node!(20, 1, 1)))), 2.0,
pos: Position::new(1, 0), Position::new(1, 1, 1)
)))),
right: Box::new(Expression::Simple(Value::Int(value_node!(
20,
Position::new(1, 1, 1)
)))),
pos: Position::new(1, 0, 0),
}), }),
Val::Float(1.0), Val::Float(1.0),
)], )],
@ -121,35 +75,6 @@ fn test_eval_mul_expr_fail() {
); );
} }
#[test]
fn test_eval_subtract_expr() {
let cache = Rc::new(RefCell::new(MemoryCache::new()));
let b = Builder::new(std::env::current_dir().unwrap(), cache);
test_expr_to_val(
vec![
(
Expression::Binary(BinaryOpDef {
kind: BinaryExprType::Sub,
left: Box::new(Expression::Simple(Value::Int(value_node!(2, 1, 1)))),
right: Box::new(Expression::Simple(Value::Int(value_node!(1, 1, 1)))),
pos: Position::new(1, 0),
}),
Val::Int(1),
),
(
Expression::Binary(BinaryOpDef {
kind: BinaryExprType::Sub,
left: Box::new(Expression::Simple(Value::Float(value_node!(2.0, 1, 1)))),
right: Box::new(Expression::Simple(Value::Float(value_node!(1.0, 1, 1)))),
pos: Position::new(1, 0),
}),
Val::Float(1.0),
),
],
b,
);
}
#[test] #[test]
#[should_panic(expected = "Expected Float")] #[should_panic(expected = "Expected Float")]
fn test_eval_subtract_expr_fail() { fn test_eval_subtract_expr_fail() {
@ -159,88 +84,21 @@ fn test_eval_subtract_expr_fail() {
vec![( vec![(
Expression::Binary(BinaryOpDef { Expression::Binary(BinaryOpDef {
kind: BinaryExprType::Sub, kind: BinaryExprType::Sub,
left: Box::new(Expression::Simple(Value::Float(value_node!(2.0, 1, 1)))), left: Box::new(Expression::Simple(Value::Float(value_node!(
right: Box::new(Expression::Simple(Value::Int(value_node!(2, 1, 1)))), 2.0,
pos: Position::new(1, 0), Position::new(1, 1, 1)
)))),
right: Box::new(Expression::Simple(Value::Int(value_node!(
2,
Position::new(1, 1, 1)
)))),
pos: Position::new(1, 0, 0),
}), }),
Val::Float(1.0), Val::Float(1.0),
)], )],
b, b,
); );
} }
#[test]
fn test_eval_add_expr() {
let cache = Rc::new(RefCell::new(MemoryCache::new()));
let b = Builder::new(std::env::current_dir().unwrap(), cache);
test_expr_to_val(
vec![
(
Expression::Binary(BinaryOpDef {
kind: BinaryExprType::Add,
left: Box::new(Expression::Simple(Value::Int(value_node!(1, 1, 1)))),
right: Box::new(Expression::Simple(Value::Int(value_node!(1, 1, 1)))),
pos: Position::new(1, 0),
}),
Val::Int(2),
),
(
Expression::Binary(BinaryOpDef {
kind: BinaryExprType::Add,
left: Box::new(Expression::Simple(Value::Float(value_node!(1.0, 1, 1)))),
right: Box::new(Expression::Simple(Value::Float(value_node!(1.0, 1, 1)))),
pos: Position::new(1, 0),
}),
Val::Float(2.0),
),
(
Expression::Binary(BinaryOpDef {
kind: BinaryExprType::Add,
left: Box::new(Expression::Simple(Value::Str(value_node!(
"foo".to_string(),
1,
1
)))),
right: Box::new(Expression::Simple(Value::Str(value_node!(
"bar".to_string(),
1,
1
)))),
pos: Position::new(1, 0),
}),
Val::Str("foobar".to_string()),
),
(
Expression::Binary(BinaryOpDef {
kind: BinaryExprType::Add,
left: Box::new(Expression::Simple(Value::List(ListDef {
elems: vec![Expression::Simple(Value::Str(value_node!(
"foo".to_string(),
1,
1
)))],
pos: Position::new(1, 1),
}))),
right: Box::new(Expression::Simple(Value::List(ListDef {
elems: vec![Expression::Simple(Value::Str(value_node!(
"bar".to_string(),
1,
1
)))],
pos: Position::new(1, 1),
}))),
pos: Position::new(1, 0),
}),
Val::List(vec![
Rc::new(Val::Str("foo".to_string())),
Rc::new(Val::Str("bar".to_string())),
]),
),
],
b,
);
}
#[test] #[test]
#[should_panic(expected = "Expected Float")] #[should_panic(expected = "Expected Float")]
fn test_eval_add_expr_fail() { fn test_eval_add_expr_fail() {
@ -250,9 +108,15 @@ fn test_eval_add_expr_fail() {
vec![( vec![(
Expression::Binary(BinaryOpDef { Expression::Binary(BinaryOpDef {
kind: BinaryExprType::Add, kind: BinaryExprType::Add,
left: Box::new(Expression::Simple(Value::Float(value_node!(2.0, 1, 1)))), left: Box::new(Expression::Simple(Value::Float(value_node!(
right: Box::new(Expression::Simple(Value::Int(value_node!(2, 1, 1)))), 2.0,
pos: Position::new(1, 0), Position::new(1, 1, 1)
)))),
right: Box::new(Expression::Simple(Value::Int(value_node!(
2,
Position::new(1, 1, 1)
)))),
pos: Position::new(1, 0, 0),
}), }),
Val::Float(1.0), Val::Float(1.0),
)], )],
@ -260,245 +124,20 @@ fn test_eval_add_expr_fail() {
); );
} }
#[test]
fn test_eval_nested_tuple() {
test_expr_to_val(
vec![
(
Expression::Simple(Value::Tuple(value_node!(
vec![(
Token::new("foo", TokenType::BAREWORD, 1, 1),
Expression::Simple(Value::Tuple(value_node!(Vec::new(), 1, 1))),
)],
1,
1
))),
Val::Tuple(vec![(
Positioned::new("foo".to_string(), 1, 1),
Rc::new(Val::Tuple(Vec::new())),
)]),
),
(
Expression::Simple(Value::Tuple(value_node!(
vec![(
Token::new("foo", TokenType::BAREWORD, 1, 1),
Expression::Simple(Value::Tuple(value_node!(
vec![(
Token::new("bar".to_string(), TokenType::BAREWORD, 1, 5),
Expression::Simple(Value::Tuple(value_node!(vec![], 1, 10))),
)],
1,
1
))),
)],
1,
1
))),
Val::Tuple(vec![(
Positioned::new("foo".to_string(), 1, 1),
Rc::new(Val::Tuple(vec![(
Positioned::new("bar".to_string(), 1, 10),
Rc::new(Val::Tuple(vec![])),
)])),
)]),
),
(
Expression::Simple(Value::Tuple(value_node!(
vec![(
Token::new("foo", TokenType::BAREWORD, 1, 1),
Expression::Simple(Value::Tuple(value_node!(
vec![(
Token::new("bar".to_string(), TokenType::BAREWORD, 1, 5),
Expression::Simple(Value::Tuple(value_node!(
vec![(
Token::new("quux".to_string(), TokenType::BAREWORD, 1, 1),
Expression::Simple(Value::Int(value_node!(3, 1, 1))),
)],
1,
10
))),
)],
1,
1
))),
)],
1,
1
))),
Val::Tuple(vec![(
Positioned::new("foo".to_string(), 1, 1),
Rc::new(Val::Tuple(vec![(
Positioned::new("bar".to_string(), 1, 10),
Rc::new(Val::Tuple(vec![(
Positioned::new("quux".to_string(), 1, 1),
Rc::new(Val::Int(3)),
)])),
)])),
)]),
),
],
Builder::new(
std::env::current_dir().unwrap(),
Rc::new(RefCell::new(MemoryCache::new())),
),
);
}
#[test]
fn test_eval_simple_expr() {
test_expr_to_val(
vec![
(
Expression::Simple(Value::Int(value_node!(1, 1, 1))),
Val::Int(1),
),
(
Expression::Simple(Value::Float(value_node!(2.0, 1, 1))),
Val::Float(2.0),
),
(
Expression::Simple(Value::Str(value_node!("foo".to_string(), 1, 1))),
Val::Str("foo".to_string()),
),
(
Expression::Simple(Value::Tuple(value_node!(
vec![(
make_tok!("bar", 1, 1),
Expression::Simple(Value::Int(value_node!(1, 1, 1))),
)],
1,
1
))),
Val::Tuple(vec![(
value_node!("bar".to_string(), 1, 1),
Rc::new(Val::Int(1)),
)]),
),
],
Builder::new(
std::env::current_dir().unwrap(),
Rc::new(RefCell::new(MemoryCache::new())),
),
);
}
#[test]
fn test_eval_simple_lookup_expr() {
let cache = Rc::new(RefCell::new(MemoryCache::new()));
let mut b = Builder::new(std::env::current_dir().unwrap(), cache);
b.build_output
.entry(value_node!("var1".to_string(), 1, 0))
.or_insert(Rc::new(Val::Int(1)));
test_expr_to_val(
vec![(
Expression::Simple(Value::Symbol(value_node!("var1".to_string(), 1, 1))),
Val::Int(1),
)],
b,
);
}
#[test] #[test]
fn test_eval_simple_lookup_error() { fn test_eval_simple_lookup_error() {
let cache = Rc::new(RefCell::new(MemoryCache::new())); let cache = Rc::new(RefCell::new(MemoryCache::new()));
let mut b = Builder::new(std::env::current_dir().unwrap(), cache); let mut b = Builder::new(std::env::current_dir().unwrap(), cache);
b.build_output b.build_output
.entry(value_node!("var1".to_string(), 1, 0)) .entry(value_node!("var1".to_string(), Position::new(1, 0, 0)))
.or_insert(Rc::new(Val::Int(1))); .or_insert(Rc::new(Val::Int(1)));
let expr = Expression::Simple(Value::Symbol(value_node!("var".to_string(), 1, 1))); let expr = Expression::Simple(Value::Symbol(value_node!(
"var".to_string(),
Position::new(1, 1, 1)
)));
assert!(b.eval_expr(&expr).is_err()); assert!(b.eval_expr(&expr).is_err());
} }
#[test]
fn test_eval_selector_expr() {
let cache = Rc::new(RefCell::new(MemoryCache::new()));
let mut b = Builder::new(std::env::current_dir().unwrap(), cache);
b.build_output
.entry(value_node!("var1".to_string(), 1, 0))
.or_insert(Rc::new(Val::Tuple(vec![(
value_node!("lvl1".to_string(), 1, 0),
Rc::new(Val::Tuple(vec![(
value_node!("lvl2".to_string(), 1, 0),
Rc::new(Val::Int(3)),
)])),
)])));
b.build_output
.entry(value_node!("var2".to_string(), 1, 0))
.or_insert(Rc::new(Val::Int(2)));
b.build_output
.entry(value_node!("var3".to_string(), 1, 0))
.or_insert(Rc::new(Val::Tuple(vec![(
value_node!("lvl1".to_string(), 1, 0),
Rc::new(Val::Int(4)),
)])));
test_expr_to_val(
vec![
(
Expression::Simple(Value::Selector(make_selector!(make_expr!("var1")))),
Val::Tuple(vec![(
value_node!("lvl1".to_string(), 1, 0),
Rc::new(Val::Tuple(vec![(
value_node!("lvl2".to_string(), 1, 0),
Rc::new(Val::Int(3)),
)])),
)]),
),
(
Expression::Simple(Value::Selector(
make_selector!(make_expr!("var1") => "lvl1"),
)),
Val::Tuple(vec![(
value_node!("lvl2".to_string(), 1, 0),
Rc::new(Val::Int(3)),
)]),
),
(
Expression::Simple(Value::Selector(
make_selector!(make_expr!("var1") => "lvl1", "lvl2"),
)),
Val::Int(3),
),
(
Expression::Simple(Value::Selector(make_selector!(make_expr!("var2")))),
Val::Int(2),
),
(
Expression::Simple(Value::Selector(
make_selector!(make_expr!("var3") => "lvl1"),
)),
Val::Int(4),
),
],
b,
);
}
#[test]
fn test_eval_selector_list_expr() {
let cache = Rc::new(RefCell::new(MemoryCache::new()));
let mut b = Builder::new(std::env::current_dir().unwrap(), cache);
b.build_output
.entry(value_node!("var1".to_string(), 1, 1))
.or_insert(Rc::new(Val::List(vec![
Rc::new(Val::Str("val1".to_string())),
Rc::new(Val::Tuple(vec![(
value_node!("var2".to_string(), 1, 1),
Rc::new(Val::Int(1)),
)])),
])));
test_expr_to_val(
vec![(
Expression::Simple(Value::Selector(
make_selector!(make_expr!("var1") => "0" => 1, 1),
)),
Val::Str("val1".to_string()),
)],
b,
);
}
// Include nested for each. // Include nested for each.
#[test] #[test]
#[should_panic(expected = "Unable to find tpl1")] #[should_panic(expected = "Unable to find tpl1")]
@ -508,9 +147,12 @@ fn test_expr_copy_no_such_tuple() {
test_expr_to_val( test_expr_to_val(
vec![( vec![(
Expression::Copy(CopyDef { Expression::Copy(CopyDef {
selector: make_selector!(make_expr!("tpl1")), selector: make_selector!(
make_expr!("tpl1", Position::new(1, 1, 1)),
Position::new(1, 1, 1)
),
fields: Vec::new(), fields: Vec::new(),
pos: Position::new(1, 0), pos: Position::new(1, 0, 0),
}), }),
Val::Tuple(Vec::new()), Val::Tuple(Vec::new()),
)], )],
@ -524,14 +166,17 @@ fn test_expr_copy_not_a_tuple() {
let cache = Rc::new(RefCell::new(MemoryCache::new())); let cache = Rc::new(RefCell::new(MemoryCache::new()));
let mut b = Builder::new(std::env::current_dir().unwrap(), cache); let mut b = Builder::new(std::env::current_dir().unwrap(), cache);
b.build_output b.build_output
.entry(value_node!("tpl1".to_string(), 1, 0)) .entry(value_node!("tpl1".to_string(), Position::new(1, 0, 0)))
.or_insert(Rc::new(Val::Int(1))); .or_insert(Rc::new(Val::Int(1)));
test_expr_to_val( test_expr_to_val(
vec![( vec![(
Expression::Copy(CopyDef { Expression::Copy(CopyDef {
selector: make_selector!(make_expr!("tpl1")), selector: make_selector!(
make_expr!("tpl1", Position::new(1, 1, 1)),
Position::new(1, 1, 1)
),
fields: Vec::new(), fields: Vec::new(),
pos: Position::new(1, 0), pos: Position::new(1, 0, 0),
}), }),
Val::Tuple(Vec::new()), Val::Tuple(Vec::new()),
)], )],
@ -545,130 +190,30 @@ fn test_expr_copy_field_type_error() {
let cache = Rc::new(RefCell::new(MemoryCache::new())); let cache = Rc::new(RefCell::new(MemoryCache::new()));
let mut b = Builder::new(std::env::current_dir().unwrap(), cache); let mut b = Builder::new(std::env::current_dir().unwrap(), cache);
b.build_output b.build_output
.entry(value_node!("tpl1".to_string(), 1, 0)) .entry(value_node!("tpl1".to_string(), Position::new(1, 0, 0)))
.or_insert(Rc::new(Val::Tuple(vec![( .or_insert(Rc::new(Val::Tuple(vec![(
value_node!("fld1".to_string(), 1, 0), value_node!("fld1".to_string(), Position::new(1, 0, 0)),
Rc::new(Val::Int(1)), Rc::new(Val::Int(1)),
)]))); )])));
test_expr_to_val( test_expr_to_val(
vec![( vec![(
Expression::Copy(CopyDef { Expression::Copy(CopyDef {
selector: make_selector!(make_expr!("tpl1")), selector: make_selector!(
fields: vec![( make_expr!("tpl1", Position::new(1, 1, 1)),
make_tok!("fld1", 1, 1), Position::new(1, 1, 1)
Expression::Simple(Value::Str(value_node!("2".to_string(), 1, 1))),
)],
pos: Position::new(1, 0),
}),
Val::Tuple(vec![(
value_node!("fld1".to_string(), 1, 1),
Rc::new(Val::Str("2".to_string())),
)]),
)],
b,
);
}
#[test]
fn test_expr_copy() {
let cache = Rc::new(RefCell::new(MemoryCache::new()));
let mut b = Builder::new(std::env::current_dir().unwrap(), cache);
b.build_output
.entry(value_node!("tpl1".to_string(), 1, 0))
.or_insert(Rc::new(Val::Tuple(vec![(
value_node!("fld1".to_string(), 1, 0),
Rc::new(Val::Int(1)),
)])));
test_expr_to_val(
vec![
(
Expression::Copy(CopyDef {
selector: make_selector!(make_expr!("tpl1")),
fields: vec![(
make_tok!("fld2", 1, 1),
Expression::Simple(Value::Str(value_node!("2".to_string(), 1, 1))),
)],
pos: Position::new(1, 0),
}),
// Add a new field to the copy
Val::Tuple(
// NOTE(jwall): The order of these is important in order to ensure
// that the compare assertion is correct. The ordering has no
// semantics though so at some point we should probably be less restrictive.
vec![
(value_node!("fld1".to_string(), 1, 0), Rc::new(Val::Int(1))),
(
value_node!("fld2".to_string(), 1, 1),
Rc::new(Val::Str("2".to_string())),
),
],
), ),
), fields: vec![(
// Overwrite a field in the copy make_tok!("fld1", Position::new(1, 1, 1)),
( Expression::Simple(Value::Str(value_node!(
Expression::Copy(CopyDef { "2".to_string(),
selector: make_selector!(make_expr!("tpl1")), Position::new(1, 1, 1)
fields: vec![ ))),
( )],
make_tok!("fld1", 1, 1), pos: Position::new(1, 0, 0),
Expression::Simple(Value::Int(value_node!(3, 1, 1))),
),
(
make_tok!("fld2", 1, 1),
Expression::Simple(Value::Str(value_node!("2".to_string(), 1, 1))),
),
],
pos: Position::new(1, 0),
}),
Val::Tuple(vec![
(value_node!("fld1".to_string(), 1, 0), Rc::new(Val::Int(3))),
(
value_node!("fld2".to_string(), 1, 0),
Rc::new(Val::Str("2".to_string())),
),
]),
),
// The source tuple is still unmodified.
(
Expression::Simple(Value::Selector(make_selector!(make_expr!["tpl1"]))),
Val::Tuple(vec![(
value_node!("fld1".to_string(), 1, 0),
Rc::new(Val::Int(1)),
)]),
),
],
b,
);
}
#[test]
fn test_macro_call() {
let cache = Rc::new(RefCell::new(MemoryCache::new()));
let mut b = Builder::new(std::env::current_dir().unwrap(), cache);
b.build_output
.entry(value_node!("tstmac".to_string(), 1, 0))
.or_insert(Rc::new(Val::Macro(MacroDef {
argdefs: vec![value_node!("arg1".to_string(), 1, 0)],
fields: vec![(
make_tok!("foo", 1, 1),
Expression::Simple(Value::Symbol(value_node!("arg1".to_string(), 1, 1))),
)],
pos: Position::new(1, 0),
})));
test_expr_to_val(
vec![(
Expression::Call(CallDef {
macroref: make_selector!(make_expr!("tstmac")),
arglist: vec![Expression::Simple(Value::Str(value_node!(
"bar".to_string(),
1,
1
)))],
pos: Position::new(1, 0),
}), }),
Val::Tuple(vec![( Val::Tuple(vec![(
value_node!("foo".to_string(), 1, 1), value_node!("fld1".to_string(), Position::new(1, 1, 1)),
Rc::new(Val::Str("bar".to_string())), Rc::new(Val::Str("2".to_string())),
)]), )]),
)], )],
b, b,
@ -681,31 +226,36 @@ fn test_macro_hermetic() {
let cache = Rc::new(RefCell::new(MemoryCache::new())); let cache = Rc::new(RefCell::new(MemoryCache::new()));
let mut b = Builder::new(std::env::current_dir().unwrap(), cache); let mut b = Builder::new(std::env::current_dir().unwrap(), cache);
b.build_output b.build_output
.entry(value_node!("arg1".to_string(), 1, 0)) .entry(value_node!("arg1".to_string(), Position::new(1, 0, 0)))
.or_insert(Rc::new(Val::Str("bar".to_string()))); .or_insert(Rc::new(Val::Str("bar".to_string())));
b.build_output b.build_output
.entry(value_node!("tstmac".to_string(), 1, 0)) .entry(value_node!("tstmac".to_string(), Position::new(1, 0, 0)))
.or_insert(Rc::new(Val::Macro(MacroDef { .or_insert(Rc::new(Val::Macro(MacroDef {
argdefs: vec![value_node!("arg2".to_string(), 1, 0)], argdefs: vec![value_node!("arg2".to_string(), Position::new(1, 0, 0))],
fields: vec![( fields: vec![(
make_tok!("foo", 1, 1), make_tok!("foo", Position::new(1, 1, 1)),
Expression::Simple(Value::Symbol(value_node!("arg1".to_string(), 1, 1))), Expression::Simple(Value::Symbol(value_node!(
"arg1".to_string(),
Position::new(1, 1, 1)
))),
)], )],
pos: Position::new(1, 0), pos: Position::new(1, 0, 0),
}))); })));
test_expr_to_val( test_expr_to_val(
vec![( vec![(
Expression::Call(CallDef { Expression::Call(CallDef {
macroref: make_selector!(make_expr!("tstmac")), macroref: make_selector!(
make_expr!("tstmac", Position::new(1, 1, 1)),
Position::new(1, 1, 1)
),
arglist: vec![Expression::Simple(Value::Str(value_node!( arglist: vec![Expression::Simple(Value::Str(value_node!(
"bar".to_string(), "bar".to_string(),
1, Position::new(1, 1, 1)
1
)))], )))],
pos: Position::new(1, 1), pos: Position::new(1, 1, 1),
}), }),
Val::Tuple(vec![( Val::Tuple(vec![(
value_node!("foo".to_string(), 1, 0), value_node!("foo".to_string(), Position::new(1, 1, 1)),
Rc::new(Val::Str("bar".to_string())), Rc::new(Val::Str("bar".to_string())),
)]), )]),
)], )],
@ -713,126 +263,42 @@ fn test_macro_hermetic() {
); );
} }
#[test]
fn test_select_expr() {
let cache = Rc::new(RefCell::new(MemoryCache::new()));
let mut b = Builder::new(std::env::current_dir().unwrap(), cache);
b.build_output
.entry(value_node!("foo".to_string(), 1, 0))
.or_insert(Rc::new(Val::Str("bar".to_string())));
b.build_output
.entry(value_node!("baz".to_string(), 1, 0))
.or_insert(Rc::new(Val::Str("boo".to_string())));
test_expr_to_val(
vec![
(
Expression::Select(SelectDef {
val: Box::new(Expression::Simple(Value::Symbol(value_node!(
"foo".to_string(),
1,
1
)))),
default: Box::new(Expression::Simple(Value::Int(value_node!(1, 1, 1)))),
tuple: vec![
(
make_tok!("foo", 1, 1),
Expression::Simple(Value::Str(value_node!("2".to_string(), 1, 1))),
),
(
make_tok!("bar", 1, 1),
Expression::Simple(Value::Int(value_node!(2, 1, 1))),
),
],
pos: Position::new(1, 0),
}),
Val::Int(2),
),
(
Expression::Select(SelectDef {
val: Box::new(Expression::Simple(Value::Symbol(value_node!(
"baz".to_string(),
1,
1
)))),
default: Box::new(Expression::Simple(Value::Int(value_node!(1, 1, 1)))),
tuple: vec![
(
make_tok!("bar", 1, 1),
Expression::Simple(Value::Int(value_node!(2, 1, 1))),
),
(
make_tok!("quux", 1, 1),
Expression::Simple(Value::Str(value_node!("2".to_string(), 1, 1))),
),
],
pos: Position::new(1, 0),
}),
// If the field doesn't exist then we get the default.
Val::Int(1),
),
],
b,
);
}
#[test] #[test]
#[should_panic(expected = "Expected String but got Integer in Select expression")] #[should_panic(expected = "Expected String but got Integer in Select expression")]
fn test_select_expr_not_a_string() { fn test_select_expr_not_a_string() {
let cache = Rc::new(RefCell::new(MemoryCache::new())); let cache = Rc::new(RefCell::new(MemoryCache::new()));
let mut b = Builder::new(std::env::current_dir().unwrap(), cache); let mut b = Builder::new(std::env::current_dir().unwrap(), cache);
b.build_output b.build_output
.entry(value_node!("foo".to_string(), 1, 0)) .entry(value_node!("foo".to_string(), Position::new(1, 0, 0)))
.or_insert(Rc::new(Val::Int(4))); .or_insert(Rc::new(Val::Int(4)));
test_expr_to_val( test_expr_to_val(
vec![( vec![(
Expression::Select(SelectDef { Expression::Select(SelectDef {
val: Box::new(Expression::Simple(Value::Symbol(value_node!( val: Box::new(Expression::Simple(Value::Symbol(value_node!(
"foo".to_string(), "foo".to_string(),
1, Position::new(1, 1, 1)
1 )))),
default: Box::new(Expression::Simple(Value::Int(value_node!(
1,
Position::new(1, 1, 1)
)))), )))),
default: Box::new(Expression::Simple(Value::Int(value_node!(1, 1, 1)))),
tuple: vec![ tuple: vec![
( (
make_tok!("bar", 1, 1), make_tok!("bar", Position::new(1, 1, 1)),
Expression::Simple(Value::Int(value_node!(2, 1, 1))), Expression::Simple(Value::Int(value_node!(2, Position::new(1, 1, 1)))),
), ),
( (
make_tok!("quux", 1, 1), make_tok!("quux", Position::new(1, 1, 1)),
Expression::Simple(Value::Str(value_node!("2".to_string(), 1, 1))), Expression::Simple(Value::Str(value_node!(
"2".to_string(),
Position::new(1, 1, 1)
))),
), ),
], ],
pos: Position::new(1, 0), pos: Position::new(1, 0, 0),
}), }),
Val::Int(2), Val::Int(2),
)], )],
b, b,
); );
} }
#[test]
fn test_let_statement() {
let cache = MemoryCache::new();
let mut b = Builder::new("<Eval>", Rc::new(RefCell::new(cache)));
let stmt = Statement::Let(LetDef {
name: make_tok!("foo", 1, 1),
value: Expression::Simple(Value::Str(value_node!("bar".to_string(), 1, 1))),
});
b.build_stmt(&stmt).unwrap();
test_expr_to_val(
vec![(
Expression::Simple(Value::Symbol(value_node!("foo".to_string(), 1, 1))),
Val::Str("bar".to_string()),
)],
b,
);
}
#[test]
fn test_build_file_string() {
let cache = Rc::new(RefCell::new(MemoryCache::new()));
let mut b = Builder::new(std::env::current_dir().unwrap(), cache);
b.eval_string("let foo = 1;").unwrap();
let key = value_node!("foo".to_string(), 1, 0);
assert!(b.build_output.contains_key(&key));
}

View File

@ -16,7 +16,7 @@
use std::io::Write; use std::io::Write;
use std::rc::Rc; use std::rc::Rc;
use ast::Positioned; use ast::PositionedItem;
use build::Val; use build::Val;
use convert::traits::{Converter, Result}; use convert::traits::{Converter, Result};
@ -29,7 +29,11 @@ impl EnvConverter {
EnvConverter {} EnvConverter {}
} }
fn convert_tuple(&self, flds: &Vec<(Positioned<String>, Rc<Val>)>, w: &mut Write) -> Result { fn convert_tuple(
&self,
flds: &Vec<(PositionedItem<String>, Rc<Val>)>,
w: &mut Write,
) -> Result {
for &(ref name, ref val) in flds.iter() { for &(ref name, ref val) in flds.iter() {
if val.is_tuple() { if val.is_tuple() {
eprintln!("Skipping embedded tuple..."); eprintln!("Skipping embedded tuple...");

View File

@ -17,7 +17,7 @@ use std;
use std::io::{Cursor, Write}; use std::io::{Cursor, Write};
use std::rc::Rc; use std::rc::Rc;
use ast::{Position, Positioned}; use ast::{Position, PositionedItem};
use build::Val; use build::Val;
use build::Val::Tuple; use build::Val::Tuple;
use convert; use convert;
@ -46,10 +46,10 @@ impl ExecConverter {
return Err(Box::new(Error::new( return Err(Box::new(Error::new(
"Exec tuples must have no more than 3 fields", "Exec tuples must have no more than 3 fields",
ErrorType::TypeFail, ErrorType::TypeFail,
Position::new(0, 0), Position::new(0, 0, 0),
))); )));
} }
let mut env: Option<&Vec<(Positioned<String>, Rc<Val>)>> = None; let mut env: Option<&Vec<(PositionedItem<String>, Rc<Val>)>> = None;
let mut command: Option<&str> = None; let mut command: Option<&str> = None;
let mut args: Option<&Vec<Rc<Val>>> = None; let mut args: Option<&Vec<Rc<Val>>> = None;
for &(ref name, ref val) in fields.iter() { for &(ref name, ref val) in fields.iter() {
@ -115,7 +115,7 @@ impl ExecConverter {
return Err(Box::new(Error::new( return Err(Box::new(Error::new(
"An exec tuple must have a command field", "An exec tuple must have a command field",
ErrorType::TypeFail, ErrorType::TypeFail,
Position::new(0, 0), Position::new(0, 0, 0),
))); )));
} }
// Okay if we have made it this far then we are ready to start creating our script. // Okay if we have made it this far then we are ready to start creating our script.
@ -157,7 +157,7 @@ impl ExecConverter {
return Err(Box::new(Error::new( return Err(Box::new(Error::new(
"Exec args must be a list of strings or tuples of strings.", "Exec args must be a list of strings or tuples of strings.",
ErrorType::TypeFail, ErrorType::TypeFail,
Position::new(0, 0), Position::new(0, 0, 0),
))) )))
} }
} }
@ -173,7 +173,7 @@ impl ExecConverter {
Err(Box::new(Error::new( Err(Box::new(Error::new(
"Exec outputs must be of type Tuple", "Exec outputs must be of type Tuple",
ErrorType::TypeFail, ErrorType::TypeFail,
Position::new(0, 0), Position::new(0, 0, 0),
))) )))
} }
} }

View File

@ -36,7 +36,7 @@ impl JsonConverter {
fn convert_tuple( fn convert_tuple(
&self, &self,
items: &Vec<(ast::Positioned<String>, Rc<Val>)>, items: &Vec<(ast::PositionedItem<String>, Rc<Val>)>,
) -> std::io::Result<serde_json::Value> { ) -> std::io::Result<serde_json::Value> {
let mut mp = serde_json::Map::new(); let mut mp = serde_json::Map::new();
for &(ref k, ref v) in items.iter() { for &(ref k, ref v) in items.iter() {

View File

@ -25,7 +25,7 @@ impl YamlConverter {
fn convert_tuple( fn convert_tuple(
&self, &self,
items: &Vec<(ast::Positioned<String>, Rc<Val>)>, items: &Vec<(ast::PositionedItem<String>, Rc<Val>)>,
) -> std::io::Result<serde_yaml::Value> { ) -> std::io::Result<serde_yaml::Value> {
let mut mapping = serde_yaml::Mapping::new(); let mut mapping = serde_yaml::Mapping::new();
for &(ref k, ref v) in items.iter() { for &(ref k, ref v) in items.iter() {

View File

@ -15,11 +15,12 @@
//! Errors for use by the ucg compiler. //! Errors for use by the ucg compiler.
use std::error; use std::error;
use std::fmt; use std::fmt;
use std::fmt::Debug;
use abortable_parser::Positioned;
use ast::*; use ast::*;
use nom;
/// ErrorType defines the various types of errors that can result from compiling UCG into an /// ErrorType defines the various types of errors that can result from compiling UCG into an
/// output format. /// output format.
pub enum ErrorType { pub enum ErrorType {
@ -62,7 +63,6 @@ pub struct Error {
pub err_type: ErrorType, pub err_type: ErrorType,
pub pos: Position, pub pos: Position,
pub msg: String, pub msg: String,
pub cause: Option<Box<Error>>,
_pkgonly: (), _pkgonly: (),
} }
@ -72,51 +72,21 @@ impl Error {
err_type: t, err_type: t,
pos: pos, pos: pos,
msg: msg.into(), msg: msg.into(),
cause: None,
_pkgonly: (), _pkgonly: (),
} }
} }
pub fn new_with_boxed_cause<S: Into<String>>(msg: S, t: ErrorType, cause: Box<Self>) -> Self {
let mut e = Self::new(msg, t, cause.pos.clone());
e.cause = Some(cause);
return e;
}
pub fn new_with_cause<S: Into<String>>(msg: S, t: ErrorType, cause: Self) -> Self {
Self::new_with_boxed_cause(msg, t, Box::new(cause))
}
pub fn new_with_errorkind<S: Into<String>>(
msg: S,
t: ErrorType,
pos: Position,
cause: nom::ErrorKind<Error>,
) -> Self {
match cause {
nom::ErrorKind::Custom(e) => Self::new_with_cause(msg, t, e),
e => Self::new_with_cause(
msg,
t,
Error::new(format!("ErrorKind: {}", e), ErrorType::Unsupported, pos),
),
}
}
fn render(&self, w: &mut fmt::Formatter) -> fmt::Result { fn render(&self, w: &mut fmt::Formatter) -> fmt::Result {
try!(write!( try!(write!(
w, w,
"{}: \"{}\" at line: {} column: {}", "{} at line: {} column: {}\nCaused By:\n\t{} ",
self.err_type, self.msg, self.pos.line, self.pos.column self.err_type, self.pos.line, self.pos.column, self.msg
)); ));
if let Some(ref cause) = self.cause {
try!(write!(w, "\n\tCaused By: {}", cause));
}
Ok(()) Ok(())
} }
} }
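For orientation, a brief sketch (not part of the diff) of what the reworked render format produces; the "TypeFail" display text for ErrorType is an assumption, everything else follows the format string above.

    // Hypothetical call site for the new Error::new(msg, type, pos) shape.
    fn render_demo() {
        let e = Error::new(
            "Expected String but got Integer in Select expression",
            ErrorType::TypeFail,
            Position::new(1, 10, 10),
        );
        // Debug formatting goes through render(), yielding roughly:
        //   TypeFail at line: 1 column: 10
        //   Caused By:
        //       Expected String but got Integer in Select expression
        println!("{:?}", e);
    }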
impl fmt::Debug for Error { impl Debug for Error {
fn fmt(&self, w: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, w: &mut fmt::Formatter) -> fmt::Result {
self.render(w) self.render(w)
} }
@ -133,3 +103,50 @@ impl error::Error for Error {
&self.msg &self.msg
} }
} }
#[derive(Debug)]
pub struct StackPrinter<C: abortable_parser::Positioned> {
pub err: abortable_parser::Error<C>,
}
impl<C> StackPrinter<C>
where
C: abortable_parser::Positioned,
{
pub fn render(&self, w: &mut fmt::Formatter) -> fmt::Result {
let mut curr_err = Some(&self.err);
let mut tabstop = "";
loop {
match curr_err {
// our exit condition;
None => break,
Some(err) => {
let context = err.get_context();
try!(write!(
w,
"{}{}: line: {}, column: {}\n",
tabstop,
err.get_msg(),
context.line(),
context.column(),
));
tabstop = "\t";
curr_err = err.get_cause();
if curr_err.is_some() {
try!(write!(w, "Caused by: \n"));
}
}
}
}
Ok(())
}
}
impl<C> fmt::Display for StackPrinter<C>
where
C: Positioned,
{
fn fmt(&self, w: &mut fmt::Formatter) -> fmt::Result {
self.render(w)
}
}
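A usage sketch for StackPrinter, mirroring how the tokenizer later in this diff wraps failures; the input string, the messages, and the OffsetStrIter type (added in src/iter.rs below) are illustrative assumptions rather than code from the commit.

    // Assumes: use iter::OffsetStrIter;
    fn stack_demo() {
        let i = OffsetStrIter::new("let foo = ;");
        let inner = abortable_parser::Error::new("Expected Expression".to_string(), Box::new(i.clone()));
        let outer = abortable_parser::Error::caused_by(
            "Invalid Token encountered",
            Box::new(inner),
            Box::new(i.clone()),
        );
        // Walks the cause chain: the outer message prints first with its
        // line/column, then "Caused by:" and the inner message indented one tab.
        eprintln!("{}", StackPrinter { err: outer });
    }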

View File

@ -82,21 +82,21 @@ mod test {
#[test] #[test]
fn test_format_happy_path() { fn test_format_happy_path() {
let formatter = Formatter::new("foo @ @ \\@", vec!["bar", "quux"]); let formatter = Formatter::new("foo @ @ \\@", vec!["bar", "quux"]);
let pos = Position { line: 0, column: 0 }; let pos = Position::new(0, 0, 0);
assert_eq!(formatter.render(&pos).unwrap(), "foo bar quux @"); assert_eq!(formatter.render(&pos).unwrap(), "foo bar quux @");
} }
#[test] #[test]
fn test_format_happy_wrong_too_few_args() { fn test_format_happy_wrong_too_few_args() {
let formatter = Formatter::new("foo @ @ \\@", vec!["bar"]); let formatter = Formatter::new("foo @ @ \\@", vec!["bar"]);
let pos = Position { line: 0, column: 0 }; let pos = Position::new(0, 0, 0);
assert!(formatter.render(&pos).is_err()); assert!(formatter.render(&pos).is_err());
} }
#[test] #[test]
fn test_format_happy_wrong_too_many_args() { fn test_format_happy_wrong_too_many_args() {
let formatter = Formatter::new("foo @ @ \\@", vec!["bar", "quux", "baz"]); let formatter = Formatter::new("foo @ @ \\@", vec!["bar", "quux", "baz"]);
let pos = Position { line: 0, column: 0 }; let pos = Position::new(0, 0, 0);
assert!(formatter.render(&pos).is_err()); assert!(formatter.render(&pos).is_err());
} }
} }

107
src/iter.rs Normal file
View File

@ -0,0 +1,107 @@
use std::convert::From;
use std::iter::Iterator;
use abortable_parser::iter::{SliceIter, StrIter};
use abortable_parser::{InputIter, Offsetable, Peekable, Positioned, Seekable, Span, SpanRange};
use ast::{Position, Token};
#[derive(Debug)]
pub struct OffsetStrIter<'a> {
contained: StrIter<'a>,
line_offset: usize,
col_offset: usize,
}
impl<'a> OffsetStrIter<'a> {
pub fn new(input: &'a str) -> Self {
Self::new_with_offsets(input, 0, 0)
}
pub fn new_with_offsets(input: &'a str, line_offset: usize, col_offset: usize) -> Self {
OffsetStrIter {
contained: StrIter::new(input),
line_offset: line_offset,
col_offset: col_offset,
}
}
}
impl<'a> Iterator for OffsetStrIter<'a> {
type Item = &'a u8;
fn next(&mut self) -> Option<Self::Item> {
self.contained.next()
}
}
impl<'a> Offsetable for OffsetStrIter<'a> {
fn get_offset(&self) -> usize {
self.contained.get_offset()
}
}
impl<'a> Clone for OffsetStrIter<'a> {
fn clone(&self) -> Self {
OffsetStrIter {
contained: self.contained.clone(),
line_offset: self.line_offset,
col_offset: self.col_offset,
}
}
}
impl<'a> From<&'a str> for OffsetStrIter<'a> {
fn from(source: &'a str) -> Self {
OffsetStrIter {
contained: StrIter::new(source),
line_offset: 0,
col_offset: 0,
}
}
}
impl<'a> Seekable for OffsetStrIter<'a> {
fn seek(&mut self, to: usize) -> usize {
self.contained.seek(to)
}
}
impl<'a> Span<&'a str> for OffsetStrIter<'a> {
fn span(&self, idx: SpanRange) -> &'a str {
self.contained.span(idx)
}
}
impl<'a> Peekable<&'a u8> for OffsetStrIter<'a> {
fn peek_next(&self) -> Option<&'a u8> {
self.contained.peek_next()
}
}
impl<'a> Positioned for OffsetStrIter<'a> {
fn line(&self) -> usize {
self.contained.line() + self.line_offset
}
fn column(&self) -> usize {
self.contained.column() + self.col_offset
}
}
impl<'a> InputIter for OffsetStrIter<'a> {}
impl<'a> From<&'a SliceIter<'a, Token>> for Position {
fn from(source: &'a SliceIter<'a, Token>) -> Self {
match source.peek_next() {
Some(t) => t.pos.clone(),
None => Position::new(0, 0, 0),
}
}
}
impl<'a> From<&'a OffsetStrIter<'a>> for Position {
fn from(s: &'a OffsetStrIter<'a>) -> Position {
Position::new(s.line(), s.column(), s.get_offset())
}
}
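An illustrative sketch (not from the diff) of why the line and column offsets exist: a fragment evaluated from the middle of a larger document can report positions relative to that document rather than to the fragment itself.

    fn offsets_demo() {
        // Assume this snippet begins at line 10, column 4 of the enclosing file.
        let it = OffsetStrIter::new_with_offsets("let foo = 1;", 10, 4);
        // Positioned::line()/column() add the stored offsets to the wrapped StrIter.
        println!("fragment starts at {}:{}", it.line(), it.column());
    }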

View File

@ -444,9 +444,7 @@
// to succeed. // to succeed.
#![recursion_limit = "128"] #![recursion_limit = "128"]
#[macro_use] #[macro_use]
extern crate nom; extern crate abortable_parser;
#[macro_use]
extern crate nom_locate;
extern crate serde_json; extern crate serde_json;
extern crate serde_yaml; extern crate serde_yaml;
extern crate simple_error; extern crate simple_error;
@ -458,6 +456,7 @@ pub mod tokenizer;
pub mod build; pub mod build;
pub mod convert; pub mod convert;
pub mod error; pub mod error;
pub mod iter;
pub mod parse; pub mod parse;
mod format; mod format;

File diff suppressed because it is too large


View File

@ -14,14 +14,11 @@
//! Bottom up parser for precedence parsing of expressions separated by binary //! Bottom up parser for precedence parsing of expressions separated by binary
//! operators. //! operators.
use std; use abortable_parser::combinators::eoi;
use abortable_parser::{Error, Result, SliceIter};
use nom::{ErrorKind, IResult, InputIter, InputLength, Slice}; use super::{non_op_expression, NomResult};
use super::{non_op_expression, NomResult, ParseResult};
use ast::*; use ast::*;
use error;
use tokenizer::TokenIter;
/// Defines the intermediate stages of our bottom up parser for precedence parsing. /// Defines the intermediate stages of our bottom up parser for precedence parsing.
#[derive(Debug, PartialEq, Clone)] #[derive(Debug, PartialEq, Clone)]
@ -31,120 +28,119 @@ pub enum Element {
CompareOp(CompareType), CompareOp(CompareType),
} }
named!(math_op_type<TokenIter, Element, error::Error>, make_fn!(
alt!( math_op_type<SliceIter<Token>, Element>,
do_parse!(punct!("+") >> (Element::MathOp(BinaryExprType::Add))) | either!(
do_parse!(punct!("-") >> (Element::MathOp(BinaryExprType::Sub))) | do_each!(
do_parse!(punct!("*") >> (Element::MathOp(BinaryExprType::Mul))) | _ => punct!("+"),
do_parse!(punct!("/") >> (Element::MathOp(BinaryExprType::Div))) (Element::MathOp(BinaryExprType::Add))),
do_each!(
_ => punct!("-"),
(Element::MathOp(BinaryExprType::Sub))),
do_each!(
_ => punct!("*"),
(Element::MathOp(BinaryExprType::Mul))),
do_each!(
_ => punct!("/"),
(Element::MathOp(BinaryExprType::Div)))
) )
); );
fn parse_expression(i: OpListIter) -> IResult<OpListIter, Expression, error::Error> { fn parse_expression(i: SliceIter<Element>) -> Result<SliceIter<Element>, Expression> {
let i_ = i.clone(); let mut i_ = i.clone();
if i_.input_len() == 0 { if eoi(i_.clone()).is_complete() {
return IResult::Error(ErrorKind::Custom(error::Error::new( return Result::Abort(Error::new(
format!("Expected Expression found End Of Input"), "Expected Expression found End Of Input",
error::ErrorType::IncompleteParsing, Box::new(i_),
// TODO(jwall): This position information is incorrect. ));
Position { line: 0, column: 0 },
)));
} }
let el = &(i_[0]); let el = i_.next();
if let &Element::Expr(ref expr) = el { if let Some(&Element::Expr(ref expr)) = el {
return IResult::Done(i.slice(1..), expr.clone()); return Result::Complete(i_.clone(), expr.clone());
} }
return IResult::Error(ErrorKind::Custom(error::Error::new( return Result::Fail(Error::new(
format!( format!(
"Error while parsing Binary Expression Unexpected Operator {:?}", "Error while parsing Binary Expression Expected Expression got {:?}",
el el
), ),
error::ErrorType::ParseError, Box::new(i_),
// TODO(jwall): This position information is incorrect. ));
Position { line: 0, column: 0 },
)));
} }
fn parse_sum_operator(i: OpListIter) -> IResult<OpListIter, BinaryExprType, error::Error> { fn parse_sum_operator(i: SliceIter<Element>) -> Result<SliceIter<Element>, BinaryExprType> {
let i_ = i.clone(); let mut i_ = i.clone();
if i_.input_len() == 0 { if eoi(i_.clone()).is_complete() {
return IResult::Error(ErrorKind::Custom(error::Error::new( return Result::Fail(Error::new(
format!("Expected Expression found End Of Input"), format!("Expected Expression found End Of Input"),
error::ErrorType::IncompleteParsing, Box::new(i_),
// TODO(jwall): This position information is incorrect. ));
Position { line: 0, column: 0 },
)));
} }
let el = &(i_[0]); let el = i_.next();
if let &Element::MathOp(ref op) = el { if let Some(&Element::MathOp(ref op)) = el {
match op { match op {
&BinaryExprType::Add => { &BinaryExprType::Add => {
return IResult::Done(i.slice(1..), op.clone()); return Result::Complete(i_.clone(), op.clone());
} }
&BinaryExprType::Sub => { &BinaryExprType::Sub => {
return IResult::Done(i.slice(1..), op.clone()); return Result::Complete(i_.clone(), op.clone());
} }
_other => { _other => {
// noop // noop
} }
}; };
} }
return IResult::Error(ErrorKind::Custom(error::Error::new( return Result::Fail(Error::new(
format!( format!(
"Error while parsing Binary Expression Unexpected Operator {:?}", "Error while parsing Binary Expression Unexpected Operator {:?}",
el el
), ),
error::ErrorType::ParseError, Box::new(i_),
// TODO(jwall): This position information is incorrect. ));
Position { line: 0, column: 0 },
)));
} }
fn tuple_to_binary_expression( fn tuple_to_binary_expression(
tpl: (BinaryExprType, Expression, Expression), kind: BinaryExprType,
) -> ParseResult<Expression> { left: Expression,
let pos = tpl.1.pos().clone(); right: Expression,
Ok(Expression::Binary(BinaryOpDef { ) -> Expression {
kind: tpl.0, let pos = left.pos().clone();
left: Box::new(tpl.1), Expression::Binary(BinaryOpDef {
right: Box::new(tpl.2), kind: kind,
left: Box::new(left),
right: Box::new(right),
pos: pos, pos: pos,
})) })
} }
fn parse_product_operator(i: OpListIter) -> IResult<OpListIter, BinaryExprType, error::Error> { fn parse_product_operator(i: SliceIter<Element>) -> Result<SliceIter<Element>, BinaryExprType> {
let i_ = i.clone(); let mut i_ = i.clone();
if i_.input_len() == 0 { if eoi(i_.clone()).is_complete() {
return IResult::Error(ErrorKind::Custom(error::Error::new( return Result::Fail(Error::new(
format!("Expected Expression found End Of Input"), format!("Expected Expression found End Of Input"),
error::ErrorType::IncompleteParsing, Box::new(i_),
// TODO(jwall): This position information is incorrect. ));
Position { line: 0, column: 0 },
)));
} }
let el = &(i_[0]); let el = i_.next();
if let &Element::MathOp(ref op) = el { if let Some(&Element::MathOp(ref op)) = el {
match op { match op {
&BinaryExprType::Mul => { &BinaryExprType::Mul => {
return IResult::Done(i.slice(1..), op.clone()); return Result::Complete(i_.clone(), op.clone());
} }
&BinaryExprType::Div => { &BinaryExprType::Div => {
return IResult::Done(i.slice(1..), op.clone()); return Result::Complete(i_.clone(), op.clone());
} }
_other => { _other => {
// noop // noop
} }
}; };
} }
return IResult::Error(ErrorKind::Custom(error::Error::new( return Result::Fail(Error::new(
format!( format!(
"Error while parsing Binary Expression Unexpected Operator {:?}", "Error while parsing Binary Expression Unexpected Operator {:?}",
el el
), ),
error::ErrorType::ParseError, Box::new(i_),
// TODO(jwall): This position information is incorrect. ));
Position { line: 0, column: 0 },
)));
} }
/// do_binary_expr implements precedence based parsing where the more tightly bound /// do_binary_expr implements precedence based parsing where the more tightly bound
@ -152,15 +148,15 @@ fn parse_product_operator(i: OpListIter) -> IResult<OpListIter, BinaryExprType,
/// as the most tightly bound expressions. /// as the most tightly bound expressions.
macro_rules! do_binary_expr { macro_rules! do_binary_expr {
($i:expr, $oprule:ident, $lowerrule:ident) => { ($i:expr, $oprule:ident, $lowerrule:ident) => {
do_binary_expr!($i, call!($oprule), $lowerrule) do_binary_expr!($i, run!($oprule), $lowerrule)
}; };
($i:expr, $oprule:ident, $lowerrule:ident!( $($lowerargs:tt)* )) => { ($i:expr, $oprule:ident, $lowerrule:ident!( $($lowerargs:tt)* )) => {
do_binary_expr!($i, call!($oprule), $lowerrule!($($lowerargs)*)) do_binary_expr!($i, run!($oprule), $lowerrule!($($lowerargs)*))
}; };
($i:expr, $oprule:ident) => { ($i:expr, $oprule:ident) => {
do_binary_expr!($i, call!($oprule)) do_binary_expr!($i, run!($oprule))
}; };
($i:expr, $oprule:ident!( $($args:tt)* )) => { ($i:expr, $oprule:ident!( $($args:tt)* )) => {
@ -168,101 +164,97 @@ macro_rules! do_binary_expr {
}; };
($i:expr, $oprule:ident!( $($args:tt)* ), $lowerrule:ident) => { ($i:expr, $oprule:ident!( $($args:tt)* ), $lowerrule:ident) => {
do_binary_expr!($i, $oprule!($($args)*), call!($lowerrule)) do_binary_expr!($i, $oprule!($($args)*), run!($lowerrule))
}; };
($i:expr, $oprule:ident!( $($args:tt)* ), $lowerrule:ident!( $($lowerargs:tt)* )) => { ($i:expr, $oprule:ident!( $($args:tt)* ), $lowerrule:ident!( $($lowerargs:tt)* )) => {
map_res!($i, do_each!($i,
do_parse!( left => $lowerrule!($($lowerargs)*),
left: $lowerrule!($($lowerargs)*) >> typ => $oprule!($($args)*),
typ: $oprule!($($args)*) >> right => $lowerrule!($($lowerargs)*),
right: $lowerrule!($($lowerargs)*) >> (tuple_to_binary_expression(typ, left, right))
(typ, left, right)
),
tuple_to_binary_expression
) )
}; };
} }
named!(sum_expression<OpListIter, Expression, error::Error>, make_fn!(
sum_expression<SliceIter<Element>, Expression>,
do_binary_expr!( do_binary_expr!(
parse_sum_operator, parse_sum_operator,
alt!(trace_nom!(product_expression) | trace_nom!(parse_expression))) either!(trace_nom!(product_expression), trace_nom!(parse_expression))
)
); );
named!(product_expression<OpListIter, Expression, error::Error>, make_fn!(
do_binary_expr!( product_expression<SliceIter<Element>, Expression>,
parse_product_operator, do_binary_expr!(parse_product_operator, trace_nom!(parse_expression))
trace_nom!(parse_expression))
); );
named!(math_expression<OpListIter, Expression, error::Error>, make_fn!(
alt!(trace_nom!(sum_expression) | trace_nom!(product_expression)) math_expression<SliceIter<Element>, Expression>,
either!(trace_nom!(sum_expression), trace_nom!(product_expression))
); );
// TODO(jwall): Change comparison operators to use the do_binary_expr! with precedence? // TODO(jwall): Change comparison operators to use the do_binary_expr! with precedence?
fn tuple_to_compare_expression( fn tuple_to_compare_expression(
tpl: (CompareType, Expression, Expression), kind: CompareType,
) -> ParseResult<Expression> { left: Expression,
let pos = tpl.1.pos().clone(); right: Expression,
Ok(Expression::Compare(ComparisonDef { ) -> Expression {
kind: tpl.0, let pos = left.pos().clone();
left: Box::new(tpl.1), Expression::Compare(ComparisonDef {
right: Box::new(tpl.2), kind: kind,
left: Box::new(left),
right: Box::new(right),
pos: pos, pos: pos,
})) })
} }
named!(compare_op_type<TokenIter, Element, error::Error>, make_fn!(
alt!( compare_op_type<SliceIter<Token>, Element>,
do_parse!(punct!("==") >> (Element::CompareOp(CompareType::Equal))) | either!(
do_parse!(punct!("!=") >> (Element::CompareOp(CompareType::NotEqual))) | do_each!(_ => punct!("=="), (Element::CompareOp(CompareType::Equal))),
do_parse!(punct!("<=") >> (Element::CompareOp(CompareType::LTEqual))) | do_each!(_ => punct!("!="), (Element::CompareOp(CompareType::NotEqual))),
do_parse!(punct!(">=") >> (Element::CompareOp(CompareType::GTEqual))) | do_each!(_ => punct!("<="), (Element::CompareOp(CompareType::LTEqual))),
do_parse!(punct!("<") >> (Element::CompareOp(CompareType::LT))) | do_each!(_ => punct!(">="), (Element::CompareOp(CompareType::GTEqual))),
do_parse!(punct!(">") >> (Element::CompareOp(CompareType::GT))) do_each!(_ => punct!("<"), (Element::CompareOp(CompareType::LT))),
do_each!(_ => punct!(">"), (Element::CompareOp(CompareType::GT)))
) )
); );
fn parse_compare_operator(i: OpListIter) -> IResult<OpListIter, CompareType, error::Error> { fn parse_compare_operator(i: SliceIter<Element>) -> Result<SliceIter<Element>, CompareType> {
let i_ = i.clone(); let mut i_ = i.clone();
if i_.input_len() == 0 { if eoi(i_.clone()).is_complete() {
return IResult::Error(ErrorKind::Custom(error::Error::new( return Result::Fail(Error::new(
format!("Expected Expression found End Of Input"), format!("Expected Expression found End Of Input"),
error::ErrorType::IncompleteParsing, Box::new(i_),
// TODO(jwall): This position information is incorrect. ));
Position { line: 0, column: 0 },
)));
} }
let el = &(i_[0]); let el = i_.next();
if let &Element::CompareOp(ref op) = el { if let Some(&Element::CompareOp(ref op)) = el {
return IResult::Done(i.slice(1..), op.clone()); return Result::Complete(i_.clone(), op.clone());
} }
return IResult::Error(ErrorKind::Custom(error::Error::new( return Result::Fail(Error::new(
format!( format!(
"Error while parsing Binary Expression Unexpected Operator {:?}", "Error while parsing Binary Expression Unexpected Operator {:?}",
el el
), ),
error::ErrorType::ParseError, Box::new(i),
// TODO(jwall): This position information is incorrect. ));
Position { line: 0, column: 0 },
)));
} }
named!(compare_expression<OpListIter, Expression, error::Error>, make_fn!(
map_res!( compare_expression<SliceIter<Element>, Expression>,
do_parse!( do_each!(
left: alt!(trace_nom!(math_expression) | trace_nom!(parse_expression)) >> left => either!(trace_nom!(math_expression), trace_nom!(parse_expression)),
typ: parse_compare_operator >> typ => parse_compare_operator,
right: alt!(trace_nom!(math_expression) | trace_nom!(parse_expression)) >> right => either!(trace_nom!(math_expression), trace_nom!(parse_expression)),
(typ, left, right) (tuple_to_compare_expression(typ, left, right))
),
tuple_to_compare_expression
) )
); );
/// Parse a list of expressions separated by operators into a Vec<Element>. /// Parse a list of expressions separated by operators into a Vec<Element>.
fn parse_operand_list(i: TokenIter) -> NomResult<Vec<Element>> { fn parse_operand_list<'a>(i: SliceIter<'a, Token>) -> NomResult<'a, Vec<Element>> {
// 1. First try to parse a non_op_expression, // 1. First try to parse a non_op_expression,
let mut _i = i.clone(); let mut _i = i.clone();
let mut list = Vec::new(); let mut list = Vec::new();
@ -271,148 +263,101 @@ fn parse_operand_list(i: TokenIter) -> NomResult<Vec<Element>> {
loop { loop {
// 2. Parse a non_op_expression. // 2. Parse a non_op_expression.
match non_op_expression(_i.clone()) { match non_op_expression(_i.clone()) {
IResult::Error(e) => { Result::Fail(e) => {
// A failure to parse an expression // A failure to parse an expression
// is always an error. // is always an error.
return IResult::Error(e); return Result::Fail(e);
} }
IResult::Incomplete(i) => { Result::Abort(e) => {
return IResult::Incomplete(i); // A failure to parse an expression
// is always an error.
return Result::Abort(e);
} }
IResult::Done(rest, expr) => { Result::Incomplete(i) => {
return Result::Incomplete(i);
}
Result::Complete(rest, expr) => {
list.push(Element::Expr(expr)); list.push(Element::Expr(expr));
_i = rest.clone(); _i = rest.clone();
} }
} }
// 3. Parse an operator. // 3. Parse an operator.
match alt!(_i, math_op_type | compare_op_type) { match either!(_i.clone(), math_op_type, compare_op_type) {
IResult::Error(e) => { Result::Fail(e) => {
if firstrun { if firstrun {
// If we don't find an operator in our first // If we don't find an operator in our first
// run then this is not an operand list. // run then this is not an operand list.
return IResult::Error(e); return Result::Fail(e);
} }
// if we don't find one on subsequent runs then // if we don't find one on subsequent runs then
// that's the end of the operand list. // that's the end of the operand list.
break; break;
} }
IResult::Incomplete(i) => { Result::Abort(e) => {
return IResult::Incomplete(i); // A failure to parse an expression
// is always an error.
return Result::Abort(e);
} }
IResult::Done(rest, el) => { Result::Incomplete(i) => {
return Result::Incomplete(i);
}
Result::Complete(rest, el) => {
list.push(el); list.push(el);
_i = rest.clone(); _i = rest.clone();
} }
} }
firstrun = false; firstrun = false;
} }
return IResult::Done(_i, list); return Result::Complete(_i, list);
}
#[derive(Clone, Debug, PartialEq)]
pub struct OpListIter<'a> {
pub source: &'a [Element],
}
impl<'a> OpListIter<'a> {
pub fn len(&self) -> usize {
self.source.len()
}
}
impl<'a> InputLength for OpListIter<'a> {
fn input_len(&self) -> usize {
self.source.input_len()
}
}
macro_rules! impl_op_iter_slice {
($r:ty) => {
impl<'a> Slice<$r> for OpListIter<'a> {
fn slice(&self, range: $r) -> Self {
OpListIter {
source: self.source.slice(range),
}
}
}
};
}
impl_op_iter_slice!(std::ops::Range<usize>);
impl_op_iter_slice!(std::ops::RangeTo<usize>);
impl_op_iter_slice!(std::ops::RangeFrom<usize>);
impl_op_iter_slice!(std::ops::RangeFull);
impl<'a> std::ops::Index<usize> for OpListIter<'a> {
type Output = Element;
fn index(&self, i: usize) -> &Self::Output {
&self.source[i]
}
}
impl<'a> InputIter for OpListIter<'a> {
type Item = &'a Element;
type RawItem = Element;
type Iter = std::iter::Enumerate<std::slice::Iter<'a, Self::RawItem>>;
type IterElem = std::slice::Iter<'a, Self::RawItem>;
fn iter_indices(&self) -> Self::Iter {
self.source.iter().enumerate()
}
fn iter_elements(&self) -> Self::IterElem {
self.source.iter()
}
fn position<P>(&self, predicate: P) -> Option<usize>
where
P: Fn(Self::RawItem) -> bool,
{
for (o, v) in self.iter_indices() {
if predicate(v.clone()) {
return Some(o);
}
}
None
}
fn slice_index(&self, count: usize) -> Option<usize> {
let mut cnt = 0;
for (index, _) in self.iter_indices() {
if cnt == count {
return Some(index);
}
cnt += 1;
}
if cnt == count {
return Some(self.len());
}
None
}
} }
/// Parse a binary operator expression. /// Parse a binary operator expression.
pub fn op_expression(i: TokenIter) -> NomResult<Expression> { pub fn op_expression<'a>(i: SliceIter<'a, Token>) -> Result<SliceIter<Token>, Expression> {
let preparse = parse_operand_list(i.clone()); let preparse = parse_operand_list(i.clone());
match preparse { match preparse {
IResult::Error(e) => IResult::Error(e), Result::Fail(e) => {
IResult::Incomplete(i) => IResult::Incomplete(i), let err = Error::caused_by(
IResult::Done(rest, oplist) => { "Failed while parsing operator expression",
let mut i_ = OpListIter { Box::new(e),
source: oplist.as_slice(), Box::new(i),
}; );
Result::Fail(err)
let parse_result = alt!( }
i_, Result::Abort(e) => {
trace_nom!(compare_expression) | trace_nom!(math_expression) let err = Error::caused_by(
"Failed while parsing operator expression",
Box::new(e),
Box::new(i),
);
Result::Fail(err)
}
Result::Incomplete(i) => Result::Incomplete(i),
Result::Complete(rest, oplist) => {
let mut i_ = SliceIter::new(&oplist);
let parse_result = either!(
i_.clone(),
trace_nom!(compare_expression),
trace_nom!(math_expression)
); );
match parse_result { match parse_result {
IResult::Error(e) => IResult::Error(e), Result::Fail(_e) => {
IResult::Incomplete(i) => IResult::Incomplete(i), // TODO(jwall): It would be good to be able to use caused_by here.
IResult::Done(_, expr) => IResult::Done(rest.clone(), expr), let err = Error::new(
"Failed while parsing operator expression",
Box::new(rest.clone()),
);
Result::Fail(err)
}
Result::Abort(_e) => {
let err = Error::new(
"Failed while parsing operator expression",
Box::new(rest.clone()),
);
Result::Abort(err)
}
Result::Incomplete(_) => Result::Incomplete(i.clone()),
Result::Complete(_, expr) => Result::Complete(rest.clone(), expr),
} }
} }
} }
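A hedged sketch of the two-pass flow this module now uses: parse_operand_list flattens the token stream into a Vec<Element>, and the precedence parsers then run over a SliceIter of those elements. The AST constructors mirror the builder tests elsewhere in this diff; this is illustration, not code from the commit.

    fn precedence_demo() {
        let ops = vec![
            Element::Expr(Expression::Simple(Value::Int(value_node!(1, Position::new(1, 1, 0))))),
            Element::MathOp(BinaryExprType::Add),
            Element::Expr(Expression::Simple(Value::Int(value_node!(2, Position::new(1, 5, 4))))),
        ];
        // Expr, MathOp(Add), Expr reduces to a single Expression::Binary node.
        if let Result::Complete(_, expr) = math_expression(SliceIter::new(&ops)) {
            println!("parsed: {:?}", expr);
        }
    }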

File diff suppressed because it is too large

View File

@ -12,415 +12,424 @@
// See the License for the specific language governing permissions and // See the License for the specific language governing permissions and
// limitations under the License. // limitations under the License.
//! The tokenization stage of the ucg compiler. //! The tokenization stage of the ucg compiler.
use ast::*;
use error;
use nom;
use nom::{alpha, digit, is_alphanumeric, multispace};
use nom::{InputIter, InputLength, Slice};
use nom_locate::LocatedSpan;
use std; use std;
use std::result::Result;
pub type Span<'a> = LocatedSpan<&'a str>; use abortable_parser::combinators::*;
use abortable_parser::iter::SliceIter;
use abortable_parser::{Error, Offsetable, Result};
impl<'a> From<Span<'a>> for Position { use ast::*;
fn from(s: Span) -> Position { use error::StackPrinter;
Position { use iter::OffsetStrIter;
line: s.line as usize,
column: s.get_column() as usize, fn is_symbol_char<'a>(i: OffsetStrIter<'a>) -> Result<OffsetStrIter<'a>, u8> {
let mut _i = i.clone();
let c = match _i.next() {
Some(c) => *c,
None => {
return Result::Fail(Error::new(
"Unexpected End of Input".to_string(),
Box::new(_i.clone()),
))
} }
};
if (c as char).is_ascii_alphanumeric() || c == b'-' || c == b'_' {
Result::Complete(_i, c)
} else {
Result::Fail(Error::new(
"Not a symbol character".to_string(),
Box::new(_i.clone()),
))
} }
} }
fn is_symbol_char(c: char) -> bool { fn escapequoted<'a>(input: OffsetStrIter<'a>) -> Result<OffsetStrIter<'a>, String> {
is_alphanumeric(c as u8) || c == '-' as char || c == '_' as char
}
fn escapequoted(input: Span) -> nom::IResult<Span, String> {
// loop until we find a " that is not preceded by \. // loop until we find a " that is not preceded by \.
// Collapse all \<char> to just char for escaping. // Collapse all \<char> to just char for escaping.
let mut frag = String::new(); let mut frag = String::new();
let mut escape = false; let mut escape = false;
for (i, c) in input.iter_indices() { let mut _input = input.clone();
if c == '\\' && !escape { loop {
let c = match _input.next() {
Some(c) => *c,
None => break,
};
if c == '\\' as u8 && !escape {
// eat this slash and set our escaping sentinel // eat this slash and set our escaping sentinel
escape = true; escape = true;
} else if c == '"' && !escape { } else if c == '"' as u8 && !escape {
// Bail if this is an unescaped " // Bail if this is an unescaped "
// we exit here. // we exit here.
return nom::IResult::Done(input.slice(i..), frag); return Result::Complete(_input, frag);
} else { } else {
// we accumulate this character. // we accumulate this character.
frag.push(c); frag.push(c as char);
escape = false; // reset our escaping sentinel escape = false; // reset our escaping sentinel
} }
} }
return nom::IResult::Incomplete(nom::Needed::Unknown); return Result::Incomplete(_input.clone());
} }
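A quick check of the escaping behaviour described above, mirroring the unit test later in this diff: backslash escapes collapse to the bare character, and the first unescaped quote ends the scan without being pushed into the fragment.

    fn escapequoted_demo() {
        let result = escapequoted(OffsetStrIter::new("foo \\\"bar\""));
        if let Result::Complete(_rest, frag) = result {
            assert_eq!(frag, "foo \"bar");
        }
    }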
named!(strtok( Span ) -> Token, make_fn!(strtok<OffsetStrIter, Token>,
do_parse!( do_each!(
span: position!() >> span => input!(),
tag!("\"") >> _ => text_token!("\""),
frag: escapequoted >> frag => escapequoted,
tag!("\"") >>
(Token{ (Token{
typ: TokenType::QUOTED, typ: TokenType::QUOTED,
pos: Position::from(span), pos: Position::from(&span),
fragment: frag, fragment: frag.to_string(),
}) })
) )
); );
named!(pipequotetok( Span ) -> Token, make_fn!(pipequotetok<OffsetStrIter, Token>,
do_parse!( do_each!(
span: position!() >> p => input!(),
tag!("|") >> _ => text_token!("|"),
frag: take_until!("|") >> frag => until!(text_token!("|")),
tag!("|") >> _ => text_token!("|"),
(Token{ (Token{
typ: TokenType::PIPEQUOTE, typ: TokenType::PIPEQUOTE,
pos: Position::from(span), pos: Position::from(&p),
fragment: frag.fragment.to_string(), fragment: frag.to_string(),
}) })
) )
); );
named!(barewordtok( Span ) -> Token, make_fn!(barewordtok<OffsetStrIter, Token>,
do_parse!( do_each!(
span: position!() >> span => input!(),
frag: preceded!(peek!(alpha), take_while!(is_symbol_char)) >> _ => peek!(ascii_alpha),
frag => consume_all!(is_symbol_char),
(Token{ (Token{
typ: TokenType::BAREWORD, typ: TokenType::BAREWORD,
pos: Position::from(span), pos: Position::from(&span),
fragment: frag.fragment.to_string(), fragment: frag.to_string(),
}) })
) )
); );
named!(digittok( Span ) -> Token, make_fn!(digittok<OffsetStrIter, Token>,
do_parse!( do_each!(
span: position!() >> span => input!(),
digits: digit >> _ => peek!(ascii_digit),
(Token{ digits => consume_all!(ascii_digit),
typ: TokenType::DIGIT, (Token{
pos: Position::from(span), typ: TokenType::DIGIT,
fragment: digits.fragment.to_string(), pos: Position::from(&span),
}) fragment: digits.to_string(),
})
) )
); );
named!(booleantok( Span ) -> Token, make_fn!(booleantok<OffsetStrIter, Token>,
do_parse!( do_each!(
span: position!() >> span => input!(),
b: alt!( token => either!(
tag!("true") | text_token!("true"),
tag!("false") text_token!("false")
) >> ),
(Token{ (Token{
typ: TokenType::BOOLEAN, typ: TokenType::BOOLEAN,
pos: Position::from(span), pos: Position::from(&span),
fragment: b.fragment.to_string(), fragment: token.to_string(),
}) })
) )
); );
/// do_tag_tok! is a helper macro to make building a simple tag token /// do_text_token_tok! is a helper macro to make building a simple text_token token
/// less code. /// less code.
macro_rules! do_tag_tok { macro_rules! do_text_token_tok {
// NOTE(jwall): Nom macros do magic with their inputs. They in fact ($i:expr, $type:expr, $text_token:expr, WS) => {
// rewrite your macro argumets for you by adding an initial argument do_each!($i,
// for all their sub-macros. Which means we require this $i paramater span => input!(),
// on the first macro invocation but not the rest. frag => text_token!($text_token),
($i:expr, $type:expr, $tag:expr,WS) => { _ => either!(whitespace, comment),
do_parse!( (Token {
$i, typ: $type,
span: position!() >> frag: tag!($tag) >> alt!(whitespace | comment) >> (Token { pos: Position::from(&span),
typ: $type, fragment: frag.to_string(),
pos: Position::from(span), })
fragment: frag.fragment.to_string(), )
})
)
}; };
($i:expr, $type:expr, $tag:expr) => {
do_parse!( ($i:expr, $type:expr, $text_token:expr) => {
$i, do_each!($i,
span: position!() >> frag: tag!($tag) >> (Token { span => input!(),
typ: $type, frag => text_token!($text_token),
pos: Position::from(span), (Token {
fragment: frag.fragment.to_string(), typ: $type,
}) pos: Position::from(&span),
) fragment: frag.to_string(),
})
)
}; };
} }
named!(emptytok( Span ) -> Token, make_fn!(emptytok<OffsetStrIter, Token>,
do_tag_tok!(TokenType::EMPTY, "NULL") do_text_token_tok!(TokenType::EMPTY, "NULL")
); );
named!(commatok( Span ) -> Token, make_fn!(commatok<OffsetStrIter, Token>,
do_tag_tok!(TokenType::PUNCT, ",") do_text_token_tok!(TokenType::PUNCT, ",")
); );
named!(lbracetok( Span ) -> Token, make_fn!(lbracetok<OffsetStrIter, Token>,
do_tag_tok!(TokenType::PUNCT, "{") do_text_token_tok!(TokenType::PUNCT, "{")
); );
named!(rbracetok( Span ) -> Token, make_fn!(rbracetok<OffsetStrIter, Token>,
do_tag_tok!(TokenType::PUNCT, "}") do_text_token_tok!(TokenType::PUNCT, "}")
); );
named!(lparentok( Span ) -> Token, make_fn!(lparentok<OffsetStrIter, Token>,
do_tag_tok!(TokenType::PUNCT, "(") do_text_token_tok!(TokenType::PUNCT, "(")
); );
named!(rparentok( Span ) -> Token, make_fn!(rparentok<OffsetStrIter, Token>,
do_tag_tok!(TokenType::PUNCT, ")") do_text_token_tok!(TokenType::PUNCT, ")")
); );
named!(dottok( Span ) -> Token, make_fn!(dottok<OffsetStrIter, Token>,
do_tag_tok!(TokenType::PUNCT, ".") do_text_token_tok!(TokenType::PUNCT, ".")
); );
named!(plustok( Span ) -> Token, make_fn!(plustok<OffsetStrIter, Token>,
do_tag_tok!(TokenType::PUNCT, "+") do_text_token_tok!(TokenType::PUNCT, "+")
); );
named!(dashtok( Span ) -> Token, make_fn!(dashtok<OffsetStrIter, Token>,
do_tag_tok!(TokenType::PUNCT, "-") do_text_token_tok!(TokenType::PUNCT, "-")
); );
named!(startok( Span ) -> Token, make_fn!(startok<OffsetStrIter, Token>,
do_tag_tok!(TokenType::PUNCT, "*") do_text_token_tok!(TokenType::PUNCT, "*")
); );
named!(slashtok( Span ) -> Token, make_fn!(slashtok<OffsetStrIter, Token>,
do_tag_tok!(TokenType::PUNCT, "/") do_text_token_tok!(TokenType::PUNCT, "/")
); );
named!(pcttok( Span ) -> Token, make_fn!(pcttok<OffsetStrIter, Token>,
do_tag_tok!(TokenType::PUNCT, "%") do_text_token_tok!(TokenType::PUNCT, "%")
); );
named!(eqeqtok( Span ) -> Token, make_fn!(eqeqtok<OffsetStrIter, Token>,
do_tag_tok!(TokenType::PUNCT, "==") do_text_token_tok!(TokenType::PUNCT, "==")
); );
named!(notequaltok( Span ) -> Token, make_fn!(notequaltok<OffsetStrIter, Token>,
do_tag_tok!(TokenType::PUNCT, "!=") do_text_token_tok!(TokenType::PUNCT, "!=")
); );
named!(gttok( Span ) -> Token, make_fn!(gttok<OffsetStrIter, Token>,
do_tag_tok!(TokenType::PUNCT, ">") do_text_token_tok!(TokenType::PUNCT, ">")
); );
named!(gtequaltok( Span ) -> Token, make_fn!(gtequaltok<OffsetStrIter, Token>,
do_tag_tok!(TokenType::PUNCT, ">=") do_text_token_tok!(TokenType::PUNCT, ">=")
); );
named!(ltequaltok( Span ) -> Token, make_fn!(ltequaltok<OffsetStrIter, Token>,
do_tag_tok!(TokenType::PUNCT, "<=") do_text_token_tok!(TokenType::PUNCT, "<=")
); );
named!(lttok( Span ) -> Token, make_fn!(lttok<OffsetStrIter, Token>,
do_tag_tok!(TokenType::PUNCT, "<") do_text_token_tok!(TokenType::PUNCT, "<")
); );
named!(equaltok( Span ) -> Token, make_fn!(equaltok<OffsetStrIter, Token>,
do_tag_tok!(TokenType::PUNCT, "=") do_text_token_tok!(TokenType::PUNCT, "=")
); );
named!(semicolontok( Span ) -> Token, make_fn!(semicolontok<OffsetStrIter, Token>,
do_tag_tok!(TokenType::PUNCT, ";") do_text_token_tok!(TokenType::PUNCT, ";")
); );
named!(leftsquarebracket( Span ) -> Token, make_fn!(leftsquarebracket<OffsetStrIter, Token>,
do_tag_tok!(TokenType::PUNCT, "[") do_text_token_tok!(TokenType::PUNCT, "[")
); );
named!(rightsquarebracket( Span ) -> Token, make_fn!(rightsquarebracket<OffsetStrIter, Token>,
do_tag_tok!(TokenType::PUNCT, "]") do_text_token_tok!(TokenType::PUNCT, "]")
); );
named!(fatcommatok( Span ) -> Token, make_fn!(fatcommatok<OffsetStrIter, Token>,
do_tag_tok!(TokenType::PUNCT, "=>") do_text_token_tok!(TokenType::PUNCT, "=>")
); );
named!(selecttok( Span ) -> Token, make_fn!(selecttok<OffsetStrIter, Token>,
do_tag_tok!(TokenType::BAREWORD, "select", WS) do_text_token_tok!(TokenType::BAREWORD, "select", WS)
); );
named!(macrotok( Span ) -> Token, make_fn!(macrotok<OffsetStrIter, Token>,
do_tag_tok!(TokenType::BAREWORD, "macro", WS) do_text_token_tok!(TokenType::BAREWORD, "macro", WS)
); );
named!(lettok( Span ) -> Token, make_fn!(lettok<OffsetStrIter, Token>,
do_tag_tok!(TokenType::BAREWORD, "let", WS) do_text_token_tok!(TokenType::BAREWORD, "let", WS)
); );
named!(importtok( Span ) -> Token, make_fn!(importtok<OffsetStrIter, Token>,
do_tag_tok!(TokenType::BAREWORD, "import", WS) do_text_token_tok!(TokenType::BAREWORD, "import", WS)
); );
named!(asserttok( Span ) -> Token, make_fn!(asserttok<OffsetStrIter, Token>,
do_tag_tok!(TokenType::BAREWORD, "assert", WS) do_text_token_tok!(TokenType::BAREWORD, "assert", WS)
); );
named!(outtok( Span ) -> Token, make_fn!(outtok<OffsetStrIter, Token>,
do_tag_tok!(TokenType::BAREWORD, "out", WS) do_text_token_tok!(TokenType::BAREWORD, "out", WS)
); );
named!(astok( Span ) -> Token, make_fn!(astok<OffsetStrIter, Token>,
do_tag_tok!(TokenType::BAREWORD, "as", WS) do_text_token_tok!(TokenType::BAREWORD, "as", WS)
); );
named!(maptok( Span ) -> Token, make_fn!(maptok<OffsetStrIter, Token>,
do_tag_tok!(TokenType::BAREWORD, "map", WS) do_text_token_tok!(TokenType::BAREWORD, "map", WS)
); );
named!(filtertok( Span ) -> Token, make_fn!(filtertok<OffsetStrIter, Token>,
do_tag_tok!(TokenType::BAREWORD, "filter", WS) do_text_token_tok!(TokenType::BAREWORD, "filter", WS)
); );
fn end_of_input(input: Span) -> nom::IResult<Span, Token> { fn comment(input: OffsetStrIter) -> Result<OffsetStrIter, Token> {
match eof!(input,) { match text_token!(input, "//") {
nom::IResult::Done(_, _) => { Result::Complete(rest, _) => {
return nom::IResult::Done( match until!(
input,
make_tok!(EOF => input.line as usize,
input.get_column() as usize),
);
}
nom::IResult::Incomplete(_) => {
return nom::IResult::Incomplete(nom::Needed::Unknown);
}
nom::IResult::Error(e) => {
return nom::IResult::Error(e);
}
}
}
fn comment(input: Span) -> nom::IResult<Span, Token> {
match tag!(input, "//") {
nom::IResult::Done(rest, _) => {
match alt!(
rest, rest,
take_until_and_consume!("\r\n") | take_until_and_consume!("\n") either!(
eoi,
discard!(text_token!("\r\n")),
discard!(text_token!("\n"))
)
) { ) {
nom::IResult::Done(rest, cmt) => { Result::Complete(rest, cmt) => {
return nom::IResult::Done( return Result::Complete(rest, make_tok!(CMT => cmt.to_string(), input));
rest,
make_tok!(CMT => cmt.fragment.to_string(),
input.line as usize,
input.get_column() as usize),
);
} }
// If we didn't find a new line then we just grab everything. // If we didn't find a new line then we just grab everything.
_ => { _ => {
let blen = rest.input_len(); return Result::Abort(Error::new(
let next = rest.slice(blen..); "Unparsable comment".to_string(),
let tok = rest.slice(..blen); Box::new(rest.clone()),
return nom::IResult::Done( ));
next,
make_tok!(CMT => tok.fragment.to_string(),
input.line as usize, input.get_column() as usize
),
);
} }
} }
} }
nom::IResult::Incomplete(i) => return nom::IResult::Incomplete(i), Result::Incomplete(ctx) => return Result::Incomplete(ctx),
nom::IResult::Error(e) => return nom::IResult::Error(e), Result::Fail(e) => return Result::Fail(e),
Result::Abort(e) => return Result::Abort(e),
} }
} }
named!(whitespace( Span ) -> Token, make_fn!(whitespace<OffsetStrIter, Token>,
do_parse!( do_each!(
span: position!() >> span => input!(),
many1!(multispace) >> _ => peek!(ascii_ws),
_ => repeat!(ascii_ws),
(Token{ (Token{
typ: TokenType::WS, typ: TokenType::WS,
pos: Position::from(span), pos: Position::from(&span),
fragment: String::new(), fragment: String::new(),
}) })
) )
); );
named!(token( Span ) -> Token, make_fn!(end_of_input<OffsetStrIter, Token>,
alt!( do_each!(
strtok | span => input!(),
pipequotetok | _ => eoi,
emptytok | // This must come before the barewordtok (Token{
digittok | typ: TokenType::END,
commatok | pos: Position::from(&span),
rbracetok | fragment: String::new(),
lbracetok | })
lparentok | )
rparentok |
dottok |
plustok |
dashtok |
startok |
comment | // Note comment must come before slashtok
slashtok |
pcttok |
eqeqtok |
notequaltok |
complete!(gtequaltok) |
complete!(ltequaltok) |
gttok |
lttok |
fatcommatok | // Note fatcommatok must come before equaltok
equaltok |
semicolontok |
leftsquarebracket |
rightsquarebracket |
booleantok |
lettok |
outtok |
selecttok |
asserttok |
macrotok |
importtok |
astok |
maptok |
filtertok |
barewordtok |
whitespace |
end_of_input)
); );
/// Consumes an input Span and returns either a Vec<Token> or a nom::ErrorKind. fn token<'a>(input: OffsetStrIter<'a>) -> Result<OffsetStrIter<'a>, Token> {
pub fn tokenize(input: Span) -> Result<Vec<Token>, error::Error> { either!(
input,
strtok,
pipequotetok,
emptytok, // This must come before the barewordtok
digittok,
commatok,
rbracetok,
lbracetok,
lparentok,
rparentok,
dottok,
plustok,
dashtok,
startok,
comment, // Note comment must come before slashtok
slashtok,
pcttok,
eqeqtok,
notequaltok,
complete!("Not >=".to_string(), gtequaltok),
complete!("Not <=".to_string(), ltequaltok),
gttok,
lttok,
fatcommatok, // Note fatcommatok must come before equaltok
equaltok,
semicolontok,
leftsquarebracket,
rightsquarebracket,
booleantok,
lettok,
outtok,
selecttok,
asserttok,
macrotok,
importtok,
astok,
maptok,
filtertok,
barewordtok,
whitespace,
end_of_input
)
}
/// Consumes an input OffsetStrIter and returns either a Vec<Token> or an error String.
pub fn tokenize<'a>(input: OffsetStrIter<'a>) -> std::result::Result<Vec<Token>, String> {
let mut out = Vec::new(); let mut out = Vec::new();
let mut i = input; let mut i = input.clone();
loop { loop {
if i.input_len() == 0 { if let Result::Complete(_, _) = eoi(i.clone()) {
break; break;
} }
match token(i) { match token(i.clone()) {
nom::IResult::Error(_e) => { Result::Abort(e) => {
return Err(error::Error::new( let err = abortable_parser::Error::caused_by(
"Invalid Token encountered", "Invalid Token encountered",
error::ErrorType::UnexpectedToken, Box::new(e),
Position { Box::new(i.clone()),
line: i.line as usize, );
column: i.get_column() as usize, let ctx_err = StackPrinter { err: err };
}, return Err(format!("{}", ctx_err));
));
} }
nom::IResult::Incomplete(_) => { Result::Fail(e) => {
return Err(error::Error::new( let err = abortable_parser::Error::caused_by(
"Unexepcted end of Input", "Invalid Token encountered",
error::ErrorType::UnexpectedToken, Box::new(e),
Position { Box::new(i.clone()),
line: i.line as usize, );
column: i.get_column() as usize, let ctx_err = StackPrinter { err: err };
}, return Err(format!("{}", ctx_err));
));
} }
nom::IResult::Done(rest, tok) => { Result::Incomplete(_offset) => {
let err =
abortable_parser::Error::new("Invalid Token encountered", Box::new(i.clone()));
let ctx_err = StackPrinter { err: err };
return Err(format!("{}", ctx_err));
}
Result::Complete(rest, tok) => {
i = rest; i = rest;
if tok.typ == TokenType::COMMENT || tok.typ == TokenType::WS { if tok.typ == TokenType::COMMENT || tok.typ == TokenType::WS {
// we skip comments and whitespace // we skip comments and whitespace
@ -434,10 +443,7 @@ pub fn tokenize(input: Span) -> Result<Vec<Token>, error::Error> {
out.push(Token { out.push(Token {
fragment: String::new(), fragment: String::new(),
typ: TokenType::END, typ: TokenType::END,
pos: Position { pos: Position::from(&i),
line: i.line as usize,
column: i.get_column() as usize,
},
}); });
Ok(out) Ok(out)
} }
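A minimal usage sketch for the new tokenize signature; on failure the Err(String) already carries the rendered StackPrinter trace, so callers can print it directly.

    fn tokenize_demo() {
        match tokenize(OffsetStrIter::new("let foo = 1;")) {
            Ok(toks) => println!("{} tokens, ending with a TokenType::END marker", toks.len()),
            Err(trace) => eprintln!("{}", trace),
        }
    }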
@ -445,7 +451,7 @@ pub fn tokenize(input: Span) -> Result<Vec<Token>, error::Error> {
/// Clones a token. /// Clones a token.
/// ///
/// This is necessary to allow the match_type and match_token macros to work. /// This is necessary to allow the match_type and match_token macros to work.
pub fn token_clone(t: &Token) -> Result<Token, error::Error> { pub fn token_clone(t: &Token) -> std::result::Result<Token, Error<SliceIter<Token>>> {
Ok(t.clone()) Ok(t.clone())
} }
@ -517,28 +523,28 @@ macro_rules! match_type {
}; };
($i:expr, $t:expr, $msg:expr, $h:expr) => {{ ($i:expr, $t:expr, $msg:expr, $h:expr) => {{
let i_ = $i.clone(); use abortable_parser::combinators::eoi;
use nom::Slice; use abortable_parser::{Error, Result};
use std::convert::Into; use std;
if i_.input_len() == 0 {
nom::IResult::Error(nom::ErrorKind::Custom(error::Error::new( let mut _i = $i.clone();
format!("End of Input! {}", $msg), if eoi(_i.clone()).is_complete() {
error::ErrorType::IncompleteParsing, Result::Fail(Error::new(format!("End of Input! {}", $msg), Box::new(_i)))
Position { line: 0, column: 0 },
)))
} else { } else {
let tok = &(i_[0]); match _i.next() {
if tok.typ == $t { Some(tok) => {
match $h(tok) { if tok.typ == $t {
Result::Ok(v) => nom::IResult::Done($i.slice(1..), v), match $h(tok) {
Result::Err(e) => nom::IResult::Error(nom::ErrorKind::Custom(e.into())), std::result::Result::Ok(v) => Result::Complete(_i.clone(), v),
std::result::Result::Err(e) => {
Result::Fail(Error::caused_by($msg, Box::new(e), Box::new(_i)))
}
}
} else {
Result::Fail(Error::new($msg.to_string(), Box::new($i)))
}
} }
} else { None => Result::Fail(Error::new($msg.to_string(), Box::new($i))),
nom::IResult::Error(nom::ErrorKind::Custom(error::Error::new(
$msg.to_string(),
error::ErrorType::UnexpectedToken,
tok.pos.clone(),
)))
} }
} }
}}; }};
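A hedged sketch of a call site for the reworked match_type! contract above; the parser module is not shown in this hunk, so the function name here is hypothetical.

    fn digit_tok<'a>(i: SliceIter<'a, Token>) -> Result<SliceIter<'a, Token>, Token> {
        // Matches one DIGIT token and clones it via token_clone on success;
        // otherwise returns Result::Fail positioned at the offending token.
        match_type!(i, TokenType::DIGIT, "Not a digit token", token_clone)
    }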
@ -553,7 +559,7 @@ macro_rules! match_token {
}}; }};
($i:expr,PUNCT => $f:expr, $h:expr) => { ($i:expr,PUNCT => $f:expr, $h:expr) => {
match_token!($i, TokenType::PUNCT, $f, format!("Not PUNCT ({})", $f), $h) match_token!($i, TokenType::PUNCT, $f, format!("({})", $f), $h)
}; };
($i:expr,BAREWORD => $f:expr) => {{ ($i:expr,BAREWORD => $f:expr) => {{
@ -572,22 +578,26 @@ macro_rules! match_token {
}; };
($i:expr, $t:expr, $f:expr, $msg:expr, $h:expr) => {{ ($i:expr, $t:expr, $f:expr, $msg:expr, $h:expr) => {{
let i_ = $i.clone(); use abortable_parser::Result;
use nom; use std;
use nom::Slice; let mut i_ = $i.clone();
use std::convert::Into; let tok = i_.next();
let tok = &(i_[0]); if let Some(tok) = tok {
if tok.typ == $t && &tok.fragment == $f { if tok.typ == $t && &tok.fragment == $f {
match $h(tok) { match $h(tok) {
Result::Ok(v) => nom::IResult::Done($i.slice(1..), v), std::result::Result::Ok(v) => Result::Complete(i_.clone(), v),
Result::Err(e) => nom::IResult::Error(nom::ErrorKind::Custom(e.into())), std::result::Result::Err(e) => {
Result::Fail(Error::caused_by($msg, Box::new(e), Box::new(i_)))
}
}
} else {
Result::Fail(Error::new(
format!("Expected {} Instead is ({})", $msg, tok.fragment),
Box::new(i_),
))
} }
} else { } else {
nom::IResult::Error(nom::ErrorKind::Custom(error::Error::new( Result::Fail(Error::new("Unexpected End Of Input", Box::new(i_)))
format!("{} Instead is ({})", $msg, tok.fragment),
error::ErrorType::UnexpectedToken,
tok.pos.clone(),
)))
} }
}}; }};
} }
@ -607,103 +617,12 @@ macro_rules! word {
} }
/// pos gets the current position from a TokenIter input without consuming it. /// pos gets the current position from a TokenIter input without consuming it.
pub fn pos(i: TokenIter) -> nom::IResult<TokenIter, Position, error::Error> { pub fn pos<'a>(i: SliceIter<'a, Token>) -> Result<SliceIter<'a, Token>, Position> {
let tok = &i[0]; let mut _i = i.clone();
let tok = _i.next().unwrap();
let line = tok.pos.line; let line = tok.pos.line;
let column = tok.pos.column; let column = tok.pos.column;
nom::IResult::Done( Result::Complete(i.clone(), Position::new(line, column, i.get_offset()))
i.clone(),
Position {
line: line,
column: column,
},
)
}
/// TokenIter wraps a slice of Tokens and implements the various necessary
/// nom traits to use it as an input to nom parsers.
#[derive(Clone, Debug, PartialEq)]
pub struct TokenIter<'a> {
pub source: &'a [Token],
}
impl<'a> TokenIter<'a> {
pub fn len(&self) -> usize {
self.source.len()
}
}
impl<'a> nom::InputLength for TokenIter<'a> {
fn input_len(&self) -> usize {
self.source.input_len()
}
}
macro_rules! impl_token_iter_slice {
($r:ty) => {
impl<'a> nom::Slice<$r> for TokenIter<'a> {
fn slice(&self, range: $r) -> Self {
TokenIter {
source: self.source.slice(range),
}
}
}
};
}
impl_token_iter_slice!(std::ops::Range<usize>);
impl_token_iter_slice!(std::ops::RangeTo<usize>);
impl_token_iter_slice!(std::ops::RangeFrom<usize>);
impl_token_iter_slice!(std::ops::RangeFull);
impl<'a> std::ops::Index<usize> for TokenIter<'a> {
type Output = Token;
fn index(&self, i: usize) -> &Self::Output {
&self.source[i]
}
}
impl<'a> InputIter for TokenIter<'a> {
type Item = &'a Token;
type RawItem = Token;
type Iter = std::iter::Enumerate<std::slice::Iter<'a, Self::RawItem>>;
type IterElem = std::slice::Iter<'a, Self::RawItem>;
fn iter_indices(&self) -> Self::Iter {
self.source.iter().enumerate()
}
fn iter_elements(&self) -> Self::IterElem {
self.source.iter()
}
fn position<P>(&self, predicate: P) -> Option<usize>
where
P: Fn(Self::RawItem) -> bool,
{
for (o, v) in self.iter_indices() {
if predicate(v.clone()) {
return Some(o);
}
}
None
}
fn slice_index(&self, count: usize) -> Option<usize> {
let mut cnt = 0;
for (index, _) in self.iter_indices() {
if cnt == count {
return Some(index);
}
cnt += 1;
}
if cnt == count {
return Some(self.len());
}
None
}
} }
#[cfg(test)] #[cfg(test)]

View File

@ -1,12 +1,17 @@
use super::*; use super::*;
use nom;
use nom_locate::LocatedSpan; use abortable_parser::{Result, SliceIter};
use iter::OffsetStrIter;
#[test] #[test]
fn test_empty_token() { fn test_empty_token() {
let result = emptytok(LocatedSpan::new("NULL ")); let result = emptytok(OffsetStrIter::new("NULL "));
assert!(result.is_done(), format!("result {:?} is not done", result)); assert!(
if let nom::IResult::Done(_, tok) = result { result.is_complete(),
format!("result {:?} is not done", result)
);
if let Result::Complete(_, tok) = result {
assert_eq!(tok.fragment, "NULL"); assert_eq!(tok.fragment, "NULL");
assert_eq!(tok.typ, TokenType::EMPTY); assert_eq!(tok.typ, TokenType::EMPTY);
} }
@ -14,9 +19,12 @@ fn test_empty_token() {
#[test] #[test]
fn test_assert_token() { fn test_assert_token() {
let result = asserttok(LocatedSpan::new("assert ")); let result = asserttok(OffsetStrIter::new("assert "));
assert!(result.is_done(), format!("result {:?} is not done", result)); assert!(
if let nom::IResult::Done(_, tok) = result { result.is_complete(),
format!("result {:?} is not done", result)
);
if let Result::Complete(_, tok) = result {
assert_eq!(tok.fragment, "assert"); assert_eq!(tok.fragment, "assert");
assert_eq!(tok.typ, TokenType::BAREWORD); assert_eq!(tok.typ, TokenType::BAREWORD);
} }
@ -24,29 +32,56 @@ fn test_assert_token() {
#[test] #[test]
fn test_out_token() { fn test_out_token() {
let result = outtok(LocatedSpan::new("out ")); let result = outtok(OffsetStrIter::new("out "));
assert!(result.is_done(), format!("result {:?} is not done", result)); assert!(
if let nom::IResult::Done(_, tok) = result { result.is_complete(),
format!("result {:?} is not done", result)
);
if let Result::Complete(_, tok) = result {
assert_eq!(tok.fragment, "out"); assert_eq!(tok.fragment, "out");
assert_eq!(tok.typ, TokenType::BAREWORD); assert_eq!(tok.typ, TokenType::BAREWORD);
} }
} }
#[test]
fn test_out_token_with_comment() {
let result = outtok(OffsetStrIter::new("out//comment"));
assert!(
result.is_complete(),
format!("result {:?} is not done", result)
);
if let Result::Complete(_, tok) = result {
assert_eq!(tok.fragment, "out");
assert_eq!(tok.typ, TokenType::BAREWORD);
}
}
#[test]
fn test_not_out_token() {
let result = outtok(OffsetStrIter::new("output"));
assert!(result.is_fail(), format!("result {:?} is not fail", result));
}
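The two new tests pin down word-boundary behaviour: a trailing comment still terminates the bareword, while the longer identifier "output" must not satisfy the "out" rule. Where a test needs more than result.is_fail(), the failure can be matched explicitly; a sketch (the panic messages are illustrative):

    match outtok(OffsetStrIter::new("output")) {
        Result::Fail(_err) => (), // expected: a recoverable failure
        Result::Complete(_, tok) => panic!("should not have matched {:?}", tok),
        other => panic!("unexpected result {:?}", other),
    }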
#[test] #[test]
fn test_escape_quoted() { fn test_escape_quoted() {
let result = escapequoted(LocatedSpan::new("foo \\\"bar\"")); let result = escapequoted(OffsetStrIter::new("foo \\\"bar\""));
assert!(result.is_done(), format!("result {:?} is not ok", result)); assert!(
if let nom::IResult::Done(rest, frag) = result { result.is_complete(),
format!("result {:?} is not ok", result)
);
if let Result::Complete(_rest, frag) = result {
assert_eq!(frag, "foo \"bar"); assert_eq!(frag, "foo \"bar");
assert_eq!(rest.fragment, "\"");
} }
} }
#[test] #[test]
fn test_pipe_quoted() { fn test_pipe_quoted() {
let result = pipequotetok(LocatedSpan::new("|foo|")); let result = pipequotetok(OffsetStrIter::new("|foo|"));
assert!(result.is_done(), format!("result {:?} is not ok", result)); assert!(
if let nom::IResult::Done(_, tok) = result { result.is_complete(),
format!("result {:?} is not ok", result)
);
if let Result::Complete(_, tok) = result {
assert_eq!(tok.fragment, "foo".to_string()); assert_eq!(tok.fragment, "foo".to_string());
assert_eq!(tok.typ, TokenType::PIPEQUOTE); assert_eq!(tok.typ, TokenType::PIPEQUOTE);
} }
@ -54,16 +89,20 @@ fn test_pipe_quoted() {
#[test] #[test]
fn test_string_with_escaping() { fn test_string_with_escaping() {
let result = strtok(LocatedSpan::new("\"foo \\\\ \\\"bar\"")); let result = strtok(OffsetStrIter::new("\"foo \\\\ \\\"bar\""));
assert!(result.is_done(), format!("result {:?} is not ok", result)); assert!(
if let nom::IResult::Done(_, tok) = result { result.is_complete(),
format!("result {:?} is not ok", result)
);
if let Result::Complete(_, tok) = result {
assert_eq!(tok.fragment, "foo \\ \"bar".to_string()); assert_eq!(tok.fragment, "foo \\ \"bar".to_string());
} }
} }
#[test] #[test]
fn test_tokenize_bareword_with_dash() { fn test_tokenize_bareword_with_dash() {
let result = tokenize(LocatedSpan::new("foo-bar ")); let input = OffsetStrIter::new("foo-bar ");
let result = tokenize(input.clone());
assert!(result.is_ok(), format!("result {:?} is not ok", result)); assert!(result.is_ok(), format!("result {:?} is not ok", result));
if let Ok(toks) = result { if let Ok(toks) = result {
assert_eq!(toks.len(), 2); assert_eq!(toks.len(), 2);
@ -73,18 +112,23 @@ fn test_tokenize_bareword_with_dash() {
macro_rules! assert_token { macro_rules! assert_token {
($input:expr, $typ:expr, $msg:expr) => { ($input:expr, $typ:expr, $msg:expr) => {
let result = token(LocatedSpan::new($input)); let result = token(OffsetStrIter::new($input));
assert!( assert!(
result.is_done(), result.is_complete(),
format!("result {:?} is not a {}", result, $msg) format!("result {:?} is not a {}", result, $msg)
); );
if let nom::IResult::Done(_, tok) = result { if let Result::Complete(_, tok) = result {
assert_eq!(tok.fragment, $input);
assert_eq!(tok.typ, $typ); assert_eq!(tok.typ, $typ);
assert_eq!(tok.fragment, $input);
} }
}; };
} }
#[test]
fn test_digittok() {
assert_token!("1", TokenType::DIGIT, "1");
}
#[test] #[test]
fn test_boolean() { fn test_boolean() {
assert_token!("true", TokenType::BOOLEAN, "boolean"); assert_token!("true", TokenType::BOOLEAN, "boolean");
@ -122,10 +166,11 @@ fn test_lteqtok() {
#[test] #[test]
fn test_tokenize_one_of_each() { fn test_tokenize_one_of_each() {
let result = tokenize(LocatedSpan::new( let input = OffsetStrIter::new(
"map out filter assert let import macro select as => [ ] { } ; = % / * \ "map out filter assert let import macro select as => [ ] { } ; = % / * \
+ - . ( ) , 1 . foo \"bar\" // comment\n ; true false == < > <= >= !=", + - . ( ) , 1 . foo \"bar\" // comment\n ; true false == < > <= >= !=",
)); );
let result = tokenize(input.clone());
assert!(result.is_ok(), format!("result {:?} is not ok", result)); assert!(result.is_ok(), format!("result {:?} is not ok", result));
let v = result.unwrap(); let v = result.unwrap();
for (i, t) in v.iter().enumerate() { for (i, t) in v.iter().enumerate() {
@ -137,65 +182,80 @@ fn test_tokenize_one_of_each() {
#[test] #[test]
fn test_parse_has_end() { fn test_parse_has_end() {
let result = tokenize(LocatedSpan::new("foo")); let input = OffsetStrIter::new("foo");
let result = tokenize(input.clone());
assert!(result.is_ok()); assert!(result.is_ok());
let v = result.unwrap(); let v = result.unwrap();
assert_eq!(v.len(), 2); assert_eq!(v.len(), 2);
assert_eq!(v[1].typ, TokenType::END); assert_eq!(v[1].typ, TokenType::END);
} }
#[test]
fn test_whitespace() {
assert!(whitespace(OffsetStrIter::new(" ")).is_complete());
let result = whitespace(OffsetStrIter::new(" "));
match result {
Result::Complete(rest, o) => {
assert_eq!(rest.get_offset(), 2);
assert_eq!(o.typ, TokenType::WS);
}
_ => assert!(false, "Not complete"),
}
}
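get_offset() reports how much of the underlying input an iterator has consumed, so the two spaces above leave the rest iterator at offset 2. The rest iterator can be fed straight into the next rule; a small sketch chaining whitespace into comment (the combined input string is hypothetical, and it assumes whitespace stops at the first non-whitespace byte):

    let start = OffsetStrIter::new(" // note");
    if let Result::Complete(rest, ws) = whitespace(start) {
        assert_eq!(ws.typ, TokenType::WS);
        assert_eq!(rest.get_offset(), 1); // the single leading space was consumed
        assert!(comment(rest).is_complete());
    }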
#[test] #[test]
fn test_parse_comment() { fn test_parse_comment() {
assert!(comment(LocatedSpan::new("// comment\n")).is_done()); assert!(comment(OffsetStrIter::new("// comment\n")).is_complete());
assert!(comment(LocatedSpan::new("// comment")).is_done()); assert!(comment(OffsetStrIter::new("// comment")).is_complete());
assert_eq!( let mut parsed = comment(OffsetStrIter::new("// comment\n"));
comment(LocatedSpan::new("// comment\n")), assert!(parsed.is_complete());
nom::IResult::Done( if let Result::Complete(_rest, cmt) = parsed {
LocatedSpan { assert_eq!(
fragment: "", cmt,
offset: 11,
line: 2,
},
Token { Token {
typ: TokenType::COMMENT, typ: TokenType::COMMENT,
fragment: " comment".to_string(), fragment: " comment".to_string(),
pos: Position { line: 1, column: 1 }, pos: Position {
line: 1,
column: 1,
offset: 0
},
} }
) );
); }
assert!(comment(LocatedSpan::new("// comment\r\n")).is_done()); assert!(comment(OffsetStrIter::new("// comment\r\n")).is_complete());
assert_eq!( parsed = comment(OffsetStrIter::new("// comment\r\n"));
comment(LocatedSpan::new("// comment\r\n")), if let Result::Complete(_rest, cmt) = parsed {
nom::IResult::Done( assert_eq!(
LocatedSpan { cmt,
fragment: "",
offset: 12,
line: 2,
},
Token { Token {
typ: TokenType::COMMENT, typ: TokenType::COMMENT,
fragment: " comment".to_string(), fragment: " comment".to_string(),
pos: Position { column: 1, line: 1 }, pos: Position {
column: 1,
line: 1,
offset: 0
},
} }
) );
); }
assert!(comment(LocatedSpan::new("// comment\r\n ")).is_done()); assert!(comment(OffsetStrIter::new("// comment\r\n ")).is_complete());
assert_eq!( parsed = comment(OffsetStrIter::new("// comment\r\n "));
comment(LocatedSpan::new("// comment\r\n ")), if let Result::Complete(_rest, cmt) = parsed {
nom::IResult::Done( assert_eq!(
LocatedSpan { cmt,
fragment: " ",
offset: 12,
line: 2,
},
Token { Token {
typ: TokenType::COMMENT, typ: TokenType::COMMENT,
fragment: " comment".to_string(), fragment: " comment".to_string(),
pos: Position { column: 1, line: 1 }, pos: Position {
column: 1,
line: 1,
offset: 0
},
} }
) );
); }
assert!(comment(LocatedSpan::new("// comment")).is_done()); assert!(comment(OffsetStrIter::new("// comment")).is_complete());
} }
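The old assertions compared whole nom_locate spans; the rewritten checks compare only the produced Token, and Position now carries a third field, the offset into the input. A tiny sketch of the equivalence (assuming Position derives PartialEq, which the Token comparisons below already require):

    assert_eq!(
        Position::new(1, 1, 0),
        Position { line: 1, column: 1, offset: 0 }
    );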
#[test] #[test]
@ -203,16 +263,15 @@ fn test_match_word() {
let input = vec![Token { let input = vec![Token {
fragment: "foo".to_string(), fragment: "foo".to_string(),
typ: TokenType::BAREWORD, typ: TokenType::BAREWORD,
pos: Position { line: 1, column: 1 }, pos: Position {
}]; line: 1,
let result = word!( column: 1,
TokenIter { offset: 0,
source: input.as_slice(),
}, },
"foo" }];
); let result = word!(SliceIter::new(input.as_slice()), "foo");
match result { match result {
nom::IResult::Done(_, tok) => assert_eq!(tok, input[0]), Result::Complete(_, tok) => assert_eq!(tok, input[0]),
res => assert!(false, format!("Fail: {:?}", res)), res => assert!(false, format!("Fail: {:?}", res)),
} }
} }
@ -222,22 +281,20 @@ fn test_match_word_empty_input() {
let input = vec![Token { let input = vec![Token {
fragment: "".to_string(), fragment: "".to_string(),
typ: TokenType::END, typ: TokenType::END,
pos: Position { line: 1, column: 1 }, pos: Position {
}]; line: 1,
let result = word!( column: 1,
TokenIter { offset: 0,
source: input.as_slice(),
}, },
"foo" }];
); let result = word!(SliceIter::new(input.as_slice()), "foo");
match result { match result {
nom::IResult::Done(_, _) => assert!(false, "Should have been an error but was Done"), Result::Complete(_, _) => assert!(false, "Should have been an error but was Done"),
nom::IResult::Incomplete(_) => { Result::Incomplete(_) => assert!(false, "Should have been a Fail but was Incomplete"),
assert!(false, "Should have been an error but was Incomplete") Result::Fail(_) => {
}
nom::IResult::Error(_) => {
// noop // noop
} }
Result::Abort(_) => assert!(false, "Should have been a Fail but was Abort"),
} }
} }
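The empty-input test now has to acknowledge all four abortable_parser outcomes. As used in this commit, Complete carries the remaining iterator plus the parsed value, Incomplete means the input ran out mid-rule, Fail is a recoverable error that callers may backtrack from, and Abort is the unrecoverable case; that Fail/Abort reading is inferred from the usage here rather than from crate documentation. A generic shape for asserting the error path (the helper name and bounds are assumptions):

    fn expect_fail<'a, O: std::fmt::Debug>(res: Result<SliceIter<'a, Token>, O>) {
        match res {
            Result::Fail(_) => (), // the expected outcome
            Result::Abort(e) => panic!("aborted: {:?}", e),
            Result::Incomplete(ctx) => panic!("incomplete: {:?}", ctx),
            Result::Complete(_, v) => panic!("unexpectedly parsed {:?}", v),
        }
    }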
@ -246,16 +303,15 @@ fn test_match_punct() {
let input = vec![Token { let input = vec![Token {
fragment: "!".to_string(), fragment: "!".to_string(),
typ: TokenType::PUNCT, typ: TokenType::PUNCT,
pos: Position { line: 1, column: 1 }, pos: Position {
}]; line: 1,
let result = punct!( column: 1,
TokenIter { offset: 0,
source: input.as_slice(),
}, },
"!" }];
); let result = punct!(SliceIter::new(input.as_slice()), "!");
match result { match result {
nom::IResult::Done(_, tok) => assert_eq!(tok, input[0]), Result::Complete(_, tok) => assert_eq!(tok, input[0]),
res => assert!(false, format!("Fail: {:?}", res)), res => assert!(false, format!("Fail: {:?}", res)),
} }
} }
@ -265,16 +321,15 @@ fn test_match_type() {
let input = vec![Token { let input = vec![Token {
fragment: "foo".to_string(), fragment: "foo".to_string(),
typ: TokenType::BAREWORD, typ: TokenType::BAREWORD,
pos: Position { line: 1, column: 1 }, pos: Position {
}]; line: 1,
let result = match_type!( column: 1,
TokenIter { offset: 0,
source: input.as_slice(),
}, },
BAREWORD }];
); let result = match_type!(SliceIter::new(input.as_slice()), BAREWORD);
match result { match result {
nom::IResult::Done(_, tok) => assert_eq!(tok, input[0]), Result::Complete(_, tok) => assert_eq!(tok, input[0]),
res => assert!(false, format!("Fail: {:?}", res)), res => assert!(false, format!("Fail: {:?}", res)),
} }
} }
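Since word!, punct!, and match_type! all take a SliceIter over tokens, the iterator returned by one successful match is a valid input for the next. A closing sketch over a hypothetical two-token stream:

    let input = vec![
        Token {
            fragment: "foo".to_string(),
            typ: TokenType::BAREWORD,
            pos: Position { line: 1, column: 1, offset: 0 },
        },
        Token {
            fragment: "!".to_string(),
            typ: TokenType::PUNCT,
            pos: Position { line: 1, column: 4, offset: 3 },
        },
    ];
    if let Result::Complete(rest, _foo) = word!(SliceIter::new(input.as_slice()), "foo") {
        // The advanced iterator from the first match feeds the second.
        assert!(punct!(rest, "!").is_complete());
    }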