diff --git a/src/ast/mod.rs b/src/ast/mod.rs index e90fe34..a713832 100644 --- a/src/ast/mod.rs +++ b/src/ast/mod.rs @@ -55,6 +55,7 @@ macro_rules! enum_type_equality { /// parts of the UCG AST have a positioned associated with them. #[derive(Debug, PartialEq, Eq, Clone, PartialOrd, Ord, Hash)] pub struct Position { + pub file: Option<PathBuf>, pub line: usize, pub column: usize, pub offset: usize, @@ -64,6 +65,7 @@ impl Position { /// Construct a new Position. pub fn new(line: usize, column: usize, offset: usize) -> Self { Position { + file: None, line: line, column: column, offset: offset, @@ -373,8 +375,6 @@ impl<'a> From<&'a PositionedItem<String>> for PositionedItem<String> { /// Encodes a macro expression in the UCG AST.. /// /// A macro is a pure function over a tuple. -/// MacroDefs are not closures. They can not reference -/// any values except what is defined in their arguments. #[derive(PartialEq, Debug, Clone)] pub struct MacroDef { pub scope: Option<Scope>, diff --git a/src/iter.rs b/src/iter.rs index 99664ac..9936735 100644 --- a/src/iter.rs +++ b/src/iter.rs @@ -1,6 +1,7 @@ //! Input stream wrappers for use in abortable_parser. 
use std::convert::From; use std::iter::Iterator; +use std::path::PathBuf; use abortable_parser::iter::{SliceIter, StrIter}; use abortable_parser::{InputIter, Offsetable, Peekable, Positioned, Seekable, Span, SpanRange}; @@ -9,6 +10,7 @@ use crate::ast::{Position, Token}; #[derive(Debug)] pub struct OffsetStrIter<'a> { + source_file: Option<PathBuf>, contained: StrIter<'a>, line_offset: usize, col_offset: usize, @@ -21,11 +23,17 @@ impl<'a> OffsetStrIter<'a> { pub fn new_with_offsets(input: &'a str, line_offset: usize, col_offset: usize) -> Self { OffsetStrIter { + source_file: None, contained: StrIter::new(input), line_offset: line_offset, col_offset: col_offset, } } + + pub fn with_src_file(mut self, file: PathBuf) -> Self { + self.source_file = Some(file); + self + } } impl<'a> Iterator for OffsetStrIter<'a> { @@ -45,6 +53,7 @@ impl<'a> Offsetable for OffsetStrIter<'a> { impl<'a> Clone for OffsetStrIter<'a> { fn clone(&self) -> Self { OffsetStrIter { + source_file: self.source_file.clone(), contained: self.contained.clone(), line_offset: self.line_offset, col_offset: self.col_offset, @@ -55,6 +64,7 @@ impl<'a> Clone for OffsetStrIter<'a> { impl<'a> From<&'a str> for OffsetStrIter<'a> { fn from(source: &'a str) -> Self { OffsetStrIter { + source_file: None, contained: StrIter::new(source), line_offset: 0, col_offset: 0, @@ -103,6 +113,11 @@ impl<'a> From<&'a SliceIter<'a, Token>> for Position { impl<'a> From<&'a OffsetStrIter<'a>> for Position { fn from(s: &'a OffsetStrIter<'a>) -> Position { - Position::new(s.line(), s.column(), s.get_offset()) + Position { + file: s.source_file.clone(), + line: s.line(), + column: s.column(), + offset: s.get_offset(), + } } } diff --git a/src/main.rs b/src/main.rs index cffa276..63d3e74 100644 --- a/src/main.rs +++ b/src/main.rs @@ -266,6 +266,7 @@ fn inspect_command( } else { sym_name.to_owned() }; + let mut builder = builder.clone_builder("/eval"); match builder.eval_string(&normalized) { Ok(v) => Some(v.clone()), Err(e) => { diff
--git a/src/tokenizer/test.rs b/src/tokenizer/test.rs index 66a6eff..c05afc2 100644 --- a/src/tokenizer/test.rs +++ b/src/tokenizer/test.rs @@ -203,6 +203,7 @@ fn test_parse_comment() { typ: TokenType::COMMENT, fragment: " comment".to_string(), pos: Position { + file: None, line: 1, column: 1, offset: 0 @@ -219,6 +220,7 @@ fn test_parse_comment() { typ: TokenType::COMMENT, fragment: " comment".to_string(), pos: Position { + file: None, column: 1, line: 1, offset: 0 @@ -235,6 +237,7 @@ fn test_parse_comment() { typ: TokenType::COMMENT, fragment: " comment".to_string(), pos: Position { + file: None, column: 1, line: 1, offset: 0 @@ -251,6 +254,7 @@ fn test_match_word() { fragment: "foo".to_string(), typ: TokenType::BAREWORD, pos: Position { + file: None, line: 1, column: 1, offset: 0, @@ -269,6 +273,7 @@ fn test_match_word_empty_input() { fragment: "".to_string(), typ: TokenType::END, pos: Position { + file: None, line: 1, column: 1, offset: 0, @@ -291,6 +296,7 @@ fn test_match_punct() { fragment: "!".to_string(), typ: TokenType::PUNCT, pos: Position { + file: None, line: 1, column: 1, offset: 0, @@ -309,6 +315,7 @@ fn test_match_type() { fragment: "foo".to_string(), typ: TokenType::BAREWORD, pos: Position { + file: None, line: 1, column: 1, offset: 0,