From ac5bdd7bcbe8526ebb4cb1d1bf7f62761b6d1c74 Mon Sep 17 00:00:00 2001 From: greg Date: Wed, 6 Sep 2017 23:52:25 -0700 Subject: [PATCH] Change some func signatures around tokenizing and errors --- src/schala_lang/mod.rs | 24 ++++++++++++------------ src/schala_lang/parsing.rs | 14 +++++++++++--- 2 files changed, 23 insertions(+), 15 deletions(-) diff --git a/src/schala_lang/mod.rs b/src/schala_lang/mod.rs index 261f1de..47462a1 100644 --- a/src/schala_lang/mod.rs +++ b/src/schala_lang/mod.rs @@ -2,6 +2,8 @@ use language::{ProgrammingLanguageInterface, EvalOptions, TraceArtifact, ReplOut mod parsing; +use self::parsing::TokenType; + pub struct Schala { } @@ -18,18 +20,16 @@ impl ProgrammingLanguageInterface for Schala { fn evaluate_in_repl(&mut self, input: &str, options: &EvalOptions) -> ReplOutput { let mut output = ReplOutput::default(); - let tokens = match parsing::tokenize(input) { - Ok(tokens) => { - if options.debug_tokens { - output.add_artifact(TraceArtifact::new("tokens", format!("{:?}", tokens))); - } - tokens - }, - Err(err) => { - output.add_output(format!("Tokenization error: {:?}\n", err.msg)); - return output; - } - }; + let tokens = parsing::tokenize(input); + if options.debug_tokens { + output.add_artifact(TraceArtifact::new("tokens", format!("{:?}", tokens))); + } + + let token_errors: Vec<&String> = tokens.iter().filter_map(|t| t.get_error()).collect(); + if token_errors.len() != 0 { + output.add_output(format!("Tokenization error: {:?}\n", token_errors)); + return output; + } /* let ast = match parsing::parse(tokens) { diff --git a/src/schala_lang/parsing.rs b/src/schala_lang/parsing.rs index b2c9d68..118cfff 100644 --- a/src/schala_lang/parsing.rs +++ b/src/schala_lang/parsing.rs @@ -42,13 +42,22 @@ pub struct Token { offset: usize, } +impl Token { + pub fn get_error(&self) -> Option<&String> { + match self.token_type { + TokenType::Error(ref s) => Some(s), + _ => None, + } + } +} + fn is_digit(c: &char) -> bool { c.is_digit(10) } type 
CharIter<'a> = Peekable<Enumerate<Chars<'a>>>; -pub fn tokenize(input: &str) -> Result<Vec<Token>, TokenError> { +pub fn tokenize(input: &str) -> Vec<Token> { use self::TokenType::*; let mut tokens: Vec<Token> = Vec::new(); @@ -82,8 +91,7 @@ pub fn tokenize(input: &str) -> Result<Vec<Token>, TokenError> { tokens.push(Token { token_type: cur_tok_type, offset: idx }); } - - Ok(tokens) + tokens } fn handle_digit(c: char, input: &mut CharIter) -> TokenType {