diff --git a/schala-lang/src/lib.rs b/schala-lang/src/lib.rs
index 6224532..355a3b7 100644
--- a/schala-lang/src/lib.rs
+++ b/schala-lang/src/lib.rs
@@ -40,15 +40,19 @@ impl Schala {
   }
 }
 
-fn tokenizing_stage(_handle: &mut Schala, input: &str, comp: Option<&mut UnfinishedComputation>) -> Result<Vec<tokenizing::Token>, ()> {
+fn tokenizing_stage(_handle: &mut Schala, input: &str, comp: Option<&mut UnfinishedComputation>) -> Result<Vec<tokenizing::Token>, String> {
   let tokens = tokenizing::tokenize(input);
   comp.map(|comp| {
     let token_string = tokens.iter().map(|t| format!("{:?}<L:{},{}>", t.token_type, t.offset.0, t.offset.1)).join(", ");
     comp.add_artifact(TraceArtifact::new("tokens", token_string));
   });
 
-  let token_errors: Vec<&String> = tokens.iter().filter_map(|t| t.get_error()).collect();
-  Ok(tokenizing::tokenize(input))
+  let errors: Vec<String> = tokens.iter().filter_map(|t| t.get_error()).collect();
+  if errors.len() == 0 {
+    Ok(tokens)
+  } else {
+    Err(format!("{:?}", errors))
+  }
 }
 
 fn parsing_stage(_handle: &mut Schala, input: Vec<tokenizing::Token>, comp: Option<&mut UnfinishedComputation>) -> Result<parsing::AST, String> {
diff --git a/schala-lang/src/tokenizing.rs b/schala-lang/src/tokenizing.rs
index 14e4b3c..0c6aad1 100644
--- a/schala-lang/src/tokenizing.rs
+++ b/schala-lang/src/tokenizing.rs
@@ -89,9 +89,9 @@ pub struct Token {
 }
 
 impl Token {
-  pub fn get_error(&self) -> Option<&String> {
+  pub fn get_error(&self) -> Option<String> {
     match self.token_type {
-      TokenType::Error(ref s) => Some(s),
+      TokenType::Error(ref s) => Some(s.clone()),
       _ => None,
     }
   }
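
The change above has two parts: `get_error` now returns an owned `String` instead of a borrowed `&String`, and the tokenizing stage collects those owned errors and reports them through `Result<_, String>` instead of discarding them. The following is a standalone sketch of that pattern, assuming minimal stand-in `Token` and `TokenType` definitions (not the real schala-lang types) and a simplified `tokenizing_stage` signature without the repl handle and trace-artifact plumbing.

```rust
// Minimal stand-ins for the schala-lang types; only the shape of
// `get_error` and the error-aggregation logic mirror the diff above.
#[derive(Debug)]
enum TokenType {
    Identifier(String),
    Error(String),
}

#[derive(Debug)]
struct Token {
    token_type: TokenType,
}

impl Token {
    // Returns an owned copy of the error message (the new signature),
    // so callers are not tied to the token's lifetime.
    fn get_error(&self) -> Option<String> {
        match self.token_type {
            TokenType::Error(ref s) => Some(s.clone()),
            _ => None,
        }
    }
}

// Succeeds with the token stream only if no token carries an error;
// otherwise all error messages are folded into one Err string.
fn tokenizing_stage(tokens: Vec<Token>) -> Result<Vec<Token>, String> {
    let errors: Vec<String> = tokens.iter().filter_map(|t| t.get_error()).collect();
    if errors.is_empty() {
        Ok(tokens)
    } else {
        Err(format!("{:?}", errors))
    }
}

fn main() {
    let good = vec![Token { token_type: TokenType::Identifier("x".to_string()) }];
    let bad = vec![Token { token_type: TokenType::Error("unexpected character '#'".to_string()) }];

    assert!(tokenizing_stage(good).is_ok());
    assert!(tokenizing_stage(bad).is_err());
}
```

Returning owned `String`s costs a clone per error token, but it lets the stage hand the whole token vector to the next stage (or an error message to the caller) without borrow-checker conflicts between `tokens` and the collected errors.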