Tokenize errors

This commit is contained in:
greg 2018-05-02 01:14:46 -07:00
parent 774ab5f72e
commit 7ba8c9dab9
2 changed files with 9 additions and 5 deletions

View File

@@ -40,15 +40,19 @@ impl Schala {
} }
} }
fn tokenizing_stage(_handle: &mut Schala, input: &str, comp: Option<&mut UnfinishedComputation>) -> Result<Vec<tokenizing::Token>, ()> { fn tokenizing_stage(_handle: &mut Schala, input: &str, comp: Option<&mut UnfinishedComputation>) -> Result<Vec<tokenizing::Token>, String> {
let tokens = tokenizing::tokenize(input); let tokens = tokenizing::tokenize(input);
comp.map(|comp| { comp.map(|comp| {
let token_string = tokens.iter().map(|t| format!("{:?}<L:{},C:{}>", t.token_type, t.offset.0, t.offset.1)).join(", "); let token_string = tokens.iter().map(|t| format!("{:?}<L:{},C:{}>", t.token_type, t.offset.0, t.offset.1)).join(", ");
comp.add_artifact(TraceArtifact::new("tokens", token_string)); comp.add_artifact(TraceArtifact::new("tokens", token_string));
}); });
let token_errors: Vec<&String> = tokens.iter().filter_map(|t| t.get_error()).collect();
Ok(tokenizing::tokenize(input)) let errors: Vec<String> = tokens.iter().filter_map(|t| t.get_error()).collect();
if errors.len() == 0 {
Ok(tokens)
} else {
Err(format!("{:?}", errors))
}
} }
fn parsing_stage(_handle: &mut Schala, input: Vec<tokenizing::Token>, comp: Option<&mut UnfinishedComputation>) -> Result<parsing::AST, parsing::ParseError> { fn parsing_stage(_handle: &mut Schala, input: Vec<tokenizing::Token>, comp: Option<&mut UnfinishedComputation>) -> Result<parsing::AST, parsing::ParseError> {

View File

@@ -89,9 +89,9 @@ pub struct Token {
} }
impl Token { impl Token {
pub fn get_error(&self) -> Option<&String> { pub fn get_error(&self) -> Option<String> {
match self.token_type { match self.token_type {
TokenType::Error(ref s) => Some(s), TokenType::Error(ref s) => Some(s.clone()),
_ => None, _ => None,
} }
} }