Token errors WIP

This commit is contained in:
greg 2018-05-02 00:27:58 -07:00
parent 50499c8a33
commit 774ab5f72e
1 changed file with 2 additions and 0 deletions

View File

@ -46,6 +46,8 @@ fn tokenizing_stage(_handle: &mut Schala, input: &str, comp: Option<&mut Unfinis
let token_string = tokens.iter().map(|t| format!("{:?}<L:{},C:{}>", t.token_type, t.offset.0, t.offset.1)).join(", ");
comp.add_artifact(TraceArtifact::new("tokens", token_string));
});
let token_errors: Vec<&String> = tokens.iter().filter_map(|t| t.get_error()).collect();
Ok(tokenizing::tokenize(input))
}