Change some func signatures around tokenizing and errors

This commit is contained in:
greg 2017-09-06 23:52:25 -07:00
parent 8bf5f40a2a
commit ac5bdd7bcb
2 changed files with 23 additions and 15 deletions

View File

@ -2,6 +2,8 @@ use language::{ProgrammingLanguageInterface, EvalOptions, TraceArtifact, ReplOut
mod parsing;
use self::parsing::TokenType;
pub struct Schala {
}
@ -18,18 +20,16 @@ impl ProgrammingLanguageInterface for Schala {
fn evaluate_in_repl(&mut self, input: &str, options: &EvalOptions) -> ReplOutput {
let mut output = ReplOutput::default();
let tokens = match parsing::tokenize(input) {
Ok(tokens) => {
if options.debug_tokens {
output.add_artifact(TraceArtifact::new("tokens", format!("{:?}", tokens)));
}
tokens
},
Err(err) => {
output.add_output(format!("Tokenization error: {:?}\n", err.msg));
return output;
}
};
let tokens = parsing::tokenize(input);
if options.debug_tokens {
output.add_artifact(TraceArtifact::new("tokens", format!("{:?}", tokens)));
}
let token_errors: Vec<&String> = tokens.iter().filter_map(|t| t.get_error()).collect();
if token_errors.len() != 0 {
output.add_output(format!("Tokenization error: {:?}\n", token_errors));
return output;
}
/*
let ast = match parsing::parse(tokens) {

View File

@ -42,13 +42,22 @@ pub struct Token {
offset: usize,
}
impl Token {
pub fn get_error(&self) -> Option<&String> {
match self.token_type {
TokenType::Error(ref s) => Some(s),
_ => None,
}
}
}
/// True when `c` is an ASCII decimal digit (`'0'..='9'`).
///
/// Takes `&char` rather than `char` so it can be used directly as a
/// predicate over iterators that yield `&char`.
fn is_digit(c: &char) -> bool {
    // Idiomatic equivalent of `c.is_digit(10)`: for radix 10, `is_digit`
    // only ever matches ASCII '0'-'9' (clippy: `is_digit_ascii_radix`).
    c.is_ascii_digit()
}
type CharIter<'a> = Peekable<Enumerate<Chars<'a>>>;
pub fn tokenize(input: &str) -> Result<Vec<Token>, TokenError> {
pub fn tokenize(input: &str) -> Vec<Token> {
use self::TokenType::*;
let mut tokens: Vec<Token> = Vec::new();
@ -82,8 +91,7 @@ pub fn tokenize(input: &str) -> Result<Vec<Token>, TokenError> {
tokens.push(Token { token_type: cur_tok_type, offset: idx });
}
Ok(tokens)
tokens
}
fn handle_digit(c: char, input: &mut CharIter) -> TokenType {