Make tokenize error-able

greg 2016-01-06 23:48:53 -08:00
parent 5ca98c7d77
commit 8662a3ba0e
2 changed files with 12 additions and 6 deletions


@@ -55,15 +55,19 @@ impl ReplState for InterpreterState {
 }
 fn repl_handler(input: &str, state: &mut InterpreterState) -> String {
+    let tokens = match tokenize(input) {
+        None => return format!("Failure to tokenize"),
+        Some(t) => t
+    };
     if state.show_tokens {
-        println!("Tokens: {:?}", tokenize(input));
+        println!("Tokens: {:?}", tokens);
     }
     if state.show_parse {
         println!("not implemented")
     }
-    let tokens = tokenize(input);
     let ast = parse(&tokens);
     format!("{:?}", tokens)
 }
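The caller-side change is the usual Rust idiom of matching on an Option and returning early on None. Below is a minimal, self-contained sketch of that pattern; the stand-in tokenize (whitespace splitting plus an unbalanced-quote check) and the simplified repl_handler signature are illustrative only, not the project's real lexer or REPL state.

// Stand-in lexer: returns None on an "unterminated string literal",
// otherwise Some(tokens). Purely illustrative.
fn tokenize(input: &str) -> Option<Vec<String>> {
    // Treat an odd number of double quotes as an unterminated string.
    if input.matches('"').count() % 2 != 0 {
        return None;
    }
    Some(input.split_whitespace().map(String::from).collect())
}

fn repl_handler(input: &str) -> String {
    // Unwrap the Option up front, turning the None case into an early return.
    let tokens = match tokenize(input) {
        None => return format!("Failure to tokenize"),
        Some(t) => t,
    };
    format!("{:?}", tokens)
}

fn main() {
    println!("{}", repl_handler("let x = 5"));      // prints the token list
    println!("{}", repl_handler("\"unterminated")); // prints the failure message
}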


@@ -31,7 +31,7 @@ fn is_digit(c: &char) -> bool {
     c.is_digit(10)
 }
-pub fn tokenize(input: &str) -> Vec<Token> {
+pub fn tokenize(input: &str) -> Option<Vec<Token>> {
     use self::Token::*;
     let mut tokens = Vec::new();
     let mut iter = input.chars().peekable();
@@ -63,18 +63,20 @@ pub fn tokenize(input: &str) -> Vec<Token> {
                 match iter.next() {
                     Some(x) if x == '"' => break,
                     Some(x) => buffer.push(x),
-                    None => return tokens,
+                    None => return None,
                 }
             }
             StrLiteral(buffer)
+        } else if is_digit(&c) {
+            NumLiteral(45.0)
         } else {
-            StrLiteral("DUMMY".to_string())
+            Identifier("DUMMY".to_string())
        };
        tokens.push(cur_tok);
    }
-   tokens
+   Some(tokens)
 }
 #[cfg(test)]
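One way to exercise the new contract is a pair of unit tests placed in the tokenizer's existing test module. The module and test names below are hypothetical; the tokenize signature and its behavior (None on an unterminated string literal, Some otherwise) are the ones introduced in this diff.

#[cfg(test)]
mod option_return_tests {
    use super::*;

    #[test]
    fn unterminated_string_literal_is_none() {
        // The opening quote is never closed, so the inner loop runs out of
        // characters and tokenize reports failure instead of returning a
        // partial token list.
        assert!(tokenize("\"no closing quote").is_none());
    }

    #[test]
    fn well_formed_input_is_some() {
        // Anything that tokenizes cleanly now comes back wrapped in Some.
        assert!(tokenize("\"hello\"").is_some());
    }
}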