Convert Tokenize to Result

greg 2016-12-28 22:52:23 -08:00
parent 17f9846bb9
commit 743311d18a
2 changed files with 25 additions and 9 deletions
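The diff below swaps tokenize's return type from Option<Vec<Token>> to a Result carrying a TokenizeError, so callers can report why tokenization failed rather than only that it failed. A minimal self-contained sketch of the same pattern follows; the one-variant Token enum and the whitespace-splitting tokenizer body are simplified stand-ins, not the project's real definitions.

// Sketch of the Option -> Result conversion this commit performs.
// Token and the tokenizer body are placeholders for illustration only.
#[derive(Debug, PartialEq)]
pub enum Token {
    NumLiteral(f64),
}

#[derive(Debug)]
pub struct TokenizeError {
    pub msg: String,
}

pub type TokenizeResult = Result<Vec<Token>, TokenizeError>;

// Before: pub fn tokenize(input: &str) -> Option<Vec<Token>>
// After: the error case carries a message the caller can surface.
pub fn tokenize(input: &str) -> TokenizeResult {
    input
        .split_whitespace()
        .map(|word| {
            word.parse::<f64>()
                .map(Token::NumLiteral)
                .map_err(|_| TokenizeError { msg: format!("Failed to parse: {}", word) })
        })
        .collect()
}

fn main() {
    match tokenize("2.3 49.2") {
        Ok(tokens) => println!("{:?}", tokens),
        Err(e) => println!("Tokenization error: {}", e.msg),
    }
}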


@@ -45,8 +45,11 @@ fn run_noninteractive(filename: &str, compile: bool) {
     source_file.read_to_string(&mut buffer).unwrap();
     let tokens = match tokenize(&buffer) {
-        Some(t) => t,
-        None => { println!("Tokenization error"); return; }
+        Ok(t) => t,
+        Err(e) => {
+            println!("Tokenization error");
+            return;
+        }
     };
     let ast = match parse(&tokens, &[]) {
@@ -106,8 +109,8 @@ fn repl_handler(input: &str, state: &mut InterpreterState) -> String {
     let mut result = String::new();
     let tokens = match tokenize(input) {
-        None => return format!("Tokenization error"),
-        Some(t) => t
+        Err(e) => return format!("Tokenization error"),
+        Ok(t) => t
     };
     if state.show_tokens {


@@ -31,6 +31,19 @@ pub enum Kw {
     Null,
 }
 
+pub type TokenizeResult = Result<Vec<Token>, TokenizeError>;
+
+#[derive(Debug)]
+pub struct TokenizeError {
+    pub msg: String,
+}
+
+impl TokenizeError {
+    fn new(msg: &str) -> TokenizeError {
+        TokenizeError { msg: msg.to_string() }
+    }
+}
+
 fn is_digit(c: &char) -> bool {
     c.is_digit(10)
 }
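One follow-up the commit does not include, sketched here as a hypothetical addition: giving TokenizeError a Display impl would let callers print the error directly with {} instead of reaching into the msg field.

use std::fmt;

#[derive(Debug)]
pub struct TokenizeError {
    pub msg: String,
}

// Hypothetical, not part of this commit: Display lets callers write
// println!("{}", e) rather than println!("{}", e.msg).
impl fmt::Display for TokenizeError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "tokenize error: {}", self.msg)
    }
}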
@@ -48,7 +61,7 @@ fn ends_identifier(c: &char) -> bool {
     c == ':'
 }
 
-pub fn tokenize(input: &str) -> Option<Vec<Token>> {
+pub fn tokenize(input: &str) -> TokenizeResult {
     use self::Token::*;
     let mut tokens = Vec::new();
     let mut iter = input.chars().peekable();
@@ -82,7 +95,7 @@ pub fn tokenize(input: &str) -> Option<Vec<Token>> {
             match iter.next() {
                 Some(x) if x == '"' => break,
                 Some(x) => buffer.push(x),
-                None => return None,
+                None => return Err(TokenizeError::new("Unclosed quote")),
             }
         }
         StrLiteral(buffer)
@@ -101,7 +114,7 @@ pub fn tokenize(input: &str) -> Option<Vec<Token>> {
         }
         match buffer.parse::<f64>() {
             Ok(f) => NumLiteral(f),
-            Err(_) => return None
+            Err(_) => return Err(TokenizeError::new("Failed to parse digit")),
         }
     } else if !char::is_alphanumeric(c) {
         let mut buffer = String::with_capacity(20);
@@ -142,7 +155,7 @@ pub fn tokenize(input: &str) -> Option<Vec<Token>> {
         tokens.push(cur_tok);
     }
 
-    Some(tokens)
+    Ok(tokens)
 }
 
 #[cfg(test)]
@@ -173,7 +186,7 @@ mod tests {
         tokentest!("2.3*49.2",
                    "[NumLiteral(2.3), Operator(Op { repr: \"*\" }), NumLiteral(49.2)]");
 
-        assert_eq!(tokenize("2.4.5"), None);
+        assert!(tokenize("2.4.5").is_err());
     }
 
     #[test]