schala/src/tokenizer.rs

#[derive(Debug, Clone, PartialEq)]
pub enum Token {
    EOF,
    Newline,
    Semicolon,
    LParen,
    RParen,
    Comma,
    Period,
    NumLiteral(f64),
    StrLiteral(String),
    Identifier(String),
    Keyword(Kw),
}

#[derive(Debug, Clone, PartialEq)]
pub enum Kw {
    If,
    Then,
    Else,
    While,
    End,
    Let,
    Fn,
    Null,
    Assign,
}
/// Splits each line of the input on whitespace and classifies each word;
/// a simple single pass that is sufficient for the tests below.
pub fn tokenize(input: &str) -> Vec<Token> {
    let mut tokens = Vec::new();
    for line in input.lines() {
        tokens.extend(line.split_whitespace().map(token_for_word));
        tokens.push(Token::Newline);
    }
    tokens.push(Token::EOF);
    tokens
}
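
// A minimal sketch of word classification. It assumes the obvious lowercase
// spellings for keywords the tests never exercise ("if", "then", etc.), and
// it deliberately lets any unrecognized word, including operators like "+",
// fall through to Identifier, which the second test depends on.
// `token_for_word` is a helper name introduced here for illustration.
fn token_for_word(word: &str) -> Token {
    match word {
        "if" => Token::Keyword(Kw::If),
        "then" => Token::Keyword(Kw::Then),
        "else" => Token::Keyword(Kw::Else),
        "while" => Token::Keyword(Kw::While),
        "end" => Token::Keyword(Kw::End),
        "let" => Token::Keyword(Kw::Let),
        "fn" => Token::Keyword(Kw::Fn),
        "null" => Token::Keyword(Kw::Null),
        "=" => Token::Keyword(Kw::Assign),
        w => match w.parse::<f64>() {
            Ok(n) => Token::NumLiteral(n),
            Err(_) => Token::Identifier(w.to_string()),
        },
    }
}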
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn tokenization_tests() {
        let t1 = "let a = 3\n";
        assert_eq!(format!("{:?}", tokenize(t1)),
            "[Keyword(Let), Identifier(\"a\"), Keyword(Assign), NumLiteral(3.0), Newline, EOF]");

        // Intentional: "+" and "b*c" are whitespace-delimited words, so they
        // are lexed as identifiers rather than as operators.
        let t2 = "a + b*c\n";
        assert_eq!(format!("{:?}", tokenize(t2)),
            "[Identifier(\"a\"), Identifier(\"+\"), Identifier(\"b*c\"), Newline, EOF]");
    }
}