From bfa16fd6fbefffbb56a597ff3a2b80929716454a Mon Sep 17 00:00:00 2001
From: greg
Date: Wed, 22 Jul 2015 04:01:56 -0700
Subject: [PATCH] Added Keyword lexical class

---
 src/parser.rs    |  5 +++--
 src/tokenizer.rs | 27 ++++++++++++++++++++++++---
 2 files changed, 27 insertions(+), 5 deletions(-)

diff --git a/src/parser.rs b/src/parser.rs
index bdc1d0d..1849974 100644
--- a/src/parser.rs
+++ b/src/parser.rs
@@ -1,7 +1,7 @@
 use std::slice::Iter;
 use std::iter::Peekable;
 
-use tokenizer::{Token};
+use tokenizer::{Token, Kw};
 use tokenizer::Token::*;
 
 #[derive(Debug)]
@@ -39,6 +39,7 @@ fn expect_token(tok: Token, tokens: &mut Tokens) -> bool {
         (NumLiteral(_), NumLiteral(_)) => true,
         (StrLiteral(_), StrLiteral(_)) => true,
         (Identifier(ref i1), Identifier(ref i2)) => i1 == i2,
+        (Keyword(k1), Keyword(k2)) => k1 == k2,
         _ => false
     }
 }
@@ -60,7 +61,7 @@ pub fn parse(input: Vec<Token>) -> ParseResult {
 
 
 fn let_expression(input: &mut Tokens) -> ParseResult {
-    expect!(Identifier("let".to_string()), input);
+    expect!(Keyword(Kw::Let), input);
     if let Some(&Identifier(ref name)) = input.next() {
         if let Some(&Identifier(ref s)) = input.next() {
             if s == "=" {
diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index f141311..f8aa512 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -7,8 +7,21 @@ pub enum Token {
     Comma,
     NumLiteral(f64),
     StrLiteral(String),
-    Identifier(String)
-    /* Keyword(Keyword) */ //implement in future
+    Identifier(String),
+    Keyword(Kw)
+}
+
+#[derive(Debug, Clone, PartialEq)]
+pub enum Kw {
+    If,
+    Then,
+    Else,
+    While,
+    Do,
+    End,
+    Let,
+    Fn,
+    Null
 }
 
 pub fn tokenize(input: &str) -> Vec<Token> {
@@ -67,10 +80,18 @@ pub fn tokenize(input: &str) -> Vec<Token> {
             match buffer.parse::<f64>() {
                 Ok(f) => tokens.push(Token::NumLiteral(f)),
-                _ => tokens.push(Token::Identifier(buffer))
+                _ => tokens.push(handle_identifier(buffer))
             }
         }
     }
 
     tokens.push(Token::EOF);
     tokens
 }
+
+fn handle_identifier(identifier: String) -> Token {
+    if identifier == "let" {
+        return Token::Keyword(Kw::Let);
+    }
+
+    return Token::Identifier(identifier);
+}