schala/src/parser.rs

use tokenizer::Token;
use tokenizer::Kw;
/* Grammar
program := (statement delimiter ?)*
delimiter := Newline | Semicolon
statement := declaration | expression
declaration := Fn prototype (statement)* End
prototype := identifier LParen identlist RParen
identlist := Ident (Comma Ident)* | e
expression := primary_expression (op primary_expression)*
primary_expression := Variable | Number | String | call_expr | paren_expr
paren_expr := LParen expression RParen
call_expr := identifier LParen identlist RParen
op := '+', '-', etc.
*/
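
// Worked example (a sketch, based on the grammar above and the AST types below;
// expression() currently only handles bare identifiers, see the note there):
//
//     fn id(x)
//         x
//     end
//
// should parse to roughly
//
//     [FuncNode(Function {
//         prototype: Prototype { name: "id", args: ["x"] },
//         body: [Variable("x")],
//     })]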
#[derive(Debug, Clone)]
pub enum ASTNode {
    ExprNode(Expression),
    FuncNode(Function),
}

#[derive(Debug, Clone)]
pub struct Function {
    pub prototype: Prototype,
    pub body: Vec<Expression>,
}

#[derive(Debug, Clone)]
pub struct Prototype {
    pub name: String,
    pub args: Vec<String>,
}

#[derive(Debug, Clone)]
pub enum Expression {
    StringLiteral(String),
    Number(f64),
    Variable(String),
    BinExp(String, Box<Expression>, Box<Expression>),
    Call(String, Vec<Expression>),
}

pub type AST = Vec<ASTNode>;
//TODO make this support incomplete parses
pub type ParseResult<T> = Result<T, ParseError>;

#[derive(Debug)]
pub struct ParseError {
    pub msg: String,
}

impl ParseError {
    fn result_from_str<T>(msg: &str) -> ParseResult<T> {
        Err(ParseError { msg: msg.to_string() })
    }
}

struct Parser {
    tokens: Vec<Token>,
}

impl Parser {
    fn initialize(tokens: &[Token]) -> Parser {
        let mut tokens = tokens.to_vec();
        tokens.reverse();
        Parser { tokens: tokens }
    }

    fn peek(&mut self) -> Option<Token> {
        self.tokens.last().map(|x| x.clone())
    }

    fn next(&mut self) -> Option<Token> {
        self.tokens.pop()
    }
}

macro_rules! expect {
    ($self_:expr, $token:pat, $error:expr) => {
        match $self_.peek() {
            Some($token) => { $self_.next(); },
            _ => return ParseError::result_from_str($error)
        }
    }
}
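
// Usage: `expect!(self, LParen, "Expected '('")` consumes the next token if it
// matches the pattern, or returns early from the enclosing function with a
// ParseError carrying the given message.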

fn is_delimiter(token: &Token) -> bool {
    use tokenizer::Token::*;
    match *token {
        Newline | Semicolon => true,
        _ => false
    }
}

impl Parser {
    fn program(&mut self) -> ParseResult<AST> {
        use tokenizer::Token::*;
        let mut ast = Vec::new(); //TODO have this come from the previously-parsed tree
        loop {
            let cur_tok = match self.peek() {
                Some(t) => t.clone(),
                None => break
            };
            let result: ParseResult<ASTNode> = match cur_tok {
                ref t if is_delimiter(t) => { self.next(); continue },
                _ => self.statement()
            };
            match result {
                Ok(node) => ast.push(node),
                Err(err) => return Err(err)
            }
        }
        Ok(ast)
    }

    fn statement(&mut self) -> ParseResult<ASTNode> {
        use tokenizer::Token::*;
        let cur_tok: Token = self.peek().unwrap().clone();
        let node: ASTNode = match cur_tok {
            Keyword(Kw::Fn) => try!(self.declaration()),
            _ => ASTNode::ExprNode(try!(self.expression())),
        };
        Ok(node)
    }

    fn declaration(&mut self) -> ParseResult<ASTNode> {
        use tokenizer::Token::*;
        expect!(self, Keyword(Kw::Fn), "Expected 'fn'");
        let prototype = try!(self.prototype());
        let body: Vec<Expression> = try!(self.body());
        expect!(self, Keyword(Kw::End), "Expected 'end'");
        Ok(ASTNode::FuncNode(Function { prototype: prototype, body: body }))
    }

    fn prototype(&mut self) -> ParseResult<Prototype> {
        use tokenizer::Token::*;
        let name: String = match self.peek() {
            Some(Identifier(name)) => { self.next(); name },
            _ => return ParseError::result_from_str("Expected identifier")
        };
        expect!(self, LParen, "Expected '('");
        let args: Vec<String> = try!(self.identlist());
        expect!(self, RParen, "Expected ')'");
        Ok(Prototype { name: name, args: args })
    }

    fn identlist(&mut self) -> ParseResult<Vec<String>> {
        use tokenizer::Token::*;
        let mut args: Vec<String> = Vec::new();
        loop {
            match self.peek() {
                Some(Identifier(name)) => {
                    args.push(name);
                    self.next();
                    if let Some(Comma) = self.peek() {
                        self.next();
                    } else {
                        break;
                    }
                },
                _ => break
            }
        }
        Ok(args)
    }

    fn body(&mut self) -> ParseResult<Vec<Expression>> {
        use tokenizer::Token::*;
        let mut exprs = Vec::new();
        loop {
            match self.peek() {
                Some(ref t) if is_delimiter(t) => { self.next(); continue },
                Some(Keyword(Kw::End)) => break,
                _ => {
                    let expr = try!(self.expression());
                    exprs.push(expr);
                }
            }
        }
        Ok(exprs)
    }

    fn expression(&mut self) -> ParseResult<Expression> {
        use tokenizer::Token::*;
        // Stub: only bare identifiers are parsed so far; numbers, binary
        // expressions and calls from the grammar above are still unhandled.
        let expr: Expression = match self.next() {
            Some(Identifier(s)) => Expression::Variable(s),
            _ => return ParseError::result_from_str("Expected expression"),
        };
        Ok(expr)
    }
}

pub fn parse(tokens: &[Token], _parsed_tree: &[ASTNode]) -> ParseResult<AST> {
    let mut parser = Parser::initialize(tokens);
    parser.program()
}
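
// Usage sketch: a minimal test exercising parse() end to end. It assumes only
// the tokenizer Token variants already used in this file (Keyword(Kw::Fn),
// Identifier, LParen, RParen, Newline, Keyword(Kw::End)).
#[cfg(test)]
mod tests {
    use super::{parse, ASTNode};
    use tokenizer::Token;
    use tokenizer::Kw;

    #[test]
    fn parses_a_one_expression_function() {
        // Token stream for:
        //     fn id(x)
        //         x
        //     end
        let tokens = vec![
            Token::Keyword(Kw::Fn),
            Token::Identifier("id".to_string()),
            Token::LParen,
            Token::Identifier("x".to_string()),
            Token::RParen,
            Token::Newline,
            Token::Identifier("x".to_string()),
            Token::Newline,
            Token::Keyword(Kw::End),
        ];
        let ast = parse(&tokens, &[]).expect("parse should succeed");
        assert_eq!(ast.len(), 1);
        match ast[0] {
            ASTNode::FuncNode(ref func) => {
                assert_eq!(func.prototype.name, "id");
                assert_eq!(func.prototype.args, vec!["x".to_string()]);
                assert_eq!(func.body.len(), 1);
            },
            _ => panic!("expected a function declaration"),
        }
    }
}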