Run rustfmt on the rest of them

greg 2016-12-29 02:04:03 -08:00
parent e1d07b4e66
commit 3063de1242
3 changed files with 99 additions and 77 deletions

src/main.rs

@@ -11,23 +11,24 @@ use simplerepl::{REPL, ReplState};
 use tokenizer::tokenize;
 mod tokenizer;
 
-use parser::{parse};
+use parser::parse;
 mod parser;
 
-use eval::{Evaluator};
+use eval::Evaluator;
 mod eval;
 
-use compilation::{compilation_sequence};
+use compilation::compilation_sequence;
 mod compilation;
 mod llvm_wrap;
 
 fn main() {
-    let option_matches = program_options().parse(std::env::args()).expect("Could not parse options");
+    let option_matches =
+        program_options().parse(std::env::args()).expect("Could not parse options");
     match option_matches.free[..] {
         [] | [_] => {
             run_repl();
-        },
-        [_, ref filename, ..] => {
+        }
+        [_, ref filename, _..] => {
             run_noninteractive(filename, !option_matches.opt_present("i"));
         }
     };
@@ -35,7 +36,9 @@ fn main() {
 
 fn program_options() -> getopts::Options {
     let mut options = getopts::Options::new();
-    options.optflag("i", "interpret", "Interpret source file instead of compiling");
+    options.optflag("i",
+                    "interpret",
+                    "Interpret source file instead of compiling");
     options
 }
 
@@ -78,8 +81,7 @@ fn run_repl() {
         show_parse: false,
         evaluator: Evaluator::new(),
     };
-    REPL::with_prompt_and_state(Box::new(repl_handler), ">> ", initial_state)
-        .run();
+    REPL::with_prompt_and_state(Box::new(repl_handler), ">> ", initial_state).run();
 }
 
 struct InterpreterState {
@@ -93,17 +95,17 @@ impl ReplState for InterpreterState {
         match input[..] {
             ["set", "show", "tokens", "true"] => {
                 self.show_tokens = true;
-            },
+            }
             ["set", "show", "tokens", "false"] => {
                 self.show_tokens = false;
-            },
+            }
             ["set", "show", "parse", "true"] => {
                 self.show_parse = true;
-            },
+            }
             ["set", "show", "parse", "false"] => {
                 self.show_parse = false;
-            },
-            _ => ()
+            }
+            _ => (),
         }
     }
 }
@@ -113,7 +115,7 @@ fn repl_handler(input: &str, state: &mut InterpreterState) -> String {
 
     let tokens = match tokenize(input) {
         Err(e) => return format!("Tokenization error"),
-        Ok(t) => t
+        Ok(t) => t,
     };
 
     if state.show_tokens {
@@ -131,7 +133,7 @@ fn repl_handler(input: &str, state: &mut InterpreterState) -> String {
 
     let mut output: Vec<String> = state.evaluator.run(ast);
 
-    //for now only handle last output
+    // for now only handle last output
    let interpreter_result = output.pop().unwrap_or("".to_string());
    result.push_str(&interpreter_result);
    result

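(A note on the `match option_matches.free[..]` arms above: they use the unstable slice-pattern syntax of 2016-era nightly Rust, where a trailing `_..` ignores the remainder of the slice. A minimal sketch of the same dispatch on modern stable Rust — slice patterns stabilized in 1.42 — with stub bodies standing in for the real functions:)

```rust
// Sketch only: stable-Rust equivalent of the slice-pattern match in main()
// above. The stub bodies stand in for the project's real functions.
fn run_repl() {
    println!("starting repl");
}

fn run_noninteractive(filename: &str, compile: bool) {
    println!("running {} (compile: {})", filename, compile);
}

fn main() {
    let args: Vec<String> = std::env::args().collect();
    match args.as_slice() {
        // zero or one element: no source file given, drop into the REPL
        [] | [_] => run_repl(),
        // program name, then a filename; `..` ignores any further arguments
        [_, filename, ..] => run_noninteractive(filename, true),
    }
}
```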
src/parser.rs

@@ -1,23 +1,23 @@
 use std::fmt;
 use tokenizer::{Token, Kw, Op};
 
-/* Grammar
-   program := (statement delimiter ?)*
-   delimiter := Newline | Semicolon
-   statement := declaration | expression
-   declaraion := Fn prototype (statement)* End
-   prototype := identifier LParen identlist RParen
-   identlist := Ident (Comma Ident)* | e
-   exprlist := Expression (Comma Expression)* | e
-
-   expression := primary_expression (op primary_expression)*
-   primary_expression := Number | String | identifier_expr | paren_expr | conditional_expr
-   identifier_expr := call_expression | Variable
-   paren_expr := LParen expression RParen
-   call_expr := Identifier LParen exprlist RParen
-   conditional_expr := IF expression THEN (expression delimiter?)* ELSE (expresion delimiter?)* END
-   op := '+', '-', etc.
-*/
+// Grammar
+// program := (statement delimiter ?)*
+// delimiter := Newline | Semicolon
+// statement := declaration | expression
+// declaraion := Fn prototype (statement)* End
+// prototype := identifier LParen identlist RParen
+// identlist := Ident (Comma Ident)* | e
+// exprlist := Expression (Comma Expression)* | e
+//
+// expression := primary_expression (op primary_expression)*
+// primary_expression := Number | String | identifier_expr | paren_expr | conditional_expr
+// identifier_expr := call_expression | Variable
+// paren_expr := LParen expression RParen
+// call_expr := Identifier LParen exprlist RParen
+// conditional_expr := IF expression THEN (expression delimiter?)* ELSE (expresion delimiter?)* END
+// op := '+', '-', etc.
+//
 
 #[derive(Debug, Clone)]
 pub enum ASTNode {
@@ -34,7 +34,7 @@ pub struct Function {
 #[derive(Debug, Clone, PartialEq)]
 pub struct Prototype {
     pub name: String,
-    pub parameters: Vec<String>
+    pub parameters: Vec<String>,
 }
 
 #[derive(Debug, Clone)]
@@ -74,12 +74,12 @@ pub type AST = Vec<ASTNode>;
 type Precedence = u8;
 
-//TODO make this support incomplete parses
+// TODO make this support incomplete parses
 pub type ParseResult<T> = Result<T, ParseError>;
 
 #[derive(Debug)]
 pub struct ParseError {
-    pub msg: String
+    pub msg: String,
 }
 
 impl ParseError {
@@ -103,7 +103,7 @@ impl Parser {
         self.tokens.last().map(|x| x.clone())
     }
 
-    fn next(&mut self) -> Option<Token>{
+    fn next(&mut self) -> Option<Token> {
         self.tokens.pop()
     }
@@ -150,7 +150,7 @@ fn is_delimiter(token: &Token) -> bool {
     use tokenizer::Token::*;
     match *token {
         Newline | Semicolon => true,
-        _ => false
+        _ => false,
     }
 }
@@ -159,14 +159,17 @@ impl Parser {
         let mut ast = Vec::new(); //TODO have this come from previously-parsed tree
         loop {
             let result: ParseResult<ASTNode> = match self.peek() {
-                Some(ref t) if is_delimiter(&t) => { self.next(); continue},
+                Some(ref t) if is_delimiter(&t) => {
+                    self.next();
+                    continue;
+                }
                 Some(_) => self.statement(),
                 None => break,
             };
 
             match result {
                 Ok(node) => ast.push(node),
-                Err(err) => return Err(err)
+                Err(err) => return Err(err),
             }
         }
@@ -190,7 +193,10 @@ impl Parser {
         let prototype = try!(self.prototype());
         let body: Vec<Expression> = try!(self.body());
         expect!(self, Keyword(Kw::End));
-        Ok(ASTNode::FuncNode(Function { prototype: prototype, body: body } ))
+        Ok(ASTNode::FuncNode(Function {
+            prototype: prototype,
+            body: body,
+        }))
     }
 
     fn prototype(&mut self) -> ParseResult<Prototype> {
@@ -199,7 +205,10 @@ impl Parser {
         expect!(self, LParen);
         let parameters: Vec<String> = try!(self.identlist());
         expect!(self, RParen);
-        Ok(Prototype {name: name, parameters: parameters})
+        Ok(Prototype {
+            name: name,
+            parameters: parameters,
+        })
     }
 
     fn identlist(&mut self) -> ParseResult<Vec<String>> {
@@ -240,7 +249,10 @@ impl Parser {
         let mut exprs = Vec::new();
         loop {
             match self.peek() {
-                Some(ref t) if is_delimiter(t) => { self.next(); continue},
+                Some(ref t) if is_delimiter(t) => {
+                    self.next();
+                    continue;
+                }
                 Some(Keyword(Kw::End)) => break,
                 _ => {
                     let expr = try!(self.expression());
@@ -256,7 +268,10 @@ impl Parser {
         self.precedence_expr(lhs, 0)
     }
 
-    fn precedence_expr(&mut self, mut lhs: Expression, min_precedence: u8) -> ParseResult<Expression> {
+    fn precedence_expr(&mut self,
+                       mut lhs: Expression,
+                       min_precedence: u8)
+                       -> ParseResult<Expression> {
         use tokenizer::Token::*;
         while let Some(Operator(op)) = self.peek() {
             let precedence = self.get_precedence(&op);
@@ -284,13 +299,22 @@ impl Parser {
     fn primary_expression(&mut self) -> ParseResult<Expression> {
         use tokenizer::Token::*;
         Ok(match self.peek() {
-            Some(Keyword(Kw::Null)) => { self.next(); Expression::Null },
-            Some(NumLiteral(n)) => { self.next(); Expression::Number(n) },
-            Some(StrLiteral(s)) => { self.next(); Expression::StringLiteral(s) },
-            Some(Identifier(_)) => { try!(self.identifier_expr()) },
-            Some(Token::LParen) => { try!(self.paren_expr()) }
+            Some(Keyword(Kw::Null)) => {
+                self.next();
+                Expression::Null
+            }
+            Some(NumLiteral(n)) => {
+                self.next();
+                Expression::Number(n)
+            }
+            Some(StrLiteral(s)) => {
+                self.next();
+                Expression::StringLiteral(s)
+            }
+            Some(Identifier(_)) => try!(self.identifier_expr()),
+            Some(Token::LParen) => try!(self.paren_expr()),
             Some(_) => return ParseError::result_from_str("Expected primary expression"),
-            None => return ParseError::result_from_str("Expected primary expression received EoI")
+            None => return ParseError::result_from_str("Expected primary expression received EoI"),
         })
     }
@@ -301,8 +325,8 @@ impl Parser {
             Some(LParen) => {
                 let args = try!(self.call_expr());
                 Expression::Call(name, args)
-            },
-            __ => Expression::Variable(name)
+            }
+            __ => Expression::Variable(name),
        };
        Ok(expr)

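(The reflowed `precedence_expr` signature above belongs to an operator-precedence, or "precedence climbing", parser. For context, here is a self-contained sketch of the technique; the token and expression types are simplified stand-ins, not the project's actual definitions.)

```rust
// Sketch of precedence climbing with simplified stand-in types.
#[derive(Debug)]
enum Expr {
    Num(f64),
    BinOp(String, Box<Expr>, Box<Expr>),
}

enum Tok {
    Num(f64),
    Op(String),
}

fn precedence(op: &str) -> u8 {
    match op {
        "+" | "-" => 10,
        "*" | "/" => 20,
        _ => 0,
    }
}

struct Parser {
    tokens: Vec<Tok>, // stored reversed, so pop() yields the next token
}

impl Parser {
    fn peek_op(&self) -> Option<String> {
        match self.tokens.last() {
            Some(Tok::Op(s)) => Some(s.clone()),
            _ => None,
        }
    }

    fn primary(&mut self) -> Expr {
        match self.tokens.pop() {
            Some(Tok::Num(n)) => Expr::Num(n),
            _ => panic!("expected a number"),
        }
    }

    // Fold operators into lhs as long as they bind at least as tightly as
    // min_precedence; recurse for operators that bind tighter still.
    fn precedence_expr(&mut self, mut lhs: Expr, min_precedence: u8) -> Expr {
        while let Some(op) = self.peek_op() {
            let prec = precedence(&op);
            if prec < min_precedence {
                break;
            }
            self.tokens.pop(); // consume the operator
            let mut rhs = self.primary();
            while let Some(next) = self.peek_op() {
                if precedence(&next) > prec {
                    rhs = self.precedence_expr(rhs, precedence(&next));
                } else {
                    break;
                }
            }
            lhs = Expr::BinOp(op, Box::new(lhs), Box::new(rhs));
        }
        lhs
    }
}

fn main() {
    // 1 + 2 * 3, reversed for pop()
    let mut p = Parser {
        tokens: vec![
            Tok::Num(3.0),
            Tok::Op("*".into()),
            Tok::Num(2.0),
            Tok::Op("+".into()),
            Tok::Num(1.0),
        ],
    };
    let lhs = p.primary();
    // prints BinOp("+", Num(1.0), BinOp("*", Num(2.0), Num(3.0)))
    println!("{:?}", p.precedence_expr(lhs, 0));
}
```

The key invariant is that each recursive call consumes only operators binding more tightly than the one currently being folded, which produces the expected grouping without a separate grammar rule per precedence level.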
src/tokenizer.rs

@@ -11,7 +11,7 @@ pub enum Token {
     StrLiteral(String),
     Identifier(String),
     Operator(Op),
-    Keyword(Kw)
+    Keyword(Kw),
 }
 
 #[derive(Debug, Clone, PartialEq)]
@@ -40,7 +40,7 @@ pub struct TokenizeError {
 impl TokenizeError {
     fn new(msg: &str) -> TokenizeError {
-        TokenizeError { msg: msg.to_string() }
+        TokenizeError { msg: msg.to_string() }
     }
 }
@@ -50,15 +50,8 @@ fn is_digit(c: &char) -> bool {
 fn ends_identifier(c: &char) -> bool {
     let c = *c;
-    char::is_whitespace(c) ||
-    is_digit(&c) ||
-    c == ';' ||
-    c == '(' ||
-    c == ')' ||
-    c == ',' ||
-    c == '.' ||
-    c == ',' ||
-    c == ':'
+    char::is_whitespace(c) || is_digit(&c) || c == ';' || c == '(' || c == ')' || c == ',' ||
+    c == '.' || c == ',' || c == ':'
 }
 
 pub fn tokenize(input: &str) -> TokenizeResult {
@@ -71,12 +64,13 @@ pub fn tokenize(input: &str) -> TokenizeResult {
             continue;
         } else if c == '#' {
             while let Some(c) = iter.next() {
-                if c == '\n' { break; }
+                if c == '\n' {
+                    break;
+                }
             }
         }
 
-        let cur_tok =
-            if c == '\n' {
+        let cur_tok = if c == '\n' {
             Newline
         } else if c == ';' {
             Semicolon
@@ -86,12 +80,12 @@ pub fn tokenize(input: &str) -> TokenizeResult {
             RParen
         } else if c == ':' {
             Colon
-        } else if c == ',' {
+        } else if c == ',' {
             Comma
         } else if c == '"' {
             let mut buffer = String::with_capacity(20);
             loop {
-                //TODO handle string escapes, interpolation
+                // TODO handle string escapes, interpolation
                 match iter.next() {
                     Some(x) if x == '"' => break,
                     Some(x) => buffer.push(x),
@@ -120,14 +114,15 @@ pub fn tokenize(input: &str) -> TokenizeResult {
             let mut buffer = String::with_capacity(20);
             buffer.push(c);
             loop {
-                if iter.peek().map_or(false, |x| !char::is_alphanumeric(*x) && !char::is_whitespace(*x)) {
+                if iter.peek().map_or(false,
+                                      |x| !char::is_alphanumeric(*x) && !char::is_whitespace(*x)) {
                     let n = iter.next().unwrap();
                     buffer.push(n);
                 } else {
                     break;
                 }
             }
-            Operator(Op {repr: buffer })
+            Operator(Op { repr: buffer })
         } else {
             let mut buffer = String::with_capacity(20);
             buffer.push(c);
@@ -148,7 +143,7 @@ pub fn tokenize(input: &str) -> TokenizeResult {
             "let" => Keyword(Kw::Let),
             "fn" => Keyword(Kw::Fn),
             "null" => Keyword(Kw::Null),
-            b => Identifier(b.to_string())
+            b => Identifier(b.to_string()),
         }
     };
@@ -175,16 +170,17 @@ mod tests {
     #[test]
     fn tokeniziation_tests() {
         tokentest!("let a = 3\n",
-                   "[Keyword(Let), Identifier(\"a\"), Operator(Op { repr: \"=\" }), NumLiteral(3), Newline]");
+                   "[Keyword(Let), Identifier(\"a\"), Operator(Op { repr: \"=\" }), \
+                    NumLiteral(3), Newline]");
 
         tokentest!("2+1",
-                  "[NumLiteral(2), Operator(Op { repr: \"+\" }), NumLiteral(1)]");
+                   "[NumLiteral(2), Operator(Op { repr: \"+\" }), NumLiteral(1)]");
 
         tokentest!("2 + 1",
-                  "[NumLiteral(2), Operator(Op { repr: \"+\" }), NumLiteral(1)]");
+                   "[NumLiteral(2), Operator(Op { repr: \"+\" }), NumLiteral(1)]");
 
         tokentest!("2.3*49.2",
-                  "[NumLiteral(2.3), Operator(Op { repr: \"*\" }), NumLiteral(49.2)]");
+                   "[NumLiteral(2.3), Operator(Op { repr: \"*\" }), NumLiteral(49.2)]");
 
         assert!(tokenize("2.4.5").is_err());
     }
@@ -192,9 +188,9 @@ mod tests {
     #[test]
     #[ignore]
     fn more_tokenization() {
-        //it would be nice to support complicated operators in a nice, haskell-ish way
+        // it would be nice to support complicated operators in a nice, haskell-ish way
        tokentest!("a *> b",
-                  "[Identifier(\"a\"), Identifier(\"*>\"), Identifier(\"b\"), EOF]");
+                   "[Identifier(\"a\"), Identifier(\"*>\"), Identifier(\"b\"), EOF]");
    }
}
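(Throughout the tokenizer hunks above, one idiom recurs: a `Peekable` character iterator that peeks to decide and calls `next()` to consume. A toy sketch of that loop, covering only `#` line comments and identifiers rather than the project's full token set:)

```rust
// Toy sketch of the Peekable<Chars> pattern used in the tokenizer above.
fn tokenize_idents(input: &str) -> Vec<String> {
    let mut iter = input.chars().peekable();
    let mut tokens = Vec::new();
    while let Some(c) = iter.next() {
        if c == '#' {
            // skip a line comment, as in the `c == '#'` branch above
            while let Some(c) = iter.next() {
                if c == '\n' {
                    break;
                }
            }
        } else if c.is_alphabetic() {
            // accumulate an identifier while the next char continues it
            let mut buffer = String::with_capacity(20);
            buffer.push(c);
            while iter.peek().map_or(false, |x| x.is_alphanumeric()) {
                buffer.push(iter.next().unwrap());
            }
            tokens.push(buffer);
        }
    }
    tokens
}

fn main() {
    // prints ["let", "a"] -- the comment is skipped
    println!("{:?}", tokenize_idents("let a # trailing comment\n"));
}
```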