Refactoring Schala

Gonna work on Schala in earnest now! Using the simplerepl crate instead
of a built-in REPL, and temporarily dropping the parsing and evaluation code.
greg 2015-12-18 23:40:30 -08:00
parent 123f388711
commit 3af7e6a409
4 changed files with 9 additions and 689 deletions
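
For orientation, here is a rough sketch of what the slimmed-down main.rs amounts to after this commit, pieced together from the added lines in the diff below. The exact contents of the new 27-line file (e.g. whether the version banner stays, or whether a `mod parser;` stub is kept) are assumptions:

```rust
extern crate simplerepl;

use simplerepl::REPL;
use tokenizer::tokenize;

mod tokenizer;

// The REPL callback: parsing and evaluation are dropped for now,
// so each input line is just echoed back as its token stream.
fn repl_handler(input: &str) -> String {
    format!("{:?}", tokenize(input))
}

fn main() {
    println!("Schala v 0.02");
    // simplerepl owns the read/print loop; we only supply the handler.
    REPL::default(repl_handler).run();
}
```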


@@ -1,178 +0,0 @@
use std::collections::HashMap;
use std::clone::Clone;
use parser::AST;
use parser::AST::*;
pub struct Environment(pub HashMap<String, AST>);
type EvalResult = (AST, Environment);
impl Environment {
pub fn new() -> Environment {
let mut map = HashMap::new();
map.insert("true".to_string(), LangTrue);
map.insert("false".to_string(), LangFalse);
Environment(map)
}
fn add_binding(&mut self, name: String, binding: AST) {
match *self {
Environment(ref mut hash_map) => hash_map.insert(name, binding)
};
}
fn lookup_binding(&mut self, name: &String) -> Option<&AST> {
match *self {
Environment(ref mut hash_map) => hash_map.get(name)
}
}
pub fn display(&self) {
match *self {
Environment(ref hash_map) =>
for (var, binding) in hash_map {
println!("{} : {:?}", var, binding);
}
}
println!("----");
}
}
pub fn evaluate(ast: AST, env: Environment) -> (String, Environment) {
let mut reduction = (ast, env);
while is_reducable(&reduction.0) {
reduction = reduce(reduction);
}
let output = match reduction.0 {
DoNothing => "".to_string(),
Number(n) => format!("{}", n),
LangString(s) => format!("\"{}\"", s),
Null => "null".to_string(),
LangFalse => "false".to_string(),
LangTrue => "true".to_string(),
other => format!("reducing {:?} not implemented", other)
};
(output, reduction.1)
}
fn is_reducable(ast: &AST) -> bool {
match *ast {
DoNothing => false,
Number(_) => false,
LangString(_) => false,
Null => false,
LangFalse => false,
LangTrue => false,
_ => true
}
}
fn reduce(evr: EvalResult) -> EvalResult {
let (ast, mut env) = evr;
match ast {
IfStatement(if_clause, then_clause, else_clause) => {
let (condition, new_env) = (*if_clause, env);
match condition {
Null | LangFalse => match else_clause {
Some(cl) => (*cl, new_env),
None => (DoNothing, new_env)
},
_ => (*then_clause, new_env)
}
},
WhileStatement(condition, body) => {
let (continue_loop, env) = reduce((*condition.clone(), env));
match continue_loop {
Null | LangFalse => (DoNothing, env),
_ => {
let (_, new_env) = reduce((*body.clone(), env));
(WhileStatement(condition, body), new_env)
}
}
},
BinOp(op, lhs, rhs) => {
let (reduced_lhs, new_env) = reduce((*lhs, env));
let (reduced_rhs, new_env2) = reduce((*rhs, new_env));
let result: AST = reduce_binop(*op, reduced_lhs, reduced_rhs);
(result, new_env2)
},
Name(name) => {
let result = match env.lookup_binding(&name) {
Some(binding) => match binding {
&DoNothing => DoNothing,
&LangTrue => LangTrue,
&LangFalse => LangFalse,
&Number(n) => Number(n),
&LangString(ref s) => LangString(s.clone()),
&Null => Null,
_ => panic!("Unreduced ast node for name: {:?}", name)
},
None => Null
};
(result, env)
},
Statements(stmts) => {
let mut reduced_ast = DoNothing;
let mut reduced_env = env;
for stmt in stmts.into_iter() {
let (new_ast, new_env) = reduce((stmt, reduced_env));
reduced_env = new_env;
reduced_ast = new_ast;
}
(reduced_ast, reduced_env)
},
Binding(name, binding) => {
let unboxed_binding = *binding;
let (evaluated_binding, mut evaluated_env) = reduce((unboxed_binding, env));
evaluated_env.add_binding(name, evaluated_binding);
(DoNothing, evaluated_env)
},
other_ast => (other_ast, env)
}
}
fn reduce_binop(op: AST, lhs: AST, rhs: AST) -> AST {
match (lhs, rhs) {
(Number(l), Number(r)) => match op {
Name(ref s) if *s == "+" => Number(l + r),
Name(ref s) if *s == "-" => Number(l - r),
Name(ref s) if *s == "*" => Number(l * r),
Name(ref s) if *s == "/" => if r == 0.0 { Null } else { Number(l / r) },
Name(ref s) if *s == "==" => if l == r { LangTrue } else { LangFalse },
Name(ref s) if *s == ">" => if l > r { LangTrue } else { LangFalse },
Name(ref s) if *s == "<" => if l < r { LangTrue } else { LangFalse },
_ => Null
},
(LangString(s1), LangString(s2)) => match op {
Name(ref s) if *s == "+" => LangString(format!("{}{}", s1, s2)),
_ => Null
},
_ => Null
}
}
#[cfg(test)]
mod test {
use super::*;
}


@@ -1,190 +1,27 @@
use std::io;
use std::io::Write;
use std::io::BufRead;
use std::process;
use std::cell::RefCell;
use std::collections::HashMap;
extern crate simplerepl;
use std::path::Path;
use std::fs::File;
use std::io::Read;
use simplerepl::REPL;
use tokenizer::tokenize;
use parser::{parse};
use evaluate::{evaluate, Environment};
mod tokenizer;
mod parser;
mod evaluate;
struct REPLOptions {
show_tokenization: bool,
show_ast: bool
}
impl REPLOptions {
fn new() -> REPLOptions {
REPLOptions {
show_tokenization: false,
show_ast: false
}
}
}
type BinopTable = HashMap<&'static str, i32>;
thread_local!(static BINOP_TABLE: RefCell<BinopTable> = RefCell::new(HashMap::new()));
fn main() {
let args: Vec<String> = std::env::args().collect();
println!("Schala v 0.02");
init_binop_table();
if let Some(filename) = args.get(1) {
let mut source_file = File::open(&Path::new(filename)).unwrap();
let mut buffer = String::new();
source_file.read_to_string(&mut buffer).unwrap();
match parse(tokenize(&buffer)) {
Ok(ast) => {
let (result, env) = evaluate(ast, Environment::new());
println!("{}", result);
},
Err(err) => println!("{}", err)
}
panic!("Not implemented yet");
} else {
repl();
REPL::default(repl_handler).run();
}
}
fn init_binop_table() {
BINOP_TABLE.with(|hm| {
macro_rules! insert_precedence {
($op:expr, $prec:expr) => { hm.borrow_mut().insert($op, $prec) }
}
insert_precedence!("+", 20);
insert_precedence!("-", 20);
insert_precedence!("*", 40);
insert_precedence!("/", 40);
insert_precedence!("%", 40);
insert_precedence!("**", 50);
insert_precedence!("==", 10);
insert_precedence!(">", 15);
insert_precedence!("<", 15);
insert_precedence!("<=>", 15);
});
}
fn repl() {
let mut options = REPLOptions::new();
let stdin = io::stdin();
let mut stdout = io::stdout();
let mut buf = String::with_capacity(20);
let mut env = Environment::new();
loop {
buf.clear();
print!(">> ");
stdout.flush().ok();
let line = stdin.lock().read_line(&mut buf);
match line {
Ok(_) => {
if buf.is_empty() {
break;
}
if handle_interpreter_directive(&buf, &env, &mut options) {
continue;
}
let tokens = tokenize(&buf);
if options.show_tokenization {
println!("Tokens: {:?}", tokens);
}
match parse(tokens) {
Ok(ast) => {
if options.show_ast {
println!("AST: {:?}", ast);
}
let (eval, new_env) = evaluate(ast, env);
if !eval.is_empty() {
println!("{}", eval);
}
env = new_env;
},
Err(err) => println!("Error: {}", err)
}
},
Err(err) => {
println!("Error: {}", err);
}
}
}
}
fn handle_interpreter_directive(input: &str,
env: &Environment,
options: &mut REPLOptions) -> bool {
match input.chars().nth(0) {
Some('.') => (),
_ => return false
}
let commands: Vec<&str> = input.split(|c: char| c.is_whitespace()).collect();
match commands.get(0) {
Some(s) if *s == ".show" => {
match commands.get(1) {
Some(s) if *s == "parse" => {
options.show_ast = true;
println!("Showing parse result");
},
Some(s) if *s == "tokens" => {
options.show_tokenization = true;
println!("Showing tokenization");
},
_ => println!("Bad option for show"),
}
},
Some(s) if *s == ".hide" => {
match commands.get(1) {
Some(s) if *s == "parse" => {
options.show_ast = false;
println!("Hiding parse result");
},
Some(s) if *s == "tokens" => {
options.show_tokenization = false;
println!("Hiding tokenization");
},
_ => println!("Bad option for hide"),
}
},
Some(s) if *s == ".quit" => {
println!("Siturei simasu");
process::exit(0);
},
Some(s) if *s == ".env" => {
env.display();
},
Some(s) if *s == ".prec" => {
BINOP_TABLE.with(|hm| {
println!("{0: <10} | {1: <10}", "operator", "precedence");
let prec_table = hm.borrow();
for (op, prec) in prec_table.iter() {
println!("{0: <10} | {1: <10}", op, prec);
}
});
},
Some(s) => {
println!("Unknown directive: {}", s);
},
None => () //should never happen
}
return true;
}
fn repl_handler(input: &str) -> String {
format!("{:?}", tokenize(input))
}


@@ -1,340 +0,0 @@
use std::slice::Iter;
use std::iter::Peekable;
use tokenizer::{Token, Kw};
use tokenizer::Token::*;
#[derive(Debug, Clone)]
pub enum AST {
Null,
LangTrue,
LangFalse,
Name(String),
LangString(String),
Number(f64),
BinOp(Box<AST>, Box<AST>, Box<AST>),
Binding(String, Box<AST>),
Statements(Vec<AST>),
IfStatement(Box<AST>, Box<AST>, Option<Box<AST>>),
WhileStatement(Box<AST>, Box<AST>),
Function(String, Box<AST>, Box<AST>),
ArgList(Vec<String>),
DoNothing
}
pub type ParseResult = Result<AST, String>;
type Tokens<'a> = Peekable<Iter<'a,Token>>;
/* expect calls .next() and thus advances the token list */
macro_rules! expect {
($tok:expr, $tokens:expr) => ( if !expect_token($tok, $tokens) {
let tokens_left: Vec<&Token> = $tokens.collect();
let err_string = format!("Expected {:?}\ntokens: {:?}", $tok, tokens_left);
return Err(err_string);
})
}
macro_rules! expect_parse {
($parse_fn:ident, $tokens:ident) => (
match $parse_fn($tokens) {
err@Err(_) => return err,
Ok(ast) => ast
})
}
fn expect_token(tok: Token, tokens: &mut Tokens) -> bool {
if let Some(n) = tokens.next() {
let next = (*n).clone();
return match (tok, next) {
(EOF, EOF) => true,
(Separator, Separator) => true,
(LParen, LParen) => true,
(RParen, RParen) => true,
(Comma, Comma) => true,
(NumLiteral(_), NumLiteral(_)) => true,
(StrLiteral(_), StrLiteral(_)) => true,
(Identifier(ref i1), Identifier(ref i2)) => i1 == i2,
(Keyword(k1), Keyword(k2)) => k1 == k2,
_ => false
}
}
false
}
pub fn parse(input: Vec<Token>) -> ParseResult {
let mut tokens: Tokens = input.iter().peekable();
if let Some(&&EOF) = tokens.peek() {
return Ok(AST::Statements(vec!()));
}
match statements(&mut tokens) {
ok@Ok(_) => {
expect!(EOF, &mut tokens);
ok
},
err@Err(_) => err
}
}
fn statements(tokens: &mut Tokens) -> ParseResult {
let mut statements = Vec::new();
let initial_statement = expect_parse!(statement, tokens);
statements.push(initial_statement);
loop {
let lookahead = tokens.peek().map(|i| i.clone());
match lookahead {
Some(&Separator) => {
tokens.next();
match statement(tokens) {
Ok(ast_next) => {
statements.push(ast_next);
},
err@Err(_) => return err
};
},
_ => break
}
}
return Ok(AST::Statements(statements));
}
fn statement(tokens: &mut Tokens) -> ParseResult {
match tokens.peek().map(|i| i.clone()) {
Some(&Keyword(Kw::Let)) => let_expression(tokens),
Some(&Keyword(Kw::Fn)) => function_block(tokens),
_ => expression(tokens)
}
}
fn function_block(tokens: &mut Tokens) -> ParseResult {
expect!(Keyword(Kw::Fn), tokens);
let name: String = match tokens.next() {
Some(&Identifier(ref s)) => s.clone(),
_ => return Err("bad parse in function_block()".to_string())
};
expect!(LParen, tokens);
let arguments = try!(argument_list(tokens));
expect!(RParen, tokens);
let body = try!(statements(tokens));
expect!(Keyword(Kw::End), tokens);
Ok(AST::Function(
name,
Box::new(arguments),
Box::new(body)
))
}
fn argument_list(tokens: &mut Tokens) -> ParseResult {
let mut args: Vec<String> = Vec::new();
loop {
let lookahead = tokens.peek().map(|i| i.clone());
match lookahead {
Some(&Identifier(ref s)) => {
args.push(s.clone());
tokens.next();
if let Some(&Comma) = tokens.peek().map(|i| i.clone()) {
tokens.next();
} else {
break;
}
},
_ => break
}
}
Ok(AST::ArgList(args))
}
fn let_expression(tokens: &mut Tokens) -> ParseResult {
expect!(Keyword(Kw::Let), tokens);
if let Some(&Identifier(ref name)) = tokens.next() {
if let Some(&Keyword(Kw::Assign)) = tokens.next() {
if let Ok(expr) = expression(tokens) {
return Ok(
AST::Binding(name.clone(),
Box::new(expr)));
}
}
}
return Err("Bad parse in let_expression()".to_string());
}
fn expression(tokens: &mut Tokens) -> ParseResult {
let lookahead = tokens.peek().map(|i| i.clone());
match lookahead {
Some(&Keyword(Kw::If)) => {
if_expression(tokens)
},
Some(&Keyword(Kw::While)) => {
while_expression(tokens)
},
_ => binop_expression(0, tokens)
}
}
fn if_expression(tokens: &mut Tokens) -> ParseResult {
expect!(Keyword(Kw::If), tokens);
let if_clause = expect_parse!(expression, tokens);
expect!(Keyword(Kw::Then), tokens);
let then_clause = expect_parse!(expression, tokens);
let else_clause = match tokens.peek().map(|i| i.clone()) {
Some(&Keyword(Kw::Else)) => {
tokens.next();
match expression(tokens) {
err@Err(_) => return err,
Ok(ast) => Some(ast)
}
},
_ => None
};
expect!(Keyword(Kw::End), tokens);
Ok(AST::IfStatement(
Box::new(if_clause),
Box::new(then_clause),
else_clause.map(|ast| Box::new(ast))
))
}
fn while_expression(tokens: &mut Tokens) -> ParseResult {
expect!(Keyword(Kw::While), tokens);
let while_expression = expect_parse!(expression, tokens);
expect!(Separator, tokens);
let statements = expect_parse!(statements, tokens);
expect!(Keyword(Kw::End), tokens);
Ok(AST::WhileStatement(
Box::new(while_expression),
Box::new(statements),
))
}
fn binop_expression(precedence: i32, tokens: &mut Tokens) -> ParseResult {
//TODO left needs to match on an identifier vs. a prefix operator and return *that* AST
let mut left: AST = expect_parse!(simple_expression, tokens);
loop {
let lookahead: Option<&Token> = tokens.peek().map(|i| i.clone());
let next_precedence = lookahead.and_then(|t| get_binop_precedence(t));
match next_precedence {
Some(next) if precedence < next => {
left = match binop_rhs(next, left, tokens) {
err@Err(_) => return err,
Ok(ast) => ast
};
},
_ => return Ok(left),
}
}
}
fn binop_rhs(precedence: i32, lhs: AST, tokens: &mut Tokens) -> ParseResult {
let op: AST = match simple_expression(tokens) {
err@Err(_) => return err,
Ok(ast) => ast
};
let rhs: AST = match binop_expression(precedence, tokens) {
err@Err(_) => return err,
Ok(ast) => ast
};
Ok(AST::BinOp(
Box::new(op),
Box::new(lhs),
Box::new(rhs)
))
}
fn get_binop_precedence(token: &Token) -> Option<i32> {
let identifier_str: &String = match token {
&Identifier(ref s) => s,
_ => return None
};
let output =
::BINOP_TABLE.with(|hm| {
let prec_table = hm.borrow();
let val: Option<i32> = prec_table.get(&identifier_str[..]).map(|i| *i);
val
});
output
}
fn simple_expression(tokens: &mut Tokens) -> ParseResult {
let next = tokens.next();
match next {
Some(&Keyword(Kw::Null)) =>
Ok(AST::Name("null".to_string())),
Some(&Identifier(ref value)) =>
Ok(AST::Name(value.clone())),
Some(&StrLiteral(ref value)) =>
Ok(AST::LangString(value.clone())),
Some(&NumLiteral(n)) =>
Ok(AST::Number(n)),
Some(&LParen) => {
let within_paren = expression(tokens);
expect!(RParen, tokens);
within_paren
},
_ => Err("Bad parse in simple_expression()".to_string())
}
}
#[cfg(test)]
mod tests {
use super::*;
use tokenizer::tokenize;
#[test]
fn parse_tests() {
::init_binop_table();
match parse(tokenize("a + b * c")) {
Ok(ast) =>
assert_eq!(format!("{:?}", ast), "Statements([BinOp(Name(\"+\"), Name(\"a\"), BinOp(Name(\"*\"), Name(\"b\"), Name(\"c\")))])"),
Err(err) => panic!("err: {:?}", err)
}
match parse(tokenize("(a + b) * c")) {
Ok(ast) =>
assert_eq!(format!("{:?}", ast), "Statements([BinOp(Name(\"*\"), BinOp(Name(\"+\"), Name(\"a\"), Name(\"b\")), Name(\"c\"))])"),
Err(err) => panic!("err: {:?}", err)
}
}
}


@@ -65,6 +65,7 @@ pub fn tokenize(input: &str) -> Vec<Token> {
}
} else if c == ';' || c == '\n' {
if let Some(&Token::Separator) = tokens.last() {
//skip past multiple separators
} else {
tokens.push(Token::Separator);
}
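
The one-line tokenizer change above collapses consecutive `;`/newline characters into a single `Separator` token. A minimal sketch of a test for that behavior (hypothetical: it assumes the test lives alongside the tokenizer module and that identifiers are flushed before separators are handled):

```rust
#[cfg(test)]
mod separator_tests {
    use super::{tokenize, Token};

    #[test]
    fn consecutive_separators_collapse() {
        // "a;;\n\nb" contains a run of four separator characters; with the
        // change above, only one Separator token should be emitted for the run.
        let tokens = tokenize("a;;\n\nb");
        let separators = tokens.iter().filter(|t| match **t {
            Token::Separator => true,
            _ => false,
        }).count();
        assert_eq!(separators, 1);
    }
}
```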