Change how parsing works

This commit is contained in:
greg 2018-10-20 14:27:00 -07:00
parent d9e67a6341
commit 6b42f8b8de
3 changed files with 45 additions and 25 deletions

View File

@ -436,17 +436,24 @@ fn case_match_expression(&mut self, cond: Expr, alternatives: Vec<Alternative>)
mod eval_tests { mod eval_tests {
use std::cell::RefCell; use std::cell::RefCell;
use std::rc::Rc; use std::rc::Rc;
use tokenizing::{Token, tokenize};
use ::parsing::ParseResult;
use ::ast::AST;
use symbol_table::SymbolTable; use symbol_table::SymbolTable;
use tokenizing::tokenize;
use parsing::parse;
use eval::State; use eval::State;
/// Test helper: run the full grammar (`program`) over a token stream,
/// returning the parser's result directly.
fn parse(tokens: Vec<Token>) -> ParseResult<AST> {
    // A fresh parser per call keeps each test independent; `program`
    // takes `&mut self`, which auto-refs the temporary here.
    ::parsing::Parser::new(tokens).program()
}
macro_rules! all_output { macro_rules! all_output {
($string:expr) => { ($string:expr) => {
{ {
let symbol_table = Rc::new(RefCell::new(SymbolTable::new())); let symbol_table = Rc::new(RefCell::new(SymbolTable::new()));
let mut state = State::new(symbol_table); let mut state = State::new(symbol_table);
let ast = parse(tokenize($string)).0.unwrap(); let ast = parse(tokenize($string)).unwrap();
state.symbol_table_handle.borrow_mut().add_top_level_symbols(&ast).unwrap(); state.symbol_table_handle.borrow_mut().add_top_level_symbols(&ast).unwrap();
let reduced = ast.reduce(&state.symbol_table_handle.borrow()); let reduced = ast.reduce(&state.symbol_table_handle.borrow());
let all_output = state.evaluate(reduced, true); let all_output = state.evaluate(reduced, true);

View File

@ -44,6 +44,7 @@ mod eval;
pub struct Schala { pub struct Schala {
state: eval::State<'static>, state: eval::State<'static>,
symbol_table: Rc<RefCell<symbol_table::SymbolTable>>, symbol_table: Rc<RefCell<symbol_table::SymbolTable>>,
active_parser: Option<parsing::Parser>,
} }
impl Schala { impl Schala {
@ -62,6 +63,7 @@ impl Schala {
Schala { Schala {
symbol_table: symbols.clone(), symbol_table: symbols.clone(),
state: eval::State::new(symbols), state: eval::State::new(symbols),
active_parser: None,
} }
} }
@ -92,9 +94,17 @@ fn tokenizing(_handle: &mut Schala, input: &str, comp: Option<&mut UnfinishedCom
} }
} }
fn parsing(_handle: &mut Schala, input: Vec<tokenizing::Token>, comp: Option<&mut UnfinishedComputation>) -> Result<ast::AST, String> { fn parsing(handle: &mut Schala, input: Vec<tokenizing::Token>, comp: Option<&mut UnfinishedComputation>) -> Result<ast::AST, String> {
use parsing::Parser;
let mut parser = match handle.active_parser.take() {
None => Parser::new(input),
Some(parser) => parser
};
let ast = parser.program();
let trace = parser.format_parse_trace();
let (ast, trace) = parsing::parse(input);
comp.map(|comp| { comp.map(|comp| {
//TODO need to control which of these debug stages get added //TODO need to control which of these debug stages get added
let opt = comp.cur_debug_options.get(0).map(|s| s.clone()); let opt = comp.cur_debug_options.get(0).map(|s| s.clone());

View File

@ -34,7 +34,7 @@ pub struct ParseRecord {
level: u32, level: u32,
} }
struct Parser { pub struct Parser {
tokens: Peekable<IntoIter<Token>>, tokens: Peekable<IntoIter<Token>>,
parse_record: Vec<ParseRecord>, parse_record: Vec<ParseRecord>,
parse_level: u32, parse_level: u32,
@ -46,7 +46,7 @@ struct ParserRestrictions {
} }
impl Parser { impl Parser {
fn new(input: Vec<Token>) -> Parser { pub fn new(input: Vec<Token>) -> Parser {
Parser { Parser {
tokens: input.into_iter().peekable(), tokens: input.into_iter().peekable(),
parse_record: vec![], parse_record: vec![],
@ -64,6 +64,16 @@ impl Parser {
fn next(&mut self) -> TokenType { fn next(&mut self) -> TokenType {
self.tokens.next().map(|ref t| { t.token_type.clone() }).unwrap_or(TokenType::EOF) self.tokens.next().map(|ref t| { t.token_type.clone() }).unwrap_or(TokenType::EOF)
} }
/// Consume the parser and render its recorded parse trace: one line per
/// recorded production, indented two-spaces-free — by `level` spaces —
/// to show recursion depth.
pub fn format_parse_trace(self) -> Vec<String> {
    self.parse_record
        .into_iter()
        .map(|record| {
            let indent = " ".repeat(record.level as usize);
            format!("{}Production `{}`, token: {}", indent, record.production_name, record.next_token)
        })
        .collect()
}
} }
macro_rules! print_token_pattern { macro_rules! print_token_pattern {
@ -240,8 +250,9 @@ enumerator := identifier '<-' expression | identifier '=' expression //TODO add
*/ */
impl Parser { impl Parser {
//TODO make this a proper public interface
#[recursive_descent_method] #[recursive_descent_method]
fn program(&mut self) -> ParseResult<AST> { pub fn program(&mut self) -> ParseResult<AST> {
let mut statements = Vec::new(); let mut statements = Vec::new();
loop { loop {
match self.peek() { match self.peek() {
@ -1034,24 +1045,11 @@ fn parse_hex(digits: String) -> ParseResult<u64> {
Ok(result) Ok(result)
} }
/// Parse a token stream into an AST, also returning a human-readable
/// trace of the recursive-descent productions that were visited
/// (one entry per `ParseRecord`, indented by recursion depth).
pub fn parse(input: Vec<Token>) -> (Result<AST, ParseError>, Vec<String>) {
    let mut parser = Parser::new(input);
    let ast = parser.program();
    // Render the trace after parsing so it reflects the whole run.
    let trace: Vec<String> = parser
        .parse_record
        .into_iter()
        .map(|record| {
            format!(
                "{}Production `{}`, token: {}",
                " ".repeat(record.level as usize),
                record.production_name,
                record.next_token
            )
        })
        .collect();
    (ast, trace)
}
#[cfg(test)] #[cfg(test)]
mod parse_tests { mod parse_tests {
use ::std::rc::Rc; use ::std::rc::Rc;
use super::{parse, tokenize}; use super::tokenize;
use super::ParseResult;
use builtin::{PrefixOp, BinOp}; use builtin::{PrefixOp, BinOp};
use ast::{AST, Expression, Statement, IfExpressionBody, Discriminator, Pattern, PatternLiteral, TypeBody, Enumerator, ForBody}; use ast::{AST, Expression, Statement, IfExpressionBody, Discriminator, Pattern, PatternLiteral, TypeBody, Enumerator, ForBody};
use super::Statement::*; use super::Statement::*;
@ -1063,14 +1061,19 @@ mod parse_tests {
use super::Variant::*; use super::Variant::*;
use super::ForBody::*; use super::ForBody::*;
/// Test helper: parse a token stream into an AST with a fresh `Parser`.
fn parse(tokens: Vec<::tokenizing::Token>) -> ParseResult<AST> {
    // Temporary parser is auto-mutably-borrowed for the `program` call.
    super::Parser::new(tokens).program()
}
macro_rules! rc { macro_rules! rc {
($string:tt) => { Rc::new(stringify!($string).to_string()) } ($string:tt) => { Rc::new(stringify!($string).to_string()) }
} }
macro_rules! parse_test { macro_rules! parse_test {
($string:expr, $correct:expr) => { assert_eq!(parse(tokenize($string)).0.unwrap(), $correct) } ($string:expr, $correct:expr) => { assert_eq!(parse(tokenize($string)).unwrap(), $correct) }
} }
macro_rules! parse_error { macro_rules! parse_error {
($string:expr) => { assert!(parse(tokenize($string)).0.is_err()) } ($string:expr) => { assert!(parse(tokenize($string)).is_err()) }
} }
macro_rules! val { macro_rules! val {
($var:expr) => { Value(Rc::new($var.to_string())) } ($var:expr) => { Value(Rc::new($var.to_string())) }