Rearchitect parser

This ensures that the prelude gets parsed with the same ItemId context as
normal REPL input.
This commit is contained in:
greg 2019-10-25 01:49:15 -07:00
parent d824b8d6ef
commit efc8497235
4 changed files with 42 additions and 34 deletions

View File

@ -167,8 +167,9 @@ use crate::tokenizing::*;
use crate::tokenizing::Kw::*;
use crate::tokenizing::TokenKind::*;
use crate::source_map::{SourceMap, Location};
use crate::source_map::Location;
use crate::ast::*;
use crate::schala::SourceMapHandle;
/// Represents a parsing error
#[derive(Debug)]
@ -195,13 +196,13 @@ pub struct ParseRecord {
}
/// Main data structure for doing parsing.
pub struct Parser<'a> {
pub struct Parser {
token_handler: TokenHandler,
parse_record: Vec<ParseRecord>,
parse_level: u32,
restrictions: ParserRestrictions,
id_store: ItemIdStore,
source_map: &'a mut SourceMap,
source_map: SourceMapHandle
}
@ -244,11 +245,11 @@ impl TokenHandler {
}
}
impl<'a> Parser<'a> {
impl Parser {
/// Create a new parser initialized with some tokens.
pub fn new(initial_input: Vec<Token>, source_map: &mut SourceMap) -> Parser {
pub fn new(source_map: SourceMapHandle) -> Parser {
Parser {
token_handler: TokenHandler::new(initial_input),
token_handler: TokenHandler::new(vec![]),
parse_record: vec![],
parse_level: 0,
restrictions: ParserRestrictions { no_struct_literal: false },
@ -257,17 +258,15 @@ impl<'a> Parser<'a> {
}
}
pub fn add_new_tokens(&mut self, new_tokens: Vec<Token>) {
self.token_handler = TokenHandler::new(new_tokens);
}
/// Parse all loaded tokens up to this point.
pub fn parse(&mut self) -> ParseResult<AST> {
self.program()
}
/*
pub fn parse_with_new_tokens(&mut self, new_tokens: Vec<Token>) -> ParseResult<AST> {
}
*/
pub fn format_parse_trace(&self) -> String {
let mut buf = String::new();
buf.push_str("Parse productions:\n");
@ -346,7 +345,7 @@ macro_rules! delimited {
}
impl<'a> Parser<'a> {
impl Parser {
/// `program := (statement delimiter)* EOF`
/// `delimiter := NEWLINE | ';'`
#[recursive_descent_method]
@ -383,7 +382,7 @@ impl<'a> Parser<'a> {
_ => self.expression().map(|expr| { StatementKind::Expression(expr) } ),
}?;
let id = self.id_store.fresh();
self.source_map.add_location(&id, tok.location);
self.source_map.borrow_mut().add_location(&id, tok.location);
Ok(Statement { kind, id })
}

View File

@ -1,9 +1,9 @@
#![cfg(test)]
use ::std::rc::Rc;
use std::cell::RefCell;
use std::rc::Rc;
use std::str::FromStr;
use super::tokenize;
use super::ParseResult;
use super::{Parser, ParseResult, tokenize};
use crate::ast::*;
use super::Declaration::*;
use super::Signature;
@ -13,10 +13,17 @@ use super::ExpressionKind::*;
use super::Variant::*;
use super::ForBody::*;
fn parse(input: &str) -> ParseResult<AST> {
let mut source_map = crate::source_map::SourceMap::new();
fn make_parser(input: &str) -> Parser {
let source_map = crate::source_map::SourceMap::new();
let source_map_handle = Rc::new(RefCell::new(source_map));
let tokens: Vec<crate::tokenizing::Token> = tokenize(input);
let mut parser = super::Parser::new(tokens, &mut source_map);
let mut parser = super::Parser::new(source_map_handle);
parser.add_new_tokens(tokens);
parser
}
fn parse(input: &str) -> ParseResult<AST> {
let mut parser = make_parser(input);
parser.parse()
}
@ -75,9 +82,7 @@ macro_rules! ex {
($expr_type:expr, $type_anno:expr) => { Expression::with_anno(ItemIdStore::new_id(), $expr_type, $type_anno) };
(s $expr_text:expr) => {
{
let mut source_map = crate::source_map::SourceMap::new();
let tokens: Vec<crate::tokenizing::Token> = tokenize($expr_text);
let mut parser = super::Parser::new(tokens, &mut source_map);
let mut parser = make_parser($expr_text);
parser.expression().unwrap()
}
};
@ -100,9 +105,7 @@ macro_rules! exst {
};
(s $statement_text:expr) => {
{
let mut source_map = crate::source_map::SourceMap::new();
let tokens: Vec<crate::tokenizing::Token> = tokenize($statement_text);
let mut parser = super::Parser::new(tokens, &mut source_map);
let mut parser = make_parser($statement_text);
parser.statement().unwrap()
}
}

View File

@ -25,7 +25,7 @@ pub struct Schala {
symbol_table: SymbolTableHandle,
resolver: crate::scope_resolution::ScopeResolver<'static>,
type_context: typechecking::TypeContext<'static>,
active_parser: Option<parsing::Parser<'static>>,
active_parser: parsing::Parser,
}
impl Schala {
@ -49,7 +49,7 @@ impl Schala {
resolver: crate::scope_resolution::ScopeResolver::new(symbols.clone()),
state: eval::State::new(symbols),
type_context: typechecking::TypeContext::new(),
active_parser: None,
active_parser: parsing::Parser::new(source_map)
}
}
@ -106,11 +106,10 @@ fn tokenizing(input: &str, _handle: &mut Schala, comp: Option<&mut PassDebugArti
}
fn parsing(input: Vec<tokenizing::Token>, handle: &mut Schala, comp: Option<&mut PassDebugArtifact>) -> Result<ast::AST, String> {
use crate::parsing::Parser;
use ParsingDebugType::*;
let ref mut source_map = handle.source_map.borrow_mut();
let mut parser = handle.active_parser.take().unwrap_or_else(|| Parser::new(input, source_map));
let ref mut parser = handle.active_parser;
parser.add_new_tokens(input);
let ast = parser.parse();
comp.map(|comp| {

View File

@ -49,10 +49,17 @@ impl<'a, T, V> ScopeStack<'a, T, V> where T: Hash + Eq {
/// this is intended for use in tests, and does no error-handling whatsoever
#[allow(dead_code)]
pub fn quick_ast(input: &str) -> (crate::ast::AST, crate::source_map::SourceMap) {
let mut source_map = crate::source_map::SourceMap::new();
use std::cell::RefCell;
use std::rc::Rc;
let source_map = crate::source_map::SourceMap::new();
let source_map_handle = Rc::new(RefCell::new(source_map));
let tokens = crate::tokenizing::tokenize(input);
let mut parser = crate::parsing::Parser::new(tokens, &mut source_map);
(parser.parse().unwrap(), source_map)
let mut parser = crate::parsing::Parser::new(source_map_handle.clone());
parser.add_new_tokens(tokens);
let output = parser.parse();
std::mem::drop(parser);
(output.unwrap(), Rc::try_unwrap(source_map_handle).map_err(|_| ()).unwrap().into_inner())
}
#[allow(unused_macros)]