Rearchitect parser
This ensures that the prelude gets parsed with the same ItemId context as normal REPL input.
parent d824b8d6ef
commit efc8497235
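
Note (not part of the commit): the diff below swaps the parser's `&'a mut SourceMap` borrow for a shared `SourceMapHandle`, which removes the lifetime parameter from `Parser` and lets one long-lived parser sit inside `Schala` across REPL inputs. Judging from the `Rc::new(RefCell::new(..))`, `.borrow_mut()`, and `Rc::try_unwrap(..).into_inner()` calls in the hunks below, the handle is presumably an alias along these lines; this is an inference, the definition itself is not shown in the diff:

    // Assumed definition of the handle imported as crate::schala::SourceMapHandle.
    use std::cell::RefCell;
    use std::rc::Rc;
    use crate::source_map::SourceMap;

    pub type SourceMapHandle = Rc<RefCell<SourceMap>>;
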
@@ -167,8 +167,9 @@ use crate::tokenizing::*;
 use crate::tokenizing::Kw::*;
 use crate::tokenizing::TokenKind::*;
 
-use crate::source_map::{SourceMap, Location};
+use crate::source_map::Location;
 use crate::ast::*;
+use crate::schala::SourceMapHandle;
 
 /// Represents a parsing error
 #[derive(Debug)]
@@ -195,13 +196,13 @@ pub struct ParseRecord {
 }
 
 /// Main data structure for doing parsing.
-pub struct Parser<'a> {
+pub struct Parser {
   token_handler: TokenHandler,
   parse_record: Vec<ParseRecord>,
   parse_level: u32,
   restrictions: ParserRestrictions,
   id_store: ItemIdStore,
-  source_map: &'a mut SourceMap,
+  source_map: SourceMapHandle
 }
 
 
@@ -244,11 +245,11 @@ impl TokenHandler {
   }
 }
 
-impl<'a> Parser<'a> {
+impl Parser {
   /// Create a new parser initialized with some tokens.
-  pub fn new(initial_input: Vec<Token>, source_map: &mut SourceMap) -> Parser {
+  pub fn new(source_map: SourceMapHandle) -> Parser {
     Parser {
-      token_handler: TokenHandler::new(initial_input),
+      token_handler: TokenHandler::new(vec![]),
      parse_record: vec![],
      parse_level: 0,
      restrictions: ParserRestrictions { no_struct_literal: false },
@@ -257,17 +258,15 @@ impl<'a> Parser<'a> {
     }
   }
 
+  pub fn add_new_tokens(&mut self, new_tokens: Vec<Token>) {
+    self.token_handler = TokenHandler::new(new_tokens);
+  }
+
   /// Parse all loaded tokens up to this point.
   pub fn parse(&mut self) -> ParseResult<AST> {
     self.program()
   }
 
-  /*
-  pub fn parse_with_new_tokens(&mut self, new_tokens: Vec<Token>) -> ParseResult<AST> {
-
-  }
-  */
-
   pub fn format_parse_trace(&self) -> String {
     let mut buf = String::new();
     buf.push_str("Parse productions:\n");
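
Note (not part of the commit): the new add_new_tokens/parse pair is what lets a single Parser, and the ItemIdStore inside it, outlive one input. A minimal sketch of the intended call pattern, assuming the Rc<RefCell<SourceMap>> alias inferred above; the input strings are made up for illustration:

    use std::cell::RefCell;
    use std::rc::Rc;

    let source_map = Rc::new(RefCell::new(SourceMap::new()));
    let mut parser = Parser::new(source_map.clone());

    // The prelude and later REPL lines go through the same parser,
    // so ItemIds keep counting up from one shared id_store.
    for input in ["let x = 1", "x + 1"] {         // hypothetical inputs
        parser.add_new_tokens(tokenize(input));   // swap in a fresh token stream
        let _ast = parser.parse();                // ParseResult<AST>; ids stay unique
    }
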
@@ -346,7 +345,7 @@ macro_rules! delimited {
 }
 
 
-impl<'a> Parser<'a> {
+impl Parser {
   /// `program := (statement delimiter)* EOF`
   /// `delimiter := NEWLINE | ';'`
   #[recursive_descent_method]
@@ -383,7 +382,7 @@ impl<'a> Parser<'a> {
       _ => self.expression().map(|expr| { StatementKind::Expression(expr) } ),
     }?;
     let id = self.id_store.fresh();
-    self.source_map.add_location(&id, tok.location);
+    self.source_map.borrow_mut().add_location(&id, tok.location);
     Ok(Statement { kind, id })
   }
 
@@ -1,9 +1,9 @@
 #![cfg(test)]
-use ::std::rc::Rc;
+use std::cell::RefCell;
+use std::rc::Rc;
 use std::str::FromStr;
 
-use super::tokenize;
-use super::ParseResult;
+use super::{Parser, ParseResult, tokenize};
 use crate::ast::*;
 use super::Declaration::*;
 use super::Signature;
@@ -13,10 +13,17 @@ use super::ExpressionKind::*;
 use super::Variant::*;
 use super::ForBody::*;
 
-fn parse(input: &str) -> ParseResult<AST> {
-  let mut source_map = crate::source_map::SourceMap::new();
+fn make_parser(input: &str) -> Parser {
+  let source_map = crate::source_map::SourceMap::new();
+  let source_map_handle = Rc::new(RefCell::new(source_map));
   let tokens: Vec<crate::tokenizing::Token> = tokenize(input);
-  let mut parser = super::Parser::new(tokens, &mut source_map);
+  let mut parser = super::Parser::new(source_map_handle);
+  parser.add_new_tokens(tokens);
+  parser
+}
+
+fn parse(input: &str) -> ParseResult<AST> {
+  let mut parser = make_parser(input);
   parser.parse()
 }
 
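
Note (not part of the commit): splitting the old helper into make_parser and parse keeps the setup boilerplate in one place and lets individual productions be exercised directly, which is what the ex! and exst! macros in the next hunks rely on. A hypothetical test showing the shape of that usage; the input string is illustrative only:

    #[test]
    fn parses_a_single_expression() {
        let mut parser = make_parser("3 + 4");      // hypothetical input
        let _expr = parser.expression().unwrap();   // exercise one production, not parse()
        // a real test would assert on the shape of `_expr`, as ex!/exst! do
    }
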
@@ -75,9 +82,7 @@ macro_rules! ex {
   ($expr_type:expr, $type_anno:expr) => { Expression::with_anno(ItemIdStore::new_id(), $expr_type, $type_anno) };
   (s $expr_text:expr) => {
     {
-      let mut source_map = crate::source_map::SourceMap::new();
-      let tokens: Vec<crate::tokenizing::Token> = tokenize($expr_text);
-      let mut parser = super::Parser::new(tokens, &mut source_map);
+      let mut parser = make_parser($expr_text);
       parser.expression().unwrap()
     }
   };
@@ -100,9 +105,7 @@ macro_rules! exst {
   };
   (s $statement_text:expr) => {
     {
-      let mut source_map = crate::source_map::SourceMap::new();
-      let tokens: Vec<crate::tokenizing::Token> = tokenize($statement_text);
-      let mut parser = super::Parser::new(tokens, &mut source_map);
+      let mut parser = make_parser($statement_text);
       parser.statement().unwrap()
     }
   }
@@ -25,7 +25,7 @@ pub struct Schala {
   symbol_table: SymbolTableHandle,
   resolver: crate::scope_resolution::ScopeResolver<'static>,
   type_context: typechecking::TypeContext<'static>,
-  active_parser: Option<parsing::Parser<'static>>,
+  active_parser: parsing::Parser,
 }
 
 impl Schala {
@@ -49,7 +49,7 @@ impl Schala {
       resolver: crate::scope_resolution::ScopeResolver::new(symbols.clone()),
       state: eval::State::new(symbols),
       type_context: typechecking::TypeContext::new(),
-      active_parser: None,
+      active_parser: parsing::Parser::new(source_map)
     }
   }
 
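
Note (not part of the commit): with active_parser no longer an Option, the parser is built exactly once, when the Schala instance is created, and the handle it holds is a cheap Rc clone of the source map the rest of the pipeline already reaches through its own handle. A sketch of that wiring under the assumed alias; the helper function is hypothetical, only the two constructor calls mirror the diff:

    use std::cell::RefCell;
    use std::rc::Rc;

    // Hypothetical helper mirroring what Schala::new now does for the parser:
    // build one shared handle, keep one clone, hand the other to the parser.
    fn make_source_map_and_parser() -> (crate::schala::SourceMapHandle, crate::parsing::Parser) {
        let source_map = Rc::new(RefCell::new(crate::source_map::SourceMap::new()));
        let parser = crate::parsing::Parser::new(source_map.clone());
        (source_map, parser)
    }
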
@@ -106,11 +106,10 @@ fn tokenizing(input: &str, _handle: &mut Schala, comp: Option<&mut PassDebugArti
 }
 
 fn parsing(input: Vec<tokenizing::Token>, handle: &mut Schala, comp: Option<&mut PassDebugArtifact>) -> Result<ast::AST, String> {
-  use crate::parsing::Parser;
   use ParsingDebugType::*;
 
-  let ref mut source_map = handle.source_map.borrow_mut();
-  let mut parser = handle.active_parser.take().unwrap_or_else(|| Parser::new(input, source_map));
+  let ref mut parser = handle.active_parser;
+  parser.add_new_tokens(input);
   let ast = parser.parse();
 
   comp.map(|comp| {
@@ -49,10 +49,17 @@ impl<'a, T, V> ScopeStack<'a, T, V> where T: Hash + Eq {
 /// this is intended for use in tests, and does no error-handling whatsoever
 #[allow(dead_code)]
 pub fn quick_ast(input: &str) -> (crate::ast::AST, crate::source_map::SourceMap) {
-  let mut source_map = crate::source_map::SourceMap::new();
+  use std::cell::RefCell;
+  use std::rc::Rc;
+
+  let source_map = crate::source_map::SourceMap::new();
+  let source_map_handle = Rc::new(RefCell::new(source_map));
   let tokens = crate::tokenizing::tokenize(input);
-  let mut parser = crate::parsing::Parser::new(tokens, &mut source_map);
-  (parser.parse().unwrap(), source_map)
+  let mut parser = crate::parsing::Parser::new(source_map_handle.clone());
+  parser.add_new_tokens(tokens);
+  let output = parser.parse();
+  std::mem::drop(parser);
+  (output.unwrap(), Rc::try_unwrap(source_map_handle).map_err(|_| ()).unwrap().into_inner())
 }
 
 #[allow(unused_macros)]
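
Note (not part of the commit): one subtlety in quick_ast above is that std::mem::drop(parser) is what makes the Rc::try_unwrap succeed. The parser holds the only other clone of the handle, and try_unwrap only yields the inner value once the strong count is back to one. A standalone illustration of that behavior using only std types:

    use std::cell::RefCell;
    use std::rc::Rc;

    fn main() {
        let handle = Rc::new(RefCell::new(String::from("source map stand-in")));
        let second = Rc::clone(&handle);   // e.g. the clone held by the parser

        // Two strong references: try_unwrap refuses and hands the Rc back.
        let handle = Rc::try_unwrap(handle).unwrap_err();

        drop(second);                      // analogous to std::mem::drop(parser)

        // Count is one again, so the RefCell (and its contents) can be moved out.
        let inner = Rc::try_unwrap(handle).map_err(|_| ()).unwrap().into_inner();
        assert_eq!(inner, "source map stand-in");
    }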