Compare commits


No commits in common. "3e422291f49e829646401ad8c03c82174cd0b160" and "e6a9811ee5da22aa3f668529f401f738e84d42ba" have entirely different histories.

11 changed files with 54 additions and 1274 deletions

Cargo.lock (generated)

@ -110,12 +110,6 @@ dependencies = [
"constant_time_eq", "constant_time_eq",
] ]
[[package]]
name = "bytecount"
version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72feb31ffc86498dacdbd0fcebb56138e7177a8cc5cea4516031d15ae85a742e"
[[package]] [[package]]
name = "byteorder" name = "byteorder"
version = "1.3.2" version = "1.3.2"
@ -198,13 +192,13 @@ dependencies = [
 [[package]]
 name = "derivative"
-version = "2.2.0"
+version = "1.0.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b"
+checksum = "942ca430eef7a3806595a6737bc388bf51adb888d3fc0dd1b50f1c170167ee3a"
 dependencies = [
- "proc-macro2 1.0.30",
- "quote 1.0.10",
- "syn 1.0.80",
+ "proc-macro2 0.4.30",
+ "quote 0.6.13",
+ "syn 0.15.44",
 ]
 [[package]]
@ -403,15 +397,9 @@ checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d"
 [[package]]
 name = "memchr"
-version = "2.4.1"
+version = "2.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a"
+checksum = "88579771288728879b57485cc7d6b07d648c9f0141eb955f8ab7f9d45394468e"
-[[package]]
-name = "minimal-lexical"
-version = "0.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
 [[package]]
 name = "miniz-sys"
@ -474,28 +462,6 @@ dependencies = [
"version_check 0.1.5", "version_check 0.1.5",
] ]
[[package]]
name = "nom"
version = "7.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b1d11e1ef389c76fe5b81bcaf2ea32cf88b62bc494e19f493d0b30e7a930109"
dependencies = [
"memchr",
"minimal-lexical",
"version_check 0.9.3",
]
[[package]]
name = "nom_locate"
version = "4.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37794436ca3029a3089e0b95d42da1f0b565ad271e4d3bb4bad0c7bb70b10605"
dependencies = [
"bytecount",
"memchr",
"nom 7.1.0",
]
[[package]] [[package]]
name = "num" name = "num"
version = "0.1.42" version = "0.1.42"
@ -953,8 +919,6 @@ dependencies = [
"ena", "ena",
"failure", "failure",
"itertools", "itertools",
"nom 7.1.0",
"nom_locate",
"peg", "peg",
"pretty_assertions", "pretty_assertions",
"radix_trie", "radix_trie",
@ -1104,7 +1068,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e51065bafd2abe106b6036483b69d1741f4a1ec56ce8a2378de341637de689e" checksum = "8e51065bafd2abe106b6036483b69d1741f4a1ec56ce8a2378de341637de689e"
dependencies = [ dependencies = [
"fnv", "fnv",
"nom 4.2.3", "nom",
"phf", "phf",
"phf_codegen", "phf_codegen",
] ]

(next file)

@ -10,14 +10,12 @@ take_mut = "0.2.2"
failure = "0.1.5" failure = "0.1.5"
ena = "0.11.0" ena = "0.11.0"
stopwatch = "0.0.7" stopwatch = "0.0.7"
derivative = "2.2.0" derivative = "1.0.3"
colored = "1.8" colored = "1.8"
radix_trie = "0.1.5" radix_trie = "0.1.5"
assert_matches = "1.5" assert_matches = "1.5"
#peg = "0.7.0" #peg = "0.7.0"
peg = { git = "https://github.com/kevinmehall/rust-peg", rev = "960222580c8da25b17d32c2aae6f52f902728b62" } peg = { git = "https://github.com/kevinmehall/rust-peg", rev = "960222580c8da25b17d32c2aae6f52f902728b62" }
nom = "7.1.0"
nom_locate = "4.0.0"
schala-repl = { path = "../schala-repl" } schala-repl = { path = "../schala-repl" }

(next file)

@ -143,15 +143,10 @@ pub struct Signature {
 }
 //TODO I can probably get rid of TypeBody
-#[derive(Debug, Derivative, Clone)]
-#[derivative(PartialEq)]
+#[derive(Debug, PartialEq, Clone)]
 pub enum TypeBody {
     Variants(Vec<Variant>),
-    ImmediateRecord {
-        #[derivative(PartialEq = "ignore")]
-        id: ItemId,
-        fields: Vec<(Rc<String>, TypeIdentifier)>,
-    },
+    ImmediateRecord(ItemId, Vec<(Rc<String>, TypeIdentifier)>),
 }
 #[derive(Debug, Derivative, Clone)]
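Context for the hunk above: the left-hand side uses the derivative crate's #[derivative(PartialEq)] together with #[derivative(PartialEq = "ignore")] so that the ItemId field is excluded from equality comparisons, which is why tests can plug in Default::default() for it. Below is a minimal sketch of roughly that equality behavior, using simplified stand-in types rather than the crate's real ItemId / TypeIdentifier / Variant definitions:

    // Simplified stand-ins; the real ItemId and field types live elsewhere in the AST.
    #[derive(Debug, Clone, Default, PartialEq)]
    struct ItemId(u32);

    #[derive(Debug, Clone)]
    enum TypeBody {
        Variants(Vec<String>),
        ImmediateRecord { id: ItemId, fields: Vec<(String, String)> },
    }

    // Hand-written approximation of what #[derivative(PartialEq = "ignore")] provides:
    // the `id` field does not participate in comparisons.
    impl PartialEq for TypeBody {
        fn eq(&self, other: &Self) -> bool {
            use TypeBody::*;
            match (self, other) {
                (Variants(a), Variants(b)) => a == b,
                (ImmediateRecord { fields: a, .. }, ImmediateRecord { fields: b, .. }) => a == b,
                _ => false,
            }
        }
    }

    fn main() {
        let x = TypeBody::ImmediateRecord { id: ItemId(1), fields: vec![("a".into(), "Int".into())] };
        let y = TypeBody::ImmediateRecord { id: ItemId(2), fields: vec![("a".into(), "Int".into())] };
        assert!(x == y); // the ids differ, but equality ignores them
    }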

(next file)

@ -38,7 +38,6 @@ where T: IdKind
     }
 }
-#[derive(Debug)]
 pub struct IdStore<T>
 where T: IdKind
 {

File diff suppressed because it is too large.

(next file)

@ -1,12 +1,9 @@
 #![allow(clippy::upper_case_acronyms)]
-pub mod combinator;
 mod peg_parser;
 mod test;
-use std::{cell::RefCell, fmt, rc::Rc};
-use combinator::Span;
+use std::fmt;
 #[cfg(test)]
 use crate::ast::{Block, Expression};
@ -15,82 +12,31 @@ use crate::{
     identifier::{Id, IdStore},
 };
-pub(crate) type StoreRef = Rc<RefCell<IdStore<ASTItem>>>;
 pub struct Parser {
-    id_store: StoreRef,
-    use_combinator: bool,
+    id_store: IdStore<ASTItem>,
 }
 impl Parser {
     pub(crate) fn new() -> Self {
-        let id_store: IdStore<ASTItem> = IdStore::new();
-        Self { id_store: Rc::new(RefCell::new(id_store)), use_combinator: true }
+        Self { id_store: IdStore::new() }
     }
     pub(crate) fn parse(&mut self, input: &str) -> Result<AST, ParseError> {
-        if self.use_combinator {
-            self.parse_comb(input)
-        } else {
-            self.parse_peg(input)
-        }
-    }
-    pub(crate) fn parse_peg(&mut self, input: &str) -> Result<AST, ParseError> {
         peg_parser::schala_parser::program(input, self).map_err(ParseError::from_peg)
     }
-    pub(crate) fn parse_comb(&mut self, input: &str) -> Result<AST, ParseError> {
-        let span = Span::new_extra(input, self.id_store.clone());
-        convert(input, combinator::program(span))
-    }
     #[cfg(test)]
     fn expression(&mut self, input: &str) -> Result<Expression, ParseError> {
         peg_parser::schala_parser::expression(input, self).map_err(ParseError::from_peg)
     }
-    #[cfg(test)]
-    fn expression_comb(&mut self, input: &str) -> Result<Expression, ParseError> {
-        let span = Span::new_extra(input, self.id_store.clone());
-        convert(input, combinator::expression(span))
-    }
     #[cfg(test)]
     fn block(&mut self, input: &str) -> Result<Block, ParseError> {
         peg_parser::schala_parser::block(input, self).map_err(ParseError::from_peg)
     }
-    #[cfg(test)]
-    fn block_comb(&mut self, input: &str) -> Result<Block, ParseError> {
-        let span = Span::new_extra(input, self.id_store.clone());
-        convert(input, combinator::block(span))
-    }
     fn fresh(&mut self) -> Id<ASTItem> {
-        self.id_store.borrow_mut().fresh()
+        self.id_store.fresh()
     }
 }
-fn convert<'a, O>(input: &'a str, result: combinator::ParseResult<'a, O>) -> Result<O, ParseError> {
-    use nom::{error::VerboseError, Finish};
-    match result.finish() {
-        Ok((rest, output)) => {
-            if rest.fragment() != &"" {
-                return Err(ParseError {
-                    location: Default::default(),
-                    msg: format!("Bad parse state, remaining text: `{}`", rest.fragment()),
-                });
-            }
-            Ok(output)
-        }
-        Err(err) => {
-            let err = VerboseError {
-                errors: err.errors.into_iter().map(|(sp, kind)| (*sp.fragment(), kind)).collect(),
-            };
-            let msg = nom::error::convert_error(input, err);
-            Err(ParseError { msg, location: (0).into() })
-        }
-    }
-}
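One note on the hunk above: the left-hand Parser shares its id store through Rc<RefCell<...>> so that the nom Span can carry a cloned handle (Span::new_extra(input, self.id_store.clone())), which is why fresh() goes through borrow_mut(); the right-hand, peg-only Parser owns the store outright. A minimal sketch of the two ownership patterns, with a simplified stand-in IdStore in place of the crate's IdStore<ASTItem>:

    use std::{cell::RefCell, rc::Rc};

    // Simplified stand-in for the crate's id store.
    #[derive(Debug, Default)]
    struct IdStore {
        next: u32,
    }

    impl IdStore {
        fn fresh(&mut self) -> u32 {
            let id = self.next;
            self.next += 1;
            id
        }
    }

    // Left-hand pattern: the store is shared (a clone of the Rc rides along in the
    // combinator's Span), so mutation goes through RefCell::borrow_mut().
    struct SharedParser {
        id_store: Rc<RefCell<IdStore>>,
    }

    impl SharedParser {
        fn fresh(&mut self) -> u32 {
            self.id_store.borrow_mut().fresh()
        }
    }

    // Right-hand pattern: with only the peg backend left, the parser owns the store
    // and hands out ids directly.
    struct OwningParser {
        id_store: IdStore,
    }

    impl OwningParser {
        fn fresh(&mut self) -> u32 {
            self.id_store.fresh()
        }
    }

    fn main() {
        let mut shared = SharedParser { id_store: Rc::new(RefCell::new(IdStore::default())) };
        let handle = shared.id_store.clone(); // the kind of handle a Span's extra field would hold
        assert_eq!(shared.fresh(), 0);
        assert_eq!(handle.borrow_mut().fresh(), 1); // same underlying counter

        let mut owned = OwningParser { id_store: IdStore::default() };
        assert_eq!(owned.fresh(), 0);
    }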

(next file)

@ -80,6 +80,7 @@ peg::parser! {
"::*" { ImportedNames::All } / "::*" { ImportedNames::All } /
"::{" __ names:(identifier() ** (_ "," _)) __ "}" { ImportedNames::List(names.into_iter().map(rc_string).collect()) } "::{" __ names:(identifier() ** (_ "," _)) __ "}" { ImportedNames::List(names.into_iter().map(rc_string).collect()) }
rule declaration(parser: &mut Parser) -> Declaration = rule declaration(parser: &mut Parser) -> Declaration =
binding(parser) / type_decl(parser) / annotation(parser) / func(parser) / interface(parser) / binding(parser) / type_decl(parser) / annotation(parser) / func(parser) / interface(parser) /
implementation(parser) / module(parser) implementation(parser) / module(parser)
@ -164,7 +165,7 @@ peg::parser! {
         singleton:type_singleton_name() { TypeIdentifier::Singleton(singleton) }
     rule type_body(parser: &mut Parser) -> TypeBody =
-        "{" _ items:(record_variant_item() ** (__ "," __)) __ "}" { TypeBody::ImmediateRecord { id: parser.fresh(), fields: items } } /
+        "{" _ items:(record_variant_item() ** (__ "," __)) __ "}" { TypeBody::ImmediateRecord(parser.fresh(), items) } /
         variants:(variant_spec(parser) ** (__ "|" __)) { TypeBody::Variants(variants) }
     rule variant_spec(parser: &mut Parser) -> Variant =

(next file)

@ -88,30 +88,20 @@ fn ty_simple(name: &str) -> TypeIdentifier {
 macro_rules! assert_ast {
     ($input:expr, $statements:expr) => {
         let mut parser = Parser::new();
-        let ast = parser.parse_comb($input);
-        let ast2 = parser.parse_peg($input);
+        let ast = parser.parse($input);
         let expected = AST { id: Default::default(), statements: $statements.into() };
-        let ast = match ast {
-            Err(err) => {
-                println!("Parse error: {}", err.msg);
+        if ast.is_err() {
+            println!("Parse error: {}", ast.unwrap_err().msg);
             panic!();
         }
-            Ok(ast) => ast,
-        };
-        assert_eq!(ast, ast2.unwrap());
-        assert_eq!(ast, expected);
+        assert_eq!(ast.unwrap(), expected);
     };
 }
 macro_rules! assert_fail {
-    ($input:expr) => {
-        let mut parser = Parser::new();
-        let _err = parser.parse_comb($input).unwrap_err();
-    };
     ($input:expr, $failure:expr) => {
         let mut parser = Parser::new();
-        let err = parser.parse_comb($input).unwrap_err();
-        println!("assert_fail: {}", err.msg);
+        let err = parser.parse($input).unwrap_err();
         assert_eq!(err.msg, $failure);
     };
 }
@ -119,47 +109,23 @@ macro_rules! assert_fail {
 macro_rules! assert_expr {
     ($input:expr, $correct:expr) => {
         let mut parser = Parser::new();
-        let expr = parser.expression_comb($input.trim_start());
-        let expr2 = parser.expression($input.trim_start());
-        let expr = match expr {
-            Err(err) => {
-                println!("Expression parse error: {}", err.msg);
+        let expr = parser.expression($input);
+        if expr.is_err() {
+            println!("Expression parse error: {}", expr.unwrap_err().msg);
             panic!();
         }
-            Ok(expr) => expr,
-        };
-        assert_eq!(expr, expr2.unwrap());
-        assert_eq!(expr, $correct);
+        assert_eq!(expr.unwrap(), $correct);
     };
 }
 macro_rules! assert_fail_expr {
     ($input:expr, $failure:expr) => {
         let mut parser = Parser::new();
-        let _err = parser.expression_comb($input).unwrap_err();
+        let _err = parser.expression($input).unwrap_err();
         //TODO make real tests for failures
         //assert_eq!(err.to_string(), $failure);
     };
 }
-macro_rules! assert_block {
-    ($input:expr, $correct:expr) => {
-        let mut parser = Parser::new();
-        let block = parser.block_comb($input);
-        let block2 = parser.block($input);
-        let block = match block {
-            Err(err) => {
-                println!("Expression parse error: {}", err.msg);
-                panic!();
-            }
-            Ok(item) => item,
-        };
-        assert_eq!(block, block2.unwrap());
-        assert_eq!(block, $correct);
-    };
-}
 #[test]
 fn basic_literals() {
     use ExpressionKind::*;
@ -170,7 +136,7 @@ fn basic_literals() {
assert_expr!("0b0_1_0", expr(NatLiteral(2))); assert_expr!("0b0_1_0", expr(NatLiteral(2)));
assert_expr!("0xff", expr(NatLiteral(255))); assert_expr!("0xff", expr(NatLiteral(255)));
assert_expr!("0x032f", expr(NatLiteral(815))); assert_expr!("0x032f", expr(NatLiteral(815)));
assert_expr!("0xf_f", expr(NatLiteral(255))); assert_expr!("0xf_f_", expr(NatLiteral(255)));
assert_expr!("false", expr(BoolLiteral(false))); assert_expr!("false", expr(BoolLiteral(false)));
assert_expr!("true", expr(BoolLiteral(true))); assert_expr!("true", expr(BoolLiteral(true)));
} }
@ -183,7 +149,7 @@ fn string_literals() {
assert_expr!(r#""hello""#, expr(StringLiteral(rc("hello")))); assert_expr!(r#""hello""#, expr(StringLiteral(rc("hello"))));
assert_expr!(r#"b"some bytestring""#, expr(StringLiteral(rc("some bytestring")))); assert_expr!(r#"b"some bytestring""#, expr(StringLiteral(rc("some bytestring"))));
//NOTE I'm not 100% sure this case is correct, but I'll deal with it later //NOTE I'm not 100% sure this case is correct, but I'll deal with it later
//assert_expr!(r#""Do \n \" escapes work\t""#, expr(StringLiteral(rc("Do \n \" escapes work\t")))); assert_expr!(r#""Do \n \" escapes work\t""#, expr(StringLiteral(rc(r#"Do \n \" escapes work\t"#))));
} }
#[test] #[test]
@ -192,7 +158,6 @@ fn list_literals() {
assert_expr!("[]", expr(ListLiteral(vec![]))); assert_expr!("[]", expr(ListLiteral(vec![])));
assert_expr!("[1,2]", expr(ListLiteral(vec![expr(NatLiteral(1)), expr(NatLiteral(2)),]))); assert_expr!("[1,2]", expr(ListLiteral(vec![expr(NatLiteral(1)), expr(NatLiteral(2)),])));
assert_expr!("[1, /*no*/2]", expr(ListLiteral(vec![expr(NatLiteral(1)), expr(NatLiteral(2)),])));
assert_fail_expr!("[1,,2]", "some failure"); assert_fail_expr!("[1,,2]", "some failure");
} }
@ -201,7 +166,7 @@ fn binexps() {
     use ExpressionKind::*;
     use StatementKind::Expression;
-    assert_expr!("0xf_f+1", binop("+", expr(NatLiteral(255)), expr(NatLiteral(1))));
+    assert_expr!("0xf_f_+1", binop("+", expr(NatLiteral(255)), expr(NatLiteral(1))));
     assert_ast!(
         "3; 4; 4.3",
         vec![
@ -384,7 +349,7 @@ fn index() {
 fn while_expression() {
     use ExpressionKind::*;
-    // assert_expr_comb!("while { }", expr(WhileExpression { condition: None, body: Block::default() }));
+    assert_expr!("while { }", expr(WhileExpression { condition: None, body: Block::default() }));
     assert_expr!(
         "while a == b { }",
         expr(WhileExpression {
@ -552,10 +517,7 @@ fn complex_lambdas() {
 #[test]
 fn reserved_words() {
-    let err = "0: at line 1, in Verify:\nmodule::item::call()\n^\n\n1: at line 1, in token:\nmodule::item::call()\n^\n\n2: at line 1, in identifier:\nmodule::item::call()\n^\n\n3: at line 1, in token:\nmodule::item::call()\n^\n\n4: at line 1, in primary-expr-no-struct:\nmodule::item::call()\n^\n\n5: at line 1, in primary-expr:\nmodule::item::call()\n^\n\n6: at line 1, in extended-expr:\nmodule::item::call()\n^\n\n7: at line 1, in prefix-expr:\nmodule::item::call()\n^\n\n8: at line 1, in expression-kind:\nmodule::item::call()\n^\n\n9: at line 1, in Parsing-statement:\nmodule::item::call()\n^\n\n10: at line 1, in AST:\nmodule::item::call()\n^\n\n";
-    assert_fail!("module::item::call()", err);
-    assert_expr!("modulek::item", expr(ExpressionKind::Value(qn!(modulek, item))));
+    assert_fail!("module::item::call()", "error at 1:7: expected ['a' ..= 'z' | 'A' ..= 'Z' | '_']");
 }
#[test] #[test]
@ -673,10 +635,10 @@ fn type_declarations() {
         vec![decl(TypeDecl {
             name: TypeSingletonName { name: rc("Alpha"), params: vec![] },
             mutable: false,
-            body: TypeBody::ImmediateRecord { id: Default::default(), fields: vec![
+            body: TypeBody::ImmediateRecord(Default::default(), vec![
                 (rc("a"), ty_simple("Int")),
                 (rc("b"), ty_simple("Int"))
-            ]}
+            ])
         })]
     };
@ -819,8 +781,7 @@ fn functions() {
             ]
         })
     );
-    let err_msg = "0: at line 1, in Alpha:\na(b,,c)\n ^\n\n1: at line 1, in Alt:\na(b,,c)\n ^\n\n2: at line 1, in token:\na(b,,c)\n ^\n\n3: at line 1, in identifier-expr:\na(b,,c)\n ^\n\n4: at line 1, in Alt:\na(b,,c)\n ^\n\n5: at line 1, in primary-expr:\na(b,,c)\n ^\n\n6: at line 1, in extended-expr:\na(b,,c)\n ^\n\n7: at line 1, in prefix-expr:\na(b,,c)\n ^\n\n8: at line 1, in expression-kind:\na(b,,c)\n ^\n\n9: at line 1, in Alt:\na(b,,c)\n ^\n\n10: at line 1, in invocation-argument:\na(b,,c)\n ^\n\n11: at line 1, in call-part:\na(b,,c)\n ^\n\n12: at line 1, in extended-expr:\na(b,,c)\n^\n\n13: at line 1, in prefix-expr:\na(b,,c)\n^\n\n14: at line 1, in expression-kind:\na(b,,c)\n^\n\n15: at line 1, in Parsing-statement:\na(b,,c)\n^\n\n16: at line 1, in AST:\na(b,,c)\n^\n\n";
-    assert_fail!("a(b,,c)", err_msg);
+    assert_fail!("a(b,,c)","error at 1:5: expected one of \"(\", \".\", \"0b\", \"0x\", \"[\", \"\\\"\", \"_\", \"false\", \"for\", \"if\", \"true\", \"while\", ['+' | '-' | '!'], ['0' ..= '9'], ['a' ..= 'z' | 'A' ..= 'Z' | '_'], r#\"\\\"#");
     assert_ast!(
         "fn a(b, c: Int): Int",
@ -864,8 +825,8 @@ fn max_function_params() {
     }
     write!(buf, ") {{ return 20 }}").unwrap();
     //assert_fail!(&buf, "A function cannot have more than 255 arguments");
-    //TODO need to create a good, custom error message for this case
-    assert_fail!(&buf);
+    //TODO better errors again
+    assert_fail!(&buf, "error at 1:1439: expected ['a' ..= 'z' | 'A' ..= 'Z' | '_']");
 }
#[test] #[test]
@ -1088,7 +1049,6 @@ fn imports() {
         }))]
     };
-    //TODO this shouldn't be legal
     assert_ast! {
         "import bespouri::{}",
         vec![stmt(StatementKind::Import(ImportSpecifier {
@ -1122,7 +1082,6 @@ fn if_exprs() {
         })
     );
-    //TODO add tests for named expressions
     assert_expr!(
         "if a then b else c",
         expr(IfExpression {
@ -1135,7 +1094,8 @@ fn if_exprs() {
     );
     assert_expr!(
-        r#"if true then {
+        r#"
+if true then {
 let a = 10
 b
 } else {
@ -1303,7 +1263,7 @@ if (45, "panda", false, 2.2) {
 fn flow_control() {
     use ExpressionKind::*;
-    // This is an incorrect program, but should parse correctly.
+    // This is an incorrect program, but shoudl parse correctly.
     let source = r#"
 fn test() {
 let a = 10;
@ -1340,8 +1300,10 @@ fn blocks() {
let cases = ["{ a }", "{ a; }", "{a}", "{ a\n }", "{ a\n\n }", "{ a;\n\n; }"]; let cases = ["{ a }", "{ a; }", "{a}", "{ a\n }", "{ a\n\n }", "{ a;\n\n; }"];
let mut parser = Parser::new();
for case in cases.iter() { for case in cases.iter() {
assert_block!(case, vec![exst(Value(qn!(a)))].into()); let block = parser.block(case);
assert_eq!(block.unwrap(), vec![exst(Value(qn!(a)))].into());
} }
let source = r#"{ let source = r#"{
@ -1349,8 +1311,9 @@ fn blocks() {
 fn foo() { }
 }
 }"#;
-    assert_block!(
-        source,
+    let block = parser.block(source);
+    assert_eq!(
+        block.unwrap(),
         vec![decl(Declaration::FuncDecl(
             Signature { name: rc("quah"), operator: false, params: vec![], type_anno: None },
             vec![decl(Declaration::FuncDecl(
@ -1361,50 +1324,6 @@ fn blocks() {
         ))]
         .into()
     );
-    assert_block!("{}", vec![].into());
-    //TODO this case is broken in the peg version
-    let source = r#"{
-//hella
-4_5 //bog
-11; /*chutney*/0xf
-}"#;
-    let mut parser = Parser::new();
-    let block = parser.block_comb(source);
-    assert_eq!(
-        block.unwrap(),
-        vec![
-            Statement {
-                id: Default::default(),
-                location: Default::default(),
-                kind: StatementKind::Expression(Expression::new(
-                    Default::default(),
-                    ExpressionKind::NatLiteral(45)
-                ))
-            },
-            Statement {
-                id: Default::default(),
-                location: Default::default(),
-                kind: StatementKind::Expression(Expression::new(
-                    Default::default(),
-                    ExpressionKind::NatLiteral(11)
-                ))
-            },
-            Statement {
-                id: Default::default(),
-                location: Default::default(),
-                kind: StatementKind::Expression(Expression::new(
-                    Default::default(),
-                    ExpressionKind::NatLiteral(15)
-                ))
-            },
-        ]
-        .into()
-    );
 }
#[test] #[test]
@ -1423,9 +1342,6 @@ fn comments() {
let source = "5//no man\n"; let source = "5//no man\n";
assert_ast!(source, vec![exst(NatLiteral(5))]); assert_ast!(source, vec![exst(NatLiteral(5))]);
let source = " /*yolo*/ barnaby";
assert_ast!(source, exst(ExpressionKind::Value(qn!(barnaby))));
} }
//TODO support backtick operators like this //TODO support backtick operators like this

(next file)

@ -194,7 +194,7 @@ impl<'a> SymbolTablePopulator<'a> {
     ) -> Vec<SymbolError> {
         let (variants, immediate_variant) = match type_body {
             TypeBody::Variants(variants) => (variants.clone(), false),
-            TypeBody::ImmediateRecord { id, fields } => (
+            TypeBody::ImmediateRecord(id, fields) => (
                 vec![Variant {
                     id: *id,
                     name: type_name.name.clone(),

(next file)

@ -95,9 +95,8 @@ trad()"#,
"30", "30",
); );
//TODO this shouldn't depend on details of id assignment
let err = let err =
"No symbol found for name: QualifiedName { id: Id { idx: 22, t: PhantomData }, components: [\"a\"] }"; "No symbol found for name: QualifiedName { id: Id { idx: 9, t: PhantomData }, components: [\"a\"] }";
eval_assert_failure( eval_assert_failure(
r#" r#"

(next file)

@ -54,13 +54,7 @@ where T: Hash + Eq
 pub fn quick_ast(input: &str) -> crate::ast::AST {
     let mut parser = crate::parsing::Parser::new();
     let output = parser.parse(input);
-    match output {
-        Ok(output) => output,
-        Err(err) => {
-            println!("Parse error: {}", err.msg);
-            panic!();
-        }
-    }
+    output.unwrap()
 }
 #[allow(unused_macros)]