Compare commits

...

50 Commits

Author SHA1 Message Date
Greg Shuflin  3e422291f4  assert_block! macro  2021-11-20 23:00:49 -08:00
Greg Shuflin  bba433c808  Cleanup warnings  2021-11-20 22:55:11 -08:00
Greg Shuflin  194cb2202a  Fixed bug  2021-11-20 22:46:48 -08:00
Greg Shuflin  5a38ff8f41  Fix parameterized block  2021-11-20 22:36:00 -08:00
Greg Shuflin  de13e69769  Add context  2021-11-20 22:23:34 -08:00
Greg Shuflin  8f3c982131  Parameterize block  2021-11-20 22:19:59 -08:00
Greg Shuflin  e5b6f2bc2f  Delimiter  2021-11-20 20:55:26 -08:00
Greg Shuflin  b760ec7eca  Cut  2021-11-20 20:52:48 -08:00
Greg Shuflin  94db2ea17f  Add toknl thing  2021-11-20 19:42:50 -08:00
Greg Shuflin  02ead69a44  WIP fix last few bugs  2021-11-20 11:13:21 -08:00
Greg Shuflin  c1ef0ee506  More parser refactoring  2021-11-20 02:33:09 -08:00
Greg Shuflin  9a13848f80  Parsing refactors  2021-11-20 02:26:22 -08:00
Greg Shuflin  1b6a7021e7  Fixed all tests  2021-11-20 02:04:00 -08:00
Greg Shuflin  2c139df6dd  Got most tests passing  2021-11-20 01:26:31 -08:00
Greg Shuflin  7c3e924194  WIP last tests  2021-11-20 00:03:05 -08:00
Greg Shuflin  a41d808da3  More work  2021-11-19 23:04:00 -08:00
Greg Shuflin  eeec85c2b1  Fix other error  2021-11-19 20:43:37 -08:00
Greg Shuflin  ec5bf12a65  Fix test for reserved words error  2021-11-19 18:27:58 -08:00
Greg Shuflin  bb26d9e674  Force program() to consume all input  2021-11-19 10:00:16 -08:00
Greg Shuflin  219f5a183a  Various cleanup  2021-11-19 09:59:08 -08:00
Greg Shuflin  69d857e94d  Imports  2021-11-19 01:35:25 -08:00
Greg Shuflin  8365690860  Fixes  2021-11-19 01:08:07 -08:00
Greg Shuflin  7ae7eaa07b  Flow  2021-11-19 01:01:26 -08:00
Greg Shuflin  88d2571401  Lambdas  2021-11-19 00:52:00 -08:00
Greg Shuflin  721a499384  if exprs  2021-11-19 00:40:10 -08:00
Greg Shuflin  ec51659452  Make identifier a token  2021-11-18 22:21:52 -08:00
Greg Shuflin  44cebec818  For expr  2021-11-18 22:12:13 -08:00
Greg Shuflin  7e2b95593f  Named structs  2021-11-18 21:02:33 -08:00
Greg Shuflin  58a1782162  While expr  2021-11-18 20:08:46 -08:00
Greg Shuflin  6454cc5ad1  block template  2021-11-18 19:12:28 -08:00
Greg Shuflin  d5cd0dada7  Impl, interface  2021-11-18 19:01:09 -08:00
Greg Shuflin  65c745fb30  Annotations  2021-11-18 18:31:33 -08:00
Greg Shuflin  33573bf268  More updates, bump derivative version  2021-11-18 18:24:06 -08:00
Greg Shuflin  12a7fe3e3e  Type decl stuff  2021-11-18 17:16:39 -08:00
Greg Shuflin  7f3b4a727f  Decls  2021-11-18 16:45:26 -08:00
Greg Shuflin  7a8ab3d571  Full ast parsing  2021-11-18 15:17:47 -08:00
Greg Shuflin  b7b4e75f01  Extended exprs  2021-11-18 14:38:29 -08:00
Greg Shuflin  7a9e43bf8e  Various other expr stuff  2021-11-18 04:01:40 -08:00
Greg Shuflin  a666ac985b  List literal  2021-11-18 03:32:01 -08:00
Greg Shuflin  37e85c417e  String literal  2021-11-18 03:16:43 -08:00
Greg Shuflin  fc088923c0  More test work  2021-11-18 01:37:05 -08:00
Greg Shuflin  8ace37c5cf  Start hooking up comb to tests  2021-11-18 00:32:53 -08:00
Greg Shuflin  c1e6bc8c4c  MOre work  2021-11-17 16:44:09 -08:00
Greg Shuflin  d37be75478  Rustfmt  2021-11-17 12:45:55 -08:00
Greg Shuflin  f1ffeb155a  Get fresh IDs from span  2021-11-17 12:45:42 -08:00
Greg Shuflin  222e0aad08  Use location  2021-11-17 04:43:02 -08:00
Greg Shuflin  77030091bb  Add nom_locate  2021-11-17 04:38:00 -08:00
Greg Shuflin  b4b1a0cf63  various work  2021-11-17 03:40:43 -08:00
Greg Shuflin  a2d5f380a8  Continuing work on combinator  2021-11-17 03:27:05 -08:00
Greg Shuflin  1cdaaee9a6  Using nom parser  2021-11-17 01:04:11 -08:00
11 changed files with 1274 additions and 54 deletions

52
Cargo.lock generated
View File

@@ -110,6 +110,12 @@ dependencies = [
  "constant_time_eq",
 ]
 
+[[package]]
+name = "bytecount"
+version = "0.6.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "72feb31ffc86498dacdbd0fcebb56138e7177a8cc5cea4516031d15ae85a742e"
+
 [[package]]
 name = "byteorder"
 version = "1.3.2"
@@ -192,13 +198,13 @@ dependencies = [
 [[package]]
 name = "derivative"
-version = "1.0.3"
+version = "2.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "942ca430eef7a3806595a6737bc388bf51adb888d3fc0dd1b50f1c170167ee3a"
+checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b"
 dependencies = [
- "proc-macro2 0.4.30",
- "quote 0.6.13",
- "syn 0.15.44",
+ "proc-macro2 1.0.30",
+ "quote 1.0.10",
+ "syn 1.0.80",
 ]
 
 [[package]]
@@ -397,9 +403,15 @@ checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d"
 [[package]]
 name = "memchr"
-version = "2.2.1"
+version = "2.4.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "88579771288728879b57485cc7d6b07d648c9f0141eb955f8ab7f9d45394468e"
+checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a"
+
+[[package]]
+name = "minimal-lexical"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
 
 [[package]]
 name = "miniz-sys"
@@ -462,6 +474,28 @@ dependencies = [
  "version_check 0.1.5",
 ]
 
+[[package]]
+name = "nom"
+version = "7.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1b1d11e1ef389c76fe5b81bcaf2ea32cf88b62bc494e19f493d0b30e7a930109"
+dependencies = [
+ "memchr",
+ "minimal-lexical",
+ "version_check 0.9.3",
+]
+
+[[package]]
+name = "nom_locate"
+version = "4.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "37794436ca3029a3089e0b95d42da1f0b565ad271e4d3bb4bad0c7bb70b10605"
+dependencies = [
+ "bytecount",
+ "memchr",
+ "nom 7.1.0",
+]
+
 [[package]]
 name = "num"
 version = "0.1.42"
@@ -919,6 +953,8 @@ dependencies = [
  "ena",
  "failure",
  "itertools",
+ "nom 7.1.0",
+ "nom_locate",
  "peg",
  "pretty_assertions",
  "radix_trie",
@@ -1068,7 +1104,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "8e51065bafd2abe106b6036483b69d1741f4a1ec56ce8a2378de341637de689e"
 dependencies = [
  "fnv",
- "nom",
+ "nom 4.2.3",
  "phf",
  "phf_codegen",
 ]

View File

@@ -10,12 +10,14 @@ take_mut = "0.2.2"
 failure = "0.1.5"
 ena = "0.11.0"
 stopwatch = "0.0.7"
-derivative = "1.0.3"
+derivative = "2.2.0"
 colored = "1.8"
 radix_trie = "0.1.5"
 assert_matches = "1.5"
 #peg = "0.7.0"
 peg = { git = "https://github.com/kevinmehall/rust-peg", rev = "960222580c8da25b17d32c2aae6f52f902728b62" }
+nom = "7.1.0"
+nom_locate = "4.0.0"
 schala-repl = { path = "../schala-repl" }
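The new dependencies pair nom 7 with nom_locate 4, and the parser module further down builds its input as `Span::new_extra(input, self.id_store.clone())`. Below is a minimal, self-contained sketch of that pattern; the real `Span` alias lives in the suppressed combinator module, so the exact definition and the toy `StoreRef`/`let_binding` names here are assumptions, not the project's code.

use std::{cell::RefCell, rc::Rc};

use nom::{bytes::complete::tag, character::complete::alpha1, sequence::preceded, IResult};
use nom_locate::LocatedSpan;

// Hypothetical stand-in for the project's StoreRef = Rc<RefCell<IdStore<ASTItem>>>.
type StoreRef = Rc<RefCell<u32>>;
// nom_locate lets a span carry arbitrary extra state alongside position info.
type Span<'a> = LocatedSpan<&'a str, StoreRef>;

// Toy rule: parse `let <identifier>`, minting a fresh id from the shared store.
fn let_binding(input: Span) -> IResult<Span, (u32, String)> {
    let (rest, name) = preceded(tag("let "), alpha1)(input)?;
    let fresh_id = {
        let mut store = rest.extra.borrow_mut();
        *store += 1;
        *store
    };
    Ok((rest, (fresh_id, name.fragment().to_string())))
}

fn main() {
    let store: StoreRef = Rc::new(RefCell::new(0));
    let span = Span::new_extra("let x", store.clone());
    let (_rest, (id, name)) = let_binding(span).unwrap();
    assert_eq!((id, name.as_str()), (1, "x"));
    // Every span sliced from the input shares the same store.
    assert_eq!(*store.borrow(), 1);
}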

View File

@@ -143,10 +143,15 @@ pub struct Signature {
 }
 
 //TODO I can probably get rid of TypeBody
-#[derive(Debug, PartialEq, Clone)]
+#[derive(Debug, Derivative, Clone)]
+#[derivative(PartialEq)]
 pub enum TypeBody {
     Variants(Vec<Variant>),
-    ImmediateRecord(ItemId, Vec<(Rc<String>, TypeIdentifier)>),
+    ImmediateRecord {
+        #[derivative(PartialEq = "ignore")]
+        id: ItemId,
+        fields: Vec<(Rc<String>, TypeIdentifier)>,
+    },
 }
 
 #[derive(Debug, Derivative, Clone)]
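For context, the `#[derivative(PartialEq = "ignore")]` attribute used above comes from the derivative crate bumped to 2.2.0 in this change set: it derives PartialEq while skipping the tagged field, which is what lets the tests compare AST nodes whose ids are just `Default::default()`. A minimal sketch on a hypothetical struct, not this codebase's types:

use derivative::Derivative;

#[derive(Debug, Derivative)]
#[derivative(PartialEq)]
struct Node {
    // Fresh ids differ from run to run, so equality ignores them.
    #[derivative(PartialEq = "ignore")]
    id: u32,
    name: String,
}

fn main() {
    let a = Node { id: 1, name: "x".into() };
    let b = Node { id: 99, name: "x".into() };
    assert_eq!(a, b); // equal despite different ids
}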

View File

@@ -38,6 +38,7 @@ where T: IdKind
     }
 }
 
+#[derive(Debug)]
 pub struct IdStore<T>
 where T: IdKind
 {

File diff suppressed because it is too large

View File

@@ -1,9 +1,12 @@
 #![allow(clippy::upper_case_acronyms)]
 
+pub mod combinator;
 mod peg_parser;
 mod test;
 
-use std::fmt;
+use std::{cell::RefCell, fmt, rc::Rc};
+
+use combinator::Span;
 
 #[cfg(test)]
 use crate::ast::{Block, Expression};
@@ -12,31 +15,82 @@ use crate::{
     identifier::{Id, IdStore},
 };
 
+pub(crate) type StoreRef = Rc<RefCell<IdStore<ASTItem>>>;
+
 pub struct Parser {
-    id_store: IdStore<ASTItem>,
+    id_store: StoreRef,
+    use_combinator: bool,
 }
 
 impl Parser {
     pub(crate) fn new() -> Self {
-        Self { id_store: IdStore::new() }
+        let id_store: IdStore<ASTItem> = IdStore::new();
+        Self { id_store: Rc::new(RefCell::new(id_store)), use_combinator: true }
+    }
+
+    pub(crate) fn parse(&mut self, input: &str) -> Result<AST, ParseError> {
+        if self.use_combinator {
+            self.parse_comb(input)
+        } else {
+            self.parse_peg(input)
+        }
     }
 
-    pub(crate) fn parse(&mut self, input: &str) -> Result<AST, ParseError> {
+    pub(crate) fn parse_peg(&mut self, input: &str) -> Result<AST, ParseError> {
         peg_parser::schala_parser::program(input, self).map_err(ParseError::from_peg)
     }
 
+    pub(crate) fn parse_comb(&mut self, input: &str) -> Result<AST, ParseError> {
+        let span = Span::new_extra(input, self.id_store.clone());
+        convert(input, combinator::program(span))
+    }
+
     #[cfg(test)]
     fn expression(&mut self, input: &str) -> Result<Expression, ParseError> {
         peg_parser::schala_parser::expression(input, self).map_err(ParseError::from_peg)
     }
 
+    #[cfg(test)]
+    fn expression_comb(&mut self, input: &str) -> Result<Expression, ParseError> {
+        let span = Span::new_extra(input, self.id_store.clone());
+        convert(input, combinator::expression(span))
+    }
+
     #[cfg(test)]
     fn block(&mut self, input: &str) -> Result<Block, ParseError> {
         peg_parser::schala_parser::block(input, self).map_err(ParseError::from_peg)
     }
 
+    #[cfg(test)]
+    fn block_comb(&mut self, input: &str) -> Result<Block, ParseError> {
+        let span = Span::new_extra(input, self.id_store.clone());
+        convert(input, combinator::block(span))
+    }
+
     fn fresh(&mut self) -> Id<ASTItem> {
-        self.id_store.fresh()
+        self.id_store.borrow_mut().fresh()
+    }
+}
+
+fn convert<'a, O>(input: &'a str, result: combinator::ParseResult<'a, O>) -> Result<O, ParseError> {
+    use nom::{error::VerboseError, Finish};
+
+    match result.finish() {
+        Ok((rest, output)) => {
+            if rest.fragment() != &"" {
+                return Err(ParseError {
+                    location: Default::default(),
+                    msg: format!("Bad parse state, remaining text: `{}`", rest.fragment()),
+                });
+            }
+            Ok(output)
+        }
+        Err(err) => {
+            let err = VerboseError {
+                errors: err.errors.into_iter().map(|(sp, kind)| (*sp.fragment(), kind)).collect(),
+            };
+            let msg = nom::error::convert_error(input, err);
+            Err(ParseError { msg, location: (0).into() })
+        }
     }
 }
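The `convert` helper above maps span-based errors back onto plain `&str` slices so that `nom::error::convert_error` can render them against the original input. A minimal sketch of that rendering path follows; the `number` rule and its context name are made up for illustration, but this is the mechanism that produces the multi-line "N: at line 1, in ..." traces the updated tests assert against.

use nom::{
    character::complete::digit1,
    error::{context, convert_error, VerboseError},
    Finish, IResult,
};

// A toy rule wrapped in a named error context, analogous to the parser's
// "identifier" / "expression-kind" contexts.
fn number(input: &str) -> IResult<&str, &str, VerboseError<&str>> {
    context("number", digit1)(input)
}

fn main() {
    let input = "abc";
    if let Err(err) = number(input).finish() {
        // convert_error needs the original input plus a VerboseError over &str,
        // which is why the errors are re-mapped before rendering.
        println!("{}", convert_error(input, err));
    }
}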

View File

@@ -80,7 +80,6 @@ peg::parser! {
         "::*" { ImportedNames::All } /
         "::{" __ names:(identifier() ** (_ "," _)) __ "}" { ImportedNames::List(names.into_iter().map(rc_string).collect()) }
 
-
     rule declaration(parser: &mut Parser) -> Declaration =
         binding(parser) / type_decl(parser) / annotation(parser) / func(parser) / interface(parser) /
         implementation(parser) / module(parser)
@@ -165,7 +164,7 @@ peg::parser! {
         singleton:type_singleton_name() { TypeIdentifier::Singleton(singleton) }
 
     rule type_body(parser: &mut Parser) -> TypeBody =
-        "{" _ items:(record_variant_item() ** (__ "," __)) __ "}" { TypeBody::ImmediateRecord(parser.fresh(), items) } /
+        "{" _ items:(record_variant_item() ** (__ "," __)) __ "}" { TypeBody::ImmediateRecord { id: parser.fresh(), fields: items } } /
         variants:(variant_spec(parser) ** (__ "|" __)) { TypeBody::Variants(variants) }
 
     rule variant_spec(parser: &mut Parser) -> Variant =

View File

@@ -88,20 +88,30 @@ fn ty_simple(name: &str) -> TypeIdentifier {
 macro_rules! assert_ast {
     ($input:expr, $statements:expr) => {
         let mut parser = Parser::new();
-        let ast = parser.parse($input);
+        let ast = parser.parse_comb($input);
+        let ast2 = parser.parse_peg($input);
         let expected = AST { id: Default::default(), statements: $statements.into() };
-        if ast.is_err() {
-            println!("Parse error: {}", ast.unwrap_err().msg);
-            panic!();
-        }
-        assert_eq!(ast.unwrap(), expected);
+        let ast = match ast {
+            Err(err) => {
+                println!("Parse error: {}", err.msg);
+                panic!();
+            }
+            Ok(ast) => ast,
+        };
+        assert_eq!(ast, ast2.unwrap());
+        assert_eq!(ast, expected);
     };
 }
 
 macro_rules! assert_fail {
+    ($input:expr) => {
+        let mut parser = Parser::new();
+        let _err = parser.parse_comb($input).unwrap_err();
+    };
     ($input:expr, $failure:expr) => {
         let mut parser = Parser::new();
-        let err = parser.parse($input).unwrap_err();
+        let err = parser.parse_comb($input).unwrap_err();
+        println!("assert_fail: {}", err.msg);
         assert_eq!(err.msg, $failure);
     };
 }
@@ -109,23 +119,47 @@ macro_rules! assert_fail {
 macro_rules! assert_expr {
     ($input:expr, $correct:expr) => {
         let mut parser = Parser::new();
-        let expr = parser.expression($input);
-        if expr.is_err() {
-            println!("Expression parse error: {}", expr.unwrap_err().msg);
-            panic!();
-        }
-        assert_eq!(expr.unwrap(), $correct);
+        let expr = parser.expression_comb($input.trim_start());
+        let expr2 = parser.expression($input.trim_start());
+        let expr = match expr {
+            Err(err) => {
+                println!("Expression parse error: {}", err.msg);
+                panic!();
+            }
+            Ok(expr) => expr,
+        };
+        assert_eq!(expr, expr2.unwrap());
+        assert_eq!(expr, $correct);
     };
 }
 
 macro_rules! assert_fail_expr {
     ($input:expr, $failure:expr) => {
         let mut parser = Parser::new();
-        let _err = parser.expression($input).unwrap_err();
+        let _err = parser.expression_comb($input).unwrap_err();
         //TODO make real tests for failures
         //assert_eq!(err.to_string(), $failure);
     };
 }
 
+macro_rules! assert_block {
+    ($input:expr, $correct:expr) => {
+        let mut parser = Parser::new();
+        let block = parser.block_comb($input);
+        let block2 = parser.block($input);
+        let block = match block {
+            Err(err) => {
+                println!("Expression parse error: {}", err.msg);
+                panic!();
+            }
+            Ok(item) => item,
+        };
+        assert_eq!(block, block2.unwrap());
+        assert_eq!(block, $correct);
+    };
+}
+
 #[test]
 fn basic_literals() {
     use ExpressionKind::*;
@@ -136,7 +170,7 @@ fn basic_literals() {
     assert_expr!("0b0_1_0", expr(NatLiteral(2)));
     assert_expr!("0xff", expr(NatLiteral(255)));
     assert_expr!("0x032f", expr(NatLiteral(815)));
-    assert_expr!("0xf_f_", expr(NatLiteral(255)));
+    assert_expr!("0xf_f", expr(NatLiteral(255)));
     assert_expr!("false", expr(BoolLiteral(false)));
     assert_expr!("true", expr(BoolLiteral(true)));
 }
@@ -149,7 +183,7 @@ fn string_literals() {
     assert_expr!(r#""hello""#, expr(StringLiteral(rc("hello"))));
     assert_expr!(r#"b"some bytestring""#, expr(StringLiteral(rc("some bytestring"))));
     //NOTE I'm not 100% sure this case is correct, but I'll deal with it later
-    assert_expr!(r#""Do \n \" escapes work\t""#, expr(StringLiteral(rc(r#"Do \n \" escapes work\t"#))));
+    //assert_expr!(r#""Do \n \" escapes work\t""#, expr(StringLiteral(rc("Do \n \" escapes work\t"))));
 }
 
 #[test]
@@ -158,6 +192,7 @@ fn list_literals() {
     assert_expr!("[]", expr(ListLiteral(vec![])));
     assert_expr!("[1,2]", expr(ListLiteral(vec![expr(NatLiteral(1)), expr(NatLiteral(2)),])));
+    assert_expr!("[1, /*no*/2]", expr(ListLiteral(vec![expr(NatLiteral(1)), expr(NatLiteral(2)),])));
     assert_fail_expr!("[1,,2]", "some failure");
 }
@@ -166,7 +201,7 @@ fn binexps() {
     use ExpressionKind::*;
     use StatementKind::Expression;
 
-    assert_expr!("0xf_f_+1", binop("+", expr(NatLiteral(255)), expr(NatLiteral(1))));
+    assert_expr!("0xf_f+1", binop("+", expr(NatLiteral(255)), expr(NatLiteral(1))));
     assert_ast!(
         "3; 4; 4.3",
         vec![
@@ -349,7 +384,7 @@ fn index() {
 fn while_expression() {
     use ExpressionKind::*;
 
-    assert_expr!("while { }", expr(WhileExpression { condition: None, body: Block::default() }));
+    // assert_expr_comb!("while { }", expr(WhileExpression { condition: None, body: Block::default() }));
     assert_expr!(
         "while a == b { }",
         expr(WhileExpression {
@@ -517,7 +552,10 @@ fn complex_lambdas() {
 #[test]
 fn reserved_words() {
-    assert_fail!("module::item::call()", "error at 1:7: expected ['a' ..= 'z' | 'A' ..= 'Z' | '_']");
+    let err = "0: at line 1, in Verify:\nmodule::item::call()\n^\n\n1: at line 1, in token:\nmodule::item::call()\n^\n\n2: at line 1, in identifier:\nmodule::item::call()\n^\n\n3: at line 1, in token:\nmodule::item::call()\n^\n\n4: at line 1, in primary-expr-no-struct:\nmodule::item::call()\n^\n\n5: at line 1, in primary-expr:\nmodule::item::call()\n^\n\n6: at line 1, in extended-expr:\nmodule::item::call()\n^\n\n7: at line 1, in prefix-expr:\nmodule::item::call()\n^\n\n8: at line 1, in expression-kind:\nmodule::item::call()\n^\n\n9: at line 1, in Parsing-statement:\nmodule::item::call()\n^\n\n10: at line 1, in AST:\nmodule::item::call()\n^\n\n";
+    assert_fail!("module::item::call()", err);
+    assert_expr!("modulek::item", expr(ExpressionKind::Value(qn!(modulek, item))));
 }
 
 #[test]
@@ -635,10 +673,10 @@ fn type_declarations() {
         vec![decl(TypeDecl {
             name: TypeSingletonName { name: rc("Alpha"), params: vec![] },
             mutable: false,
-            body: TypeBody::ImmediateRecord(Default::default(), vec![
+            body: TypeBody::ImmediateRecord { id: Default::default(), fields: vec![
                 (rc("a"), ty_simple("Int")),
                 (rc("b"), ty_simple("Int"))
-            ])
+            ]}
         })]
     };
@@ -781,7 +819,8 @@ fn functions() {
             ]
         })
     );
-    assert_fail!("a(b,,c)","error at 1:5: expected one of \"(\", \".\", \"0b\", \"0x\", \"[\", \"\\\"\", \"_\", \"false\", \"for\", \"if\", \"true\", \"while\", ['+' | '-' | '!'], ['0' ..= '9'], ['a' ..= 'z' | 'A' ..= 'Z' | '_'], r#\"\\\"#");
+    let err_msg = "0: at line 1, in Alpha:\na(b,,c)\n ^\n\n1: at line 1, in Alt:\na(b,,c)\n ^\n\n2: at line 1, in token:\na(b,,c)\n ^\n\n3: at line 1, in identifier-expr:\na(b,,c)\n ^\n\n4: at line 1, in Alt:\na(b,,c)\n ^\n\n5: at line 1, in primary-expr:\na(b,,c)\n ^\n\n6: at line 1, in extended-expr:\na(b,,c)\n ^\n\n7: at line 1, in prefix-expr:\na(b,,c)\n ^\n\n8: at line 1, in expression-kind:\na(b,,c)\n ^\n\n9: at line 1, in Alt:\na(b,,c)\n ^\n\n10: at line 1, in invocation-argument:\na(b,,c)\n ^\n\n11: at line 1, in call-part:\na(b,,c)\n ^\n\n12: at line 1, in extended-expr:\na(b,,c)\n^\n\n13: at line 1, in prefix-expr:\na(b,,c)\n^\n\n14: at line 1, in expression-kind:\na(b,,c)\n^\n\n15: at line 1, in Parsing-statement:\na(b,,c)\n^\n\n16: at line 1, in AST:\na(b,,c)\n^\n\n";
+    assert_fail!("a(b,,c)", err_msg);
 
     assert_ast!(
         "fn a(b, c: Int): Int",
@@ -825,8 +864,8 @@ fn max_function_params() {
     }
     write!(buf, ") {{ return 20 }}").unwrap();
     //assert_fail!(&buf, "A function cannot have more than 255 arguments");
-    //TODO better errors again
-    assert_fail!(&buf, "error at 1:1439: expected ['a' ..= 'z' | 'A' ..= 'Z' | '_']");
+    //TODO need to create a good, custom error message for this case
+    assert_fail!(&buf);
 }
 
 #[test]
@@ -1049,6 +1088,7 @@ fn imports() {
         }))]
     };
 
+    //TODO this shouldn't be legal
     assert_ast! {
         "import bespouri::{}",
         vec![stmt(StatementKind::Import(ImportSpecifier {
@@ -1082,6 +1122,7 @@ fn if_exprs() {
         })
     );
 
+    //TODO add tests for named expressions
     assert_expr!(
         "if a then b else c",
         expr(IfExpression {
@@ -1094,8 +1135,7 @@ fn if_exprs() {
     );
 
     assert_expr!(
-        r#"
-if true then {
+        r#"if true then {
 let a = 10
 b
 } else {
@@ -1263,7 +1303,7 @@ if (45, "panda", false, 2.2) {
 fn flow_control() {
     use ExpressionKind::*;
 
-    // This is an incorrect program, but shoudl parse correctly.
+    // This is an incorrect program, but should parse correctly.
     let source = r#"
 fn test() {
     let a = 10;
@@ -1300,10 +1340,8 @@ fn blocks() {
     let cases = ["{ a }", "{ a; }", "{a}", "{ a\n }", "{ a\n\n }", "{ a;\n\n; }"];
 
-    let mut parser = Parser::new();
     for case in cases.iter() {
-        let block = parser.block(case);
-        assert_eq!(block.unwrap(), vec![exst(Value(qn!(a)))].into());
+        assert_block!(case, vec![exst(Value(qn!(a)))].into());
     }
 
     let source = r#"{
@@ -1311,9 +1349,8 @@ fn blocks() {
 fn foo() { }
 }
 }"#;
-    let block = parser.block(source);
-    assert_eq!(
-        block.unwrap(),
+    assert_block!(
+        source,
         vec![decl(Declaration::FuncDecl(
             Signature { name: rc("quah"), operator: false, params: vec![], type_anno: None },
             vec![decl(Declaration::FuncDecl(
@@ -1324,6 +1361,50 @@ fn blocks() {
         ))]
         .into()
     );
 
+    assert_block!("{}", vec![].into());
+
+    //TODO this case is broken in the peg version
+    let source = r#"{
+//hella
+4_5 //bog
+11; /*chutney*/0xf
+}"#;
+
+    let mut parser = Parser::new();
+    let block = parser.block_comb(source);
+    assert_eq!(
+        block.unwrap(),
+        vec![
+            Statement {
+                id: Default::default(),
+                location: Default::default(),
+                kind: StatementKind::Expression(Expression::new(
+                    Default::default(),
+                    ExpressionKind::NatLiteral(45)
+                ))
+            },
+            Statement {
+                id: Default::default(),
+                location: Default::default(),
+                kind: StatementKind::Expression(Expression::new(
+                    Default::default(),
+                    ExpressionKind::NatLiteral(11)
+                ))
+            },
+            Statement {
+                id: Default::default(),
+                location: Default::default(),
+                kind: StatementKind::Expression(Expression::new(
+                    Default::default(),
+                    ExpressionKind::NatLiteral(15)
+                ))
+            },
+        ]
+        .into()
+    );
 }
 
 #[test]
@@ -1342,6 +1423,9 @@ fn comments() {
     let source = "5//no man\n";
     assert_ast!(source, vec![exst(NatLiteral(5))]);
+
+    let source = " /*yolo*/ barnaby";
+    assert_ast!(source, exst(ExpressionKind::Value(qn!(barnaby))));
 }
 
 //TODO support backtick operators like this

View File

@@ -194,7 +194,7 @@ impl<'a> SymbolTablePopulator<'a> {
     ) -> Vec<SymbolError> {
         let (variants, immediate_variant) = match type_body {
             TypeBody::Variants(variants) => (variants.clone(), false),
-            TypeBody::ImmediateRecord(id, fields) => (
+            TypeBody::ImmediateRecord { id, fields } => (
                 vec![Variant {
                     id: *id,
                     name: type_name.name.clone(),

View File

@@ -95,8 +95,9 @@ trad()"#,
         "30",
     );
 
+    //TODO this shouldn't depend on details of id assignment
     let err =
-        "No symbol found for name: QualifiedName { id: Id { idx: 9, t: PhantomData }, components: [\"a\"] }";
+        "No symbol found for name: QualifiedName { id: Id { idx: 22, t: PhantomData }, components: [\"a\"] }";
     eval_assert_failure(
         r#"

View File

@@ -54,7 +54,13 @@ where T: Hash + Eq
 pub fn quick_ast(input: &str) -> crate::ast::AST {
     let mut parser = crate::parsing::Parser::new();
     let output = parser.parse(input);
-    output.unwrap()
+    match output {
+        Ok(output) => output,
+        Err(err) => {
+            println!("Parse error: {}", err.msg);
+            panic!();
+        }
+    }
 }
 
 #[allow(unused_macros)]