106 Commits

Author SHA1 Message Date
greg
bc87f8cd90 Initial work 2020-03-04 11:25:23 -08:00
greg
a0955e07dc Fix attribute 2020-02-12 22:14:21 -08:00
greg
afcb10bb72 Add random idea 2019-11-18 03:11:00 -08:00
greg
8de625e540 Got rid of symbol table from eval 2019-11-10 03:28:31 -08:00
greg
a2bd9a3985 Remove symbol table from evaluator 2019-11-09 19:52:05 -08:00
greg
e4a1a23f4d Moved sym lookup logic from eval to ast reducer 2019-11-09 19:49:02 -08:00
greg
2cd325ba12 Add plan of attack notes 2019-11-08 18:56:15 -08:00
greg
8218007f1c Commit this temporary fix 2019-11-08 18:53:38 -08:00
greg
040ab11873 Move reduction of values into separate method 2019-11-07 03:28:18 -08:00
greg
b967fa1911 to_repl() doesn't need symbol table handle 2019-11-07 02:42:17 -08:00
greg
4c718ed977 Add TODO for symbol resolver 2019-11-06 18:41:37 -08:00
greg
d20acf7166 Add tokenization for string literal prefixes 2019-11-05 02:22:11 -08:00
greg
efc8497235 Rearchitect parser
To ensure that the prelude gets parsed with the same ItemId context as
normal REPL input
2019-10-25 01:49:15 -07:00
greg
d824b8d6ef Idea for pattern matching 2019-10-24 03:09:17 -07:00
greg
4a1987b5a2 Test for modules in symbol table 2019-10-24 03:02:52 -07:00
greg
c96644ddce Modules in symbol table 2019-10-24 02:13:07 -07:00
greg
cc0ac83709 Refactor a lot of symbol table in prep for modules 2019-10-24 01:34:13 -07:00
greg
d6019e6f9a Improve REPL help message
Show help strings for children of a directive
2019-10-23 21:41:25 -07:00
greg
3344f6827d Clear out some compiler warnings 2019-10-23 16:07:10 -07:00
greg
b38c4b3298 SymbolTable passing, fix test for duplicate line 2019-10-23 14:47:18 -07:00
greg
a2f30b6136 Refactored symbol_table test 2019-10-23 14:47:18 -07:00
greg
11a9a60a34 Rejiggering some things with the SourceMap pointer in Parser 2019-10-23 14:47:18 -07:00
greg
5bb1a245c4 Have Parser accept SourceMap reference 2019-10-23 14:47:18 -07:00
greg
1ffe61cf5f Partway there in terms of implementing source map lookup 2019-10-23 14:47:18 -07:00
greg
7495f30e16 Pass SourceMapHandle to SymbolTable 2019-10-23 14:47:18 -07:00
greg
82520aa28d Start to add source map insertions 2019-10-23 14:47:18 -07:00
greg
129d9ec673 A bunch of infrastructure for keeping track of AST node locations
Plus a failing test to illustrate the reason we care
2019-10-23 14:47:18 -07:00
greg
7825ef1eb9 Partial module work 2019-10-23 14:47:18 -07:00
greg
f3ecdc61cb Remove old TODO 2019-10-23 02:22:10 -07:00
greg
bf59e6cc63 Just import all of AST in parse tests 2019-10-22 03:15:41 -07:00
greg
c560c29b2d Start to add module syntax 2019-10-22 03:15:14 -07:00
greg
4dcd9d0198 Some more parse trace improvements 2019-10-22 02:11:49 -07:00
greg
7ac63160c5 Remove extraneous debug print 2019-10-21 19:19:48 -07:00
greg
8656992945 Made parse trace output a bit nicer
Used ... instead of whitespace, removed extraneous "Production"
2019-10-21 19:18:47 -07:00
greg
bb87a87848 Remove this TODO; default args are parsed 2019-10-21 10:53:17 -07:00
greg
2f467702e3 Use common scope resolver
So that if you import something at the repl, it stays imported
2019-10-21 04:19:26 -07:00
greg
5ac5425fac Use symbol table handle in resolver 2019-10-21 04:17:30 -07:00
greg
944916d6af Alias for symbol table handle type 2019-10-21 04:09:43 -07:00
greg
3906210db8 Fix prelude 2019-10-21 03:26:38 -07:00
greg
f7357d4498 Add explicit panic for prelude errors
Maybe I want to handle this better in the future, but for now just panic
if the prelude is bad for some reason.
2019-10-21 03:25:45 -07:00
greg
1493d12a22 Reduce unused imports 2019-10-21 03:02:11 -07:00
greg
016d8fc900 Fixed tests
but I think importing is still not working properly
2019-10-21 02:56:21 -07:00
greg
86dc5eca02 Get rid of symbol segment kind
I don't think I need this after all
2019-10-18 18:24:57 -07:00
greg
e75958c2a2 Currently broken import all commit 2019-10-18 09:55:26 -07:00
greg
7a56b6dfc0 Add some more methods around this 2019-10-18 09:54:56 -07:00
greg
f9633ebe55 Add (broken) import all test 2019-10-18 09:53:44 -07:00
greg
854740a63f SymbolTrie 2019-10-17 03:15:39 -07:00
greg
ca10481d7c Symbol table test - multiple values 2019-10-16 22:46:58 -07:00
greg
26fa4a29ec Put type names into symbol table 2019-10-16 20:22:40 -07:00
greg
97b59d7e70 Symbol table tests to separate file 2019-10-16 19:51:43 -07:00
greg
92ad4767c8 Remove some extraneous code 2019-10-16 10:39:48 -07:00
greg
7cabca2987 Got all tests passing with visitor scope-resolver 2019-10-16 02:46:32 -07:00
greg
98e53a6d0f Start porting ScopeResolution to use Visitor pattern 2019-10-15 19:06:07 -07:00
greg
77cc1f3824 ASTVisitor imports 2019-10-15 19:03:27 -07:00
greg
9e64a22328 Invocation argument in visitor 2019-10-15 18:58:51 -07:00
greg
5afdc16f2e Still more visitor work 2019-10-15 03:51:36 -07:00
greg
f818e86f48 More visitor work 2019-10-15 00:53:21 -07:00
greg
5a01b12d9b Add note about pattern synonyms 2019-10-13 16:50:54 -07:00
greg
7c75f9b2a8 Extraneous comment 2019-10-11 18:45:52 -07:00
greg
2c34ab52c4 Make this test conform to new if syntax 2019-10-11 09:13:09 -07:00
greg
44d1f4692f Add back parser restrictions 2019-10-11 09:11:14 -07:00
greg
3cf3fce72d Fixed some code in scope resolver 2019-10-10 18:33:34 -07:00
greg
ddea470ba8 Parsing tests pass, eval ones fail 2019-10-10 18:17:59 -07:00
greg
745afe981a Got compilation working again 2019-10-10 17:50:20 -07:00
greg
a6c86d6447 Some work 2019-10-10 17:06:41 -07:00
greg
8d3639ab8e Fix everything if-refactor-related save reduced_ast 2019-10-10 14:38:48 -07:00
greg
3bca82a8c8 Still more refactoring work 2019-10-10 10:34:54 -07:00
greg
811c52c8d3 More if-expr refactoring work
Think I finished all the parsing stuff, just need to fix the types
everywhere else
2019-10-10 03:56:35 -07:00
greg
95e278d1b5 Chunk of work on if-expr AST
don't expect this to compile yet
2019-10-10 03:29:28 -07:00
greg
61b757313d Alter grammar of if-blocks 2019-10-10 02:34:56 -07:00
greg
24b48551dc More playing around with syntax for if 2019-10-09 02:32:41 -07:00
greg
2ed84de641 Introduce bare else clause in if exprs
With a non-passing test
2019-10-09 01:50:32 -07:00
greg
22efd39114 Change if-expr syntax
use else instead of ->
2019-10-08 18:23:16 -07:00
greg
a48bb61eb3 Get rid of this test
need to rethink how if-expressions should work
2019-10-05 16:41:51 -07:00
greg
904d5c4431 Add "production" line to parse debug output
And also add a .next() in the parser that should've been there
2019-10-04 03:12:09 -07:00
greg
28056b1f89 Add production name in ParseError
for debugging
2019-10-04 03:12:00 -07:00
greg
f9a59838b0 Get rid of .into()'s in parser 2019-10-01 02:19:12 -07:00
greg
f02d7cb924 Add test for failing if expression 2019-09-28 17:42:22 -07:00
greg
489819a28e Multiline prompt 2019-09-28 17:31:37 -07:00
greg
c427646e75 Change type alias 2019-09-28 02:42:18 -07:00
greg
f06b5922de Visitor cleanup 2019-09-28 02:37:36 -07:00
greg
253b5d88f0 Finish cleaning up visitor logic 2019-09-28 01:58:22 -07:00
greg
f654cd6b50 Start moving all walking logic out of visitor 2019-09-28 01:01:56 -07:00
greg
89649273d8 Still more visitor stuff 2019-09-27 22:34:00 -07:00
greg
9fa4e3797c More visitor stuff 2019-09-27 09:54:24 -07:00
greg
c8804eeefb More visitor stuff 2019-09-26 03:26:37 -07:00
greg
d80a0036b1 Enough of ASTVisitor to test something 2019-09-26 02:29:35 -07:00
greg
7533c69c49 Add note on visitors 2019-09-26 01:32:33 -07:00
greg
39bb175722 Initial WIP code 2019-09-26 01:31:39 -07:00
greg
ae65455374 Add type alias for name scope data structure 2019-09-25 03:26:31 -07:00
greg
1fc028c9fc Make lookup_name_in_scope a method 2019-09-25 03:18:54 -07:00
greg
031ff9fe7e Add top-level variable to schala prelude 2019-09-25 02:54:56 -07:00
greg
5a9f3c1850 Sort symbols in debug 2019-09-25 02:43:07 -07:00
greg
58251d3f28 Use colored in symbol table debug 2019-09-25 02:28:24 -07:00
greg
2e42313991 add_new_symbol clarification 2019-09-25 02:18:36 -07:00
greg
355604d911 Cargo.lock should be version-controlled 2019-09-25 01:54:14 -07:00
greg
0b57561114 Use block in scope resolution 2019-09-25 01:45:02 -07:00
greg
dbd81ca83d names 2019-09-24 19:24:07 -07:00
greg
6368d10d92 Rename Symbol.name -> Symbol.local_name
to make it clearer what this means
2019-09-24 18:56:53 -07:00
greg
9cd64d97a5 Isolate import handling code 2019-09-24 18:42:01 -07:00
greg
41cad61e34 Start work on name resolution 2019-09-24 03:28:59 -07:00
greg
a054de56a2 Import statement syntax 2019-09-21 02:30:28 -07:00
greg
603ea89b98 Start adding import keyword 2019-09-20 18:19:29 -07:00
greg
06026604cc Fix test 2019-09-20 12:14:15 -07:00
greg
03f8abac6a Remove Meta type 2019-09-20 12:03:42 -07:00
greg
fd3922d866 Get rid of Meta from tests 2019-09-20 10:10:57 -07:00
32 changed files with 2924 additions and 1005 deletions

.gitignore (vendored, 1 line changed)

@@ -1,4 +1,3 @@
Cargo.lock
target
.schala_repl
.schala_history

Cargo.lock (generated, 1129 lines; diff suppressed because it is too large)


@@ -7,6 +7,7 @@ authors = ["greg <greg.shuflin@protonmail.com>"]
schala-repl = { path = "schala-repl" }
schala-lang = { path = "schala-lang/language" }
partis = { path="partis" }
# maaru-lang = { path = "maaru" }
# rukka-lang = { path = "rukka" }
# robo-lang = { path = "robo" }

TODO.md (65 lines changed)

@@ -1,11 +1,37 @@
# Plan of attack
1. modify visitor so it can handle scopes
-this is needed both to handle import scope correctly
-and also to support making FQSNs aware of function parameters
2. Once FQSNs are aware of function parameters, most of the Rc<String> things in eval.rs can go away
# TODO items
-use 'let' sigil in patterns for variables :
```
q is MyStruct(let a, Chrono::Trigga) then {
}
```
-idea: what if there was something like React jsx syntax built in? i.e. a way to automatically transform some kind of markup
into a function call, cf. `<h1 prop="arg">` -> h1(prop=arg)
## General code cleanup
-experiment with storing metadata via ItemIds on AST nodes (cf. https://rust-lang.github.io/rustc-guide/hir.html, https://github.com/rust-lang/rust/blob/master/src/librustc/hir/mod.rs )
- I think I can restructure the parser to get rid of most instances of expect!, at least at the beginning of a rule
DONE -experiment with storing metadata via ItemIds on AST nodes (cf. https://rust-lang.github.io/rustc-guide/hir.html, https://github.com/rust-lang/rust/blob/master/src/librustc/hir/mod.rs )
-implement and test open/use statements
-implement field access
- standardize on an error type that isn't String
-implement a visitor pattern for the use of scope_resolver
- maybe implement this twice: 1) the value-returning, no-default one in the haoyi blogpost,
-look at https://gitlab.haskell.org/ghc/ghc/wikis/pattern-synonyms
2) the non-value-returning, default one like in rustc (cf. https://github.com/rust-unofficial/patterns/blob/master/patterns/visitor.md)
-parser error - should report subset of AST parsed *so far*
- what if you used python 'def' syntax to define a function? what error message makes sense here?
## Reduction
- make a good type for actual language builtins to avoid string comparisons
@@ -74,10 +100,43 @@ ex.
-consult http://gluon-lang.org/book/embedding-api.html
## Trying if-syntax again
## Playing around with conditional syntax ideas
//simple if expr
if x == 10 then "a" else "z"
//complex if expr
if x == 10 then {
let a = 1
let b = 2
a + b
} else {
55
}
// different comparison ops
if x {
== 1 then "a"
.isPrime() then "b"
else "c"
}
/* for now disallow `if x == { 1 then ... }`, b/c hard to parse
//simple pattern-matching
if x is Person("Ivan", age) then age else 0
//match-block equivalent
if x {
is Person("Ivan", _) then "Ivan"
is Person(_, age) if age > 13 then "barmitzvah'd"
else "foo"
}
## (OLD) Playing around with conditional syntax ideas
-
- if/match playground
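
The ItemId item above (together with the SourceMap plumbing added to the parser further down in this diff) amounts to keeping AST nodes id-only and hanging per-node metadata off side tables, the way rustc's HIR does. A minimal sketch of that pattern, with hypothetical stand-in types rather than the crate's actual `ItemId`, `ItemIdStore`, and `SourceMap` definitions:

```rust
// Sketch only: stand-in types illustrating the "metadata via ItemIds" idea,
// not the definitions used by schala-lang itself.
use std::collections::HashMap;

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
struct ItemId(u32);

#[derive(Default)]
struct ItemIdStore {
    next: u32,
}

impl ItemIdStore {
    /// Hand out a fresh, unique id for a newly parsed AST node.
    fn fresh(&mut self) -> ItemId {
        let id = ItemId(self.next);
        self.next += 1;
        id
    }
}

#[derive(Debug, Clone, Copy)]
struct Location {
    line_num: usize,
    char_num: usize,
}

/// Side table keyed by ItemId; the AST nodes themselves stay free of location data.
#[derive(Default)]
struct SourceMap {
    locations: HashMap<ItemId, Location>,
}

impl SourceMap {
    fn add_location(&mut self, id: ItemId, loc: Location) {
        self.locations.insert(id, loc);
    }
    fn lookup(&self, id: ItemId) -> Option<Location> {
        self.locations.get(&id).copied()
    }
}

fn main() {
    let mut ids = ItemIdStore::default();
    let mut map = SourceMap::default();
    let id = ids.fresh();
    map.add_location(id, Location { line_num: 1, char_num: 0 });
    let loc = map.lookup(id).expect("id was registered");
    assert_eq!((loc.line_num, loc.char_num), (1, 0));
}
```

The payoff is that node equality and hashing stay cheap, and new kinds of per-node data (locations, resolved names, types) can be added later without touching the AST definition.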

partis/Cargo.toml (new file, 9 lines)

@@ -0,0 +1,9 @@
[package]
name = "partis"
version = "0.1.0"
authors = ["greg <greg.shuflin@protonmail.com>"]
edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]

partis/src/lib.rs (new file, 17 lines)

@@ -0,0 +1,17 @@
struct ParseError { }
enum ParseResult<'a, T> {
Success(T, &'a str),
Failure(ParseError),
Incomplete,
}
#[cfg(test)]
mod tests {
#[test]
fn it_works() {
assert_eq!(2 + 2, 4);
}
}
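
The new partis crate so far only sketches the result type of a parser combinator: a parse either succeeds and hands back the unconsumed remainder, fails outright, or reports that the input ended too early. A usage sketch under those assumptions; the types are re-declared here so the example stands alone, and the `literal` combinator is hypothetical, not something partis defines yet:

```rust
// Hypothetical combinator built on the ParseResult shape from partis/src/lib.rs;
// the types are redeclared locally so this compiles on its own.
struct ParseError {}

enum ParseResult<'a, T> {
    Success(T, &'a str), // parsed value plus the unconsumed remainder of the input
    Failure(ParseError),
    Incomplete, // the input ended before the parser could decide
}

/// Match a fixed keyword at the front of the input.
fn literal<'a>(expected: &'static str, input: &'a str) -> ParseResult<'a, &'static str> {
    if let Some(rest) = input.strip_prefix(expected) {
        ParseResult::Success(expected, rest)
    } else if expected.starts_with(input) {
        // What we have so far is a prefix of the keyword; more input might complete it.
        ParseResult::Incomplete
    } else {
        ParseResult::Failure(ParseError {})
    }
}

fn main() {
    match literal("let", "let x = 1") {
        ParseResult::Success(tok, rest) => println!("matched {:?}, rest is {:?}", tok, rest),
        ParseResult::Failure(_) => println!("no match"),
        ParseResult::Incomplete => println!("need more input"),
    }
}
```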


@@ -32,7 +32,11 @@ impl Fold for RecursiveDescentFn {
if self.parse_level != 0 {
self.parse_level -= 1;
}
result
result.map_err(|mut parse_error: ParseError| {
parse_error.production_name = Some(stringify!(#ident).to_string());
parse_error
})
}
};
i.block = Box::new(new_block);
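
The net effect of this macro change is that every generated recursive-descent method tags a failed parse with its own production name via `map_err` and `stringify!`. A self-contained sketch of what the generated wrapper boils down to at runtime, with simplified stand-ins for `ParseError` and `ParseResult`:

```rust
// Simplified stand-ins; the real ParseError also carries the offending token.
#[derive(Debug)]
struct ParseError {
    production_name: Option<String>,
    msg: String,
}

type ParseResult<T> = Result<T, ParseError>;

fn type_declaration() -> ParseResult<()> {
    // Pretend the body of the production failed somewhere inside.
    let result: ParseResult<()> =
        Err(ParseError { production_name: None, msg: "unexpected token".into() });

    // This is the wrapper the #[recursive_descent_method] macro appends to the body:
    result.map_err(|mut parse_error: ParseError| {
        parse_error.production_name = Some(stringify!(type_declaration).to_string());
        parse_error
    })
}

fn main() {
    let err = type_declaration().unwrap_err();
    assert_eq!(err.production_name.as_deref(), Some("type_declaration"));
    println!("error in production {:?}: {}", err.production_name, err.msg);
}
```

Because the wrapper only runs on the error path, successful parses pay nothing for the extra context.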


@@ -13,6 +13,8 @@ failure = "0.1.5"
ena = "0.11.0"
stopwatch = "0.0.7"
derivative = "1.0.3"
colored = "1.8"
radix_trie = "0.1.5"
schala-lang-codegen = { path = "../codegen" }
schala-repl = { path = "../../schala-repl" }


@@ -1,11 +1,14 @@
use std::rc::Rc;
use std::convert::From;
use crate::derivative::Derivative;
use crate::symbol_table::FullyQualifiedSymbolName;
mod walker;
mod visitor;
mod visitor_test;
mod operators;
pub use operators::*;
pub use visitor::ASTVisitor;
pub use walker::walk_ast;
/// An abstract identifier for an AST node
#[derive(Debug, PartialEq, Eq, Hash, Clone)]
@@ -41,40 +44,6 @@ impl ItemIdStore {
}
}
#[derive(Clone, Debug, PartialEq)]
pub struct Meta<T> {
pub n: T,
pub fqsn: Option<FullyQualifiedSymbolName>
}
impl<T> Meta<T> {
pub fn new(n: T) -> Meta<T> {
Meta { n,
fqsn: None,
}
}
pub fn node(&self) -> &T {
&self.n
}
pub fn mut_node(&mut self) -> &mut T {
&mut self.n
}
}
//TODO this PartialEq is here to make tests work - find a way to make it not necessary
#[derive(Clone, Debug, Default, PartialEq)]
struct SourceMap {
}
impl From<Expression> for Meta<Expression> {
fn from(expr: Expression) -> Meta<Expression> {
Meta::new(expr)
}
}
#[derive(Derivative, Debug)]
#[derivative(PartialEq)]
pub struct AST {
@@ -94,7 +63,9 @@ pub struct Statement {
#[derive(Debug, PartialEq, Clone)]
pub enum StatementKind {
Expression(Expression),
Declaration(Declaration), //TODO Declaration should also be Meta-wrapped; only Expression and Declaration are Meta-wrapped maybe?
Declaration(Declaration),
Import(ImportSpecifier),
Module(ModuleSpecifier),
}
pub type Block = Vec<Statement>;
@@ -124,7 +95,11 @@ pub enum Declaration {
body: TypeBody,
mutable: bool
},
TypeAlias(Rc<String>, Rc<String>), //should have TypeSingletonName in it, or maybe just String, not sure
//TODO this needs to be more sophisticated
TypeAlias {
alias: Rc<String>,
original: Rc<String>,
},
Binding {
name: Rc<String>,
constant: bool,
@@ -217,7 +192,7 @@ pub enum ExpressionKind {
indexers: Vec<Expression>,
},
IfExpression {
discriminator: Box<Discriminator>,
discriminator: Option<Box<Expression>>,
body: Box<IfExpressionBody>,
},
WhileExpression {
@@ -246,35 +221,33 @@ pub enum InvocationArgument {
Ignored
}
#[derive(Debug, PartialEq, Clone)]
pub enum Discriminator {
Simple(Expression),
BinOp(Expression, BinOp)
}
#[derive(Debug, PartialEq, Clone)]
pub enum IfExpressionBody {
SimpleConditional(Block, Option<Block>),
SimplePatternMatch(Pattern, Block, Option<Block>),
GuardList(Vec<GuardArm>)
SimpleConditional {
then_case: Block,
else_case: Option<Block>
},
SimplePatternMatch {
pattern: Pattern,
then_case: Block,
else_case: Option<Block>
},
CondList(Vec<ConditionArm>)
}
#[derive(Debug, PartialEq, Clone)]
pub struct GuardArm {
pub guard: Guard,
pub struct ConditionArm {
pub condition: Condition,
pub guard: Option<Expression>,
pub body: Block,
}
#[derive(Debug, PartialEq, Clone)]
pub enum Guard {
Pat(Pattern),
HalfExpr(HalfExpr)
}
#[derive(Debug, PartialEq, Clone)]
pub struct HalfExpr {
pub op: Option<BinOp>,
pub expr: ExpressionKind,
pub enum Condition {
Pattern(Pattern),
TruncatedOp(BinOp, Expression),
Expression(Expression),
Else,
}
#[derive(Debug, PartialEq, Clone)]
@@ -308,3 +281,27 @@ pub enum ForBody {
MonadicReturn(Expression),
StatementBlock(Block),
}
#[derive(Debug, Derivative, Clone)]
#[derivative(PartialEq)]
pub struct ImportSpecifier {
#[derivative(PartialEq="ignore")]
pub id: ItemId,
pub path_components: Vec<Rc<String>>,
pub imported_names: ImportedNames
}
#[derive(Debug, PartialEq, Clone)]
pub enum ImportedNames {
All,
LastOfPath,
List(Vec<Rc<String>>)
}
#[derive(Debug, PartialEq, Clone)]
pub struct ModuleSpecifier {
pub name: Rc<String>,
pub contents: Vec<Statement>,
}
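
To make the reshaped if-expression AST concrete, here is a hand-built value for a line like `if x { == 1 then "a", is 2 then "b", else "c" }`, using plain strings as simplified stand-ins for the real `Expression`, `Pattern`, and `Block` types:

```rust
// Simplified mirror of the new if-expression AST shapes; strings stand in for
// the crate's Expression / Pattern / Block types.
#[allow(dead_code)]
#[derive(Debug)]
enum Condition {
    Pattern(String),             // `is <pattern>`
    TruncatedOp(String, String), // leading operator plus its right-hand expression
    Expression(String),
    Else,
}

#[derive(Debug)]
struct ConditionArm {
    condition: Condition,
    guard: Option<String>, // the optional `if <expr>` between the condition and `then`
    body: String,
}

#[allow(dead_code)]
#[derive(Debug)]
enum IfExpressionBody {
    SimpleConditional { then_case: String, else_case: Option<String> },
    SimplePatternMatch { pattern: String, then_case: String, else_case: Option<String> },
    CondList(Vec<ConditionArm>),
}

fn main() {
    // `if x { == 1 then "a", is 2 then "b", else "c" }`
    let body = IfExpressionBody::CondList(vec![
        ConditionArm {
            condition: Condition::TruncatedOp("==".into(), "1".into()),
            guard: None,
            body: "\"a\"".into(),
        },
        ConditionArm {
            condition: Condition::Pattern("2".into()),
            guard: None,
            body: "\"b\"".into(),
        },
        ConditionArm {
            condition: Condition::Else,
            guard: None,
            body: "\"c\"".into(),
        },
    ]);
    println!("{:#?}", body);
}
```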


@@ -68,10 +68,6 @@ impl BinOp {
let s = token_kind_to_sigil(op_tok)?;
Some(binop_precedences(s))
}
pub fn get_precedence(&self) -> i32 {
binop_precedences(&self.sigil)
}
}
fn token_kind_to_sigil<'a>(tok: &'a TokenKind) -> Option<&'a str> {


@@ -0,0 +1,41 @@
use std::rc::Rc;
use crate::ast::*;
//TODO maybe these functions should take closures that return a KeepRecursing | StopHere type,
//or a tuple of (T, <that type>)
pub trait ASTVisitor: Sized {
fn ast(&mut self, _ast: &AST) {}
fn block(&mut self, _statements: &Vec<Statement>) {}
fn statement(&mut self, _statement: &Statement) {}
fn declaration(&mut self, _declaration: &Declaration) {}
fn signature(&mut self, _signature: &Signature) {}
fn type_declaration(&mut self, _name: &TypeSingletonName, _body: &TypeBody, _mutable: bool) {}
fn type_alias(&mut self, _alias: &Rc<String>, _original: &Rc<String>) {}
fn binding(&mut self, _name: &Rc<String>, _constant: bool, _type_anno: Option<&TypeIdentifier>, _expr: &Expression) {}
fn implemention(&mut self, _type_name: &TypeIdentifier, _interface_name: Option<&TypeSingletonName>, _block: &Vec<Declaration>) {}
fn interface(&mut self, _name: &Rc<String>, _signatures: &Vec<Signature>) {}
fn expression(&mut self, _expression: &Expression) {}
fn expression_kind(&mut self, _kind: &ExpressionKind) {}
fn type_annotation(&mut self, _type_anno: Option<&TypeIdentifier>) {}
fn named_struct(&mut self, _name: &QualifiedName, _fields: &Vec<(Rc<String>, Expression)>) {}
fn call(&mut self, _f: &Expression, _arguments: &Vec<InvocationArgument>) {}
fn index(&mut self, _indexee: &Expression, _indexers: &Vec<Expression>) {}
fn if_expression(&mut self, _discrim: Option<&Expression>, _body: &IfExpressionBody) {}
fn condition_arm(&mut self, _arm: &ConditionArm) {}
fn while_expression(&mut self, _condition: Option<&Expression>, _body: &Block) {}
fn for_expression(&mut self, _enumerators: &Vec<Enumerator>, _body: &ForBody) {}
fn lambda(&mut self, _params: &Vec<FormalParam>, _type_anno: Option<&TypeIdentifier>, _body: &Block) {}
fn invocation_argument(&mut self, _arg: &InvocationArgument) {}
fn formal_param(&mut self, _param: &FormalParam) {}
fn import(&mut self, _import: &ImportSpecifier) {}
fn module(&mut self, _module: &ModuleSpecifier) {}
fn qualified_name(&mut self, _name: &QualifiedName) {}
fn nat_literal(&mut self, _n: u64) {}
fn float_literal(&mut self, _f: f64) {}
fn string_literal(&mut self, _s: &Rc<String>) {}
fn bool_literal(&mut self, _b: bool) {}
fn binexp(&mut self, _op: &BinOp, _lhs: &Expression, _rhs: &Expression) {}
fn prefix_exp(&mut self, _op: &PrefixOp, _arg: &Expression) {}
fn pattern(&mut self, _pat: &Pattern) {}
}


@@ -0,0 +1,41 @@
#![cfg(test)]
use crate::ast::visitor::ASTVisitor;
use crate::ast::walker;
use crate::util::quick_ast;
struct Tester {
count: u64,
float_count: u64
}
impl ASTVisitor for Tester {
fn nat_literal(&mut self, _n: u64) {
self.count += 1;
}
fn float_literal(&mut self, _f: f64) {
self.float_count += 1;
}
}
#[test]
fn foo() {
let mut tester = Tester { count: 0, float_count: 0 };
let (ast, _) = quick_ast(r#"
import gragh
let a = 20 + 84
let b = 28 + 1 + 2 + 2.0
fn heh() {
let m = 9
}
"#);
walker::walk_ast(&mut tester, &ast);
assert_eq!(tester.count, 6);
assert_eq!(tester.float_count, 1);
}


@@ -0,0 +1,269 @@
#![allow(dead_code)]
use std::rc::Rc;
use crate::ast::*;
use crate::ast::visitor::ASTVisitor;
use crate::util::deref_optional_box;
pub fn walk_ast<V: ASTVisitor>(v: &mut V, ast: &AST) {
v.ast(ast);
walk_block(v, &ast.statements);
}
fn walk_block<V: ASTVisitor>(v: &mut V, block: &Vec<Statement>) {
for s in block {
v.statement(s);
statement(v, s);
}
}
fn statement<V: ASTVisitor>(v: &mut V, statement: &Statement) {
use StatementKind::*;
match statement.kind {
Expression(ref expr) => {
v.expression(expr);
expression(v, expr);
},
Declaration(ref decl) => {
v.declaration(decl);
declaration(v, decl);
},
Import(ref import_spec) => v.import(import_spec),
Module(ref module_spec) => {
v.module(module_spec);
walk_block(v, &module_spec.contents);
}
}
}
fn declaration<V: ASTVisitor>(v: &mut V, decl: &Declaration) {
use Declaration::*;
match decl {
FuncSig(sig) => {
v.signature(&sig);
signature(v, &sig);
},
FuncDecl(sig, block) => {
v.signature(&sig);
v.block(&block);
walk_block(v, block);
},
TypeDecl { name, body, mutable } => v.type_declaration(name, body, *mutable),
TypeAlias { alias, original} => v.type_alias(alias, original),
Binding { name, constant, type_anno, expr } => {
v.binding(name, *constant, type_anno.as_ref(), expr);
v.type_annotation(type_anno.as_ref());
v.expression(&expr);
expression(v, &expr);
},
Impl { type_name, interface_name, block } => {
v.implemention(type_name, interface_name.as_ref(), block);
}
Interface { name, signatures } => v.interface(name, signatures),
}
}
fn signature<V: ASTVisitor>(v: &mut V, signature: &Signature) {
for p in signature.params.iter() {
v.formal_param(p);
}
v.type_annotation(signature.type_anno.as_ref());
for p in signature.params.iter() {
formal_param(v, p);
}
}
fn expression<V: ASTVisitor>(v: &mut V, expression: &Expression) {
v.expression_kind(&expression.kind);
v.type_annotation(expression.type_anno.as_ref());
expression_kind(v, &expression.kind);
}
fn call<V: ASTVisitor>(v: &mut V, f: &Expression, args: &Vec<InvocationArgument>) {
v.expression(f);
expression(v, f);
for arg in args.iter() {
v.invocation_argument(arg);
invocation_argument(v, arg);
}
}
fn invocation_argument<V: ASTVisitor>(v: &mut V, arg: &InvocationArgument) {
use InvocationArgument::*;
match arg {
Positional(expr) => {
v.expression(expr);
expression(v, expr);
},
Keyword { expr, .. } => {
v.expression(expr);
expression(v, expr);
},
Ignored => (),
}
}
fn index<V: ASTVisitor>(v: &mut V, indexee: &Expression, indexers: &Vec<Expression>) {
v.expression(indexee);
for i in indexers.iter() {
v.expression(i);
}
}
fn named_struct<V: ASTVisitor>(v: &mut V, n: &QualifiedName, fields: &Vec<(Rc<String>, Expression)>) {
v.qualified_name(n);
for (_, expr) in fields.iter() {
v.expression(expr);
}
}
fn lambda<V: ASTVisitor>(v: &mut V, params: &Vec<FormalParam>, type_anno: Option<&TypeIdentifier>, body: &Block) {
for param in params {
v.formal_param(param);
formal_param(v, param);
}
v.type_annotation(type_anno);
v.block(body);
walk_block(v, body);
}
fn formal_param<V: ASTVisitor>(v: &mut V, param: &FormalParam) {
param.default.as_ref().map(|p| {
v.expression(p);
expression(v, p);
});
v.type_annotation(param.anno.as_ref());
}
fn expression_kind<V: ASTVisitor>(v: &mut V, expression_kind: &ExpressionKind) {
use ExpressionKind::*;
match expression_kind {
NatLiteral(n) => v.nat_literal(*n),
FloatLiteral(f) => v.float_literal(*f),
StringLiteral(s) => v.string_literal(s),
BoolLiteral(b) => v.bool_literal(*b),
BinExp(op, lhs, rhs) => {
v.binexp(op, lhs, rhs);
expression(v, lhs);
expression(v, rhs);
},
PrefixExp(op, arg) => {
v.prefix_exp(op, arg);
expression(v, arg);
}
TupleLiteral(exprs) => {
for expr in exprs {
v.expression(expr);
expression(v, expr);
}
},
Value(name) => v.qualified_name(name),
NamedStruct { name, fields } => {
v.named_struct(name, fields);
named_struct(v, name, fields);
}
Call { f, arguments } => {
v.call(f, arguments);
call(v, f, arguments);
},
Index { indexee, indexers } => {
v.index(indexee, indexers);
index(v, indexee, indexers);
},
IfExpression { discriminator, body } => {
v.if_expression(deref_optional_box(discriminator), body);
discriminator.as_ref().map(|d| expression(v, d));
if_expression_body(v, body);
},
WhileExpression { condition, body } => v.while_expression(deref_optional_box(condition), body),
ForExpression { enumerators, body } => v.for_expression(enumerators, body),
Lambda { params , type_anno, body } => {
v.lambda(params, type_anno.as_ref(), body);
lambda(v, params, type_anno.as_ref(), body);
},
ListLiteral(exprs) => {
for expr in exprs {
v.expression(expr);
expression(v, expr);
}
},
}
}
fn if_expression_body<V: ASTVisitor>(v: &mut V, body: &IfExpressionBody) {
use IfExpressionBody::*;
match body {
SimpleConditional { then_case, else_case } => {
walk_block(v, then_case);
else_case.as_ref().map(|block| walk_block(v, block));
},
SimplePatternMatch { pattern, then_case, else_case } => {
v.pattern(pattern);
walk_pattern(v, pattern);
walk_block(v, then_case);
else_case.as_ref().map(|block| walk_block(v, block));
},
CondList(arms) => {
for arm in arms {
v.condition_arm(arm);
condition_arm(v, arm);
}
}
}
}
fn condition_arm<V: ASTVisitor>(v: &mut V, arm: &ConditionArm) {
use Condition::*;
v.condition_arm(arm);
match arm.condition {
Pattern(ref pat) => {
v.pattern(pat);
walk_pattern(v, pat);
},
TruncatedOp(ref _binop, ref expr) => {
v.expression(expr);
expression(v, expr);
},
Expression(ref expr) => {
v.expression(expr);
expression(v, expr);
},
_ => ()
}
arm.guard.as_ref().map(|guard| {
v.expression(guard);
expression(v, guard);
});
v.block(&arm.body);
walk_block(v, &arm.body);
}
fn walk_pattern<V: ASTVisitor>(v: &mut V, pat: &Pattern) {
use Pattern::*;
match pat {
TuplePattern(patterns) => {
for pat in patterns {
v.pattern(pat);
walk_pattern(v, pat);
}
},
TupleStruct(qualified_name, patterns) => {
v.qualified_name(qualified_name);
for pat in patterns {
v.pattern(pat);
walk_pattern(v, pat);
}
},
Record(qualified_name, name_and_patterns) => {
v.qualified_name(qualified_name);
for (_, pat) in name_and_patterns {
v.pattern(pat);
walk_pattern(v, pat);
}
},
VarOrName(qualified_name) => {
v.qualified_name(qualified_name);
},
_ => ()
}
}
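
The TODO earlier in this diff talks about implementing the visitor twice: the non-value-returning, default-method style (which is what `ASTVisitor` plus `walk_ast` above implement) and a value-returning one. A sketch of the value-returning style on a toy expression type, purely to make the contrast concrete; none of these names exist in the crate:

```rust
// Value-returning fold on a toy expression type, for comparison with the
// hook-style ASTVisitor above. Illustrative only.
enum Expr {
    Nat(u64),
    Add(Box<Expr>, Box<Expr>),
}

/// Every case must be handled and every case produces a value.
trait ExprFolder<T> {
    fn nat(&mut self, n: u64) -> T;
    fn add(&mut self, lhs: T, rhs: T) -> T;
}

fn fold_expr<T, F: ExprFolder<T>>(f: &mut F, expr: &Expr) -> T {
    match expr {
        Expr::Nat(n) => f.nat(*n),
        Expr::Add(lhs, rhs) => {
            let l = fold_expr(f, lhs);
            let r = fold_expr(f, rhs);
            f.add(l, r)
        }
    }
}

struct Evaluator;
impl ExprFolder<u64> for Evaluator {
    fn nat(&mut self, n: u64) -> u64 { n }
    fn add(&mut self, lhs: u64, rhs: u64) -> u64 { lhs + rhs }
}

fn main() {
    let expr = Expr::Add(Box::new(Expr::Nat(2)), Box::new(Expr::Nat(40)));
    assert_eq!(fold_expr(&mut Evaluator, &expr), 42);
    println!("ok");
}
```

The trade-off is the usual one: the fold forces every case to be handled and to produce a value, while the hook style lets a pass override only the two or three node kinds it cares about and leave the rest as no-ops.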


@@ -1,27 +1,25 @@
use std::cell::RefCell;
use std::rc::Rc;
use std::fmt::Write;
use std::io;
use itertools::Itertools;
use crate::schala::SymbolTableHandle;
use crate::util::ScopeStack;
use crate::reduced_ast::{BoundVars, ReducedAST, Stmt, Expr, Lit, Func, Alternative, Subpattern};
use crate::symbol_table::{SymbolSpec, Symbol, SymbolTable, ScopeSegment, ScopeSegmentKind, FullyQualifiedSymbolName};
use crate::symbol_table::{SymbolSpec, Symbol, SymbolTable, FullyQualifiedSymbolName};
use crate::builtin::Builtin;
mod test;
pub struct State<'a> {
values: ScopeStack<'a, Rc<String>, ValueEntry>,
symbol_table_handle: Rc<RefCell<SymbolTable>>,
}
impl<'a> State<'a> {
pub fn new(symbol_table_handle: Rc<RefCell<SymbolTable>>) -> State<'a> {
pub fn new() -> State<'a> {
let values = ScopeStack::new(Some(format!("global")));
State { values, symbol_table_handle }
State { values }
}
pub fn debug_print(&self) -> String {
@@ -31,7 +29,6 @@ impl<'a> State<'a> {
fn new_frame(&'a self, items: &'a Vec<Node>, bound_vars: &BoundVars) -> State<'a> {
let mut inner_state = State {
values: self.values.new_scope(None),
symbol_table_handle: self.symbol_table_handle.clone(),
};
for (bound_var, val) in bound_vars.iter().zip(items.iter()) {
if let Some(bv) = bound_var.as_ref() {
@@ -70,12 +67,12 @@ fn paren_wrapped_vec(terms: impl Iterator<Item=String>) -> String {
impl Node {
fn to_repl(&self, symbol_table: &SymbolTable) -> String {
fn to_repl(&self) -> String {
match self {
Node::Expr(e) => e.to_repl(symbol_table),
Node::Expr(e) => e.to_repl(),
Node::PrimObject { name, items, .. } if items.len() == 0 => format!("{}", name),
Node::PrimObject { name, items, .. } => format!("{}{}", name, paren_wrapped_vec(items.iter().map(|x| x.to_repl(symbol_table)))),
Node::PrimTuple { items } => format!("{}", paren_wrapped_vec(items.iter().map(|x| x.to_repl(symbol_table)))),
Node::PrimObject { name, items, .. } => format!("{}{}", name, paren_wrapped_vec(items.iter().map(|x| x.to_repl()))),
Node::PrimTuple { items } => format!("{}", paren_wrapped_vec(items.iter().map(|x| x.to_repl()))),
}
}
fn is_true(&self) -> bool {
@@ -100,12 +97,10 @@ impl Expr {
fn to_node(self) -> Node {
Node::Expr(self)
}
fn to_repl(&self, symbol_table: &SymbolTable) -> String {
fn to_repl(&self) -> String {
use self::Lit::*;
use self::Func::*;
let _ = symbol_table;
match self {
Expr::Lit(ref l) => match l {
Nat(n) => format!("{}", n),
@@ -122,7 +117,7 @@ impl Expr {
Expr::Constructor { type_name, arity, .. } => {
format!("<constructor for `{}` arity {}>", type_name, arity)
},
Expr::Tuple(exprs) => paren_wrapped_vec(exprs.iter().map(|x| x.to_repl(symbol_table))),
Expr::Tuple(exprs) => paren_wrapped_vec(exprs.iter().map(|x| x.to_repl())),
_ => format!("{:?}", self),
}
}
@@ -157,8 +152,7 @@ impl<'a> State<'a> {
for statement in ast.0 {
match self.statement(statement) {
Ok(Some(ref output)) if repl => {
let ref symbol_table = self.symbol_table_handle.borrow();
acc.push(Ok(output.to_repl(symbol_table)))
acc.push(Ok(output.to_repl()))
},
Ok(_) => (),
Err(error) => {
@@ -212,7 +206,10 @@ impl<'a> State<'a> {
Node::Expr(expr) => match expr {
literal @ Lit(_) => Ok(Node::Expr(literal)),
Call { box f, args } => self.call_expression(f, args),
Sym(v) => self.handle_sym(v),
Sym(name) => Ok(match self.values.lookup(&name) {
Some(ValueEntry::Binding { val, .. }) => val.clone(),
None => return Err(format!("Could not look up symbol {}", name))
}),
Constructor { arity, ref name, tag, .. } if arity == 0 => Ok(Node::PrimObject { name: name.clone(), tag, items: vec![] }),
constructor @ Constructor { .. } => Ok(Node::Expr(constructor)),
func @ Func(_) => Ok(Node::Expr(func)),
@@ -264,7 +261,6 @@ impl<'a> State<'a> {
}
let mut func_state = State {
values: self.values.new_scope(name.map(|n| format!("{}", n))),
symbol_table_handle: self.symbol_table_handle.clone(),
};
for (param, val) in params.into_iter().zip(args.into_iter()) {
let val = func_state.expression(Node::Expr(val))?;
@@ -343,13 +339,11 @@ impl<'a> State<'a> {
/* builtin functions */
(IOPrint, &[ref anything]) => {
let ref symbol_table = self.symbol_table_handle.borrow();
print!("{}", anything.to_repl(symbol_table));
print!("{}", anything.to_repl());
Expr::Unit.to_node()
},
(IOPrintLn, &[ref anything]) => {
let ref symbol_table = self.symbol_table_handle.borrow();
println!("{}", anything.to_repl(symbol_table));
println!("{}", anything.to_repl());
Expr::Unit.to_node()
},
(IOGetLine, &[]) => {
@@ -458,45 +452,4 @@ impl<'a> State<'a> {
}
Err(format!("{:?} failed pattern match", cond))
}
//TODO if I don't need to lookup by name here...
fn handle_sym(&mut self, name: Rc<String>) -> EvalResult<Node> {
use self::ValueEntry::*;
use self::Func::*;
//TODO add a layer of indirection here to talk to the symbol table first, and only then look up
//in the values table
let symbol_table = self.symbol_table_handle.borrow();
let value = symbol_table.lookup_by_fqsn(&fqsn!(name ; tr));
Ok(match value {
Some(Symbol { name, spec, .. }) => match spec {
//TODO I'll need this type_name later to do a table lookup
SymbolSpec::DataConstructor { type_name: _type_name, type_args, .. } => {
if type_args.len() == 0 {
Node::PrimObject { name: name.clone(), tag: 0, items: vec![] }
} else {
return Err(format!("This data constructor thing not done"))
}
},
SymbolSpec::Func(_) => match self.values.lookup(&name) {
Some(Binding { val: Node::Expr(Expr::Func(UserDefined { name, params, body })), .. }) => {
Node::Expr(Expr::Func(UserDefined { name: name.clone(), params: params.clone(), body: body.clone() }))
},
_ => unreachable!(),
},
SymbolSpec::RecordConstructor { .. } => return Err(format!("This shouldn't be a record!")),
SymbolSpec::Binding => match self.values.lookup(&name) {
Some(Binding { val, .. }) => val.clone(),
None => return Err(format!("Symbol {} exists in symbol table but not in evaluator table", name))
}
},
//TODO ideally this should be returning a runtime error if this is ever None, but it's not
//handling all bindings correctly yet
//None => return Err(format!("Couldn't find value {}", name)),
None => match self.values.lookup(&name) {
Some(Binding { val, .. }) => val.clone(),
None => return Err(format!("Couldn't find value {}", name)),
}
})
}
}
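
With `handle_sym` gone, a `Sym(name)` node is resolved purely against the evaluator's `ScopeStack` of values, with no detour through the symbol table. The crate's `ScopeStack` is lifetime-based; the owned, simplified version below only illustrates the lookup semantics that branch relies on (innermost scope wins, and a missing name becomes an eval error):

```rust
// Simplified, owned stand-in for the evaluator's ScopeStack; not the crate's API.
use std::collections::HashMap;

struct ScopeStack<V> {
    scopes: Vec<HashMap<String, V>>,
}

impl<V> ScopeStack<V> {
    fn new() -> Self {
        ScopeStack { scopes: vec![HashMap::new()] }
    }
    fn push_scope(&mut self) {
        self.scopes.push(HashMap::new());
    }
    fn pop_scope(&mut self) {
        self.scopes.pop();
    }
    fn insert(&mut self, name: &str, val: V) {
        self.scopes.last_mut().unwrap().insert(name.to_string(), val);
    }
    /// Innermost binding wins; otherwise walk outward through enclosing scopes.
    fn lookup(&self, name: &str) -> Option<&V> {
        self.scopes.iter().rev().find_map(|scope| scope.get(name))
    }
}

fn main() {
    let mut values = ScopeStack::new();
    values.insert("a", 20);
    values.push_scope();
    values.insert("a", 10);
    assert_eq!(values.lookup("a"), Some(&10)); // shadowed in the inner scope
    values.pop_scope();
    assert_eq!(values.lookup("a"), Some(&20));
    assert_eq!(values.lookup("b"), None); // the "Could not look up symbol" case
    println!("ok");
}
```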


@@ -9,17 +9,17 @@ use crate::reduced_ast::reduce;
use crate::eval::State;
fn evaluate_all_outputs(input: &str) -> Vec<Result<String, String>> {
let symbol_table = Rc::new(RefCell::new(SymbolTable::new()));
let mut state = State::new(symbol_table);
let mut ast = crate::util::quick_ast(input);
state.symbol_table_handle.borrow_mut().add_top_level_symbols(&ast).unwrap();
let (mut ast, source_map) = crate::util::quick_ast(input);
let source_map = Rc::new(RefCell::new(source_map));
let symbol_table = Rc::new(RefCell::new(SymbolTable::new(source_map)));
symbol_table.borrow_mut().add_top_level_symbols(&ast).unwrap();
{
let mut t = &mut state.symbol_table_handle.borrow_mut();
let mut scope_resolver = crate::scope_resolution::ScopeResolver::new(&mut t);
let mut scope_resolver = ScopeResolver::new(symbol_table.clone());
let _ = scope_resolver.resolve(&mut ast);
}
let reduced = reduce(&ast, &state.symbol_table_handle.borrow());
let reduced = reduce(&ast, &symbol_table.borrow());
let mut state = State::new();
let all_output = state.evaluate(reduced, true);
all_output
}
@@ -69,7 +69,7 @@ fn scopes() {
test_in_fresh_env!(scope_ok, "10");
let scope_ok = r#"
let a = 20
fn haha() {
fn queque() {
let a = 10
a
}
@@ -96,26 +96,26 @@ fn full_if_matching() {
let source = r#"
type Option<T> = Some(T) | None
let a = Option::None
if a { is Option::None -> 4, is Option::Some(x) -> x }
if a { is Option::None then 4, is Option::Some(x) then x }
"#;
test_in_fresh_env!(source, "4");
let source = r#"
type Option<T> = Some(T) | None
let a = Option::Some(99)
if a { is Option::None -> 4, is Option::Some(x) -> x }
if a { is Option::None then 4, is Option::Some(x) then x }
"#;
test_in_fresh_env!(source, "99");
let source = r#"
let a = 10
if a { is 10 -> "x", is 4 -> "y" }
if a { is 10 then "x", is 4 then "y" }
"#;
test_in_fresh_env!(source, "\"x\"");
let source = r#"
let a = 10
if a { is 15 -> "x", is 10 -> "y" }
if a { is 15 then "x", is 10 then "y" }
"#;
test_in_fresh_env!(source, "\"y\"");
}
@@ -124,7 +124,7 @@ test_in_fresh_env!(source, "\"y\"");
fn string_pattern() {
let source = r#"
let a = "foo"
if a { is "foo" -> "x", is _ -> "y" }
if a { is "foo" then "x", is _ then "y" }
"#;
test_in_fresh_env!(source, "\"x\"");
}
@@ -134,8 +134,8 @@ fn boolean_pattern() {
let source = r#"
let a = true
if a {
is true -> "x",
is false -> "y"
is true then "x",
is false then "y"
}
"#;
test_in_fresh_env!(source, "\"x\"");
@@ -145,7 +145,7 @@ test_in_fresh_env!(source, "\"x\"");
fn boolean_pattern_2() {
let source = r#"
let a = false
if a { is true -> "x", is false -> "y" }
if a { is true then "x", is false then "y" }
"#;
test_in_fresh_env!(source, "\"y\"");
}
@@ -155,7 +155,7 @@ fn ignore_pattern() {
let source = r#"
type Option<T> = Some(T) | None
if Option::Some(10) {
is _ -> "hella"
is _ then "hella"
}
"#;
test_in_fresh_env!(source, "\"hella\"");
@@ -165,8 +165,8 @@ test_in_fresh_env!(source, "\"hella\"");
fn tuple_pattern() {
let source = r#"
if (1, 2) {
is (1, x) -> x,
is _ -> 99
is (1, x) then x,
is _ then 99
}
"#;
test_in_fresh_env!(source, 2);
@@ -177,8 +177,8 @@ test_in_fresh_env!(source, 2);
fn tuple_pattern_2() {
let source = r#"
if (1, 2) {
is (10, x) -> x,
is (y, x) -> x + y
is (10, x) then x,
is (y, x) then x + y
}
"#;
test_in_fresh_env!(source, 3);
@@ -188,8 +188,8 @@ test_in_fresh_env!(source, 3);
fn tuple_pattern_3() {
let source = r#"
if (1, 5) {
is (10, x) -> x,
is (1, x) -> x
is (10, x) then x,
is (1, x) then x
}
"#;
test_in_fresh_env!(source, 5);
@@ -199,8 +199,8 @@ test_in_fresh_env!(source, 5);
fn tuple_pattern_4() {
let source = r#"
if (1, 5) {
is (10, x) -> x,
is (1, x) -> x,
is (10, x) then x,
is (1, x) then x,
}
"#;
test_in_fresh_env!(source, 5);
@@ -215,21 +215,21 @@ let b = Stuff::Jugs(1, "haha")
let c = Stuff::Mardok
let x = if a {
is Stuff::Mulch(20) -> "x",
is _ -> "ERR"
is Stuff::Mulch(20) then "x",
is _ then "ERR"
}
let y = if b {
is Stuff::Mulch(n) -> "ERR",
is Stuff::Jugs(2, _) -> "ERR",
is Stuff::Jugs(1, s) -> s,
is _ -> "ERR",
is Stuff::Mulch(n) then "ERR",
is Stuff::Jugs(2, _) then "ERR",
is Stuff::Jugs(1, s) then s,
is _ then "ERR",
}
let z = if c {
is Stuff::Jugs(_, _) -> "ERR",
is Stuff::Mardok -> "NIGH",
is _ -> "ERR",
is Stuff::Jugs(_, _) then "ERR",
is Stuff::Mardok then "NIGH",
is _ then "ERR",
}
(x, y, z)
@@ -258,3 +258,12 @@ milta()(10)
"#;
test_in_fresh_env!(source, "43");
}
#[test]
fn import_all() {
let source = r#"
type Option<T> = Some(T) | None
import Option::*
let x = Some(9); if x is Some(q) then { q } else { 0 }"#;
test_in_fresh_env!(source, "9");
}


@@ -1,5 +1,4 @@
#![feature(trace_macros)]
#![feature(custom_attribute)]
//#![feature(unrestricted_attribute_tokens)]
#![feature(slice_patterns, box_patterns, box_syntax)]
@@ -17,6 +16,8 @@ extern crate schala_repl;
extern crate schala_lang_codegen;
extern crate ena;
extern crate derivative;
extern crate colored;
extern crate radix_trie;
macro_rules! bx {
@@ -38,6 +39,7 @@ mod scope_resolution;
mod builtin;
mod reduced_ast;
mod eval;
mod source_map;
mod schala;


@@ -13,7 +13,7 @@
//! ```text
//! program := (statement delimiter)* EOF
//! delimiter := NEWLINE | ";"
//! statement := expression | declaration
//! statement := expression | declaration | import | module
//! block := "{" (statement delimiter)* "}"
//! declaration := type_declaration | func_declaration | binding_declaration | impl_declaration
//! ```
@@ -113,19 +113,27 @@
//! record_pattern_entry := IDENTIFIER | IDENTIFIER ":" Pattern
//! tuple_struct_pattern := qualified_identifier "(" (pattern, ",")* ")"
//! ```
//! ### If expressions
//!
//! TODO: it would be nice if the grammar could capture an incomplete precedence expr in the
//! discriminator
//!
//! ### If-expressions
//! ```text
//! if_expr := "if" discriminator ("then" condititional | "is" simple_pattern_match | guard_block)
//! discriminator := precedence_expr (operator)+
//! conditional := expr_or_block else_clause
//! simple_pattern_match := pattern "then" conditional
//! else_clause := ε | "else" expr_or_block
//! guard_block := "{" (guard_arm, ",")* "}"
//! guard_arm := guard "->" expr_or_block
//! guard := "is" pattern | (operator)+ precedence_expr
//! if_expr := "if" discriminator if_expr_body
//! if_expr_body := ("then" simple_conditional | "is" simple_pattern_match | cond_block)
//! discriminator := ε | expression
//! simple_conditional := expr_or_block else_case
//! simple_pattern_match := pattern "then" simple_conditional
//! else_case := "else" expr_or_block
//!
//! cond_block := "{" (cond_arm comma_or_delimiter)* "}"
//! cond_arm := condition guard "then" expr_or_block | "else" expr_or_block
//! condition := "is" pattern | operator precedence_expr | expression
//! guard := "if" expression
//! comma_or_delimiter := "," | delimiter
//! ```
//!
//!
//! ### While expressions
//! ```text
//! while_expr := "while" while_cond "{" (statement delimiter)* "}"
@@ -141,8 +149,15 @@
//! enumerators := enumerator ("," enumerators)*
//! enumerator := identifier "<-" expression | identifier "=" expression //TODO add guards, etc.
//! ```
//! ## Imports
//! ```text
//! import := 'import' IDENTIFIER (:: IDENTIFIER)* import_suffix
//! import_suffix := ε | '::{' IDENTIFIER (, IDENTIFIER)* '}' | '*' //TODO add qualified, exclusions, etc.
//!
//! ## Modules
//!
//! module := 'module' IDENTIFIER '{' statement* '}'
//! ```
mod test;
use std::rc::Rc;
@@ -152,18 +167,21 @@ use crate::tokenizing::*;
use crate::tokenizing::Kw::*;
use crate::tokenizing::TokenKind::*;
use crate::source_map::Location;
use crate::ast::*;
use crate::schala::SourceMapHandle;
/// Represents a parsing error
#[derive(Debug)]
pub struct ParseError {
pub production_name: Option<String>,
pub msg: String,
pub token: Token
}
impl ParseError {
fn new_with_token<T, M>(msg: M, token: Token) -> ParseResult<T> where M: Into<String> {
Err(ParseError { msg: msg.into(), token })
Err(ParseError { msg: msg.into(), token, production_name: None })
}
}
@@ -184,8 +202,10 @@ pub struct Parser {
parse_level: u32,
restrictions: ParserRestrictions,
id_store: ItemIdStore,
source_map: SourceMapHandle
}
struct ParserRestrictions {
no_struct_literal: bool
}
@@ -193,14 +213,14 @@ struct ParserRestrictions {
struct TokenHandler {
tokens: Vec<Token>,
idx: usize,
end_of_file: (usize, usize),
end_of_file: Location
}
impl TokenHandler {
fn new(tokens: Vec<Token>) -> TokenHandler {
let end_of_file = match tokens.last() {
None => (0, 0),
Some(t) => (t.line_num, t.char_num)
None => Location { line_num: 0, char_num : 0 },
Some(t) => t.location,
};
TokenHandler { idx: 0, tokens, end_of_file }
}
@@ -213,49 +233,56 @@ impl TokenHandler {
self.peek_n(n).kind
}
fn peek(&mut self) -> Token {
self.tokens.get(self.idx).map(|t: &Token| { t.clone()}).unwrap_or(Token { kind: TokenKind::EOF, line_num: self.end_of_file.0, char_num: self.end_of_file.1})
self.tokens.get(self.idx).map(|t: &Token| { t.clone()}).unwrap_or(Token { kind: TokenKind::EOF, location: self.end_of_file })
}
/// calling peek_n(0) is the same thing as peek()
fn peek_n(&mut self, n: usize) -> Token {
self.tokens.get(self.idx + n).map(|t: &Token| { t.clone()}).unwrap_or(Token { kind: TokenKind::EOF, line_num: self.end_of_file.0, char_num: self.end_of_file.1})
self.tokens.get(self.idx + n).map(|t: &Token| { t.clone()}).unwrap_or(Token { kind: TokenKind::EOF, location: self.end_of_file })
}
fn next(&mut self) -> Token {
self.idx += 1;
self.tokens.get(self.idx - 1).map(|t: &Token| { t.clone() }).unwrap_or(Token { kind: TokenKind::EOF, line_num: self.end_of_file.0, char_num: self.end_of_file.1})
self.tokens.get(self.idx - 1).map(|t: &Token| { t.clone() }).unwrap_or(Token { kind: TokenKind::EOF, location: self.end_of_file })
}
}
impl Parser {
/// Create a new parser initialized with some tokens.
pub fn new(initial_input: Vec<Token>) -> Parser {
pub fn new(source_map: SourceMapHandle) -> Parser {
Parser {
token_handler: TokenHandler::new(initial_input),
token_handler: TokenHandler::new(vec![]),
parse_record: vec![],
parse_level: 0,
restrictions: ParserRestrictions { no_struct_literal: false },
id_store: ItemIdStore::new(),
source_map,
}
}
pub fn add_new_tokens(&mut self, new_tokens: Vec<Token>) {
self.token_handler = TokenHandler::new(new_tokens);
}
/// Parse all loaded tokens up to this point.
pub fn parse(&mut self) -> ParseResult<AST> {
self.program()
}
/*
pub fn parse_with_new_tokens(&mut self, new_tokens: Vec<Token>) -> ParseResult<AST> {
}
*/
pub fn format_parse_trace(&self) -> String {
let mut buf = String::new();
buf.push_str("Parse productions:\n");
let mut next_token = None;
for r in self.parse_record.iter() {
let mut indent = String::new();
for _ in 0..r.level {
indent.push(' ');
indent.push('.');
}
buf.push_str(&format!("{}Production `{}`, token: {}\n", indent, r.production_name, r.next_token))
let effective_token = if next_token == Some(&r.next_token) {
"".to_string()
} else {
next_token = Some(&r.next_token);
format!(", next token: {}", r.next_token)
};
buf.push_str(&format!("{}`{}`{}\n", indent, r.production_name, effective_token));
}
buf
}
@@ -343,15 +370,20 @@ impl Parser {
#[recursive_descent_method]
fn statement(&mut self) -> ParseResult<Statement> {
//TODO handle error recovery here
let kind = match self.token_handler.peek().get_kind() {
let tok = self.token_handler.peek();
let kind = match tok.get_kind() {
Keyword(Type) => self.type_declaration().map(|decl| { StatementKind::Declaration(decl) }),
Keyword(Func)=> self.func_declaration().map(|func| { StatementKind::Declaration(func) }),
Keyword(Let) => self.binding_declaration().map(|decl| StatementKind::Declaration(decl)),
Keyword(Interface) => self.interface_declaration().map(|decl| StatementKind::Declaration(decl)),
Keyword(Impl) => self.impl_declaration().map(|decl| StatementKind::Declaration(decl)),
_ => self.expression().map(|expr| { StatementKind::Expression(expr.into()) } ),
Keyword(Import) => self.import_declaration().map(|spec| StatementKind::Import(spec)),
Keyword(Module) => self.module_declaration().map(|spec| StatementKind::Module(spec)),
_ => self.expression().map(|expr| { StatementKind::Expression(expr) } ),
}?;
Ok(Statement { kind, id: self.id_store.fresh() })
let id = self.id_store.fresh();
self.source_map.borrow_mut().add_location(&id, tok.location);
Ok(Statement { kind, id })
}
#[recursive_descent_method]
@@ -384,7 +416,7 @@ impl Parser {
let alias = self.identifier()?;
expect!(self, Equals);
let original = self.identifier()?;
Ok(Declaration::TypeAlias(alias, original))
Ok(Declaration::TypeAlias { alias, original })
}
#[recursive_descent_method]
@@ -469,7 +501,6 @@ impl Parser {
Ok(delimited!(self, LParen, formal_param, Comma, RParen))
}
//TODO needs to support default values
#[recursive_descent_method]
fn formal_param(&mut self) -> ParseResult<FormalParam> {
let name = self.identifier()?;
@@ -480,7 +511,7 @@ impl Parser {
let default = match self.token_handler.peek_kind() {
Equals => {
self.token_handler.next();
Some(self.expression()?.into())
Some(self.expression()?)
},
_ => None
};
@@ -505,7 +536,7 @@ impl Parser {
};
expect!(self, Equals);
let expr = self.expression()?.into();
let expr = self.expression()?;
Ok(Declaration::Binding { name, constant, type_anno, expr })
}
@@ -611,10 +642,11 @@ impl Parser {
let next_tok = self.token_handler.next();
let operation = match BinOp::from_sigil_token(&next_tok.kind) {
Some(sigil) => sigil,
//TODO I think I can fix this unreachable
None => unreachable!()
};
let rhs = self.precedence_expr(new_precedence)?;
lhs = Expression::new(self.id_store.fresh(), ExpressionKind::BinExp(operation, bx!(lhs.into()), bx!(rhs.into())));
lhs = Expression::new(self.id_store.fresh(), ExpressionKind::BinExp(operation, bx!(lhs), bx!(rhs)));
}
self.parse_level -= 1;
Ok(lhs)
@@ -632,7 +664,7 @@ impl Parser {
let prefix_op = PrefixOp::from_str(sigil.as_str()).unwrap();
Ok(Expression::new(
self.id_store.fresh(),
ExpressionKind::PrefixExp(prefix_op, bx!(expr.into()))
ExpressionKind::PrefixExp(prefix_op, bx!(expr))
))
},
_ => self.call_expr()
@@ -644,8 +676,7 @@ impl Parser {
let mut expr = self.index_expr()?;
while let LParen = self.token_handler.peek_kind() {
let arguments = delimited!(self, LParen, invocation_argument, Comma, RParen);
let arguments = arguments.into_iter().collect();
expr = Expression::new(self.id_store.fresh(), ExpressionKind::Call { f: bx!(expr.into()), arguments }); //TODO no type anno is incorrect
expr = Expression::new(self.id_store.fresh(), ExpressionKind::Call { f: bx!(expr), arguments }); //TODO no type anno is incorrect
}
Ok(expr)
@@ -663,16 +694,16 @@ impl Parser {
Equals => {
self.token_handler.next();
self.token_handler.next();
let expr = self.expression()?.into();
let expr = self.expression()?;
InvocationArgument::Keyword { name: s.clone(), expr }
},
_ => {
let expr = self.expression()?;
InvocationArgument::Positional(expr.into())
InvocationArgument::Positional(expr)
}
}
},
_ => InvocationArgument::Positional(self.expression()?.into())
_ => InvocationArgument::Positional(self.expression()?)
})
}
@@ -680,10 +711,9 @@ impl Parser {
fn index_expr(&mut self) -> ParseResult<Expression> {
let primary = self.primary()?;
Ok(if let LSquareBracket = self.token_handler.peek_kind() {
let indexers = delimited!(self, LSquareBracket, expression, Comma, RSquareBracket)
.into_iter().map(|ex| ex.into()).collect();
let indexers = delimited!(self, LSquareBracket, expression, Comma, RSquareBracket);
Expression::new(self.id_store.fresh(), ExpressionKind::Index {
indexee: bx!(Expression::new(self.id_store.fresh(), primary.kind).into()),
indexee: bx!(Expression::new(self.id_store.fresh(), primary.kind)),
indexers,
})
} else {
@@ -708,8 +738,7 @@ impl Parser {
#[recursive_descent_method]
fn list_expr(&mut self) -> ParseResult<Expression> {
let exprs = delimited!(self, LSquareBracket, expression, Comma, RSquareBracket)
.into_iter().map(|ex| ex.into()).collect();
let exprs = delimited!(self, LSquareBracket, expression, Comma, RSquareBracket);
Ok(Expression::new(self.id_store.fresh(), ExpressionKind::ListLiteral(exprs)))
}
@@ -750,10 +779,7 @@ impl Parser {
match inner.len() {
0 => Ok(Expression::new(self.id_store.fresh(), TupleLiteral(vec![]))),
1 => Ok(inner.pop().unwrap()),
_ => {
let inner: Vec<Expression> = inner.into_iter().map(|ex| ex.into()).collect();
Ok(Expression::new(self.id_store.fresh(), TupleLiteral(inner)))
}
_ => Ok(Expression::new(self.id_store.fresh(), TupleLiteral(inner)))
}
};
self.restrictions.no_struct_literal = old_struct_value;
@@ -763,7 +789,8 @@ impl Parser {
#[recursive_descent_method]
fn identifier_expr(&mut self) -> ParseResult<Expression> {
use self::ExpressionKind::*;
let qualified_identifier = self.qualified_identifier()?;
let components = self.qualified_identifier()?;
let qualified_identifier = QualifiedName { id: self.id_store.fresh(), components };
Ok(match self.token_handler.peek_kind() {
LCurlyBrace if !self.restrictions.no_struct_literal => {
let fields = self.record_block()?;
@@ -774,7 +801,7 @@ impl Parser {
}
#[recursive_descent_method]
fn qualified_identifier(&mut self) -> ParseResult<QualifiedName> {
fn qualified_identifier(&mut self) -> ParseResult<Vec<Rc<String>>> {
let mut components = vec![self.identifier()?];
loop {
match (self.token_handler.peek_kind(), self.token_handler.peek_kind_n(1)) {
@@ -785,15 +812,12 @@ impl Parser {
_ => break,
}
}
Ok(QualifiedName { id: self.id_store.fresh(), components })
Ok(components)
}
#[recursive_descent_method]
fn record_block(&mut self) -> ParseResult<Vec<(Rc<String>, Expression)>> {
Ok(
delimited!(self, LCurlyBrace, record_entry, Comma, RCurlyBrace)
.into_iter().map(|(s, ex)| (s, ex.into())).collect()
)
Ok(delimited!(self, LCurlyBrace, record_entry, Comma, RCurlyBrace))
}
#[recursive_descent_method]
@@ -807,53 +831,47 @@ impl Parser {
#[recursive_descent_method]
fn if_expr(&mut self) -> ParseResult<Expression> {
expect!(self, Keyword(Kw::If));
let discriminator = Box::new({
self.restrictions.no_struct_literal = true;
let x = self.discriminator();
self.restrictions.no_struct_literal = false;
x?
});
let body = Box::new(match self.token_handler.peek_kind() {
Keyword(Kw::Then) => self.conditional()?,
Keyword(Kw::Is) => self.simple_pattern_match()? ,
_ => self.guard_block()?
});
let old_struct_value = self.restrictions.no_struct_literal;
self.restrictions.no_struct_literal = true;
let discriminator = if let LCurlyBrace = self.token_handler.peek_kind() {
None
} else {
Some(Box::new(self.expression()?))
};
let body = Box::new(self.if_expr_body()?);
self.restrictions.no_struct_literal = old_struct_value;
Ok(Expression::new(self.id_store.fresh(), ExpressionKind::IfExpression { discriminator, body }))
}
#[recursive_descent_method]
fn discriminator(&mut self) -> ParseResult<Discriminator> {
let lhs = self.prefix_expr()?;
let ref next = self.token_handler.peek_kind();
Ok(if let Some(op) = BinOp::from_sigil_token(next) {
Discriminator::BinOp(lhs.into(), op)
} else {
Discriminator::Simple(lhs.into())
})
fn if_expr_body(&mut self) -> ParseResult<IfExpressionBody> {
match self.token_handler.peek_kind() {
Keyword(Kw::Then) => self.simple_conditional(),
Keyword(Kw::Is) => self.simple_pattern_match(),
_ => self.cond_block(),
}
}
#[recursive_descent_method]
fn conditional(&mut self) -> ParseResult<IfExpressionBody> {
fn simple_conditional(&mut self) -> ParseResult<IfExpressionBody> {
expect!(self, Keyword(Kw::Then));
let then_clause = self.expr_or_block()?;
let else_clause = self.else_clause()?;
Ok(IfExpressionBody::SimpleConditional(then_clause, else_clause))
let then_case = self.expr_or_block()?;
let else_case = self.else_case()?;
Ok(IfExpressionBody::SimpleConditional {then_case, else_case })
}
#[recursive_descent_method]
fn simple_pattern_match(&mut self) -> ParseResult<IfExpressionBody> {
expect!(self, Keyword(Kw::Is));
let pat = self.pattern()?;
let pattern = self.pattern()?;
expect!(self, Keyword(Kw::Then));
let then_clause = self.expr_or_block()?;
let else_clause = self.else_clause()?;
Ok(IfExpressionBody::SimplePatternMatch(pat, then_clause, else_clause))
let then_case = self.expr_or_block()?;
let else_case = self.else_case()?;
Ok(IfExpressionBody::SimplePatternMatch { pattern, then_case, else_case })
}
#[recursive_descent_method]
fn else_clause(&mut self) -> ParseResult<Option<Block>> {
fn else_case(&mut self) -> ParseResult<Option<Block>> {
Ok(if let Keyword(Kw::Else) = self.token_handler.peek_kind() {
self.token_handler.next();
Some(self.expr_or_block()?)
@@ -863,63 +881,67 @@ impl Parser {
}
#[recursive_descent_method]
fn guard_block(&mut self) -> ParseResult<IfExpressionBody> {
//TODO - delimited! isn't sophisticated enough to do thisa
//let guards = delimited!(self, LCurlyBrace, guard_arm, Comma, RCurlyBrace);
fn cond_block(&mut self) -> ParseResult<IfExpressionBody> {
expect!(self, LCurlyBrace);
let mut guards = vec![];
let mut cond_arms = vec![];
loop {
match self.token_handler.peek_kind() {
RCurlyBrace | EOF => break,
Semicolon | Newline => { self.token_handler.next(); continue},
_ => {
let guard_arm = self.guard_arm()?;
guards.push(guard_arm);
loop {
match self.token_handler.peek_kind() {
Semicolon | Newline => { self.token_handler.next(); continue; },
_ => break,
}
cond_arms.push(self.cond_arm()?);
match self.token_handler.peek_kind() {
Comma | Semicolon | Newline => { self.token_handler.next(); continue; },
_ => break,
}
if let RCurlyBrace = self.token_handler.peek_kind() {
break;
}
expect!(self, Comma);
}
}
}
expect!(self, RCurlyBrace);
Ok(IfExpressionBody::GuardList(guards))
Ok(IfExpressionBody::CondList(cond_arms))
}
#[recursive_descent_method]
fn guard_arm(&mut self) -> ParseResult<GuardArm> {
let guard = self.guard()?;
expect!(self, Operator(ref c) if **c == "->");
fn cond_arm(&mut self) -> ParseResult<ConditionArm> {
let (condition, guard) = if let Keyword(Kw::Else) = self.token_handler.peek_kind() {
self.token_handler.next();
(Condition::Else, None)
} else {
let condition = self.condition()?;
let guard = self.guard()?;
expect!(self, Keyword(Kw::Then));
(condition, guard)
};
let body = self.expr_or_block()?;
Ok(GuardArm { guard, body })
Ok(ConditionArm { condition, guard, body })
}
#[recursive_descent_method]
fn guard(&mut self) -> ParseResult<Guard> {
fn condition(&mut self) -> ParseResult<Condition> {
Ok(match self.token_handler.peek_kind() {
Keyword(Kw::Is) => {
self.token_handler.next();
let pat = self.pattern()?;
Guard::Pat(pat)
Condition::Pattern(self.pattern()?)
},
ref tok if BinOp::from_sigil_token(tok).is_some() => {
let op = BinOp::from_sigil_token(&self.token_handler.next().kind).unwrap();
let precedence = op.get_precedence();
let Expression { kind, .. } = self.precedence_expr(precedence)?;
Guard::HalfExpr(HalfExpr { op: Some(op), expr: kind })
let expr = self.expression()?;
Condition::TruncatedOp(op, expr)
},
_ => {
//TODO - I think there's a better way to do this involving the precedence of ->
let Expression { kind, .. } = self.prefix_expr()?;
Guard::HalfExpr(HalfExpr { op: None, expr: kind })
}
Condition::Expression(self.expression()?)
},
})
}
#[recursive_descent_method]
fn guard(&mut self) -> ParseResult<Option<Expression>> {
Ok(match self.token_handler.peek_kind() {
Keyword(Kw::If) => {
self.token_handler.next();
Some(self.expression()?)
},
_ => None
})
}
@@ -937,18 +959,19 @@ impl Parser {
fn simple_pattern(&mut self) -> ParseResult<Pattern> {
Ok(match self.token_handler.peek_kind() {
Identifier(_) => {
let qualified_name = self.qualified_identifier()?;
let components = self.qualified_identifier()?;
let qualified_identifier = QualifiedName { id: self.id_store.fresh(), components };
match self.token_handler.peek_kind() {
LCurlyBrace => {
let members = delimited!(self, LCurlyBrace, record_pattern_entry, Comma, RCurlyBrace);
Pattern::Record(qualified_name, members)
Pattern::Record(qualified_identifier, members)
},
LParen => {
let members = delimited!(self, LParen, pattern, Comma, RParen);
Pattern::TupleStruct(qualified_name, members)
Pattern::TupleStruct(qualified_identifier, members)
},
_ => {
Pattern::VarOrName(qualified_name)
Pattern::VarOrName(qualified_identifier)
},
}
},
@@ -968,7 +991,7 @@ impl Parser {
self.token_handler.next();
Pattern::Literal(PatternLiteral::BoolPattern(false))
},
StrLiteral(s) => {
StrLiteral { s, .. } => {
self.token_handler.next();
Pattern::Literal(PatternLiteral::StringPattern(s))
},
@@ -982,50 +1005,6 @@ impl Parser {
})
}
/*
#[recursive_descent_method]
fn simple_pattern(&mut self) -> ParseResult<Pattern> {
Ok({
let tok = self.token_handler.peek();
match tok.get_kind() {
Identifier(_) => {
let id = self.identifier()?;
match self.token_handler.peek_kind() {
LCurlyBrace => {
let members = delimited!(self, LCurlyBrace, record_pattern_entry, Comma, RCurlyBrace);
Pattern::Record(id, members)
},
LParen => {
let members = delimited!(self, LParen, pattern, Comma, RParen);
Pattern::TupleStruct(id, members)
},
_ => Pattern::Literal(PatternLiteral::VarPattern(id))
}
},
Keyword(Kw::True) => {
self.token_handler.next();
Pattern::Literal(PatternLiteral::BoolPattern(true))
},
Keyword(Kw::False) => {
self.token_handler.next();
Pattern::Literal(PatternLiteral::BoolPattern(false))
},
StrLiteral(s) => {
self.token_handler.next();
Pattern::Literal(PatternLiteral::StringPattern(s))
},
DigitGroup(_) | HexLiteral(_) | BinNumberSigil | Period => self.signed_number_literal()?,
Operator(ref op) if **op == "-" => self.signed_number_literal()?,
Underscore => {
self.token_handler.next();
Pattern::Ignored
},
other => return ParseError::new_with_token(format!("{:?} is not a valid Pattern", other), tok)
}
})
}
*/
#[recursive_descent_method]
fn signed_number_literal(&mut self) -> ParseResult<Pattern> {
let neg = match self.token_handler.peek_kind() {
@@ -1064,7 +1043,7 @@ impl Parser {
LCurlyBrace => self.block(),
_ => {
let expr = self.expression()?;
let s = Statement { id: self.id_store.fresh(), kind: StatementKind::Expression(expr.into()) };
let s = Statement { id: self.id_store.fresh(), kind: StatementKind::Expression(expr) };
Ok(vec![s])
}
}
@@ -1078,7 +1057,7 @@ impl Parser {
self.restrictions.no_struct_literal = true;
let x = self.while_cond();
self.restrictions.no_struct_literal = false;
x?.map(|expr| bx!(expr.into()))
x?.map(|expr| bx!(expr))
};
let body = self.block()?;
Ok(Expression::new(self.id_store.fresh(), WhileExpression {condition, body}))
@@ -1114,7 +1093,7 @@ impl Parser {
fn enumerator(&mut self) -> ParseResult<Enumerator> {
let id = self.identifier()?;
expect!(self, Operator(ref c) if **c == "<-");
let generator = self.expression()?.into();
let generator = self.expression()?;
Ok(Enumerator { id, generator })
}
@@ -1129,7 +1108,7 @@ impl Parser {
},
Keyword(Kw::Return) => {
self.token_handler.next();
MonadicReturn(self.expression()?.into())
MonadicReturn(self.expression()?)
},
_ => return ParseError::new_with_token("for expressions must end in a block or 'return'", tok),
})
@@ -1161,7 +1140,7 @@ impl Parser {
let id = self.id_store.fresh();
Ok(Expression::new(id, BoolLiteral(false)))
},
StrLiteral(s) => {
StrLiteral {s, ..} => {
self.token_handler.next();
let id = self.id_store.fresh();
Ok(Expression::new(id, StringLiteral(s.clone())))
@@ -1231,6 +1210,67 @@ impl Parser {
}
Ok(ds)
}
#[recursive_descent_method]
fn import_declaration(&mut self) -> ParseResult<ImportSpecifier> {
expect!(self, Keyword(Import));
let mut path_components = vec![];
path_components.push(self.identifier()?);
loop {
match (self.token_handler.peek_kind(), self.token_handler.peek_kind_n(1)) {
(Colon, Colon) => {
self.token_handler.next(); self.token_handler.next();
if let Identifier(_) = self.token_handler.peek_kind() {
path_components.push(self.identifier()?);
} else {
break;
}
},
_ => break,
}
}
let imported_names = match self.token_handler.peek_kind() {
LCurlyBrace => {
let names = delimited!(self, LCurlyBrace, identifier, Comma, RCurlyBrace);
ImportedNames::List(names)
},
Operator(ref s) if **s == "*" => {
self.token_handler.next();
ImportedNames::All
},
_ => ImportedNames::LastOfPath
};
Ok(ImportSpecifier {
id: self.id_store.fresh(),
path_components,
imported_names
})
}
#[recursive_descent_method]
fn import_suffix(&mut self) -> ParseResult<ImportedNames> {
Ok(match self.token_handler.peek_kind() {
Operator(ref s) if **s == "*" => {
self.token_handler.next();
ImportedNames::All
},
LCurlyBrace => {
let names = delimited!(self, LCurlyBrace, identifier, Comma, RCurlyBrace);
ImportedNames::List(names)
},
_ => return ParseError::new_with_token("Expected '{{' or '*'", self.token_handler.peek()),
})
}
#[recursive_descent_method]
fn module_declaration(&mut self) -> ParseResult<ModuleSpecifier> {
expect!(self, Keyword(Kw::Module));
let name = self.identifier()?;
let contents = delimited!(self, LCurlyBrace, statement, Newline | Semicolon, RCurlyBrace, nonstrict);
Ok(ModuleSpecifier { name, contents })
}
}
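For orientation, a minimal sketch (illustrative only, not part of this commit) of the surface syntax the new import_declaration, import_suffix, and module_declaration productions are meant to accept; the strings mirror the test inputs added further down in this diff.

// Illustrative inputs only, copied from the new parser tests below.
let import_examples = [
    "import harbinger::draughts::Norgleheim",                  // ImportedNames::LastOfPath
    "import harbinger::draughts::{Norgleheim, Xraksenlaigar}", // ImportedNames::List
    "import bespouri::*",                                       // ImportedNames::All
];
let module_example = "module ephraim {\n  let a = 10\n  fn nah() { 33 }\n}";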
fn parse_binary(digits: String, tok: Token) -> ParseResult<u64> {
@@ -1265,3 +1305,4 @@ fn parse_hex(digits: String, tok: Token) -> ParseResult<u64> {
}
Ok(result)
}


@@ -1,10 +1,10 @@
#![cfg(test)]
use ::std::rc::Rc;
use std::cell::RefCell;
use std::rc::Rc;
use std::str::FromStr;
use super::tokenize;
use super::ParseResult;
use crate::ast::{ItemIdStore, AST, Meta, Expression, Statement, StatementKind, IfExpressionBody, Discriminator, Pattern, PatternLiteral, TypeBody, Enumerator, ForBody, InvocationArgument, FormalParam, PrefixOp, BinOp, QualifiedName};
use super::{Parser, ParseResult, tokenize};
use crate::ast::*;
use super::Declaration::*;
use super::Signature;
use super::TypeIdentifier::*;
@@ -13,9 +13,17 @@ use super::ExpressionKind::*;
use super::Variant::*;
use super::ForBody::*;
fn parse(input: &str) -> ParseResult<AST> {
fn make_parser(input: &str) -> Parser {
let source_map = crate::source_map::SourceMap::new();
let source_map_handle = Rc::new(RefCell::new(source_map));
let tokens: Vec<crate::tokenizing::Token> = tokenize(input);
let mut parser = super::Parser::new(tokens);
let mut parser = super::Parser::new(source_map_handle);
parser.add_new_tokens(tokens);
parser
}
fn parse(input: &str) -> ParseResult<AST> {
let mut parser = make_parser(input);
parser.parse()
}
@@ -57,14 +65,24 @@ macro_rules! decl {
};
}
macro_rules! import {
($import_spec:expr) => {
Statement { id: ItemIdStore::new_id(), kind: StatementKind::Import($import_spec) }
}
}
macro_rules! module {
($module_spec:expr) => {
Statement { id: ItemIdStore::new_id(), kind: StatementKind::Module($module_spec) }
}
}
macro_rules! ex {
($expr_type:expr) => { Expression::new(ItemIdStore::new_id(), $expr_type) };
(m $expr_type:expr) => { Meta::new(Expression::new(ItemIdStore::new_id(), $expr_type)) };
(m $expr_type:expr, $type_anno:expr) => { Meta::new(Expression::with_anno(ItemIdStore::new_id(), $expr_type, $type_anno)) };
($expr_type:expr, $type_anno:expr) => { Expression::with_anno(ItemIdStore::new_id(), $expr_type, $type_anno) };
(s $expr_text:expr) => {
{
let tokens: Vec<crate::tokenizing::Token> = tokenize($expr_text);
let mut parser = super::Parser::new(tokens);
let mut parser = make_parser($expr_text);
parser.expression().unwrap()
}
};
@@ -81,16 +99,14 @@ macro_rules! prefexp {
($op:expr, $lhs:expr) => { PrefixExp(PrefixOp::from_str($op).unwrap(), bx!(Expression::new(ItemIdStore::new_id(), $lhs).into())) }
}
macro_rules! exst {
($expr_type:expr) => { Meta::new(Statement { id: ItemIdStore::new_id(), kind: StatementKind::Expression(Expression::new(ItemIdStore::new_id(), $expr_type).into())}) };
($expr_type:expr, $type_anno:expr) => { Meta::new(Statement { id: ItemIdStore::new_id(), kind: StatementKind::Expression(Expression::with_anno(ItemIdStore::new_id(), $expr_type, $type_anno).into())}) };
($op:expr, $lhs:expr, $rhs:expr) => { Meta::new(
Statement { id: ItemIdStore::new_id(), ,kind: StatementKind::Expression(ex!(binexp!($op, $lhs, $rhs)))}
)};
($expr_type:expr) => { Statement { id: ItemIdStore::new_id(), kind: StatementKind::Expression(Expression::new(ItemIdStore::new_id(), $expr_type).into())} };
($expr_type:expr, $type_anno:expr) => { Statement { id: ItemIdStore::new_id(), kind: StatementKind::Expression(Expression::with_anno(ItemIdStore::new_id(), $expr_type, $type_anno).into())} };
($op:expr, $lhs:expr, $rhs:expr) => { Statement { id: ItemIdStore::new_id(), kind: StatementKind::Expression(ex!(binexp!($op, $lhs, $rhs)))}
};
(s $statement_text:expr) => {
{
let tokens: Vec<crate::tokenizing::Token> = tokenize($statement_text);
let mut parser = super::Parser::new(tokens);
Meta::new(parser.statement().unwrap())
let mut parser = make_parser($statement_text);
parser.statement().unwrap()
}
}
}
@@ -170,15 +186,15 @@ fn parsing_identifiers() {
])
}
*/
parse_test_wrap_ast!("a[b,c]", exst!(Index { indexee: bx!(ex!(m val!("a"))), indexers: vec![ex!(m val!("b")), ex!(m val!("c"))]} ));
parse_test_wrap_ast!("a[b,c]", exst!(Index { indexee: bx!(ex!(val!("a"))), indexers: vec![ex!(val!("b")), ex!(val!("c"))]} ));
parse_test_wrap_ast!("None", exst!(val!("None")));
parse_test_wrap_ast!("Pandas { a: x + y }",
exst!(NamedStruct { name: Meta::new(qname!(Pandas)), fields: vec![(rc!(a), ex!(m binexp!("+", val!("x"), val!("y"))))]})
exst!(NamedStruct { name: qname!(Pandas), fields: vec![(rc!(a), ex!(binexp!("+", val!("x"), val!("y"))))]})
);
parse_test_wrap_ast! { "Pandas { a: n, b: q, }",
exst!(NamedStruct { name: Meta::new(qname!(Pandas)), fields:
vec![(rc!(a), ex!(m val!("n"))), (rc!(b), ex!(m val!("q")))]
exst!(NamedStruct { name: qname!(Pandas), fields:
vec![(rc!(a), ex!(val!("n"))), (rc!(b), ex!(val!("q")))]
}
)
};
@@ -189,14 +205,14 @@ fn parsing_identifiers() {
fn qualified_identifiers() {
parse_test_wrap_ast! {
"let q_q = Yolo::Swaggins",
Meta::new(decl!(Binding { name: rc!(q_q), constant: true, type_anno: None,
expr: Meta::new(Expression::new(ItemIdStore::new_id(), Value(qname!(Yolo, Swaggins)))),
}))
decl!(Binding { name: rc!(q_q), constant: true, type_anno: None,
expr: Expression::new(ItemIdStore::new_id(), Value(qname!(Yolo, Swaggins))),
})
}
parse_test_wrap_ast! {
"thing::item::call()",
exst!(Call { f: bx![ex!(m Value(qname!(thing, item, call)))], arguments: vec![] })
exst!(Call { f: bx![ex!(Value(qname!(thing, item, call)))], arguments: vec![] })
}
}
@@ -224,36 +240,36 @@ fn parsing_complicated_operators() {
#[test]
fn parsing_functions() {
parse_test_wrap_ast!("fn oi()", Meta::new(decl!(FuncSig(Signature { name: rc!(oi), operator: false, params: vec![], type_anno: None }))));
parse_test_wrap_ast!("oi()", exst!(Call { f: bx!(ex!(m val!("oi"))), arguments: vec![] }));
parse_test_wrap_ast!("fn oi()", decl!(FuncSig(Signature { name: rc!(oi), operator: false, params: vec![], type_anno: None })));
parse_test_wrap_ast!("oi()", exst!(Call { f: bx!(ex!(val!("oi"))), arguments: vec![] }));
parse_test_wrap_ast!("oi(a, 2 + 2)", exst!(Call
{ f: bx!(ex!(m val!("oi"))),
arguments: vec![inv!(ex!(m val!("a"))), inv!(ex!(m binexp!("+", NatLiteral(2), NatLiteral(2)))).into()]
{ f: bx!(ex!(val!("oi"))),
arguments: vec![inv!(ex!(val!("a"))), inv!(ex!(binexp!("+", NatLiteral(2), NatLiteral(2)))).into()]
}));
parse_error!("a(b,,c)");
parse_test_wrap_ast!("fn a(b, c: Int): Int", Meta::new(decl!(
FuncSig(Signature { name: rc!(a), operator: false, params: vec![
FormalParam { name: rc!(b), anno: None, default: None },
FormalParam { name: rc!(c), anno: Some(ty!("Int")), default: None }
], type_anno: Some(ty!("Int")) }))));
parse_test_wrap_ast!("fn a(b, c: Int): Int", decl!(
FuncSig(Signature { name: rc!(a), operator: false, params: vec![
FormalParam { name: rc!(b), anno: None, default: None },
FormalParam { name: rc!(c), anno: Some(ty!("Int")), default: None }
], type_anno: Some(ty!("Int")) })));
parse_test_wrap_ast!("fn a(x) { x() }", Meta::new(decl!(
FuncDecl(Signature { name: rc!(a), operator: false, params: vec![FormalParam { name: rc!(x), anno: None, default: None }], type_anno: None },
vec![exst!(Call { f: bx!(ex!(m val!("x"))), arguments: vec![] })]))));
parse_test_wrap_ast!("fn a(x) {\n x() }", Meta::new(decl!(
FuncDecl(Signature { name: rc!(a), operator: false, params: vec![FormalParam { name: rc!(x), anno: None, default: None }], type_anno: None },
vec![exst!(Call { f: bx!(ex!(m val!("x"))), arguments: vec![] })]))));
parse_test_wrap_ast!("fn a(x) { x() }", decl!(
FuncDecl(Signature { name: rc!(a), operator: false, params: vec![FormalParam { name: rc!(x), anno: None, default: None }], type_anno: None },
vec![exst!(Call { f: bx!(ex!(val!("x"))), arguments: vec![] })])));
parse_test_wrap_ast!("fn a(x) {\n x() }", decl!(
FuncDecl(Signature { name: rc!(a), operator: false, params: vec![FormalParam { name: rc!(x), anno: None, default: None }], type_anno: None },
vec![exst!(Call { f: bx!(ex!(val!("x"))), arguments: vec![] })])));
let multiline = r#"
fn a(x) {
x()
}
"#;
parse_test_wrap_ast!(multiline, Meta::new(decl!(
parse_test_wrap_ast!(multiline, decl!(
FuncDecl(Signature { name: rc!(a), operator: false, params: vec![FormalParam { name: rc!(x), default: None, anno: None }], type_anno: None },
vec![exst!(Call { f: bx!(ex!(m val!("x"))), arguments: vec![] })]))));
vec![exst!(Call { f: bx!(ex!(val!("x"))), arguments: vec![] })])));
let multiline2 = r#"
fn a(x) {
@@ -261,21 +277,21 @@ x()
}
"#;
parse_test_wrap_ast!(multiline2, Meta::new(decl!(
parse_test_wrap_ast!(multiline2, decl!(
FuncDecl(Signature { name: rc!(a), operator: false, params: vec![FormalParam { name: rc!(x), default: None, anno: None }], type_anno: None },
vec![exst!(s "x()")]))));
vec![exst!(s "x()")])));
}
#[test]
fn functions_with_default_args() {
parse_test_wrap_ast! {
"fn func(x: Int, y: Int = 4) { }",
Meta::new(decl!(
decl!(
FuncDecl(Signature { name: rc!(func), operator: false, type_anno: None, params: vec![
FormalParam { name: rc!(x), default: None, anno: Some(ty!("Int")) },
FormalParam { name: rc!(y), default: Some(Meta::new(ex!(s "4"))), anno: Some(ty!("Int")) }
FormalParam { name: rc!(y), default: Some(ex!(s "4")), anno: Some(ty!("Int")) }
]}, vec![])
))
)
};
}
@@ -292,11 +308,11 @@ fn parsing_strings() {
#[test]
fn parsing_types() {
parse_test_wrap_ast!("type Yolo = Yolo", Meta::new(decl!(TypeDecl { name: tys!("Yolo"), body: TypeBody(vec![UnitStruct(rc!(Yolo))]), mutable: false} )));
parse_test_wrap_ast!("type mut Yolo = Yolo", Meta::new(decl!(TypeDecl { name: tys!("Yolo"), body: TypeBody(vec![UnitStruct(rc!(Yolo))]), mutable: true} )));
parse_test_wrap_ast!("type alias Sex = Drugs", Meta::new(decl!(TypeAlias(rc!(Sex), rc!(Drugs)))));
parse_test_wrap_ast!("type Yolo = Yolo", decl!(TypeDecl { name: tys!("Yolo"), body: TypeBody(vec![UnitStruct(rc!(Yolo))]), mutable: false} ));
parse_test_wrap_ast!("type mut Yolo = Yolo", decl!(TypeDecl { name: tys!("Yolo"), body: TypeBody(vec![UnitStruct(rc!(Yolo))]), mutable: true} ));
parse_test_wrap_ast!("type alias Sex = Drugs", decl!(TypeAlias { alias: rc!(Sex), original: rc!(Drugs) }));
parse_test_wrap_ast!("type Sanchez = Miguel | Alejandro(Int, Option<a>) | Esperanza { a: Int, b: String }",
Meta::new(decl!(TypeDecl {
decl!(TypeDecl {
name: tys!("Sanchez"),
body: TypeBody(vec![
UnitStruct(rc!(Miguel)),
@@ -313,27 +329,27 @@ fn parsing_types() {
}
]),
mutable: false
})));
}));
parse_test_wrap_ast! {
"type Jorge<a> = Diego | Kike(a)",
Meta::new(decl!(TypeDecl{
decl!(TypeDecl{
name: TypeSingletonName { name: rc!(Jorge), params: vec![Singleton(TypeSingletonName { name: rc!(a), params: vec![] })] },
body: TypeBody(vec![UnitStruct(rc!(Diego)), TupleStruct(rc!(Kike), vec![Singleton(TypeSingletonName { name: rc!(a), params: vec![] })])]),
mutable: false
}
))
)
};
}
#[test]
fn parsing_bindings() {
parse_test_wrap_ast!("let mut a = 10", Meta::new(decl!(Binding { name: rc!(a), constant: false, type_anno: None, expr: ex!(m NatLiteral(10)) } )));
parse_test_wrap_ast!("let a = 2 + 2", Meta::new(decl!(Binding { name: rc!(a), constant: true, type_anno: None, expr: ex!(m binexp!("+", NatLiteral(2), NatLiteral(2))) }) ));
parse_test_wrap_ast!("let a: Nat = 2 + 2", Meta::new(decl!(
Binding { name: rc!(a), constant: true, type_anno: Some(Singleton(TypeSingletonName { name: rc!(Nat), params: vec![] })),
expr: Meta::new(ex!(binexp!("+", NatLiteral(2), NatLiteral(2)))) }
)));
parse_test_wrap_ast!("let mut a = 10", decl!(Binding { name: rc!(a), constant: false, type_anno: None, expr: ex!(NatLiteral(10)) } ));
parse_test_wrap_ast!("let a = 2 + 2", decl!(Binding { name: rc!(a), constant: true, type_anno: None, expr: ex!(binexp!("+", NatLiteral(2), NatLiteral(2))) }));
parse_test_wrap_ast!("let a: Nat = 2 + 2", decl!(
Binding { name: rc!(a), constant: true, type_anno: Some(Singleton(TypeSingletonName { name: rc!(Nat), params: vec![] })),
expr: ex!(binexp!("+", NatLiteral(2), NatLiteral(2))) }
));
}
#[test]
@@ -341,14 +357,14 @@ fn parsing_block_expressions() {
parse_test_wrap_ast! {
"if a() then { b(); c() }", exst!(
IfExpression {
discriminator: bx! {
Discriminator::Simple(ex!(m Call { f: bx!(ex!(m val!("a"))), arguments: vec![]}))
},
discriminator: Some(bx! {
ex!(Call { f: bx!(ex!(val!("a"))), arguments: vec![]})
}),
body: bx! {
IfExpressionBody::SimpleConditional(
vec![exst!(Call { f: bx!(ex!(m val!("b"))), arguments: vec![]}), exst!(Call { f: bx!(ex!(m val!("c"))), arguments: vec![] })],
None
)
IfExpressionBody::SimpleConditional {
then_case: vec![exst!(Call { f: bx!(ex!(val!("b"))), arguments: vec![]}), exst!(Call { f: bx!(ex!(val!("c"))), arguments: vec![] })],
else_case: None,
}
}
}
)
@@ -357,16 +373,14 @@ fn parsing_block_expressions() {
parse_test_wrap_ast! {
"if a() then { b(); c() } else { q }", exst!(
IfExpression {
discriminator: bx! {
Discriminator::Simple(ex!(m Call { f: bx!(ex!(m val!("a"))), arguments: vec![]}))
},
discriminator: Some(bx! {
ex!(Call { f: bx!(ex!(val!("a"))), arguments: vec![]})
}),
body: bx! {
IfExpressionBody::SimpleConditional(
vec![exst!(Call { f: bx!(ex!(m val!("b"))), arguments: vec![]}), exst!(Call { f: bx!(ex!(m val!("c"))), arguments: vec![] })],
Some(
vec![exst!(val!("q"))],
)
)
IfExpressionBody::SimpleConditional {
then_case: vec![exst!(Call { f: bx!(ex!(val!("b"))), arguments: vec![]}), exst!(Call { f: bx!(ex!(val!("c"))), arguments: vec![] })],
else_case: Some(vec![exst!(val!("q"))]),
}
}
}
)
@@ -407,69 +421,66 @@ fn parsing_block_expressions() {
#[test]
fn parsing_interfaces() {
parse_test_wrap_ast!("interface Unglueable { fn unglue(a: Glue); fn mar(): Glue }",
Meta::new(decl!(Interface {
name: rc!(Unglueable),
signatures: vec![
Signature {
name: rc!(unglue),
operator: false,
params: vec![
FormalParam { name: rc!(a), anno: Some(Singleton(TypeSingletonName { name: rc!(Glue), params: vec![] })), default: None }
],
type_anno: None
},
Signature { name: rc!(mar), operator: false, params: vec![], type_anno: Some(Singleton(TypeSingletonName { name: rc!(Glue), params: vec![] })) },
]
}))
decl!(Interface {
name: rc!(Unglueable),
signatures: vec![
Signature {
name: rc!(unglue),
operator: false,
params: vec![
FormalParam { name: rc!(a), anno: Some(Singleton(TypeSingletonName { name: rc!(Glue), params: vec![] })), default: None }
],
type_anno: None
},
Signature { name: rc!(mar), operator: false, params: vec![], type_anno: Some(Singleton(TypeSingletonName { name: rc!(Glue), params: vec![] })) },
]
})
);
}
#[test]
fn parsing_impls() {
parse_test_wrap_ast!("impl Heh { fn yolo(); fn swagg(); }",
Meta::new(
decl!(Impl {
type_name: ty!("Heh"),
interface_name: None,
block: vec![
FuncSig(Signature { name: rc!(yolo), operator: false, params: vec![], type_anno: None }),
FuncSig(Signature { name: rc!(swagg), operator: false, params: vec![], type_anno: None })
] })));
decl!(Impl {
type_name: ty!("Heh"),
interface_name: None,
block: vec![
FuncSig(Signature { name: rc!(yolo), operator: false, params: vec![], type_anno: None }),
FuncSig(Signature { name: rc!(swagg), operator: false, params: vec![], type_anno: None })
] }));
parse_test_wrap_ast!("impl Mondai for Lollerino { fn yolo(); fn swagg(); }",
Meta::new(decl!(Impl {
type_name: ty!("Lollerino"),
interface_name: Some(TypeSingletonName { name: rc!(Mondai), params: vec![] }),
block: vec![
FuncSig(Signature { name: rc!(yolo), operator: false, params: vec![], type_anno: None}),
FuncSig(Signature { name: rc!(swagg), operator: false, params: vec![], type_anno: None })
] })));
decl!(Impl {
type_name: ty!("Lollerino"),
interface_name: Some(TypeSingletonName { name: rc!(Mondai), params: vec![] }),
block: vec![
FuncSig(Signature { name: rc!(yolo), operator: false, params: vec![], type_anno: None}),
FuncSig(Signature { name: rc!(swagg), operator: false, params: vec![], type_anno: None })
] }));
parse_test_wrap_ast!("impl Hella<T> for (Alpha, Omega) { }",
Meta::new(decl!(Impl {
type_name: Tuple(vec![ty!("Alpha"), ty!("Omega")]),
interface_name: Some(TypeSingletonName { name: rc!(Hella), params: vec![ty!("T")] }),
block: vec![]
}))
decl!(Impl {
type_name: Tuple(vec![ty!("Alpha"), ty!("Omega")]),
interface_name: Some(TypeSingletonName { name: rc!(Hella), params: vec![ty!("T")] }),
block: vec![]
})
);
parse_test_wrap_ast!("impl Option<WTFMate> { fn oi() }",
Meta::new(
decl!(Impl {
type_name: Singleton(TypeSingletonName { name: rc!(Option), params: vec![ty!("WTFMate")]}),
interface_name: None,
block: vec![
FuncSig(Signature { name: rc!(oi), operator: false, params: vec![], type_anno: None }),
]
})));
decl!(Impl {
type_name: Singleton(TypeSingletonName { name: rc!(Option), params: vec![ty!("WTFMate")]}),
interface_name: None,
block: vec![
FuncSig(Signature { name: rc!(oi), operator: false, params: vec![], type_anno: None }),
]
}));
}
#[test]
fn parsing_type_annotations() {
parse_test_wrap_ast!("let a = b : Int",
Meta::new(
decl!(Binding { name: rc!(a), constant: true, type_anno: None, expr:
ex!(m val!("b"), ty!("Int")) })));
ex!(val!("b"), ty!("Int")) }));
parse_test_wrap_ast!("a : Int",
exst!(val!("a"), ty!("Int"))
@@ -511,7 +522,7 @@ fn parsing_lambdas() {
);
parse_test_wrap_ast! { r#"\(x){y}(1)"#,
exst!(Call { f: bx!(ex!(m
exst!(Call { f: bx!(ex!(
Lambda {
params: vec![
FormalParam { name: rc!(x), anno: None, default: None }
@@ -519,7 +530,7 @@ fn parsing_lambdas() {
type_anno: None,
body: vec![exst!(s "y")] }
)),
arguments: vec![inv!(ex!(m NatLiteral(1))).into()] })
arguments: vec![inv!(ex!(NatLiteral(1))).into()] })
};
parse_test_wrap_ast! {
@@ -566,8 +577,8 @@ fn more_advanced_lambdas() {
exst!(s r"fn wahoo() { let a = 10; \(x) { x + a } }"),
exst! {
Call {
f: bx!(ex!(m Call { f: bx!(ex!(m val!("wahoo"))), arguments: vec![] })),
arguments: vec![inv!(ex!(m NatLiteral(3))).into()],
f: bx!(ex!(Call { f: bx!(ex!(val!("wahoo"))), arguments: vec![] })),
arguments: vec![inv!(ex!(NatLiteral(3))).into()],
}
}
]
@@ -579,7 +590,7 @@ fn more_advanced_lambdas() {
fn list_literals() {
parse_test_wrap_ast! {
"[1,2]",
exst!(ListLiteral(vec![ex!(m NatLiteral(1)), ex!(m NatLiteral(2))]))
exst!(ListLiteral(vec![ex!(NatLiteral(1)), ex!(NatLiteral(2))]))
};
}
@@ -592,7 +603,7 @@ fn while_expr() {
parse_test_wrap_ast! {
"while a == b { }",
exst!(WhileExpression { condition: Some(bx![ex![m binexp!("==", val!("a"), val!("b"))]]), body: vec![] })
exst!(WhileExpression { condition: Some(bx![ex![binexp!("==", val!("a"), val!("b"))]]), body: vec![] })
}
}
@@ -619,31 +630,41 @@ fn patterns() {
parse_test_wrap_ast! {
"if x is Some(a) then { 4 } else { 9 }", exst!(
IfExpression {
discriminator: bx!(Discriminator::Simple(Meta::new(ex!(s "x")))),
body: bx!(IfExpressionBody::SimplePatternMatch(Pattern::TupleStruct(qname!(Some),
vec![Pattern::VarOrName(qname!(a))]), vec![exst!(s "4")], Some(vec![exst!(s "9")]))) }
discriminator: Some(bx!(ex!(s "x"))),
body: bx!(IfExpressionBody::SimplePatternMatch {
pattern: Pattern::TupleStruct(qname!(Some), vec![Pattern::VarOrName(qname!(a))]),
then_case: vec![exst!(s "4")],
else_case: Some(vec![exst!(s "9")]) })
}
)
}
parse_test_wrap_ast! {
"if x is Some(a) then 4 else 9", exst!(
IfExpression {
discriminator: bx!(Discriminator::Simple(Meta::new(ex!(s "x")))),
body: bx!(IfExpressionBody::SimplePatternMatch(Pattern::TupleStruct(qname!(Some),
vec![Pattern::VarOrName(qname!(a))]), vec![exst!(s "4")], Some(vec![exst!(s "9")]))) }
discriminator: Some(bx!(ex!(s "x"))),
body: bx!(IfExpressionBody::SimplePatternMatch {
pattern: Pattern::TupleStruct(qname!(Some), vec![Pattern::VarOrName(qname!(a))]),
then_case: vec![exst!(s "4")],
else_case: Some(vec![exst!(s "9")]) }
)
}
)
}
parse_test_wrap_ast! {
"if x is Something { a, b: x } then { 4 } else { 9 }", exst!(
IfExpression {
discriminator: bx!(Discriminator::Simple(Meta::new(ex!(s "x")))),
body: bx!(IfExpressionBody::SimplePatternMatch(
Pattern::Record(qname!(Something), vec![
(rc!(a),Pattern::Literal(PatternLiteral::StringPattern(rc!(a)))),
(rc!(b),Pattern::VarOrName(qname!(x)))
discriminator: Some(bx!(ex!(s "x"))),
body: bx!(IfExpressionBody::SimplePatternMatch {
pattern: Pattern::Record(qname!(Something), vec![
(rc!(a),Pattern::Literal(PatternLiteral::StringPattern(rc!(a)))),
(rc!(b),Pattern::VarOrName(qname!(x)))
]),
vec![exst!(s "4")], Some(vec![exst!(s "9")])))
then_case: vec![exst!(s "4")],
else_case: Some(vec![exst!(s "9")])
}
)
}
)
}
@@ -655,12 +676,12 @@ fn pattern_literals() {
"if x is -1 then 1 else 2",
exst!(
IfExpression {
discriminator: bx!(Discriminator::Simple(Meta::new(ex!(s "x")))),
body: bx!(IfExpressionBody::SimplePatternMatch(
Pattern::Literal(PatternLiteral::NumPattern { neg: true, num: NatLiteral(1) }),
vec![exst!(NatLiteral(1))],
Some(vec![exst!(NatLiteral(2))]),
))
discriminator: Some(bx!(ex!(s "x"))),
body: bx!(IfExpressionBody::SimplePatternMatch {
pattern: Pattern::Literal(PatternLiteral::NumPattern { neg: true, num: NatLiteral(1) }),
then_case: vec![exst!(NatLiteral(1))],
else_case: Some(vec![exst!(NatLiteral(2))]),
})
}
)
}
@@ -669,12 +690,12 @@ fn pattern_literals() {
"if x is 1 then 1 else 2",
exst!(
IfExpression {
discriminator: bx!(Discriminator::Simple(Meta::new(ex!(s "x")))),
body: bx!(IfExpressionBody::SimplePatternMatch(
Pattern::Literal(PatternLiteral::NumPattern { neg: false, num: NatLiteral(1) }),
vec![exst!(s "1")],
Some(vec![exst!(s "2")]),
))
discriminator: Some(bx!(ex!(s "x"))),
body: bx!(IfExpressionBody::SimplePatternMatch {
pattern: Pattern::Literal(PatternLiteral::NumPattern { neg: false, num: NatLiteral(1) }),
then_case: vec![exst!(s "1")],
else_case: Some(vec![exst!(s "2")]),
})
}
)
}
@@ -683,12 +704,13 @@ fn pattern_literals() {
"if x is true then 1 else 2",
exst!(
IfExpression {
discriminator: bx!(Discriminator::Simple(Meta::new(ex!(s "x")))),
body: bx!(IfExpressionBody::SimplePatternMatch(
Pattern::Literal(PatternLiteral::BoolPattern(true)),
vec![exst!(NatLiteral(1))],
Some(vec![exst!(NatLiteral(2))]),
))
discriminator: Some(bx!(ex!(s "x"))),
body: bx!(
IfExpressionBody::SimplePatternMatch {
pattern: Pattern::Literal(PatternLiteral::BoolPattern(true)),
then_case: vec![exst!(NatLiteral(1))],
else_case: Some(vec![exst!(NatLiteral(2))]),
})
}
)
}
@@ -697,13 +719,110 @@ fn pattern_literals() {
"if x is \"gnosticism\" then 1 else 2",
exst!(
IfExpression {
discriminator: bx!(Discriminator::Simple(Meta::new(ex!(s "x")))),
body: bx!(IfExpressionBody::SimplePatternMatch(
Pattern::Literal(PatternLiteral::StringPattern(rc!(gnosticism))),
vec![exst!(s "1")],
Some(vec![exst!(s "2")]),
))
discriminator: Some(bx!(ex!(s "x"))),
body: bx!(IfExpressionBody::SimplePatternMatch {
pattern: Pattern::Literal(PatternLiteral::StringPattern(rc!(gnosticism))),
then_case: vec![exst!(s "1")],
else_case: Some(vec![exst!(s "2")]),
})
}
)
}
}
#[test]
fn imports() {
parse_test_wrap_ast! {
"import harbinger::draughts::Norgleheim",
import!(ImportSpecifier {
id: ItemIdStore::new_id(),
path_components: vec![rc!(harbinger), rc!(draughts), rc!(Norgleheim)],
imported_names: ImportedNames::LastOfPath
})
}
}
#[test]
fn imports_2() {
parse_test_wrap_ast! {
"import harbinger::draughts::{Norgleheim, Xraksenlaigar}",
import!(ImportSpecifier {
id: ItemIdStore::new_id(),
path_components: vec![rc!(harbinger), rc!(draughts)],
imported_names: ImportedNames::List(vec![
rc!(Norgleheim),
rc!(Xraksenlaigar)
])
})
}
}
#[test]
fn imports_3() {
parse_test_wrap_ast! {
"import bespouri::{}",
import!(ImportSpecifier {
id: ItemIdStore::new_id(),
path_components: vec![rc!(bespouri)],
imported_names: ImportedNames::List(vec![])
})
}
}
#[test]
fn imports_4() {
parse_test_wrap_ast! {
"import bespouri::*",
import!(ImportSpecifier {
id: ItemIdStore::new_id(),
path_components: vec![rc!(bespouri)],
imported_names: ImportedNames::All
})
}
}
#[test]
fn if_expr() {
parse_test_wrap_ast! {
"if x { is 1 then 5, else 20 }",
exst! {
IfExpression {
discriminator: Some(bx!(ex!(s "x"))),
body: bx!(IfExpressionBody::CondList(
vec![
ConditionArm {
condition: Condition::Pattern(Pattern::Literal(PatternLiteral::NumPattern { neg: false, num: NatLiteral(1)})),
guard: None,
body: vec![exst!(s "5")],
},
ConditionArm {
condition: Condition::Else,
guard: None,
body: vec![exst!(s "20")],
},
]
))
}
}
}
}
#[test]
fn modules() {
parse_test_wrap_ast! {
r#"
module ephraim {
let a = 10
fn nah() { 33 }
}
"#,
module!(
ModuleSpecifier { name: rc!(ephraim), contents: vec![
decl!(Binding { name: rc!(a), constant: true, type_anno: None, expr: ex!(s "10") }),
decl!(FuncDecl(Signature { name: rc!(nah), operator: false, params: vec![], type_anno: None }, vec![exst!(NatLiteral(33))])),
] }
)
}
}


@@ -1,3 +1,4 @@
let _SCHALA_VERSION = "0.1.0"
type Option<T> = Some(T) | None
type Ord = LT | EQ | GT
@@ -5,10 +6,9 @@ type Ord = LT | EQ | GT
fn map(input: Option<T>, func: Func): Option<T> {
if input {
is Option::Some(x) -> Option::Some(func(x)),
is Option::None -> Option::None,
is Option::Some(x) then Option::Some(func(x)),
is Option::None then Option::None,
}
}
type Complicated = Sunrise | Metal { black: bool, norwegian: bool } | Fella(String, Int)


@@ -18,6 +18,7 @@ use std::str::FromStr;
use crate::ast::*;
use crate::symbol_table::{Symbol, SymbolSpec, SymbolTable, FullyQualifiedSymbolName};
use crate::builtin::Builtin;
use crate::util::deref_optional_box;
#[derive(Debug)]
pub struct ReducedAST(pub Vec<Stmt>);
@@ -41,9 +42,9 @@ pub enum Stmt {
pub enum Expr {
Unit,
Lit(Lit),
Sym(Rc<String>), //a Sym is anything that can be looked up by name at runtime - i.e. a function or variable address
Tuple(Vec<Expr>),
Func(Func),
Sym(Rc<String>),
Constructor {
type_name: Rc<String>,
name: Rc<String>,
@@ -55,7 +56,7 @@ pub enum Expr {
args: Vec<Expr>,
},
Assign {
val: Box<Expr>,
val: Box<Expr>, //TODO this probably can't be a val
expr: Box<Expr>,
},
Conditional {
@@ -129,6 +130,13 @@ impl<'a> Reducer<'a> {
match &stmt.kind {
StatementKind::Expression(expr) => Stmt::Expr(self.expression(&expr)),
StatementKind::Declaration(decl) => self.declaration(&decl),
StatementKind::Import(_) => Stmt::Noop,
StatementKind::Module(modspec) => {
for statement in modspec.contents.iter() {
self.statement(&statement);
}
Stmt::Noop
}
}
}
@@ -156,28 +164,10 @@ impl<'a> Reducer<'a> {
BoolLiteral(b) => Expr::Lit(Lit::Bool(*b)),
BinExp(binop, lhs, rhs) => self.binop(binop, lhs, rhs),
PrefixExp(op, arg) => self.prefix(op, arg),
Value(qualified_name) => {
let ref id = qualified_name.id;
let ref sym_name = match symbol_table.get_fqsn_from_id(id) {
Some(fqsn) => fqsn,
None => return Expr::ReductionError(format!("FQSN lookup for Value {:?} failed", qualified_name)),
};
//TODO this probably needs to change
let FullyQualifiedSymbolName(ref v) = sym_name;
let name = v.last().unwrap().name.clone();
match symbol_table.lookup_by_fqsn(&sym_name) {
Some(Symbol { spec: SymbolSpec::DataConstructor { index, type_args, type_name}, .. }) => Expr::Constructor {
type_name: type_name.clone(),
name: name.clone(),
tag: index.clone(),
arity: type_args.len(),
},
_ => Expr::Sym(name.clone()),
}
},
Value(qualified_name) => self.value(qualified_name),
Call { f, arguments } => self.reduce_call_expression(f, arguments),
TupleLiteral(exprs) => Expr::Tuple(exprs.iter().map(|e| self.expression(e)).collect()),
IfExpression { discriminator, body } => self.reduce_if_expression(discriminator, body),
IfExpression { discriminator, body } => self.reduce_if_expression(deref_optional_box(discriminator), body),
Lambda { params, body, .. } => self.reduce_lambda(params, body),
NamedStruct { name, fields } => self.reduce_named_struct(name, fields),
Index { .. } => Expr::UnimplementedSigilValue,
@@ -187,6 +177,38 @@ impl<'a> Reducer<'a> {
}
}
fn value(&mut self, qualified_name: &QualifiedName) -> Expr {
let symbol_table = self.symbol_table;
let ref id = qualified_name.id;
let ref sym_name = match symbol_table.get_fqsn_from_id(id) {
Some(fqsn) => fqsn,
None => return Expr::ReductionError(format!("FQSN lookup for Value {:?} failed", qualified_name)),
};
//TODO this probably needs to change
let FullyQualifiedSymbolName(ref v) = sym_name;
let name = v.last().unwrap().name.clone();
let Symbol { local_name, spec, .. } = match symbol_table.lookup_by_fqsn(&sym_name) {
Some(s) => s,
//None => return Expr::ReductionError(format!("Symbol {:?} not found", sym_name)),
None => return Expr::Sym(name.clone())
};
match spec {
SymbolSpec::RecordConstructor { .. } => Expr::ReductionError(format!("AST reducer doesn't expect a RecordConstructor here")),
SymbolSpec::DataConstructor { index, type_args, type_name } => Expr::Constructor {
type_name: type_name.clone(),
name: name.clone(),
tag: index.clone(),
arity: type_args.len(),
},
SymbolSpec::Func(_) => Expr::Sym(local_name.clone()),
SymbolSpec::Binding => Expr::Sym(local_name.clone()), //TODO not sure if this is right, probably needs to eventually be fqsn
SymbolSpec::Type { .. } => Expr::ReductionError("AST reducer doesn't expect a type here".to_string())
}
}
fn reduce_lambda(&mut self, params: &Vec<FormalParam>, body: &Block) -> Expr {
Expr::Func(Func::UserDefined {
name: None,
@@ -231,30 +253,31 @@ impl<'a> Reducer<'a> {
}
}
fn reduce_if_expression(&mut self, discriminator: &Discriminator, body: &IfExpressionBody) -> Expr {
fn reduce_if_expression(&mut self, discriminator: Option<&Expression>, body: &IfExpressionBody) -> Expr {
let symbol_table = self.symbol_table;
let cond = Box::new(match *discriminator {
Discriminator::Simple(ref expr) => self.expression(expr),
Discriminator::BinOp(ref _expr, ref _binop) => panic!("Can't yet handle binop discriminators")
let cond = Box::new(match discriminator {
Some(expr) => self.expression(expr),
None => return Expr::ReductionError(format!("blank cond if-expr not supported")),
});
match *body {
IfExpressionBody::SimpleConditional(ref then_clause, ref else_clause) => {
let then_clause = self.block(then_clause);
let else_clause = match else_clause {
match body {
IfExpressionBody::SimpleConditional { then_case, else_case } => {
let then_clause = self.block(&then_case);
let else_clause = match else_case.as_ref() {
None => vec![],
Some(stmts) => self.block(stmts),
Some(stmts) => self.block(&stmts),
};
Expr::Conditional { cond, then_clause, else_clause }
},
IfExpressionBody::SimplePatternMatch(ref pat, ref then_clause, ref else_clause) => {
let then_clause = self.block(then_clause);
let else_clause = match else_clause {
IfExpressionBody::SimplePatternMatch { pattern, then_case, else_case } => {
let then_clause = self.block(&then_case);
let else_clause = match else_case.as_ref() {
None => vec![],
Some(stmts) => self.block(stmts),
Some(stmts) => self.block(&stmts),
};
let alternatives = vec![
pat.to_alternative(then_clause, symbol_table),
pattern.to_alternative(then_clause, symbol_table),
Alternative {
matchable: Subpattern {
tag: None,
@@ -271,16 +294,22 @@ impl<'a> Reducer<'a> {
alternatives,
}
},
IfExpressionBody::GuardList(ref guard_arms) => {
IfExpressionBody::CondList(ref condition_arms) => {
let mut alternatives = vec![];
for arm in guard_arms {
match arm.guard {
Guard::Pat(ref p) => {
for arm in condition_arms {
match arm.condition {
Condition::Expression(ref _expr) => {
return Expr::UnimplementedSigilValue
},
Condition::Pattern(ref p) => {
let item = self.block(&arm.body);
let alt = p.to_alternative(item, symbol_table);
alternatives.push(alt);
},
Guard::HalfExpr(HalfExpr { op: _, expr: _ }) => {
Condition::TruncatedOp(_, _) => {
return Expr::UnimplementedSigilValue
},
Condition::Else => {
return Expr::UnimplementedSigilValue
}
}
@@ -333,7 +362,7 @@ impl<'a> Reducer<'a> {
}
},
TypeDecl { .. } => Stmt::Noop,
TypeAlias(_, _) => Stmt::Noop,
TypeAlias{ .. } => Stmt::Noop,
Interface { .. } => Stmt::Noop,
Impl { .. } => Stmt::Expr(Expr::UnimplementedSigilValue),
_ => Stmt::Expr(Expr::UnimplementedSigilValue)
@@ -435,7 +464,6 @@ impl Pattern {
VarOrName(QualifiedName { components, id }) => {
// if fqsn is Some, treat this as a symbol pattern. If it's None, treat it
// as a variable.
println!("Calling VarOrName reduction with : {:?}", components);
let fqsn = symbol_table.get_fqsn_from_id(&id);
match fqsn.and_then(|fqsn| symbol_table.lookup_by_fqsn(&fqsn)) {
Some(symbol) => handle_symbol(Some(symbol), &vec![], symbol_table),


@@ -10,17 +10,22 @@ use schala_repl::{ProgrammingLanguageInterface,
ComputationRequest, ComputationResponse,
LangMetaRequest, LangMetaResponse, GlobalOutputStats,
DebugResponse, DebugAsk};
use crate::{ast, reduced_ast, tokenizing, parsing, eval, typechecking, symbol_table};
use crate::{ast, reduced_ast, tokenizing, parsing, eval, typechecking, symbol_table, source_map};
pub type SymbolTableHandle = Rc<RefCell<symbol_table::SymbolTable>>;
pub type SourceMapHandle = Rc<RefCell<source_map::SourceMap>>;
/// All the state necessary to parse and execute a Schala program are stored in this struct.
/// `state` represents the execution state for the AST-walking interpreter, the other fields
/// should be self-explanatory.
pub struct Schala {
source_reference: SourceReference,
source_map: SourceMapHandle,
state: eval::State<'static>,
symbol_table: Rc<RefCell<symbol_table::SymbolTable>>,
symbol_table: SymbolTableHandle,
resolver: crate::scope_resolution::ScopeResolver<'static>,
type_context: typechecking::TypeContext<'static>,
active_parser: Option<parsing::Parser>,
active_parser: parsing::Parser,
}
impl Schala {
@@ -34,13 +39,17 @@ impl Schala {
impl Schala {
/// Creates a new Schala environment *without* any prelude.
fn new_blank_env() -> Schala {
let symbols = Rc::new(RefCell::new(symbol_table::SymbolTable::new()));
let source_map = Rc::new(RefCell::new(source_map::SourceMap::new()));
let symbols = Rc::new(RefCell::new(symbol_table::SymbolTable::new(source_map.clone())));
Schala {
//TODO maybe these can be the same structure
source_reference: SourceReference::new(),
symbol_table: symbols.clone(),
state: eval::State::new(symbols),
source_map: source_map.clone(),
resolver: crate::scope_resolution::ScopeResolver::new(symbols.clone()),
state: eval::State::new(),
type_context: typechecking::TypeContext::new(),
active_parser: None,
active_parser: parsing::Parser::new(source_map)
}
}
@@ -51,7 +60,10 @@ impl Schala {
let mut s = Schala::new_blank_env();
let request = ComputationRequest { source: prelude, debug_requests: HashSet::default() };
s.run_computation(request);
let response = s.run_computation(request);
if let Err(msg) = response.main_output {
panic!("Error in prelude, panicking: {}", msg);
}
s
}
@@ -94,10 +106,10 @@ fn tokenizing(input: &str, _handle: &mut Schala, comp: Option<&mut PassDebugArti
}
fn parsing(input: Vec<tokenizing::Token>, handle: &mut Schala, comp: Option<&mut PassDebugArtifact>) -> Result<ast::AST, String> {
use crate::parsing::Parser;
use ParsingDebugType::*;
let mut parser = handle.active_parser.take().unwrap_or_else(|| Parser::new(input));
let ref mut parser = handle.active_parser;
parser.add_new_tokens(input);
let ast = parser.parse();
comp.map(|comp| {
@@ -115,24 +127,30 @@ fn parsing(input: Vec<tokenizing::Token>, handle: &mut Schala, comp: Option<&mut
};
comp.add_artifact(debug_info);
});
ast.map_err(|err| format_parse_error(err, handle))
ast.map_err(|err| format_parse_error(err, &handle.source_reference))
}
fn format_parse_error(error: parsing::ParseError, handle: &mut Schala) -> String {
let line_num = error.token.line_num;
let ch = error.token.char_num;
let line_from_program = handle.source_reference.get_line(line_num);
fn format_parse_error(error: parsing::ParseError, source_reference: &SourceReference) -> String {
let line_num = error.token.location.line_num;
let ch = error.token.location.char_num;
let line_from_program = source_reference.get_line(line_num);
let location_pointer = format!("{}^", " ".repeat(ch));
let line_num_digits = format!("{}", line_num).chars().count();
let space_padding = " ".repeat(line_num_digits);
let production = match error.production_name {
Some(n) => format!("\n(from production \"{}\")", n),
None => "".to_string()
};
format!(r#"
{error_msg}
{error_msg}{production}
{space_padding} |
{line_num} | {}
{space_padding} | {}
"#, line_from_program, location_pointer, error_msg=error.msg, space_padding=space_padding, line_num=line_num)
"#, line_from_program, location_pointer, error_msg=error.msg, space_padding=space_padding, line_num=line_num, production=production
)
}
fn symbol_table(input: ast::AST, handle: &mut Schala, comp: Option<&mut PassDebugArtifact>) -> Result<ast::AST, String> {
@@ -145,9 +163,7 @@ fn symbol_table(input: ast::AST, handle: &mut Schala, comp: Option<&mut PassDebu
}
fn scope_resolution(mut input: ast::AST, handle: &mut Schala, _com: Option<&mut PassDebugArtifact>) -> Result<ast::AST, String> {
let mut symbol_table = handle.symbol_table.borrow_mut();
let mut resolver = crate::scope_resolution::ScopeResolver::new(&mut symbol_table);
let () = resolver.resolve(&mut input)?;
let () = handle.resolver.resolve(&mut input)?;
Ok(input)
}


@@ -1,173 +1,115 @@
use crate::symbol_table::{SymbolTable, ScopeSegment, ScopeSegmentKind, FullyQualifiedSymbolName};
use std::rc::Rc;
use crate::schala::SymbolTableHandle;
use crate::symbol_table::{ScopeSegment, FullyQualifiedSymbolName};
use crate::ast::*;
use crate::util::ScopeStack;
type FQSNPrefix = Vec<ScopeSegment>;
pub struct ScopeResolver<'a> {
symbol_table: &'a mut SymbolTable
symbol_table_handle: SymbolTableHandle,
name_scope_stack: ScopeStack<'a, Rc<String>, FQSNPrefix>,
}
impl<'a> ScopeResolver<'a> {
pub fn new(symbol_table: &'a mut SymbolTable) -> ScopeResolver {
ScopeResolver { symbol_table }
}
pub fn resolve(&mut self, ast: &mut AST) -> Result<(), String> {
for statement in ast.statements.iter() {
match statement.kind {
StatementKind::Declaration(ref decl) => self.decl(decl),
StatementKind::Expression(ref expr) => self.expr(expr),
}?;
}
Ok(())
}
fn decl(&mut self, decl: &Declaration) -> Result<(), String> {
use Declaration::*;
match decl {
Binding { expr, .. } => self.expr(expr),
FuncDecl(_, block) => self.block(block),
_ => Ok(()),
}
}
fn block(&mut self, block: &Block) -> Result<(), String> {
for statement in block.iter() {
match statement.kind {
StatementKind::Declaration(ref decl) => self.decl(decl),
StatementKind::Expression(ref expr) => self.expr(expr),
}?;
}
Ok(())
}
fn expr(&mut self, expr: &Expression) -> Result<(), String> {
use ExpressionKind::*;
match &expr.kind {
ExpressionKind::Value(qualified_name) => {
let fqsn = lookup_name_in_scope(&qualified_name);
let ref id = qualified_name.id;
self.symbol_table.map_id_to_fqsn(id, fqsn);
},
NamedStruct { name, .. } => {
let ref id = name.id;
let fqsn = lookup_name_in_scope(&name);
self.symbol_table.map_id_to_fqsn(id, fqsn);
},
BinExp(_, ref lhs, ref rhs) => {
self.expr(lhs)?;
self.expr(rhs)?;
},
PrefixExp(_, ref arg) => {
self.expr(arg)?;
},
TupleLiteral(exprs) => {
for expr in exprs.iter() {
self.expr(expr)?;
impl<'a> ASTVisitor for ScopeResolver<'a> {
//TODO need to un-insert these - maybe need to rethink visitor
fn import(&mut self, import_spec: &ImportSpecifier) {
let ref symbol_table = self.symbol_table_handle.borrow();
let ImportSpecifier { ref path_components, ref imported_names, .. } = &import_spec;
match imported_names {
ImportedNames::All => {
let prefix = FullyQualifiedSymbolName(path_components.iter().map(|c| ScopeSegment {
name: c.clone(),
}).collect());
let members = symbol_table.lookup_children_of_fqsn(&prefix);
for member in members.into_iter() {
let local_name = member.0.last().unwrap().name.clone();
self.name_scope_stack.insert(local_name.clone(), member.0);
}
},
Call { f, arguments } => {
self.expr(&f)?;
for arg in arguments.iter() {
self.invoc(arg)?;
ImportedNames::LastOfPath => {
let name = path_components.last().unwrap(); //TODO handle better
let fqsn_prefix = path_components.iter().map(|c| ScopeSegment {
name: c.clone(),
}).collect();
self.name_scope_stack.insert(name.clone(), fqsn_prefix);
}
ImportedNames::List(ref names) => {
let fqsn_prefix: FQSNPrefix = path_components.iter().map(|c| ScopeSegment {
name: c.clone(),
}).collect();
for name in names.iter() {
self.name_scope_stack.insert(name.clone(), fqsn_prefix.clone());
}
},
Lambda { params, body, .. } => {
self.block(&body)?;
for param in params.iter() {
if let Some(ref expr) = param.default {
self.expr(expr)?;
}
}
},
IfExpression { ref body, ref discriminator } => {
match &**discriminator {
Discriminator::Simple(expr) | Discriminator::BinOp(expr, _) => self.expr(expr)?
};
match &**body {
IfExpressionBody::SimplePatternMatch(ref pat, ref alt1, ref alt2) => {
self.pattern(pat)?;
self.block(alt1)?;
if let Some(alt) = alt2 {
self.block(alt)?;
}
},
IfExpressionBody::GuardList(guardarms) => {
for arm in guardarms.iter() {
if let Guard::Pat(ref pat) = arm.guard {
self.pattern(pat)?;
}
self.block(&arm.body)?;
}
}
_ => ()
}
},
_ => ()
}
};
Ok(())
}
fn invoc(&mut self, invoc: &InvocationArgument) -> Result<(), String> {
use InvocationArgument::*;
match invoc {
Positional(expr) => self.expr(expr),
Keyword { expr, .. } => self.expr(expr),
_ => Ok(())
}
fn qualified_name(&mut self, qualified_name: &QualifiedName) {
let ref mut symbol_table = self.symbol_table_handle.borrow_mut();
let fqsn = self.lookup_name_in_scope(&qualified_name);
let ref id = qualified_name.id;
symbol_table.map_id_to_fqsn(id, fqsn);
}
fn pattern(&mut self, pat: &Pattern) -> Result<(), String> {
fn named_struct(&mut self, name: &QualifiedName, _fields: &Vec<(Rc<String>, Expression)>) {
let ref mut symbol_table = self.symbol_table_handle.borrow_mut();
let ref id = name.id;
let fqsn = self.lookup_name_in_scope(&name);
symbol_table.map_id_to_fqsn(id, fqsn);
}
fn pattern(&mut self, pat: &Pattern) {
use Pattern::*;
match pat {
Ignored => (),
TuplePattern(patterns) => {
for pat in patterns {
self.pattern(pat)?;
}
},
TuplePattern(_) => (),
Literal(_) => (),
TupleStruct(name, patterns) => {
self.qualified_name_in_pattern(name);
for pat in patterns {
self.pattern(pat)?;
}
},
Record(name, key_patterns) => {
self.qualified_name_in_pattern(name);
for (_, pat) in key_patterns {
self.pattern(pat)?;
}
},
VarOrName(name) => {
self.qualified_name_in_pattern(name);
},
TupleStruct(name, _) => self.qualified_name_in_pattern(name),
Record(name, _) => self.qualified_name_in_pattern(name),
VarOrName(name) => self.qualified_name_in_pattern(name),
};
}
}
impl<'a> ScopeResolver<'a> {
pub fn new(symbol_table_handle: SymbolTableHandle) -> ScopeResolver<'static> {
let name_scope_stack = ScopeStack::new(None);
ScopeResolver { symbol_table_handle, name_scope_stack }
}
pub fn resolve(&mut self, ast: &mut AST) -> Result<(), String> {
walk_ast(self, ast);
Ok(())
}
fn lookup_name_in_scope(&self, sym_name: &QualifiedName) -> FullyQualifiedSymbolName {
let QualifiedName { components, .. } = sym_name;
let first_component = &components[0];
match self.name_scope_stack.lookup(first_component) {
None => {
FullyQualifiedSymbolName(components.iter().map(|name| ScopeSegment { name: name.clone() }).collect())
},
Some(fqsn_prefix) => {
let mut full_name = fqsn_prefix.clone();
let rest_of_name: FQSNPrefix = components[1..].iter().map(|name| ScopeSegment { name: name.clone() }).collect();
full_name.extend_from_slice(&rest_of_name);
FullyQualifiedSymbolName(full_name)
}
}
}
/// This might be either a variable or a pattern; if it's a variable (no symbol exists under that name), no id-to-FQSN mapping is added.
fn qualified_name_in_pattern(&mut self, qualified_name: &QualifiedName) {
let ref mut symbol_table = self.symbol_table_handle.borrow_mut();
let ref id = qualified_name.id;
let fqsn = lookup_name_in_scope(qualified_name);
if self.symbol_table.lookup_by_fqsn(&fqsn).is_some() {
self.symbol_table.map_id_to_fqsn(&id, fqsn);
let fqsn = self.lookup_name_in_scope(qualified_name);
if symbol_table.lookup_by_fqsn(&fqsn).is_some() {
symbol_table.map_id_to_fqsn(&id, fqsn);
}
}
}
//TODO this is incomplete
fn lookup_name_in_scope(sym_name: &QualifiedName) -> FullyQualifiedSymbolName {
let QualifiedName { components: vec, .. } = sym_name;
let len = vec.len();
let new_vec: Vec<ScopeSegment> = vec.iter().enumerate().map(|(i, name)| {
let kind = if i == (len - 1) {
ScopeSegmentKind::Terminal
} else {
ScopeSegmentKind::Type
};
ScopeSegment { name: name.clone(), kind }
}).collect();
FullyQualifiedSymbolName(new_vec)
}
#[cfg(test)]
mod tests {
#[test]


@@ -0,0 +1,39 @@
use std::collections::HashMap;
use std::fmt;
use crate::ast::ItemId;
pub type LineNumber = usize;
#[derive(Debug, Clone, Copy, PartialEq)]
pub struct Location {
pub line_num: LineNumber,
pub char_num: usize,
}
impl fmt::Display for Location {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}:{}", self.line_num, self.char_num)
}
}
pub struct SourceMap {
map: HashMap<ItemId, Location>
}
impl SourceMap {
pub fn new() -> SourceMap {
SourceMap { map: HashMap::new() }
}
pub fn add_location(&mut self, id: &ItemId, loc: Location) {
self.map.insert(id.clone(), loc);
}
pub fn lookup(&self, id: &ItemId) -> Option<Location> {
match self.map.get(id) {
Some(loc) => Some(loc.clone()),
None => None
}
}
}
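A hedged usage sketch of the new SourceMap (illustrative only, not part of the commit), assuming node ids are minted with the ItemIdStore used in the parser tests above:

#[test]
fn source_map_usage_sketch() {
    // Record where an AST node came from, then recover it for later error reporting.
    let mut source_map = SourceMap::new();
    let id = crate::ast::ItemIdStore::new_id();
    source_map.add_location(&id, Location { line_num: 3, char_num: 14 });
    assert_eq!(source_map.lookup(&id), Some(Location { line_num: 3, char_num: 14 }));
}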


@@ -4,14 +4,61 @@ use std::rc::Rc;
use std::fmt;
use std::fmt::Write;
use crate::schala::SourceMapHandle;
use crate::source_map::{SourceMap, LineNumber};
use crate::ast;
use crate::ast::{ItemId, TypeBody, TypeSingletonName, Signature, Statement, StatementKind};
use crate::ast::{ItemId, TypeBody, TypeSingletonName, Signature, Statement, StatementKind, ModuleSpecifier};
use crate::typechecking::TypeName;
type LineNumber = u32;
type SymbolTrackTable = HashMap<Rc<String>, LineNumber>;
#[derive(PartialEq, Eq, Hash, Debug, Clone)]
#[allow(unused_macros)]
macro_rules! fqsn {
( $( $name:expr ; $kind:tt),* ) => {
{
let mut vec = vec![];
$(
vec.push(crate::symbol_table::ScopeSegment::new(std::rc::Rc::new($name.to_string())));
)*
FullyQualifiedSymbolName(vec)
}
};
}
mod symbol_trie;
use symbol_trie::SymbolTrie;
mod test;
/// Keeps track of what names were used in a given namespace. Call try_register to add a name to
/// the table, or report an error if a name already exists.
struct DuplicateNameTrackTable {
table: HashMap<Rc<String>, LineNumber>,
}
impl DuplicateNameTrackTable {
fn new() -> DuplicateNameTrackTable {
DuplicateNameTrackTable { table: HashMap::new() }
}
fn try_register(&mut self, name: &Rc<String>, id: &ItemId, source_map: &SourceMap) -> Result<(), LineNumber> {
match self.table.entry(name.clone()) {
Entry::Occupied(o) => {
let line_number = o.get();
Err(*line_number)
},
Entry::Vacant(v) => {
let line_number = if let Some(loc) = source_map.lookup(id) {
loc.line_num
} else {
0
};
v.insert(line_number);
Ok(())
}
}
}
}
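A brief sketch of how try_register is meant to be driven; the names first_id, second_id, and source_map are hypothetical stand-ins, and this is not code from the commit:

// Hypothetical usage: registering the same name twice surfaces the line
// number that the SourceMap recorded for the first definition.
let mut seen_identifiers = DuplicateNameTrackTable::new();
let name = Rc::new("a".to_string());
seen_identifiers.try_register(&name, &first_id, &source_map).unwrap();
if let Err(line) = seen_identifiers.try_register(&name, &second_id, &source_map) {
    println!("Duplicate definition: {}. It's already defined at {}", name, line);
}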
#[derive(PartialEq, Eq, Hash, Debug, Clone, PartialOrd, Ord)]
pub struct FullyQualifiedSymbolName(pub Vec<ScopeSegment>);
impl fmt::Display for FullyQualifiedSymbolName {
@@ -24,72 +71,40 @@ impl fmt::Display for FullyQualifiedSymbolName {
}
}
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
#[derive(Debug, Clone, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ScopeSegment {
pub name: Rc<String>, //TODO maybe this could be a &str, for efficiency?
pub kind: ScopeSegmentKind,
}
impl fmt::Display for ScopeSegment {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use ScopeSegmentKind::*;
let kind = match self.kind {
Function => "fn",
Type => "ty",
Terminal => "tr",
};
write!(f, "{}({})", self.name, kind)
let kind = ""; //TODO implement some kind of kind-tracking here
write!(f, "{}{}", self.name, kind)
}
}
impl ScopeSegment {
pub fn new(name: Rc<String>, kind: ScopeSegmentKind) -> ScopeSegment {
ScopeSegment { name, kind }
pub fn new(name: Rc<String>) -> ScopeSegment {
ScopeSegment { name }
}
}
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub enum ScopeSegmentKind {
Function,
Type,
Terminal,
}
#[allow(unused_macros)]
macro_rules! fqsn {
( $( $name:expr ; $kind:tt),* ) => {
{
let mut vec = vec![];
$(
vec.push(ScopeSegment::new(
Rc::new($name.to_string()),
sym_path_kind!($kind),
));
)*
FullyQualifiedSymbolName(vec)
}
};
}
#[allow(unused_macros)]
macro_rules! sym_path_kind {
(fn) => { ScopeSegmentKind::Function };
(ty) => { ScopeSegmentKind::Type };
(tr) => { ScopeSegmentKind::Terminal };
}
//cf. p. 150 or so of Language Implementation Patterns
pub struct SymbolTable {
source_map_handle: SourceMapHandle,
symbol_path_to_symbol: HashMap<FullyQualifiedSymbolName, Symbol>,
id_to_fqsn: HashMap<ItemId, FullyQualifiedSymbolName>,
symbol_trie: SymbolTrie,
}
//TODO add various types of lookups here, maybe multiple hash tables internally?
impl SymbolTable {
pub fn new() -> SymbolTable {
pub fn new(source_map_handle: SourceMapHandle) -> SymbolTable {
SymbolTable {
source_map_handle,
symbol_path_to_symbol: HashMap::new(),
id_to_fqsn: HashMap::new(),
symbol_trie: SymbolTrie::new()
}
}
@@ -101,29 +116,34 @@ impl SymbolTable {
self.id_to_fqsn.get(&id).cloned()
}
fn add_new_symbol(&mut self, name: &Rc<String>, scope_path: &Vec<ScopeSegment>, spec: SymbolSpec) {
fn add_new_symbol(&mut self, local_name: &Rc<String>, scope_path: &Vec<ScopeSegment>, spec: SymbolSpec) {
let mut vec: Vec<ScopeSegment> = scope_path.clone();
vec.push(ScopeSegment { name: name.clone(), kind: ScopeSegmentKind::Terminal });
vec.push(ScopeSegment { name: local_name.clone() });
let fully_qualified_name = FullyQualifiedSymbolName(vec);
let symbol = Symbol { name: name.clone(), fully_qualified_name: fully_qualified_name.clone(), spec };
let symbol = Symbol { local_name: local_name.clone(), fully_qualified_name: fully_qualified_name.clone(), spec };
self.symbol_trie.insert(&fully_qualified_name);
self.symbol_path_to_symbol.insert(fully_qualified_name, symbol);
}
pub fn lookup_by_fqsn(&self, fully_qualified_path: &FullyQualifiedSymbolName) -> Option<&Symbol> {
self.symbol_path_to_symbol.get(fully_qualified_path)
}
pub fn lookup_children_of_fqsn(&self, path: &FullyQualifiedSymbolName) -> Vec<FullyQualifiedSymbolName> {
self.symbol_trie.get_children(path)
}
}
#[derive(Debug)]
pub struct Symbol {
pub name: Rc<String>, //TODO does this need to be pub?
pub local_name: Rc<String>, //TODO does this need to be pub?
fully_qualified_name: FullyQualifiedSymbolName,
pub spec: SymbolSpec,
}
impl fmt::Display for Symbol {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "<Name: {}, Spec: {}>", self.name, self.spec)
write!(f, "<Local name: {}, Spec: {}>", self.local_name, self.spec)
}
}
@@ -140,7 +160,10 @@ pub enum SymbolSpec {
members: HashMap<Rc<String>, TypeName>,
type_name: TypeName,
},
Binding
Binding,
Type {
name: TypeName
},
}
impl fmt::Display for SymbolSpec {
@@ -151,6 +174,7 @@ impl fmt::Display for SymbolSpec {
DataConstructor { index, type_name, type_args } => write!(f, "DataConstructor(idx: {})({:?} -> {})", index, type_args, type_name),
RecordConstructor { type_name, index, ..} => write!(f, "RecordConstructor(idx: {})(<members> -> {})", index, type_name),
Binding => write!(f, "Binding"),
Type { name } => write!(f, "Type <{}>", name),
}
}
}
@@ -167,58 +191,60 @@ impl SymbolTable {
fn add_symbols_from_scope<'a>(&'a mut self, statements: &Vec<Statement>, scope_name_stack: &mut Vec<ScopeSegment>) -> Result<(), String> {
use self::ast::Declaration::*;
fn insert_and_check_duplicate_symbol(table: &mut SymbolTrackTable, name: &Rc<String>) -> Result<(), String> {
match table.entry(name.clone()) {
Entry::Occupied(o) => {
let line_number = o.get(); //TODO make this actually work
Err(format!("Duplicate definition: {}. It's already defined at {}", name, line_number))
},
Entry::Vacant(v) => {
let line_number = 0; //TODO should work
v.insert(line_number);
Ok(())
}
}
}
let mut seen_identifiers: SymbolTrackTable = HashMap::new();
let mut seen_identifiers = DuplicateNameTrackTable::new();
let mut seen_modules = DuplicateNameTrackTable::new();
for statement in statements.iter() {
if let Statement { kind: StatementKind::Declaration(decl), .. } = statement {
match decl {
FuncSig(ref signature) => {
insert_and_check_duplicate_symbol(&mut seen_identifiers, &signature.name)?;
self.add_function_signature(signature, scope_name_stack)?
match statement {
Statement { kind: StatementKind::Declaration(decl), id } => {
match decl {
FuncSig(ref signature) => {
seen_identifiers.try_register(&signature.name, &id, &self.source_map_handle.borrow())
.map_err(|line| format!("Duplicate function definition: {}. It's already defined at {}", signature.name, line))?;
self.add_function_signature(signature, scope_name_stack)?
}
FuncDecl(ref signature, ref body) => {
seen_identifiers.try_register(&signature.name, &id, &self.source_map_handle.borrow())
.map_err(|line| format!("Duplicate function definition: {}. It's already defined at {}", signature.name, line))?;
self.add_function_signature(signature, scope_name_stack)?;
scope_name_stack.push(ScopeSegment{
name: signature.name.clone(),
});
let output = self.add_symbols_from_scope(body, scope_name_stack);
scope_name_stack.pop();
output?
},
TypeDecl { name, body, mutable } => {
seen_identifiers.try_register(&name.name, &id, &self.source_map_handle.borrow())
.map_err(|line| format!("Duplicate type definition: {}. It's already defined at {}", name.name, line))?;
self.add_type_decl(name, body, mutable, scope_name_stack)?
},
Binding { name, .. } => {
seen_identifiers.try_register(&name, &id, &self.source_map_handle.borrow())
.map_err(|line| format!("Duplicate variable definition: {}. It's already defined at {}", name, line))?;
self.add_new_symbol(name, scope_name_stack, SymbolSpec::Binding);
}
_ => ()
}
FuncDecl(ref signature, ref body) => {
insert_and_check_duplicate_symbol(&mut seen_identifiers, &signature.name)?;
self.add_function_signature(signature, scope_name_stack)?;
scope_name_stack.push(ScopeSegment{
name: signature.name.clone(),
kind: ScopeSegmentKind::Function,
});
let output = self.add_symbols_from_scope(body, scope_name_stack);
let _ = scope_name_stack.pop();
output?
},
TypeDecl { name, body, mutable } => {
insert_and_check_duplicate_symbol(&mut seen_identifiers, &name.name)?;
//TODO add ScopeSegmentKind::Type here
self.add_type_decl(name, body, mutable, scope_name_stack)?
},
Binding { name, .. } => {
insert_and_check_duplicate_symbol(&mut seen_identifiers, name)?;
self.add_new_symbol(name, scope_name_stack, SymbolSpec::Binding);
}
_ => ()
}
},
Statement { kind: StatementKind::Module(ModuleSpecifier { name, contents}), id } => {
seen_modules.try_register(&name, &id, &self.source_map_handle.borrow())
.map_err(|line| format!("Duplicate module definition: {}. It's already defined at {}", name, line))?;
scope_name_stack.push(ScopeSegment { name: name.clone() });
let output = self.add_symbols_from_scope(contents, scope_name_stack);
scope_name_stack.pop();
output?
},
_ => ()
}
}
Ok(())
}
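
The duplicate checks above now route through a DuplicateNameTrackTable whose definition is not part of this diff. Below is a minimal sketch matching the try_register(&name, &id, &source_map) call shape used here; the ItemId and SourceMap types and the lookup_line_number accessor are stand-ins for the crate's real ones, which this hunk does not show.

use std::collections::HashMap;
use std::collections::hash_map::Entry;
use std::rc::Rc;

type LineNumber = usize;

// Stand-ins for the real ItemId and SourceMap, which live elsewhere in the crate.
pub struct ItemId(pub usize);
pub struct SourceMap { locations: HashMap<usize, LineNumber> }
impl SourceMap {
    // Hypothetical accessor; the real SourceMap API is not shown in this diff.
    fn lookup_line_number(&self, id: &ItemId) -> LineNumber {
        *self.locations.get(&id.0).unwrap_or(&0)
    }
}

pub struct DuplicateNameTrackTable(HashMap<Rc<String>, LineNumber>);

impl DuplicateNameTrackTable {
    pub fn new() -> Self {
        DuplicateNameTrackTable(HashMap::new())
    }

    // On first sight of `name`, record the line it was defined on; on a repeat,
    // return Err with the line of the original definition so the caller can
    // build the "already defined at {}" message.
    pub fn try_register(&mut self, name: &Rc<String>, id: &ItemId, map: &SourceMap) -> Result<(), LineNumber> {
        match self.0.entry(name.clone()) {
            Entry::Occupied(occupied) => Err(*occupied.get()),
            Entry::Vacant(vacant) => {
                vacant.insert(map.lookup_line_number(id));
                Ok(())
            }
        }
    }
}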
pub fn debug_symbol_table(&self) -> String {
let mut output = format!("Symbol table\n");
for (name, sym) in &self.symbol_path_to_symbol {
let mut sorted_symbols: Vec<(&FullyQualifiedSymbolName, &Symbol)> = self.symbol_path_to_symbol.iter().collect();
sorted_symbols.sort_by(|(fqsn, _), (other_fqsn, _)| fqsn.cmp(other_fqsn));
for (name, sym) in sorted_symbols.iter() {
write!(output, "{} -> {}\n", name, sym).unwrap();
}
output
@@ -239,9 +265,15 @@ impl SymbolTable {
use crate::ast::{TypeIdentifier, Variant};
let TypeBody(variants) = body;
let ref type_name = type_name.name;
let type_spec = SymbolSpec::Type {
name: type_name.clone(),
};
self.add_new_symbol(type_name, &scope_name_stack, type_spec);
scope_name_stack.push(ScopeSegment{
name: type_name.clone(),
kind: ScopeSegmentKind::Type,
});
//TODO figure out why _params isn't being used here
for (index, var) in variants.iter().enumerate() {
@@ -309,156 +341,3 @@ impl LocalTypeContext {
}
}
#[cfg(test)]
mod symbol_table_tests {
use super::*;
use crate::util::quick_ast;
macro_rules! values_in_table {
//TODO multiple values
($source:expr, $single_value:expr) => {
{
let mut symbol_table = SymbolTable::new();
let ast = quick_ast($source);
symbol_table.add_top_level_symbols(&ast).unwrap();
match symbol_table.lookup_by_fqsn($single_value) {
Some(_spec) => (),
None => panic!(),
};
}
}
}
#[test]
fn basic_symbol_table() {
values_in_table! { "let a = 10; fn b() { 20 }", &fqsn!("b"; tr) }
}
#[test]
fn no_duplicates() {
let source = r#"
fn a() { 1 }
fn b() { 2 }
fn a() { 3 }
"#;
let mut symbol_table = SymbolTable::new();
let ast = quick_ast(source);
let output = symbol_table.add_top_level_symbols(&ast).unwrap_err();
assert!(output.contains("Duplicate"))
}
#[test]
fn no_duplicates_2() {
let source = r#"
let a = 20;
let q = 39;
let a = 30;
"#;
let mut symbol_table = SymbolTable::new();
let ast = quick_ast(source);
let output = symbol_table.add_top_level_symbols(&ast).unwrap_err();
assert!(output.contains("Duplicate"))
}
#[test]
fn no_duplicates_3() {
let source = r#"
fn a() {
let a = 20
let b = 40
a + b
}
fn q() {
let x = 30
let x = 33
}
"#;
let mut symbol_table = SymbolTable::new();
let ast = quick_ast(source);
let output = symbol_table.add_top_level_symbols(&ast).unwrap_err();
assert!(output.contains("Duplicate"))
}
#[test]
fn dont_falsely_detect_duplicates() {
let source = r#"
let a = 20;
fn some_func() {
let a = 40;
77
}
let q = 39;
"#;
let mut symbol_table = SymbolTable::new();
let ast = quick_ast(source);
symbol_table.add_top_level_symbols(&ast).unwrap();
assert!(symbol_table.lookup_by_fqsn(&fqsn!["a"; tr]).is_some());
assert!(symbol_table.lookup_by_fqsn(&fqsn!["some_func"; fn, "a";tr]).is_some());
}
#[test]
fn enclosing_scopes() {
let source = r#"
fn outer_func(x) {
fn inner_func(arg) {
arg
}
x + inner_func(x)
}"#;
let mut symbol_table = SymbolTable::new();
let ast = quick_ast(source);
symbol_table.add_top_level_symbols(&ast).unwrap();
assert!(symbol_table.lookup_by_fqsn(&fqsn!("outer_func"; tr)).is_some());
assert!(symbol_table.lookup_by_fqsn(&fqsn!("outer_func"; fn, "inner_func"; tr)).is_some());
}
#[test]
fn enclosing_scopes_2() {
let source = r#"
fn outer_func(x) {
fn inner_func(arg) {
arg
}
fn second_inner_func() {
fn another_inner_func() {
}
}
inner_func(x)
}"#;
let mut symbol_table = SymbolTable::new();
let ast = quick_ast(source);
symbol_table.add_top_level_symbols(&ast).unwrap();
assert!(symbol_table.lookup_by_fqsn(&fqsn!("outer_func"; tr)).is_some());
assert!(symbol_table.lookup_by_fqsn(&fqsn!("outer_func"; fn, "inner_func"; tr)).is_some());
assert!(symbol_table.lookup_by_fqsn(&fqsn!("outer_func"; fn, "second_inner_func"; tr)).is_some());
assert!(symbol_table.lookup_by_fqsn(&fqsn!("outer_func"; fn, "second_inner_func"; fn, "another_inner_func"; tr)).is_some());
}
#[test]
fn enclosing_scopes_3() {
let source = r#"
fn outer_func(x) {
fn inner_func(arg) {
arg
}
fn second_inner_func() {
fn another_inner_func() {
}
fn another_inner_func() {
}
}
inner_func(x)
}"#;
let mut symbol_table = SymbolTable::new();
let ast = quick_ast(source);
let output = symbol_table.add_top_level_symbols(&ast).unwrap_err();
assert!(output.contains("Duplicate"))
}
}


@@ -0,0 +1,51 @@
use radix_trie::{Trie, TrieCommon, TrieKey};
use super::FullyQualifiedSymbolName;
use std::hash::{Hasher, Hash};
use std::collections::hash_map::DefaultHasher;
#[derive(Debug)]
pub struct SymbolTrie(Trie<FullyQualifiedSymbolName, ()>);
impl TrieKey for FullyQualifiedSymbolName {
fn encode_bytes(&self) -> Vec<u8> {
let mut hasher = DefaultHasher::new();
let mut output = vec![];
let FullyQualifiedSymbolName(scopes) = self;
for segment in scopes.iter() {
segment.name.as_bytes().hash(&mut hasher);
output.extend_from_slice(&hasher.finish().to_be_bytes());
}
output
}
}
impl SymbolTrie {
pub fn new() -> SymbolTrie {
SymbolTrie(Trie::new())
}
pub fn insert(&mut self, fqsn: &FullyQualifiedSymbolName) {
self.0.insert(fqsn.clone(), ());
}
pub fn get_children(&self, fqsn: &FullyQualifiedSymbolName) -> Vec<FullyQualifiedSymbolName> {
let subtrie = match self.0.subtrie(fqsn) {
Some(s) => s,
None => return vec![]
};
let output: Vec<FullyQualifiedSymbolName> = subtrie.keys().filter(|cur_key| **cur_key != *fqsn).map(|fqsn| fqsn.clone()).collect();
output
}
}
#[test]
fn test_trie_insertion() {
let mut trie = SymbolTrie::new();
trie.insert(&fqsn!("unrelated"; ty, "thing"; tr));
trie.insert(&fqsn!("outer"; ty, "inner"; tr));
trie.insert(&fqsn!("outer"; ty, "inner"; ty, "still_inner"; tr));
let children = trie.get_children(&fqsn!("outer"; ty, "inner"; tr));
assert_eq!(children.len(), 1);
}
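
One property get_children relies on is that encode_bytes never resets the hasher between segments, so a scope's encoded key is a byte prefix of every descendant's key while each 8-byte block still mixes in all enclosing scope names. A small test sketching that invariant, assuming the fqsn! macro produces segments whose names match the quoted strings, as in the test above:

#[test]
fn encoded_key_prefix_property() {
    let parent = fqsn!("outer"; ty, "inner"; tr);
    let child = fqsn!("outer"; ty, "inner"; ty, "still_inner"; tr);
    // encode_bytes only hashes segment names, so the parent's byte encoding
    // should be a strict prefix of the child's.
    assert!(child.encode_bytes().starts_with(&parent.encode_bytes()));
}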


@@ -0,0 +1,193 @@
#![cfg(test)]
use std::cell::RefCell;
use std::rc::Rc;
use super::*;
use crate::util::quick_ast;
fn add_symbols_from_source(src: &str) -> (SymbolTable, Result<(), String>) {
let (ast, source_map) = quick_ast(src);
let source_map = Rc::new(RefCell::new(source_map));
let mut symbol_table = SymbolTable::new(source_map);
let result = symbol_table.add_top_level_symbols(&ast);
(symbol_table, result)
}
macro_rules! values_in_table {
($source:expr, $single_value:expr) => {
values_in_table!($source => $single_value);
};
($source:expr => $( $value:expr ),* ) => {
{
let (symbol_table, _) = add_symbols_from_source($source);
$(
match symbol_table.lookup_by_fqsn($value) {
Some(_spec) => (),
None => panic!(),
};
)*
}
};
}
#[test]
fn basic_symbol_table() {
values_in_table! { "let a = 10; fn b() { 20 }", &fqsn!("b"; tr) };
values_in_table! { "type Option<T> = Some(T) | None" =>
&fqsn!("Option"; tr),
&fqsn!("Option"; ty, "Some"; tr),
&fqsn!("Option"; ty, "None"; tr) };
}
#[test]
fn no_function_definition_duplicates() {
let source = r#"
fn a() { 1 }
fn b() { 2 }
fn a() { 3 }
"#;
let (_, output) = add_symbols_from_source(source);
assert!(output.unwrap_err().contains("Duplicate function definition: a"))
}
#[test]
fn no_variable_definition_duplicates() {
let source = r#"
let x = 9
let a = 20
let q = 39
let a = 30
"#;
let (_, output) = add_symbols_from_source(source);
let output = output.unwrap_err();
assert!(output.contains("Duplicate variable definition: a"));
assert!(output.contains("already defined at 2"));
}
#[test]
fn no_variable_definition_duplicates_in_function() {
let source = r#"
fn a() {
let a = 20
let b = 40
a + b
}
fn q() {
let a = 29
let x = 30
let x = 33
}
"#;
let (_, output) = add_symbols_from_source(source);
assert!(output.unwrap_err().contains("Duplicate variable definition: x"))
}
#[test]
fn dont_falsely_detect_duplicates() {
let source = r#"
let a = 20;
fn some_func() {
let a = 40;
77
}
let q = 39;
"#;
let (symbol_table, _) = add_symbols_from_source(source);
assert!(symbol_table.lookup_by_fqsn(&fqsn!["a"; tr]).is_some());
assert!(symbol_table.lookup_by_fqsn(&fqsn!["some_func"; fn, "a";tr]).is_some());
}
#[test]
fn enclosing_scopes() {
let source = r#"
fn outer_func(x) {
fn inner_func(arg) {
arg
}
x + inner_func(x)
}"#;
let (symbol_table, _) = add_symbols_from_source(source);
assert!(symbol_table.lookup_by_fqsn(&fqsn!("outer_func"; tr)).is_some());
assert!(symbol_table.lookup_by_fqsn(&fqsn!("outer_func"; fn, "inner_func"; tr)).is_some());
}
#[test]
fn enclosing_scopes_2() {
let source = r#"
fn outer_func(x) {
fn inner_func(arg) {
arg
}
fn second_inner_func() {
fn another_inner_func() {
}
}
inner_func(x)
}"#;
let (symbol_table, _) = add_symbols_from_source(source);
assert!(symbol_table.lookup_by_fqsn(&fqsn!("outer_func"; tr)).is_some());
assert!(symbol_table.lookup_by_fqsn(&fqsn!("outer_func"; fn, "inner_func"; tr)).is_some());
assert!(symbol_table.lookup_by_fqsn(&fqsn!("outer_func"; fn, "second_inner_func"; tr)).is_some());
assert!(symbol_table.lookup_by_fqsn(&fqsn!("outer_func"; fn, "second_inner_func"; fn, "another_inner_func"; tr)).is_some());
}
#[test]
fn enclosing_scopes_3() {
let source = r#"
fn outer_func(x) {
fn inner_func(arg) {
arg
}
fn second_inner_func() {
fn another_inner_func() {
}
fn another_inner_func() {
}
}
inner_func(x)
}"#;
let (_, output) = add_symbols_from_source(source);
assert!(output.unwrap_err().contains("Duplicate"))
}
#[test]
fn modules() {
let source = r#"
module stuff {
fn item() {
}
}
fn item()
"#;
values_in_table! { source =>
&fqsn!("item"; tr),
&fqsn!("stuff"; tr, "item"; tr)
};
}
#[test]
fn duplicate_modules() {
let source = r#"
module q {
fn foo() { 4 }
}
module a {
fn foo() { 334 }
}
module a {
fn foo() { 256.1 }
}
"#;
let (_, output) = add_symbols_from_source(source);
let output = output.unwrap_err();
assert!(output.contains("Duplicate module"));
assert!(output.contains("already defined at 5"));
}


@@ -4,6 +4,8 @@ use std::rc::Rc;
use std::iter::{Iterator, Peekable};
use std::fmt;
use crate::source_map::Location;
#[derive(Debug, PartialEq, Clone)]
pub enum TokenKind {
Newline, Semicolon,
@@ -19,7 +21,10 @@ pub enum TokenKind {
Operator(Rc<String>),
DigitGroup(Rc<String>), HexLiteral(Rc<String>), BinNumberSigil,
StrLiteral(Rc<String>),
StrLiteral {
s: Rc<String>,
prefix: Option<Rc<String>>
},
Identifier(Rc<String>),
Keyword(Kw),
@@ -35,7 +40,7 @@ impl fmt::Display for TokenKind {
&Operator(ref s) => write!(f, "Operator({})", **s),
&DigitGroup(ref s) => write!(f, "DigitGroup({})", s),
&HexLiteral(ref s) => write!(f, "HexLiteral({})", s),
&StrLiteral(ref s) => write!(f, "StrLiteral({})", s),
&StrLiteral {ref s, .. } => write!(f, "StrLiteral({})", s),
&Identifier(ref s) => write!(f, "Identifier({})", s),
&Error(ref s) => write!(f, "Error({})", s),
other => write!(f, "{:?}", other),
@@ -55,7 +60,7 @@ pub enum Kw {
Alias, Type, SelfType, SelfIdent,
Interface, Impl,
True, False,
Module
Module, Import
}
lazy_static! {
@@ -82,14 +87,14 @@ lazy_static! {
"true" => Kw::True,
"false" => Kw::False,
"module" => Kw::Module,
"import" => Kw::Import,
};
}
#[derive(Debug, Clone)]
#[derive(Debug, Clone, PartialEq)]
pub struct Token {
pub kind: TokenKind,
pub line_num: usize,
pub char_num: usize
pub location: Location,
}
impl Token {
@@ -100,7 +105,7 @@ impl Token {
}
}
pub fn to_string_with_metadata(&self) -> String {
format!("{}(L:{},c:{})", self.kind, self.line_num, self.char_num)
format!("{}({})", self.kind, self.location)
}
pub fn get_kind(&self) -> TokenKind {
@@ -161,14 +166,15 @@ pub fn tokenize(input: &str) -> Vec<Token> {
'(' => LParen, ')' => RParen,
'{' => LCurlyBrace, '}' => RCurlyBrace,
'[' => LSquareBracket, ']' => RSquareBracket,
'"' => handle_quote(&mut input),
'"' => handle_quote(&mut input, None),
'\\' => Backslash,
c if c.is_digit(10) => handle_digit(c, &mut input),
c if c.is_alphabetic() || c == '_' => handle_alphabetic(c, &mut input),
c if is_operator(&c) => handle_operator(c, &mut input),
unknown => Error(format!("Unexpected character: {}", unknown)),
};
tokens.push(Token { kind: cur_tok_kind, line_num, char_num });
let location = Location { line_num, char_num };
tokens.push(Token { kind: cur_tok_kind, location });
}
tokens
}
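
The Location type now carried by Token comes from crate::source_map, which is not part of this diff. From the literal construction Location { line_num, char_num } above and the Display use in to_string_with_metadata, a plausible shape is roughly the following; only the field names are confirmed, the Display format is a guess.

use std::fmt;

#[derive(Debug, Clone, Copy, PartialEq)]
pub struct Location {
    pub line_num: usize,
    pub char_num: usize,
}

impl fmt::Display for Location {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Display format is assumed; the diff only shows that Location implements Display.
        write!(f, "{}:{}", self.line_num, self.char_num)
    }
}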
@@ -188,7 +194,7 @@ fn handle_digit(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>) ->
}
}
fn handle_quote(input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenKind {
fn handle_quote(input: &mut Peekable<impl Iterator<Item=CharData>>, quote_prefix: Option<&str>) -> TokenKind {
let mut buf = String::new();
loop {
match input.next().map(|(_, _, c)| { c }) {
@@ -210,7 +216,7 @@ fn handle_quote(input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenKind
None => return TokenKind::Error(format!("Unclosed string")),
}
}
TokenKind::StrLiteral(Rc::new(buf))
TokenKind::StrLiteral { s: Rc::new(buf), prefix: quote_prefix.map(|s| Rc::new(s.to_string())) }
}
fn handle_alphabetic(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenKind {
@@ -222,6 +228,10 @@ fn handle_alphabetic(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>
loop {
match input.peek().map(|&(_, _, c)| { c }) {
Some(c) if c == '"' => {
input.next();
return handle_quote(input, Some(&buf));
},
Some(c) if c.is_alphanumeric() || c == '_' => {
input.next();
buf.push(c);
@@ -322,4 +332,13 @@ mod schala_tokenizer_tests {
let token_kinds: Vec<TokenKind> = tokenize("1 `plus` 2").into_iter().map(move |t| t.kind).collect();
assert_eq!(token_kinds, vec![digit!("1"), op!("plus"), digit!("2")]);
}
#[test]
fn string_literals() {
let token_kinds: Vec<TokenKind> = tokenize(r#""some string""#).into_iter().map(move |t| t.kind).collect();
assert_eq!(token_kinds, vec![StrLiteral { s: Rc::new("some string".to_string()), prefix: None }]);
let token_kinds: Vec<TokenKind> = tokenize(r#"b"some bytestring""#).into_iter().map(move |t| t.kind).collect();
assert_eq!(token_kinds, vec![StrLiteral { s: Rc::new("some bytestring".to_string()), prefix: Some(Rc::new("b".to_string())) }]);
}
}
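
Downstream stages can now branch on the prefix when interpreting string tokens. A hedged sketch of what such a consumer might look like; the describe_string_token helper is illustrative only, since this diff adds just the tokenization of prefixes, not their semantics.

fn describe_string_token(kind: &TokenKind) -> String {
    match kind {
        TokenKind::StrLiteral { s, prefix: None } => format!("plain string literal {:?}", s),
        TokenKind::StrLiteral { s, prefix: Some(p) } => format!("{}-prefixed string literal {:?}", p, s),
        other => format!("not a string literal: {:?}", other),
    }
}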


@@ -5,6 +5,7 @@ use ena::unify::{UnifyKey, InPlaceUnificationTable, UnificationTable, EqUnifyVal
use crate::ast::*;
use crate::util::ScopeStack;
use crate::util::deref_optional_box;
#[derive(Debug, Clone, PartialEq)]
@@ -13,6 +14,7 @@ pub struct TypeData {
}
impl TypeData {
#[allow(dead_code)]
pub fn new() -> TypeData {
TypeData { ty: None }
}
@@ -274,6 +276,8 @@ impl<'a> TypeContext<'a> {
match &statement.kind {
StatementKind::Expression(e) => self.expr(e),
StatementKind::Declaration(decl) => self.decl(&decl),
StatementKind::Import(_) => Ok(ty!(Unit)),
StatementKind::Module(_) => Ok(ty!(Unit)),
}
}
@@ -317,7 +321,7 @@ impl<'a> TypeContext<'a> {
StringLiteral(_) => ty!(StringT),
PrefixExp(op, expr) => self.prefix(op, expr)?,
BinExp(op, lhs, rhs) => self.binexp(op, lhs, rhs)?,
IfExpression { discriminator, body } => self.if_expr(discriminator, body)?,
IfExpression { discriminator, body } => self.if_expr(deref_optional_box(discriminator), &**body)?,
Value(val) => self.handle_value(val)?,
Call { box ref f, arguments } => self.call(f, arguments)?,
Lambda { params, type_anno, body } => self.lambda(params, type_anno, body)?,
@@ -347,10 +351,10 @@ impl<'a> TypeContext<'a> {
self.handle_apply(tf, vec![t_lhs, t_rhs])
}
fn if_expr(&mut self, discriminator: &Discriminator, body: &IfExpressionBody) -> InferResult<Type> {
use self::Discriminator::*; use self::IfExpressionBody::*;
fn if_expr(&mut self, discriminator: Option<&Expression>, body: &IfExpressionBody) -> InferResult<Type> {
use self::IfExpressionBody::*;
match (discriminator, body) {
(Simple(expr), SimpleConditional(then_clause, else_clause)) => self.handle_simple_if(expr, then_clause, else_clause),
(Some(expr), SimpleConditional{ then_case, else_case }) => self.handle_simple_if(expr, then_case, else_case),
_ => TypeError::new(format!("Complex conditionals not supported"))
}
}
@@ -458,7 +462,7 @@ mod typechecking_tests {
macro_rules! assert_type_in_fresh_context {
($string:expr, $type:expr) => {
let mut tc = TypeContext::new();
let ref ast = crate::util::quick_ast($string);
let (ref ast, _) = crate::util::quick_ast($string);
let ty = tc.typecheck(ast).unwrap();
assert_eq!(ty, $type)
}


@@ -1,6 +1,11 @@
use std::collections::HashMap;
use std::hash::Hash;
use std::cmp::Eq;
use std::ops::Deref;
pub fn deref_optional_box<T>(x: &Option<Box<T>>) -> Option<&T> {
x.as_ref().map(|b: &Box<T>| Deref::deref(b))
}
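
A quick usage sketch of deref_optional_box, which is what lets the typechecker above pass the optional if-expression discriminator along as Option<&Expression> without cloning:

#[test]
fn deref_optional_box_usage() {
    let present: Option<Box<i32>> = Some(Box::new(5));
    let absent: Option<Box<i32>> = None;
    assert_eq!(deref_optional_box(&present), Some(&5));
    assert_eq!(deref_optional_box(&absent), None);
}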
#[derive(Default, Debug)]
pub struct ScopeStack<'a, T: 'a, V: 'a> where T: Hash + Eq {
@@ -43,10 +48,18 @@ impl<'a, T, V> ScopeStack<'a, T, V> where T: Hash + Eq {
/// this is intended for use in tests, and does no error-handling whatsoever
#[allow(dead_code)]
pub fn quick_ast(input: &str) -> crate::ast::AST {
pub fn quick_ast(input: &str) -> (crate::ast::AST, crate::source_map::SourceMap) {
use std::cell::RefCell;
use std::rc::Rc;
let source_map = crate::source_map::SourceMap::new();
let source_map_handle = Rc::new(RefCell::new(source_map));
let tokens = crate::tokenizing::tokenize(input);
let mut parser = crate::parsing::Parser::new(tokens);
parser.parse().unwrap()
let mut parser = crate::parsing::Parser::new(source_map_handle.clone());
parser.add_new_tokens(tokens);
let output = parser.parse();
std::mem::drop(parser);
(output.unwrap(), Rc::try_unwrap(source_map_handle).map_err(|_| ()).unwrap().into_inner())
}
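
The std::mem::drop(parser) before Rc::try_unwrap above is load-bearing: the parser holds a clone of the source-map handle, and try_unwrap only succeeds once that clone is gone. A minimal, self-contained illustration of the same pattern:

use std::cell::RefCell;
use std::rc::Rc;

fn unwrap_shared_handle() -> Vec<i32> {
    let handle = Rc::new(RefCell::new(vec![1, 2, 3]));
    let borrowed_by_parser = handle.clone();
    // Without this drop, the strong count is still 2 and try_unwrap returns Err.
    std::mem::drop(borrowed_by_parser);
    Rc::try_unwrap(handle)
        .map_err(|_| ())
        .unwrap()
        .into_inner()
}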
#[allow(unused_macros)]


@@ -49,8 +49,8 @@ impl CommandTree {
}
pub fn get_help(&self) -> &str {
match self {
CommandTree::Terminal { help_msg, ..} => help_msg.as_ref().map(|s| s.as_str()).unwrap_or(""),
CommandTree::NonTerminal { help_msg, .. } => help_msg.as_ref().map(|s| s.as_str()).unwrap_or(""),
CommandTree::Terminal { help_msg, ..} => help_msg.as_ref().map(|s| s.as_str()).unwrap_or("<no help text provided>"),
CommandTree::NonTerminal { help_msg, .. } => help_msg.as_ref().map(|s| s.as_str()).unwrap_or("<no help text provided>"),
CommandTree::Top(_) => ""
}
}


@@ -13,7 +13,12 @@ pub fn help(repl: &mut Repl, arguments: &[&str]) -> InterpreterDirectiveOutput {
None => format!("Directive `{}` not found", commands.last().unwrap()),
Some(dir) => {
let mut buf = String::new();
writeln!(buf, "`{}` - {}", dir.get_cmd(), dir.get_help()).unwrap();
let cmd = dir.get_cmd();
let children = dir.get_children();
writeln!(buf, "`{}` - {}", cmd, dir.get_help()).unwrap();
for sub in children.iter() {
writeln!(buf, "\t`{} {}` - {}", cmd, sub.get_cmd(), sub.get_help()).unwrap();
}
buf
}
})


@@ -27,6 +27,12 @@ pub struct Repl {
options: ReplOptions,
}
#[derive(Clone)]
enum PromptStyle {
Normal,
Multiline
}
impl Repl {
pub fn new(initial_states: Vec<Box<dyn ProgrammingLanguageInterface>>) -> Repl {
use linefeed::Interface;
@@ -62,37 +68,73 @@ impl Repl {
fn handle_repl_loop(&mut self) {
use linefeed::ReadResult::*;
let sigil = self.interpreter_directive_sigil;
loop {
self.update_line_reader();
match self.line_reader.read_line() {
Err(e) => {
println!("readline IO Error: {}", e);
break;
},
Ok(Eof) | Ok(Signal(_)) => break,
Ok(Input(ref input)) => {
self.line_reader.add_history_unique(input.to_string());
match input.chars().nth(0) {
Some(ch) if ch == self.interpreter_directive_sigil => match self.handle_interpreter_directive(input) {
Some(directive_output) => println!("<> {}", directive_output),
None => (),
'main: loop {
macro_rules! match_or_break {
($line:expr) => {
match $line {
Err(e) => {
println!("readline IO Error: {}", e);
break 'main;
},
_ => {
for repl_response in self.handle_input(input) {
println!("{}", repl_response);
}
}
Ok(Eof) | Ok(Signal(_)) => break 'main,
Ok(Input(ref input)) => input,
}
}
}
self.update_line_reader();
let line = self.line_reader.read_line();
let input: &str = match_or_break!(line);
self.line_reader.add_history_unique(input.to_string());
let mut chars = input.chars().peekable();
let repl_responses = match chars.nth(0) {
Some(ch) if ch == sigil => {
if chars.peek() == Some(&'{') {
let mut buf = String::new();
buf.push_str(input.get(2..).unwrap());
'multiline: loop {
self.set_prompt(PromptStyle::Multiline);
let new_line = self.line_reader.read_line();
let new_input = match_or_break!(new_line);
if new_input.starts_with(":}") {
break 'multiline;
} else {
buf.push_str(new_input);
buf.push_str("\n");
}
}
self.handle_input(&buf)
} else {
match self.handle_interpreter_directive(input) {
Some(directive_output) => println!("<> {}", directive_output),
None => (),
}
continue
}
},
_ => self.handle_input(input)
};
for repl_response in repl_responses.iter() {
println!("{}", repl_response);
}
}
}
fn update_line_reader(&mut self) {
let tab_complete_handler = TabCompleteHandler::new(self.interpreter_directive_sigil, self.get_directives());
self.line_reader.set_completer(Arc::new(tab_complete_handler)); //TODO fix this here
let prompt_str = format!(">> ");
self.set_prompt(PromptStyle::Normal);
}
fn set_prompt(&mut self, prompt_style: PromptStyle) {
let prompt_str = match prompt_style {
PromptStyle::Normal => ">> ".to_string(),
PromptStyle::Multiline => ">| ".to_string(),
};
self.line_reader.set_prompt(&prompt_str).unwrap();
}
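
The multiline mode above kicks in when a line starts with the directive sigil followed by '{' (i.e. ":{" with the default sigil) and keeps reading until a line beginning with ":}". A dependency-free sketch of that accumulation loop, with the line source abstracted behind a closure instead of the linefeed reader:

// `first_fragment` is whatever followed ":{" on the opening line; `read_line`
// stands in for the REPL's line reader and yields None on EOF.
fn collect_multiline_input(first_fragment: &str, mut read_line: impl FnMut() -> Option<String>) -> String {
    let mut buf = String::new();
    buf.push_str(first_fragment);
    while let Some(line) = read_line() {
        if line.starts_with(":}") {
            break;
        }
        buf.push_str(&line);
        buf.push('\n');
    }
    buf
}

In the REPL itself the closure would wrap self.line_reader.read_line(), and the accumulated buffer is handed to handle_input just as in the loop above.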