Compare commits: uuuu ... fix_fqsn_e (1 commit)

| Author | SHA1 | Date |
|---|---|---|
| | 5147a32c97 | |

40  Cargo.lock  (generated)
@@ -282,18 +282,6 @@ name = "lazy_static"
|
||||
version = "1.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "lexical-core"
|
||||
version = "0.4.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"arrayvec 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"ryu 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"static_assertions 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.2.62"
|
||||
@@ -394,16 +382,6 @@ dependencies = [
|
||||
"version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nom"
|
||||
version = "5.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"lexical-core 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "num"
|
||||
version = "0.1.42"
|
||||
@@ -751,14 +729,6 @@ name = "rustc-serialize"
|
||||
version = "0.3.24"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "rustc_version"
|
||||
version = "0.2.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ryu"
|
||||
version = "1.0.0"
|
||||
@@ -784,7 +754,6 @@ dependencies = [
|
||||
"itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"maplit 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"nom 5.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"radix_trie 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"schala-lang-codegen 0.1.0",
|
||||
"schala-repl 0.1.0",
|
||||
@@ -877,11 +846,6 @@ name = "smallvec"
|
||||
version = "0.6.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "static_assertions"
|
||||
version = "0.3.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "stopwatch"
|
||||
version = "0.0.7"
|
||||
@@ -1076,7 +1040,6 @@ dependencies = [
|
||||
"checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
|
||||
"checksum lazy_static 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "76f033c7ad61445c5b347c7382dd1237847eb1bce590fe50365dcb33d546be73"
|
||||
"checksum lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
|
||||
"checksum lexical-core 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "2304bccb228c4b020f3a4835d247df0a02a7c4686098d4167762cfbbe4c5cb14"
|
||||
"checksum libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)" = "34fcd2c08d2f832f376f4173a231990fa5aef4e99fb569867318a227ef4c06ba"
|
||||
"checksum linefeed 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "28715d08e35c6c074f9ae6b2e6a2420bac75d050c66ecd669d7d5b98e2caa036"
|
||||
"checksum llvm-sys 70.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e8e9fc46a848cf4170694144102ec07f6eada790d8b3d7e92ffa9cc7416fc869"
|
||||
@@ -1089,7 +1052,6 @@ dependencies = [
|
||||
"checksum nix 0.14.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6c722bee1037d430d0f8e687bbdbf222f27cc6e4e68d5caf630857bb2b6dbdce"
|
||||
"checksum nodrop 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)" = "2f9667ddcc6cc8a43afc9b7917599d7216aa09c463919ea32c59ed6cac8bc945"
|
||||
"checksum nom 4.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2ad2a91a8e869eeb30b9cb3119ae87773a8f4ae617f41b1eb9c154b2905f7bd6"
|
||||
"checksum nom 5.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c433f4d505fe6ce7ff78523d2fa13a0b9f2690e181fc26168bcbe5ccc5d14e07"
|
||||
"checksum num 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)" = "4703ad64153382334aa8db57c637364c322d3372e097840c72000dabdcf6156e"
|
||||
"checksum num-bigint 0.1.44 (registry+https://github.com/rust-lang/crates.io-index)" = "e63899ad0da84ce718c14936262a41cee2c79c981fc0a0e7c7beb47d5a07e8c1"
|
||||
"checksum num-complex 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)" = "b288631d7878aaf59442cffd36910ea604ecd7745c36054328595114001c9656"
|
||||
@@ -1129,7 +1091,6 @@ dependencies = [
|
||||
"checksum rust-argon2 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4ca4eaef519b494d1f2848fc602d18816fed808a981aedf4f1f00ceb7c9d32cf"
|
||||
"checksum rustc-demangle 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "4c691c0e608126e00913e33f0ccf3727d5fc84573623b8d65b2df340b5201783"
|
||||
"checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda"
|
||||
"checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a"
|
||||
"checksum ryu 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c92464b447c0ee8c4fb3824ecc8383b81717b9f1e74ba2e72540aef7b9f82997"
|
||||
"checksum semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403"
|
||||
"checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
|
||||
@@ -1139,7 +1100,6 @@ dependencies = [
|
||||
"checksum siphasher 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "0b8de496cf83d4ed58b6be86c3a275b8602f6ffe98d3024a869e124147a9a3ac"
|
||||
"checksum smallstr 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6aa65bb4d5b2bbc90d36af64e29802f788aa614783fa1d0df011800ddcec6e8e"
|
||||
"checksum smallvec 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)" = "ab606a9c5e214920bb66c458cd7be8ef094f813f20fe77a54cc7dbfff220d4b7"
|
||||
"checksum static_assertions 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "7f3eb36b47e512f8f1c9e3d10c2c1965bc992bd9cdb024fa581e2194501c83d3"
|
||||
"checksum stopwatch 0.0.7 (registry+https://github.com/rust-lang/crates.io-index)" = "3d04b5ebc78da44d3a456319d8bc2783e7d8cc7ccbb5cb4dc3f54afbd93bf728"
|
||||
"checksum syn 0.15.44 (registry+https://github.com/rust-lang/crates.io-index)" = "9ca4b3b69a77cbe1ffc9e198781b7acb0c7365a883670e8f1c1bc66fba79a5c5"
|
||||
"checksum syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "66850e97125af79138385e9b88339cbcd037e3f28ceab8c5ad98e64f0f1f80bf"
|
||||
|
||||
17  TODO.md

@@ -1,14 +1,3 @@
# Plan of attack

-ONLY two types of statement, Expression and Declaration
-modules and imports are just types of Declarables

1. modify visitor so it can handle scopes
-this is needed both to handle import scope correctly
-and also to support making FQSNs aware of function parameters

2. Once FQSNs are aware of function parameters, most of the Rc<String> things in eval.rs can go away

# TODO items

-use 'let' sigil in patterns for variables :
@@ -19,9 +8,6 @@
}
```
-idea: what if there was something like React JSX syntax built in? i.e. a way to automatically transform some kind of markup
into a function call, cf. `<h1 prop="arg">` -> h1(prop=arg)

## General code cleanup
- I think I can restructure the parser to get rid of most instances of expect!, at least at the beginning of a rule
DONE -experiment with storing metadata via ItemIds on AST nodes (cf. https://rust-lang.github.io/rustc-guide/hir.html, https://github.com/rust-lang/rust/blob/master/src/librustc/hir/mod.rs )
@@ -33,9 +19,6 @@ DONE -experiment with storing metadata via ItemIds on AST nodes (cf. https://rus
-look at https://gitlab.haskell.org/ghc/ghc/wikis/pattern-synonyms
2) the non-value-returning, default one like in rustc (cf. https://github.com/rust-unofficial/patterns/blob/master/patterns/visitor.md)

-parser error - should report subset of AST parsed *so far*
- what if you used python 'def' syntax to define a function? what error message makes sense here?

## Reduction
- make a good type for actual language builtins to avoid string comparisons

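The visitor item above ("the non-value-returning, default one like in rustc") refers to the style where every visit method has an empty default body and a separate walk function drives traversal, so implementors only override the hooks they care about. A minimal sketch of that shape, assuming the `AST`, `StatementKind`, `ImportSpecifier`, and `Expression` types that appear later in this diff; the `walk_ast` helper is hypothetical and not part of the codebase:

```
// Hedged sketch: default-method visitor in the rustc style. Only the hooks an
// implementor needs are overridden; walk_ast (hypothetical) does the traversal.
trait ASTVisitor {
    fn import(&mut self, _spec: &ImportSpecifier) {}
    fn expression(&mut self, _expr: &Expression) {}
}

fn walk_ast<V: ASTVisitor>(visitor: &mut V, ast: &AST) {
    for statement in &ast.statements {
        match &statement.kind {
            StatementKind::Import(spec) => visitor.import(spec),
            StatementKind::Expression(expr) => visitor.expression(expr),
            _ => {} // Module/Declaration hooks elided in this sketch
        }
    }
}
```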
@@ -15,7 +15,6 @@ stopwatch = "0.0.7"
derivative = "1.0.3"
colored = "1.8"
radix_trie = "0.1.5"
nom = "5.1.0"

schala-lang-codegen = { path = "../codegen" }
schala-repl = { path = "../../schala-repl" }

@@ -17,7 +17,7 @@ pub struct ItemId {
}

impl ItemId {
  pub fn new(n: u32) -> ItemId {
  fn new(n: u32) -> ItemId {
    ItemId { idx: n }
  }
}

@@ -68,10 +68,6 @@ impl BinOp {
    let s = token_kind_to_sigil(op_tok)?;
    Some(binop_precedences(s))
  }

  pub fn precedence(&self) -> i32 {
    binop_precedences(self.sigil.as_str())
  }
}

fn token_kind_to_sigil<'a>(tok: &'a TokenKind) -> Option<&'a str> {

@@ -14,12 +14,13 @@ mod test;
|
||||
|
||||
pub struct State<'a> {
|
||||
values: ScopeStack<'a, Rc<String>, ValueEntry>,
|
||||
symbol_table_handle: SymbolTableHandle,
|
||||
}
|
||||
|
||||
impl<'a> State<'a> {
|
||||
pub fn new() -> State<'a> {
|
||||
pub fn new(symbol_table_handle: SymbolTableHandle) -> State<'a> {
|
||||
let values = ScopeStack::new(Some(format!("global")));
|
||||
State { values }
|
||||
State { values, symbol_table_handle }
|
||||
}
|
||||
|
||||
pub fn debug_print(&self) -> String {
|
||||
@@ -29,6 +30,7 @@ impl<'a> State<'a> {
|
||||
fn new_frame(&'a self, items: &'a Vec<Node>, bound_vars: &BoundVars) -> State<'a> {
|
||||
let mut inner_state = State {
|
||||
values: self.values.new_scope(None),
|
||||
symbol_table_handle: self.symbol_table_handle.clone(),
|
||||
};
|
||||
for (bound_var, val) in bound_vars.iter().zip(items.iter()) {
|
||||
if let Some(bv) = bound_var.as_ref() {
|
||||
@@ -67,12 +69,12 @@ fn paren_wrapped_vec(terms: impl Iterator<Item=String>) -> String {
|
||||
|
||||
|
||||
impl Node {
|
||||
fn to_repl(&self) -> String {
|
||||
fn to_repl(&self, symbol_table: &SymbolTable) -> String {
|
||||
match self {
|
||||
Node::Expr(e) => e.to_repl(),
|
||||
Node::Expr(e) => e.to_repl(symbol_table),
|
||||
Node::PrimObject { name, items, .. } if items.len() == 0 => format!("{}", name),
|
||||
Node::PrimObject { name, items, .. } => format!("{}{}", name, paren_wrapped_vec(items.iter().map(|x| x.to_repl()))),
|
||||
Node::PrimTuple { items } => format!("{}", paren_wrapped_vec(items.iter().map(|x| x.to_repl()))),
|
||||
Node::PrimObject { name, items, .. } => format!("{}{}", name, paren_wrapped_vec(items.iter().map(|x| x.to_repl(symbol_table)))),
|
||||
Node::PrimTuple { items } => format!("{}", paren_wrapped_vec(items.iter().map(|x| x.to_repl(symbol_table)))),
|
||||
}
|
||||
}
|
||||
fn is_true(&self) -> bool {
|
||||
@@ -97,10 +99,12 @@ impl Expr {
|
||||
fn to_node(self) -> Node {
|
||||
Node::Expr(self)
|
||||
}
|
||||
fn to_repl(&self) -> String {
|
||||
fn to_repl(&self, symbol_table: &SymbolTable) -> String {
|
||||
use self::Lit::*;
|
||||
use self::Func::*;
|
||||
|
||||
let _ = symbol_table;
|
||||
|
||||
match self {
|
||||
Expr::Lit(ref l) => match l {
|
||||
Nat(n) => format!("{}", n),
|
||||
@@ -117,7 +121,7 @@ impl Expr {
|
||||
Expr::Constructor { type_name, arity, .. } => {
|
||||
format!("<constructor for `{}` arity {}>", type_name, arity)
|
||||
},
|
||||
Expr::Tuple(exprs) => paren_wrapped_vec(exprs.iter().map(|x| x.to_repl())),
|
||||
Expr::Tuple(exprs) => paren_wrapped_vec(exprs.iter().map(|x| x.to_repl(symbol_table))),
|
||||
_ => format!("{:?}", self),
|
||||
}
|
||||
}
|
||||
@@ -152,7 +156,8 @@ impl<'a> State<'a> {
|
||||
for statement in ast.0 {
|
||||
match self.statement(statement) {
|
||||
Ok(Some(ref output)) if repl => {
|
||||
acc.push(Ok(output.to_repl()))
|
||||
let ref symbol_table = self.symbol_table_handle.borrow();
|
||||
acc.push(Ok(output.to_repl(symbol_table)))
|
||||
},
|
||||
Ok(_) => (),
|
||||
Err(error) => {
|
||||
@@ -206,10 +211,7 @@ impl<'a> State<'a> {
|
||||
Node::Expr(expr) => match expr {
|
||||
literal @ Lit(_) => Ok(Node::Expr(literal)),
|
||||
Call { box f, args } => self.call_expression(f, args),
|
||||
Sym(name) => Ok(match self.values.lookup(&name) {
|
||||
Some(ValueEntry::Binding { val, .. }) => val.clone(),
|
||||
None => return Err(format!("Could not look up symbol {}", name))
|
||||
}),
|
||||
Sym(v) => self.handle_sym(v),
|
||||
Constructor { arity, ref name, tag, .. } if arity == 0 => Ok(Node::PrimObject { name: name.clone(), tag, items: vec![] }),
|
||||
constructor @ Constructor { .. } => Ok(Node::Expr(constructor)),
|
||||
func @ Func(_) => Ok(Node::Expr(func)),
|
||||
@@ -261,6 +263,7 @@ impl<'a> State<'a> {
|
||||
}
|
||||
let mut func_state = State {
|
||||
values: self.values.new_scope(name.map(|n| format!("{}", n))),
|
||||
symbol_table_handle: self.symbol_table_handle.clone(),
|
||||
};
|
||||
for (param, val) in params.into_iter().zip(args.into_iter()) {
|
||||
let val = func_state.expression(Node::Expr(val))?;
|
||||
@@ -339,11 +342,13 @@ impl<'a> State<'a> {
|
||||
|
||||
/* builtin functions */
|
||||
(IOPrint, &[ref anything]) => {
|
||||
print!("{}", anything.to_repl());
|
||||
let ref symbol_table = self.symbol_table_handle.borrow();
|
||||
print!("{}", anything.to_repl(symbol_table));
|
||||
Expr::Unit.to_node()
|
||||
},
|
||||
(IOPrintLn, &[ref anything]) => {
|
||||
println!("{}", anything.to_repl());
|
||||
let ref symbol_table = self.symbol_table_handle.borrow();
|
||||
println!("{}", anything.to_repl(symbol_table));
|
||||
Expr::Unit.to_node()
|
||||
},
|
||||
(IOGetLine, &[]) => {
|
||||
@@ -452,4 +457,46 @@ impl<'a> State<'a> {
|
||||
}
|
||||
Err(format!("{:?} failed pattern match", cond))
|
||||
}
|
||||
|
||||
//TODO if I don't need to lookup by name here...
|
||||
fn handle_sym(&mut self, name: Rc<String>) -> EvalResult<Node> {
|
||||
use self::ValueEntry::*;
|
||||
use self::Func::*;
|
||||
//TODO add a layer of indirection here to talk to the symbol table first, and only then look up
|
||||
//in the values table
|
||||
|
||||
let symbol_table = self.symbol_table_handle.borrow();
|
||||
let value = symbol_table.lookup_by_fqsn(&fqsn!(name ; tr));
|
||||
Ok(match value {
|
||||
Some(Symbol { local_name, spec, .. }) => match spec {
|
||||
//TODO I'll need this type_name later to do a table lookup
|
||||
SymbolSpec::DataConstructor { type_name: _type_name, type_args, .. } => {
|
||||
if type_args.len() == 0 {
|
||||
Node::PrimObject { name: local_name.clone(), tag: 0, items: vec![] }
|
||||
} else {
|
||||
return Err(format!("This data constructor thing not done"))
|
||||
}
|
||||
},
|
||||
SymbolSpec::Func(_) => match self.values.lookup(&name) {
|
||||
Some(Binding { val: Node::Expr(Expr::Func(UserDefined { name, params, body })), .. }) => {
|
||||
Node::Expr(Expr::Func(UserDefined { name: name.clone(), params: params.clone(), body: body.clone() }))
|
||||
},
|
||||
_ => unreachable!(),
|
||||
},
|
||||
SymbolSpec::RecordConstructor { .. } => return Err(format!("This shouldn't be a record!")),
|
||||
SymbolSpec::Binding => match self.values.lookup(&name) {
|
||||
Some(Binding { val, .. }) => val.clone(),
|
||||
None => return Err(format!("Symbol {} exists in symbol table but not in evaluator table", name))
|
||||
}
|
||||
SymbolSpec::Type { name } => return Err(format!("Symbol {} not in scope", name)),
|
||||
},
|
||||
//TODO ideally this should be returning a runtime error if this is ever None, but it's not
|
||||
//handling all bindings correctly yet
|
||||
//None => return Err(format!("Couldn't find value {}", name)),
|
||||
None => match self.values.lookup(&name) {
|
||||
Some(Binding { val, .. }) => val.clone(),
|
||||
None => return Err(format!("Couldn't find value {}", name)),
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -12,14 +12,14 @@ fn evaluate_all_outputs(input: &str) -> Vec<Result<String, String>> {
|
||||
let (mut ast, source_map) = crate::util::quick_ast(input);
|
||||
let source_map = Rc::new(RefCell::new(source_map));
|
||||
let symbol_table = Rc::new(RefCell::new(SymbolTable::new(source_map)));
|
||||
symbol_table.borrow_mut().add_top_level_symbols(&ast).unwrap();
|
||||
let mut state = State::new(symbol_table);
|
||||
state.symbol_table_handle.borrow_mut().add_top_level_symbols(&ast).unwrap();
|
||||
{
|
||||
let mut scope_resolver = ScopeResolver::new(symbol_table.clone());
|
||||
let mut scope_resolver = ScopeResolver::new(state.symbol_table_handle.clone());
|
||||
let _ = scope_resolver.resolve(&mut ast);
|
||||
}
|
||||
|
||||
let reduced = reduce(&ast, &symbol_table.borrow());
|
||||
let mut state = State::new();
|
||||
let reduced = reduce(&ast, &state.symbol_table_handle.borrow());
|
||||
let all_output = state.evaluate(reduced, true);
|
||||
all_output
|
||||
}
|
||||
|
||||
@@ -1,4 +1,7 @@
#![feature(box_patterns, box_syntax, trace_macros, or_patterns)]
#![feature(trace_macros)]
#![feature(custom_attribute)]
//#![feature(unrestricted_attribute_tokens)]
#![feature(slice_patterns, box_patterns, box_syntax)]

//! `schala-lang` is where the Schala programming language is actually implemented.
//! It defines the `Schala` type, which contains the state for a Schala REPL, and implements
@@ -30,7 +33,6 @@ mod debugging;

mod tokenizing;
mod ast;
mod parser;
mod parsing;
#[macro_use]
mod symbol_table;

@@ -1,598 +0,0 @@
|
||||
extern crate nom;
|
||||
|
||||
use std::rc::Rc;
|
||||
use std::str::FromStr;
|
||||
|
||||
use nom::IResult;
|
||||
use nom::character::complete::{one_of, space0, alphanumeric0};
|
||||
use nom::bytes::complete::{tag, take, take_while, take_while1, take_until};
|
||||
use nom::combinator::{cut, cond, map, map_res, value, opt, verify};
|
||||
use nom::multi::{separated_list, separated_nonempty_list, many1, many0};
|
||||
use nom::error::{context, ParseError, VerboseError, ErrorKind, make_error};
|
||||
use nom::branch::alt;
|
||||
use nom::sequence::{pair, tuple, delimited, preceded};
|
||||
|
||||
use crate::ast::*;
|
||||
use crate::builtin::Builtin;
|
||||
|
||||
type ParseResult<'a, T> = IResult<&'a str, T, VerboseError<&'a str>>;
|
||||
|
||||
pub fn ws<'a, O, E: ParseError<&'a str>, F>(parser: F) -> impl Fn(&'a str) -> IResult<&'a str, O, E>
|
||||
where
|
||||
F: Fn(&'a str) -> IResult<&'a str, O, E>,
|
||||
{
|
||||
delimited(space0, parser, space0)
|
||||
}
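A short usage note for the `ws` combinator above, as a hedged sketch assuming the nom 5 imports at the top of this file; `eq_sigil` is a made-up name for illustration:

```
use nom::{bytes::complete::tag, error::VerboseError, IResult};

// ws() strips surrounding spaces/tabs, so the wrapped parser only sees the token itself.
fn eq_sigil(text: &str) -> IResult<&str, &str, VerboseError<&str>> {
    ws(tag("="))(text)
}

// eq_sigil("  =  rest") evaluates to Ok(("rest", "="))
```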
|
||||
|
||||
fn statement_sep(text: &str) -> ParseResult<()> {
|
||||
value((), one_of("\n;"))(text)
|
||||
}
|
||||
|
||||
fn single_alphabetic_character(text: &str) -> ParseResult<char> {
|
||||
let p = verify(take(1usize), |s: &str| s.chars().nth(0).map(|c| c.is_alphabetic()).unwrap_or(false));
|
||||
map(p, |s: &str| s.chars().nth(0).unwrap())(text)
|
||||
}
|
||||
|
||||
fn single_alphanumeric_character(text: &str) -> ParseResult<char> {
|
||||
let p = verify(take(1usize), |s: &str| s.chars().nth(0).map(|c| c.is_alphanumeric() || c == '_').unwrap_or(false));
|
||||
map(p, |s: &str| s.chars().nth(0).unwrap())(text)
|
||||
}
|
||||
|
||||
fn identifier(text: &str) -> ParseResult<Rc<String>> {
|
||||
use nom::character::complete::char;
|
||||
map(alt((
|
||||
pair(char('_'), many1(single_alphanumeric_character)),
|
||||
pair(single_alphabetic_character, many0(single_alphanumeric_character))
|
||||
)),
|
||||
|(first, rest): (char, Vec<char>)| Rc::new(format!("{}{}", first, rest.into_iter().collect::<String>()))
|
||||
)(text)
|
||||
}
|
||||
|
||||
const OPERATOR_CHARS: &'static str = "~`!@#$%^&*-+=<>?/|";
|
||||
fn operator(text: &str) -> ParseResult<Vec<char>> {
|
||||
many1(one_of(OPERATOR_CHARS))(text)
|
||||
}
|
||||
|
||||
fn binop(text: &str) -> ParseResult<BinOp> {
|
||||
context("Binop", map(
|
||||
operator,
|
||||
|op| BinOp::from_sigil(&op.into_iter().collect::<String>())
|
||||
))(text)
|
||||
}
|
||||
|
||||
fn bool_literal(text: &str) -> ParseResult<ExpressionKind> {
|
||||
let p = alt((
|
||||
value(true, tag("true")),
|
||||
value(false, tag("false"))
|
||||
));
|
||||
context("Bool literal", map(p, ExpressionKind::BoolLiteral))(text)
|
||||
}
|
||||
|
||||
fn number_literal(text: &str) -> ParseResult<ExpressionKind> {
|
||||
let num_lit = many1(alt((
|
||||
map(one_of("1234567890"), |s: char| Some(s)),
|
||||
value(None, nom::character::complete::char('_')),
|
||||
)));
|
||||
|
||||
let (text, n) = map_res(num_lit,
|
||||
|digits: Vec<Option<char>>| {
|
||||
let num_str: String = digits.into_iter().filter_map(|x| x).collect();
|
||||
u64::from_str_radix(&num_str, 10)
|
||||
})(text)?;
|
||||
|
||||
Ok((text, ExpressionKind::NatLiteral(n)))
|
||||
}
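A hedged test sketch for the underscore handling in `number_literal` above; the final value is noted in a comment rather than asserted, since `ExpressionKind`'s equality behavior isn't shown in this diff:

```
#[test]
fn underscores_in_nat_literals() {
    // '_' parses as None and is filtered out before the base-10 conversion,
    // so digit-grouped input collapses to a single natural number.
    let (rest, _expr) = number_literal("1_000_000").unwrap();
    assert_eq!(rest, "");
    // _expr is ExpressionKind::NatLiteral(1_000_000)
}
```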
|
||||
|
||||
fn binary_literal(text: &str) -> ParseResult<ExpressionKind> {
|
||||
let p = preceded(tag("0b"), cut(take_while1(|c: char| c == '0' || c == '1')));
|
||||
let (rest, n): (&str, u64) = map_res(
|
||||
p, |hex_str: &str| u64::from_str_radix(hex_str, 2)
|
||||
)(text)?;
|
||||
let expr = ExpressionKind::NatLiteral(n);
|
||||
Ok((rest, expr))
|
||||
}
|
||||
|
||||
fn hex_literal(text: &str) -> ParseResult<ExpressionKind> {
|
||||
let p = preceded(tag("0x"), cut(take_while1(|c: char| c.is_digit(16))));
|
||||
let (rest, n): (&str, u64) = map_res(
|
||||
p, |hex_str: &str| u64::from_str_radix(hex_str, 16)
|
||||
)(text)?;
|
||||
let expr = ExpressionKind::NatLiteral(n);
|
||||
Ok((rest, expr))
|
||||
}
|
||||
|
||||
fn string_literal(text: &str) -> ParseResult<ExpressionKind> {
|
||||
use nom::character::complete::char;
|
||||
let (text, string_output) = delimited(
|
||||
char('"'), take_until("\""), char('"')
|
||||
)(text)?;
|
||||
let expr = ExpressionKind::StringLiteral(Rc::new(string_output.to_string()));
|
||||
Ok((text, expr))
|
||||
}
|
||||
|
||||
fn literal(text: &str) -> ParseResult<ExpressionKind> {
|
||||
alt((
|
||||
string_literal,
|
||||
hex_literal,
|
||||
binary_literal,
|
||||
number_literal,
|
||||
bool_literal,
|
||||
))(text)
|
||||
}
|
||||
|
||||
fn paren_expr(text: &str) -> ParseResult<ExpressionKind> {
|
||||
context("Paren expression", delimited(tag("("), ws(expression_kind), tag(")")))(text)
|
||||
}
|
||||
|
||||
fn prefix_op(text: &str) -> ParseResult<PrefixOp> {
|
||||
use nom::character::complete::char;
|
||||
let p = alt((char('+'), char('-'), char('!')));
|
||||
map(p, |sigil| PrefixOp::from_str(&sigil.to_string()).unwrap())(text)
|
||||
}
|
||||
|
||||
fn qualified_name(text: &str) -> ParseResult<QualifiedName> {
|
||||
map(
|
||||
separated_nonempty_list(tag("::"), identifier),
|
||||
|components| QualifiedName { id: ItemId::new(0), components }
|
||||
)(text)
|
||||
}
|
||||
|
||||
fn identifier_expr(text: &str) -> ParseResult<ExpressionKind> {
|
||||
map(qualified_name, ExpressionKind::Value)(text)
|
||||
}
|
||||
|
||||
fn primary_expr(text: &str) -> ParseResult<ExpressionKind> {
|
||||
// primary := literal | paren_expr | if_expr | for_expr | while_expr | identifier_expr | lambda_expr | anonymous_struct | list_expr
|
||||
|
||||
alt((
|
||||
if_expr,
|
||||
for_expr,
|
||||
while_expr,
|
||||
literal,
|
||||
paren_expr,
|
||||
lambda_expr,
|
||||
identifier_expr,
|
||||
))(text)
|
||||
}
|
||||
|
||||
fn lambda_expr(text: &str) -> ParseResult<ExpressionKind> {
|
||||
let p = preceded(ws(tag("\\")),
|
||||
tuple((ws(lambda_param_list), ws(opt(type_anno)), ws(block))));
|
||||
context("Lambda expression",
|
||||
map(p, |(params, type_anno, body)| ExpressionKind::Lambda { params, type_anno, body })
|
||||
)(text)
|
||||
}
|
||||
|
||||
fn lambda_param_list(text: &str) -> ParseResult<Vec<FormalParam>> {
|
||||
alt((
|
||||
map(formal_param, |x| vec![x]),
|
||||
formal_params
|
||||
))(text)
|
||||
}
|
||||
|
||||
|
||||
fn while_expr(text: &str) -> ParseResult<ExpressionKind> {
|
||||
let p = preceded(tag("while"), tuple((ws(while_cond), ws(block))));
|
||||
let m = map(p, |(condition, body)| {
|
||||
let condition = condition.map(Box::new);
|
||||
ExpressionKind::WhileExpression {condition, body}
|
||||
});
|
||||
context("While expression", m)(text)
|
||||
}
|
||||
|
||||
fn while_cond(text: &str) -> ParseResult<Option<Expression>> {
|
||||
//TODO support is constructs?
|
||||
context("While condition",
|
||||
map(opt(ws(expression_kind)),
|
||||
|maybe_expr_kind| maybe_expr_kind.map(|kind| Expression::new(ItemId::new(0), kind)))
|
||||
)(text)
|
||||
}
|
||||
|
||||
fn for_expr(text: &str) -> ParseResult<ExpressionKind> {
|
||||
//TODO do I need something like no struct literal here?
|
||||
let en = alt((
|
||||
map(enumerator, |e| vec![e]),
|
||||
delimited(tag("{"), enumerators, tag("}"))
|
||||
));
|
||||
context("For expression",
|
||||
preceded(tag("for"),
|
||||
cut(
|
||||
map(tuple((ws(en), for_expr_body)),
|
||||
|(enumerators, body)| ExpressionKind::ForExpression { enumerators, body: Box::new(body) }
|
||||
))))(text)
|
||||
}
|
||||
|
||||
|
||||
fn enumerators(text: &str) -> ParseResult<Vec<Enumerator>> {
|
||||
separated_nonempty_list(alt((value((), tag(",")), statement_sep)),
|
||||
enumerator)(text)
|
||||
}
|
||||
|
||||
fn enumerator(text: &str) -> ParseResult<Enumerator> {
|
||||
map(
|
||||
tuple((ws(identifier), ws(tag("<-")), ws(expression))),
|
||||
|(id, _, generator)| Enumerator { id, generator }
|
||||
)(text)
|
||||
}
|
||||
|
||||
fn for_expr_body(text: &str) -> ParseResult<ForBody> {
|
||||
context("For expression body",
|
||||
alt((
|
||||
map(preceded(ws(tag("return")), expression), ForBody::MonadicReturn),
|
||||
map(block, ForBody::StatementBlock),
|
||||
)))(text)
|
||||
}
|
||||
|
||||
fn invocation_argument(text: &str) -> ParseResult<InvocationArgument> {
|
||||
use nom::character::complete::char;
|
||||
alt((
|
||||
value(InvocationArgument::Ignored, pair(char('_'), alphanumeric0)),
|
||||
map(expression_kind, |kind: ExpressionKind| InvocationArgument::Positional(
|
||||
Expression { id: ItemId::new(0), kind, type_anno: None }))
|
||||
//map(identifier, |id: Rc<String>|
|
||||
))(text)
|
||||
}
|
||||
|
||||
fn if_expr(text: &str) -> ParseResult<ExpressionKind> {
|
||||
let p = preceded(tag("if"), pair(ws(discriminator), ws(if_expr_body)));
|
||||
map(p, |(discriminator, body)| {
|
||||
let discriminator = discriminator.map(Box::new);
|
||||
let body = Box::new(body);
|
||||
ExpressionKind::IfExpression { discriminator, body }
|
||||
}) (text)
|
||||
}
|
||||
|
||||
fn discriminator(text: &str) -> ParseResult<Option<Expression>> {
|
||||
use nom::combinator::verify;
|
||||
cond(text.chars().next().map(|c| c != '{').unwrap_or(true),
|
||||
expression
|
||||
)(text)
|
||||
}
|
||||
|
||||
fn if_expr_body(text: &str) -> ParseResult<IfExpressionBody> {
|
||||
alt((
|
||||
preceded(tag("then"), simple_conditional),
|
||||
preceded(tag("is"), simple_pattern_match),
|
||||
cond_block,
|
||||
))(text)
|
||||
}
|
||||
|
||||
fn simple_conditional(text: &str) -> ParseResult<IfExpressionBody> {
|
||||
map(
|
||||
pair(expr_or_block, else_case),
|
||||
|(then_case, else_case)| IfExpressionBody::SimpleConditional { then_case, else_case }
|
||||
)(text)
|
||||
}
|
||||
|
||||
fn else_case(text: &str) -> ParseResult<Option<Block>> {
|
||||
opt(preceded(tag("else"), expr_or_block))(text)
|
||||
}
|
||||
|
||||
fn simple_pattern_match(text: &str) -> ParseResult<IfExpressionBody> {
|
||||
let p = tuple((pattern, tag("then"), expr_or_block, else_case));
|
||||
map(p, |(pattern, _, then_case, else_case)|
|
||||
IfExpressionBody::SimplePatternMatch { pattern, then_case, else_case }
|
||||
)(text)
|
||||
}
|
||||
|
||||
fn pattern(text: &str) -> ParseResult<Pattern> {
|
||||
use nom::character::complete::char;
|
||||
|
||||
let t = delimited(char('('),
|
||||
separated_nonempty_list(char(','), pattern),
|
||||
char(')')
|
||||
);
|
||||
|
||||
alt((
|
||||
map(t, |patterns| Pattern::TuplePattern(patterns)),
|
||||
simple_pattern,
|
||||
))(text)
|
||||
}
|
||||
|
||||
fn simple_pattern(text: &str) -> ParseResult<Pattern> {
|
||||
alt((
|
||||
value(Pattern::Ignored, tag("_")),
|
||||
tuple_struct_pattern,
|
||||
record_pattern,
|
||||
map(pattern_literal, Pattern::Literal),
|
||||
map(qualified_name, Pattern::VarOrName),
|
||||
))(text)
|
||||
}
|
||||
|
||||
fn tuple_struct_pattern(text: &str) -> ParseResult<Pattern> {
|
||||
let p = tuple((
|
||||
qualified_name,
|
||||
delimited(ws(tag("(")),
|
||||
separated_nonempty_list(ws(tag(",")), ws(pattern)),
|
||||
ws(tag(")"))
|
||||
)
|
||||
));
|
||||
map(p, |(name, patterns)| Pattern::TupleStruct(name, patterns))(text)
|
||||
}
|
||||
|
||||
fn record_pattern(text: &str) -> ParseResult<Pattern> {
|
||||
let p = tuple((
|
||||
qualified_name,
|
||||
delimited(ws(tag("{")),
|
||||
separated_nonempty_list(ws(tag(",")), ws(record_pattern_entry)), //TODO support newlines?
|
||||
ws(tag("}")))
|
||||
));
|
||||
map(p, |(name, members)| Pattern::Record(name, members))(text)
|
||||
}
|
||||
|
||||
fn record_pattern_entry(text: &str) -> ParseResult<(Rc<String>, Pattern)> {
|
||||
alt((
|
||||
map(tuple((ws(identifier), ws(tag(":")), ws(pattern))),
|
||||
|(name, _, pattern)| (name, pattern)),
|
||||
map(identifier, |name|
|
||||
(name.clone(), Pattern::Literal(PatternLiteral::StringPattern(name.clone())))
|
||||
)
|
||||
))(text)
|
||||
}
|
||||
|
||||
fn pattern_literal(text: &str) -> ParseResult<PatternLiteral> {
|
||||
use PatternLiteral::*;
|
||||
use nom::character::complete::char;
|
||||
alt((
|
||||
value(BoolPattern(true), tag("true")),
|
||||
value(BoolPattern(false), tag("false")),
|
||||
map(delimited(char('"'), take_until("\""), char('"')), |s: &str| StringPattern(Rc::new(s.to_string()))),
|
||||
))(text)
|
||||
//TODO handle signed_number_literal
|
||||
}
|
||||
|
||||
fn cond_block(text: &str) -> ParseResult<IfExpressionBody> {
|
||||
use nom::character::complete::char;
|
||||
//TODO maybe change this bit of syntax
|
||||
let comma_or_delimitor = alt((value((), char(',')), statement_sep));
|
||||
let p = delimited(char('{'),
|
||||
separated_nonempty_list(comma_or_delimitor, cond_arm),
|
||||
char('}'));
|
||||
map(p, IfExpressionBody::CondList)(text)
|
||||
}
|
||||
|
||||
fn cond_arm(text: &str) -> ParseResult<ConditionArm> {
|
||||
let variant_1 = map(
|
||||
tuple((condition, guard, tag("then"), expr_or_block)),
|
||||
|(condition, guard, _, body)| ConditionArm { condition, guard, body }
|
||||
);
|
||||
let variant_2 = map(
|
||||
preceded(tag("else"), expr_or_block),
|
||||
|body| ConditionArm { condition: Condition::Else, guard: None, body }
|
||||
);
|
||||
alt((variant_1, variant_2))(text)
|
||||
}
|
||||
|
||||
fn condition(text: &str) -> ParseResult<Condition> {
|
||||
alt((
|
||||
map(preceded(tag("is"), pattern), Condition::Pattern),
|
||||
map(tuple((binop, expression)), |(op, expr)|
|
||||
Condition::TruncatedOp(op, expr)),
|
||||
map(expression, Condition::Expression),
|
||||
))(text)
|
||||
}
|
||||
|
||||
fn guard(text: &str) -> ParseResult<Option<Expression>> {
|
||||
opt(preceded(tag("if"), expression))(text)
|
||||
}
|
||||
|
||||
fn expr_or_block(text: &str) -> ParseResult<Block> {
|
||||
//TODO fix
|
||||
alt((block, map(expression, |expr| vec![Statement { id: ItemId::new(0), kind: StatementKind::Expression(expr)}])))(text)
|
||||
}
|
||||
|
||||
fn block(text: &str) -> ParseResult<Block> {
|
||||
//TODO fix this so it can handle nested statements
|
||||
let make_expr = |e| Statement { id: ItemId::new(0), kind: StatementKind::Expression(e) };
|
||||
delimited(ws(tag("{")),
|
||||
delimited(opt(many0(statement_sep)),
|
||||
separated_list(many1(statement_sep),
|
||||
map(expression, make_expr)
|
||||
),
|
||||
opt(many0(statement_sep))
|
||||
),
|
||||
ws(tag("}")))(text)
|
||||
}
|
||||
|
||||
fn call_expr(text: &str) -> ParseResult<ExpressionKind> {
|
||||
use nom::character::complete::char;
|
||||
let parse_call = opt(
|
||||
delimited(char('('), separated_list(char(','), invocation_argument), char(')'))
|
||||
);
|
||||
let p = pair(primary_expr, parse_call);
|
||||
map(p, |(expr, call_part)| if let Some(arguments) = call_part {
|
||||
let f = bx!(Expression { id: ItemId::new(0), kind: expr, type_anno: None });
|
||||
ExpressionKind::Call { f, arguments }
|
||||
} else {
|
||||
expr
|
||||
})(text)
|
||||
}
|
||||
|
||||
fn prefix_expr(text: &str) -> ParseResult<ExpressionKind> {
|
||||
let (text, pfx) = ws(opt(prefix_op))(text)?;
|
||||
let (text, result) = call_expr(text)?;
|
||||
match pfx {
|
||||
None => Ok((text, result)),
|
||||
Some(pfx) => {
|
||||
let exp = Expression { id: ItemId::new(0), kind: result, type_anno: None };
|
||||
Ok((text, ExpressionKind::PrefixExp(pfx, Box::new(exp))))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// this implements Pratt parsing, see http://journal.stuffwithstuff.com/2011/03/19/pratt-parsers-expression-parsing-made-easy/
|
||||
fn precedence_expr(text: &str) -> ParseResult<ExpressionKind> {
|
||||
fn inner_precedence_expr(input: &str, precedence: i32) -> ParseResult<ExpressionKind> {
|
||||
let (mut outer_rest, mut lhs) = prefix_expr(input)?;
|
||||
loop {
|
||||
let (rest, _) = space0(outer_rest)?;
|
||||
let (rest, maybe_binop) = opt(binop)(rest)?;
|
||||
let (new_precedence, binop) = match maybe_binop {
|
||||
Some(binop) => (binop.precedence(), binop),
|
||||
None => break,
|
||||
};
|
||||
|
||||
if precedence >= new_precedence {
|
||||
break;
|
||||
}
|
||||
let (rest, _) = space0(rest)?;
|
||||
let (rest, rhs) = inner_precedence_expr(rest, new_precedence)?;
|
||||
outer_rest = rest;
|
||||
lhs = ExpressionKind::BinExp(binop,
|
||||
bx!(Expression::new(ItemId::new(0), lhs)),
|
||||
bx!(Expression::new(ItemId::new(0), rhs))
|
||||
);
|
||||
}
|
||||
Ok((outer_rest, lhs))
|
||||
}
|
||||
context("Precedence expression",
|
||||
|input| inner_precedence_expr(input, BinOp::min_precedence())
|
||||
)(text)
|
||||
}
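For readers following the Pratt-parsing link in the comment above, here is a standalone hedged sketch of the same precedence-climbing loop over a plain token slice; the operator table is invented for the example and is not Schala's precedence table:

```
// Minimal precedence-climbing illustration: single-token operands, left-fold of
// any operator whose precedence is strictly higher than min_prec (the same
// `precedence >= new_precedence` cutoff used by inner_precedence_expr above).
fn parse_expr(tokens: &[&str], pos: &mut usize, min_prec: i32) -> String {
    let prec = |op: &str| match op { "+" | "-" => 10, "*" | "/" => 20, _ => 0 };
    let mut lhs = tokens[*pos].to_string();
    *pos += 1;
    while *pos < tokens.len() {
        let op = tokens[*pos];
        let new_prec = prec(op);
        if min_prec >= new_prec {
            break;
        }
        *pos += 1;
        let rhs = parse_expr(tokens, pos, new_prec);
        lhs = format!("({} {} {})", lhs, op, rhs);
    }
    lhs
}

// parse_expr(&["1", "+", "2", "*", "3"], &mut 0, 0) == "(1 + (2 * 3))"
```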
|
||||
|
||||
fn expression_kind(text: &str) -> ParseResult<ExpressionKind> {
|
||||
context("Expression kind", ws(precedence_expr))(text)
|
||||
}
|
||||
|
||||
fn type_anno(text: &str) -> ParseResult<TypeIdentifier> {
|
||||
preceded(ws(tag(":")), ws(type_name))(text)
|
||||
}
|
||||
|
||||
fn type_name(text: &str) -> ParseResult<TypeIdentifier> {
|
||||
//TODO incomplete
|
||||
let (text, name) = identifier(text)?;
|
||||
let id = TypeIdentifier::Singleton(TypeSingletonName { name, params: vec![] });
|
||||
Ok((text, id))
|
||||
}
|
||||
|
||||
pub fn expression(text: &str) -> ParseResult<Expression> {
|
||||
let (rest, (kind, type_anno)) = ws(pair(expression_kind, opt(type_anno)))(text)?;
|
||||
let expr = Expression { id: ItemId::new(0), kind, type_anno };
|
||||
Ok((rest, expr))
|
||||
}
|
||||
|
||||
fn import(text: &str) -> ParseResult<ImportSpecifier> {
|
||||
let p = preceded(
|
||||
tag("import"),
|
||||
separated_nonempty_list(tag("::"), identifier)
|
||||
);
|
||||
map(p, |path_components| ImportSpecifier {
|
||||
id: ItemId::new(0),
|
||||
path_components,
|
||||
imported_names: ImportedNames::LastOfPath, //TODO finish
|
||||
})(text)
|
||||
}
|
||||
|
||||
fn module(text: &str) -> ParseResult<ModuleSpecifier> {
|
||||
let p = tuple((tag("module"), ws(identifier), ws(block)));
|
||||
map(p, |(_, name, contents)| ModuleSpecifier { name, contents })
|
||||
(text)
|
||||
}
|
||||
|
||||
fn declaration(text: &str) -> ParseResult<Declaration> {
|
||||
alt((
|
||||
func_declaration,
|
||||
type_declaration,
|
||||
))(text)
|
||||
}
|
||||
|
||||
fn func_declaration(text: &str) -> ParseResult<Declaration> {
|
||||
use Declaration::*;
|
||||
let p = tuple((func_signature, ws(opt(block))));
|
||||
map(p, |(signature, maybe_block)| match maybe_block {
|
||||
Some(block) => FuncDecl(signature, block),
|
||||
None => FuncSig(signature),
|
||||
})(text)
|
||||
}
|
||||
|
||||
fn func_signature(text: &str) -> ParseResult<Signature> {
|
||||
let p = preceded(tag("fn"), cut(tuple((ws(identifier), ws(formal_params), opt(ws(type_anno))))));
|
||||
//TODO fix op
|
||||
map(p, |(name, params, type_anno)| Signature { name, params, type_anno, operator: false })
|
||||
(text)
|
||||
}
|
||||
|
||||
fn formal_params(text: &str) -> ParseResult<Vec<FormalParam>> {
|
||||
delimited(tag("("), ws(separated_list(ws(tag(",")), formal_param)), tag(")"))(text)
|
||||
}
|
||||
|
||||
fn formal_param(text: &str) -> ParseResult<FormalParam> {
|
||||
let default = opt(preceded(ws(tag("=")), ws(expression)));
|
||||
let p = tuple((ws(identifier), opt(ws(type_anno)), default));
|
||||
map(p, |(name, anno, default)|
|
||||
FormalParam { name, anno, default })(text)
|
||||
}
|
||||
|
||||
fn type_declaration(text: &str) -> ParseResult<Declaration> {
|
||||
preceded(tag("type"), ws(type_declaration_body))(text)
|
||||
}
|
||||
|
||||
fn type_declaration_body(text: &str) -> ParseResult<Declaration> {
|
||||
let t = tuple((opt(tag("mut")), ws(type_singleton_name), ws(tag("=")), ws(type_body)));
|
||||
alt((
|
||||
preceded(tag("alias"), ws(type_alias)),
|
||||
map(t, |(mut_kw, name, _, body)| {
|
||||
Declaration::TypeDecl { name, body, mutable: mut_kw.is_some() }
|
||||
})
|
||||
))(text)
|
||||
}
|
||||
|
||||
fn type_body(text: &str) -> ParseResult<TypeBody> {
|
||||
let p = separated_nonempty_list(ws(tag("|")), variant_specifier);
|
||||
map(p, TypeBody)(text)
|
||||
}
|
||||
|
||||
fn variant_specifier(text: &str) -> ParseResult<Variant> {
|
||||
use self::Variant::*;
|
||||
let tuple_struct =
|
||||
delimited(tag("("), separated_nonempty_list(ws(tag(",")), type_name), ws(tag(")")));
|
||||
//TODO record
|
||||
|
||||
let p = tuple((identifier, opt(tuple_struct)));
|
||||
map(p, |(name, maybe_tuple_members)| match maybe_tuple_members {
|
||||
Some(members) => TupleStruct(name, members),
|
||||
None => UnitStruct(name),
|
||||
})(text)
|
||||
}
|
||||
|
||||
fn type_singleton_name(text: &str) -> ParseResult<TypeSingletonName> {
|
||||
let p = tuple((identifier, opt(delimited(tag("<"),
|
||||
separated_nonempty_list(tag(","), ws(type_name)),
|
||||
tag(">")))));
|
||||
map(p, |(name, params)| TypeSingletonName { name, params: params.unwrap_or(vec![]) })(text)
|
||||
}
|
||||
|
||||
fn type_alias(text: &str) -> ParseResult<Declaration> {
|
||||
let p = tuple((ws(identifier), ws(tag("=")), ws(identifier)));
|
||||
map(p, |(alias, _, original)| Declaration::TypeAlias { alias, original })
|
||||
(text)
|
||||
}
|
||||
|
||||
fn statement(text: &str) -> ParseResult<Statement> {
|
||||
let p = alt((
|
||||
map(import, StatementKind::Import),
|
||||
map(module, StatementKind::Module),
|
||||
map(declaration, StatementKind::Declaration),
|
||||
map(expression, StatementKind::Expression),
|
||||
));
|
||||
map(p, |kind| Statement { id: ItemId::new(0), kind })(text)
|
||||
}
|
||||
|
||||
pub fn parse_ast(text: &str) -> ParseResult<AST> {
|
||||
map(separated_list(statement_sep, statement),
|
||||
|statements| AST { id: ItemId::new(0), statements }
|
||||
)(text)
|
||||
}
|
||||
|
||||
pub fn perform_parsing(input: &str) -> Result<String, String> {
|
||||
let output = match parse_ast(input) {
|
||||
Ok((rest, ast)) => format!("{:?} (rest: {})", ast, rest),
|
||||
Err(nom::Err::Incomplete(needed)) => format!("Incomplete: {:?}" ,needed),
|
||||
Err(nom::Err::Error(verbose_error) | nom::Err::Failure(verbose_error)) => {
|
||||
format!("Verbose Error: ` {:?} `", verbose_error)
|
||||
//nom::error::convert_error(input, verbose_error)
|
||||
}
|
||||
};
|
||||
|
||||
Ok(output)
|
||||
}
|
||||
@@ -991,7 +991,7 @@ impl Parser {
|
||||
self.token_handler.next();
|
||||
Pattern::Literal(PatternLiteral::BoolPattern(false))
|
||||
},
|
||||
StrLiteral { s, .. } => {
|
||||
StrLiteral(s) => {
|
||||
self.token_handler.next();
|
||||
Pattern::Literal(PatternLiteral::StringPattern(s))
|
||||
},
|
||||
@@ -1140,7 +1140,7 @@ impl Parser {
|
||||
let id = self.id_store.fresh();
|
||||
Ok(Expression::new(id, BoolLiteral(false)))
|
||||
},
|
||||
StrLiteral {s, ..} => {
|
||||
StrLiteral(s) => {
|
||||
self.token_handler.next();
|
||||
let id = self.id_store.fresh();
|
||||
Ok(Expression::new(id, StringLiteral(s.clone())))
|
||||
|
||||
@@ -3,7 +3,7 @@ use std::cell::RefCell;
|
||||
use std::rc::Rc;
|
||||
use std::str::FromStr;
|
||||
|
||||
use super::{Parser, ParseResult, tokenize, ParseError};
|
||||
use super::{Parser, ParseResult, tokenize};
|
||||
use crate::ast::*;
|
||||
use super::Declaration::*;
|
||||
use super::Signature;
|
||||
@@ -13,7 +13,6 @@ use super::ExpressionKind::*;
|
||||
use super::Variant::*;
|
||||
use super::ForBody::*;
|
||||
|
||||
/*
|
||||
fn make_parser(input: &str) -> Parser {
|
||||
let source_map = crate::source_map::SourceMap::new();
|
||||
let source_map_handle = Rc::new(RefCell::new(source_map));
|
||||
@@ -22,19 +21,10 @@ fn make_parser(input: &str) -> Parser {
|
||||
parser.add_new_tokens(tokens);
|
||||
parser
|
||||
}
|
||||
*/
|
||||
|
||||
fn parse(input: &str) -> ParseResult<AST> {
|
||||
use crate::tokenizing::*;
|
||||
crate::parser::parse_ast(input).map_err(|err| {
|
||||
let token = Token { kind: TokenKind::Newline, location: crate::source_map::Location { line_num: 0, char_num: 0 } };
|
||||
ParseError { production_name: None, msg: "".to_string(), token }
|
||||
})
|
||||
.map(|(rest, s)| s)
|
||||
/*
|
||||
let mut parser = make_parser(input);
|
||||
parser.parse()
|
||||
*/
|
||||
}
|
||||
|
||||
macro_rules! parse_test {
|
||||
@@ -92,11 +82,8 @@ macro_rules! ex {
|
||||
($expr_type:expr, $type_anno:expr) => { Expression::with_anno(ItemIdStore::new_id(), $expr_type, $type_anno) };
|
||||
(s $expr_text:expr) => {
|
||||
{
|
||||
/*
|
||||
let mut parser = make_parser($expr_text);
|
||||
parser.expression().unwrap()
|
||||
*/
|
||||
crate::parser::expression($expr_text).unwrap().1
|
||||
}
|
||||
};
|
||||
}
|
||||
@@ -118,16 +105,8 @@ macro_rules! exst {
|
||||
};
|
||||
(s $statement_text:expr) => {
|
||||
{
|
||||
/*
|
||||
let mut parser = make_parser($statement_text);
|
||||
parser.statement().unwrap()
|
||||
*/
|
||||
Statement {
|
||||
kind: StatementKind::Expression(
|
||||
crate::parser::expression($statement_text).unwrap().1
|
||||
),
|
||||
id: ItemIdStore::new_id()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -618,13 +597,13 @@ fn more_advanced_lambdas() {
|
||||
#[test]
|
||||
fn while_expr() {
|
||||
parse_test_wrap_ast! {
|
||||
"while { 3 }",
|
||||
exst!(WhileExpression { condition: None, body: vec![ exst!(s "3")] })
|
||||
"while { }",
|
||||
exst!(WhileExpression { condition: None, body: vec![] })
|
||||
}
|
||||
|
||||
parse_test_wrap_ast! {
|
||||
"while a == b { 3 }",
|
||||
exst!(WhileExpression { condition: Some(bx![ex![binexp!("==", val!("a"), val!("b"))]]), body: vec![ exst!(s "3")] })
|
||||
"while a == b { }",
|
||||
exst!(WhileExpression { condition: Some(bx![ex![binexp!("==", val!("a"), val!("b"))]]), body: vec![] })
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -42,9 +42,9 @@ pub enum Stmt {
|
||||
pub enum Expr {
|
||||
Unit,
|
||||
Lit(Lit),
|
||||
Sym(Rc<String>), //a Sym is anything that can be looked up by name at runtime - i.e. a function or variable address
|
||||
Tuple(Vec<Expr>),
|
||||
Func(Func),
|
||||
Sym(FullyQualifiedSymbolName),
|
||||
Constructor {
|
||||
type_name: Rc<String>,
|
||||
name: Rc<String>,
|
||||
@@ -56,7 +56,7 @@ pub enum Expr {
|
||||
args: Vec<Expr>,
|
||||
},
|
||||
Assign {
|
||||
val: Box<Expr>, //TODO this probably can't be a val
|
||||
val: Box<Expr>,
|
||||
expr: Box<Expr>,
|
||||
},
|
||||
Conditional {
|
||||
@@ -164,7 +164,23 @@ impl<'a> Reducer<'a> {
|
||||
BoolLiteral(b) => Expr::Lit(Lit::Bool(*b)),
|
||||
BinExp(binop, lhs, rhs) => self.binop(binop, lhs, rhs),
|
||||
PrefixExp(op, arg) => self.prefix(op, arg),
|
||||
Value(qualified_name) => self.value(qualified_name),
|
||||
Value(qualified_name) => {
|
||||
let ref id = qualified_name.id;
|
||||
let ref fqsn = match symbol_table.get_fqsn_from_id(id) {
|
||||
Some(fqsn) => fqsn,
|
||||
None => return Expr::ReductionError(format!("FQSN lookup for Value {:?} failed", qualified_name)),
|
||||
};
|
||||
//TODO this probably needs to change
|
||||
match symbol_table.lookup_by_fqsn(&fqsn) {
|
||||
Some(Symbol { spec: SymbolSpec::DataConstructor { index, type_args, type_name}, local_name, .. }) => Expr::Constructor {
|
||||
type_name: type_name.clone(),
|
||||
name: local_name.clone(),
|
||||
tag: index.clone(),
|
||||
arity: type_args.len(),
|
||||
},
|
||||
_ => Expr::Sym(fqsn.clone()),
|
||||
}
|
||||
},
|
||||
Call { f, arguments } => self.reduce_call_expression(f, arguments),
|
||||
TupleLiteral(exprs) => Expr::Tuple(exprs.iter().map(|e| self.expression(e)).collect()),
|
||||
IfExpression { discriminator, body } => self.reduce_if_expression(deref_optional_box(discriminator), body),
|
||||
@@ -177,38 +193,6 @@ impl<'a> Reducer<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
fn value(&mut self, qualified_name: &QualifiedName) -> Expr {
|
||||
let symbol_table = self.symbol_table;
|
||||
let ref id = qualified_name.id;
|
||||
let ref sym_name = match symbol_table.get_fqsn_from_id(id) {
|
||||
Some(fqsn) => fqsn,
|
||||
None => return Expr::ReductionError(format!("FQSN lookup for Value {:?} failed", qualified_name)),
|
||||
};
|
||||
|
||||
//TODO this probably needs to change
|
||||
let FullyQualifiedSymbolName(ref v) = sym_name;
|
||||
let name = v.last().unwrap().name.clone();
|
||||
|
||||
let Symbol { local_name, spec, .. } = match symbol_table.lookup_by_fqsn(&sym_name) {
|
||||
Some(s) => s,
|
||||
//None => return Expr::ReductionError(format!("Symbol {:?} not found", sym_name)),
|
||||
None => return Expr::Sym(name.clone())
|
||||
};
|
||||
|
||||
match spec {
|
||||
SymbolSpec::RecordConstructor { .. } => Expr::ReductionError(format!("AST reducer doesn't expect a RecordConstructor here")),
|
||||
SymbolSpec::DataConstructor { index, type_args, type_name } => Expr::Constructor {
|
||||
type_name: type_name.clone(),
|
||||
name: name.clone(),
|
||||
tag: index.clone(),
|
||||
arity: type_args.len(),
|
||||
},
|
||||
SymbolSpec::Func(_) => Expr::Sym(local_name.clone()),
|
||||
SymbolSpec::Binding => Expr::Sym(local_name.clone()), //TODO not sure if this is right, probably needs to eventually be fqsn
|
||||
SymbolSpec::Type { .. } => Expr::ReductionError("AST reducer doesnt expect a type here".to_string())
|
||||
}
|
||||
}
|
||||
|
||||
fn reduce_lambda(&mut self, params: &Vec<FormalParam>, body: &Block) -> Expr {
|
||||
Expr::Func(Func::UserDefined {
|
||||
name: None,
|
||||
|
||||
@@ -10,7 +10,7 @@ use schala_repl::{ProgrammingLanguageInterface,
|
||||
ComputationRequest, ComputationResponse,
|
||||
LangMetaRequest, LangMetaResponse, GlobalOutputStats,
|
||||
DebugResponse, DebugAsk};
|
||||
use crate::{ast, reduced_ast, tokenizing, parsing, parser, eval, typechecking, symbol_table, source_map};
|
||||
use crate::{ast, reduced_ast, tokenizing, parsing, eval, typechecking, symbol_table, source_map};
|
||||
|
||||
pub type SymbolTableHandle = Rc<RefCell<symbol_table::SymbolTable>>;
|
||||
pub type SourceMapHandle = Rc<RefCell<source_map::SourceMap>>;
|
||||
@@ -47,7 +47,7 @@ impl Schala {
|
||||
symbol_table: symbols.clone(),
|
||||
source_map: source_map.clone(),
|
||||
resolver: crate::scope_resolution::ScopeResolver::new(symbols.clone()),
|
||||
state: eval::State::new(),
|
||||
state: eval::State::new(symbols),
|
||||
type_context: typechecking::TypeContext::new(),
|
||||
active_parser: parsing::Parser::new(source_map)
|
||||
}
|
||||
@@ -319,8 +319,6 @@ impl ProgrammingLanguageInterface for Schala {
|
||||
total_duration, stage_durations
|
||||
};
|
||||
|
||||
let main_output = parser::perform_parsing(source);
|
||||
|
||||
ComputationResponse {
|
||||
main_output,
|
||||
global_output_stats,
|
||||
|
||||
@@ -13,7 +13,6 @@ pub struct ScopeResolver<'a> {
|
||||
}
|
||||
|
||||
impl<'a> ASTVisitor for ScopeResolver<'a> {
|
||||
//TODO need to un-insert these - maybe need to rethink visitor
|
||||
fn import(&mut self, import_spec: &ImportSpecifier) {
|
||||
let ref symbol_table = self.symbol_table_handle.borrow();
|
||||
let ImportSpecifier { ref path_components, ref imported_names, .. } = &import_spec;
|
||||
|
||||
@@ -21,10 +21,7 @@ pub enum TokenKind {
|
||||
|
||||
Operator(Rc<String>),
|
||||
DigitGroup(Rc<String>), HexLiteral(Rc<String>), BinNumberSigil,
|
||||
StrLiteral {
|
||||
s: Rc<String>,
|
||||
prefix: Option<Rc<String>>
|
||||
},
|
||||
StrLiteral(Rc<String>),
|
||||
Identifier(Rc<String>),
|
||||
Keyword(Kw),
|
||||
|
||||
@@ -40,7 +37,7 @@ impl fmt::Display for TokenKind {
|
||||
&Operator(ref s) => write!(f, "Operator({})", **s),
|
||||
&DigitGroup(ref s) => write!(f, "DigitGroup({})", s),
|
||||
&HexLiteral(ref s) => write!(f, "HexLiteral({})", s),
|
||||
&StrLiteral {ref s, .. } => write!(f, "StrLiteral({})", s),
|
||||
&StrLiteral(ref s) => write!(f, "StrLiteral({})", s),
|
||||
&Identifier(ref s) => write!(f, "Identifier({})", s),
|
||||
&Error(ref s) => write!(f, "Error({})", s),
|
||||
other => write!(f, "{:?}", other),
|
||||
@@ -166,7 +163,7 @@ pub fn tokenize(input: &str) -> Vec<Token> {
|
||||
'(' => LParen, ')' => RParen,
|
||||
'{' => LCurlyBrace, '}' => RCurlyBrace,
|
||||
'[' => LSquareBracket, ']' => RSquareBracket,
|
||||
'"' => handle_quote(&mut input, None),
|
||||
'"' => handle_quote(&mut input),
|
||||
'\\' => Backslash,
|
||||
c if c.is_digit(10) => handle_digit(c, &mut input),
|
||||
c if c.is_alphabetic() || c == '_' => handle_alphabetic(c, &mut input),
|
||||
@@ -194,7 +191,7 @@ fn handle_digit(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>) ->
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_quote(input: &mut Peekable<impl Iterator<Item=CharData>>, quote_prefix: Option<&str>) -> TokenKind {
|
||||
fn handle_quote(input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenKind {
|
||||
let mut buf = String::new();
|
||||
loop {
|
||||
match input.next().map(|(_, _, c)| { c }) {
|
||||
@@ -216,7 +213,7 @@ fn handle_quote(input: &mut Peekable<impl Iterator<Item=CharData>>, quote_prefix
|
||||
None => return TokenKind::Error(format!("Unclosed string")),
|
||||
}
|
||||
}
|
||||
TokenKind::StrLiteral { s: Rc::new(buf), prefix: quote_prefix.map(|s| Rc::new(s.to_string())) }
|
||||
TokenKind::StrLiteral(Rc::new(buf))
|
||||
}
|
||||
|
||||
fn handle_alphabetic(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>) -> TokenKind {
|
||||
@@ -228,10 +225,6 @@ fn handle_alphabetic(c: char, input: &mut Peekable<impl Iterator<Item=CharData>>
|
||||
|
||||
loop {
|
||||
match input.peek().map(|&(_, _, c)| { c }) {
|
||||
Some(c) if c == '"' => {
|
||||
input.next();
|
||||
return handle_quote(input, Some(&buf));
|
||||
},
|
||||
Some(c) if c.is_alphanumeric() || c == '_' => {
|
||||
input.next();
|
||||
buf.push(c);
|
||||
@@ -332,13 +325,4 @@ mod schala_tokenizer_tests {
|
||||
let token_kinds: Vec<TokenKind> = tokenize("1 `plus` 2").into_iter().map(move |t| t.kind).collect();
|
||||
assert_eq!(token_kinds, vec![digit!("1"), op!("plus"), digit!("2")]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn string_literals() {
|
||||
let token_kinds: Vec<TokenKind> = tokenize(r#""some string""#).into_iter().map(move |t| t.kind).collect();
|
||||
assert_eq!(token_kinds, vec![StrLiteral { s: Rc::new("some string".to_string()), prefix: None }]);
|
||||
|
||||
let token_kinds: Vec<TokenKind> = tokenize(r#"b"some bytestring""#).into_iter().map(move |t| t.kind).collect();
|
||||
assert_eq!(token_kinds, vec![StrLiteral { s: Rc::new("some bytestring".to_string()), prefix: Some(Rc::new("b".to_string())) }]);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,5 @@
#![feature(link_args, box_patterns, box_syntax, proc_macro_hygiene, decl_macro)]
#![feature(link_args)]
#![feature(slice_patterns, box_patterns, box_syntax, proc_macro_hygiene, decl_macro)]
#![feature(plugin)]
extern crate getopts;
extern crate linefeed;