Compare commits
125 Commits
multiline_ ... new-comman
Commits (SHA1):
b0ca955de6, 3987360f8e, 78d1e93e4b, 856c0f95ce, 3fa624bef4, f27a65018d, 548a7b5f36, d80d0d0904,
6162bae1ac, fe7ba339b5, 6a232907c5, a8583f6bc4, bdee4fe7c6, 5cdc2f3d07, eb2adb5b79, 2b407a4a83,
6da6f6312d, ce2a65b044, ffdae14a88, 94ea7bcd09, 4ebf7fe879, efbeff916a, e9ea7811df, 198f93c533,
694c152fcd, f8f3095f89, c68c23ed68, 4f972f20a7, 9d2e5918af, 14fc2a5d10, 2b8e2749a4, 6c369b072f,
938c0401d1, a829fb6cd8, 004b056232, 8e9b410e02, b82eebdeec, 153e7977d3, 5b5368ce6f, 7a67890227,
04253543e9, 3a98096b61, 9476e7039b, c767402865, 61972410ea, d3f9430a18, 81323cafd4, 14c08bbcdb,
4319c802f5, 9e58e3d7de, ac0050e5d1, d06cf90fce, 712da62d35, 57f3d39ea1, 6d88447458, 0451676ba7,
2929362046, 375db28ebb, 1622a6ce44, 7e899246e9, 8610bd7a87, 70f715fbb2, 7360e698dd, 5b35c2a036,
8d8d7d8bf8, 981d4f88bf, 42aa316a23, 58b37e56ae, 2bf777f37b, bdcae36b60, dbcd2278a6, 2490aaf3f4,
d4ad97b39a, 24213070a3, 051669b4cc, c64f53a050, 8f176543c7, 9716b5e55b, 956353cd80, 98db60498a,
7694afc9e2, 0bcd7e6f41, d515b1658a, e501f4bd10, 5bac01cf20, 0e9b3229e9, b709cfd51a, e34295a6f7,
8dc34e4b49, 2cc3367666, 452f2ab188, be175a2b75, 00a0de4431, f041cc17d2, 95fe1941a1, b35262c444,
9bb3a2be88, 9fa0576547, 6fba0cc5b4, a6eb2b4020, 03793e08d3, 2be55958f4, bcf48d0ecb, f0ed63ccf3,
6012bd1087, 866c9211f9, df7e74c79d, abbd02eaef, 993741e67f, fbb7b995b8, 9d4f086a04, e38ae1c3f1,
d969d573fa, 35da1748f0, 5e1799268d, 42a801d346, a80e1bd706, afd9aa52c5, 5a70784346, 0dff177e8f,
cf91f74912, 06e9452718, 7d3ae36058, e8f1f51639, 170cf349d7
@@ -6,12 +6,10 @@ authors = ["greg <greg.shuflin@protonmail.com>"]
[dependencies]

schala-repl = { path = "schala-repl" }
schala-repl-codegen = { path = "schala-repl-codegen" }
maaru-lang = { path = "maaru" }
rukka-lang = { path = "rukka" }
robo-lang = { path = "robo" }
schala-lang = { path = "schala-lang/language" }
schala-lang-codegen = { path = "schala-lang/codegen" }
# maaru-lang = { path = "maaru" }
# rukka-lang = { path = "rukka" }
# robo-lang = { path = "robo" }

[build-dependencies]
includedir_codegen = "0.2.0"
43 README.md
@@ -1,19 +1,22 @@
# Schala - a programming language meta-interpreter

Schala is a Rust framework written to make it easy to create and experiment
with toy programming languages. It provides a cross-language REPL and
with multipl toy programming languages. It provides a cross-language REPL and
provisions for tokenizing text, parsing tokens, evaluating an abstract syntax
tree, and other tasks that are common to all programming languages.
tree, and other tasks that are common to all programming languages, as well as sharing state
between multiple programming languages.

Schala is implemented as a Rust library `schala-repl`, which provides a
function `repl_main` meant to be used as the equivalent of main() for library
users. This function parses command-line arguments and either runs an interactive
REPL or interprets a program non-interactively.
function `start_repl`, meant to be used as entry point into a common REPL or
non-interactive environment. Clients are expected to invoke `start_repl` with a
vector of programming languages. Individual programming language
implementations are Rust types that implement the
`ProgrammingLanguageInterface` trait and store whatever persistent state is
relevant to that language.

Individual programming language implementations are Rust types that implement
the `ProgrammingLanguageInterface` trait and store whatever persistent state is
relevant to that language. The ability to share state between different
programming languages is in the works.
Run schala with: `cargo run`. This will drop you into a REPL environment. Type
`:help` for more information, or type in text in any supported programming
language (currently only schala-lang) to evaluate it in the REPL.

## History

@@ -33,18 +36,18 @@ creating a language name confusingly close to Scala. The naming scheme for
languages implemented with the Schala meta-interpreter is Chrono Trigger
characters.

Schala is incomplete alpha software and is not ready for public release.
Schala and languages implemented with it are incomplete alpha software and are
not ready for public release.

## Languages implemented using the meta-interpreter

* The eponymous *Schala* language is an interpreted/compiled scripting langauge,
designed to be relatively simple, but with a reasonably sophisticated type
system.
* The eponymous *Schala* language is a work-in-progress general purpose
programming language with static typing and algebraic data types. Its design
goals include having a very straightforward implemenation and being syntactically
minimal.

* *Maaru* was the original Schala (since renamed to free up the name *Schala*
for the above language), a very simple dynamically-typed scripting language
such that all possible runtime errors result in null rather than program
failure.
* *Maaru* is a very simple dynamically-typed scripting language, with the semantics
that all runtime errors return a `null` value rather than fail.

* *Robo* is an experiment in creating a lazy, functional, strongly-typed language
much like Haskell
@@ -62,6 +65,12 @@ https://www.youtube.com/watch?v=il3gD7XMdmA
http://dev.stephendiehl.com/fun/006_hindley_milner.html
https://rust-lang-nursery.github.io/rustc-guide/type-inference.html

https://eli.thegreenplace.net/2018/unification/
https://eli.thegreenplace.net/2018/type-inference/
http://smallcultfollowing.com/babysteps/blog/2017/03/25/unification-in-chalk-part-1/
http://reasonableapproximation.net/2019/05/05/hindley-milner.html
https://rickyhan.com/jekyll/update/2018/05/26/hindley-milner-tutorial-rust.html

### Evaluation
*Understanding Computation*, Tom Stuart, O'Reilly 2013
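The README changes above describe the new `start_repl` entry point, which clients invoke with a vector of language implementations. As a rough illustration of that contract, here is a minimal sketch of a client crate's `main`. The exact signature of `start_repl` is not shown in this diff, so the `Vec<Box<dyn ProgrammingLanguageInterface>>` argument shape and the `schala_lang::Schala` module path are assumptions; only the trait name and the `Schala::new()` constructor come from this changeset.

```rust
// Hedged sketch of a client crate; adjust to the real `start_repl` signature.
extern crate schala_lang;
extern crate schala_repl;

use schala_repl::{start_repl, ProgrammingLanguageInterface};

fn main() {
    // Each language value owns whatever persistent interpreter state it needs.
    let languages: Vec<Box<dyn ProgrammingLanguageInterface>> =
        vec![Box::new(schala_lang::Schala::new())];

    // Hands control to the shared REPL / non-interactive driver.
    start_repl(languages);
}
```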
140 TODO.md
@@ -1,58 +1,74 @@
#Typechecking Notes
# TODO items

(cf. cardelli paper)
## Typechecking

Given a length function def:
````
fn length(x) {
  if x.is_null {
    0
  } else {
    succ(length(x.tail))
  }
}
````
Constraints:
.null: List a -> bool
.tail: List a -> List a
0: Nat
succ: Nat -> Nat
- look at https://rickyhan.com/jekyll/update/2018/05/26/hindley-milner-tutorial-rust.html

- cf. the notation mentioned in the cardelli paper, the debug information for the `typechecking` pass should
print the generated type variable for every subexpression in an expression

- change 'trait' to 'interface'

- think about idris-related ideas of multiple implementations of a type for an interface (+ vs * impl for monoids, for preorder/inorder/postorder for Foldable)

# TODO Items
-should have an Idris-like `cast To From` function

-make the REPL more advanced!
## Schala-lang syntax

-Plan of attack:
-write a visitor pattern for AST
-convert AST type to including SourceMap'd wrappers (w/ .into())
-at the same time, amke sure the visitor pattern "skips over" the SourceMap'd stuff
so it can just care about AST structure
-idea: the `type` declaration should have some kind of GADT-like syntax

- AST : maybe replace the Expression type with "Ascription(TypeName, Box<Expression>) nodes??
- parser: add a "debug" field to the Parser struct for all debug-related things
-idea: I should make the BNF grammar part of the documentation...

-scala-style html"dfasfsadf${}" string interpolations!
- Idea: if you have a pattern-match where one variant has a variable and the other lacks it
instead of treating this as a type error, promote the bound variable to an option type

-fuzz test schala
- Include extensible scala-style html"string ${var}" string interpolations

-look into Inkwell for LLVM

*A neat idea for pattern matching optimization would be if you could match on one of several things in a list
- A neat idea for pattern matching optimization would be if you could match on one of several things in a list
ex:
if x {
```if x {
  is (comp, LHSPat, RHSPat) if comp in ["==, "<"] -> ...
}
}```

- Schala should have both currying *and* default arguments!
```fn a(b: Int, c:Int, d:Int = 1) -> Int
a(1,2) : Int
a(1,2,d=2): Int
a(_,1,3) : Int -> Int
a(1,2, c=_): Int -> Int
a(_,_,_) : Int -> Int -> Int -> Int
```

- scoped types - be able to define a quick enum type scoped to a function or other type for
something, that only is meant to be used as a quick bespoke interface between
two other things

ex.
```type enum {
  type enum MySubVariant {
    SubVariant1, SubVariant2, etc.
  }
  Variant1(MySubVariant),
  Variant2(...),
}```

- inclusive/exclusive range syntax like .. vs ..=

## Compilation
-look into Inkwell for rust LLVM bindings

- https://nshipster.com/never/
-https://cranelift.readthedocs.io/en/latest/?badge=latest<Paste>

## Other links of note

- https://nshipster.com/never/
-consult http://gluon-lang.org/book/embedding-api.html

## Playing around with conditional syntax ideas

- if/match playground

simple if
@@ -94,64 +110,14 @@ can replace `'if' discriminator '{' 'true' 'then' block_or_expr; 'false' 'then'

- Next priorities: - get ADTs working, get matches working

- inclusive/exclusive range syntax like .. vs ..=

- sketch of an idea for the REPL:
-each compiler pass should be a (procedural?) macro like
compiler_pass!("parse", dataproducts: ["ast", "parse_tree"], {
  match parsing::parse(INPUT) {
    Ok(
    PASS.add_artifact(
}

-should have an Idris-like `cast To From` function

- REPL:
- want to be able to do things like `:doc Identifier`, and have the language load up these definitions to the REPL

* change 'trait' to 'interface'
-think about idris-related ideas of multiple implementations of a type for an interface (+ vs * impl for monoids, for preorder/inorder/postorder for Foldable)

* Share state between programming languages

* idea for Schala - scoped types - be able to define a quick enum type scoped to a function ro something, that only is meant to be used as a quick bespoke interface between two other things

* another idea, allow:
type enum {
  type enum MySubVariant {
    SubVariant1, SubVariant2, etc.
  }
  Variant1(MySubVariant),
  Variant2(...),
}

* idea for Schala: both currying *and* default arguments!
ex. fn a(b: Int, c:Int, d:Int = 1) -> Int
a(1,2) : Int
a(1,2,d=2): Int
a(_,1,3) : Int -> Int
a(1,2, c=_): Int -> Int
a(_,_,_) : Int -> Int -> Int -> Int

*Compiler passes architecture

-ProgrammingLanguageInterface defines a evaluate_in_repl() and evaluate_no_repl() functions
-these take in a vec of CompilerPasses

struct CompilerPass {
  name: String,
  run: fn(PrevPass) -> NextPass
}

-change "Type...." names in parser.rs to "Anno..." for non-collision with names in typechecking.rs

-get rid of code pertaining to compilation specifically, have a more generation notion of "execution type"
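Tying back to the typechecking note near the top of this TODO file (printing a generated type variable for every subexpression), here is a minimal, self-contained sketch of what a fresh-type-variable supply for that debug output could look like. Nothing here is taken from the actual typechecking code in this changeset; all names (`TypeVar`, `TypeVarSupply`, `fresh`) are illustrative.

```rust
// Illustrative only: a counter that hands out one fresh type variable per
// subexpression, so a typechecking debug pass can print "subexpr : tN" lines.
#[derive(Debug, Clone, Copy)]
struct TypeVar(usize);

#[derive(Default)]
struct TypeVarSupply {
    next: usize,
}

impl TypeVarSupply {
    // Every call returns a distinct variable: t0, t1, t2, ...
    fn fresh(&mut self) -> TypeVar {
        let var = TypeVar(self.next);
        self.next += 1;
        var
    }
}

fn main() {
    let mut supply = TypeVarSupply::default();
    // e.g. annotating the subexpressions of `succ(length(x.tail))`
    for subexpr in &["x", "x.tail", "length(x.tail)", "succ(length(x.tail))"] {
        println!("{} : t{}", subexpr, supply.fresh().0);
    }
}
```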
@@ -6,8 +6,6 @@ mod tokenizer;
|
||||
mod parser;
|
||||
mod eval;
|
||||
|
||||
use schala_repl::{ProgrammingLanguageInterface, EvalOptions, UnfinishedComputation, FinishedComputation, TraceArtifact};
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct TokenError {
|
||||
pub msg: String,
|
||||
@@ -33,6 +31,42 @@ impl<'a> Maaru<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
fn execute_pipeline(&mut self, input: &str, options: &EvalOptions) -> Result<String, String> {
|
||||
let mut output = UnfinishedComputation::default();
|
||||
|
||||
let tokens = match tokenizer::tokenize(input) {
|
||||
Ok(tokens) => {
|
||||
if let Some(_) = options.debug_passes.get("tokens") {
|
||||
output.add_artifact(TraceArtifact::new("tokens", format!("{:?}", tokens)));
|
||||
}
|
||||
tokens
|
||||
},
|
||||
Err(err) => {
|
||||
return output.finish(Err(format!("Tokenization error: {:?}\n", err.msg)))
|
||||
}
|
||||
};
|
||||
|
||||
let ast = match parser::parse(&tokens, &[]) {
|
||||
Ok(ast) => {
|
||||
if let Some(_) = options.debug_passes.get("ast") {
|
||||
output.add_artifact(TraceArtifact::new("ast", format!("{:?}", ast)));
|
||||
}
|
||||
ast
|
||||
},
|
||||
Err(err) => {
|
||||
return output.finish(Err(format!("Parse error: {:?}\n", err.msg)))
|
||||
}
|
||||
};
|
||||
let mut evaluation_output = String::new();
|
||||
for s in self.evaluator.run(ast).iter() {
|
||||
evaluation_output.push_str(s);
|
||||
}
|
||||
Ok(evaluation_output)
|
||||
}
|
||||
*/
|
||||
|
||||
/*
|
||||
impl<'a> ProgrammingLanguageInterface for Maaru<'a> {
|
||||
fn get_language_name(&self) -> String {
|
||||
"Maaru".to_string()
|
||||
@@ -40,37 +74,5 @@ impl<'a> ProgrammingLanguageInterface for Maaru<'a> {
|
||||
fn get_source_file_suffix(&self) -> String {
|
||||
format!("maaru")
|
||||
}
|
||||
|
||||
fn execute_pipeline(&mut self, input: &str, options: &EvalOptions) -> FinishedComputation {
|
||||
let mut output = UnfinishedComputation::default();
|
||||
|
||||
let tokens = match tokenizer::tokenize(input) {
|
||||
Ok(tokens) => {
|
||||
if let Some(_) = options.debug_passes.get("tokens") {
|
||||
output.add_artifact(TraceArtifact::new("tokens", format!("{:?}", tokens)));
|
||||
}
|
||||
tokens
|
||||
},
|
||||
Err(err) => {
|
||||
return output.finish(Err(format!("Tokenization error: {:?}\n", err.msg)))
|
||||
}
|
||||
};
|
||||
|
||||
let ast = match parser::parse(&tokens, &[]) {
|
||||
Ok(ast) => {
|
||||
if let Some(_) = options.debug_passes.get("ast") {
|
||||
output.add_artifact(TraceArtifact::new("ast", format!("{:?}", ast)));
|
||||
}
|
||||
ast
|
||||
},
|
||||
Err(err) => {
|
||||
return output.finish(Err(format!("Parse error: {:?}\n", err.msg)))
|
||||
}
|
||||
};
|
||||
let mut evaluation_output = String::new();
|
||||
for s in self.evaluator.run(ast).iter() {
|
||||
evaluation_output.push_str(s);
|
||||
}
|
||||
output.finish(Ok(evaluation_output))
|
||||
}
|
||||
}
|
||||
*/
|
||||
|
||||
@@ -4,7 +4,7 @@ extern crate itertools;
|
||||
extern crate schala_repl;
|
||||
|
||||
use itertools::Itertools;
|
||||
use schala_repl::{ProgrammingLanguageInterface, EvalOptions, FinishedComputation, UnfinishedComputation};
|
||||
use schala_repl::{ProgrammingLanguageInterface, EvalOptions};
|
||||
|
||||
pub struct Robo {
|
||||
}
|
||||
@@ -154,17 +154,5 @@ impl ProgrammingLanguageInterface for Robo {
|
||||
fn get_source_file_suffix(&self) -> String {
|
||||
format!("robo")
|
||||
}
|
||||
|
||||
fn execute_pipeline(&mut self, input: &str, _eval_options: &EvalOptions) -> FinishedComputation {
|
||||
let output = UnfinishedComputation::default();
|
||||
let tokens = match tokenize(input) {
|
||||
Ok(tokens) => tokens,
|
||||
Err(e) => {
|
||||
return output.finish(Err(format!("Tokenize error: {:?}", e)));
|
||||
}
|
||||
};
|
||||
|
||||
output.finish(Ok(format!("{:?}", tokens)))
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ extern crate itertools;
|
||||
extern crate schala_repl;
|
||||
|
||||
use itertools::Itertools;
|
||||
use schala_repl::{ProgrammingLanguageInterface, EvalOptions, UnfinishedComputation, FinishedComputation};
|
||||
use schala_repl::{ProgrammingLanguageInterface, EvalOptions};
|
||||
use std::iter::Peekable;
|
||||
use std::vec::IntoIter;
|
||||
use std::str::Chars;
|
||||
@@ -72,24 +72,6 @@ impl ProgrammingLanguageInterface for Rukka {
|
||||
fn get_source_file_suffix(&self) -> String {
|
||||
format!("rukka")
|
||||
}
|
||||
|
||||
fn execute_pipeline(&mut self, input: &str, _eval_options: &EvalOptions) -> FinishedComputation {
|
||||
let output = UnfinishedComputation::default();
|
||||
let sexps = match read(input) {
|
||||
Err(err) => {
|
||||
return output.finish(Err(format!("Error: {}", err)));
|
||||
},
|
||||
Ok(sexps) => sexps
|
||||
};
|
||||
|
||||
let output_str: String = sexps.into_iter().enumerate().map(|(i, sexp)| {
|
||||
match self.state.eval(sexp) {
|
||||
Ok(result) => format!("{}: {}", i, result.print()),
|
||||
Err(err) => format!("{} Error: {}", i, err),
|
||||
}
|
||||
}).intersperse(format!("\n")).collect();
|
||||
output.finish(Ok(output_str))
|
||||
}
|
||||
}
|
||||
|
||||
impl EvaluatorState {
|
||||
|
||||
@@ -10,8 +10,8 @@ take_mut = "0.1.3"
maplit = "*"
lazy_static = "0.2.8"
failure = "0.1.2"

ena = "0.11.0"
stopwatch = "0.0.7"

schala-lang-codegen = { path = "../codegen" }
schala-repl = { path = "../../schala-repl" }
schala-repl-codegen = { path = "../../schala-repl-codegen" }
@@ -2,16 +2,18 @@ use std::rc::Rc;
|
||||
use std::convert::From;
|
||||
|
||||
use crate::builtin::{BinOp, PrefixOp};
|
||||
use crate::typechecking::TypeData;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
pub struct Node<T> {
|
||||
pub struct Meta<T> {
|
||||
n: T,
|
||||
source_map: SourceMap
|
||||
source_map: SourceMap,
|
||||
type_data: TypeData,
|
||||
}
|
||||
|
||||
impl<T> Node<T> {
|
||||
pub fn new(n: T) -> Node<T> {
|
||||
Node { n, source_map: SourceMap::default() }
|
||||
impl<T> Meta<T> {
|
||||
pub fn new(n: T) -> Meta<T> {
|
||||
Meta { n, source_map: SourceMap::default(), type_data: TypeData::new() }
|
||||
}
|
||||
|
||||
pub fn node(&self) -> &T {
|
||||
@@ -24,22 +26,22 @@ impl<T> Node<T> {
|
||||
struct SourceMap {
|
||||
}
|
||||
|
||||
impl From<Expression> for Node<Expression> {
|
||||
fn from(expr: Expression) -> Node<Expression> {
|
||||
Node { n: expr, source_map: SourceMap::default() }
|
||||
impl From<Expression> for Meta<Expression> {
|
||||
fn from(expr: Expression) -> Meta<Expression> {
|
||||
Meta { n: expr, source_map: SourceMap::default(), type_data: TypeData::new() }
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
pub struct AST(pub Vec<Node<Statement>>);
|
||||
pub struct AST(pub Vec<Meta<Statement>>);
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub enum Statement {
|
||||
ExpressionStatement(Node<Expression>),
|
||||
ExpressionStatement(Meta<Expression>),
|
||||
Declaration(Declaration),
|
||||
}
|
||||
|
||||
pub type Block = Vec<Node<Statement>>;
|
||||
pub type Block = Vec<Meta<Statement>>;
|
||||
pub type ParamName = Rc<String>;
|
||||
pub type FormalParam = (ParamName, Option<TypeIdentifier>);
|
||||
|
||||
@@ -56,7 +58,8 @@ pub enum Declaration {
|
||||
Binding {
|
||||
name: Rc<String>,
|
||||
constant: bool,
|
||||
expr: Expression,
|
||||
type_anno: Option<TypeIdentifier>,
|
||||
expr: Meta<Expression>,
|
||||
},
|
||||
Impl {
|
||||
type_name: TypeIdentifier,
|
||||
@@ -91,7 +94,7 @@ pub enum Variant {
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub struct Expression(pub ExpressionType, pub Option<TypeIdentifier>);
|
||||
pub struct Expression(pub ExpressionKind, pub Option<TypeIdentifier>);
|
||||
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
@@ -107,33 +110,33 @@ pub struct TypeSingletonName {
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub enum ExpressionType {
|
||||
pub enum ExpressionKind {
|
||||
NatLiteral(u64),
|
||||
FloatLiteral(f64),
|
||||
StringLiteral(Rc<String>),
|
||||
BoolLiteral(bool),
|
||||
BinExp(BinOp, Box<Node<Expression>>, Box<Node<Expression>>),
|
||||
PrefixExp(PrefixOp, Box<Node<Expression>>),
|
||||
TupleLiteral(Vec<Node<Expression>>),
|
||||
BinExp(BinOp, Box<Meta<Expression>>, Box<Meta<Expression>>),
|
||||
PrefixExp(PrefixOp, Box<Meta<Expression>>),
|
||||
TupleLiteral(Vec<Meta<Expression>>),
|
||||
Value(Rc<String>),
|
||||
NamedStruct {
|
||||
name: Rc<String>,
|
||||
fields: Vec<(Rc<String>, Expression)>,
|
||||
fields: Vec<(Rc<String>, Meta<Expression>)>,
|
||||
},
|
||||
Call {
|
||||
f: Box<Expression>,
|
||||
arguments: Vec<Node<Expression>>,
|
||||
f: Box<Meta<Expression>>,
|
||||
arguments: Vec<Meta<Expression>>,
|
||||
},
|
||||
Index {
|
||||
indexee: Box<Expression>,
|
||||
indexers: Vec<Expression>,
|
||||
indexee: Box<Meta<Expression>>,
|
||||
indexers: Vec<Meta<Expression>>,
|
||||
},
|
||||
IfExpression {
|
||||
discriminator: Box<Discriminator>,
|
||||
body: Box<IfExpressionBody>,
|
||||
},
|
||||
WhileExpression {
|
||||
condition: Option<Box<Expression>>,
|
||||
condition: Option<Box<Meta<Expression>>>,
|
||||
body: Block,
|
||||
},
|
||||
ForExpression {
|
||||
@@ -145,7 +148,7 @@ pub enum ExpressionType {
|
||||
type_anno: Option<TypeIdentifier>,
|
||||
body: Block,
|
||||
},
|
||||
ListLiteral(Vec<Expression>),
|
||||
ListLiteral(Vec<Meta<Expression>>),
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
@@ -176,7 +179,7 @@ pub enum Guard {
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub struct HalfExpr {
|
||||
pub op: Option<BinOp>,
|
||||
pub expr: ExpressionType,
|
||||
pub expr: ExpressionKind,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
@@ -192,7 +195,7 @@ pub enum Pattern {
|
||||
pub enum PatternLiteral {
|
||||
NumPattern {
|
||||
neg: bool,
|
||||
num: ExpressionType,
|
||||
num: ExpressionKind,
|
||||
},
|
||||
StringPattern(Rc<String>),
|
||||
BoolPattern(bool),
|
||||
@@ -202,11 +205,11 @@ pub enum PatternLiteral {
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub struct Enumerator {
|
||||
pub id: Rc<String>,
|
||||
pub generator: Expression,
|
||||
pub generator: Meta<Expression>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub enum ForBody {
|
||||
MonadicReturn(Expression),
|
||||
MonadicReturn(Meta<Expression>),
|
||||
StatementBlock(Block),
|
||||
}
|
||||
|
||||
@@ -1,32 +1,8 @@
|
||||
use std::rc::Rc;
|
||||
use std::collections::HashMap;
|
||||
use std::fmt;
|
||||
|
||||
use crate::tokenizing::TokenKind;
|
||||
use self::BuiltinTypeSpecifier::*;
|
||||
use self::BuiltinTConst::*;
|
||||
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub enum BuiltinTypeSpecifier {
|
||||
Const(BuiltinTConst),
|
||||
Func(Box<BuiltinTypeSpecifier>, Box<BuiltinTypeSpecifier>),
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub enum BuiltinTConst {
|
||||
Nat,
|
||||
Int,
|
||||
Float,
|
||||
StringT,
|
||||
Bool,
|
||||
}
|
||||
|
||||
impl fmt::Display for BuiltinTypeSpecifier {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(f, "{:?}", self)
|
||||
}
|
||||
}
|
||||
use crate::typechecking::{TypeConst, Type};
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub struct BinOp {
|
||||
@@ -53,12 +29,12 @@ impl BinOp {
|
||||
};
|
||||
Some(BinOp::from_sigil(s))
|
||||
}
|
||||
/*
|
||||
|
||||
pub fn get_type(&self) -> Result<Type, String> {
|
||||
let s = self.sigil.as_str();
|
||||
BINOPS.get(s).map(|x| x.0.clone()).ok_or(format!("Binop {} not found", s))
|
||||
}
|
||||
*/
|
||||
|
||||
pub fn min_precedence() -> i32 {
|
||||
i32::min_value()
|
||||
}
|
||||
@@ -103,43 +79,44 @@ impl PrefixOp {
|
||||
pub fn is_prefix(op: &str) -> bool {
|
||||
PREFIX_OPS.get(op).is_some()
|
||||
}
|
||||
/*
|
||||
pub fn get_type(&self) -> Result<Type, String> {
|
||||
let s = self.sigil.as_str();
|
||||
PREFIX_OPS.get(s).map(|x| x.0.clone()).ok_or(format!("Prefix op {} not found", s))
|
||||
}
|
||||
*/
|
||||
}
|
||||
|
||||
|
||||
lazy_static! {
|
||||
static ref PREFIX_OPS: HashMap<&'static str, (BuiltinTypeSpecifier, ())> =
|
||||
static ref PREFIX_OPS: HashMap<&'static str, (Type, ())> =
|
||||
hashmap! {
|
||||
"+" => (Func(bx!(Const(Int)), bx!(Const(Int))), ()),
|
||||
"-" => (Func(bx!(Const(Int)), bx!(Const(Int))), ()),
|
||||
"!" => (Func(bx!(Const(Bool)), bx!(Const(Bool))), ()),
|
||||
"+" => (ty!(Nat -> Int), ()),
|
||||
"-" => (ty!(Nat -> Int), ()),
|
||||
"!" => (ty!(Bool -> Bool), ()),
|
||||
};
|
||||
}
|
||||
|
||||
/* the second tuple member is a placeholder for when I want to make evaluation rules tied to the
|
||||
* binop definition */
|
||||
//TODO some of these types are going to have to be adjusted
|
||||
lazy_static! {
|
||||
static ref BINOPS: HashMap<&'static str, (BuiltinTypeSpecifier, (), i32)> =
|
||||
static ref BINOPS: HashMap<&'static str, (Type, (), i32)> =
|
||||
hashmap! {
|
||||
"+" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 10),
|
||||
"-" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 10),
|
||||
"*" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 20),
|
||||
"/" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Float))))), (), 20),
|
||||
"quot" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 20),
|
||||
"%" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 20),
|
||||
"++" => (Func(bx!(Const(StringT)), bx!(Func(bx!(Const(StringT)), bx!(Const(StringT))))), (), 30),
|
||||
"^" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 20),
|
||||
"&" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 20),
|
||||
"|" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 20),
|
||||
">" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 20),
|
||||
">=" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 20),
|
||||
"<" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 20),
|
||||
"<=" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 20),
|
||||
"==" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 20),
|
||||
"=" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 20),
|
||||
"<=>" => (Func(bx!(Const(Nat)), bx!(Func(bx!(Const(Nat)), bx!(Const(Nat))))), (), 20),
|
||||
"+" => (ty!(Nat -> Nat -> Nat), (), 10),
|
||||
"-" => (ty!(Nat -> Nat -> Nat), (), 10),
|
||||
"*" => (ty!(Nat -> Nat -> Nat), (), 20),
|
||||
"/" => (ty!(Nat -> Nat -> Float), (), 20),
|
||||
"quot" => (ty!(Nat -> Nat -> Nat), (), 20),
|
||||
"%" => (ty!(Nat -> Nat -> Nat), (), 20),
|
||||
"++" => (ty!(StringT -> StringT -> StringT), (), 30),
|
||||
"^" => (ty!(Nat -> Nat -> Nat), (), 20),
|
||||
"&" => (ty!(Nat -> Nat -> Nat), (), 20),
|
||||
"|" => (ty!(Nat -> Nat -> Nat), (), 20),
|
||||
">" => (ty!(Nat -> Nat -> Bool), (), 20),
|
||||
">=" => (ty!(Nat -> Nat -> Bool), (), 20),
|
||||
"<" => (ty!(Nat -> Nat -> Bool), (), 20),
|
||||
"<=" => (ty!(Nat -> Nat -> Bool), (), 20),
|
||||
"==" => (ty!(Nat -> Nat -> Bool), (), 20),
|
||||
"=" => (ty!(Unit), (), 20), //TODO not sure what the type of this should be b/c special fmr
|
||||
"<=>" => (ty!(Nat -> Nat -> Ordering), (), 20), //TODO figure out how to treat Order
|
||||
};
|
||||
}
|
||||
|
||||
@@ -458,6 +458,7 @@ impl<'a> State<'a> {
|
||||
Err(format!("{:?} failed pattern match", cond))
|
||||
}
|
||||
|
||||
//TODO if I don't need to lookup by name here...
|
||||
fn value(&mut self, name: Rc<String>) -> EvalResult<Node> {
|
||||
use self::ValueEntry::*;
|
||||
use self::Func::*;
|
||||
@@ -467,7 +468,7 @@ impl<'a> State<'a> {
|
||||
let symbol_table = self.symbol_table_handle.borrow();
|
||||
let value = symbol_table.lookup_by_name(&name);
|
||||
Ok(match value {
|
||||
Some(Symbol { name, spec }) => match spec {
|
||||
Some(Symbol { name, spec, .. }) => match spec {
|
||||
//TODO I'll need this type_name later to do a table lookup
|
||||
SymbolSpec::DataConstructor { type_name: _type_name, type_args, .. } => {
|
||||
if type_args.len() == 0 {
|
||||
@@ -483,8 +484,14 @@ impl<'a> State<'a> {
|
||||
_ => unreachable!(),
|
||||
},
|
||||
SymbolSpec::RecordConstructor { .. } => return Err(format!("This shouldn't be a record!")),
|
||||
SymbolSpec::Binding => match self.values.lookup(&name) {
|
||||
Some(Binding { val, .. }) => val.clone(),
|
||||
None => return Err(format!("Symbol {} exists in symbol table but not in evaluator table", name))
|
||||
}
|
||||
},
|
||||
/* see if it's an ordinary variable TODO make variables go in symbol table */
|
||||
//TODO ideally this should be returning a runtime error if this is ever None, but it's not
|
||||
//handling all bindings correctly yet
|
||||
//None => return Err(format!("Couldn't find value {}", name)),
|
||||
None => match self.values.lookup(&name) {
|
||||
Some(Binding { val, .. }) => val.clone(),
|
||||
None => return Err(format!("Couldn't find value {}", name)),
|
||||
@@ -498,21 +505,13 @@ mod eval_tests {
|
||||
use std::cell::RefCell;
|
||||
use std::rc::Rc;
|
||||
|
||||
use crate::tokenizing::{Token, tokenize};
|
||||
use crate::parsing::ParseResult;
|
||||
use crate::ast::AST;
|
||||
use crate::symbol_table::SymbolTable;
|
||||
use crate::eval::State;
|
||||
|
||||
fn parse(tokens: Vec<Token>) -> ParseResult<AST> {
|
||||
let mut parser = crate::parsing::Parser::new(tokens);
|
||||
parser.parse()
|
||||
}
|
||||
|
||||
fn evaluate_all_outputs(input: &str) -> Vec<Result<String, String>> {
|
||||
let symbol_table = Rc::new(RefCell::new(SymbolTable::new()));
|
||||
let mut state = State::new(symbol_table);
|
||||
let ast = parse(tokenize(input)).unwrap();
|
||||
let ast = crate::util::quick_ast(input);
|
||||
state.symbol_table_handle.borrow_mut().add_top_level_symbols(&ast).unwrap();
|
||||
let reduced = ast.reduce(&state.symbol_table_handle.borrow());
|
||||
let all_output = state.evaluate(reduced, true);
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
#![feature(trace_macros)]
|
||||
#![feature(custom_attribute)]
|
||||
#![feature(unrestricted_attribute_tokens)]
|
||||
//#![feature(unrestricted_attribute_tokens)]
|
||||
#![feature(slice_patterns, box_patterns, box_syntax)]
|
||||
|
||||
//! `schala-lang` is where the Schala programming language is actually implemented.
|
||||
@@ -14,37 +14,39 @@ extern crate lazy_static;
|
||||
extern crate maplit;
|
||||
extern crate schala_repl;
|
||||
#[macro_use]
|
||||
extern crate schala_repl_codegen;
|
||||
#[macro_use]
|
||||
extern crate schala_lang_codegen;
|
||||
extern crate ena;
|
||||
|
||||
use stopwatch::Stopwatch;
|
||||
|
||||
use std::time::Duration;
|
||||
use std::cell::RefCell;
|
||||
use std::rc::Rc;
|
||||
use std::collections::HashSet;
|
||||
|
||||
use itertools::Itertools;
|
||||
use schala_repl::{ProgrammingLanguageInterface, EvalOptions, TraceArtifact, UnfinishedComputation, FinishedComputation};
|
||||
use schala_repl::{ProgrammingLanguageInterface,
|
||||
ComputationRequest, ComputationResponse,
|
||||
LangMetaRequest, LangMetaResponse, GlobalOutputStats,
|
||||
DebugResponse, DebugAsk};
|
||||
|
||||
macro_rules! bx {
|
||||
($e:expr) => { Box::new($e) }
|
||||
}
|
||||
|
||||
#[macro_use]
|
||||
mod util;
|
||||
mod builtin;
|
||||
#[macro_use]
|
||||
mod typechecking;
|
||||
|
||||
mod tokenizing;
|
||||
mod ast;
|
||||
mod parsing;
|
||||
mod symbol_table;
|
||||
mod typechecking;
|
||||
mod builtin;
|
||||
mod reduced_ast;
|
||||
mod eval;
|
||||
|
||||
//trace_macros!(true);
|
||||
#[derive(ProgrammingLanguageInterface)]
|
||||
#[LanguageName = "Schala"]
|
||||
#[SourceFileExtension = "schala"]
|
||||
#[PipelineSteps(load_source, tokenizing, parsing(compact,expanded,trace), symbol_table, typechecking, ast_reducing, eval)]
|
||||
#[DocMethod = get_doc]
|
||||
#[HandleCustomInterpreterDirectives = handle_custom_interpreter_directives]
|
||||
/// All bits of state necessary to parse and execute a Schala program are stored in this struct.
|
||||
/// `state` represents the execution state for the AST-walking interpreter, the other fields
|
||||
/// should be self-explanatory.
|
||||
@@ -57,12 +59,10 @@ pub struct Schala {
|
||||
}
|
||||
|
||||
impl Schala {
|
||||
fn get_doc(&self, commands: &Vec<&str>) -> Option<String> {
|
||||
Some(format!("Documentation on commands: {:?}", commands))
|
||||
}
|
||||
|
||||
fn handle_custom_interpreter_directives(&mut self, commands: &Vec<&str>) -> Option<String> {
|
||||
Some(format!("Schala-lang command: {:?} not supported", commands.get(0)))
|
||||
fn handle_docs(&self, source: String) -> LangMetaResponse {
|
||||
LangMetaResponse::Docs {
|
||||
doc_string: format!("Schala item `{}` : <<Schala-lang documentation not yet implemented>>", source)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -84,21 +84,40 @@ impl Schala {
|
||||
pub fn new() -> Schala {
|
||||
let prelude = include_str!("prelude.schala");
|
||||
let mut s = Schala::new_blank_env();
|
||||
s.execute_pipeline(prelude, &EvalOptions::default());
|
||||
|
||||
let request = ComputationRequest { source: prelude, debug_requests: HashSet::default() };
|
||||
s.run_computation(request);
|
||||
s
|
||||
}
|
||||
|
||||
fn handle_debug_immediate(&self, request: DebugAsk) -> DebugResponse {
|
||||
use DebugAsk::*;
|
||||
match request {
|
||||
Timing => DebugResponse { ask: Timing, value: format!("Invalid") },
|
||||
ByStage { stage_name } => match &stage_name[..] {
|
||||
"symbol-table" => {
|
||||
let value = self.symbol_table.borrow().debug_symbol_table();
|
||||
DebugResponse {
|
||||
ask: ByStage { stage_name: format!("symbol-table") },
|
||||
value
|
||||
}
|
||||
},
|
||||
s => {
|
||||
DebugResponse {
|
||||
ask: ByStage { stage_name: s.to_string() },
|
||||
value: format!("Not-implemented")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn load_source<'a>(input: &'a str, handle: &mut Schala, _comp: Option<&mut UnfinishedComputation>) -> Result<&'a str, String> {
|
||||
handle.source_reference.load_new_source(input);
|
||||
Ok(input)
|
||||
}
|
||||
|
||||
fn tokenizing(input: &str, _handle: &mut Schala, comp: Option<&mut UnfinishedComputation>) -> Result<Vec<tokenizing::Token>, String> {
|
||||
fn tokenizing(input: &str, _handle: &mut Schala, comp: Option<&mut PassDebugArtifact>) -> Result<Vec<tokenizing::Token>, String> {
|
||||
let tokens = tokenizing::tokenize(input);
|
||||
comp.map(|comp| {
|
||||
let token_string = tokens.iter().map(|t| t.to_string_with_metadata()).join(", ");
|
||||
comp.add_artifact(TraceArtifact::new("tokens", token_string));
|
||||
comp.add_artifact(token_string);
|
||||
});
|
||||
|
||||
let errors: Vec<String> = tokens.iter().filter_map(|t| t.get_error()).collect();
|
||||
@@ -109,7 +128,7 @@ fn tokenizing(input: &str, _handle: &mut Schala, comp: Option<&mut UnfinishedCom
|
||||
}
|
||||
}
|
||||
|
||||
fn parsing(input: Vec<tokenizing::Token>, handle: &mut Schala, comp: Option<&mut UnfinishedComputation>) -> Result<ast::AST, String> {
|
||||
fn parsing(input: Vec<tokenizing::Token>, handle: &mut Schala, comp: Option<&mut PassDebugArtifact>) -> Result<ast::AST, String> {
|
||||
use crate::parsing::Parser;
|
||||
|
||||
let mut parser = match handle.active_parser.take() {
|
||||
@@ -118,9 +137,10 @@ fn parsing(input: Vec<tokenizing::Token>, handle: &mut Schala, comp: Option<&mut
|
||||
};
|
||||
|
||||
let ast = parser.parse();
|
||||
let trace = parser.format_parse_trace();
|
||||
let _trace = parser.format_parse_trace();
|
||||
|
||||
comp.map(|comp| {
|
||||
comp.map(|_comp| {
|
||||
/*
|
||||
//TODO need to control which of these debug stages get added
|
||||
let opt = comp.cur_debug_options.get(0).map(|s| s.clone());
|
||||
match opt {
|
||||
@@ -130,6 +150,7 @@ fn parsing(input: Vec<tokenizing::Token>, handle: &mut Schala, comp: Option<&mut
|
||||
Some(ref s) if s == "trace" => comp.add_artifact(TraceArtifact::new_parse_trace(trace)),
|
||||
Some(ref x) => println!("Bad parsing debug option: {}", x),
|
||||
};
|
||||
*/
|
||||
});
|
||||
ast.map_err(|err| format_parse_error(err, handle))
|
||||
}
|
||||
@@ -151,38 +172,40 @@ fn format_parse_error(error: parsing::ParseError, handle: &mut Schala) -> String
|
||||
"#, line_from_program, location_pointer, error_msg=error.msg, space_padding=space_padding, line_num=line_num)
|
||||
}
|
||||
|
||||
fn symbol_table(input: ast::AST, handle: &mut Schala, comp: Option<&mut UnfinishedComputation>) -> Result<ast::AST, String> {
|
||||
fn symbol_table(input: ast::AST, handle: &mut Schala, comp: Option<&mut PassDebugArtifact>) -> Result<ast::AST, String> {
|
||||
let add = handle.symbol_table.borrow_mut().add_top_level_symbols(&input);
|
||||
match add {
|
||||
Ok(()) => {
|
||||
let artifact = TraceArtifact::new("symbol_table", handle.symbol_table.borrow().debug_symbol_table());
|
||||
comp.map(|comp| comp.add_artifact(artifact));
|
||||
let debug = handle.symbol_table.borrow().debug_symbol_table();
|
||||
comp.map(|comp| comp.add_artifact(debug));
|
||||
Ok(input)
|
||||
},
|
||||
Err(msg) => Err(msg)
|
||||
}
|
||||
}
|
||||
|
||||
fn typechecking(input: ast::AST, handle: &mut Schala, comp: Option<&mut UnfinishedComputation>) -> Result<ast::AST, String> {
|
||||
fn typechecking(input: ast::AST, handle: &mut Schala, comp: Option<&mut PassDebugArtifact>) -> Result<ast::AST, String> {
|
||||
let result = handle.type_context.typecheck(&input);
|
||||
|
||||
comp.map(|comp| {
|
||||
let artifact = TraceArtifact::new("type", format!("{:?}", result));
|
||||
comp.add_artifact(artifact);
|
||||
comp.add_artifact(match result {
|
||||
Ok(ty) => ty.to_string(),
|
||||
Err(err) => format!("Type error: {}", err.msg)
|
||||
});
|
||||
});
|
||||
|
||||
Ok(input)
|
||||
}
|
||||
|
||||
fn ast_reducing(input: ast::AST, handle: &mut Schala, comp: Option<&mut UnfinishedComputation>) -> Result<reduced_ast::ReducedAST, String> {
|
||||
fn ast_reducing(input: ast::AST, handle: &mut Schala, comp: Option<&mut PassDebugArtifact>) -> Result<reduced_ast::ReducedAST, String> {
|
||||
let ref symbol_table = handle.symbol_table.borrow();
|
||||
let output = input.reduce(symbol_table);
|
||||
comp.map(|comp| comp.add_artifact(TraceArtifact::new("ast_reducing", format!("{:?}", output))));
|
||||
comp.map(|comp| comp.add_artifact(format!("{:?}", output)));
|
||||
Ok(output)
|
||||
}
|
||||
|
||||
fn eval(input: reduced_ast::ReducedAST, handle: &mut Schala, comp: Option<&mut UnfinishedComputation>) -> Result<String, String> {
|
||||
comp.map(|comp| comp.add_artifact(TraceArtifact::new("value_state", handle.state.debug_print())));
|
||||
fn eval(input: reduced_ast::ReducedAST, handle: &mut Schala, comp: Option<&mut PassDebugArtifact>) -> Result<String, String> {
|
||||
comp.map(|comp| comp.add_artifact(handle.state.debug_print()));
|
||||
let evaluation_outputs = handle.state.evaluate(input, true);
|
||||
let text_output: Result<Vec<String>, String> = evaluation_outputs
|
||||
.into_iter()
|
||||
@@ -211,3 +234,99 @@ impl SourceReference {
|
||||
self.lines.as_ref().and_then(|x| x.get(line).map(|s| s.to_string())).unwrap_or(format!("NO LINE FOUND"))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
struct PassDebugArtifact {
|
||||
artifacts: Vec<String>
|
||||
}
|
||||
impl PassDebugArtifact {
|
||||
fn add_artifact(&mut self, artifact: String) {
|
||||
self.artifacts.push(artifact)
|
||||
}
|
||||
}
|
||||
|
||||
fn stage_names() -> Vec<&'static str> {
|
||||
vec![
|
||||
"tokenizing",
|
||||
"parsing",
|
||||
"symbol-table",
|
||||
"typechecking",
|
||||
"ast-reduction",
|
||||
"ast-walking-evaluation"
|
||||
]
|
||||
}
|
||||
|
||||
|
||||
impl ProgrammingLanguageInterface for Schala {
|
||||
fn get_language_name(&self) -> String { format!("Schala") }
|
||||
fn get_source_file_suffix(&self) -> String { format!("schala") }
|
||||
|
||||
fn run_computation(&mut self, request: ComputationRequest) -> ComputationResponse {
|
||||
struct PassToken<'a> {
|
||||
schala: &'a mut Schala,
|
||||
stage_durations: &'a mut Vec<(String, Duration)>,
|
||||
sw: &'a Stopwatch,
|
||||
debug_requests: &'a HashSet<DebugAsk>,
|
||||
debug_responses: &'a mut Vec<DebugResponse>,
|
||||
}
|
||||
|
||||
fn output_wrapper<Input, Output, F>(n: usize, func: F, input: Input, tok: &mut PassToken) -> Result<Output, String>
|
||||
where F: Fn(Input, &mut Schala, Option<&mut PassDebugArtifact>) -> Result<Output, String>
|
||||
{
|
||||
let stage_names = stage_names();
|
||||
let mut debug_artifact = if tok.debug_requests.contains(&DebugAsk::ByStage { stage_name: stage_names[n].to_string() }) {
|
||||
Some(PassDebugArtifact::default())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let output = func(input, tok.schala, debug_artifact.as_mut());
|
||||
tok.stage_durations.push((stage_names[n].to_string(), tok.sw.elapsed()));
|
||||
if let Some(artifact) = debug_artifact {
|
||||
for value in artifact.artifacts.into_iter() {
|
||||
let resp = DebugResponse {
|
||||
ask: DebugAsk::ByStage { stage_name: stage_names[n].to_string() },
|
||||
value,
|
||||
};
|
||||
tok.debug_responses.push(resp);
|
||||
}
|
||||
}
|
||||
output
|
||||
}
|
||||
|
||||
let ComputationRequest { source, debug_requests } = request;
|
||||
self.source_reference.load_new_source(source);
|
||||
let sw = Stopwatch::start_new();
|
||||
let mut stage_durations = Vec::new();
|
||||
let mut debug_responses = Vec::new();
|
||||
let mut tok = PassToken { schala: self, stage_durations: &mut stage_durations, sw: &sw, debug_requests: &debug_requests, debug_responses: &mut debug_responses };
|
||||
|
||||
let main_output: Result<String, String> = Ok(source)
|
||||
.and_then(|source| output_wrapper(0, tokenizing, source, &mut tok))
|
||||
.and_then(|tokens| output_wrapper(1, parsing, tokens, &mut tok))
|
||||
.and_then(|ast| output_wrapper(2, symbol_table, ast, &mut tok))
|
||||
.and_then(|ast| output_wrapper(3, typechecking, ast, &mut tok))
|
||||
.and_then(|ast| output_wrapper(4, ast_reducing, ast, &mut tok))
|
||||
.and_then(|reduced_ast| output_wrapper(5, eval, reduced_ast, &mut tok));
|
||||
|
||||
let total_duration = sw.elapsed();
|
||||
let global_output_stats = GlobalOutputStats {
|
||||
total_duration, stage_durations
|
||||
};
|
||||
|
||||
ComputationResponse {
|
||||
main_output,
|
||||
global_output_stats,
|
||||
debug_responses,
|
||||
}
|
||||
}
|
||||
|
||||
fn request_meta(&mut self, request: LangMetaRequest) -> LangMetaResponse {
|
||||
match request {
|
||||
LangMetaRequest::StageNames => LangMetaResponse::StageNames(stage_names().iter().map(|s| s.to_string()).collect()),
|
||||
LangMetaRequest::Docs { source } => self.handle_docs(source),
|
||||
LangMetaRequest::ImmediateDebug(debug_request) =>
|
||||
LangMetaResponse::ImmediateDebug(self.handle_debug_immediate(debug_request)),
|
||||
LangMetaRequest::Custom { .. } => LangMetaResponse::Custom { kind: format!("not-implemented"), value: format!("") }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
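The `run_computation`/`request_meta` implementation above replaces the old `execute_pipeline` entry point. As a rough usage sketch, a driver outside the REPL could exercise the new request/response API like this; only the type, field, and method names are taken from this diff, while the module paths (`schala_repl`, `schala_lang`) and public field access are assumptions.

```rust
// Hedged sketch of driving run_computation directly, based on the types
// visible in this diff (ComputationRequest, ComputationResponse, DebugAsk).
extern crate schala_lang;
extern crate schala_repl;

use std::collections::HashSet;

use schala_lang::Schala;
use schala_repl::{ComputationRequest, DebugAsk, ProgrammingLanguageInterface};

fn run_once(source: &str) {
    let mut schala = Schala::new();

    // Request the symbol-table debug artifact alongside the main output.
    let mut debug_requests = HashSet::new();
    debug_requests.insert(DebugAsk::ByStage { stage_name: "symbol-table".to_string() });

    let response = schala.run_computation(ComputationRequest { source, debug_requests });

    match response.main_output {
        Ok(value) => println!("=> {}", value),
        Err(err) => eprintln!("error: {}", err),
    }
    for debug in response.debug_responses {
        println!("[debug] {}", debug.value);
    }
    println!("total time: {:?}", response.global_output_stats.total_duration);
}
```

In the REPL this plumbing is driven by `schala-repl` itself; the sketch only shows the shape of the data flowing through one computation.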
@@ -17,8 +17,8 @@ pub struct ParseError {
|
||||
}
|
||||
|
||||
impl ParseError {
|
||||
fn new_with_token<T>(msg: &str, token: Token) -> ParseResult<T>{
|
||||
Err(ParseError { msg: msg.to_string(), token })
|
||||
fn new_with_token<T, M>(msg: M, token: Token) -> ParseResult<T> where M: Into<String> {
|
||||
Err(ParseError { msg: msg.into(), token })
|
||||
}
|
||||
}
|
||||
|
||||
@@ -112,7 +112,7 @@ macro_rules! expect {
|
||||
$expected_kind if $cond => $self.token_handler.next(),
|
||||
actual_kind => {
|
||||
let msg = format!("Expected {}, got {:?}", print_token_pattern!($expected_kind), actual_kind);
|
||||
return ParseError::new_with_token(&msg, tok);
|
||||
return ParseError::new_with_token(msg, tok);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -288,7 +288,7 @@ impl Parser {
|
||||
continue;
|
||||
},
|
||||
_ => statements.push(
|
||||
Node::new(self.statement()?)
|
||||
Meta::new(self.statement()?)
|
||||
),
|
||||
}
|
||||
}
|
||||
@@ -413,9 +413,9 @@ impl Parser {
|
||||
}
|
||||
|
||||
#[recursive_descent_method]
|
||||
fn nonempty_func_body(&mut self) -> ParseResult<Vec<Node<Statement>>> {
|
||||
fn nonempty_func_body(&mut self) -> ParseResult<Vec<Meta<Statement>>> {
|
||||
let statements = delimited!(self, LCurlyBrace, statement, Newline | Semicolon, RCurlyBrace, nonstrict);
|
||||
Ok(statements.into_iter().map(|s| Node::new(s)).collect())
|
||||
Ok(statements.into_iter().map(|s| Meta::new(s)).collect())
|
||||
}
|
||||
|
||||
#[recursive_descent_method]
|
||||
@@ -444,10 +444,16 @@ impl Parser {
|
||||
_ => true
|
||||
};
|
||||
let name = self.identifier()?;
|
||||
expect!(self, Operator(ref o) if **o == "=");
|
||||
let expr = self.expression()?;
|
||||
let type_anno = if let Colon = self.token_handler.peek_kind() {
|
||||
Some(self.type_anno()?)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
Ok(Declaration::Binding { name, constant, expr })
|
||||
expect!(self, Operator(ref o) if **o == "=");
|
||||
let expr = self.expression()?.into();
|
||||
|
||||
Ok(Declaration::Binding { name, constant, type_anno, expr })
|
||||
}
|
||||
|
||||
#[recursive_descent_method]
|
||||
@@ -554,7 +560,7 @@ impl Parser {
|
||||
None => unreachable!()
|
||||
};
|
||||
let rhs = self.precedence_expr(new_precedence)?;
|
||||
lhs = Expression(ExpressionType::BinExp(operation, bx!(lhs.into()), bx!(rhs.into())), None);
|
||||
lhs = Expression(ExpressionKind::BinExp(operation, bx!(lhs.into()), bx!(rhs.into())), None);
|
||||
}
|
||||
self.parse_level -= 1;
|
||||
Ok(lhs)
|
||||
@@ -570,7 +576,7 @@ impl Parser {
|
||||
};
|
||||
let expr = self.primary()?;
|
||||
Ok(Expression(
|
||||
ExpressionType::PrefixExp(PrefixOp::from_sigil(sigil.as_str()), bx!(expr.into())),
|
||||
ExpressionKind::PrefixExp(PrefixOp::from_sigil(sigil.as_str()), bx!(expr.into())),
|
||||
None
|
||||
))
|
||||
},
|
||||
@@ -583,8 +589,8 @@ impl Parser {
|
||||
let mut expr = self.index_expr()?;
|
||||
while let LParen = self.token_handler.peek_kind() {
|
||||
let arguments = delimited!(self, LParen, expression, Comma, RParen);
|
||||
let arguments = arguments.into_iter().map(|s| Node::new(s)).collect();
|
||||
expr = Expression(ExpressionType::Call { f: bx!(expr), arguments }, None); //TODO none is incorrect
|
||||
let arguments = arguments.into_iter().map(|s| Meta::new(s)).collect();
|
||||
expr = Expression(ExpressionKind::Call { f: bx!(expr.into()), arguments }, None); //TODO none is incorrect
|
||||
}
|
||||
|
||||
Ok(expr)
|
||||
@@ -594,9 +600,10 @@ impl Parser {
|
||||
fn index_expr(&mut self) -> ParseResult<Expression> {
|
||||
let primary = self.primary()?;
|
||||
Ok(if let LSquareBracket = self.token_handler.peek_kind() {
|
||||
let indexers = delimited!(self, LSquareBracket, expression, Comma, RSquareBracket);
|
||||
Expression(ExpressionType::Index {
|
||||
indexee: bx!(Expression(primary.0, None)),
|
||||
let indexers = delimited!(self, LSquareBracket, expression, Comma, RSquareBracket)
|
||||
.into_iter().map(|ex| ex.into()).collect();
|
||||
Expression(ExpressionKind::Index {
|
||||
indexee: bx!(Expression(primary.0, None).into()),
|
||||
indexers,
|
||||
}, None)
|
||||
} else {
|
||||
@@ -621,8 +628,9 @@ impl Parser {
|
||||
|
||||
#[recursive_descent_method]
|
||||
fn list_expr(&mut self) -> ParseResult<Expression> {
|
||||
let exprs = delimited!(self, LSquareBracket, expression, Comma, RSquareBracket);
|
||||
Ok(Expression(ExpressionType::ListLiteral(exprs), None))
|
||||
let exprs = delimited!(self, LSquareBracket, expression, Comma, RSquareBracket)
|
||||
.into_iter().map(|ex| ex.into()).collect();
|
||||
Ok(Expression(ExpressionKind::ListLiteral(exprs), None))
|
||||
}
|
||||
|
||||
#[recursive_descent_method]
|
||||
@@ -639,7 +647,7 @@ impl Parser {
|
||||
_ => None,
|
||||
};
|
||||
let body = self.nonempty_func_body()?;
|
||||
Ok(Expression(ExpressionType::Lambda { params, type_anno, body }, None)) //TODO need to handle types somehow
|
||||
Ok(Expression(ExpressionKind::Lambda { params, type_anno, body }, None)) //TODO need to handle types somehow
|
||||
}
|
||||
|
||||
#[recursive_descent_method]
|
||||
@@ -654,7 +662,7 @@ impl Parser {
|
||||
|
||||
#[recursive_descent_method]
|
||||
fn paren_expr(&mut self) -> ParseResult<Expression> {
|
||||
use self::ExpressionType::*;
|
||||
use self::ExpressionKind::*;
|
||||
let old_struct_value = self.restrictions.no_struct_literal;
|
||||
self.restrictions.no_struct_literal = false;
|
||||
let output = {
|
||||
@@ -663,7 +671,7 @@ impl Parser {
|
||||
0 => Ok(Expression(TupleLiteral(vec![]), None)),
|
||||
1 => Ok(inner.pop().unwrap()),
|
||||
_ => {
|
||||
let inner: Vec<Node<Expression>> = inner.into_iter().map(|ex| ex.into()).collect();
|
||||
let inner: Vec<Meta<Expression>> = inner.into_iter().map(|ex| ex.into()).collect();
|
||||
Ok(Expression(TupleLiteral(inner), None))
|
||||
}
|
||||
}
|
||||
@@ -674,7 +682,7 @@ impl Parser {
|
||||
|
||||
#[recursive_descent_method]
|
||||
fn identifier_expr(&mut self) -> ParseResult<Expression> {
|
||||
use self::ExpressionType::*;
|
||||
use self::ExpressionKind::*;
|
||||
let identifier = self.identifier()?;
|
||||
Ok(match self.token_handler.peek_kind() {
|
||||
LCurlyBrace if !self.restrictions.no_struct_literal => {
|
||||
@@ -686,8 +694,11 @@ impl Parser {
|
||||
}
|
||||
|
||||
#[recursive_descent_method]
|
||||
fn record_block(&mut self) -> ParseResult<Vec<(Rc<String>, Expression)>> {
|
||||
Ok(delimited!(self, LCurlyBrace, record_entry, Comma, RCurlyBrace))
|
||||
fn record_block(&mut self) -> ParseResult<Vec<(Rc<String>, Meta<Expression>)>> {
|
||||
Ok(
|
||||
delimited!(self, LCurlyBrace, record_entry, Comma, RCurlyBrace)
|
||||
.into_iter().map(|(s, ex)| (s, ex.into())).collect()
|
||||
)
|
||||
}
|
||||
|
||||
#[recursive_descent_method]
|
||||
@@ -714,7 +725,7 @@ impl Parser {
|
||||
_ => self.guard_block()?
|
||||
});
|
||||
|
||||
Ok(Expression(ExpressionType::IfExpression { discriminator, body }, None))
|
||||
Ok(Expression(ExpressionKind::IfExpression { discriminator, body }, None))
|
||||
}
|
||||
|
||||
#[recursive_descent_method]
|
||||
@@ -864,7 +875,7 @@ impl Parser {
|
||||
self.token_handler.next();
|
||||
Pattern::Ignored
|
||||
},
|
||||
other => return ParseError::new_with_token(&format!("{:?} is not a valid Pattern", other), tok)
|
||||
other => return ParseError::new_with_token(format!("{:?} is not a valid Pattern", other), tok)
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -898,7 +909,7 @@ impl Parser {
|
||||
#[recursive_descent_method]
|
||||
fn block(&mut self) -> ParseResult<Block> {
|
||||
let block = delimited!(self, LCurlyBrace, statement, Newline | Semicolon, RCurlyBrace, nonstrict);
|
||||
Ok(block.into_iter().map(|s| { Node::new(s) }).collect())
|
||||
Ok(block.into_iter().map(|s| { Meta::new(s) }).collect())
|
||||
}
|
||||
|
||||
#[recursive_descent_method]
|
||||
@@ -907,20 +918,20 @@ impl Parser {
|
||||
LCurlyBrace => self.block(),
|
||||
_ => {
|
||||
let expr = self.expression()?;
|
||||
Ok(vec![Node::new(Statement::ExpressionStatement(expr.into()))])
|
||||
Ok(vec![Meta::new(Statement::ExpressionStatement(expr.into()))])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[recursive_descent_method]
|
||||
fn while_expr(&mut self) -> ParseResult<Expression> {
|
||||
use self::ExpressionType::*;
|
||||
use self::ExpressionKind::*;
|
||||
expect!(self, Keyword(Kw::While));
|
||||
let condition = {
|
||||
self.restrictions.no_struct_literal = true;
|
||||
let x = self.while_cond();
|
||||
self.restrictions.no_struct_literal = false;
|
||||
x?.map(|expr| bx!(expr))
|
||||
x?.map(|expr| bx!(expr.into()))
|
||||
};
|
||||
let body = self.block()?;
|
||||
Ok(Expression(WhileExpression {condition, body}, None))
|
||||
@@ -949,14 +960,14 @@ impl Parser {
|
||||
vec![single_enum]
|
||||
};
|
||||
let body = Box::new(self.for_expr_body()?);
|
||||
Ok(Expression(ExpressionType::ForExpression { enumerators, body }, None))
|
||||
Ok(Expression(ExpressionKind::ForExpression { enumerators, body }, None))
|
||||
}
|
||||
|
||||
#[recursive_descent_method]
|
||||
fn enumerator(&mut self) -> ParseResult<Enumerator> {
|
||||
let id = self.identifier()?;
|
||||
expect!(self, Operator(ref c) if **c == "<-");
|
||||
let generator = self.expression()?;
|
||||
let generator = self.expression()?.into();
|
||||
Ok(Enumerator { id, generator })
|
||||
}
|
||||
|
||||
@@ -967,11 +978,11 @@ impl Parser {
|
||||
Ok(match tok.get_kind() {
|
||||
LCurlyBrace => {
|
||||
let statements = delimited!(self, LCurlyBrace, statement, Newline | Semicolon, RCurlyBrace, nonstrict);
|
||||
StatementBlock(statements.into_iter().map(|s| Node::new(s)).collect())
|
||||
StatementBlock(statements.into_iter().map(|s| Meta::new(s)).collect())
|
||||
},
|
||||
Keyword(Kw::Return) => {
|
||||
self.token_handler.next();
|
||||
MonadicReturn(self.expression()?)
|
||||
MonadicReturn(self.expression()?.into())
|
||||
},
|
||||
_ => return ParseError::new_with_token("for expressions must end in a block or 'return'", tok),
|
||||
})
|
||||
@@ -982,13 +993,13 @@ impl Parser {
|
||||
let tok = self.token_handler.next();
|
||||
match tok.get_kind() {
|
||||
Identifier(s) => Ok(s),
|
||||
p => ParseError::new_with_token(&format!("Expected an identifier, got {:?}", p), tok),
|
||||
p => ParseError::new_with_token(format!("Expected an identifier, got {:?}", p), tok),
|
||||
}
|
||||
}
|
||||
|
||||
#[recursive_descent_method]
|
||||
fn literal(&mut self) -> ParseResult<Expression> {
|
||||
use self::ExpressionType::*;
|
||||
use self::ExpressionKind::*;
|
||||
|
||||
let tok = self.token_handler.peek();
|
||||
match tok.get_kind() {
|
||||
@@ -1005,7 +1016,7 @@ impl Parser {
|
||||
self.token_handler.next();
|
||||
Ok(Expression(StringLiteral(s.clone()), None))
|
||||
}
|
||||
e => ParseError::new_with_token(&format!("Expected a literal expression, got {:?}", e), tok),
|
||||
e => ParseError::new_with_token(format!("Expected a literal expression, got {:?}", e), tok),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1019,7 +1030,7 @@ impl Parser {
|
||||
|
||||
#[recursive_descent_method]
|
||||
fn int_literal(&mut self) -> ParseResult<Expression> {
|
||||
use self::ExpressionType::*;
|
||||
use self::ExpressionKind::*;
|
||||
let tok = self.token_handler.next();
|
||||
match tok.get_kind() {
|
||||
BinNumberSigil => {
|
||||
@@ -1038,7 +1049,7 @@ impl Parser {
|
||||
|
||||
#[recursive_descent_method]
|
||||
fn float_literal(&mut self) -> ParseResult<Expression> {
|
||||
use self::ExpressionType::*;
|
||||
use self::ExpressionKind::*;
|
||||
let tok = self.token_handler.peek();
|
||||
let mut digits = self.digits()?;
|
||||
if let Period = self.token_handler.peek_kind() {
|
||||
@@ -1047,13 +1058,13 @@ impl Parser {
|
||||
digits.push_str(&self.digits()?);
|
||||
match digits.parse::<f64>() {
|
||||
Ok(f) => Ok(Expression(FloatLiteral(f), None)),
|
||||
Err(e) => ParseError::new_with_token(&format!("Float failed to parse with error: {}", e), tok),
|
||||
Err(e) => ParseError::new_with_token(format!("Float failed to parse with error: {}", e), tok),
|
||||
|
||||
}
|
||||
} else {
|
||||
match digits.parse::<u64>() {
|
||||
Ok(d) => Ok(Expression(NatLiteral(d), None)),
|
||||
Err(e) => ParseError::new_with_token(&format!("Integer failed to parse with error: {}", e), tok),
|
||||
Err(e) => ParseError::new_with_token(format!("Integer failed to parse with error: {}", e), tok),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1111,13 +1122,13 @@ mod parse_tests {
|
||||
use super::tokenize;
|
||||
use super::ParseResult;
|
||||
use crate::builtin::{PrefixOp, BinOp};
|
||||
use crate::ast::{AST, Node, Expression, Statement, IfExpressionBody, Discriminator, Pattern, PatternLiteral, TypeBody, Enumerator, ForBody};
|
||||
use crate::ast::{AST, Meta, Expression, Statement, IfExpressionBody, Discriminator, Pattern, PatternLiteral, TypeBody, Enumerator, ForBody};
|
||||
use super::Statement::*;
|
||||
use super::Declaration::*;
|
||||
use super::Signature;
|
||||
use super::TypeIdentifier::*;
|
||||
use super::TypeSingletonName;
|
||||
use super::ExpressionType::*;
|
||||
use super::ExpressionKind::*;
|
||||
use super::Variant::*;
|
||||
use super::ForBody::*;
|
||||
|
||||
@@ -1127,9 +1138,6 @@ mod parse_tests {
|
||||
parser.parse()
|
||||
}
|
||||
|
||||
macro_rules! rc {
|
||||
($string:tt) => { Rc::new(stringify!($string).to_string()) }
|
||||
}
|
||||
macro_rules! parse_test {
|
||||
($string:expr, $correct:expr) => { assert_eq!(parse($string).unwrap(), $correct) };
|
||||
}
|
||||
@@ -1151,6 +1159,8 @@ mod parse_tests {
|
||||
|
||||
macro_rules! ex {
|
||||
($expr_type:expr) => { Expression($expr_type, None) };
|
||||
(m $expr_type:expr) => { Meta::new(Expression($expr_type, None)) };
|
||||
(m $expr_type:expr, $type_anno:expr) => { Meta::new(Expression($expr_type, Some($type_anno))) };
|
||||
(s $expr_text:expr) => {
|
||||
{
|
||||
let tokens: Vec<crate::tokenizing::Token> = tokenize($expr_text);
|
||||
@@ -1167,14 +1177,14 @@ mod parse_tests {
|
||||
($op:expr, $lhs:expr) => { PrefixExp(PrefixOp::from_sigil($op), bx!(Expression($lhs, None).into())) }
|
||||
}
|
||||
macro_rules! exst {
|
||||
($expr_type:expr) => { Node::new(Statement::ExpressionStatement(Expression($expr_type, None).into())) };
|
||||
($expr_type:expr, $type_anno:expr) => { Node::new(Statement::ExpressionStatement(Expression($expr_type, Some($type_anno)).into())) };
|
||||
($op:expr, $lhs:expr, $rhs:expr) => { Node::new(Statement::ExpressionStatement(ex!(binexp!($op, $lhs, $rhs)))) };
|
||||
($expr_type:expr) => { Meta::new(Statement::ExpressionStatement(Expression($expr_type, None).into())) };
|
||||
($expr_type:expr, $type_anno:expr) => { Meta::new(Statement::ExpressionStatement(Expression($expr_type, Some($type_anno)).into())) };
|
||||
($op:expr, $lhs:expr, $rhs:expr) => { Meta::new(Statement::ExpressionStatement(ex!(binexp!($op, $lhs, $rhs)))) };
|
||||
(s $statement_text:expr) => {
|
||||
{
|
||||
let tokens: Vec<crate::tokenizing::Token> = tokenize($statement_text);
|
||||
let mut parser = super::Parser::new(tokens);
|
||||
Node::new(parser.statement().unwrap())
|
||||
Meta::new(parser.statement().unwrap())
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1253,12 +1263,20 @@ mod parse_tests {
|
||||
])
|
||||
}
|
||||
*/
|
||||
parse_test!("a[b,c]", AST(vec![exst!(Index { indexee: bx!(ex!(val!("a"))), indexers: vec![ex!(val!("b")), ex!(val!("c"))]} )]));
|
||||
parse_test!("a[b,c]", AST(vec![exst!(Index { indexee: bx!(ex!(m val!("a"))), indexers: vec![ex!(m val!("b")), ex!(m val!("c"))]} )]));
|
||||
|
||||
parse_test!("None", AST(vec![exst!(val!("None"))]));
|
||||
parse_test!("Pandas { a: x + y }", AST(vec![
|
||||
exst!(NamedStruct { name: rc!(Pandas), fields: vec![(rc!(a), ex!(binexp!("+", val!("x"), val!("y"))))]})
|
||||
exst!(NamedStruct { name: rc!(Pandas), fields: vec![(rc!(a), ex!(m binexp!("+", val!("x"), val!("y"))))]})
|
||||
]));
|
||||
parse_test! { "Pandas { a: n, b: q, }",
|
||||
AST(vec![
|
||||
exst!(NamedStruct { name: rc!(Pandas), fields:
|
||||
vec![(rc!(a), ex!(m val!("n"))), (rc!(b), ex!(m val!("q")))]
|
||||
}
|
||||
)
|
||||
])
|
||||
};
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -1280,35 +1298,35 @@ mod parse_tests {
|
||||
|
||||
#[test]
|
||||
fn parsing_functions() {
|
||||
parse_test!("fn oi()", AST(vec![Node::new(Declaration(FuncSig(Signature { name: rc!(oi), operator: false, params: vec![], type_anno: None })))]));
|
||||
parse_test!("oi()", AST(vec![exst!(Call { f: bx!(ex!(val!("oi"))), arguments: vec![] })]));
|
||||
parse_test!("fn oi()", AST(vec![Meta::new(Declaration(FuncSig(Signature { name: rc!(oi), operator: false, params: vec![], type_anno: None })))]));
|
||||
parse_test!("oi()", AST(vec![exst!(Call { f: bx!(ex!(m val!("oi"))), arguments: vec![] })]));
|
||||
parse_test!("oi(a, 2 + 2)", AST(vec![exst!(Call
|
||||
{ f: bx!(ex!(val!("oi"))),
|
||||
{ f: bx!(ex!(m val!("oi"))),
|
||||
arguments: vec![ex!(val!("a")).into(), ex!(binexp!("+", NatLiteral(2), NatLiteral(2))).into()]
|
||||
})]));
|
||||
parse_error!("a(b,,c)");
|
||||
|
||||
parse_test!("fn a(b, c: Int): Int", AST(vec![Node::new(Declaration(
|
||||
parse_test!("fn a(b, c: Int): Int", AST(vec![Meta::new(Declaration(
|
||||
FuncSig(Signature { name: rc!(a), operator: false, params: vec![
|
||||
(rc!(b), None), (rc!(c), Some(ty!("Int")))
|
||||
], type_anno: Some(ty!("Int")) })))]));
|
||||
|
||||
|
||||
parse_test!("fn a(x) { x() }", AST(vec![Node::new(Declaration(
|
||||
parse_test!("fn a(x) { x() }", AST(vec![Meta::new(Declaration(
|
||||
FuncDecl(Signature { name: rc!(a), operator: false, params: vec![(rc!(x),None)], type_anno: None },
|
||||
vec![exst!(Call { f: bx!(ex!(val!("x"))), arguments: vec![] })])))]));
|
||||
parse_test!("fn a(x) {\n x() }", AST(vec![Node::new(Declaration(
|
||||
vec![exst!(Call { f: bx!(ex!(m val!("x"))), arguments: vec![] })])))]));
|
||||
parse_test!("fn a(x) {\n x() }", AST(vec![Meta::new(Declaration(
|
||||
FuncDecl(Signature { name: rc!(a), operator: false, params: vec![(rc!(x),None)], type_anno: None },
|
||||
vec![exst!(Call { f: bx!(ex!(val!("x"))), arguments: vec![] })])))]));
|
||||
vec![exst!(Call { f: bx!(ex!(m val!("x"))), arguments: vec![] })])))]));
|
||||
|
||||
let multiline = r#"
|
||||
fn a(x) {
|
||||
x()
|
||||
}
|
||||
"#;
|
||||
parse_test!(multiline, AST(vec![Node::new(Declaration(
|
||||
parse_test!(multiline, AST(vec![Meta::new(Declaration(
|
||||
FuncDecl(Signature { name: rc!(a), operator: false, params: vec![(rc!(x),None)], type_anno: None },
|
||||
vec![exst!(Call { f: bx!(ex!(val!("x"))), arguments: vec![] })])))]));
|
||||
vec![exst!(Call { f: bx!(ex!(m val!("x"))), arguments: vec![] })])))]));
|
||||
let multiline2 = r#"
|
||||
fn a(x) {
|
||||
|
||||
@@ -1316,7 +1334,7 @@ fn a(x) {
|
||||
|
||||
}
|
||||
"#;
|
||||
parse_test!(multiline2, AST(vec![Node::new(Declaration(
|
||||
parse_test!(multiline2, AST(vec![Meta::new(Declaration(
|
||||
FuncDecl(Signature { name: rc!(a), operator: false, params: vec![(rc!(x),None)], type_anno: None },
|
||||
vec![exst!(s "x()")])))]));
|
||||
}
|
||||
@@ -1334,11 +1352,11 @@ fn a(x) {
|
||||
|
||||
#[test]
|
||||
fn parsing_types() {
|
||||
parse_test!("type Yolo = Yolo", AST(vec![Node::new(Declaration(TypeDecl { name: tys!("Yolo"), body: TypeBody(vec![UnitStruct(rc!(Yolo))]), mutable: false} ))]));
|
||||
parse_test!("type mut Yolo = Yolo", AST(vec![Node::new(Declaration(TypeDecl { name: tys!("Yolo"), body: TypeBody(vec![UnitStruct(rc!(Yolo))]), mutable: true} ))]));
|
||||
parse_test!("type alias Sex = Drugs", AST(vec![Node::new(Declaration(TypeAlias(rc!(Sex), rc!(Drugs))))]));
|
||||
parse_test!("type Yolo = Yolo", AST(vec![Meta::new(Declaration(TypeDecl { name: tys!("Yolo"), body: TypeBody(vec![UnitStruct(rc!(Yolo))]), mutable: false} ))]));
|
||||
parse_test!("type mut Yolo = Yolo", AST(vec![Meta::new(Declaration(TypeDecl { name: tys!("Yolo"), body: TypeBody(vec![UnitStruct(rc!(Yolo))]), mutable: true} ))]));
|
||||
parse_test!("type alias Sex = Drugs", AST(vec![Meta::new(Declaration(TypeAlias(rc!(Sex), rc!(Drugs))))]));
|
||||
parse_test!("type Sanchez = Miguel | Alejandro(Int, Option<a>) | Esperanza { a: Int, b: String }",
|
||||
AST(vec![Node::new(Declaration(TypeDecl{
|
||||
AST(vec![Meta::new(Declaration(TypeDecl{
|
||||
name: tys!("Sanchez"),
|
||||
body: TypeBody(vec![
|
||||
UnitStruct(rc!(Miguel)),
|
||||
@@ -1358,7 +1376,7 @@ fn a(x) {
|
||||
}))]));
|
||||
|
||||
parse_test!("type Jorge<a> = Diego | Kike(a)", AST(vec![
|
||||
Node::new(Declaration(TypeDecl{
|
||||
Meta::new(Declaration(TypeDecl{
|
||||
name: TypeSingletonName { name: rc!(Jorge), params: vec![Singleton(TypeSingletonName { name: rc!(a), params: vec![] })] },
|
||||
body: TypeBody(vec![UnitStruct(rc!(Diego)), TupleStruct(rc!(Kike), vec![Singleton(TypeSingletonName { name: rc!(a), params: vec![] })])]),
|
||||
mutable: false
|
||||
@@ -1368,8 +1386,12 @@ fn a(x) {
|
||||
|
||||
#[test]
|
||||
fn parsing_bindings() {
|
||||
parse_test!("let mut a = 10", AST(vec![Node::new(Declaration(Binding { name: rc!(a), constant: false, expr: ex!(NatLiteral(10)) } ))]));
|
||||
parse_test!("let a = 2 + 2", AST(vec![Node::new(Declaration(Binding { name: rc!(a), constant: true, expr: ex!(binexp!("+", NatLiteral(2), NatLiteral(2))) }) )]));
|
||||
parse_test!("let mut a = 10", AST(vec![Meta::new(Declaration(Binding { name: rc!(a), constant: false, type_anno: None, expr: ex!(m NatLiteral(10)) } ))]));
|
||||
parse_test!("let a = 2 + 2", AST(vec![Meta::new(Declaration(Binding { name: rc!(a), constant: true, type_anno: None, expr: ex!(m binexp!("+", NatLiteral(2), NatLiteral(2))) }) )]));
|
||||
parse_test!("let a: Nat = 2 + 2", AST(vec![Meta::new(Declaration(
|
||||
Binding { name: rc!(a), constant: true, type_anno: Some(Singleton(TypeSingletonName { name: rc!(Nat), params: vec![] })),
|
||||
expr: Meta::new(ex!(binexp!("+", NatLiteral(2), NatLiteral(2)))) }
|
||||
))]));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -1378,11 +1400,11 @@ fn a(x) {
|
||||
"if a() then { b(); c() }", AST(vec![exst!(
|
||||
IfExpression {
|
||||
discriminator: bx! {
|
||||
Discriminator::Simple(ex!(Call { f: bx!(ex!(val!("a"))), arguments: vec![]}))
|
||||
Discriminator::Simple(ex!(Call { f: bx!(ex!(m val!("a"))), arguments: vec![]}))
|
||||
},
|
||||
body: bx! {
|
||||
IfExpressionBody::SimpleConditional(
|
||||
vec![exst!(Call { f: bx!(ex!(val!("b"))), arguments: vec![]}), exst!(Call { f: bx!(ex!(val!("c"))), arguments: vec![] })],
|
||||
vec![exst!(Call { f: bx!(ex!(m val!("b"))), arguments: vec![]}), exst!(Call { f: bx!(ex!(m val!("c"))), arguments: vec![] })],
|
||||
None
|
||||
)
|
||||
}
|
||||
@@ -1394,11 +1416,11 @@ fn a(x) {
|
||||
"if a() then { b(); c() } else { q }", AST(vec![exst!(
|
||||
IfExpression {
|
||||
discriminator: bx! {
|
||||
Discriminator::Simple(ex!(Call { f: bx!(ex!(val!("a"))), arguments: vec![]}))
|
||||
Discriminator::Simple(ex!(Call { f: bx!(ex!(m val!("a"))), arguments: vec![]}))
|
||||
},
|
||||
body: bx! {
|
||||
IfExpressionBody::SimpleConditional(
|
||||
vec![exst!(Call { f: bx!(ex!(val!("b"))), arguments: vec![]}), exst!(Call { f: bx!(ex!(val!("c"))), arguments: vec![] })],
|
||||
vec![exst!(Call { f: bx!(ex!(m val!("b"))), arguments: vec![]}), exst!(Call { f: bx!(ex!(m val!("c"))), arguments: vec![] })],
|
||||
Some(
|
||||
vec![exst!(val!("q"))],
|
||||
)
|
||||
@@ -1443,7 +1465,7 @@ fn a(x) {
|
||||
#[test]
|
||||
fn parsing_interfaces() {
|
||||
parse_test!("interface Unglueable { fn unglue(a: Glue); fn mar(): Glue }", AST(vec![
|
||||
Node::new(Declaration(Interface {
|
||||
Meta::new(Declaration(Interface {
|
||||
name: rc!(Unglueable),
|
||||
signatures: vec![
|
||||
Signature { name: rc!(unglue), operator: false, params: vec![(rc!(a), Some(Singleton(TypeSingletonName { name: rc!(Glue), params: vec![] })))], type_anno: None },
|
||||
@@ -1456,7 +1478,7 @@ fn a(x) {
|
||||
#[test]
|
||||
fn parsing_impls() {
|
||||
parse_test!("impl Heh { fn yolo(); fn swagg(); }", AST(vec![
|
||||
Node::new(
|
||||
Meta::new(
|
||||
Declaration(Impl {
|
||||
type_name: ty!("Heh"),
|
||||
interface_name: None,
|
||||
@@ -1466,7 +1488,7 @@ fn a(x) {
|
||||
] }))]));
|
||||
|
||||
parse_test!("impl Mondai for Lollerino { fn yolo(); fn swagg(); }", AST(vec![
|
||||
Node::new(Declaration(Impl {
|
||||
Meta::new(Declaration(Impl {
|
||||
type_name: ty!("Lollerino"),
|
||||
interface_name: Some(TypeSingletonName { name: rc!(Mondai), params: vec![] }),
|
||||
block: vec![
|
||||
@@ -1475,7 +1497,7 @@ fn a(x) {
|
||||
] }))]));
|
||||
|
||||
parse_test!("impl Hella<T> for (Alpha, Omega) { }", AST(vec![
|
||||
Node::new(Declaration(Impl {
|
||||
Meta::new(Declaration(Impl {
|
||||
type_name: Tuple(vec![ty!("Alpha"), ty!("Omega")]),
|
||||
interface_name: Some(TypeSingletonName { name: rc!(Hella), params: vec![ty!("T")] }),
|
||||
block: vec![]
|
||||
@@ -1483,7 +1505,7 @@ fn a(x) {
|
||||
]));
|
||||
|
||||
parse_test!("impl Option<WTFMate> { fn oi() }", AST(vec![
|
||||
Node::new(
|
||||
Meta::new(
|
||||
Declaration(Impl {
|
||||
type_name: Singleton(TypeSingletonName { name: rc!(Option), params: vec![ty!("WTFMate")]}),
|
||||
interface_name: None,
|
||||
@@ -1496,9 +1518,9 @@ fn a(x) {
|
||||
#[test]
|
||||
fn parsing_type_annotations() {
|
||||
parse_test!("let a = b : Int", AST(vec![
|
||||
Node::new(
|
||||
Declaration(Binding { name: rc!(a), constant: true, expr:
|
||||
Expression(val!("b"), Some(ty!("Int"))) }))]));
|
||||
Meta::new(
|
||||
Declaration(Binding { name: rc!(a), constant: true, type_anno: None, expr:
|
||||
ex!(m val!("b"), ty!("Int")) }))]));
|
||||
|
||||
parse_test!("a : Int", AST(vec![
|
||||
exst!(val!("a"), ty!("Int"))
|
||||
@@ -1537,7 +1559,7 @@ fn a(x) {
|
||||
]));
|
||||
|
||||
parse_test!(r#"\(x){y}(1)"#, AST(vec![
|
||||
exst!(Call { f: bx!(ex!(
|
||||
exst!(Call { f: bx!(ex!(m
|
||||
Lambda {
|
||||
params: vec![(rc!(x), None)],
|
||||
type_anno: None,
|
||||
@@ -1584,7 +1606,7 @@ fn a(x) {
|
||||
exst!(s r"fn wahoo() { let a = 10; \(x) { x + a } }"),
|
||||
exst! {
|
||||
Call {
|
||||
f: bx!(ex!(Call { f: bx!(ex!(val!("wahoo"))), arguments: vec![] })),
|
||||
f: bx!(ex!(m Call { f: bx!(ex!(m val!("wahoo"))), arguments: vec![] })),
|
||||
arguments: vec![ex!(s "3").into()],
|
||||
}
|
||||
}
|
||||
@@ -1596,7 +1618,7 @@ fn a(x) {
|
||||
fn list_literals() {
|
||||
parse_test! {
|
||||
"[1,2]", AST(vec![
|
||||
exst!(ListLiteral(vec![ex!(NatLiteral(1)), ex!(NatLiteral(2))]))])
|
||||
exst!(ListLiteral(vec![ex!(m NatLiteral(1)), ex!(m NatLiteral(2))]))])
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1609,7 +1631,7 @@ fn a(x) {
|
||||
|
||||
parse_test! {
|
||||
"while a == b { }", AST(vec![
|
||||
exst!(WhileExpression { condition: Some(bx![ex![binexp!("==", val!("a"), val!("b"))]]), body: vec![] })])
|
||||
exst!(WhileExpression { condition: Some(bx![ex![m binexp!("==", val!("a"), val!("b"))]]), body: vec![] })])
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1618,14 +1640,14 @@ fn a(x) {
|
||||
parse_test! {
|
||||
"for { a <- maybeValue } return 1", AST(vec![
|
||||
exst!(ForExpression {
|
||||
enumerators: vec![Enumerator { id: rc!(a), generator: ex!(val!("maybeValue")) }],
|
||||
body: bx!(MonadicReturn(ex!(s "1")))
|
||||
enumerators: vec![Enumerator { id: rc!(a), generator: ex!(m val!("maybeValue")) }],
|
||||
body: bx!(MonadicReturn(Meta::new(ex!(s "1"))))
|
||||
})])
|
||||
}
|
||||
|
||||
parse_test! {
|
||||
"for n <- someRange { f(n); }", AST(vec![
|
||||
exst!(ForExpression { enumerators: vec![Enumerator { id: rc!(n), generator: ex!(val!("someRange"))}],
|
||||
exst!(ForExpression { enumerators: vec![Enumerator { id: rc!(n), generator: ex!(m val!("someRange"))}],
|
||||
body: bx!(ForBody::StatementBlock(vec![exst!(s "f(n)")]))
|
||||
})])
|
||||
}
|
||||
|
||||
@@ -120,7 +120,7 @@ fn reduce_block(block: &Block, symbol_table: &SymbolTable) -> Vec<Stmt> {
|
||||
|
||||
impl Expression {
|
||||
fn reduce(&self, symbol_table: &SymbolTable) -> Expr {
|
||||
use crate::ast::ExpressionType::*;
|
||||
use crate::ast::ExpressionKind::*;
|
||||
let ref input = self.0;
|
||||
match input {
|
||||
NatLiteral(n) => Expr::Lit(Lit::Nat(*n)),
|
||||
@@ -139,7 +139,7 @@ impl Expression {
|
||||
_ => Expr::Val(name.clone()),
|
||||
},
|
||||
Call { f, arguments } => Expr::Call {
|
||||
f: Box::new(f.reduce(symbol_table)),
|
||||
f: Box::new(f.node().reduce(symbol_table)),
|
||||
args: arguments.iter().map(|arg| arg.node().reduce(symbol_table)).collect(),
|
||||
},
|
||||
TupleLiteral(exprs) => Expr::Tuple(exprs.iter().map(|e| e.node().reduce(symbol_table)).collect()),
|
||||
@@ -294,10 +294,10 @@ impl PatternLiteral {
|
||||
match self {
|
||||
NumPattern { neg, num } => {
|
||||
let comparison = Expr::Lit(match (neg, num) {
|
||||
(false, ExpressionType::NatLiteral(n)) => Lit::Nat(*n),
|
||||
(false, ExpressionType::FloatLiteral(f)) => Lit::Float(*f),
|
||||
(true, ExpressionType::NatLiteral(n)) => Lit::Int(-1*(*n as i64)),
|
||||
(true, ExpressionType::FloatLiteral(f)) => Lit::Float(-1.0*f),
|
||||
(false, ExpressionKind::NatLiteral(n)) => Lit::Nat(*n),
|
||||
(false, ExpressionKind::FloatLiteral(f)) => Lit::Float(*f),
|
||||
(true, ExpressionKind::NatLiteral(n)) => Lit::Int(-1*(*n as i64)),
|
||||
(true, ExpressionKind::FloatLiteral(f)) => Lit::Float(-1.0*f),
|
||||
_ => panic!("This should never happen")
|
||||
});
|
||||
let guard = Some(Expr::Call {
|
||||
@@ -356,9 +356,8 @@ impl PatternLiteral {
|
||||
impl Declaration {
|
||||
fn reduce(&self, symbol_table: &SymbolTable) -> Stmt {
|
||||
use self::Declaration::*;
|
||||
use crate::ast::Signature;
|
||||
match self {
|
||||
Binding {name, constant, expr } => Stmt::Binding { name: name.clone(), constant: *constant, expr: expr.reduce(symbol_table) },
|
||||
Binding {name, constant, expr, .. } => Stmt::Binding { name: name.clone(), constant: *constant, expr: expr.node().reduce(symbol_table) },
|
||||
FuncDecl(Signature { name, params, .. }, statements) => Stmt::PreBinding {
|
||||
name: name.clone(),
|
||||
func: Func::UserDefined {
|
||||
@@ -377,7 +376,7 @@ impl Declaration {
|
||||
}
|
||||
|
||||
impl BinOp {
|
||||
fn reduce(&self, symbol_table: &SymbolTable, lhs: &Box<Node<Expression>>, rhs: &Box<Node<Expression>>) -> Expr {
|
||||
fn reduce(&self, symbol_table: &SymbolTable, lhs: &Box<Meta<Expression>>, rhs: &Box<Meta<Expression>>) -> Expr {
|
||||
if **self.sigil() == "=" {
|
||||
Expr::Assign {
|
||||
val: Box::new(lhs.node().reduce(symbol_table)),
|
||||
@@ -391,7 +390,7 @@ impl BinOp {
|
||||
}
|
||||
|
||||
impl PrefixOp {
|
||||
fn reduce(&self, symbol_table: &SymbolTable, arg: &Box<Node<Expression>>) -> Expr {
|
||||
fn reduce(&self, symbol_table: &SymbolTable, arg: &Box<Meta<Expression>>) -> Expr {
|
||||
let f = Box::new(Expr::Func(Func::BuiltIn(self.sigil().clone())));
|
||||
Expr::Call { f, args: vec![arg.node().reduce(symbol_table)]}
|
||||
}
|
||||
|
||||
@@ -1,32 +1,68 @@
|
||||
use std::collections::HashMap;
|
||||
use std::collections::hash_map::Entry;
|
||||
use std::rc::Rc;
|
||||
use std::fmt;
|
||||
use std::fmt::Write;
|
||||
|
||||
use crate::ast;
|
||||
use crate::ast::{TypeBody, TypeSingletonName, Signature};
|
||||
use crate::ast::{Meta, TypeBody, TypeSingletonName, Signature, Statement};
|
||||
use crate::typechecking::TypeName;
|
||||
|
||||
type LineNumber = u32;
|
||||
type SymbolTrackTable = HashMap<Rc<String>, LineNumber>;
|
||||
|
||||
#[derive(PartialEq, Eq, Hash, Debug)]
|
||||
struct PathToSymbol(Vec<Rc<String>>);
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
struct ScopeSegment {
|
||||
scope_name: Rc<String>,
|
||||
scope_type: ScopeSegmentKind,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
enum ScopeSegmentKind {
|
||||
Function,
|
||||
//Type,
|
||||
}
|
||||
|
||||
//cf. p. 150 or so of Language Implementation Patterns
pub struct SymbolTable {
pub values: HashMap<Rc<String>, Symbol> //TODO this will eventually have real type information
values: HashMap<PathToSymbol, Symbol>,
}

//TODO add various types of lookups here, maybe multiple hash tables internally? also make values
//non-public
//TODO add various types of lookups here, maybe multiple hash tables internally?
impl SymbolTable {
pub fn new() -> SymbolTable {
SymbolTable { values: HashMap::new() }
SymbolTable {
values: HashMap::new(),
}
}

fn add_new_symbol(&mut self, name: &Rc<String>, scope_path: &Vec<ScopeSegment>, spec: SymbolSpec) {
let mut vec: Vec<Rc<String>> = scope_path.iter().map(|segment| segment.scope_name.clone()).collect();
vec.push(name.clone());
let symbol_path = PathToSymbol(vec);
let symbol = Symbol { name: name.clone(), scopes: scope_path.to_vec(), spec };
self.values.insert(symbol_path, symbol);
}

pub fn lookup_by_name(&self, name: &Rc<String>) -> Option<&Symbol> {
self.values.get(name)
self.lookup_by_path(name, &vec![])
}

pub fn lookup_by_path(&self, name: &Rc<String>, path: &Vec<Rc<String>>) -> Option<&Symbol> {
let mut vec = path.clone();
vec.push(name.clone());
let symbol_path = PathToSymbol(vec);
self.values.get(&symbol_path)
}
}
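// Illustrative sketch (not part of the diff) of how the path-based lookup above
// is meant to be used: the scope path is the chain of enclosing function names,
// so a binding `a` declared inside `some_func` lives at ["some_func"] while a
// top-level `a` lives at the empty path. The source string below is hypothetical.
//
//   let mut table = SymbolTable::new();
//   table.add_top_level_symbols(&quick_ast("let a = 1; fn some_func() { let a = 2; 0 }")).unwrap();
//   assert!(table.lookup_by_path(&Rc::new("a".to_string()), &vec![]).is_some());
//   assert!(table.lookup_by_path(&Rc::new("a".to_string()), &vec![Rc::new("some_func".to_string())]).is_some());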
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Symbol {
|
||||
pub name: Rc<String>,
|
||||
pub name: Rc<String>, //TODO does this need to be pub?
|
||||
scopes: Vec<ScopeSegment>,
|
||||
pub spec: SymbolSpec,
|
||||
}
|
||||
|
||||
@@ -46,7 +82,8 @@ pub enum SymbolSpec {
|
||||
},
|
||||
RecordConstructor {
|
||||
fields: HashMap<Rc<String>, Rc<String>>
|
||||
}
|
||||
},
|
||||
Binding
|
||||
}
|
||||
|
||||
impl fmt::Display for SymbolSpec {
|
||||
@@ -55,7 +92,8 @@ impl fmt::Display for SymbolSpec {
|
||||
match self {
|
||||
Func(type_names) => write!(f, "Func({:?})", type_names),
|
||||
DataConstructor { index, type_name, type_args } => write!(f, "DataConstructor(idx: {})({:?} -> {})", index, type_args, type_name),
|
||||
RecordConstructor { fields } => write!(f, "RecordConstructor( <fields> )"),
|
||||
RecordConstructor { fields: _fields } => write!(f, "RecordConstructor( <fields> )"),
|
||||
Binding => write!(f, "Binding"),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -63,15 +101,58 @@ impl fmt::Display for SymbolSpec {
|
||||
impl SymbolTable {
|
||||
/* note: this adds names for *forward reference* but doesn't actually create any types. solve that problem
|
||||
* later */
|
||||
|
||||
pub fn add_top_level_symbols(&mut self, ast: &ast::AST) -> Result<(), String> {
|
||||
use self::ast::Statement;
|
||||
let mut scope_name_stack = Vec::new();
|
||||
self.add_symbols_from_scope(&ast.0, &mut scope_name_stack)
|
||||
}
|
||||
|
||||
fn add_symbols_from_scope<'a>(&'a mut self, statements: &Vec<Meta<Statement>>, scope_name_stack: &mut Vec<ScopeSegment>) -> Result<(), String> {
|
||||
use self::ast::Declaration::*;
|
||||
for statement in ast.0.iter() {
|
||||
let statement = statement.node();
|
||||
|
||||
fn insert_and_check_duplicate_symbol(table: &mut SymbolTrackTable, name: &Rc<String>) -> Result<(), String> {
|
||||
match table.entry(name.clone()) {
|
||||
Entry::Occupied(o) => {
|
||||
let line_number = o.get(); //TODO make this actually work
|
||||
Err(format!("Duplicate definition: {}. It's already defined at {}", name, line_number))
|
||||
},
|
||||
Entry::Vacant(v) => {
|
||||
let line_number = 0; //TODO should work
|
||||
v.insert(line_number);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut seen_identifiers: SymbolTrackTable = HashMap::new();
|
||||
|
||||
for meta in statements.iter() {
|
||||
let statement = meta.node();
|
||||
if let Statement::Declaration(decl) = statement {
|
||||
match decl {
|
||||
FuncSig(signature) | FuncDecl(signature, _) => self.add_function_signature(signature)?,
|
||||
TypeDecl { name, body, mutable } => self.add_type_decl(name, body, mutable)?,
|
||||
FuncSig(ref signature) => {
|
||||
insert_and_check_duplicate_symbol(&mut seen_identifiers, &signature.name)?;
|
||||
self.add_function_signature(signature, scope_name_stack)?
|
||||
}
|
||||
FuncDecl(ref signature, ref body) => {
|
||||
insert_and_check_duplicate_symbol(&mut seen_identifiers, &signature.name)?;
|
||||
self.add_function_signature(signature, scope_name_stack)?;
|
||||
scope_name_stack.push(ScopeSegment{
|
||||
scope_name: signature.name.clone(),
|
||||
scope_type: ScopeSegmentKind::Function,
|
||||
});
|
||||
let output = self.add_symbols_from_scope(body, scope_name_stack);
|
||||
let _ = scope_name_stack.pop();
|
||||
output?
|
||||
},
|
||||
TypeDecl { name, body, mutable } => {
|
||||
insert_and_check_duplicate_symbol(&mut seen_identifiers, &name.name)?;
|
||||
self.add_type_decl(name, body, mutable, scope_name_stack)?
|
||||
},
|
||||
Binding { name, .. } => {
|
||||
insert_and_check_duplicate_symbol(&mut seen_identifiers, name)?;
|
||||
self.add_new_symbol(name, scope_name_stack, SymbolSpec::Binding);
|
||||
}
|
||||
_ => ()
|
||||
}
|
||||
}
|
||||
@@ -81,29 +162,27 @@ impl SymbolTable {
|
||||
pub fn debug_symbol_table(&self) -> String {
|
||||
let mut output = format!("Symbol table\n");
|
||||
for (name, sym) in &self.values {
|
||||
write!(output, "{} -> {}\n", name, sym).unwrap();
|
||||
write!(output, "{:?} -> {}\n", name, sym).unwrap();
|
||||
}
|
||||
output
|
||||
}
|
||||
|
||||
fn add_function_signature(&mut self, signature: &Signature) -> Result<(), String> {
|
||||
fn add_function_signature(&mut self, signature: &Signature, scope_name_stack: &mut Vec<ScopeSegment>) -> Result<(), String> {
|
||||
let mut local_type_context = LocalTypeContext::new();
|
||||
let types = signature.params.iter().map(|param| match param {
|
||||
(_, Some(type_identifier)) => Rc::new(format!("{:?}", type_identifier)),
|
||||
(_, None) => local_type_context.new_universal_type()
|
||||
}).collect();
|
||||
let spec = SymbolSpec::Func(types);
|
||||
self.values.insert(
|
||||
signature.name.clone(),
|
||||
Symbol { name: signature.name.clone(), spec }
|
||||
);
|
||||
self.add_new_symbol(&signature.name, scope_name_stack, SymbolSpec::Func(types));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn add_type_decl(&mut self, type_name: &TypeSingletonName, body: &TypeBody, _mutable: &bool) -> Result<(), String> {
|
||||
fn add_type_decl(&mut self, type_name: &TypeSingletonName, body: &TypeBody, _mutable: &bool, scope_name_stack: &mut Vec<ScopeSegment>) -> Result<(), String> {
|
||||
use crate::ast::{TypeIdentifier, Variant};
|
||||
let TypeBody(variants) = body;
|
||||
let TypeSingletonName { name, .. } = type_name;
|
||||
//scope_name_stack.push(name.clone()); //TODO adding this makes variants scoped under their
|
||||
//type name and breaks a lot of things - don't add it until importing names works
|
||||
//TODO figure out why _params isn't being used here
|
||||
for (index, var) in variants.iter().enumerate() {
|
||||
match var {
|
||||
@@ -113,7 +192,7 @@ impl SymbolTable {
|
||||
type_name: name.clone(),
|
||||
type_args: vec![],
|
||||
};
|
||||
self.values.insert(variant_name.clone(), Symbol { name: variant_name.clone(), spec });
|
||||
self.add_new_symbol(variant_name, scope_name_stack, spec);
|
||||
},
|
||||
Variant::TupleStruct(variant_name, tuple_members) => {
|
||||
let type_args = tuple_members.iter().map(|type_name| match type_name {
|
||||
@@ -125,19 +204,18 @@ impl SymbolTable {
|
||||
type_name: name.clone(),
|
||||
type_args
|
||||
};
|
||||
let symbol = Symbol { name: variant_name.clone(), spec };
|
||||
self.values.insert(variant_name.clone(), symbol);
|
||||
self.add_new_symbol(variant_name, scope_name_stack, spec);
|
||||
},
|
||||
//TODO if there is only one variant, and it is a record, it doesn't need to have an
|
||||
//explicit name
|
||||
Variant::Record { name, members } => {
|
||||
Variant::Record { name, members: _members } => {
|
||||
let fields = HashMap::new();
|
||||
let spec = SymbolSpec::RecordConstructor { fields };
|
||||
let symbol = Symbol { name: name.clone(), spec };
|
||||
self.values.insert(name.clone(), symbol);
|
||||
self.add_new_symbol(name, scope_name_stack, spec);
|
||||
},
|
||||
}
|
||||
}
|
||||
//scope_name_stack.pop();
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
@@ -156,3 +234,158 @@ impl LocalTypeContext {
|
||||
Rc::new(format!("{}", (('a' as u8) + n) as char))
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
#[cfg(test)]
|
||||
mod symbol_table_tests {
|
||||
use super::*;
|
||||
use crate::util::quick_ast;
|
||||
|
||||
macro_rules! values_in_table {
|
||||
//TODO multiple values
|
||||
($source:expr, $single_value:expr) => {
|
||||
{
|
||||
let mut symbol_table = SymbolTable::new();
|
||||
let ast = quick_ast($source);
|
||||
symbol_table.add_top_level_symbols(&ast).unwrap();
|
||||
match symbol_table.lookup_by_name($single_value) {
|
||||
Some(_spec) => (),
|
||||
None => panic!(),
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn basic_symbol_table() {
|
||||
values_in_table! { "let a = 10; fn b() { 20 }", &rc!(b) };
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn no_duplicates() {
|
||||
let source = r#"
|
||||
fn a() { 1 }
|
||||
fn b() { 2 }
|
||||
fn a() { 3 }
|
||||
"#;
|
||||
let mut symbol_table = SymbolTable::new();
|
||||
let ast = quick_ast(source);
|
||||
let output = symbol_table.add_top_level_symbols(&ast).unwrap_err();
|
||||
assert!(output.contains("Duplicate"))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn no_duplicates_2() {
|
||||
let source = r#"
|
||||
let a = 20;
|
||||
let q = 39;
|
||||
let a = 30;
|
||||
"#;
|
||||
let mut symbol_table = SymbolTable::new();
|
||||
let ast = quick_ast(source);
|
||||
let output = symbol_table.add_top_level_symbols(&ast).unwrap_err();
|
||||
assert!(output.contains("Duplicate"))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn no_duplicates_3() {
|
||||
let source = r#"
|
||||
fn a() {
|
||||
let a = 20
|
||||
let b = 40
|
||||
a + b
|
||||
}
|
||||
|
||||
fn q() {
|
||||
let x = 30
|
||||
let x = 33
|
||||
}
|
||||
"#;
|
||||
let mut symbol_table = SymbolTable::new();
|
||||
let ast = quick_ast(source);
|
||||
let output = symbol_table.add_top_level_symbols(&ast).unwrap_err();
|
||||
assert!(output.contains("Duplicate"))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn dont_falsely_detect_duplicates() {
|
||||
let source = r#"
|
||||
let a = 20;
|
||||
fn some_func() {
|
||||
let a = 40;
|
||||
77
|
||||
}
|
||||
let q = 39;
|
||||
"#;
|
||||
let mut symbol_table = SymbolTable::new();
|
||||
let ast = quick_ast(source);
|
||||
symbol_table.add_top_level_symbols(&ast).unwrap();
|
||||
assert!(symbol_table.lookup_by_path(&rc!(a), &vec![]).is_some());
|
||||
assert!(symbol_table.lookup_by_path(&rc!(a), &vec![rc!(some_func)]).is_some());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn enclosing_scopes() {
|
||||
let source = r#"
|
||||
fn outer_func(x) {
|
||||
fn inner_func(arg) {
|
||||
arg
|
||||
}
|
||||
x + inner_func(x)
|
||||
}"#;
|
||||
let mut symbol_table = SymbolTable::new();
|
||||
let ast = quick_ast(source);
|
||||
symbol_table.add_top_level_symbols(&ast).unwrap();
|
||||
assert!(symbol_table.lookup_by_path(&rc!(outer_func), &vec![]).is_some());
|
||||
assert!(symbol_table.lookup_by_path(&rc!(inner_func), &vec![rc!(outer_func)]).is_some());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn enclosing_scopes_2() {
|
||||
let source = r#"
|
||||
fn outer_func(x) {
|
||||
fn inner_func(arg) {
|
||||
arg
|
||||
}
|
||||
|
||||
fn second_inner_func() {
|
||||
fn another_inner_func() {
|
||||
}
|
||||
}
|
||||
|
||||
inner_func(x)
|
||||
}"#;
|
||||
let mut symbol_table = SymbolTable::new();
|
||||
let ast = quick_ast(source);
|
||||
symbol_table.add_top_level_symbols(&ast).unwrap();
|
||||
println!("{}", symbol_table.debug_symbol_table());
|
||||
assert!(symbol_table.lookup_by_path(&rc!(outer_func), &vec![]).is_some());
|
||||
assert!(symbol_table.lookup_by_path(&rc!(inner_func), &vec![rc!(outer_func)]).is_some());
|
||||
assert!(symbol_table.lookup_by_path(&rc!(second_inner_func), &vec![rc!(outer_func)]).is_some());
|
||||
assert!(symbol_table.lookup_by_path(&rc!(another_inner_func), &vec![rc!(outer_func), rc!(second_inner_func)]).is_some());
|
||||
}
|
||||
|
||||
#[test]
fn enclosing_scopes_3() {
let source = r#"
fn outer_func(x) {
fn inner_func(arg) {
arg
}

fn second_inner_func() {
fn another_inner_func() {
}
fn another_inner_func() {
}
}

inner_func(x)
}"#;
let mut symbol_table = SymbolTable::new();
let ast = quick_ast(source);
let output = symbol_table.add_top_level_symbols(&ast).unwrap_err();
assert!(output.contains("Duplicate"))
}
}
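// Hypothetical companion case (not in the diff): duplicate detection is per
// scope, so reusing a binding name in two *different* sibling functions should
// still succeed, because each function body gets its own `seen_identifiers` table.
//
//   let source = r#"
//   fn f() { let x = 1; x }
//   fn g() { let x = 2; x }
//   "#;
//   let mut symbol_table = SymbolTable::new();
//   symbol_table.add_top_level_symbols(&quick_ast(source)).unwrap();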
|
||||
|
||||
|
||||
@@ -1,27 +1,146 @@
|
||||
use std::rc::Rc;
|
||||
use std::fmt::Write;
|
||||
|
||||
use ena::unify::{UnifyKey, InPlaceUnificationTable, UnificationTable, EqUnifyValue};
|
||||
|
||||
use crate::ast::*;
|
||||
use crate::util::ScopeStack;
|
||||
use crate::builtin::{PrefixOp, BinOp};
|
||||
|
||||
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct TypeData {
|
||||
ty: Option<Type>
|
||||
}
|
||||
|
||||
impl TypeData {
|
||||
pub fn new() -> TypeData {
|
||||
TypeData { ty: None }
|
||||
}
|
||||
}
|
||||
|
||||
pub type TypeName = Rc<String>;
|
||||
|
||||
pub struct TypeContext<'a> {
|
||||
variable_map: ScopeStack<'a, Rc<String>, Type<TVar>>,
|
||||
evar_count: u32
|
||||
variable_map: ScopeStack<'a, Rc<String>, Type>,
|
||||
unification_table: InPlaceUnificationTable<TypeVar>,
|
||||
}
|
||||
|
||||
/// `InferResult` is the monad in which type inference takes place.
|
||||
type InferResult<T> = Result<T, TypeError>;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
struct TypeError { msg: String }
|
||||
pub struct TypeError { pub msg: String }
|
||||
|
||||
impl TypeError {
|
||||
fn new<A>(msg: &str) -> InferResult<A> {
|
||||
Err(TypeError { msg: msg.to_string() })
|
||||
fn new<A, T>(msg: T) -> InferResult<A> where T: Into<String> {
|
||||
Err(TypeError { msg: msg.into() })
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub enum Type {
|
||||
Const(TypeConst),
|
||||
Var(TypeVar),
|
||||
Arrow {
|
||||
params: Vec<Type>,
|
||||
ret: Box<Type>
|
||||
},
|
||||
Compound {
|
||||
ty_name: String,
|
||||
args:Vec<Type>
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub struct TypeVar(usize);
|
||||
|
||||
impl UnifyKey for TypeVar {
|
||||
type Value = Option<TypeConst>;
|
||||
fn index(&self) -> u32 { self.0 as u32 }
|
||||
fn from_index(u: u32) -> TypeVar { TypeVar(u as usize) }
|
||||
fn tag() -> &'static str { "TypeVar" }
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub enum TypeConst {
|
||||
Unit,
|
||||
Nat,
|
||||
Int,
|
||||
Float,
|
||||
StringT,
|
||||
Bool,
|
||||
Ordering,
|
||||
//UserDefined
|
||||
}
|
||||
|
||||
impl TypeConst {
|
||||
pub fn to_string(&self) -> String {
|
||||
use self::TypeConst::*;
|
||||
match self {
|
||||
Unit => format!("()"),
|
||||
Nat => format!("Nat"),
|
||||
Int => format!("Int"),
|
||||
Float => format!("Float"),
|
||||
StringT => format!("String"),
|
||||
Bool => format!("Bool"),
|
||||
Ordering => format!("Ordering"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl EqUnifyValue for TypeConst { }
|
||||
|
||||
macro_rules! ty {
($type_name:ident) => { Type::Const(TypeConst::$type_name) };
($t1:ident -> $t2:ident) => { Type::Arrow { params: vec![ty!($t1)], ret: box ty!($t2) } };
($t1:ident -> $t2:ident -> $t3:ident) => { Type::Arrow { params: vec![ty!($t1), ty!($t2)], ret: box ty!($t3) } };
($type_list:ident, $ret_type:ident) => {
Type::Arrow {
params: $type_list,
ret: box $ret_type,
}
}
}
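// Illustrative expansions of the `ty!` macro above (not part of the diff):
//
//   ty!(Nat)               // Type::Const(TypeConst::Nat)
//   ty!(Nat -> Bool)       // Type::Arrow { params: vec![ty!(Nat)], ret: box ty!(Bool) }
//   ty!(Nat -> Nat -> Nat) // the two-argument arrow shape used for binary operators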
|
||||
|
||||
//TODO find a better way to capture the to/from string logic
|
||||
impl Type {
|
||||
pub fn to_string(&self) -> String {
|
||||
use self::Type::*;
|
||||
match self {
|
||||
Const(c) => c.to_string(),
|
||||
Var(v) => format!("t_{}", v.0),
|
||||
Arrow { params, box ref ret } => {
|
||||
if params.len() == 0 {
|
||||
format!("-> {}", ret.to_string())
|
||||
} else {
|
||||
let mut buf = String::new();
|
||||
for p in params.iter() {
|
||||
write!(buf, "{} -> ", p.to_string()).unwrap();
|
||||
}
|
||||
write!(buf, "{}", ret.to_string()).unwrap();
|
||||
buf
|
||||
}
|
||||
},
|
||||
Compound { .. } => format!("<some compound type>")
|
||||
}
|
||||
}
|
||||
|
||||
fn from_string(string: &str) -> Option<Type> {
|
||||
Some(match string {
|
||||
"()" | "Unit" => ty!(Unit),
|
||||
"Nat" => ty!(Nat),
|
||||
"Int" => ty!(Int),
|
||||
"Float" => ty!(Float),
|
||||
"String" => ty!(StringT),
|
||||
"Bool" => ty!(Bool),
|
||||
"Ordering" => ty!(Ordering),
|
||||
_ => return None
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
/// `Type` is parameterized by whether the type variables can be just universal, or universal or
|
||||
/// existential.
|
||||
#[derive(Debug, Clone)]
|
||||
@@ -104,151 +223,250 @@ impl TConst {
|
||||
TConst::User(Rc::new(name.to_string()))
|
||||
}
|
||||
}
|
||||
*/
|
||||
|
||||
impl<'a> TypeContext<'a> {
|
||||
pub fn new() -> TypeContext<'a> {
|
||||
TypeContext {
|
||||
variable_map: ScopeStack::new(None),
|
||||
evar_count: 0
|
||||
unification_table: UnificationTable::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn typecheck(&mut self, ast: &AST) -> Result<String, String> {
|
||||
match self.infer_ast(ast) {
|
||||
Ok(t) => Ok(format!("{:?}", t)),
|
||||
Err(err) => Err(format!("Type error: {:?}", err))
|
||||
}
|
||||
}
|
||||
}
|
||||
/*
|
||||
fn new_env(&'a self, new_var: Rc<String>, ty: Type) -> TypeContext<'a> {
|
||||
let mut new_context = TypeContext {
|
||||
variable_map: self.variable_map.new_scope(None),
|
||||
unification_table: UnificationTable::new(), //???? not sure if i want this
|
||||
};
|
||||
|
||||
impl<'a> TypeContext<'a> {
|
||||
fn infer_ast(&mut self, ast: &AST) -> InferResult<Type<UVar>> {
|
||||
self.infer_block(&ast.0)
|
||||
new_context.variable_map.insert(new_var, ty);
|
||||
new_context
|
||||
}
|
||||
*/
|
||||
|
||||
fn infer_statement(&mut self, stmt: &Statement) -> InferResult<Type<UVar>> {
|
||||
match stmt {
|
||||
Statement::ExpressionStatement(ref expr) => self.infer_expr(expr.node()),
|
||||
Statement::Declaration(ref decl) => self.infer_decl(decl),
|
||||
}
|
||||
}
|
||||
|
||||
fn infer_expr(&mut self, expr: &Expression) -> InferResult<Type<UVar>> {
|
||||
match expr {
|
||||
Expression(expr_type, Some(type_anno)) => {
|
||||
let tx = self.infer_expr_type(expr_type)?;
|
||||
let ty = type_anno.to_monotype();
|
||||
self.unify(&ty.to_tvar(), &tx.to_tvar()).map(|x| x.skolemize())
|
||||
},
|
||||
Expression(expr_type, None) => self.infer_expr_type(expr_type)
|
||||
}
|
||||
}
|
||||
|
||||
fn infer_decl(&mut self, _decl: &Declaration) -> InferResult<Type<UVar>> {
|
||||
Ok(Type::Const(TConst::user("unimplemented")))
|
||||
}
|
||||
|
||||
fn infer_expr_type(&mut self, expr_type: &ExpressionType) -> InferResult<Type<UVar>> {
|
||||
use self::ExpressionType::*;
|
||||
Ok(match expr_type {
|
||||
NatLiteral(_) => Type::Const(TConst::Nat),
|
||||
FloatLiteral(_) => Type::Const(TConst::Float),
|
||||
StringLiteral(_) => Type::Const(TConst::StringT),
|
||||
BoolLiteral(_) => Type::Const(TConst::Bool),
|
||||
Value(name) => {
|
||||
//TODO handle the distinction between 0-arg constructors and variables at some point
|
||||
// need symbol table for that
|
||||
match self.variable_map.lookup(name) {
|
||||
Some(ty) => ty.clone().skolemize(),
|
||||
None => return TypeError::new(&format!("Variable {} not found", name))
|
||||
fn get_type_from_name(&self, name: &TypeIdentifier) -> InferResult<Type> {
|
||||
use self::TypeIdentifier::*;
|
||||
Ok(match name {
|
||||
Singleton(TypeSingletonName { name,.. }) => {
|
||||
match Type::from_string(&name) {
|
||||
Some(ty) => ty,
|
||||
None => return TypeError::new(format!("Unknown type name: {}", name))
|
||||
}
|
||||
},
|
||||
IfExpression { discriminator, body } => self.infer_if_expr(discriminator, body)?,
|
||||
Call { f, arguments } => {
|
||||
let tf = self.infer_expr(f)?; //has to be an Arrow Type
|
||||
let targ = self.infer_expr(&arguments[0].node())?; // TODO make this work with functions with more than one arg
|
||||
match tf {
|
||||
Type::Arrow(t1, t2) => {
|
||||
self.unify(&t1.to_tvar(), &targ.to_tvar())?;
|
||||
*t2.clone()
|
||||
},
|
||||
_ => return TypeError::new("not a function")
|
||||
}
|
||||
},
|
||||
|
||||
Lambda { params, .. } => {
|
||||
|
||||
let _arg_type = match ¶ms[0] {
|
||||
(_, Some(type_anno)) => type_anno.to_monotype().to_tvar(),
|
||||
(_, None) => self.allocate_existential(),
|
||||
};
|
||||
//let _result_type = unimplemented!();
|
||||
return TypeError::new("Unimplemented");
|
||||
|
||||
//Type::Arrow(Box::new(arg_type), Box::new(result_type))
|
||||
}
|
||||
_ => Type::Const(TConst::user("unimplemented"))
|
||||
Tuple(_) => return TypeError::new("tuples aren't ready yet"),
|
||||
})
|
||||
}
|
||||
|
||||
fn infer_if_expr(&mut self, discriminator: &Discriminator, body: &IfExpressionBody) -> InferResult<Type<UVar>> {
|
||||
let _test = match discriminator {
|
||||
Discriminator::Simple(expr) => expr,
|
||||
_ => return TypeError::new("Dame desu")
|
||||
};
|
||||
|
||||
let (_then_clause, _maybe_else_clause) = match body {
|
||||
IfExpressionBody::SimpleConditional(a, b) => (a, b),
|
||||
_ => return TypeError::new("Dont work")
|
||||
};
|
||||
|
||||
TypeError::new("Not implemented")
|
||||
/// `typecheck` is the entry into the type-inference system, accepting an AST as an argument
|
||||
/// Following the example of GHC, the compiler deliberately does typechecking before de-sugaring
|
||||
/// the AST to ReducedAST
|
||||
pub fn typecheck(&mut self, ast: &AST) -> Result<Type, TypeError> {
|
||||
let mut returned_type = Type::Const(TypeConst::Unit);
|
||||
for statement in ast.0.iter() {
|
||||
returned_type = self.statement(statement.node())?;
|
||||
}
|
||||
Ok(returned_type)
|
||||
}
|
||||
|
||||
fn infer_block(&mut self, block: &Block) -> InferResult<Type<UVar>> {
|
||||
let mut output = Type::Const(TConst::Unit);
|
||||
for statement in block.iter() {
|
||||
output = self.infer_statement(statement.node())?;
|
||||
fn statement(&mut self, statement: &Statement) -> InferResult<Type> {
|
||||
match statement {
|
||||
Statement::ExpressionStatement(e) => self.expr(e.node()),
|
||||
Statement::Declaration(decl) => self.decl(decl),
|
||||
}
|
||||
}
|
||||
|
||||
fn decl(&mut self, decl: &Declaration) -> InferResult<Type> {
|
||||
use self::Declaration::*;
|
||||
match decl {
|
||||
Binding { name, expr, .. } => {
|
||||
let ty = self.expr(expr.node())?;
|
||||
self.variable_map.insert(name.clone(), ty);
|
||||
},
|
||||
_ => (),
|
||||
}
|
||||
Ok(ty!(Unit))
|
||||
}
|
||||
|
||||
fn expr(&mut self, expr: &Expression) -> InferResult<Type> {
|
||||
match expr {
|
||||
Expression(expr_type, Some(anno)) => {
|
||||
let t1 = self.expr_type(expr_type)?;
|
||||
let t2 = self.get_type_from_name(anno)?;
|
||||
self.unify(t2, t1)
|
||||
},
|
||||
Expression(expr_type, None) => self.expr_type(expr_type)
|
||||
}
|
||||
}
|
||||
|
||||
fn expr_type(&mut self, expr: &ExpressionKind) -> InferResult<Type> {
|
||||
use self::ExpressionKind::*;
|
||||
Ok(match expr {
|
||||
NatLiteral(_) => ty!(Nat),
|
||||
BoolLiteral(_) => ty!(Bool),
|
||||
FloatLiteral(_) => ty!(Float),
|
||||
StringLiteral(_) => ty!(StringT),
|
||||
PrefixExp(op, expr) => self.prefix(op, expr.node())?,
|
||||
BinExp(op, lhs, rhs) => self.binexp(op, lhs.node(), rhs.node())?,
|
||||
IfExpression { discriminator, body } => self.if_expr(discriminator, body)?,
|
||||
Value(val) => self.handle_value(val)?,
|
||||
Call { box ref f, arguments } => self.call(f.node(), arguments)?,
|
||||
Lambda { params, type_anno, body } => self.lambda(params, type_anno, body)?,
|
||||
_ => ty!(Unit),
|
||||
})
|
||||
}
|
||||
|
||||
fn prefix(&mut self, op: &PrefixOp, expr: &Expression) -> InferResult<Type> {
|
||||
let tf = match op.get_type() {
|
||||
Ok(ty) => ty,
|
||||
Err(e) => return TypeError::new(e)
|
||||
};
|
||||
|
||||
let tx = self.expr(expr)?;
|
||||
self.handle_apply(tf, vec![tx])
|
||||
}
|
||||
|
||||
fn binexp(&mut self, op: &BinOp, lhs: &Expression, rhs: &Expression) -> InferResult<Type> {
|
||||
let tf = match op.get_type() {
|
||||
Ok(ty) => ty,
|
||||
Err(e) => return TypeError::new(e),
|
||||
};
|
||||
|
||||
let t_lhs = self.expr(lhs)?;
|
||||
let t_rhs = self.expr(rhs)?; //TODO is this order a problem? not sure
|
||||
|
||||
self.handle_apply(tf, vec![t_lhs, t_rhs])
|
||||
}
|
||||
|
||||
fn if_expr(&mut self, discriminator: &Discriminator, body: &IfExpressionBody) -> InferResult<Type> {
|
||||
use self::Discriminator::*; use self::IfExpressionBody::*;
|
||||
match (discriminator, body) {
|
||||
(Simple(expr), SimpleConditional(then_clause, else_clause)) => self.handle_simple_if(expr, then_clause, else_clause),
|
||||
_ => TypeError::new(format!("Complex conditionals not supported"))
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_simple_if(&mut self, expr: &Expression, then_clause: &Block, else_clause: &Option<Block>) -> InferResult<Type> {
|
||||
let t1 = self.expr(expr)?;
|
||||
let t2 = self.block(then_clause)?;
|
||||
let t3 = match else_clause {
|
||||
Some(block) => self.block(block)?,
|
||||
None => ty!(Unit)
|
||||
};
|
||||
|
||||
let _ = self.unify(ty!(Bool), t1)?;
|
||||
self.unify(t2, t3)
|
||||
}
|
||||
|
||||
fn lambda(&mut self, params: &Vec<FormalParam>, type_anno: &Option<TypeIdentifier>, _body: &Block) -> InferResult<Type> {
|
||||
let argument_types: InferResult<Vec<Type>> = params.iter().map(|param: &FormalParam| {
|
||||
if let (_, Some(type_identifier)) = param {
|
||||
self.get_type_from_name(type_identifier)
|
||||
} else {
|
||||
Ok(Type::Var(self.fresh_type_variable()))
|
||||
}
|
||||
}).collect();
|
||||
let argument_types = argument_types?;
|
||||
let ret_type = match type_anno.as_ref() {
|
||||
Some(anno) => self.get_type_from_name(anno)?,
|
||||
None => Type::Var(self.fresh_type_variable())
|
||||
};
|
||||
|
||||
Ok(ty!(argument_types, ret_type))
|
||||
}
|
||||
|
||||
fn call(&mut self, f: &Expression, args: &Vec<Meta<Expression>>) -> InferResult<Type> {
|
||||
let tf = self.expr(f)?;
|
||||
let arg_types: InferResult<Vec<Type>> = args.iter().map(|ex| self.expr(ex.node())).collect();
|
||||
let arg_types = arg_types?;
|
||||
self.handle_apply(tf, arg_types)
|
||||
}
|
||||
|
||||
fn handle_apply(&mut self, tf: Type, args: Vec<Type>) -> InferResult<Type> {
|
||||
Ok(match tf {
|
||||
Type::Arrow { ref params, ret: box ref t_ret } if params.len() == args.len() => {
|
||||
for (t_param, t_arg) in params.iter().zip(args.iter()) {
|
||||
let _ = self.unify(t_param.clone(), t_arg.clone())?; //TODO I think this needs to reference a sub-scope
|
||||
}
|
||||
t_ret.clone()
|
||||
},
|
||||
Type::Arrow { .. } => return TypeError::new("Wrong length"),
|
||||
_ => return TypeError::new(format!("Not a function"))
|
||||
})
|
||||
}
|
||||
|
||||
fn block(&mut self, block: &Block) -> InferResult<Type> {
|
||||
let mut output = ty!(Unit);
|
||||
for s in block.iter() {
|
||||
let statement = s.node();
|
||||
output = self.statement(statement)?;
|
||||
}
|
||||
Ok(output)
|
||||
}
|
||||
|
||||
fn unify(&mut self, _t1: &Type<TVar>, _t2: &Type<TVar>) -> InferResult<Type<TVar>> {
|
||||
TypeError::new("not implemented")
|
||||
fn handle_value(&mut self, val: &Rc<String>) -> InferResult<Type> {
|
||||
match self.variable_map.lookup(val) {
|
||||
Some(ty) => Ok(ty.clone()),
|
||||
None => TypeError::new(format!("Couldn't find variable: {}", val))
|
||||
}
|
||||
}
|
||||
|
||||
fn allocate_existential(&mut self) -> Type<TVar> {
|
||||
let n = self.evar_count;
|
||||
self.evar_count += 1;
|
||||
Type::Var(TVar::Exist(ExistentialVar(n)))
|
||||
fn unify(&mut self, t1: Type, t2: Type) -> InferResult<Type> {
use self::Type::*;

match (t1, t2) {
(Const(ref c1), Const(ref c2)) if c1 == c2 => Ok(Const(c1.clone())), //choice of c1 is arbitrary I *think*
(a @ Var(_), b @ Const(_)) => self.unify(b, a),
(Const(ref c1), Var(ref v2)) => {
self.unification_table.unify_var_value(v2.clone(), Some(c1.clone()))
.or_else(|_| TypeError::new(format!("Couldn't unify {:?} and {:?}", Const(c1.clone()), Var(*v2))))?;
Ok(Const(c1.clone()))
},
(Var(v1), Var(v2)) => {
//TODO add occurs check
self.unification_table.unify_var_var(v1.clone(), v2.clone())
.or_else(|e| {
println!("Unify error: {:?}", e);
TypeError::new(format!("Two type variables {:?} and {:?} couldn't unify", v1, v2))
})?;
Ok(Var(v1.clone())) //arbitrary decision I think
},
(a, b) => TypeError::new(format!("{:?} and {:?} do not unify", a, b)),
}
}
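// Standalone sketch of the `ena` unification calls used above (assumes the same
// `ena` API the diff already relies on; not part of the diff itself). `new_key`
// mints a fresh inference variable, `unify_var_var` links two variables into one
// equivalence class, and `unify_var_value` pins that class to a concrete TypeConst.
//
//   let mut table: InPlaceUnificationTable<TypeVar> = UnificationTable::new();
//   let a = table.new_key(None);
//   let b = table.new_key(None);
//   table.unify_var_var(a, b).unwrap();
//   table.unify_var_value(a, Some(TypeConst::Nat)).unwrap();
//   assert_eq!(table.probe_value(b), Some(TypeConst::Nat)); // `probe_value` assumed from ena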
|
||||
|
||||
fn fresh_type_variable(&mut self) -> TypeVar {
|
||||
let new_type_var = self.unification_table.new_key(None);
|
||||
new_type_var
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
mod typechecking_tests {
|
||||
use super::*;
|
||||
|
||||
fn parse(input: &str) -> AST {
|
||||
let tokens: Vec<crate::tokenizing::Token> = crate::tokenizing::tokenize(input);
|
||||
let mut parser = crate::parsing::Parser::new(tokens);
|
||||
parser.parse().unwrap()
|
||||
}
|
||||
|
||||
macro_rules! type_test {
|
||||
($input:expr, $correct:expr) => {
|
||||
{
|
||||
macro_rules! assert_type_in_fresh_context {
|
||||
($string:expr, $type:expr) => {
|
||||
let mut tc = TypeContext::new();
|
||||
let ast = parse($input);
|
||||
tc.add_symbols(&ast);
|
||||
assert_eq!($correct, tc.type_check(&ast).unwrap())
|
||||
}
|
||||
let ref ast = crate::util::quick_ast($string);
|
||||
let ty = tc.typecheck(ast).unwrap();
|
||||
assert_eq!(ty, $type)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
#[test]
fn basic_test() {
assert_type_in_fresh_context!("1", ty!(Nat));
assert_type_in_fresh_context!(r#""drugs""#, ty!(StringT));
assert_type_in_fresh_context!("true", ty!(Bool));
assert_type_in_fresh_context!("-1", ty!(Int));
}

#[test]
fn basic_inference() {

fn operators() {
assert_type_in_fresh_context!("1 + 2", ty!(Nat));
assert_type_in_fresh_context!("-2", ty!(Int));
assert_type_in_fresh_context!("!true", ty!(Bool));
}
}
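// Hypothetical extra case in the same style (not part of the diff): `decl`
// stores a binding's inferred type in `variable_map`, so a later reference to
// the bound name should typecheck to that type.
//
//   #[test]
//   fn bindings() {
//     assert_type_in_fresh_context!("let x = 2 + 2; x", ty!(Nat));
//   }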

@@ -41,3 +41,15 @@ impl<'a, T, V> ScopeStack<'a, T, V> where T: Hash + Eq {
}
}

/// this is intended for use in tests, and does no error-handling whatsoever
#[allow(dead_code)]
pub fn quick_ast(input: &str) -> crate::ast::AST {
let tokens = crate::tokenizing::tokenize(input);
let mut parser = crate::parsing::Parser::new(tokens);
parser.parse().unwrap()
}

#[allow(unused_macros)]
macro_rules! rc {
($string:tt) => { Rc::new(stringify!($string).to_string()) }
}

@@ -1,13 +0,0 @@
[package]
name = "schala-repl-codegen"
version = "0.1.0"
authors = ["greg <greg.shuflin@protonmail.com>"]

[dependencies]
syn = { version = "0.15.6", features = ["full", "extra-traits"] }
quote = "0.6.8"
proc-macro2 = "0.4.19"
schala-repl = { path = "../schala-repl" }

[lib]
proc-macro = true
@@ -1,199 +0,0 @@
|
||||
#![feature(trace_macros)]
|
||||
#![recursion_limit="128"]
|
||||
extern crate proc_macro;
|
||||
extern crate proc_macro2;
|
||||
#[macro_use]
|
||||
extern crate quote;
|
||||
extern crate syn;
|
||||
|
||||
use proc_macro::TokenStream;
|
||||
use syn::{Ident, Attribute, DeriveInput};
|
||||
|
||||
fn find_attr_by_name<'a>(name: &str, attrs: &'a Vec<Attribute>) -> Option<&'a Attribute> {
|
||||
attrs.iter().find(|attr| {
|
||||
let first = attr.path.segments.first();
|
||||
let seg: Option<&&syn::PathSegment> = first.as_ref().map(|x| x.value());
|
||||
seg.map(|seg| seg.ident.to_string() == name).unwrap_or(false)
|
||||
})
|
||||
}
|
||||
|
||||
fn extract_attribute_arg_by_name(name: &str, attrs: &Vec<Attribute>) -> Option<String> {
|
||||
use syn::{Meta, Lit, MetaNameValue};
|
||||
find_attr_by_name(name, attrs)
|
||||
.and_then(|attr| {
|
||||
match attr.interpret_meta() {
|
||||
Some(Meta::NameValue(MetaNameValue { lit: Lit::Str(litstr), .. })) => Some(litstr.value()),
|
||||
_ => None,
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fn extract_attribute_list(name: &str, attrs: &Vec<Attribute>) -> Option<Vec<(Ident, Option<Vec<Ident>>)>> {
|
||||
use syn::{Meta, MetaList, NestedMeta};
|
||||
find_attr_by_name(name, attrs)
|
||||
.and_then(|attr| {
|
||||
match attr.interpret_meta() {
|
||||
Some(Meta::List(MetaList { nested, .. })) => {
|
||||
Some(nested.iter().map(|nested_meta| match nested_meta {
|
||||
&NestedMeta::Meta(Meta::Word(ref ident)) => (ident.clone(), None),
|
||||
&NestedMeta::Meta(Meta::List(MetaList { ref ident, nested: ref nested2, .. })) => {
|
||||
let own_args = nested2.iter().map(|nested_meta2| match nested_meta2 {
|
||||
&NestedMeta::Meta(Meta::Word(ref ident)) => ident.clone(),
|
||||
_ => panic!("Bad format for doubly-nested attribute list")
|
||||
}).collect();
|
||||
(ident.clone(), Some(own_args))
|
||||
},
|
||||
_ => panic!("Bad format for nested list")
|
||||
}).collect())
|
||||
},
|
||||
_ => panic!("{} must be a comma-delimited list surrounded by parens", name)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fn get_attribute_identifier(attr_name: &str, attrs: &Vec<Attribute>) -> Option<proc_macro2::Ident> {
|
||||
find_attr_by_name(attr_name, attrs).and_then(|attr| {
|
||||
let tts = attr.tts.clone().into_iter().collect::<Vec<_>>();
|
||||
|
||||
if tts.len() == 2 {
|
||||
let ref after_equals: proc_macro2::TokenTree = tts[1];
|
||||
match after_equals {
proc_macro2::TokenTree::Ident(ident) => Some(ident.clone()),
_ => None
}
} else {
None
}
})
}

/* a pass_chain function signature with input A and output B looks like:
* fn(A, &mut ProgrammingLanguageInterface, Option<&mut DebugHandler>) -> Result<B, String>
*
* TODO use some kind of failure-handling library to make this better
*/
fn generate_pass_chain(idents: Vec<Ident>) -> proc_macro2::TokenStream {
let final_return = quote! {
{
let final_output: FinishedComputation = unfinished_computation.finish(Ok(input_to_next_stage));
final_output
}
};

let nested_passes = idents.iter()
.rev()
.fold(final_return, |later_fragment, pass_name| {
quote! {
{
let pass_name = stringify!(#pass_name);
let (output, duration) = {
let ref debug_map = eval_options.debug_passes;
let debug_handle = match debug_map.get(pass_name) {
Some(PassDebugOptionsDescriptor { opts }) => {
let ptr = &mut unfinished_computation;
ptr.cur_debug_options = opts.clone();
Some(ptr)
}
_ => None
};
let start = time::Instant::now();
let pass_output = #pass_name(input_to_next_stage, self, debug_handle);
let elapsed = start.elapsed();
(pass_output, elapsed)
};
if eval_options.debug_timing {
unfinished_computation.durations.push(duration);
}
match output {
Ok(input_to_next_stage) => #later_fragment,
//TODO this error type needs to be guaranteed to provide a useable string
Err(err) => return unfinished_computation.output(Err(format!("Pass {} failed:\n{}", pass_name, err))),
}
}
}
});

quote! {
{
use std::time;
use schala_repl::PassDebugOptionsDescriptor;

let eval_options = options;
let input_to_next_stage = input;
let mut unfinished_computation = UnfinishedComputation::default();
#nested_passes
}
}
}

#[proc_macro_derive(ProgrammingLanguageInterface,
attributes(LanguageName, SourceFileExtension, PipelineSteps, DocMethod, HandleCustomInterpreterDirectives))]
pub fn derive_programming_language_interface(input: TokenStream) -> TokenStream {
let ast: DeriveInput = syn::parse(input).unwrap();
let name = &ast.ident;
let attrs = &ast.attrs;

let language_name: String = extract_attribute_arg_by_name("LanguageName", attrs).expect("LanguageName is required");
let file_ext = extract_attribute_arg_by_name("SourceFileExtension", attrs).expect("SourceFileExtension is required");
let passes = extract_attribute_list("PipelineSteps", attrs).expect("PipelineSteps are required");
let pass_idents = passes.iter().map(|x| x.0.clone());

let get_doc_impl = match get_attribute_identifier("DocMethod", attrs) {
None => quote! { },
Some(method_name) => quote! {
fn get_doc(&self, commands: &Vec<&str>) -> Option<String> {
self.#method_name(commands)
}
}
};

let handle_custom_interpreter_directives_impl = match get_attribute_identifier("HandleCustomInterpreterDirectives", attrs) {
None => quote! { },
Some(method_name) => quote! {
fn handle_custom_interpreter_directives(&mut self, commands: &Vec<&str>) -> Option<String> {
//println!("If #method_name is &self not &mut self, this runs forever");
self.#method_name(commands)
}
}
};

let pass_descriptors = passes.iter().map(|pass| {
let name = pass.0.to_string();
let opts: Vec<String> = match &pass.1 {
None => vec![],
Some(opts) => opts.iter().map(|o| o.to_string()).collect(),
};

quote! {
PassDescriptor {
name: #name.to_string(),
debug_options: vec![#(format!(#opts)),*]
}
}
});

let pass_chain = generate_pass_chain(pass_idents.collect());

let tokens = quote! {
use schala_repl::PassDescriptor;
impl ProgrammingLanguageInterface for #name {
fn get_language_name(&self) -> String {
#language_name.to_string()
}
fn get_source_file_suffix(&self) -> String {
#file_ext.to_string()
}
fn execute_pipeline(&mut self, input: &str, options: &EvalOptions) -> FinishedComputation {
#pass_chain
}
fn get_passes(&self) -> Vec<PassDescriptor> {
vec![ #(#pass_descriptors),* ]
}
#get_doc_impl
#handle_custom_interpreter_directives_impl
}
};

let output: TokenStream = tokens.into();
output
}
@@ -2,20 +2,19 @@
name = "schala-repl"
version = "0.1.0"
authors = ["greg <greg.shuflin@protonmail.com>"]
edition = "2018"

[dependencies]
llvm-sys = "70.0.2"
take_mut = "0.2.2"
itertools = "0.5.8"
getopts = "*"
getopts = "0.2.18"
lazy_static = "0.2.8"
maplit = "*"
colored = "1.5"
serde = "1.0.15"
serde_derive = "1.0.15"
serde_json = "1.0.3"
rocket = "0.4.0"
rocket_contrib = "0.4.0"
colored = "1.7"
serde = "1.0.91"
serde_derive = "1.0.91"
serde_json = "1.0.15"
phf = "0.7.12"
includedir = "0.2.0"
linefeed = "0.5.0"

@@ -1,173 +1,71 @@
use std::collections::HashMap;
use colored::*;
use std::fmt::Write;
use std::time;

#[derive(Debug, Default, Serialize, Deserialize)]
pub struct EvalOptions {
pub execution_method: ExecutionMethod,
pub debug_passes: HashMap<String, PassDebugOptionsDescriptor>,
pub debug_timing: bool,
}

#[derive(Debug, Hash, PartialEq)]
pub struct PassDescriptor {
pub name: String,
pub debug_options: Vec<String>
}

#[derive(Debug, Serialize, Deserialize)]
pub struct PassDebugOptionsDescriptor {
pub opts: Vec<String>,
}

#[derive(Debug, Serialize, Deserialize)]
pub enum ExecutionMethod {
Compile,
Interpret,
}
impl Default for ExecutionMethod {
fn default() -> ExecutionMethod {
ExecutionMethod::Interpret
}
}

#[derive(Debug, Default)]
pub struct UnfinishedComputation {
artifacts: Vec<(String, TraceArtifact)>,
pub durations: Vec<time::Duration>,
pub cur_debug_options: Vec<String>,
}

#[derive(Debug)]
pub struct FinishedComputation {
artifacts: Vec<(String, TraceArtifact)>,
durations: Vec<time::Duration>,
text_output: Result<String, String>,
}

impl UnfinishedComputation {
pub fn add_artifact(&mut self, artifact: TraceArtifact) {
self.artifacts.push((artifact.stage_name.clone(), artifact));
}
pub fn finish(self, text_output: Result<String, String>) -> FinishedComputation {
FinishedComputation {
artifacts: self.artifacts,
text_output,
durations: self.durations,
}
}
pub fn output(self, output: Result<String, String>) -> FinishedComputation {
FinishedComputation {
artifacts: self.artifacts,
text_output: output,
durations: self.durations,
}
}
}

impl FinishedComputation {

fn get_timing(&self) -> Option<String> {
if self.durations.len() != 0 {
let mut buf = String::new();
write!(&mut buf, "Timing: ").unwrap();
for duration in self.durations.iter() {
let timing = (duration.as_secs() as f64) + (duration.subsec_nanos() as f64 * 1e-9);
write!(&mut buf, "{}s, ", timing).unwrap()
}
write!(&mut buf, "\n").unwrap();
Some(buf)
} else {
None
}
}

pub fn to_repl(&self) -> String {
let mut buf = String::new();
for (stage, artifact) in self.artifacts.iter() {
let color = artifact.text_color;
let stage = stage.color(color).bold();
let output = artifact.debug_output.color(color);
write!(&mut buf, "{}: {}\n", stage, output).unwrap();
}

match self.get_timing() {
Some(timing) => write!(&mut buf, "{}", timing).unwrap(),
None => ()
}

match self.text_output {
Ok(ref output) => write!(&mut buf, "{}", output).unwrap(),
Err(ref err) => write!(&mut buf, "{} {}", "Error: ".red().bold(), err).unwrap(),
}
buf
}
pub fn to_noninteractive(&self) -> Option<String> {
match self.text_output {
Ok(_) => {
let mut buf = String::new();
for (stage, artifact) in self.artifacts.iter() {
let color = artifact.text_color;
let stage = stage.color(color).bold();
let output = artifact.debug_output.color(color);
write!(&mut buf, "{}: {}\n", stage, output).unwrap();
}
if buf == "" { None } else { Some(buf) }
},
Err(ref s) => Some(format!("{} {}", "Error: ".red().bold(), s))
}
}
}

#[derive(Debug)]
pub struct TraceArtifact {
stage_name: String,
debug_output: String,
text_color: &'static str,
}

impl TraceArtifact {
pub fn new(stage: &str, debug: String) -> TraceArtifact {
let color = match stage {
"parse_trace" | "ast" => "red",
"ast_reducing" => "red",
"tokens" => "green",
"type_check" => "magenta",
_ => "blue",
};
TraceArtifact { stage_name: stage.to_string(), debug_output: debug, text_color: color}
}

pub fn new_parse_trace(trace: Vec<String>) -> TraceArtifact {
let mut output = String::new();

for t in trace {
output.push_str(&t);
output.push_str("\n");
}

TraceArtifact { stage_name: "parse_trace".to_string(), debug_output: output, text_color: "red"}
}
}
use std::collections::HashSet;

pub trait ProgrammingLanguageInterface {
fn execute_pipeline(&mut self, _input: &str, _eval_options: &EvalOptions) -> FinishedComputation {
FinishedComputation { artifacts: vec![], text_output: Err(format!("Execution pipeline not done")), durations: vec![] }
}

fn get_language_name(&self) -> String;
fn get_source_file_suffix(&self) -> String;
fn get_passes(&self) -> Vec<PassDescriptor> {
vec![]

fn run_computation(&mut self, _request: ComputationRequest) -> ComputationResponse {
ComputationResponse {
main_output: Err(format!("Computation pipeline not implemented")),
global_output_stats: GlobalOutputStats::default(),
debug_responses: vec![],
}
}
fn handle_custom_interpreter_directives(&mut self, _commands: &Vec<&str>) -> Option<String> {
None
}
fn custom_interpreter_directives_help(&self) -> String {
format!(">> No custom interpreter directives specified <<")
}
fn get_doc(&self, _commands: &Vec<&str>) -> Option<String> {
None

fn request_meta(&mut self, _request: LangMetaRequest) -> LangMetaResponse {
LangMetaResponse::Custom { kind: format!("not-implemented"), value: format!("") }
}
}

pub struct ComputationRequest<'a> {
pub source: &'a str,
pub debug_requests: HashSet<DebugAsk>,
}

pub struct ComputationResponse {
pub main_output: Result<String, String>,
pub global_output_stats: GlobalOutputStats,
pub debug_responses: Vec<DebugResponse>,
}

#[derive(Default, Debug)]
pub struct GlobalOutputStats {
pub total_duration: time::Duration,
pub stage_durations: Vec<(String, time::Duration)>
}

#[derive(Debug, Clone, Hash, Eq, PartialEq, Deserialize, Serialize)]
pub enum DebugAsk {
Timing,
ByStage { stage_name: String },
}

pub struct DebugResponse {
pub ask: DebugAsk,
pub value: String
}

pub enum LangMetaRequest {
StageNames,
Docs {
source: String,
},
Custom {
kind: String,
value: String
},
ImmediateDebug(DebugAsk),
}

pub enum LangMetaResponse {
StageNames(Vec<String>),
Docs {
doc_string: String,
},
Custom {
kind: String,
value: String
},
ImmediateDebug(DebugResponse),
}

@@ -9,88 +9,49 @@ extern crate colored;
#[macro_use]
extern crate serde_derive;
extern crate serde_json;
#[macro_use]
extern crate rocket;
extern crate rocket_contrib;
extern crate includedir;
extern crate phf;

use std::collections::HashSet;
use std::path::Path;
use std::fs::File;
use std::io::Read;
use std::process::exit;
use std::default::Default;

mod repl;
mod language;
mod webapp;

const VERSION_STRING: &'static str = "0.1.0";
pub use language::{ProgrammingLanguageInterface,
ComputationRequest, ComputationResponse,
LangMetaRequest, LangMetaResponse,
DebugResponse, DebugAsk, GlobalOutputStats};

include!(concat!(env!("OUT_DIR"), "/static.rs"));
const VERSION_STRING: &'static str = "0.1.0";

pub use language::{ProgrammingLanguageInterface, EvalOptions,
ExecutionMethod, TraceArtifact, FinishedComputation, UnfinishedComputation, PassDebugOptionsDescriptor, PassDescriptor};

pub type PLIGenerator = Box<Fn() -> Box<ProgrammingLanguageInterface> + Send + Sync>;

pub fn repl_main(generators: Vec<PLIGenerator>) {
let languages: Vec<Box<ProgrammingLanguageInterface>> = generators.iter().map(|x| x()).collect();

let option_matches = program_options().parse(std::env::args()).unwrap_or_else(|e| {
pub fn start_repl(langs: Vec<Box<dyn ProgrammingLanguageInterface>>) {
let options = command_line_options().parse(std::env::args()).unwrap_or_else(|e| {
println!("{:?}", e);
exit(1);
});

if option_matches.opt_present("list-languages") {
for lang in languages {
println!("{}", lang.get_language_name());
}
exit(1);
}

if option_matches.opt_present("help") {
println!("{}", program_options().usage("Schala metainterpreter"));
if options.opt_present("help") {
println!("{}", command_line_options().usage("Schala metainterpreter"));
exit(0);
}

if option_matches.opt_present("webapp") {
webapp::web_main(generators);
exit(0);
}

let mut options = EvalOptions::default();
let debug_passes = if let Some(opts) = option_matches.opt_str("debug") {
let output: Vec<String> = opts.split_terminator(",").map(|s| s.to_string()).collect();
output
} else {
vec![]
};

let language_names: Vec<String> = languages.iter().map(|lang| {lang.get_language_name()}).collect();
let initial_index: usize =
option_matches.opt_str("lang")
.and_then(|lang| { language_names.iter().position(|x| { x.to_lowercase() == lang.to_lowercase() }) })
.unwrap_or(0);

options.execution_method = match option_matches.opt_str("eval-style") {
Some(ref s) if s == "compile" => ExecutionMethod::Compile,
_ => ExecutionMethod::Interpret,
};

match option_matches.free[..] {
match options.free[..] {
[] | [_] => {
let mut repl = repl::Repl::new(languages, initial_index);
repl.run();
let mut repl = repl::Repl::new(langs);
repl.run_repl();
}
[_, ref filename, _..] => {

run_noninteractive(filename, languages, options, debug_passes);
run_noninteractive(filename, langs);
}
};
}

fn run_noninteractive(filename: &str, languages: Vec<Box<ProgrammingLanguageInterface>>, mut options: EvalOptions, debug_passes: Vec<String>) {
fn run_noninteractive(filename: &str, languages: Vec<Box<ProgrammingLanguageInterface>>) {
let path = Path::new(filename);
let ext = path.extension().and_then(|e| e.to_str()).unwrap_or_else(|| {
println!("Source file lacks extension");
@@ -104,53 +65,28 @@ fn run_noninteractive(filename: &str, languages: Vec<Box<ProgrammingLanguageInte

let mut source_file = File::open(path).unwrap();
let mut buffer = String::new();

source_file.read_to_string(&mut buffer).unwrap();

for pass in debug_passes.into_iter() {
if let Some(_) = language.get_passes().iter().find(|desc| desc.name == pass) {
options.debug_passes.insert(pass, PassDebugOptionsDescriptor { opts: vec![] });
}
}
let request = ComputationRequest {
source: &buffer,
debug_requests: HashSet::new(),
};

match options.execution_method {
ExecutionMethod::Compile => {
/*
let llvm_bytecode = language.compile(&buffer);
compilation_sequence(llvm_bytecode, filename);
*/
panic!("Not ready to go yet");
},
ExecutionMethod::Interpret => {
let output = language.execute_pipeline(&buffer, &options);
output.to_noninteractive().map(|text| println!("{}", text));
}
}
let response = language.run_computation(request);
match response.main_output {
Ok(s) => println!("{}", s),
Err(s) => println!("{}", s)
};
}

fn program_options() -> getopts::Options {

fn command_line_options() -> getopts::Options {
let mut options = getopts::Options::new();
options.optopt("s",
"eval-style",
"Specify whether to compile (if supported) or interpret the language. If not specified, the default is language-specific",
"[compile|interpret]"
);
options.optflag("",
"list-languages",
"Show a list of all supported languages");
options.optopt("l",
"lang",
"Start up REPL in a language",
"LANGUAGE");
options.optflag("h",
"help",
"Show help text");
options.optflag("w",
"webapp",
"Start up web interpreter");
options.optopt("d",
"debug",
"Debug a stage (l = tokenizer, a = AST, r = parse trace, s = symbol table)",
"[l|a|r|s]");
options
}

@@ -1,53 +0,0 @@

#[derive(Clone)]
pub enum CommandTree {
Terminal {
name: String,
help_msg: Option<String>,
function: Option<Box<(fn() -> Option<String>)>>,
},
NonTerminal {
name: String,
children: Vec<CommandTree>,
help_msg: Option<String>,
function: Option<Box<(fn() -> Option<String>)>>,
},
Top(Vec<CommandTree>),
}

impl CommandTree {
pub fn term(s: &str, help: Option<&str>) -> CommandTree {
CommandTree::Terminal {name: s.to_string(), help_msg: help.map(|x| x.to_string()), function: None }
}

pub fn nonterm(s: &str, help: Option<&str>, children: Vec<CommandTree>) -> CommandTree {
CommandTree::NonTerminal {
name: s.to_string(),
help_msg: help.map(|x| x.to_string()),
children,
function: None,
}
}

pub fn get_cmd(&self) -> &str {
match self {
CommandTree::Terminal { name, .. } => name.as_str(),
CommandTree::NonTerminal {name, ..} => name.as_str(),
CommandTree::Top(_) => "",
}
}
pub fn get_help(&self) -> &str {
match self {
CommandTree::Terminal { help_msg, ..} => help_msg.as_ref().map(|s| s.as_str()).unwrap_or(""),
CommandTree::NonTerminal { help_msg, .. } => help_msg.as_ref().map(|s| s.as_str()).unwrap_or(""),
CommandTree::Top(_) => ""
}
}
pub fn get_children(&self) -> Vec<&str> {
match self {
CommandTree::Terminal { .. } => vec![],
CommandTree::NonTerminal { children, .. } => children.iter().map(|x| x.get_cmd()).collect(),
CommandTree::Top(children) => children.iter().map(|x| x.get_cmd()).collect(),
}
}
}

126 schala-repl/src/repl/directives.rs Normal file
@@ -0,0 +1,126 @@
use std::fmt::Write as FmtWrite;
use itertools::Itertools;

use crate::repl::Repl;
use crate::repl::old_command_tree::OldCommandTree;
use crate::language::{LangMetaRequest, LangMetaResponse, DebugAsk, DebugResponse};

pub fn directives_from_pass_names(pass_names: &Vec<String>) -> OldCommandTree {
let passes_directives: Vec<OldCommandTree> = pass_names.iter()
.map(|pass_name| { OldCommandTree::nonterm_no_further_tab_completions(pass_name, None) })
.collect();

OldCommandTree::Top(vec![
OldCommandTree::terminal("exit", Some("exit the REPL"), vec![], Box::new(|repl: &mut Repl, _cmds: &[&str]| {
repl.save_before_exit();
::std::process::exit(0)
})),
OldCommandTree::terminal("quit", Some("exit the REPL"), vec![], Box::new(|repl: &mut Repl, _cmds: &[&str]| {
repl.save_before_exit();
::std::process::exit(0)
})),
OldCommandTree::terminal("help", Some("Print this help message"), vec![], Box::new(|repl: &mut Repl, cmds: &[&str]| {
Some(repl.print_help_message(cmds))
})),
OldCommandTree::nonterm("debug",
Some("Configure debug information"),
vec![
OldCommandTree::terminal("list-passes", Some("List all registered compiler passes"), vec![], Box::new(|repl: &mut Repl, _cmds: &[&str]| {
let language_state = repl.get_cur_language_state();
let pass_names = match language_state.request_meta(LangMetaRequest::StageNames) {
LangMetaResponse::StageNames(names) => names,
_ => vec![],
};

let mut buf = String::new();
for pass in pass_names.iter().map(|name| Some(name)).intersperse(None) {
match pass {
Some(pass) => write!(buf, "{}", pass).unwrap(),
None => write!(buf, " -> ").unwrap(),
}
}
Some(buf)
})),
OldCommandTree::terminal("show-immediate", None, passes_directives.clone(),
Box::new(|repl: &mut Repl, cmds: &[&str]| {
let cur_state = repl.get_cur_language_state();
let stage_name = match cmds.get(1) {
Some(s) => s.to_string(),
None => return Some(format!("Must specify a thing to debug")),
};
let meta = LangMetaRequest::ImmediateDebug(DebugAsk::ByStage { stage_name: stage_name.clone() });

let response = match cur_state.request_meta(meta) {
LangMetaResponse::ImmediateDebug(DebugResponse { ask, value }) => {
if (ask != DebugAsk::ByStage { stage_name: stage_name }) {
return Some(format!("Didn't get debug stage requested"));
}
value
},
_ => return Some(format!("Invalid language meta response")),
};
Some(response)
})),
OldCommandTree::terminal("show", None, passes_directives.clone(), Box::new(|repl: &mut Repl, cmds: &[&str]| {
let stage_name = match cmds.get(0) {
Some(s) => s.to_string(),
None => return Some(format!("Must specify a stage to show")),
};
let ask = DebugAsk::ByStage { stage_name };
repl.options.debug_asks.insert(ask);
None
})),
OldCommandTree::terminal("hide", None, passes_directives.clone(), Box::new(|repl: &mut Repl, cmds: &[&str]| {
let stage_name = match cmds.get(0) {
Some(s) => s.to_string(),
None => return Some(format!("Must specify a stage to hide")),
};
let ask = DebugAsk::ByStage { stage_name };
repl.options.debug_asks.remove(&ask);
None
})),
OldCommandTree::nonterm("total-time", None, vec![
OldCommandTree::terminal("on", None, vec![], Box::new(|repl: &mut Repl, _: &[&str]| {
repl.options.show_total_time = true;
None
})),
OldCommandTree::terminal("off", None, vec![], Box::new(turn_off)),
]),
OldCommandTree::nonterm("stage-times", Some("Computation time per-stage"), vec![
OldCommandTree::terminal("on", None, vec![], Box::new(|repl: &mut Repl, _: &[&str]| {
repl.options.show_stage_times = true;
None
})),
OldCommandTree::terminal("off", None, vec![], Box::new(|repl: &mut Repl, _: &[&str]| {
repl.options.show_stage_times = false;
None
})),
])
]
),
OldCommandTree::nonterm("lang",
Some("switch between languages, or go directly to a langauge by name"),
vec![
OldCommandTree::nonterm_no_further_tab_completions("next", None),
OldCommandTree::nonterm_no_further_tab_completions("prev", None),
OldCommandTree::nonterm("go", None, vec![]),
]
),
OldCommandTree::terminal("doc", Some("Get language-specific help for an item"), vec![], Box::new(|repl: &mut Repl, cmds: &[&str]| {
cmds.get(0).map(|cmd| {
let source = cmd.to_string();
let meta = LangMetaRequest::Docs { source };
let cur_state = repl.get_cur_language_state();
match cur_state.request_meta(meta) {
LangMetaResponse::Docs { doc_string } => Some(doc_string),
_ => Some(format!("Invalid doc response"))
}
}).unwrap_or(Some(format!(":docs needs an argument")))
}))
])
}

fn turn_off(repl: &mut Repl, _cmds: &[&str]) -> Option<String> {
repl.options.show_total_time = false;
None
}

@@ -1,191 +1,130 @@
use std::fmt::Write as FmtWrite;
use std::io::{Read, Write};
use std::fs::File;
use std::sync::Arc;
use std::collections::HashSet;

use colored::*;
use itertools::Itertools;
use language::{ProgrammingLanguageInterface, EvalOptions,
PassDebugOptionsDescriptor};
mod command_tree;
use self::command_tree::CommandTree;
use crate::language::{ProgrammingLanguageInterface,
ComputationRequest, ComputationResponse,
DebugAsk, LangMetaResponse, LangMetaRequest};

mod old_command_tree;
use self::old_command_tree::{OldCommandTree, BoxedCommandFunction};
mod repl_options;
use repl_options::ReplOptions;
mod directives;
use directives::directives_from_pass_names;

const HISTORY_SAVE_FILE: &'static str = ".schala_history";
const OPTIONS_SAVE_FILE: &'static str = ".schala_repl";

pub struct Repl {
options: EvalOptions,
languages: Vec<Box<ProgrammingLanguageInterface>>,
current_language_index: usize,
interpreter_directive_sigil: char,
line_reader: ::linefeed::interface::Interface<::linefeed::terminal::DefaultTerminal>,
language_states: Vec<Box<ProgrammingLanguageInterface>>,
options: ReplOptions,
directives: OldCommandTree,
}

impl Repl {
pub fn new(languages: Vec<Box<ProgrammingLanguageInterface>>, initial_index: usize) -> Repl {
pub fn new(mut initial_states: Vec<Box<ProgrammingLanguageInterface>>) -> Repl {
use linefeed::Interface;
let current_language_index = if initial_index < languages.len() { initial_index } else { 0 };

let line_reader = Interface::new("schala-repl").unwrap();
let interpreter_directive_sigil = ':';

let pass_names = match initial_states[0].request_meta(LangMetaRequest::StageNames) {
LangMetaResponse::StageNames(names) => names,
_ => vec![],
};

Repl {
options: Repl::get_options(),
languages,
current_language_index,
interpreter_directive_sigil: ':',
interpreter_directive_sigil,
line_reader,
language_states: initial_states,
options: ReplOptions::new(),
directives: directives_from_pass_names(&pass_names)
}
}

fn get_cur_language(&self) -> &ProgrammingLanguageInterface {
self.languages[self.current_language_index].as_ref()
}

fn get_options() -> EvalOptions {
File::open(OPTIONS_SAVE_FILE)
.and_then(|mut file| {
let mut contents = String::new();
file.read_to_string(&mut contents)?;
Ok(contents)
})
.and_then(|contents| {
let options: EvalOptions = ::serde_json::from_str(&contents)?;
Ok(options)
}).unwrap_or(EvalOptions::default())
}

fn save_options(&self) {
let ref options = self.options;
let read = File::create(OPTIONS_SAVE_FILE)
.and_then(|mut file| {
let buf = ::serde_json::to_string(options).unwrap();
file.write_all(buf.as_bytes())
});

if let Err(err) = read {
println!("Error saving {} file {}", OPTIONS_SAVE_FILE, err);
}
}

pub fn run(&mut self) {
use linefeed::ReadResult;

println!("Schala MetaInterpreter version {}", ::VERSION_STRING);
pub fn run_repl(&mut self) {
println!("Schala MetaInterpreter version {}", crate::VERSION_STRING);
println!("Type {}help for help with the REPL", self.interpreter_directive_sigil);

self.line_reader.load_history(HISTORY_SAVE_FILE).unwrap_or(());

loop {
let language_name = self.get_cur_language().get_language_name();
let directives = self.get_directives();
let tab_complete_handler = TabCompleteHandler::new(self.interpreter_directive_sigil, directives);
self.line_reader.set_completer(Arc::new(tab_complete_handler));

let prompt_str = format!("{} >> ", language_name);
self.line_reader.set_prompt(&prompt_str).unwrap();

match self.line_reader.read_line() {
Err(e) => {
println!("Terminal read error: {}", e);
},
Ok(ReadResult::Eof) => break,
Ok(ReadResult::Signal(_)) => break,
Ok(ReadResult::Input(input)) => self.input_loop(input),
}
}
self.line_reader.save_history(HISTORY_SAVE_FILE).unwrap_or(());
self.save_options();
self.load_options();
self.handle_repl_loop();
self.save_before_exit();
println!("Exiting...");
}

fn input_loop(&mut self, input: String) {
use linefeed::ReadResult;
if input == "" {
return;
}
fn load_options(&mut self) {
self.line_reader.load_history(HISTORY_SAVE_FILE).unwrap_or(());
match ReplOptions::load_from_file(OPTIONS_SAVE_FILE) {
Ok(options) => {
self.options = options;
},
Err(()) => ()
};
}

if input.chars().nth(0).unwrap() == self.interpreter_directive_sigil {
if let Some(output) = self.handle_interpreter_directive(&input) {
println!("{}", output);
}
return;
}

let mut lines = input;
self.line_reader.set_prompt("> ").unwrap();
fn handle_repl_loop(&mut self) {
use linefeed::ReadResult::*;

loop {
self.update_line_reader();
match self.line_reader.read_line() {
Err(e) => {
println!("Terminal read error: {}", e);
return;
println!("readline IO Error: {}", e);
break;
},
Ok(ReadResult::Eof) => break,
Ok(ReadResult::Signal(_)) => break,
Ok(ReadResult::Input(input)) => {
lines.push('\n'); //TODO not sure if this is needed?
lines.push_str(&input);
Ok(Eof) | Ok(Signal(_)) => break,
Ok(Input(ref input)) => {
self.line_reader.add_history_unique(input.to_string());
let output = match input.chars().nth(0) {
Some(ch) if ch == self.interpreter_directive_sigil => self.handle_interpreter_directive(input),
_ => Some(self.handle_input(input)),
};
if let Some(o) = output {
println!("=> {}", o);
}
}
}
}

self.line_reader.add_history_unique(lines.clone());
let output = self.input_handler(&lines);
println!("=> {}", output);
}

fn input_handler(&mut self, input: &str) -> String {
let ref mut language = self.languages[self.current_language_index];
let interpreter_output = language.execute_pipeline(input, &self.options);
interpreter_output.to_repl()
fn update_line_reader(&mut self) {
let tab_complete_handler = TabCompleteHandler::new(self.interpreter_directive_sigil, self.get_directives());
self.line_reader.set_completer(Arc::new(tab_complete_handler)); //TODO fix this here
let prompt_str = format!(">> ");
self.line_reader.set_prompt(&prompt_str).unwrap();
}

fn get_directives(&self) -> CommandTree {
let ref passes = self.get_cur_language().get_passes();
fn save_before_exit(&self) {
self.line_reader.save_history(HISTORY_SAVE_FILE).unwrap_or(());
self.options.save_to_file(OPTIONS_SAVE_FILE);
}

let passes_directives: Vec<CommandTree> = passes.iter()
.map(|pass_descriptor| {
let name = &pass_descriptor.name;
if pass_descriptor.debug_options.len() == 0 {
CommandTree::term(name, None)
} else {
let children: Vec<CommandTree> = pass_descriptor.debug_options.iter()
.map(|o| CommandTree::term(o, None)).collect();
CommandTree::NonTerminal {
name: name.clone(),
children,
help_msg: None,
function: None,
}
}
}).collect();
fn get_function_from_directives<'a>(directives: &'a OldCommandTree, commands: &Vec<&str>) -> Result<(&'a BoxedCommandFunction, usize), String> {
let mut dir_pointer: &OldCommandTree = &directives;
let mut idx = 0;

CommandTree::Top(vec![
CommandTree::term("exit", Some("exit the REPL")),
CommandTree::term("quit", Some("exit the REPL")),
CommandTree::term("help", Some("Print this help message")),
CommandTree::nonterm("debug",
Some("show or hide pass debug info for a given pass, or display the names of all passes, or turn timing on/off"),
vec![
CommandTree::term("passes", None),
CommandTree::nonterm("show", None, passes_directives.clone()),
CommandTree::nonterm("hide", None, passes_directives.clone()),
CommandTree::nonterm("timing", None, vec![
CommandTree::term("on", None),
CommandTree::term("off", None),
])
]
),
CommandTree::nonterm("lang",
Some("switch between languages, or go directly to a langauge by name"),
vec![
CommandTree::term("next", None),
CommandTree::term("prev", None),
CommandTree::nonterm("go", None, vec![]),
]
),
CommandTree::term("doc", Some("Get language-specific help for an item")),
])
loop {
match dir_pointer {
OldCommandTree::Top(subcommands) | OldCommandTree::NonTerminal { children: subcommands, .. } => {
let next_command = match commands.get(idx) {
Some(cmd) => cmd,
None => break Err(format!("Command requires arguments"))
};
idx += 1;
match subcommands.iter().find(|sc| sc.get_cmd() == *next_command) {
Some(command_tree) => {
dir_pointer = command_tree;
},
None => break Err(format!("Command {} not found", next_command))
};
},
OldCommandTree::Terminal { function, .. } => {
break Ok((function, idx));
},
}
}
}

fn handle_interpreter_directive(&mut self, input: &str) -> Option<String> {
@@ -196,56 +135,27 @@ impl Repl {
.split_whitespace()
.collect();

let initial_cmd: &str = match commands.get(0).clone() {
None => return None,
Some(s) => s
if commands.len() < 1 {
return None;
}

let directives = self.get_directives();
let result: Result<(&BoxedCommandFunction, _), String> = Repl::get_function_from_directives(&directives, &commands);
match result {
Ok((f, idx)) => f(self, &commands[idx..]),
Err(err) => Some(err.red().to_string())
}
}

fn print_help_message(&mut self, commands_passed_to_help: &[&str] ) -> String {
let mut buf = String::new();
let directives = match self.get_directives() {
OldCommandTree::Top(children) => children,
_ => panic!("Top-level OldCommandTree not Top")
};

match initial_cmd {
"exit" | "quit" => {
self.save_options();
::std::process::exit(0)
},
"lang" | "language" => match commands.get(1) {
Some(&"show") => {
let mut buf = String::new();
for (i, lang) in self.languages.iter().enumerate() {
write!(buf, "{}{}\n", if i == self.current_language_index { "* "} else { "" }, lang.get_language_name()).unwrap();
}
Some(buf)
},
Some(&"go") => match commands.get(2) {
None => Some(format!("Must specify a language name")),
Some(&desired_name) => {
for (i, _) in self.languages.iter().enumerate() {
let lang_name = self.languages[i].get_language_name();
if lang_name.to_lowercase() == desired_name.to_lowercase() {
self.current_language_index = i;
return Some(format!("Switching to {}", self.languages[self.current_language_index].get_language_name()));
}
}
Some(format!("Language {} not found", desired_name))
}
},
Some(&"next") | Some(&"n") => {
self.current_language_index = (self.current_language_index + 1) % self.languages.len();
Some(format!("Switching to {}", self.languages[self.current_language_index].get_language_name()))
},
Some(&"previous") | Some(&"p") | Some(&"prev") => {
self.current_language_index = if self.current_language_index == 0 { self.languages.len() - 1 } else { self.current_language_index - 1 };
Some(format!("Switching to {}", self.languages[self.current_language_index].get_language_name()))
},
Some(e) => Some(format!("Bad `lang(uage)` argument: {}", e)),
None => Some(format!("Valid arguments for `lang(uage)` are `show`, `next`|`n`, `previous`|`prev`|`n`"))
},
"help" => {
let mut buf = String::new();
let ref lang = self.languages[self.current_language_index];
let directives = match self.get_directives() {
CommandTree::Top(children) => children,
_ => panic!("Top-level CommandTree not Top")
};

match commands_passed_to_help {
[] => {
writeln!(buf, "MetaInterpreter options").unwrap();
writeln!(buf, "-----------------------").unwrap();

@@ -254,81 +164,91 @@ impl Repl {
writeln!(buf, "{}{}- {}", directive.get_cmd(), trailer, directive.get_help()).unwrap();
}

let ref lang = self.get_cur_language_state();
writeln!(buf, "").unwrap();
writeln!(buf, "Language-specific help for {}", lang.get_language_name()).unwrap();
writeln!(buf, "-----------------------").unwrap();
writeln!(buf, "{}", lang.custom_interpreter_directives_help()).unwrap();
Some(buf)
},
"debug" => self.handle_debug(commands),
"doc" => self.languages[self.current_language_index]
.get_doc(&commands)
.or(Some(format!("No docs implemented"))),
e => {
self.languages[self.current_language_index]
.handle_custom_interpreter_directives(&commands)
.or(Some(format!("Unknown command: {}", e)))
_ => {
writeln!(buf, "Command-specific help not available yet").unwrap();
}
}
};
buf
}
fn handle_debug(&mut self, commands: Vec<&str>) -> Option<String> {
let passes = self.get_cur_language().get_passes();
match commands.get(1) {
Some(&"timing") => match commands.get(2) {
Some(&"on") => { self.options.debug_timing = true; None }
Some(&"off") => { self.options.debug_timing = false; None }
_ => return Some(format!(r#"Argument to "timing" must be "on" or "off""#)),
},
Some(&"passes") => Some(
passes.into_iter()
.map(|desc| {
if self.options.debug_passes.contains_key(&desc.name) {
let color = "green";
format!("*{}", desc.name.color(color))
} else {
desc.name
}
})
.intersperse(format!(" -> "))
.collect()),
b @ Some(&"show") | b @ Some(&"hide") => {
let show = b == Some(&"show");
let debug_pass: String = match commands.get(2) {
Some(s) => s.to_string(),
None => return Some(format!("Must specify a stage to debug")),
};
let pass_opt = commands.get(3);
if let Some(desc) = passes.iter().find(|desc| desc.name == debug_pass) {
let mut opts = vec![];
if let Some(opt) = pass_opt {
opts.push(opt.to_string());
}
let msg = format!("{} debug for pass {}", if show { "Enabling" } else { "Disabling" }, debug_pass);
if show {
self.options.debug_passes.insert(desc.name.clone(), PassDebugOptionsDescriptor { opts });
} else {
self.options.debug_passes.remove(&desc.name);
}
Some(msg)
} else {
Some(format!("Couldn't find stage: {}", debug_pass))
}
},
_ => Some(format!("Unknown debug command"))

fn get_cur_language_state(&mut self) -> &mut Box<ProgrammingLanguageInterface> {
//TODO this is obviously not complete
&mut self.language_states[0]
}

fn handle_input(&mut self, input: &str) -> String {
let mut debug_requests = HashSet::new();
for ask in self.options.debug_asks.iter() {
debug_requests.insert(ask.clone());
}

let request = ComputationRequest {
source: input,
debug_requests,
};

let ref mut language_state = self.get_cur_language_state();
let response = language_state.run_computation(request);

self.handle_computation_response(response)
}

fn handle_computation_response(&mut self, response: ComputationResponse) -> String {
let mut buf = String::new();

if self.options.show_total_time {
buf.push_str(&format!("Total duration: {:?}\n", response.global_output_stats.total_duration));
}

if self.options.show_stage_times {
buf.push_str(&format!("{:?}\n", response.global_output_stats.stage_durations));
}

for debug_resp in response.debug_responses {
let stage_name = match debug_resp.ask {
DebugAsk::ByStage { stage_name } => stage_name,
_ => continue,
};
let s = format!("{} - {}\n", stage_name, debug_resp.value);
buf.push_str(&s);
}

buf.push_str(&match response.main_output {
Ok(s) => s,
Err(e) => format!("{} {}", "Error".red(), e)
});

buf
}

fn get_directives(&mut self) -> OldCommandTree {
let language_state = self.get_cur_language_state();
let pass_names = match language_state.request_meta(LangMetaRequest::StageNames) {
LangMetaResponse::StageNames(names) => names,
_ => vec![],
};

directives_from_pass_names(&pass_names)
}
}

struct TabCompleteHandler {
sigil: char,
top_level_commands: CommandTree,
top_level_commands: OldCommandTree,
}

use linefeed::complete::{Completion, Completer};
use linefeed::terminal::Terminal;

impl TabCompleteHandler {
fn new(sigil: char, top_level_commands: CommandTree) -> TabCompleteHandler {
fn new(sigil: char, top_level_commands: OldCommandTree) -> TabCompleteHandler {
TabCompleteHandler {
top_level_commands,
sigil,
@@ -340,43 +260,43 @@ impl<T: Terminal> Completer<T> for TabCompleteHandler {
fn complete(&self, word: &str, prompter: &::linefeed::prompter::Prompter<T>, start: usize, _end: usize) -> Option<Vec<Completion>> {
let line = prompter.buffer();

if line.starts_with(&format!("{}", self.sigil)) {
let mut words = line[1..(if start == 0 { 1 } else { start })].split_whitespace();
let mut completions = Vec::new();
let mut command_tree: Option<&CommandTree> = Some(&self.top_level_commands);
if !line.starts_with(self.sigil) {
return None;
}

loop {
match words.next() {
None => {
let top = match command_tree {
Some(CommandTree::Top(_)) => true,
_ => false
};
let word = if top { word.get(1..).unwrap() } else { word };
for cmd in command_tree.map(|x| x.get_children()).unwrap_or(vec![]).into_iter() {
if cmd.starts_with(word) {
completions.push(Completion {
completion: format!("{}{}", if top { ":" } else { "" }, cmd),
display: Some(cmd.to_string()),
suffix: ::linefeed::complete::Suffix::Some(' ')
})
}
let mut words = line[1..(if start == 0 { 1 } else { start })].split_whitespace();
let mut completions = Vec::new();
let mut command_tree: Option<&OldCommandTree> = Some(&self.top_level_commands);

loop {
match words.next() {
None => {
let top = match command_tree {
Some(OldCommandTree::Top(_)) => true,
_ => false
};
let word = if top { word.get(1..).unwrap() } else { word };
for cmd in command_tree.map(|x| x.get_children()).unwrap_or(vec![]).into_iter() {
if cmd.starts_with(word) {
completions.push(Completion {
completion: format!("{}{}", if top { ":" } else { "" }, cmd),
display: Some(cmd.to_string()),
suffix: ::linefeed::complete::Suffix::Some(' ')
})
}
break;
},
Some(s) => {
let new_ptr: Option<&CommandTree> = command_tree.and_then(|cm| match cm {
CommandTree::Top(children) => children.iter().find(|c| c.get_cmd() == s),
CommandTree::NonTerminal { children, .. } => children.iter().find(|c| c.get_cmd() == s),
CommandTree::Terminal { .. } => None,
});
command_tree = new_ptr;
}
break;
},
Some(s) => {
let new_ptr: Option<&OldCommandTree> = command_tree.and_then(|cm| match cm {
OldCommandTree::Top(children) => children.iter().find(|c| c.get_cmd() == s),
OldCommandTree::NonTerminal { children, .. } => children.iter().find(|c| c.get_cmd() == s),
OldCommandTree::Terminal { children, .. } => children.iter().find(|c| c.get_cmd() == s),
});
command_tree = new_ptr;
}
}
Some(completions)
} else {
None
}
Some(completions)
}
}

73 schala-repl/src/repl/old_command_tree.rs Normal file
@@ -0,0 +1,73 @@
use super::Repl;

pub type BoxedCommandFunction = Box<(fn(&mut Repl, &[&str]) -> Option<String>)>;

/// A OldCommandTree is either a `Terminal` or a `NonTerminal`. When command parsing reaches the first
/// Terminal, it will execute the `BoxedCommandFunction` found there with any remaining arguments
#[derive(Clone)]
pub enum OldCommandTree {
Terminal {
name: String,
children: Vec<OldCommandTree>,
help_msg: Option<String>,
function: BoxedCommandFunction,
},
NonTerminal {
name: String,
children: Vec<OldCommandTree>,
help_msg: Option<String>,
},
Top(Vec<OldCommandTree>),
}

impl OldCommandTree {
pub fn nonterm_no_further_tab_completions(s: &str, help: Option<&str>) -> OldCommandTree {
OldCommandTree::NonTerminal {name: s.to_string(), help_msg: help.map(|x| x.to_string()), children: vec![] }
}

pub fn terminal(s: &str, help: Option<&str>, children: Vec<OldCommandTree>, function: BoxedCommandFunction) -> OldCommandTree {
OldCommandTree::Terminal {name: s.to_string(), help_msg: help.map(|x| x.to_string()), function, children }
}

pub fn nonterm(s: &str, help: Option<&str>, children: Vec<OldCommandTree>) -> OldCommandTree {
OldCommandTree::NonTerminal {
name: s.to_string(),
help_msg: help.map(|x| x.to_string()),
children,
}
}

/*
pub fn nonterm_with_function(s: &str, help: Option<&str>, children: Vec<OldCommandTree>, func: BoxedCommandFunction) -> OldCommandTree {
OldCommandTree::NonTerminal {
name: s.to_string(),
help_msg: help.map(|x| x.to_string()),
children,
function: Some(func),
}
}
*/

pub fn get_cmd(&self) -> &str {
match self {
OldCommandTree::Terminal { name, .. } => name.as_str(),
OldCommandTree::NonTerminal {name, ..} => name.as_str(),
OldCommandTree::Top(_) => "",
}
}
pub fn get_help(&self) -> &str {
match self {
OldCommandTree::Terminal { help_msg, ..} => help_msg.as_ref().map(|s| s.as_str()).unwrap_or(""),
OldCommandTree::NonTerminal { help_msg, .. } => help_msg.as_ref().map(|s| s.as_str()).unwrap_or(""),
OldCommandTree::Top(_) => ""
}
}
pub fn get_children(&self) -> Vec<&str> {
use OldCommandTree::*;
match self {
Terminal { children, .. } |
NonTerminal { children, .. } |
Top(children) => children.iter().map(|x| x.get_cmd()).collect()
}
}
}
47 schala-repl/src/repl/repl_options.rs Normal file
@@ -0,0 +1,47 @@
use crate::language::DebugAsk;

use std::io::{Read, Write};
use std::collections::HashSet;
use std::fs::File;

#[derive(Serialize, Deserialize)]
pub struct ReplOptions {
pub debug_asks: HashSet<DebugAsk>,
pub show_total_time: bool,
pub show_stage_times: bool,
}

impl ReplOptions {
pub fn new() -> ReplOptions {
ReplOptions {
debug_asks: HashSet::new(),
show_total_time: true,
show_stage_times: false,
}
}

pub fn save_to_file(&self, filename: &str) {
let res = File::create(filename)
.and_then(|mut file| {
let buf = crate::serde_json::to_string(self).unwrap();
file.write_all(buf.as_bytes())
});
if let Err(err) = res {
println!("Error saving {} file {}", filename, err);
}
}

pub fn load_from_file(filename: &str) -> Result<ReplOptions, ()> {
File::open(filename)
.and_then(|mut file| {
let mut contents = String::new();
file.read_to_string(&mut contents)?;
Ok(contents)
})
.and_then(|contents| {
let output: ReplOptions = crate::serde_json::from_str(&contents)?;
Ok(output)
})
.map_err(|_| ())
}
}
@@ -1,44 +0,0 @@
use rocket;
use rocket::State;
use rocket::response::Content;
use rocket::http::ContentType;
use rocket_contrib::json::Json;
use language::{ProgrammingLanguageInterface, EvalOptions};
use WEBFILES;
use ::PLIGenerator;

#[get("/")]
fn index() -> Content<String> {
let path = "static/index.html";
let html_contents = String::from_utf8(WEBFILES.get(path).unwrap().into_owned()).unwrap();
Content(ContentType::HTML, html_contents)
}

#[get("/bundle.js")]
fn js_bundle() -> Content<String> {
let path = "static/bundle.js";
let js_contents = String::from_utf8(WEBFILES.get(path).unwrap().into_owned()).unwrap();
Content(ContentType::JavaScript, js_contents)
}

#[derive(Debug, Serialize, Deserialize)]
struct Input {
source: String,
}

#[derive(Serialize, Deserialize)]
struct Output {
text: String,
}

#[post("/input", format = "application/json", data = "<input>")]
fn interpreter_input(input: Json<Input>, generators: State<Vec<PLIGenerator>>) -> Json<Output> {
let schala_gen = generators.get(0).unwrap();
let mut schala: Box<ProgrammingLanguageInterface> = schala_gen();
let code_output = schala.execute_pipeline(&input.source, &EvalOptions::default());
Json(Output { text: code_output.to_repl() })
}

pub fn web_main(language_generators: Vec<PLIGenerator>) {
rocket::ignite().manage(language_generators).mount("/", routes![index, js_bundle, interpreter_input]).launch();
}
17 src/main.rs
@@ -1,20 +1,15 @@
extern crate schala_repl;

extern crate maaru_lang;
extern crate rukka_lang;
extern crate robo_lang;
//extern crate maaru_lang;
//extern crate rukka_lang;
//extern crate robo_lang;
extern crate schala_lang;
use schala_repl::{PLIGenerator, repl_main};
use schala_repl::{ProgrammingLanguageInterface, start_repl};

extern { }

fn main() {
let generators: Vec<PLIGenerator> = vec![
Box::new(|| { Box::new(schala_lang::Schala::new())}),
Box::new(|| { Box::new(maaru_lang::Maaru::new())}),
Box::new(|| { Box::new(robo_lang::Robo::new())}),
Box::new(|| { Box::new(rukka_lang::Rukka::new())}),
];
repl_main(generators);
let langs: Vec<Box<ProgrammingLanguageInterface>> = vec![Box::new(schala_lang::Schala::new())];
start_repl(langs);
}