// schala/schala-lang/src/lib.rs
#![feature(slice_patterns, box_patterns, box_syntax)]
#![feature(proc_macro)]
// history: 2018-03-23 18:43:43 -07:00
extern crate itertools;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate maplit;
extern crate schala_repl;
extern crate schala_codegen;
// history: 2018-03-23 18:43:43 -07:00
// history: 2018-04-28 00:08:16 -07:00
use std::collections::HashMap;
use itertools::Itertools;
// history: 2018-03-20 21:17:46 -07:00
use schala_repl::{ProgrammingLanguageInterface, EvalOptions, TraceArtifact, UnfinishedComputation, FinishedComputation};
// Shorthand for boxing an expression: `bx!(e)` expands to `Box::new(e)`.
macro_rules! bx {
($e:expr) => { Box::new($e) }
}
mod builtin;
mod tokenizing;
mod parsing;
// history: 2018-02-21 02:31:28 -08:00
mod typechecking;
mod eval;
// history: 2018-02-21 02:31:28 -08:00
use self::typechecking::{TypeContext};
// history: 2018-04-28 00:08:16 -07:00
/* TODO eventually custom-derive ProgrammingLanguageInterface with compiler passes as options */
/// Top-level driver for the Schala language: bundles the evaluator state and
/// the typechecking context that persist across successive REPL inputs.
pub struct Schala {
// Interpreter state used by `execute`; declared `'static` here —
// NOTE(review): presumably because it owns its environment; confirm.
state: eval::State<'static>,
// Accumulates top-level type information across inputs.
type_context: TypeContext
}
impl Schala {
    /// Build a fresh language instance: a new evaluator state and an empty
    /// type context.
    pub fn new() -> Schala {
        let state = eval::State::new();
        let type_context = TypeContext::new();
        Schala { state, type_context }
    }
}
impl ProgrammingLanguageInterface for Schala {
2018-04-28 00:08:16 -07:00
2018-04-28 15:35:04 -07:00
schala_codegen::compiler_pass_sequence!(["tokenize", "parse", "yolo"]);
2018-04-28 00:08:16 -07:00
fn get_language_name(&self) -> String {
"Schala".to_string()
}
fn get_source_file_suffix(&self) -> String {
format!("schala")
}
2018-03-20 20:29:07 -07:00
fn execute(&mut self, input: &str, options: &EvalOptions) -> FinishedComputation {
2018-03-19 22:57:54 -07:00
let mut evaluation = UnfinishedComputation::default();
//tokenzing
let tokens = tokenizing::tokenize(input);
if options.debug.tokens {
let token_string = tokens.iter().map(|t| format!("{:?}<L:{},C:{}>", t.token_type, t.offset.0, t.offset.1)).join(", ");
evaluation.add_artifact(TraceArtifact::new("tokens", token_string));
}
{
let token_errors: Vec<&String> = tokens.iter().filter_map(|t| t.get_error()).collect();
if token_errors.len() != 0 {
2018-03-19 22:57:54 -07:00
return evaluation.output(Err(format!("Tokenization error: {:?}\n", token_errors)));
}
}
2018-03-19 22:57:54 -07:00
// parsing
let ast = match parsing::parse(tokens) {
(Ok(ast), trace) => {
if options.debug.parse_tree {
evaluation.add_artifact(TraceArtifact::new_parse_trace(trace));
}
if options.debug.ast {
evaluation.add_artifact(TraceArtifact::new("ast", format!("{:#?}", ast)));
}
ast
},
(Err(err), trace) => {
if options.debug.parse_tree {
evaluation.add_artifact(TraceArtifact::new_parse_trace(trace));
}
2018-03-19 22:57:54 -07:00
return evaluation.output(Err(format!("Parse error: {:?}\n", err.msg)));
}
};
2018-03-19 22:57:54 -07:00
//symbol table
2018-02-27 03:01:05 -08:00
match self.type_context.add_top_level_types(&ast) {
Ok(()) => (),
Err(msg) => {
2018-03-27 00:50:31 -07:00
if options.debug.type_checking {
evaluation.add_artifact(TraceArtifact::new("type_check", msg));
}
2018-02-27 03:01:05 -08:00
}
};
2018-03-19 22:57:54 -07:00
//typechecking
2018-02-21 02:31:28 -08:00
match self.type_context.type_check_ast(&ast) {
Ok(ty) => {
if options.debug.type_checking {
evaluation.add_artifact(TraceArtifact::new("type_check", format!("{:?}", ty)));
}
},
2018-03-19 22:57:54 -07:00
Err(msg) => evaluation.add_artifact(TraceArtifact::new("type_check", msg)),
};
let text = self.type_context.debug_symbol_table();
if options.debug.symbol_table {
evaluation.add_artifact(TraceArtifact::new("symbol_table", text));
}
2018-02-21 02:31:28 -08:00
2018-01-08 05:21:04 -08:00
let evaluation_outputs = self.state.evaluate(ast);
let text_output: Result<Vec<String>, String> = evaluation_outputs
.into_iter()
.collect();
let eval_output = text_output
.map(|v| { v.into_iter().intersperse(format!("\n")).collect() });
evaluation.output(eval_output)
}
}