// schala/schala-lang/src/lib.rs
//
// Top-level crate for the Schala language: wires the tokenizing, parsing,
// symbol-table, typechecking, and eval stages into the schala-repl
// ProgrammingLanguageInterface.
#![feature(slice_patterns, box_patterns, box_syntax)]
#![feature(proc_macro)]
// 2018-03-23 18:43:43 -07:00
extern crate itertools;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate maplit;
#[macro_use]
// 2018-03-23 18:43:43 -07:00
extern crate schala_repl;
// 2018-05-02 02:14:36 -07:00
#[macro_use]
extern crate schala_codegen;
// 2018-03-23 18:43:43 -07:00
use itertools::Itertools;
// 2018-03-20 21:17:46 -07:00
use schala_repl::{ProgrammingLanguageInterface, EvalOptions, TraceArtifact, UnfinishedComputation, FinishedComputation};
/// Shorthand for heap allocation: `bx!(e)` expands to `Box::new(e)`.
macro_rules! bx {
    ($e:expr) => {
        Box::new($e)
    };
}
mod builtin;
mod tokenizing;
mod parsing;
// 2018-02-21 02:31:28 -08:00
mod typechecking;
mod eval;
// 2018-02-21 02:31:28 -08:00
use self::typechecking::{TypeContext};
2018-05-02 02:14:36 -07:00
#[derive(ProgrammingLanguageInterface)]
2018-05-02 03:53:38 -07:00
#[LanguageName = "Schala"]
2018-05-02 20:43:05 -07:00
#[SourceFileExtension = "schala"]
pub struct Schala {
state: eval::State<'static>,
type_context: TypeContext
}
impl Schala {
    /// Builds a fresh interpreter with an empty evaluator state and an empty
    /// type context.
    pub fn new() -> Schala {
        let state = eval::State::new();
        let type_context = TypeContext::new();
        Schala { state, type_context }
    }
}
2018-05-02 01:14:46 -07:00
fn tokenizing_stage(_handle: &mut Schala, input: &str, comp: Option<&mut UnfinishedComputation>) -> Result<Vec<tokenizing::Token>, String> {
let tokens = tokenizing::tokenize(input);
comp.map(|comp| {
let token_string = tokens.iter().map(|t| format!("{:?}<L:{},C:{}>", t.token_type, t.offset.0, t.offset.1)).join(", ");
comp.add_artifact(TraceArtifact::new("tokens", token_string));
});
2018-05-02 00:27:58 -07:00
2018-05-02 01:14:46 -07:00
let errors: Vec<String> = tokens.iter().filter_map(|t| t.get_error()).collect();
if errors.len() == 0 {
Ok(tokens)
} else {
Err(format!("{:?}", errors))
}
}
/// Pipeline stage: parse the token stream into an AST.
///
/// When `comp` is provided, attaches the parse trace and a pretty-printed AST
/// as debug artifacts. Note this stage's error type is `parsing::ParseError`,
/// unlike the `String` errors of the surrounding stages.
fn parsing_stage(_handle: &mut Schala, input: Vec<tokenizing::Token>, comp: Option<&mut UnfinishedComputation>) -> Result<parsing::AST, parsing::ParseError> {
    let (ast, trace) = parsing::parse(input);
    // `if let` instead of side-effecting `Option::map`.
    if let Some(comp) = comp {
        //TODO need to control which of these debug stages get added
        comp.add_artifact(TraceArtifact::new_parse_trace(trace));
        comp.add_artifact(TraceArtifact::new("ast", format!("{:#?}", ast)));
    }
    ast
}
fn symbol_table_stage(handle: &mut Schala, input: parsing::AST, comp: Option<&mut UnfinishedComputation>) -> Result<parsing::AST, String> {
2018-04-29 03:45:31 -07:00
match handle.type_context.add_top_level_types(&input) {
2018-04-29 22:51:01 -07:00
Ok(()) => {
let text = handle.type_context.debug_symbol_table();
comp.map(|comp| comp.add_artifact(TraceArtifact::new("symbol_table", text)));
Ok(input)
},
2018-04-29 03:45:31 -07:00
Err(msg) => Err(msg)
}
}
fn typechecking_stage(handle: &mut Schala, input: parsing::AST, comp: Option<&mut UnfinishedComputation>) -> Result<parsing::AST, String> {
2018-04-29 03:45:31 -07:00
match handle.type_context.type_check_ast(&input) {
Ok(ty) => {
2018-04-29 22:51:01 -07:00
comp.map(|comp| comp.add_artifact(TraceArtifact::new("type_check", format!("{:?}", ty))));
2018-04-29 03:45:31 -07:00
Ok(input)
},
Err(msg) => Err(msg)
}
}
2018-05-02 02:10:56 -07:00
fn eval_stage(handle: &mut Schala, input: parsing::AST, _comp: Option<&mut UnfinishedComputation>) -> Result<String, String> {
2018-04-29 03:45:31 -07:00
let evaluation_outputs = handle.state.evaluate(input);
let text_output: Result<Vec<String>, String> = evaluation_outputs
.into_iter()
.collect();
let eval_output: Result<String, String> = text_output
.map(|v| { v.into_iter().intersperse(format!("\n")).collect() });
eval_output
}
// 2018-05-02 02:14:36 -07:00
/*
impl ProgrammingLanguageInterface for Schala {
fn get_language_name(&self) -> String {
"Schala".to_string()
}
fn get_source_file_suffix(&self) -> String {
format!("schala")
}
fn execute_pipeline(&mut self, input: &str, options: &EvalOptions) -> FinishedComputation {
2018-04-29 22:17:10 -07:00
let mut chain = pass_chain![self, options;
2018-04-29 03:45:31 -07:00
tokenizing_stage,
parsing_stage,
symbol_table_stage,
typechecking_stage,
eval_stage
];
2018-04-29 00:55:39 -07:00
chain(input)
}
2018-05-01 02:24:50 -07:00
fn get_stages(&self) -> Vec<String> {
vec![
2018-05-01 18:22:52 -07:00
format!("tokenizing_stage"),
format!("parsing_stage"), //TODO handle both types of this
format!("symbol_table_stage"),
format!("typechecking_stage"),
format!("eval_stage")
2018-05-01 02:24:50 -07:00
]
}
}
2018-05-02 02:14:36 -07:00
*/