//! Maaru language frontend: tokenizer, parser, and evaluator wired into
//! the Schala REPL through `ProgrammingLanguageInterface`.
#![feature(box_patterns)]
extern crate schala_repl;
mod tokenizer;
mod parser;
mod eval;
mod compilation;
use schala_repl::{ProgrammingLanguageInterface, EvalOptions, LanguageOutput, TraceArtifact};
/// Error produced by the tokenizer; carries a human-readable message.
///
/// Derives `Clone`/`PartialEq`/`Eq` in addition to `Debug` so callers can
/// copy and compare errors (e.g. in tests) — standard derives for a simple
/// data-carrying error type.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct TokenError {
    /// Description of what went wrong during tokenization.
    pub msg: String,
}

impl TokenError {
    /// Builds a `TokenError` from a borrowed message string.
    pub fn new(msg: &str) -> TokenError {
        TokenError { msg: msg.to_string() }
    }
}
pub use self::eval::Evaluator as MaaruEvaluator;
pub struct Maaru<'a> {
2017-08-30 19:15:04 -07:00
evaluator: MaaruEvaluator<'a>
}
2017-08-31 00:02:17 -07:00
impl<'a> Maaru<'a> {
pub fn new() -> Maaru<'a> {
Maaru {
2017-08-30 19:15:04 -07:00
evaluator: MaaruEvaluator::new(None),
}
}
}
2017-08-31 00:02:17 -07:00
impl<'a> ProgrammingLanguageInterface for Maaru<'a> {
2017-08-30 19:15:04 -07:00
fn get_language_name(&self) -> String {
"Maaru".to_string()
}
2017-10-02 23:07:05 -07:00
fn get_source_file_suffix(&self) -> String {
format!("maaru")
}
2017-08-30 19:15:04 -07:00
2018-03-07 22:07:13 -08:00
fn evaluate_in_repl(&mut self, input: &str, options: &EvalOptions) -> LanguageOutput {
let mut output = LanguageOutput::default();
2017-08-31 20:59:43 -07:00
2017-08-30 19:15:04 -07:00
let tokens = match tokenizer::tokenize(input) {
Ok(tokens) => {
2018-03-20 20:29:07 -07:00
if options.debug.tokens {
2017-08-31 20:59:43 -07:00
output.add_artifact(TraceArtifact::new("tokens", format!("{:?}", tokens)));
2017-08-30 19:15:04 -07:00
}
tokens
},
Err(err) => {
2017-08-31 20:59:43 -07:00
output.add_output(format!("Tokenization error: {:?}\n", err.msg));
2017-08-30 19:15:04 -07:00
return output;
}
};
let ast = match parser::parse(&tokens, &[]) {
Ok(ast) => {
2018-03-20 20:29:07 -07:00
if options.debug.ast {
2017-08-31 20:59:43 -07:00
output.add_artifact(TraceArtifact::new("ast", format!("{:?}", ast)));
2017-08-30 19:15:04 -07:00
}
ast
},
Err(err) => {
2017-08-31 20:59:43 -07:00
output.add_output(format!("Parse error: {:?}\n", err.msg));
2017-08-30 19:15:04 -07:00
return output;
}
};
2017-08-31 20:59:43 -07:00
let mut evaluation_output = String::new();
for s in self.evaluator.run(ast).iter() {
evaluation_output.push_str(s);
}
output.add_output(evaluation_output);
2017-08-30 19:15:04 -07:00
return output;
}
2017-08-31 19:15:32 -07:00
2018-03-20 21:13:34 -07:00
/* TODO make this work with new framework */
/*
2017-08-31 19:15:32 -07:00
fn can_compile(&self) -> bool {
true
}
fn compile(&mut self, input: &str) -> LLVMCodeString {
let tokens = match tokenizer::tokenize(input) {
Ok(tokens) => tokens,
Err(err) => {
let msg = format!("Tokenization error: {:?}\n", err.msg);
panic!("{}", msg);
}
};
let ast = match parser::parse(&tokens, &[]) {
Ok(ast) => ast,
Err(err) => {
let msg = format!("Parse error: {:?}\n", err.msg);
panic!("{}", msg);
}
};
compilation::compile_ast(ast)
}
2018-03-20 21:13:34 -07:00
*/
2017-08-30 19:15:04 -07:00
}