// schala/maaru/src/lib.rs
#![feature(box_patterns)]
extern crate schala_repl;
mod tokenizer;
mod parser;
mod eval;
/// Error produced during tokenization of Maaru source text.
///
/// Carries only a human-readable message; the tokenizer constructs it
/// via [`TokenError::new`].
#[derive(Debug)]
pub struct TokenError {
    pub msg: String,
}

impl TokenError {
    /// Build a `TokenError` from a message slice, taking an owned copy.
    pub fn new(msg: &str) -> TokenError {
        TokenError { msg: msg.to_string() }
    }
}
pub use self::eval::Evaluator as MaaruEvaluator;
2017-08-31 00:02:17 -07:00
pub struct Maaru<'a> {
2017-08-30 19:15:04 -07:00
evaluator: MaaruEvaluator<'a>
}
2017-08-31 00:02:17 -07:00
impl<'a> Maaru<'a> {
pub fn new() -> Maaru<'a> {
Maaru {
2017-08-30 19:15:04 -07:00
evaluator: MaaruEvaluator::new(None),
}
}
}
/*
fn execute_pipeline(&mut self, input: &str, options: &EvalOptions) -> Result<String, String> {
let mut output = UnfinishedComputation::default();
let tokens = match tokenizer::tokenize(input) {
Ok(tokens) => {
if let Some(_) = options.debug_passes.get("tokens") {
output.add_artifact(TraceArtifact::new("tokens", format!("{:?}", tokens)));
}
tokens
},
Err(err) => {
return output.finish(Err(format!("Tokenization error: {:?}\n", err.msg)))
}
};
let ast = match parser::parse(&tokens, &[]) {
Ok(ast) => {
if let Some(_) = options.debug_passes.get("ast") {
output.add_artifact(TraceArtifact::new("ast", format!("{:?}", ast)));
}
ast
},
Err(err) => {
return output.finish(Err(format!("Parse error: {:?}\n", err.msg)))
}
};
let mut evaluation_output = String::new();
for s in self.evaluator.run(ast).iter() {
evaluation_output.push_str(s);
}
Ok(evaluation_output)
}
*/
/*
impl<'a> ProgrammingLanguageInterface for Maaru<'a> {
    fn get_language_name(&self) -> String {
        "Maaru".to_string()
    }
    fn get_source_file_suffix(&self) -> String {
        format!("maaru")
    }
}
*/