schala/src/schala_lang/autoparser.rs

use schala_lib::{ProgrammingLanguageInterface, EvalOptions, TraceArtifact, LanguageOutput};
use itertools::Itertools;
use schala_lang::{tokenizing, parsing};
use self::tokenizing::*;
use self::parsing::*;

// Stub for the auto-generated parser: it does not consume its input yet and
// always reports a parse error, together with an (empty) parse trace.
fn auto_parse(_input: Vec<Token>) -> (Result<AST, ParseError>, Vec<String>) {
    let err = ParseError { msg: "Not yet implemented".to_string() };
    (Err(err), vec![])
}

pub struct Schala { }

impl Schala {
    pub fn new() -> Schala {
        Schala { }
    }
}

impl ProgrammingLanguageInterface for Schala {
    fn get_language_name(&self) -> String {
        "Schala-autoparser".to_string()
    }

    fn get_source_file_suffix(&self) -> String {
        "schala".to_string()
    }
    fn evaluate_in_repl(&mut self, input: &str, options: &EvalOptions) -> LanguageOutput {
        let mut output = LanguageOutput::default();
        let tokens = tokenizing::tokenize(input);

        if options.debug_tokens {
            let token_string = tokens.iter()
                .map(|t| format!("{:?}<L:{},C:{}>", t.token_type, t.offset.0, t.offset.1))
                .join(", ");
            output.add_artifact(TraceArtifact::new("tokens", token_string));
        }

        // Scope this borrow of `tokens` so the vector can be moved into `auto_parse` below.
        {
            let token_errors: Vec<&String> = tokens.iter().filter_map(|t| t.get_error()).collect();
            if !token_errors.is_empty() {
                output.add_output(format!("Tokenization error: {:?}\n", token_errors));
                return output;
            }
        }
        let ast = match auto_parse(tokens) {
            (Ok(ast), trace) => {
                if options.debug_parse {
                    output.add_artifact(TraceArtifact::new_parse_trace(trace));
                    output.add_artifact(TraceArtifact::new("ast", format!("{:?}", ast)));
                }
                ast
            },
            (Err(err), trace) => {
                output.add_artifact(TraceArtifact::new_parse_trace(trace));
                output.add_output(format!("Parse error: {:?}\n", err.msg));
                return output;
            }
        };

        output.add_output(format!("{:?}", ast));
        output
    }
}
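
// A minimal smoke-test sketch (not part of the original file). It exercises only the
// metadata methods defined above; exercising `evaluate_in_repl` is omitted because it
// would require constructing an `EvalOptions`, whose full definition lives in
// schala_lib and is not assumed here.
#[cfg(test)]
mod tests {
    use super::Schala;
    use schala_lib::ProgrammingLanguageInterface;

    #[test]
    fn reports_language_metadata() {
        let schala = Schala::new();
        assert_eq!(schala.get_language_name(), "Schala-autoparser");
        assert_eq!(schala.get_source_file_suffix(), "schala");
    }
}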