111 lines
3.1 KiB
Rust
111 lines
3.1 KiB
Rust
use schala_lib::{ProgrammingLanguageInterface, EvalOptions, TraceArtifact, LanguageOutput};
|
|
use itertools::Itertools;
|
|
|
|
use schala_lang::{tokenizing, parsing};
|
|
use self::tokenizing::*;
|
|
use self::parsing::*;
|
|
|
|
use schala_lang::tokenizing::TokenType::*;
|
|
|
|
/// Recursive-descent parser over a tokenized input stream.
/// The token vector is stored in reverse source order (see `new`) so that
/// `Vec::pop` cheaply yields tokens front-to-back.
struct AutoParser {
  // Remaining unconsumed tokens, last element = next token to consume.
  tokens: Vec<Token>,
}
|
|
|
|
/* BNF
 * all terminals in this BNF refer to TokenType values

literal := Kw::True | Kw::False | StrLiteral | number_literal
number_literal := int_literal | float_literal
float_literal := digits float_continued
float_continued := ε | Period digits
int_literal := HexLiteral | nonhex_int
nonhex_int := BinNumberSigil+ digits
digits := (DigitGroup Underscore)+
*/
|
|
|
|
impl AutoParser {
|
|
fn new(tokens: Vec<Token>) -> AutoParser {
|
|
AutoParser { tokens: tokens.into_iter().rev().collect() }
|
|
}
|
|
fn peek(&mut self) -> TokenType {
|
|
self.tokens.last().map(|ref t| { t.token_type.clone() }).unwrap_or(TokenType::EOF)
|
|
}
|
|
fn next(&mut self) -> TokenType {
|
|
self.tokens.pop().map(|t| { t.token_type }).unwrap_or(TokenType::EOF)
|
|
}
|
|
fn parse(&mut self) -> (Result<AST, ParseError>, Vec<String>) {
|
|
let ast = self.program();
|
|
(ast, vec![])
|
|
}
|
|
}
|
|
|
|
impl AutoParser {
  // Top-level grammar rule. Currently a program is exactly one literal
  // expression wrapped in a single-statement AST.
  fn program(&mut self) -> ParseResult<AST> {
    let etype = self.literal()?;
    Ok(AST(vec![Statement::ExpressionStatement(Expression(etype, None))]))
  }

  // literal rule (see the BNF comment earlier in this file). Only the two
  // boolean keyword cases are implemented so far; any other token is a
  // parse error.
  // NOTE(review): the error arm returns `ParseError::new(..)` bare, with no
  // `Err(..)` wrapper — presumably `ParseError::new` already returns the
  // `Err` variant of `ParseResult`; confirm against the parsing module.
  fn literal(&mut self) -> ParseResult<ExpressionType> {
    Ok(match self.next() {
      Keyword(Kw::True) => ExpressionType::BoolLiteral(true),
      Keyword(Kw::False) => ExpressionType::BoolLiteral(false),
      _ => return ParseError::new("bad!")
    })
  }
}
|
|
|
|
|
|
/// Stateless language front-end exposing the auto-generated parser
/// through the `ProgrammingLanguageInterface` trait.
pub struct Schala { }

impl Schala {
  /// Creates a new `Schala` instance. The type carries no state, so this
  /// is a trivial constructor.
  pub fn new() -> Schala {
    Schala { }
  }
}
|
|
|
|
impl ProgrammingLanguageInterface for Schala {
|
|
fn get_language_name(&self) -> String {
|
|
"Schala-autoparser".to_string()
|
|
}
|
|
fn get_source_file_suffix(&self) -> String {
|
|
format!("schala")
|
|
}
|
|
|
|
fn evaluate_in_repl(&mut self, input: &str, options: &EvalOptions) -> LanguageOutput {
|
|
let mut output = LanguageOutput::default();
|
|
|
|
let tokens = tokenizing::tokenize(input);
|
|
if options.debug_tokens {
|
|
let token_string = tokens.iter().map(|t| format!("{:?}<L:{},C:{}>", t.token_type, t.offset.0, t.offset.1)).join(", ");
|
|
output.add_artifact(TraceArtifact::new("tokens", format!("{:?}", token_string)));
|
|
}
|
|
{
|
|
let token_errors: Vec<&String> = tokens.iter().filter_map(|t| t.get_error()).collect();
|
|
if token_errors.len() != 0 {
|
|
output.add_output(format!("Tokenization error: {:?}\n", token_errors));
|
|
return output;
|
|
}
|
|
}
|
|
|
|
let mut parser = AutoParser::new(tokens);
|
|
|
|
let ast = match parser.parse() {
|
|
(Ok(ast), trace) => {
|
|
if options.debug_parse {
|
|
output.add_artifact(TraceArtifact::new_parse_trace(trace));
|
|
output.add_artifact(TraceArtifact::new("ast", format!("{:?}", ast)));
|
|
}
|
|
ast
|
|
},
|
|
(Err(err), trace) => {
|
|
output.add_artifact(TraceArtifact::new_parse_trace(trace));
|
|
output.add_output(format!("Parse error: {:?}\n", err.msg));
|
|
return output;
|
|
}
|
|
};
|
|
|
|
output.add_output(format!("{:?}", ast));
|
|
output
|
|
}
|
|
}
|