schala/schala-lang/language/src/schala.rs
use stopwatch::Stopwatch;
use std::cell::RefCell;
use std::rc::Rc;
use std::collections::HashSet;
use schala_repl::{ProgrammingLanguageInterface,
ComputationRequest, ComputationResponse,
LangMetaRequest, LangMetaResponse, GlobalOutputStats};
use crate::{reduced_ast, tokenizing, parsing, eval, typechecking, symbol_table, source_map};
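// Shared, internally-mutable handles: the source map and symbol table are threaded through
// several pipeline stages (parser, symbol table, scope resolver) via `Rc<RefCell<..>>`.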
pub type SymbolTableHandle = Rc<RefCell<symbol_table::SymbolTable>>;
pub type SourceMapHandle = Rc<RefCell<source_map::SourceMap>>;
/// All the state necessary to parse and execute a Schala program is stored in this struct.
/// `state` represents the execution state for the AST-walking interpreter; the other fields
/// should be self-explanatory.
#[allow(dead_code)]
pub struct Schala {
source_reference: SourceReference,
source_map: SourceMapHandle,
state: eval::State<'static>,
symbol_table: SymbolTableHandle,
resolver: crate::scope_resolution::ScopeResolver<'static>,
type_context: typechecking::TypeContext<'static>,
active_parser: parsing::Parser,
}
impl Schala {
//TODO implement documentation for language items
/*
fn handle_docs(&self, source: String) -> LangMetaResponse {
LangMetaResponse::Docs {
doc_string: format!("Schala item `{}` : <<Schala-lang documentation not yet implemented>>", source)
}
}
*/
}
impl Schala {
/// Creates a new Schala environment *without* any prelude.
fn new_blank_env() -> Schala {
let source_map = Rc::new(RefCell::new(source_map::SourceMap::new()));
let symbols = Rc::new(RefCell::new(symbol_table::SymbolTable::new(source_map.clone())));
Schala {
//TODO maybe these can be the same structure
source_reference: SourceReference::new(),
symbol_table: symbols.clone(),
source_map: source_map.clone(),
resolver: crate::scope_resolution::ScopeResolver::new(symbols.clone()),
state: eval::State::new(),
type_context: typechecking::TypeContext::new(),
active_parser: parsing::Parser::new(source_map)
}
}
/// Creates a new Schala environment with the standard prelude, which is defined as ordinary
/// Schala code in the file `prelude.schala`.
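///
/// A usage sketch (illustrative only; `ComputationRequest` is the `schala_repl` type imported above):
/// ```ignore
/// let mut schala = Schala::new();
/// let request = ComputationRequest {
///     source: "1 + 1", // any Schala source text
///     debug_requests: HashSet::default(),
/// };
/// let response = schala.run_computation(request);
/// // `main_output` is a `Result<String, String>` holding the evaluator's output or an error.
/// println!("{:?}", response.main_output);
/// ```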
pub fn new() -> Schala {
let prelude = include_str!("prelude.schala");
let mut s = Schala::new_blank_env();
//TODO this shouldn't depend on schala-repl types
let request = ComputationRequest { source: prelude, debug_requests: HashSet::default() };
let response = s.run_computation(request);
if let Err(msg) = response.main_output {
panic!("Error in prelude, panicking: {}", msg);
}
s
}
/// This is where the actual work of interpretation/compilation happens.
/// Note: this should eventually use a query-based system for parallelization, cf.
/// https://rustc-dev-guide.rust-lang.org/overview.html
fn run_pipeline(&mut self, source: &str) -> Result<String, String> {
//TODO `run_pipeline` should return a formatted error struct
//TODO every stage should have a common error-format that gets turned into a repl-appropriate
//string in `run_computation`
// 1st stage - tokenization
// TODO tokenize should return its own error type
let tokens = tokenizing::tokenize(source);
let token_errors: Vec<String> = tokens.iter().filter_map(|t| t.get_error()).collect();
if !token_errors.is_empty() {
return Err(format!("{:?}", token_errors));
}
// 2nd stage - parsing
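// The parser is kept around in `active_parser`, so hand it the newly-lexed tokens before parsing.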
self.active_parser.add_new_tokens(tokens);
let mut ast = self.active_parser.parse()
.map_err(|err| format_parse_error(err, &self.source_reference))?;
// Symbol table
self.symbol_table.borrow_mut().add_top_level_symbols(&ast)?;
// Scope resolution - requires mutating AST
self.resolver.resolve(&mut ast)?;
// Typechecking
// TODO typechecking isn't fully working yet; its result is computed but currently ignored
let _overall_type = self.type_context.typecheck(&ast)
.map_err(|err| format!("Type error: {}", err.msg));
// Reduce AST - this doesn't produce an error yet, but probably should
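// `reduce` lowers the full AST into the simpler form the tree-walking evaluator consumes,
// consulting the symbol table along the way.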
let symbol_table = self.symbol_table.borrow();
let reduced_ast = reduced_ast::reduce(&ast, &symbol_table);
// Tree-walking evaluator. TODO fix this
let evaluation_outputs = self.state.evaluate(reduced_ast, true);
let text_output: Result<Vec<String>, String> = evaluation_outputs
.into_iter()
.collect();
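// Join the individual evaluation outputs with newlines for display.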
let eval_output: String = text_output
.map(|v| { Iterator::intersperse(v.into_iter(), "\n".to_owned()).collect() })?;
Ok(eval_output)
}
}
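/// Formats a parse error for display: the error message, the offending source line (looked up
/// through the `SourceReference`), and a caret pointing at the error's column.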
fn format_parse_error(error: parsing::ParseError, source_reference: &SourceReference) -> String {
let line_num = error.token.location.line_num;
let ch = error.token.location.char_num;
let line_from_program = source_reference.get_line(line_num);
let location_pointer = format!("{}^", " ".repeat(ch));
let line_num_digits = format!("{}", line_num).chars().count();
let space_padding = " ".repeat(line_num_digits);
let production = match error.production_name {
Some(n) => format!("\n(from production \"{}\")", n),
None => "".to_string()
};
format!(r#"
{error_msg}{production}
{space_padding} |
{line_num} | {}
{space_padding} | {}
"#, line_from_program, location_pointer, error_msg=error.msg, space_padding=space_padding, line_num=line_num, production=production
)
}
/// Represents the lines of the current source program, used to show context in error messages.
struct SourceReference {
lines: Option<Vec<String>>
}
impl SourceReference {
fn new() -> SourceReference {
SourceReference { lines: None }
}
fn load_new_source(&mut self, source: &str) {
//TODO this is a lot of heap allocations - maybe there's a way to make it more efficient?
self.lines = Some(source.lines().map(|s| s.to_string()).collect());
}
fn get_line(&self, line: usize) -> String {
self.lines.as_ref().and_then(|x| x.get(line).map(|s| s.to_string())).unwrap_or_else(|| "NO LINE FOUND".to_string())
}
}
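/// The names of the pipeline stages, in execution order; reported to the REPL via
/// `LangMetaRequest::StageNames` in `request_meta` below.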
fn stage_names() -> Vec<&'static str> {
vec![
"tokenizing",
"parsing",
"symbol-table",
"scope-resolution",
"typechecking",
"ast-reduction",
"ast-walking-evaluation"
]
}
impl ProgrammingLanguageInterface for Schala {
fn language_name() -> String {
"Schala".to_owned()
}
fn source_file_suffix() -> String {
"schala".to_owned()
}
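/// Loads the request's source into the `SourceReference` for error reporting, runs the full
/// pipeline on it, and reports the total wall-clock time taken.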
fn run_computation(&mut self, request: ComputationRequest) -> ComputationResponse {
let ComputationRequest { source, debug_requests: _ } = request;
self.source_reference.load_new_source(source);
let sw = Stopwatch::start_new();
let main_output = self.run_pipeline(source);
let global_output_stats = GlobalOutputStats {
total_duration: sw.elapsed(),
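// Only the total duration is measured for now; per-stage timings are left empty.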
stage_durations: vec![]
};
ComputationResponse {
main_output,
global_output_stats,
debug_responses: vec![]
}
}
fn request_meta(&mut self, request: LangMetaRequest) -> LangMetaResponse {
match request {
LangMetaRequest::StageNames => LangMetaResponse::StageNames(stage_names().iter().map(|s| s.to_string()).collect()),
_ => LangMetaResponse::Custom { kind: "not-implemented".to_string(), value: String::new() }
}
}
}