From 49a50deb042295170f430a06523d55cf6352b49e Mon Sep 17 00:00:00 2001
From: Greg Shuflin
Date: Tue, 19 Oct 2021 20:50:43 -0700
Subject: [PATCH] Run rustfmt on schala.rs

---
 schala-lang/language/src/schala.rs | 296 +++++++++++++++--------------
 1 file changed, 157 insertions(+), 139 deletions(-)

diff --git a/schala-lang/language/src/schala.rs b/schala-lang/language/src/schala.rs
index 781affb..512d2d8 100644
--- a/schala-lang/language/src/schala.rs
+++ b/schala-lang/language/src/schala.rs
@@ -1,185 +1,203 @@
 use stopwatch::Stopwatch;
-use schala_repl::{ProgrammingLanguageInterface,
-ComputationRequest, ComputationResponse,
-LangMetaRequest, LangMetaResponse, GlobalOutputStats};
-use crate::{reduced_ast, tokenizing, parsing, eval, typechecking, symbol_table};
 use crate::error::SchalaError;
+use crate::{eval, parsing, reduced_ast, symbol_table, tokenizing, typechecking};
+use schala_repl::{
+    ComputationRequest, ComputationResponse, GlobalOutputStats, LangMetaRequest, LangMetaResponse,
+    ProgrammingLanguageInterface,
+};
 
 /// All the state necessary to parse and execute a Schala program are stored in this struct.
 pub struct Schala {
-  /// Holds a reference to the original source code, parsed into line and character
-  source_reference: SourceReference,
-  /// Execution state for AST-walking interpreter
-  state: eval::State<'static>,
-  /// Keeps track of symbols and scopes
-  symbol_table: symbol_table::SymbolTable,
-  /// Contains information for type-checking
-  type_context: typechecking::TypeContext<'static>,
-  /// Schala Parser
-  active_parser: parsing::Parser,
+    /// Holds a reference to the original source code, parsed into line and character
+    source_reference: SourceReference,
+    /// Execution state for AST-walking interpreter
+    state: eval::State<'static>,
+    /// Keeps track of symbols and scopes
+    symbol_table: symbol_table::SymbolTable,
+    /// Contains information for type-checking
+    type_context: typechecking::TypeContext<'static>,
+    /// Schala Parser
+    active_parser: parsing::Parser,
 }
 
 impl Schala {
-  //TODO implement documentation for language items
-  /*
-  fn handle_docs(&self, source: String) -> LangMetaResponse {
-    LangMetaResponse::Docs {
-      doc_string: format!("Schala item `{}` : <>", source)
+    //TODO implement documentation for language items
+    /*
+    fn handle_docs(&self, source: String) -> LangMetaResponse {
+        LangMetaResponse::Docs {
+            doc_string: format!("Schala item `{}` : <>", source)
+        }
     }
-  }
-  */
+    */
 }
 
 impl Schala {
-  /// Creates a new Schala environment *without* any prelude.
-  fn new_blank_env() -> Schala {
-    Schala {
-      source_reference: SourceReference::new(),
-      symbol_table: symbol_table::SymbolTable::new(),
-      state: eval::State::new(),
-      type_context: typechecking::TypeContext::new(),
-      active_parser: parsing::Parser::new()
-    }
-  }
-
-  /// Creates a new Schala environment with the standard prelude, which is defined as ordinary
-  /// Schala code in the file `prelude.schala`
-  #[allow(clippy::new_without_default)]
-  pub fn new() -> Schala {
-    let prelude = include_str!("../source-files/prelude.schala");
-    let mut env = Schala::new_blank_env();
-
-    let response = env.run_pipeline(prelude);
-    if let Err(err) = response {
-      panic!("Error in prelude, panicking: {}", err.display());
-    }
-    env
-  }
-
-  /// This is where the actual action of interpreting/compilation happens.
-  /// Note: this should eventually use a query-based system for parallelization, cf.
-  /// https://rustc-dev-guide.rust-lang.org/overview.html
-  fn run_pipeline(&mut self, source: &str) -> Result<String, SchalaError> {
-    // 1st stage - tokenization
-    // TODO tokenize should return its own error type
-    let tokens = tokenizing::tokenize(source);
-    if let Some(err) = SchalaError::from_tokens(&tokens) {
-      return Err(err)
+    /// Creates a new Schala environment *without* any prelude.
+    fn new_blank_env() -> Schala {
+        Schala {
+            source_reference: SourceReference::new(),
+            symbol_table: symbol_table::SymbolTable::new(),
+            state: eval::State::new(),
+            type_context: typechecking::TypeContext::new(),
+            active_parser: parsing::Parser::new(),
+        }
     }
 
-    //2nd stage - parsing
-    self.active_parser.add_new_tokens(tokens);
-    let ast = self.active_parser.parse()
-      .map_err(|err| SchalaError::from_parse_error(err, &self.source_reference))?;
+    /// Creates a new Schala environment with the standard prelude, which is defined as ordinary
+    /// Schala code in the file `prelude.schala`
+    #[allow(clippy::new_without_default)]
+    pub fn new() -> Schala {
+        let prelude = include_str!("../source-files/prelude.schala");
+        let mut env = Schala::new_blank_env();
 
-    //Perform all symbol table work
-    self.symbol_table.process_ast(&ast)
-      .map_err(SchalaError::from_symbol_table)?;
+        let response = env.run_pipeline(prelude);
+        if let Err(err) = response {
+            panic!("Error in prelude, panicking: {}", err.display());
+        }
+        env
+    }
 
-    // Typechecking
-    // TODO typechecking not working
-    let _overall_type = self.type_context.typecheck(&ast)
-      .map_err(SchalaError::from_type_error);
+    /// This is where the actual action of interpreting/compilation happens.
+    /// Note: this should eventually use a query-based system for parallelization, cf.
+    /// https://rustc-dev-guide.rust-lang.org/overview.html
+    fn run_pipeline(&mut self, source: &str) -> Result<String, SchalaError> {
+        // 1st stage - tokenization
+        // TODO tokenize should return its own error type
+        let tokens = tokenizing::tokenize(source);
+        if let Some(err) = SchalaError::from_tokens(&tokens) {
+            return Err(err);
+        }
 
-    // Reduce AST - TODO this doesn't produce an error yet, but probably should
-    let reduced_ast = reduced_ast::reduce(&ast, &self.symbol_table);
+        //2nd stage - parsing
+        self.active_parser.add_new_tokens(tokens);
+        let ast = self
+            .active_parser
+            .parse()
+            .map_err(|err| SchalaError::from_parse_error(err, &self.source_reference))?;
 
-    // Tree-walking evaluator. TODO fix this
-    let evaluation_outputs = self.state.evaluate(reduced_ast, true);
-    let text_output: Result<Vec<String>, String> = evaluation_outputs
-      .into_iter()
-      .collect();
+        //Perform all symbol table work
+        self.symbol_table
+            .process_ast(&ast)
+            .map_err(SchalaError::from_symbol_table)?;
 
-    let text_output: Result<Vec<String>, SchalaError> = text_output
-      .map_err(|err| SchalaError::from_string(err, Stage::Evaluation));
+        // Typechecking
+        // TODO typechecking not working
+        let _overall_type = self
+            .type_context
+            .typecheck(&ast)
+            .map_err(SchalaError::from_type_error);
 
-    let eval_output: String = text_output
-      .map(|v| { Iterator::intersperse(v.into_iter(), "\n".to_owned()).collect() })?;
+        // Reduce AST - TODO this doesn't produce an error yet, but probably should
+        let reduced_ast = reduced_ast::reduce(&ast, &self.symbol_table);
 
-    Ok(eval_output)
-  }
+        // Tree-walking evaluator. TODO fix this
+        let evaluation_outputs = self.state.evaluate(reduced_ast, true);
+        let text_output: Result<Vec<String>, String> = evaluation_outputs.into_iter().collect();
+
+        let text_output: Result<Vec<String>, SchalaError> =
+            text_output.map_err(|err| SchalaError::from_string(err, Stage::Evaluation));
+
+        let eval_output: String =
+            text_output.map(|v| Iterator::intersperse(v.into_iter(), "\n".to_owned()).collect())?;
+
+        Ok(eval_output)
+    }
 }
 
-
 /// Represents lines of source code
 pub(crate) struct SourceReference {
-  lines: Option<Vec<String>>
+    lines: Option<Vec<String>>,
 }
 
 impl SourceReference {
-  fn new() -> SourceReference {
-    SourceReference { lines: None }
-  }
+    fn new() -> SourceReference {
+        SourceReference { lines: None }
+    }
 
-  fn load_new_source(&mut self, source: &str) {
-    //TODO this is a lot of heap allocations - maybe there's a way to make it more efficient?
-    self.lines = Some(source.lines().map(|s| s.to_string()).collect()); }
+    fn load_new_source(&mut self, source: &str) {
+        //TODO this is a lot of heap allocations - maybe there's a way to make it more efficient?
+        self.lines = Some(source.lines().map(|s| s.to_string()).collect());
+    }
 
-  pub fn get_line(&self, line: usize) -> String {
-    self.lines.as_ref().and_then(|x| x.get(line).map(|s| s.to_string())).unwrap_or_else(|| "NO LINE FOUND".to_string())
-  }
+    pub fn get_line(&self, line: usize) -> String {
+        self.lines
+            .as_ref()
+            .and_then(|x| x.get(line).map(|s| s.to_string()))
+            .unwrap_or_else(|| "NO LINE FOUND".to_string())
+    }
 }
 
 #[allow(dead_code)]
 #[derive(Clone, Copy, Debug)]
 pub(crate) enum Stage {
-  Tokenizing,
-  Parsing,
-  Symbols,
-  ScopeResolution,
-  Typechecking,
-  AstReduction,
-  Evaluation,
+    Tokenizing,
+    Parsing,
+    Symbols,
+    ScopeResolution,
+    Typechecking,
+    AstReduction,
+    Evaluation,
 }
 
 fn stage_names() -> Vec<&'static str> {
-  vec![
-    "tokenizing",
-    "parsing",
-    "symbol-table",
-    "scope-resolution",
-    "typechecking",
-    "ast-reduction",
-    "ast-walking-evaluation"
-  ]
+    vec![
+        "tokenizing",
+        "parsing",
+        "symbol-table",
+        "typechecking",
+        "ast-reduction",
+        "ast-walking-evaluation",
+    ]
 }
 
-
 impl ProgrammingLanguageInterface for Schala {
-  type Config = ();
-  fn language_name() -> String {
-    "Schala".to_owned()
-  }
-
-  fn source_file_suffix() -> String {
-    "schala".to_owned()
-  }
-
-  fn run_computation(&mut self, request: ComputationRequest<Self::Config>) -> ComputationResponse {
-    let ComputationRequest { source, debug_requests: _, config: _ } = request;
-    self.source_reference.load_new_source(source);
-    let sw = Stopwatch::start_new();
-
-    let main_output = self.run_pipeline(source)
-      .map_err(|schala_err| schala_err.display());
-
-    let global_output_stats = GlobalOutputStats {
-      total_duration: sw.elapsed(),
-      stage_durations: vec![]
-    };
-
-    ComputationResponse {
-      main_output,
-      global_output_stats,
-      debug_responses: vec![]
+    type Config = ();
+    fn language_name() -> String {
+        "Schala".to_owned()
     }
-  }
 
-  fn request_meta(&mut self, request: LangMetaRequest) -> LangMetaResponse {
-    match request {
-      LangMetaRequest::StageNames => LangMetaResponse::StageNames(stage_names().iter().map(|s| s.to_string()).collect()),
-      _ => LangMetaResponse::Custom { kind: "not-implemented".to_string(), value: "".to_string() }
+    fn source_file_suffix() -> String {
+        "schala".to_owned()
+    }
+
+    fn run_computation(
+        &mut self,
+        request: ComputationRequest<Self::Config>,
+    ) -> ComputationResponse {
+        let ComputationRequest {
+            source,
+            debug_requests: _,
+            config: _,
+        } = request;
+        self.source_reference.load_new_source(source);
+        let sw = Stopwatch::start_new();
+
+        let main_output = self
+            .run_pipeline(source)
+            .map_err(|schala_err| schala_err.display());
+
+        let global_output_stats = GlobalOutputStats {
+            total_duration: sw.elapsed(),
+            stage_durations: vec![],
+        };
+
+        ComputationResponse {
+            main_output,
+            global_output_stats,
+            debug_responses: vec![],
+        }
+    }
+
+    fn request_meta(&mut self, request: LangMetaRequest) -> LangMetaResponse {
+        match request {
+            LangMetaRequest::StageNames => {
+                LangMetaResponse::StageNames(stage_names().iter().map(|s| s.to_string()).collect())
+            }
+            _ => LangMetaResponse::Custom {
+                kind: "not-implemented".to_string(),
+                value: "".to_string(),
+            },
+        }
     }
-  }
 }
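
A note on the evaluation step reformatted above: `Iterator::intersperse` is an unstable standard-library API (behind the `iter_intersperse` feature), so this code presumably builds on a nightly toolchain. The sketch below is an editorial illustration only, not part of the patch: it shows the same collect-into-`Result`, then join-with-newlines pattern that `run_pipeline` uses, written against stable Rust with plain `String` values.

// Editorial sketch, not part of the patch: the collect-then-join pattern
// from run_pipeline, using only stable Rust and plain String values.
fn join_outputs(outputs: Vec<Result<String, String>>) -> Result<String, String> {
    // Collecting an iterator of Results into Result<Vec<_>, _> stops at the
    // first Err, which mirrors how `text_output` is built above.
    let lines: Vec<String> = outputs.into_iter().collect::<Result<Vec<_>, _>>()?;
    // On stable, join("\n") stands in for Iterator::intersperse + collect.
    Ok(lines.join("\n"))
}

fn main() {
    let ok = vec![Ok("1".to_string()), Ok("2".to_string())];
    assert_eq!(join_outputs(ok).unwrap(), "1\n2");

    let err = vec![Ok("1".to_string()), Err("boom".to_string())];
    assert_eq!(join_outputs(err).unwrap_err(), "boom");
}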