diff --git a/justfile b/justfile index c3759f9..adcf758 100755 --- a/justfile +++ b/justfile @@ -102,7 +102,7 @@ sloc: ! grep --color -En '.{101}' src/*.rs replace FROM TO: - sd -i '{{FROM}}' '{{TO}}' src/*.rs + sd '{{FROM}}' '{{TO}}' src/*.rs test-quine: cargo run -- quine diff --git a/src/alias.rs b/src/alias.rs index 3e6a87a..dcaedae 100644 --- a/src/alias.rs +++ b/src/alias.rs @@ -1,15 +1,35 @@ use crate::common::*; -#[derive(Debug)] -pub(crate) struct Alias<'a> { - pub(crate) name: &'a str, - pub(crate) target: &'a str, - pub(crate) line_number: usize, - pub(crate) private: bool, +/// An alias, e.g. `name := target` +#[derive(Debug, PartialEq)] +pub(crate) struct Alias<'src> { + pub(crate) name: Name<'src>, + pub(crate) target: Name<'src>, +} + +impl Alias<'_> { + pub(crate) fn is_private(&self) -> bool { + self.name.lexeme().starts_with('_') + } + + pub(crate) fn line_number(&self) -> usize { + self.name.line + } +} + +impl<'src> Keyed<'src> for Alias<'src> { + fn key(&self) -> &'src str { + self.name.lexeme() + } } impl<'a> Display for Alias<'a> { fn fmt(&self, f: &mut Formatter) -> fmt::Result { - write!(f, "alias {} := {}", self.name, self.target) + write!( + f, + "alias {} := {}", + self.name.lexeme(), + self.target.lexeme() + ) } } diff --git a/src/alias_resolver.rs b/src/alias_resolver.rs index d0c2fe9..e925845 100644 --- a/src/alias_resolver.rs +++ b/src/alias_resolver.rs @@ -7,20 +7,14 @@ where { aliases: &'b BTreeMap<&'a str, Alias<'a>>, recipes: &'b BTreeMap<&'a str, Recipe<'a>>, - alias_tokens: &'b BTreeMap<&'a str, Token<'a>>, } impl<'a: 'b, 'b> AliasResolver<'a, 'b> { pub(crate) fn resolve_aliases( aliases: &BTreeMap<&'a str, Alias<'a>>, recipes: &BTreeMap<&'a str, Recipe<'a>>, - alias_tokens: &BTreeMap<&'a str, Token<'a>>, ) -> CompilationResult<'a, ()> { - let resolver = AliasResolver { - aliases, - recipes, - alias_tokens, - }; + let resolver = AliasResolver { aliases, recipes }; resolver.resolve()?; @@ -36,20 +30,20 @@ impl<'a: 'b, 'b> 
AliasResolver<'a, 'b> { } fn resolve_alias(&self, alias: &Alias<'a>) -> CompilationResult<'a, ()> { - let token = self.alias_tokens.get(&alias.name).unwrap(); + let token = alias.name.token(); // Make sure the alias doesn't conflict with any recipe - if let Some(recipe) = self.recipes.get(alias.name) { + if let Some(recipe) = self.recipes.get(alias.name.lexeme()) { return Err(token.error(AliasShadowsRecipe { - alias: alias.name, - recipe_line: recipe.line_number, + alias: alias.name.lexeme(), + recipe_line: recipe.line_number(), })); } // Make sure the target recipe exists - if self.recipes.get(alias.target).is_none() { + if self.recipes.get(alias.target.lexeme()).is_none() { return Err(token.error(UnknownAliasTarget { - alias: alias.name, - target: alias.target, + alias: alias.name.lexeme(), + target: alias.target.lexeme(), })); } diff --git a/src/analyzer.rs b/src/analyzer.rs new file mode 100644 index 0000000..8812751 --- /dev/null +++ b/src/analyzer.rs @@ -0,0 +1,300 @@ +use crate::common::*; + +use CompilationErrorKind::*; + +pub(crate) struct Analyzer<'a> { + recipes: Table<'a, Recipe<'a>>, + assignments: Table<'a, Assignment<'a>>, + aliases: Table<'a, Alias<'a>>, +} + +impl<'a> Analyzer<'a> { + pub(crate) fn analyze(module: Module<'a>) -> CompilationResult<'a, Justfile> { + let analyzer = Analyzer::new(); + + analyzer.justfile(module) + } + + pub(crate) fn new() -> Analyzer<'a> { + Analyzer { + recipes: empty(), + assignments: empty(), + aliases: empty(), + } + } + + pub(crate) fn justfile(mut self, module: Module<'a>) -> CompilationResult<'a, Justfile<'a>> { + for item in module.items { + match item { + Item::Alias(alias) => { + self.analyze_alias(&alias)?; + self.aliases.insert(alias); + } + Item::Assignment(assignment) => { + self.analyze_assignment(&assignment)?; + self.assignments.insert(assignment); + } + Item::Recipe(recipe) => { + self.analyze_recipe(&recipe)?; + self.recipes.insert(recipe); + } + } + } + + let recipes = self.recipes; + let 
assignments = self.assignments; + let aliases = self.aliases; + + AssignmentResolver::resolve_assignments(&assignments)?; + + RecipeResolver::resolve_recipes(&recipes, &assignments)?; + + for recipe in recipes.values() { + for parameter in &recipe.parameters { + if assignments.contains_key(parameter.name.lexeme()) { + return Err(parameter.name.token().error(ParameterShadowsVariable { + parameter: parameter.name.lexeme(), + })); + } + } + + for dependency in &recipe.dependencies { + if !recipes[dependency.lexeme()].parameters.is_empty() { + return Err(dependency.error(DependencyHasParameters { + recipe: recipe.name(), + dependency: dependency.lexeme(), + })); + } + } + } + + AliasResolver::resolve_aliases(&aliases, &recipes)?; + + Ok(Justfile { + warnings: module.warnings, + recipes, + assignments, + aliases, + }) + } + + fn analyze_recipe(&self, recipe: &Recipe<'a>) -> CompilationResult<'a, ()> { + if let Some(original) = self.recipes.get(recipe.name.lexeme()) { + return Err(recipe.name.token().error(DuplicateRecipe { + recipe: original.name(), + first: original.line_number(), + })); + } + + let mut parameters = BTreeSet::new(); + let mut passed_default = false; + + for parameter in &recipe.parameters { + if parameters.contains(parameter.name.lexeme()) { + return Err(parameter.name.token().error(DuplicateParameter { + recipe: recipe.name.lexeme(), + parameter: parameter.name.lexeme(), + })); + } + parameters.insert(parameter.name.lexeme()); + + if parameter.default.is_some() { + passed_default = true; + } else if passed_default { + return Err( + parameter + .name + .token() + .error(RequiredParameterFollowsDefaultParameter { + parameter: parameter.name.lexeme(), + }), + ); + } + } + + let mut dependencies = BTreeSet::new(); + for dependency in &recipe.dependencies { + if dependencies.contains(dependency.lexeme()) { + return Err(dependency.token().error(DuplicateDependency { + recipe: recipe.name.lexeme(), + dependency: dependency.lexeme(), + })); + } + 
dependencies.insert(dependency.lexeme()); + } + + let mut continued = false; + for line in &recipe.body { + if !recipe.shebang && !continued { + if let Some(Fragment::Text { token }) = line.fragments.first() { + let text = token.lexeme(); + + if text.starts_with(' ') || text.starts_with('\t') { + return Err(token.error(ExtraLeadingWhitespace)); + } + } + } + + continued = line.is_continuation(); + } + + Ok(()) + } + + fn analyze_assignment(&self, assignment: &Assignment<'a>) -> CompilationResult<'a, ()> { + if self.assignments.contains_key(assignment.name.lexeme()) { + return Err(assignment.name.token().error(DuplicateVariable { + variable: assignment.name.lexeme(), + })); + } + Ok(()) + } + + fn analyze_alias(&self, alias: &Alias<'a>) -> CompilationResult<'a, ()> { + let name = alias.name.lexeme(); + + if let Some(original) = self.aliases.get(name) { + return Err(alias.name.token().error(DuplicateAlias { + alias: name, + first: original.line_number(), + })); + } + + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + analysis_error! { + name: duplicate_alias, + input: "alias foo = bar\nalias foo = baz", + offset: 22, + line: 1, + column: 6, + width: 3, + kind: DuplicateAlias { alias: "foo", first: 0 }, + } + + analysis_error! { + name: unknown_alias_target, + input: "alias foo = bar\n", + offset: 6, + line: 0, + column: 6, + width: 3, + kind: UnknownAliasTarget {alias: "foo", target: "bar"}, + } + + analysis_error! { + name: alias_shadows_recipe_before, + input: "bar: \n echo bar\nalias foo = bar\nfoo:\n echo foo", + offset: 23, + line: 2, + column: 6, + width: 3, + kind: AliasShadowsRecipe {alias: "foo", recipe_line: 3}, + } + + analysis_error! { + name: alias_shadows_recipe_after, + input: "foo:\n echo foo\nalias foo = bar\nbar:\n echo bar", + offset: 22, + line: 2, + column: 6, + width: 3, + kind: AliasShadowsRecipe { alias: "foo", recipe_line: 0 }, + } + + analysis_error! 
{ + name: required_after_default, + input: "hello arg='foo' bar:", + offset: 16, + line: 0, + column: 16, + width: 3, + kind: RequiredParameterFollowsDefaultParameter{parameter: "bar"}, + } + + analysis_error! { + name: duplicate_parameter, + input: "a b b:", + offset: 4, + line: 0, + column: 4, + width: 1, + kind: DuplicateParameter{recipe: "a", parameter: "b"}, + } + + analysis_error! { + name: duplicate_variadic_parameter, + input: "a b +b:", + offset: 5, + line: 0, + column: 5, + width: 1, + kind: DuplicateParameter{recipe: "a", parameter: "b"}, + } + + analysis_error! { + name: parameter_shadows_varible, + input: "foo = \"h\"\na foo:", + offset: 12, + line: 1, + column: 2, + width: 3, + kind: ParameterShadowsVariable{parameter: "foo"}, + } + + analysis_error! { + name: dependency_has_parameters, + input: "foo arg:\nb: foo", + offset: 12, + line: 1, + column: 3, + width: 3, + kind: DependencyHasParameters{recipe: "b", dependency: "foo"}, + } + + analysis_error! { + name: duplicate_dependency, + input: "a b c: b c z z", + offset: 13, + line: 0, + column: 13, + width: 1, + kind: DuplicateDependency{recipe: "a", dependency: "z"}, + } + + analysis_error! { + name: duplicate_recipe, + input: "a:\nb:\na:", + offset: 6, + line: 2, + column: 0, + width: 1, + kind: DuplicateRecipe{recipe: "a", first: 0}, + } + + analysis_error! { + name: duplicate_variable, + input: "a = \"0\"\na = \"0\"", + offset: 8, + line: 1, + column: 0, + width: 1, + kind: DuplicateVariable{variable: "a"}, + } + + analysis_error! 
{ + name: extra_whitespace, + input: "a:\n blah\n blarg", + offset: 10, + line: 2, + column: 1, + width: 6, + kind: ExtraLeadingWhitespace, + } +} diff --git a/src/assignment.rs b/src/assignment.rs new file mode 100644 index 0000000..20007d3 --- /dev/null +++ b/src/assignment.rs @@ -0,0 +1,18 @@ +use crate::common::*; + +/// An assignment, e.g `foo := bar` +#[derive(Debug, PartialEq)] +pub(crate) struct Assignment<'src> { + /// Assignment was prefixed by the `export` keyword + pub(crate) export: bool, + /// Left-hand side of the assignment + pub(crate) name: Name<'src>, + /// Right-hand side of the assignment + pub(crate) expression: Expression<'src>, +} + +impl<'src> Keyed<'src> for Assignment<'src> { + fn key(&self) -> &'src str { + self.name.lexeme() + } +} diff --git a/src/assignment_evaluator.rs b/src/assignment_evaluator.rs index 4fb410c..674834d 100644 --- a/src/assignment_evaluator.rs +++ b/src/assignment_evaluator.rs @@ -1,31 +1,29 @@ use crate::common::*; pub(crate) struct AssignmentEvaluator<'a: 'b, 'b> { - pub(crate) assignments: &'b BTreeMap<&'a str, Expression<'a>>, + pub(crate) assignments: &'b BTreeMap<&'a str, Assignment<'a>>, pub(crate) invocation_directory: &'b Result, pub(crate) dotenv: &'b BTreeMap, pub(crate) dry_run: bool, - pub(crate) evaluated: BTreeMap<&'a str, String>, - pub(crate) exports: &'b BTreeSet<&'a str>, + pub(crate) evaluated: BTreeMap<&'a str, (bool, String)>, pub(crate) overrides: &'b BTreeMap<&'b str, &'b str>, pub(crate) quiet: bool, - pub(crate) scope: &'b BTreeMap<&'a str, String>, + pub(crate) scope: &'b BTreeMap<&'a str, (bool, String)>, pub(crate) shell: &'b str, } impl<'a, 'b> AssignmentEvaluator<'a, 'b> { pub(crate) fn evaluate_assignments( - assignments: &BTreeMap<&'a str, Expression<'a>>, + assignments: &BTreeMap<&'a str, Assignment<'a>>, invocation_directory: &Result, dotenv: &'b BTreeMap, overrides: &BTreeMap<&str, &str>, quiet: bool, shell: &'a str, dry_run: bool, - ) -> RunResult<'a, BTreeMap<&'a str, String>> { 
+ ) -> RunResult<'a, BTreeMap<&'a str, (bool, String)>> { let mut evaluator = AssignmentEvaluator { evaluated: empty(), - exports: &empty(), scope: &empty(), assignments, invocation_directory, @@ -46,13 +44,13 @@ impl<'a, 'b> AssignmentEvaluator<'a, 'b> { pub(crate) fn evaluate_line( &mut self, line: &[Fragment<'a>], - arguments: &BTreeMap<&str, Cow>, + arguments: &BTreeMap<&'a str, Cow>, ) -> RunResult<'a, String> { let mut evaluated = String::new(); for fragment in line { - match *fragment { - Fragment::Text { ref text } => evaluated += text.lexeme(), - Fragment::Expression { ref expression } => { + match fragment { + Fragment::Text { token } => evaluated += token.lexeme(), + Fragment::Interpolation { expression } => { evaluated += &self.evaluate_expression(expression, arguments)?; } } @@ -65,12 +63,14 @@ impl<'a, 'b> AssignmentEvaluator<'a, 'b> { return Ok(()); } - if let Some(expression) = self.assignments.get(name) { + if let Some(assignment) = self.assignments.get(name) { if let Some(value) = self.overrides.get(name) { - self.evaluated.insert(name, value.to_string()); + self + .evaluated + .insert(name, (assignment.export, value.to_string())); } else { - let value = self.evaluate_expression(expression, &empty())?; - self.evaluated.insert(name, value); + let value = self.evaluate_expression(&assignment.expression, &empty())?; + self.evaluated.insert(name, (assignment.export, value)); } } else { return Err(RuntimeError::Internal { @@ -84,29 +84,29 @@ impl<'a, 'b> AssignmentEvaluator<'a, 'b> { pub(crate) fn evaluate_expression( &mut self, expression: &Expression<'a>, - arguments: &BTreeMap<&str, Cow>, + arguments: &BTreeMap<&'a str, Cow>, ) -> RunResult<'a, String> { - match *expression { + match expression { Expression::Variable { name, .. 
} => { - if self.evaluated.contains_key(name) { - Ok(self.evaluated[name].clone()) - } else if self.scope.contains_key(name) { - Ok(self.scope[name].clone()) - } else if self.assignments.contains_key(name) { - self.evaluate_assignment(name)?; - Ok(self.evaluated[name].clone()) - } else if arguments.contains_key(name) { - Ok(arguments[name].to_string()) + let variable = name.lexeme(); + if self.evaluated.contains_key(variable) { + Ok(self.evaluated[variable].1.clone()) + } else if self.scope.contains_key(variable) { + Ok(self.scope[variable].1.clone()) + } else if self.assignments.contains_key(variable) { + self.evaluate_assignment(variable)?; + Ok(self.evaluated[variable].1.clone()) + } else if arguments.contains_key(variable) { + Ok(arguments[variable].to_string()) } else { Err(RuntimeError::Internal { - message: format!("attempted to evaluate undefined variable `{}`", name), + message: format!("attempted to evaluate undefined variable `{}`", variable), }) } } Expression::Call { - name, - arguments: ref call_arguments, - ref token, + function, + arguments: call_arguments, } => { let call_arguments = call_arguments .iter() @@ -116,20 +116,20 @@ impl<'a, 'b> AssignmentEvaluator<'a, 'b> { invocation_directory: &self.invocation_directory, dotenv: self.dotenv, }; - Function::evaluate(token, name, &context, &call_arguments) + Function::evaluate(*function, &context, &call_arguments) } - Expression::String { ref cooked_string } => Ok(cooked_string.cooked.to_string()), - Expression::Backtick { raw, ref token } => { + Expression::StringLiteral { string_literal } => Ok(string_literal.cooked.to_string()), + Expression::Backtick { contents, token } => { if self.dry_run { - Ok(format!("`{}`", raw)) + Ok(format!("`{}`", contents)) } else { - Ok(self.run_backtick(self.dotenv, raw, token)?) + Ok(self.run_backtick(self.dotenv, contents, token)?) 
} } - Expression::Concatination { ref lhs, ref rhs } => { + Expression::Concatination { lhs, rhs } => { Ok(self.evaluate_expression(lhs, arguments)? + &self.evaluate_expression(rhs, arguments)?) } - Expression::Group { ref expression } => self.evaluate_expression(&expression, arguments), + Expression::Group { contents } => self.evaluate_expression(contents, arguments), } } @@ -143,7 +143,7 @@ impl<'a, 'b> AssignmentEvaluator<'a, 'b> { cmd.arg("-cu").arg(raw); - cmd.export_environment_variables(self.scope, dotenv, self.exports)?; + cmd.export_environment_variables(self.scope, dotenv)?; cmd.stdin(process::Stdio::inherit()); @@ -163,13 +163,13 @@ impl<'a, 'b> AssignmentEvaluator<'a, 'b> { } #[cfg(test)] -mod test { +mod tests { use super::*; - use crate::testing::parse; + use crate::testing::compile; #[test] fn backtick_code() { - match parse("a:\n echo {{`f() { return 100; }; f`}}") + match compile("a:\n echo {{`f() { return 100; }; f`}}") .run(&["a"], &Default::default()) .unwrap_err() { @@ -198,7 +198,7 @@ recipe: ..Default::default() }; - match parse(text).run(&["recipe"], &config).unwrap_err() { + match compile(text).run(&["recipe"], &config).unwrap_err() { RuntimeError::Backtick { token, output_error: OutputError::Code(_), diff --git a/src/assignment_resolver.rs b/src/assignment_resolver.rs index 3d015ce..9dae818 100644 --- a/src/assignment_resolver.rs +++ b/src/assignment_resolver.rs @@ -3,8 +3,7 @@ use crate::common::*; use CompilationErrorKind::*; pub(crate) struct AssignmentResolver<'a: 'b, 'b> { - assignments: &'b BTreeMap<&'a str, Expression<'a>>, - assignment_tokens: &'b BTreeMap<&'a str, Token<'a>>, + assignments: &'b BTreeMap<&'a str, Assignment<'a>>, stack: Vec<&'a str>, seen: BTreeSet<&'a str>, evaluated: BTreeSet<&'a str>, @@ -12,15 +11,13 @@ pub(crate) struct AssignmentResolver<'a: 'b, 'b> { impl<'a: 'b, 'b> AssignmentResolver<'a, 'b> { pub(crate) fn resolve_assignments( - assignments: &BTreeMap<&'a str, Expression<'a>>, - assignment_tokens: 
&BTreeMap<&'a str, Token<'a>>, + assignments: &BTreeMap<&'a str, Assignment<'a>>, ) -> CompilationResult<'a, ()> { let mut resolver = AssignmentResolver { stack: empty(), seen: empty(), evaluated: empty(), assignments, - assignment_tokens, }; for name in assignments.keys() { @@ -38,13 +35,13 @@ impl<'a: 'b, 'b> AssignmentResolver<'a, 'b> { self.seen.insert(name); self.stack.push(name); - if let Some(expression) = self.assignments.get(name) { - self.resolve_expression(expression)?; + if let Some(assignment) = self.assignments.get(name) { + self.resolve_expression(&assignment.expression)?; self.evaluated.insert(name); } else { let message = format!("attempted to resolve unknown assignment `{}`", name); return Err(CompilationError { - text: "", + src: "", offset: 0, line: 0, column: 0, @@ -57,43 +54,43 @@ impl<'a: 'b, 'b> AssignmentResolver<'a, 'b> { fn resolve_expression(&mut self, expression: &Expression<'a>) -> CompilationResult<'a, ()> { match expression { - Expression::Variable { name, ref token } => { - if self.evaluated.contains(name) { + Expression::Variable { name } => { + let variable = name.lexeme(); + if self.evaluated.contains(variable) { return Ok(()); - } else if self.seen.contains(name) { - let token = &self.assignment_tokens[name]; - self.stack.push(name); + } else if self.seen.contains(variable) { + let token = self.assignments[variable].name.token(); + self.stack.push(variable); return Err(token.error(CircularVariableDependency { - variable: name, + variable: variable, circle: self.stack.clone(), })); - } else if self.assignments.contains_key(name) { - self.resolve_assignment(name)?; + } else if self.assignments.contains_key(variable) { + self.resolve_assignment(variable)?; } else { - return Err(token.error(UndefinedVariable { variable: name })); + return Err(name.token().error(UndefinedVariable { variable })); } } Expression::Call { - ref token, - ref arguments, - .. 
- } => Function::resolve(token, arguments.len())?, - Expression::Concatination { ref lhs, ref rhs } => { + function, + arguments, + } => Function::resolve(&function.token(), arguments.len())?, + Expression::Concatination { lhs, rhs } => { self.resolve_expression(lhs)?; self.resolve_expression(rhs)?; } - Expression::String { .. } | Expression::Backtick { .. } => {} - Expression::Group { expression } => self.resolve_expression(expression)?, + Expression::StringLiteral { .. } | Expression::Backtick { .. } => {} + Expression::Group { contents } => self.resolve_expression(contents)?, } Ok(()) } } #[cfg(test)] -mod test { +mod tests { use super::*; - error_test! { + analysis_error! { name: circular_variable_dependency, input: "a = b\nb = a", offset: 0, @@ -103,7 +100,7 @@ mod test { kind: CircularVariableDependency{variable: "a", circle: vec!["a", "b", "a"]}, } - error_test! { + analysis_error! { name: self_variable_dependency, input: "a = a", offset: 0, @@ -113,7 +110,7 @@ mod test { kind: CircularVariableDependency{variable: "a", circle: vec!["a", "a"]}, } - error_test! { + analysis_error! { name: unknown_expression_variable, input: "x = yy", offset: 4, @@ -123,7 +120,7 @@ mod test { kind: UndefinedVariable{variable: "yy"}, } - error_test! { + analysis_error! 
{ name: unknown_function, input: "a = foo()", offset: 4, @@ -132,5 +129,4 @@ mod test { width: 3, kind: UnknownFunction{function: "foo"}, } - } diff --git a/src/command_ext.rs b/src/command_ext.rs index 0d913ac..e17d797 100644 --- a/src/command_ext.rs +++ b/src/command_ext.rs @@ -3,31 +3,27 @@ use crate::common::*; pub(crate) trait CommandExt { fn export_environment_variables<'a>( &mut self, - scope: &BTreeMap<&'a str, String>, + scope: &BTreeMap<&'a str, (bool, String)>, dotenv: &BTreeMap, - exports: &BTreeSet<&'a str>, ) -> RunResult<'a, ()>; } impl CommandExt for Command { fn export_environment_variables<'a>( &mut self, - scope: &BTreeMap<&'a str, String>, + scope: &BTreeMap<&'a str, (bool, String)>, dotenv: &BTreeMap, - exports: &BTreeSet<&'a str>, ) -> RunResult<'a, ()> { for (name, value) in dotenv { self.env(name, value); } - for name in exports { - if let Some(value) = scope.get(name) { + + for (name, (export, value)) in scope { + if *export { self.env(name, value); - } else { - return Err(RuntimeError::Internal { - message: format!("scope does not contain exported variable `{}`", name), - }); } } + Ok(()) } } diff --git a/src/common.rs b/src/common.rs index e9dea2e..8d5653c 100644 --- a/src/common.rs +++ b/src/common.rs @@ -7,8 +7,10 @@ pub(crate) use std::{ env, ffi::OsStr, fmt::{self, Display, Formatter}, - fs, io, iter, - ops::{Range, RangeInclusive}, + fs, + io::{self, Write}, + iter::{self, FromIterator}, + ops::{Deref, Range, RangeInclusive}, path::{Path, PathBuf}, process::{self, Command}, str::{self, Chars}, @@ -23,7 +25,7 @@ pub(crate) use log::warn; pub(crate) use unicode_width::UnicodeWidthChar; // modules -pub(crate) use crate::search; +pub(crate) use crate::{keyword, search}; // modules used in tests #[cfg(test)] @@ -35,39 +37,35 @@ pub(crate) use crate::{ write_message_context::write_message_context, }; -// structs and enums +// traits pub(crate) use crate::{ - alias::Alias, alias_resolver::AliasResolver, 
assignment_evaluator::AssignmentEvaluator, - assignment_resolver::AssignmentResolver, color::Color, compilation_error::CompilationError, - compilation_error_kind::CompilationErrorKind, config::Config, config_error::ConfigError, - count::Count, enclosure::Enclosure, expression::Expression, fragment::Fragment, - function::Function, function_context::FunctionContext, functions::Functions, - interrupt_guard::InterruptGuard, interrupt_handler::InterruptHandler, justfile::Justfile, - lexer::Lexer, list::List, output_error::OutputError, parameter::Parameter, parser::Parser, - platform::Platform, position::Position, recipe::Recipe, recipe_context::RecipeContext, - recipe_resolver::RecipeResolver, runtime_error::RuntimeError, search_error::SearchError, - shebang::Shebang, show_whitespace::ShowWhitespace, state::State, string_literal::StringLiteral, - subcommand::Subcommand, token::Token, token_kind::TokenKind, use_color::UseColor, - variables::Variables, verbosity::Verbosity, warning::Warning, + command_ext::CommandExt, compilation_result_ext::CompilationResultExt, keyed::Keyed, + ordinal::Ordinal, platform_interface::PlatformInterface, range_ext::RangeExt, }; +// structs and enums +pub(crate) use crate::{ + alias::Alias, alias_resolver::AliasResolver, analyzer::Analyzer, assignment::Assignment, + assignment_evaluator::AssignmentEvaluator, assignment_resolver::AssignmentResolver, color::Color, + compilation_error::CompilationError, compilation_error_kind::CompilationErrorKind, + compiler::Compiler, config::Config, config_error::ConfigError, count::Count, + enclosure::Enclosure, expression::Expression, fragment::Fragment, function::Function, + function_context::FunctionContext, functions::Functions, interrupt_guard::InterruptGuard, + interrupt_handler::InterruptHandler, item::Item, justfile::Justfile, lexer::Lexer, line::Line, + list::List, module::Module, name::Name, output_error::OutputError, parameter::Parameter, + parser::Parser, platform::Platform, position::Position, 
recipe::Recipe, + recipe_context::RecipeContext, recipe_resolver::RecipeResolver, runtime_error::RuntimeError, + search_error::SearchError, shebang::Shebang, show_whitespace::ShowWhitespace, state::State, + string_literal::StringLiteral, subcommand::Subcommand, table::Table, token::Token, + token_kind::TokenKind, use_color::UseColor, variables::Variables, verbosity::Verbosity, + warning::Warning, +}; + +// structs and enums used in tests +#[cfg(test)] +pub(crate) use crate::{node::Node, tree::Tree}; + +// type aliases pub(crate) type CompilationResult<'a, T> = Result>; - -pub(crate) type RunResult<'a, T> = Result>; - pub(crate) type ConfigResult = Result; - -#[allow(unused_imports)] -pub(crate) use std::io::prelude::*; - -#[allow(unused_imports)] -pub(crate) use crate::command_ext::CommandExt; - -#[allow(unused_imports)] -pub(crate) use crate::range_ext::RangeExt; - -#[allow(unused_imports)] -pub(crate) use crate::ordinal::Ordinal; - -#[allow(unused_imports)] -pub(crate) use crate::platform_interface::PlatformInterface; +pub(crate) type RunResult<'a, T> = Result>; diff --git a/src/compilation_error.rs b/src/compilation_error.rs index 0b3b955..fe55c95 100644 --- a/src/compilation_error.rs +++ b/src/compilation_error.rs @@ -2,7 +2,7 @@ use crate::common::*; #[derive(Debug, PartialEq)] pub(crate) struct CompilationError<'a> { - pub(crate) text: &'a str, + pub(crate) src: &'a str, pub(crate) offset: usize, pub(crate) line: usize, pub(crate) column: usize, @@ -213,7 +213,7 @@ impl<'a> Display for CompilationError<'a> { write_message_context( f, Color::fmt(f).error(), - self.text, + self.src, self.offset, self.line, self.column, diff --git a/src/compilation_result_ext.rs b/src/compilation_result_ext.rs new file mode 100644 index 0000000..c5a68ad --- /dev/null +++ b/src/compilation_result_ext.rs @@ -0,0 +1,23 @@ +use crate::common::*; + +pub(crate) trait CompilationResultExt { + fn expected(self, kinds: &[TokenKind]) -> Self; +} + +impl<'src, T> CompilationResultExt for 
CompilationResult<'src, T> { + fn expected(mut self, kinds: &[TokenKind]) -> Self { + if let Err(CompilationError { + kind: CompilationErrorKind::UnexpectedToken { + ref mut expected, .. + }, + .. + }) = &mut self + { + expected.extend_from_slice(kinds); + expected.sort(); + expected.dedup(); + } + + self + } +} diff --git a/src/compiler.rs b/src/compiler.rs new file mode 100644 index 0000000..5285a88 --- /dev/null +++ b/src/compiler.rs @@ -0,0 +1,13 @@ +use crate::common::*; + +pub(crate) struct Compiler; + +impl Compiler { + pub(crate) fn compile(text: &str) -> CompilationResult { + let tokens = Lexer::lex(text)?; + + let ast = Parser::parse(&tokens)?; + + Analyzer::analyze(ast) + } +} diff --git a/src/expression.rs b/src/expression.rs index d1ba9c5..0555877 100644 --- a/src/expression.rs +++ b/src/expression.rs @@ -1,55 +1,60 @@ use crate::common::*; +/// An expression. Note that the Just language grammar has both an +/// `expression` production of additions (`a + b`) and values, and a +/// `value` production of all other value types (for example strings, +/// function calls, and parenthetical groups). +/// +/// The parser parses both values and expressions into `Expression`s. 
#[derive(PartialEq, Debug)] -pub(crate) enum Expression<'a> { +pub(crate) enum Expression<'src> { + /// `contents` Backtick { - raw: &'a str, - token: Token<'a>, + contents: &'src str, + token: Token<'src>, }, + /// `name(arguments)` Call { - name: &'a str, - token: Token<'a>, - arguments: Vec>, + function: Name<'src>, + arguments: Vec>, }, + /// `lhs + rhs` Concatination { - lhs: Box>, - rhs: Box>, + lhs: Box>, + rhs: Box>, }, - String { - cooked_string: StringLiteral<'a>, - }, - Variable { - name: &'a str, - token: Token<'a>, - }, - Group { - expression: Box>, + /// `(contents)` + Group { contents: Box> }, + /// `"string_literal"` or `'string_literal'` + StringLiteral { + string_literal: StringLiteral<'src>, }, + /// `variable` + Variable { name: Name<'src> }, } -impl<'a> Expression<'a> { - pub(crate) fn variables(&'a self) -> Variables<'a> { +impl<'src> Expression<'src> { + pub(crate) fn variables<'expression>(&'expression self) -> Variables<'expression, 'src> { Variables::new(self) } - pub(crate) fn functions(&'a self) -> Functions<'a> { + pub(crate) fn functions<'expression>(&'expression self) -> Functions<'expression, 'src> { Functions::new(self) } } -impl<'a> Display for Expression<'a> { +impl<'src> Display for Expression<'src> { fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> { - match *self { - Expression::Backtick { raw, .. } => write!(f, "`{}`", raw)?, - Expression::Concatination { ref lhs, ref rhs } => write!(f, "{} + {}", lhs, rhs)?, - Expression::String { ref cooked_string } => write!(f, "{}", cooked_string)?, - Expression::Variable { name, .. } => write!(f, "{}", name)?, + match self { + Expression::Backtick { contents, .. } => write!(f, "`{}`", contents)?, + Expression::Concatination { lhs, rhs } => write!(f, "{} + {}", lhs, rhs)?, + Expression::StringLiteral { string_literal } => write!(f, "{}", string_literal)?, + Expression::Variable { name } => write!(f, "{}", name.lexeme())?, Expression::Call { - name, - ref arguments, - .. 
+ function, + arguments, } => { - write!(f, "{}(", name)?; + write!(f, "{}(", function.lexeme())?; for (i, argument) in arguments.iter().enumerate() { if i > 0 { write!(f, ", {}", argument)?; @@ -59,7 +64,7 @@ impl<'a> Display for Expression<'a> { } write!(f, ")")?; } - Expression::Group { ref expression } => write!(f, "({})", expression)?, + Expression::Group { contents } => write!(f, "({})", contents)?, } Ok(()) } diff --git a/src/fragment.rs b/src/fragment.rs index fed215f..48aec9f 100644 --- a/src/fragment.rs +++ b/src/fragment.rs @@ -1,16 +1,10 @@ use crate::common::*; +/// A line fragment consisting either of… #[derive(PartialEq, Debug)] -pub(crate) enum Fragment<'a> { - Text { text: Token<'a> }, - Expression { expression: Expression<'a> }, -} - -impl<'a> Fragment<'a> { - pub(crate) fn continuation(&self) -> bool { - match *self { - Fragment::Text { ref text } => text.lexeme().ends_with('\\'), - _ => false, - } - } +pub(crate) enum Fragment<'src> { + /// …raw text… + Text { token: Token<'src> }, + /// …an interpolation containing `expression`. 
+ Interpolation { expression: Expression<'src> }, } diff --git a/src/function.rs b/src/function.rs index dec01fe..edfe94e 100644 --- a/src/function.rs +++ b/src/function.rs @@ -56,26 +56,26 @@ impl Function { } pub(crate) fn evaluate<'a>( - token: &Token<'a>, - name: &'a str, + function_name: Name<'a>, context: &FunctionContext, arguments: &[String], ) -> RunResult<'a, String> { + let name = function_name.lexeme(); if let Some(function) = FUNCTIONS.get(name) { use self::Function::*; let argc = arguments.len(); match (function, argc) { (&Nullary(f), 0) => f(context).map_err(|message| RuntimeError::FunctionCall { - token: token.clone(), + function: function_name, message, }), (&Unary(f), 1) => f(context, &arguments[0]).map_err(|message| RuntimeError::FunctionCall { - token: token.clone(), + function: function_name, message, }), (&Binary(f), 2) => { f(context, &arguments[0], &arguments[1]).map_err(|message| RuntimeError::FunctionCall { - token: token.clone(), + function: function_name, message, }) } diff --git a/src/functions.rs b/src/functions.rs index 209818f..c459afe 100644 --- a/src/functions.rs +++ b/src/functions.rs @@ -1,34 +1,36 @@ use crate::common::*; -pub(crate) struct Functions<'a> { - stack: Vec<&'a Expression<'a>>, +pub(crate) struct Functions<'expression, 'src> { + stack: Vec<&'expression Expression<'src>>, } -impl<'a> Functions<'a> { - pub(crate) fn new(root: &'a Expression<'a>) -> Functions<'a> { +impl<'expression, 'src> Functions<'expression, 'src> { + pub(crate) fn new(root: &'expression Expression<'src>) -> Functions<'expression, 'src> { Functions { stack: vec![root] } } } -impl<'a> Iterator for Functions<'a> { - type Item = (&'a Token<'a>, usize); +impl<'expression, 'src> Iterator for Functions<'expression, 'src> { + type Item = (Token<'src>, usize); fn next(&mut self) -> Option { match self.stack.pop() { None - | Some(Expression::String { .. }) + | Some(Expression::StringLiteral { .. }) | Some(Expression::Backtick { .. 
}) | Some(Expression::Variable { .. }) => None, Some(Expression::Call { - token, arguments, .. - }) => Some((token, arguments.len())), + function, + arguments, + .. + }) => Some((function.token(), arguments.len())), Some(Expression::Concatination { lhs, rhs }) => { self.stack.push(lhs); self.stack.push(rhs); self.next() } - Some(Expression::Group { expression }) => { - self.stack.push(expression); + Some(Expression::Group { contents }) => { + self.stack.push(contents); self.next() } } diff --git a/src/item.rs b/src/item.rs new file mode 100644 index 0000000..1309a1b --- /dev/null +++ b/src/item.rs @@ -0,0 +1,9 @@ +use crate::common::*; + +/// A single top-level item +#[derive(Debug)] +pub(crate) enum Item<'src> { + Alias(Alias<'src>), + Assignment(Assignment<'src>), + Recipe(Recipe<'src>), +} diff --git a/src/justfile.rs b/src/justfile.rs index cdb59b2..f28bec4 100644 --- a/src/justfile.rs +++ b/src/justfile.rs @@ -1,11 +1,10 @@ use crate::common::*; -#[derive(Debug)] +#[derive(Debug, PartialEq)] pub(crate) struct Justfile<'a> { - pub(crate) recipes: BTreeMap<&'a str, Recipe<'a>>, - pub(crate) assignments: BTreeMap<&'a str, Expression<'a>>, - pub(crate) exports: BTreeSet<&'a str>, - pub(crate) aliases: BTreeMap<&'a str, Alias<'a>>, + pub(crate) recipes: Table<'a, Recipe<'a>>, + pub(crate) assignments: Table<'a, Assignment<'a>>, + pub(crate) aliases: Table<'a, Alias<'a>>, pub(crate) warnings: Vec>, } @@ -14,7 +13,7 @@ impl<'a> Justfile<'a> { let mut first: Option<&Recipe> = None; for recipe in self.recipes.values() { if let Some(first_recipe) = first { - if recipe.line_number < first_recipe.line_number { + if recipe.line_number() < first_recipe.line_number() { first = Some(recipe) } } else { @@ -75,7 +74,7 @@ impl<'a> Justfile<'a> { width = cmp::max(name.len(), width); } - for (name, value) in scope { + for (name, (_export, value)) in scope { println!("{0:1$} := \"{2}\"", name, width, value); } return Ok(()); @@ -94,7 +93,7 @@ impl<'a> Justfile<'a> { let 
argument_count = cmp::min(tail.len(), recipe.max_arguments()); if !argument_range.range_contains(&argument_count) { return Err(RuntimeError::ArgumentCountMismatch { - recipe: recipe.name, + recipe: recipe.name(), parameters: recipe.parameters.iter().collect(), found: tail.len(), min: recipe.min_arguments(), @@ -140,7 +139,7 @@ impl<'a> Justfile<'a> { if let Some(recipe) = self.recipes.get(name) { Some(recipe) } else if let Some(alias) = self.aliases.get(name) { - self.recipes.get(alias.target) + self.recipes.get(alias.target.lexeme()) } else { None } @@ -155,12 +154,13 @@ impl<'a> Justfile<'a> { ran: &mut BTreeSet<&'a str>, ) -> RunResult<()> { for dependency_name in &recipe.dependencies { - if !ran.contains(dependency_name) { - self.run_recipe(context, &self.recipes[dependency_name], &[], dotenv, ran)?; + let lexeme = dependency_name.lexeme(); + if !ran.contains(lexeme) { + self.run_recipe(context, &self.recipes[lexeme], &[], dotenv, ran)?; } } - recipe.run(context, arguments, dotenv, &self.exports)?; - ran.insert(recipe.name); + recipe.run(context, arguments, dotenv)?; + ran.insert(recipe.name()); Ok(()) } } @@ -168,11 +168,11 @@ impl<'a> Justfile<'a> { impl<'a> Display for Justfile<'a> { fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> { let mut items = self.recipes.len() + self.assignments.len() + self.aliases.len(); - for (name, expression) in &self.assignments { - if self.exports.contains(name) { + for (name, assignment) in &self.assignments { + if assignment.export { write!(f, "export ")?; } - write!(f, "{} := {}", name, expression)?; + write!(f, "{} := {}", name, assignment.expression)?; items -= 1; if items != 0 { write!(f, "\n\n")?; @@ -197,15 +197,15 @@ impl<'a> Display for Justfile<'a> { } #[cfg(test)] -mod test { +mod tests { use super::*; use crate::runtime_error::RuntimeError::*; - use crate::testing::parse; + use crate::testing::compile; #[test] fn unknown_recipes() { - match parse("a:\nb:\nc:") + match compile("a:\nb:\nc:") .run(&["a", 
"x", "y", "z"], &Default::default()) .unwrap_err() { @@ -238,7 +238,7 @@ a: x "; - match parse(text).run(&["a"], &Default::default()).unwrap_err() { + match compile(text).run(&["a"], &Default::default()).unwrap_err() { Code { recipe, line_number, @@ -254,7 +254,7 @@ a: #[test] fn code_error() { - match parse("fail:\n @exit 100") + match compile("fail:\n @exit 100") .run(&["fail"], &Default::default()) .unwrap_err() { @@ -277,7 +277,7 @@ a: a return code: @x() { {{return}} {{code + "0"}}; }; x"#; - match parse(text) + match compile(text) .run(&["a", "return", "15"], &Default::default()) .unwrap_err() { @@ -296,7 +296,7 @@ a return code: #[test] fn missing_some_arguments() { - match parse("a b c d:") + match compile("a b c d:") .run(&["a", "b", "c"], &Default::default()) .unwrap_err() { @@ -307,7 +307,10 @@ a return code: min, max, } => { - let param_names = parameters.iter().map(|p| p.name).collect::>(); + let param_names = parameters + .iter() + .map(|p| p.name.lexeme()) + .collect::>(); assert_eq!(recipe, "a"); assert_eq!(param_names, ["b", "c", "d"]); assert_eq!(found, 2); @@ -320,7 +323,7 @@ a return code: #[test] fn missing_some_arguments_variadic() { - match parse("a b c +d:") + match compile("a b c +d:") .run(&["a", "B", "C"], &Default::default()) .unwrap_err() { @@ -331,7 +334,10 @@ a return code: min, max, } => { - let param_names = parameters.iter().map(|p| p.name).collect::>(); + let param_names = parameters + .iter() + .map(|p| p.name.lexeme()) + .collect::>(); assert_eq!(recipe, "a"); assert_eq!(param_names, ["b", "c", "d"]); assert_eq!(found, 2); @@ -344,7 +350,7 @@ a return code: #[test] fn missing_all_arguments() { - match parse("a b c d:\n echo {{b}}{{c}}{{d}}") + match compile("a b c d:\n echo {{b}}{{c}}{{d}}") .run(&["a"], &Default::default()) .unwrap_err() { @@ -355,7 +361,10 @@ a return code: min, max, } => { - let param_names = parameters.iter().map(|p| p.name).collect::>(); + let param_names = parameters + .iter() + .map(|p| p.name.lexeme()) + 
.collect::>(); assert_eq!(recipe, "a"); assert_eq!(param_names, ["b", "c", "d"]); assert_eq!(found, 0); @@ -368,7 +377,7 @@ a return code: #[test] fn missing_some_defaults() { - match parse("a b c d='hello':") + match compile("a b c d='hello':") .run(&["a", "b"], &Default::default()) .unwrap_err() { @@ -379,7 +388,10 @@ a return code: min, max, } => { - let param_names = parameters.iter().map(|p| p.name).collect::>(); + let param_names = parameters + .iter() + .map(|p| p.name.lexeme()) + .collect::>(); assert_eq!(recipe, "a"); assert_eq!(param_names, ["b", "c", "d"]); assert_eq!(found, 1); @@ -392,7 +404,7 @@ a return code: #[test] fn missing_all_defaults() { - match parse("a b c='r' d='h':") + match compile("a b c='r' d='h':") .run(&["a"], &Default::default()) .unwrap_err() { @@ -403,7 +415,10 @@ a return code: min, max, } => { - let param_names = parameters.iter().map(|p| p.name).collect::>(); + let param_names = parameters + .iter() + .map(|p| p.name.lexeme()) + .collect::>(); assert_eq!(recipe, "a"); assert_eq!(param_names, ["b", "c", "d"]); assert_eq!(found, 0); @@ -419,7 +434,7 @@ a return code: let mut config: Config = Default::default(); config.overrides.insert("foo", "bar"); config.overrides.insert("baz", "bob"); - match parse("a:\n echo {{`f() { return 100; }; f`}}") + match compile("a:\n echo {{`f() { return 100; }; f`}}") .run(&["a"], &config) .unwrap_err() { @@ -447,7 +462,7 @@ wut: ..Default::default() }; - match parse(text).run(&["wut"], &config).unwrap_err() { + match compile(text).run(&["wut"], &config).unwrap_err() { Code { code: _, line_number, @@ -459,4 +474,369 @@ wut: other => panic!("expected a recipe code errror, but got: {}", other), } } + + macro_rules! 
test { + ($name:ident, $input:expr, $expected:expr $(,)*) => { + #[test] + fn $name() { + test($input, $expected); + } + }; + } + + fn test(input: &str, expected: &str) { + let justfile = compile(input); + let actual = format!("{:#}", justfile); + assert_eq!(actual, expected); + println!("Re-parsing..."); + let reparsed = compile(&actual); + let redumped = format!("{:#}", reparsed); + assert_eq!(redumped, actual); + } + + test! { + parse_empty, + " + +# hello + + + ", + "", + } + + test! { + parse_string_default, + r#" + +foo a="b\t": + + + "#, + r#"foo a="b\t":"#, + } + + test! { + parse_multiple, + r#" +a: +b: +"#, + r#"a: + +b:"#, + } + + test! { + parse_variadic, + r#" + +foo +a: + + + "#, + r#"foo +a:"#, + } + + test! { + parse_variadic_string_default, + r#" + +foo +a="Hello": + + + "#, + r#"foo +a="Hello":"#, + } + + test! { + parse_raw_string_default, + r#" + +foo a='b\t': + + + "#, + r#"foo a='b\t':"#, + } + + test! { + parse_export, + r#" +export a := "hello" + + "#, + r#"export a := "hello""#, + } + + test! { + parse_alias_after_target, + r#" +foo: + echo a +alias f := foo +"#, +r#"alias f := foo + +foo: + echo a"# + } + + test! { + parse_alias_before_target, + r#" +alias f := foo +foo: + echo a +"#, +r#"alias f := foo + +foo: + echo a"# + } + + test! { + parse_alias_with_comment, + r#" +alias f := foo #comment +foo: + echo a +"#, +r#"alias f := foo + +foo: + echo a"# + } + + test! { + parse_complex, + " +x: +y: +z: +foo := \"xx\" +bar := foo +goodbye := \"y\" +hello a b c : x y z #hello + #! blah + #blarg + {{ foo + bar}}abc{{ goodbye\t + \"x\" }}xyz + 1 + 2 + 3 +", + "bar := foo + +foo := \"xx\" + +goodbye := \"y\" + +hello a b c: x y z + #! blah + #blarg + {{foo + bar}}abc{{goodbye + \"x\"}}xyz + 1 + 2 + 3 + +x: + +y: + +z:" + } + + test! 
{ + parse_shebang, + " +practicum := 'hello' +install: +\t#!/bin/sh +\tif [[ -f {{practicum}} ]]; then +\t\treturn +\tfi +", + "practicum := 'hello' + +install: + #!/bin/sh + if [[ -f {{practicum}} ]]; then + \treturn + fi", + } + + test! { + parse_simple_shebang, + "a:\n #!\n print(1)", + "a:\n #!\n print(1)", + } + + test! { + parse_assignments, + r#"a := "0" +c := a + b + a + b +b := "1" +"#, + r#"a := "0" + +b := "1" + +c := a + b + a + b"#, + } + + test! { + parse_assignment_backticks, + "a := `echo hello` +c := a + b + a + b +b := `echo goodbye`", + "a := `echo hello` + +b := `echo goodbye` + +c := a + b + a + b", + } + + test! { + parse_interpolation_backticks, + r#"a: + echo {{ `echo hello` + "blarg" }} {{ `echo bob` }}"#, + r#"a: + echo {{`echo hello` + "blarg"}} {{`echo bob`}}"#, + } + + test! { + eof_test, + "x:\ny:\nz:\na b c: x y z", + "a b c: x y z\n\nx:\n\ny:\n\nz:", + } + + test! { + string_quote_escape, + r#"a := "hello\"""#, + r#"a := "hello\"""#, + } + + test! { + string_escapes, + r#"a := "\n\t\r\"\\""#, + r#"a := "\n\t\r\"\\""#, + } + + test! { + parameters, + "a b c: + {{b}} {{c}}", + "a b c: + {{b}} {{c}}", + } + + test! { + unary_functions, + " +x := arch() + +a: + {{os()}} {{os_family()}}", + "x := arch() + +a: + {{os()}} {{os_family()}}", + } + + test! { + env_functions, + r#" +x := env_var('foo',) + +a: + {{env_var_or_default('foo' + 'bar', 'baz',)}} {{env_var(env_var("baz"))}}"#, + r#"x := env_var('foo') + +a: + {{env_var_or_default('foo' + 'bar', 'baz')}} {{env_var(env_var("baz"))}}"#, + } + + test! { + parameter_default_string, + r#" +f x="abc": +"#, + r#"f x="abc":"#, + } + + test! { + parameter_default_raw_string, + r#" +f x='abc': +"#, + r#"f x='abc':"#, + } + + test! { + parameter_default_backtick, + r#" +f x=`echo hello`: +"#, + r#"f x=`echo hello`:"#, + } + + test! { + parameter_default_concatination_string, + r#" +f x=(`echo hello` + "foo"): +"#, + r#"f x=(`echo hello` + "foo"):"#, + } + + test! 
{ + parameter_default_concatination_variable, + r#" +x := "10" +f y=(`echo hello` + x) +z="foo": +"#, + r#"x := "10" + +f y=(`echo hello` + x) +z="foo":"#, + } + + test! { + parameter_default_multiple, + r#" +x := "10" +f y=(`echo hello` + x) +z=("foo" + "bar"): +"#, + r#"x := "10" + +f y=(`echo hello` + x) +z=("foo" + "bar"):"#, + } + + test! { + concatination_in_group, + "x := ('0' + '1')", + "x := ('0' + '1')", + } + + test! { + string_in_group, + "x := ('0' )", + "x := ('0')", + } + + #[rustfmt::skip] + test! { + escaped_dos_newlines, + "@spam:\r +\t{ \\\r +\t\tfiglet test; \\\r +\t\tcargo build --color always 2>&1; \\\r +\t\tcargo test --color always -- --color always 2>&1; \\\r +\t} | less\r +", +"@spam: + { \\ + \tfiglet test; \\ + \tcargo build --color always 2>&1; \\ + \tcargo test --color always -- --color always 2>&1; \\ + } | less", + } } diff --git a/src/keyed.rs b/src/keyed.rs new file mode 100644 index 0000000..dc679a9 --- /dev/null +++ b/src/keyed.rs @@ -0,0 +1,3 @@ +pub(crate) trait Keyed<'key> { + fn key(&self) -> &'key str; +} diff --git a/src/keyword.rs b/src/keyword.rs new file mode 100644 index 0000000..0fa4ba0 --- /dev/null +++ b/src/keyword.rs @@ -0,0 +1,2 @@ +pub(crate) const ALIAS: &str = "alias"; +pub(crate) const EXPORT: &str = "export"; diff --git a/src/lexer.rs b/src/lexer.rs index 049e5fd..17baaf6 100644 --- a/src/lexer.rs +++ b/src/lexer.rs @@ -5,12 +5,15 @@ use TokenKind::*; /// Just language lexer /// -/// `self.next` points to the next character to be lexed, and -/// the text between `self.token_start` and `self.token_end` contains -/// the current token being lexed. +/// The lexer proceeds character-by-character, as opposed to using +/// regular expressions to lex tokens or semi-tokens at a time. As a +/// result, it is verbose and straightforward. Just used to have a +/// regex-based lexer, which was slower and generally godawful. 
However, +/// this should not be taken as a slight against regular expressions, +/// the lexer was just idiosyncratically bad. pub(crate) struct Lexer<'a> { /// Source text - text: &'a str, + src: &'a str, /// Char iterator chars: Chars<'a>, /// Tokens @@ -21,19 +24,19 @@ pub(crate) struct Lexer<'a> { token_start: Position, /// Current token end token_end: Position, - /// Next character + /// Next character to be lexed next: Option, } impl<'a> Lexer<'a> { /// Lex `text` - pub(crate) fn lex(text: &str) -> CompilationResult> { - Lexer::new(text).tokenize() + pub(crate) fn lex(src: &str) -> CompilationResult> { + Lexer::new(src).tokenize() } /// Create a new Lexer to lex `text` - fn new(text: &'a str) -> Lexer<'a> { - let mut chars = text.chars(); + fn new(src: &'a str) -> Lexer<'a> { + let mut chars = src.chars(); let next = chars.next(); let start = Position { @@ -49,7 +52,7 @@ impl<'a> Lexer<'a> { token_end: start, chars, next, - text, + src, } } @@ -82,7 +85,7 @@ impl<'a> Lexer<'a> { /// Lexeme of in-progress token fn lexeme(&self) -> &'a str { - &self.text[self.token_start.offset..self.token_end.offset] + &self.src[self.token_start.offset..self.token_end.offset] } /// Length of current token @@ -102,7 +105,7 @@ impl<'a> Lexer<'a> { /// Un-lexed text fn rest(&self) -> &'a str { - &self.text[self.token_end.offset..] + &self.src[self.token_end.offset..] 
} /// Check if unlexed text begins with prefix @@ -145,7 +148,7 @@ impl<'a> Lexer<'a> { offset: self.token_start.offset, column: self.token_start.column, line: self.token_start.line, - text: self.text, + src: self.src, length: self.token_end.offset - self.token_start.offset, kind, }); @@ -158,7 +161,7 @@ impl<'a> Lexer<'a> { fn internal_error(&self, message: impl Into) -> CompilationError<'a> { // Use `self.token_end` as the location of the error CompilationError { - text: self.text, + src: self.src, offset: self.token_end.offset, line: self.token_end.line, column: self.token_end.column, @@ -184,7 +187,7 @@ impl<'a> Lexer<'a> { }; CompilationError { - text: self.text, + src: self.src, offset: self.token_start.offset, line: self.token_start.line, column: self.token_start.column, @@ -198,7 +201,7 @@ impl<'a> Lexer<'a> { interpolation_start: Position, ) -> CompilationError<'a> { CompilationError { - text: self.text, + src: self.src, offset: interpolation_start.offset, line: interpolation_start.line, column: interpolation_start.column, @@ -359,7 +362,7 @@ impl<'a> Lexer<'a> { ' ' | '\t' => self.lex_whitespace(), '\'' => self.lex_raw_string(), '"' => self.lex_cooked_string(), - 'a'..='z' | 'A'..='Z' | '_' => self.lex_name(), + 'a'..='z' | 'A'..='Z' | '_' => self.lex_identifier(), _ => { self.advance()?; Err(self.error(UnknownStartOfToken)) @@ -446,7 +449,6 @@ impl<'a> Lexer<'a> { /// Lex token beginning with `start` in indented state fn lex_indented(&mut self) -> CompilationResult<'a, ()> { self.state.push(State::Text); - self.token(Line); Ok(()) } @@ -513,8 +515,8 @@ impl<'a> Lexer<'a> { self.lex_double(Eol) } - /// Lex name: [a-zA-Z_][a-zA-Z0-9_]* - fn lex_name(&mut self) -> CompilationResult<'a, ()> { + /// Lex identifier: [a-zA-Z_][a-zA-Z0-9_]* + fn lex_identifier(&mut self) -> CompilationResult<'a, ()> { while self .next .map(|c| c.is_ascii_alphanumeric() || c == '-' || c == '_') @@ -523,7 +525,7 @@ impl<'a> Lexer<'a> { self.advance()?; } - self.token(Name); + 
self.token(Identifier); Ok(()) } @@ -725,19 +727,61 @@ mod tests { Whitespace => " ", // Empty lexemes - Line | Dedent | Eof => "", + Dedent | Eof => "", // Variable lexemes - Text | StringCooked | StringRaw | Name | Comment | Backtick => { + Text | StringCooked | StringRaw | Identifier | Comment | Backtick => { panic!("Token {:?} has no default lexeme", kind) } } } + macro_rules! error { + ( + name: $name:ident, + input: $input:expr, + offset: $offset:expr, + line: $line:expr, + column: $column:expr, + width: $width:expr, + kind: $kind:expr, + ) => { + #[test] + fn $name() { + error($input, $offset, $line, $column, $width, $kind); + } + }; + } + + fn error( + src: &str, + offset: usize, + line: usize, + column: usize, + width: usize, + kind: CompilationErrorKind, + ) { + let expected = CompilationError { + src, + offset, + line, + column, + width, + kind, + }; + + match Lexer::lex(src) { + Ok(_) => panic!("Lexing succeeded but expected: {}\n{}", expected, src), + Err(actual) => { + assert_eq!(actual, expected); + } + } + } + test! { name: name_new, text: "foo", - tokens: (Name:"foo"), + tokens: (Identifier:"foo"), } test! { @@ -768,9 +812,9 @@ mod tests { name: export_concatination, text: "export foo = 'foo' + 'bar'", tokens: ( - Name:"export", + Identifier:"export", Whitespace, - Name:"foo", + Identifier:"foo", Whitespace, Equals, Whitespace, @@ -786,9 +830,9 @@ mod tests { name: export_complex, text: "export foo = ('foo' + 'bar') + `baz`", tokens: ( - Name:"export", + Identifier:"export", Whitespace, - Name:"foo", + Identifier:"foo", Whitespace, Equals, Whitespace, @@ -821,7 +865,7 @@ mod tests { test! { name: indented_line, text: "foo:\n a", - tokens: (Name:"foo", Colon, Eol, Indent:" ", Line, Text:"a", Dedent), + tokens: (Identifier:"foo", Colon, Eol, Indent:" ", Text:"a", Dedent), } test! 
{ @@ -833,19 +877,16 @@ mod tests { c ", tokens: ( - Name:"foo", + Identifier:"foo", Colon, Eol, Indent, - Line, Text:"a", Eol, Whitespace:" ", - Line, Text:"b", Eol, Whitespace:" ", - Line, Text:"c", Eol, Dedent, @@ -860,15 +901,14 @@ mod tests { b: ", tokens: ( - Name:"foo", + Identifier:"foo", Colon, Eol, Indent, - Line, Text:"a", Eol, Dedent, - Name:"b", + Identifier:"b", Colon, Eol, ) @@ -883,17 +923,15 @@ mod tests { b: ", tokens: ( - Name:"foo", + Identifier:"foo", Colon, Eol, Indent:" ", - Line, Text:"a", Eol, - Line, Eol, Dedent, - Name:"b", + Identifier:"b", Colon, Eol, ), @@ -903,11 +941,10 @@ mod tests { name: indented_line_containing_unpaired_carriage_return, text: "foo:\n \r \n", tokens: ( - Name:"foo", + Identifier:"foo", Colon, Eol, Indent:" ", - Line, Text:"\r ", Eol, Dedent, @@ -931,51 +968,43 @@ mod tests { @mv b c ", tokens: ( - Name:"b", + Identifier:"b", Colon, Whitespace, - Name:"a", + Identifier:"a", Eol, Indent, - Line, Text:"@mv a b", Eol, - Line, Eol, Dedent, - Name:"a", + Identifier:"a", Colon, Eol, Indent, - Line, Text:"@touch F", Eol, Whitespace:" ", - Line, Text:"@touch a", Eol, - Line, Eol, Dedent, - Name:"d", + Identifier:"d", Colon, Whitespace, - Name:"c", + Identifier:"c", Eol, Indent, - Line, Text:"@rm c", Eol, - Line, Eol, Dedent, - Name:"c", + Identifier:"c", Colon, Whitespace, - Name:"b", + Identifier:"b", Eol, Indent, - Line, Text:"@mv b c", Eol, Dedent @@ -986,11 +1015,10 @@ mod tests { name: interpolation_empty, text: "hello:\n echo {{}}", tokens: ( - Name:"hello", + Identifier:"hello", Colon, Eol, Indent:" ", - Line, Text:"echo ", InterpolationStart, InterpolationEnd, @@ -1002,11 +1030,10 @@ mod tests { name: interpolation_expression, text: "hello:\n echo {{`echo hello` + `echo goodbye`}}", tokens: ( - Name:"hello", + Identifier:"hello", Colon, Eol, Indent:" ", - Line, Text:"echo ", InterpolationStart, Backtick:"`echo hello`", @@ -1028,13 +1055,13 @@ mod tests { test123 ", tokens: ( - Name:"foo", + Identifier:"foo", Eol, - 
Name:"bar-bob", + Identifier:"bar-bob", Eol, - Name:"b-bob_asdfAAAA", + Identifier:"b-bob_asdfAAAA", Eol, - Name:"test123", + Identifier:"test123", Eol, ), } @@ -1043,11 +1070,10 @@ mod tests { name: tokenize_indented_line, text: "foo:\n a", tokens: ( - Name:"foo", + Identifier:"foo", Colon, Eol, Indent:" ", - Line, Text:"a", Dedent, ), @@ -1062,19 +1088,16 @@ mod tests { c ", tokens: ( - Name:"foo", + Identifier:"foo", Colon, Eol, Indent, - Line, Text:"a", Eol, Whitespace:" ", - Line, Text:"b", Eol, Whitespace:" ", - Line, Text:"c", Eol, Dedent, @@ -1085,7 +1108,7 @@ mod tests { name: tokenize_strings, text: r#"a = "'a'" + '"b"' + "'c'" + '"d"'#echo hello"#, tokens: ( - Name:"a", + Identifier:"a", Whitespace, Equals, Whitespace, @@ -1113,15 +1136,14 @@ mod tests { {{hello}} ", tokens: ( - Name:"foo", + Identifier:"foo", Colon, Whitespace, Comment:"# some comment", Eol, Indent:" ", - Line, InterpolationStart, - Name:"hello", + Identifier:"hello", InterpolationEnd, Eol, Dedent @@ -1135,15 +1157,14 @@ mod tests { # another comment ", tokens: ( - Name:"foo", + Identifier:"foo", Colon, Whitespace, Comment:"# more comments", Eol, Indent:" ", - Line, InterpolationStart, - Name:"hello", + Identifier:"hello", InterpolationEnd, Eol, Dedent, @@ -1156,19 +1177,18 @@ mod tests { name: tokenize_recipe_complex_interpolation_expression, text: "foo: #lol\n {{a + b + \"z\" + blarg}}", tokens: ( - Name:"foo", + Identifier:"foo", Colon, Whitespace:" ", Comment:"#lol", Eol, Indent:" ", - Line, InterpolationStart, - Name:"a", + Identifier:"a", Whitespace, Plus, Whitespace, - Name:"b", + Identifier:"b", Whitespace, Plus, Whitespace, @@ -1176,7 +1196,7 @@ mod tests { Whitespace, Plus, Whitespace, - Name:"blarg", + Identifier:"blarg", InterpolationEnd, Dedent, ), @@ -1186,23 +1206,22 @@ mod tests { name: tokenize_recipe_multiple_interpolations, text: "foo:,#ok\n {{a}}0{{b}}1{{c}}", tokens: ( - Name:"foo", + Identifier:"foo", Colon, Comma, Comment:"#ok", Eol, Indent:" ", - Line, 
InterpolationStart, - Name:"a", + Identifier:"a", InterpolationEnd, Text:"0", InterpolationStart, - Name:"b", + Identifier:"b", InterpolationEnd, Text:"1", InterpolationStart, - Name:"c", + Identifier:"c", InterpolationEnd, Dedent, @@ -1217,24 +1236,24 @@ mod tests { hello blah blah blah : a b c #whatever ", tokens: ( - Name:"bob", + Identifier:"bob", Eol, Eol, - Name:"hello", + Identifier:"hello", Whitespace, - Name:"blah", + Identifier:"blah", Whitespace, - Name:"blah", + Identifier:"blah", Whitespace, - Name:"blah", + Identifier:"blah", Whitespace, Colon, Whitespace, - Name:"a", + Identifier:"a", Whitespace, - Name:"b", + Identifier:"b", Whitespace, - Name:"c", + Identifier:"c", Whitespace, Comment:"#whatever", Eol, @@ -1260,30 +1279,23 @@ mod tests { Eol, Comment:"# this does something", Eol, - Name:"hello", + Identifier:"hello", Colon, Eol, Indent, - Line, Text:"asdf", Eol, Whitespace:" ", - Line, Text:"bsdf", Eol, - Line, Eol, Whitespace:" ", - Line, Text:"csdf", Eol, - Line, Eol, Whitespace:" ", - Line, Text:"dsdf # whatever", Eol, - Line, Eol, Dedent, Comment:"# yolo", @@ -1302,18 +1314,17 @@ mod tests { tokens: ( Comment:"#", Eol, - Name:"A", + Identifier:"A", Equals, StringRaw:"'1'", Eol, - Name:"echo", + Identifier:"echo", Colon, Eol, Indent, - Line, Text:"echo ", InterpolationStart, - Name:"A", + Identifier:"A", InterpolationEnd, Eol, Dedent, @@ -1324,11 +1335,10 @@ mod tests { name: tokenize_interpolation_backticks, text: "hello:\n echo {{`echo hello` + `echo goodbye`}}", tokens: ( - Name:"hello", + Identifier:"hello", Colon, Eol, Indent:" ", - Line, Text:"echo ", InterpolationStart, Backtick:"`echo hello`", @@ -1345,11 +1355,10 @@ mod tests { name: tokenize_empty_interpolation, text: "hello:\n echo {{}}", tokens: ( - Name:"hello", + Identifier:"hello", Colon, Eol, Indent:" ", - Line, Text:"echo ", InterpolationStart, InterpolationEnd, @@ -1361,7 +1370,7 @@ mod tests { name: tokenize_assignment_backticks, text: "a = `echo hello` + `echo goodbye`", 
tokens: ( - Name:"a", + Identifier:"a", Whitespace, Equals, Whitespace, @@ -1392,42 +1401,33 @@ mod tests { ", tokens: ( Eol, - Name:"hello", + Identifier:"hello", Colon, Eol, Indent, - Line, Text:"a", Eol, Whitespace:" ", - Line, Text:"b", Eol, - Line, Eol, Whitespace:" ", - Line, Text:"c", Eol, - Line, Eol, Whitespace:" ", - Line, Text:"d", Eol, - Line, Eol, Dedent, Comment:"# hello", Eol, - Name:"bob", + Identifier:"bob", Colon, Eol, Indent:" ", - Line, Text:"frank", Eol, - Line, Eol, Dedent, ), @@ -1437,7 +1437,7 @@ mod tests { name: tokenize_comment, text: "a:=#", tokens: ( - Name:"a", + Identifier:"a", ColonEquals, Comment:"#", ), @@ -1447,7 +1447,7 @@ mod tests { name: tokenize_comment_with_bang, text: "a:=#foo!", tokens: ( - Name:"a", + Identifier:"a", ColonEquals, Comment:"#foo!", ), @@ -1470,51 +1470,43 @@ mod tests { @mv b c ", tokens: ( - Name:"b", + Identifier:"b", Colon, Whitespace, - Name:"a", + Identifier:"a", Eol, Indent, - Line, Text:"@mv a b", Eol, - Line, Eol, Dedent, - Name:"a", + Identifier:"a", Colon, Eol, Indent, - Line, Text:"@touch F", Eol, Whitespace:" ", - Line, Text:"@touch a", Eol, - Line, Eol, Dedent, - Name:"d", + Identifier:"d", Colon, Whitespace, - Name:"c", + Identifier:"c", Eol, Indent, - Line, Text:"@rm c", Eol, - Line, Eol, Dedent, - Name:"c", + Identifier:"c", Colon, Whitespace, - Name:"b", + Identifier:"b", Eol, Indent, - Line, Text:"@mv b c", Eol, Dedent, @@ -1533,7 +1525,7 @@ mod tests { ParenR, Whitespace, ParenR, - Name:"abc", + Identifier:"abc", ParenL, Plus, ), @@ -1554,20 +1546,19 @@ mod tests { name: multiple_recipes, text: "a:\n foo\nb:", tokens: ( - Name:"a", + Identifier:"a", Colon, Eol, Indent:" ", - Line, Text:"foo", Eol, Dedent, - Name:"b", + Identifier:"b", Colon, ), } - error_test! { + error! { name: tokenize_space_then_tab, input: "a: 0 @@ -1581,7 +1572,7 @@ mod tests { kind: InconsistentLeadingWhitespace{expected: " ", found: "\t"}, } - error_test! { + error! 
{ name: tokenize_tabs_then_tab_space, input: "a: \t\t0 @@ -1595,7 +1586,7 @@ mod tests { kind: InconsistentLeadingWhitespace{expected: "\t\t", found: "\t "}, } - error_test! { + error! { name: tokenize_unknown, input: "~", offset: 0, @@ -1605,7 +1596,7 @@ mod tests { kind: UnknownStartOfToken, } - error_test! { + error! { name: unterminated_string_with_escapes, input: r#"a = "\n\t\r\"\\"#, offset: 4, @@ -1615,7 +1606,7 @@ mod tests { kind: UnterminatedString, } - error_test! { + error! { name: unterminated_raw_string, input: "r a='asdf", offset: 4, @@ -1625,7 +1616,7 @@ mod tests { kind: UnterminatedString, } - error_test! { + error! { name: unterminated_interpolation, input: "foo:\n echo {{ ", @@ -1636,7 +1627,7 @@ mod tests { kind: UnterminatedInterpolation, } - error_test! { + error! { name: unterminated_backtick, input: "`echo", offset: 0, @@ -1646,7 +1637,7 @@ mod tests { kind: UnterminatedBacktick, } - error_test! { + error! { name: unpaired_carriage_return, input: "foo\rbar", offset: 3, @@ -1656,7 +1647,7 @@ mod tests { kind: UnpairedCarriageReturn, } - error_test! { + error! { name: unknown_start_of_token_ampersand, input: " \r\n&", offset: 3, @@ -1666,7 +1657,7 @@ mod tests { kind: UnknownStartOfToken, } - error_test! { + error! { name: unknown_start_of_token_tilde, input: "~", offset: 0, @@ -1676,7 +1667,7 @@ mod tests { kind: UnknownStartOfToken, } - error_test! { + error! { name: unterminated_string, input: r#"a = ""#, offset: 4, @@ -1686,7 +1677,7 @@ mod tests { kind: UnterminatedString, } - error_test! { + error! { name: mixed_leading_whitespace, input: "a:\n\t echo hello", offset: 3, @@ -1696,7 +1687,7 @@ mod tests { kind: MixedLeadingWhitespace{whitespace: "\t "}, } - error_test! { + error! 
{ name: unclosed_interpolation_delimiter, input: "a:\n echo {{ foo", offset: 9, diff --git a/src/lib.rs b/src/lib.rs index 2c1cdf1..9853f79 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -3,7 +3,14 @@ extern crate lazy_static; #[cfg(test)] #[macro_use] -mod testing; +pub mod testing; + +#[cfg(test)] +#[macro_use] +pub mod tree; + +#[cfg(test)] +pub mod node; #[cfg(fuzzing)] pub(crate) mod fuzzing; @@ -13,6 +20,8 @@ mod die; mod alias; mod alias_resolver; +mod analyzer; +mod assignment; mod assignment_evaluator; mod assignment_resolver; mod color; @@ -20,6 +29,8 @@ mod command_ext; mod common; mod compilation_error; mod compilation_error_kind; +mod compilation_result_ext; +mod compiler; mod config; mod config_error; mod count; @@ -33,10 +44,16 @@ mod function_context; mod functions; mod interrupt_guard; mod interrupt_handler; +mod item; mod justfile; +mod keyed; +mod keyword; mod lexer; +mod line; mod list; mod load_dotenv; +mod module; +mod name; mod ordinal; mod output; mod output_error; @@ -58,6 +75,7 @@ mod show_whitespace; mod state; mod string_literal; mod subcommand; +mod table; mod token; mod token_kind; mod use_color; diff --git a/src/line.rs b/src/line.rs new file mode 100644 index 0000000..6335abd --- /dev/null +++ b/src/line.rs @@ -0,0 +1,28 @@ +use crate::common::*; + +/// A single line in a recipe body, consisting of any number of +/// `Fragment`s. 
+#[derive(Debug, PartialEq)] +pub(crate) struct Line<'src> { + pub(crate) fragments: Vec>, +} + +impl<'src> Line<'src> { + pub(crate) fn is_empty(&self) -> bool { + self.fragments.is_empty() + } + + pub(crate) fn is_continuation(&self) -> bool { + match self.fragments.last() { + Some(Fragment::Text { token }) => token.lexeme().ends_with('\\'), + _ => false, + } + } + + pub(crate) fn is_shebang(&self) -> bool { + match self.fragments.first() { + Some(Fragment::Text { token }) => token.lexeme().starts_with("#!"), + _ => false, + } + } +} diff --git a/src/module.rs b/src/module.rs new file mode 100644 index 0000000..47c8376 --- /dev/null +++ b/src/module.rs @@ -0,0 +1,16 @@ +use crate::common::*; + +/// A module, the top-level type produced by the parser. So-named because +/// although at present, all justfiles consist of a single module, in the +/// future we will likely have multi-module and multi-file justfiles. +/// +/// Not all successful parses result in valid justfiles, so additional +/// consistency checks and name resolution are performed by the `Analyzer`, +/// which produces a `Justfile` from a `Module`. +#[derive(Debug)] +pub(crate) struct Module<'src> { + /// Items in the justfile + pub(crate) items: Vec>, + /// Non-fatal warnings encountered during parsing + pub(crate) warnings: Vec>, +} diff --git a/src/name.rs b/src/name.rs new file mode 100644 index 0000000..7df1f83 --- /dev/null +++ b/src/name.rs @@ -0,0 +1,52 @@ +use crate::common::*; + +/// A name. This is effectively just a `Token` of kind `Identifier`, but we +/// give it its own type for clarity. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd)] +pub(crate) struct Name<'src> { + pub(crate) offset: usize, + pub(crate) length: usize, + pub(crate) line: usize, + pub(crate) column: usize, + pub(crate) src: &'src str, +} + +impl<'src> Name<'src> { + /// The name's text contents + pub(crate) fn lexeme(&self) -> &'src str { + &self.src[self.offset..self.offset + self.length] + } + + /// Turn this name back into a token + pub(crate) fn token(&self) -> Token<'src> { + Token { + kind: TokenKind::Identifier, + offset: self.offset, + length: self.length, + line: self.line, + column: self.column, + src: self.src, + } + } + + pub(crate) fn from_identifier(token: Token<'src>) -> Name { + assert_eq!(token.kind, TokenKind::Identifier); + Name { + offset: token.offset, + length: token.length, + line: token.line, + column: token.column, + src: token.src, + } + } + + pub(crate) fn error(&self, kind: CompilationErrorKind<'src>) -> CompilationError<'src> { + self.token().error(kind) + } +} + +impl Display for Name<'_> { + fn fmt(&self, f: &mut Formatter) -> fmt::Result { + write!(f, "{}", self.lexeme()) + } +} diff --git a/src/node.rs b/src/node.rs new file mode 100644 index 0000000..9a7aff0 --- /dev/null +++ b/src/node.rs @@ -0,0 +1,150 @@ +use crate::common::*; + +/// Methods common to all AST nodes. Currently only used in parser unit tests. +pub(crate) trait Node<'src> { + /// Construct an untyped tree of atoms representing this Node. This function, + /// and the `Tree` type, are only used in parser unit tests.
+ fn tree(&self) -> Tree<'src>; +} + +impl<'src> Node<'src> for Module<'src> { + fn tree(&self) -> Tree<'src> { + Tree::atom("justfile") + .extend(self.items.iter().map(|item| item.tree())) + .extend(self.warnings.iter().map(|warning| warning.tree())) + } +} + +impl<'src> Node<'src> for Item<'src> { + fn tree(&self) -> Tree<'src> { + match self { + Item::Alias(alias) => alias.tree(), + Item::Assignment(assignment) => assignment.tree(), + Item::Recipe(recipe) => recipe.tree(), + } + } +} + +impl<'src> Node<'src> for Alias<'src> { + fn tree(&self) -> Tree<'src> { + Tree::atom(keyword::ALIAS) + .push(self.name.lexeme()) + .push(self.target.lexeme()) + } +} + +impl<'src> Node<'src> for Assignment<'src> { + fn tree(&self) -> Tree<'src> { + if self.export { + Tree::atom("assignment").push("#").push(keyword::EXPORT) + } else { + Tree::atom("assignment") + } + .push(self.name.lexeme()) + .push(self.expression.tree()) + } +} + +impl<'src> Node<'src> for Expression<'src> { + fn tree(&self) -> Tree<'src> { + match self { + Expression::Concatination { lhs, rhs } => Tree::atom("+").push(lhs.tree()).push(rhs.tree()), + Expression::Call { + function, + arguments, + } => Tree::atom("call") + .push(function.lexeme()) + .extend(arguments.iter().map(|argument| argument.tree())), + Expression::Variable { name } => Tree::atom(name.lexeme()), + Expression::StringLiteral { + string_literal: StringLiteral { cooked, .. }, + } => Tree::string(cooked), + Expression::Backtick { contents, .. 
} => Tree::atom("backtick").push(Tree::string(contents)), + Expression::Group { contents } => Tree::List(vec![contents.tree()]), + } + } +} + +impl<'src> Node<'src> for Recipe<'src> { + fn tree(&self) -> Tree<'src> { + let mut t = Tree::atom("recipe"); + + if self.quiet { + t.push_mut("#"); + t.push_mut("quiet"); + } + + if let Some(doc) = self.doc { + t.push_mut(Tree::string(doc)); + } + + t.push_mut(self.name.lexeme()); + + if !self.parameters.is_empty() { + let mut params = Tree::atom("params"); + + for parameter in &self.parameters { + if parameter.variadic { + params.push_mut("+"); + } + + params.push_mut(parameter.tree()); + } + + t.push_mut(params); + } + + if !self.dependencies.is_empty() { + t = t.push( + Tree::atom("deps").extend( + self + .dependencies + .iter() + .map(|dependency| dependency.lexeme()), + ), + ); + } + + if !self.body.is_empty() { + t.push_mut(Tree::atom("body").extend(self.body.iter().map(|line| line.tree()))); + } + + t + } +} + +impl<'src> Node<'src> for Parameter<'src> { + fn tree(&self) -> Tree<'src> { + let mut children = Vec::new(); + children.push(Tree::atom(self.name.lexeme())); + + if let Some(default) = &self.default { + children.push(default.tree()); + } + + Tree::List(children) + } +} + +impl<'src> Node<'src> for Line<'src> { + fn tree(&self) -> Tree<'src> { + Tree::list(self.fragments.iter().map(|fragment| fragment.tree())) + } +} + +impl<'src> Node<'src> for Fragment<'src> { + fn tree(&self) -> Tree<'src> { + match self { + Fragment::Text { token } => Tree::string(token.lexeme()), + Fragment::Interpolation { expression } => Tree::List(vec![expression.tree()]), + } + } +} + +impl<'src> Node<'src> for Warning<'src> { + fn tree(&self) -> Tree<'src> { + match self { + Warning::DeprecatedEquals { .. 
} => Tree::atom("warning").push("deprecated_equals"), + } + } +} diff --git a/src/output_error.rs b/src/output_error.rs index 1f58f14..977a22d 100644 --- a/src/output_error.rs +++ b/src/output_error.rs @@ -25,4 +25,3 @@ impl Display for OutputError { } } } - diff --git a/src/parameter.rs b/src/parameter.rs index c9d11d2..5061911 100644 --- a/src/parameter.rs +++ b/src/parameter.rs @@ -1,20 +1,23 @@ use crate::common::*; +/// A single function parameter #[derive(PartialEq, Debug)] -pub(crate) struct Parameter<'a> { - pub(crate) default: Option<Expression<'a>>, - pub(crate) name: &'a str, - pub(crate) token: Token<'a>, +pub(crate) struct Parameter<'src> { + /// The parameter name + pub(crate) name: Name<'src>, + /// Parameter is variadic pub(crate) variadic: bool, + /// An optional default expression + pub(crate) default: Option<Expression<'src>>, } -impl<'a> Display for Parameter<'a> { +impl<'src> Display for Parameter<'src> { fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> { let color = Color::fmt(f); if self.variadic { write!(f, "{}", color.annotation().paint("+"))?; } - write!(f, "{}", color.parameter().paint(self.name))?; + write!(f, "{}", color.parameter().paint(self.name.lexeme()))?; if let Some(ref default) = self.default { write!(f, "={}", color.string().paint(&default.to_string()))?; } diff --git a/src/parser.rs b/src/parser.rs index bf14e1f..b70bcfe 100644 --- a/src/parser.rs +++ b/src/parser.rs @@ -1,1191 +1,1536 @@ use crate::common::*; -use CompilationErrorKind::*; use TokenKind::*; -pub(crate) struct Parser<'a> { - text: &'a str, - tokens: itertools::PutBackN<vec::IntoIter<Token<'a>>>, - recipes: BTreeMap<&'a str, Recipe<'a>>, - assignments: BTreeMap<&'a str, Expression<'a>>, - assignment_tokens: BTreeMap<&'a str, Token<'a>>, - exports: BTreeSet<&'a str>, - aliases: BTreeMap<&'a str, Alias<'a>>, - alias_tokens: BTreeMap<&'a str, Token<'a>>, - warnings: Vec<Warning<'a>>, +/// Just language parser +/// +/// The parser is a (hopefully) straightforward recursive descent parser.
+/// +/// It uses a few tokens of lookahead to disambiguate different constructs. +/// +/// The `expect_*` and `presume_*` methods are similar in that they assert +/// the type of unparsed tokens and consume them. However, upon encountering +/// an unexpected token, the `expect_*` methods return an unexpected token +/// error, whereas the `presume_*` methods return an internal error. +/// +/// The `presume_*` methods are used when the token stream has been inspected +/// in some other way, and thus encountering an unexpected token is a bug in +/// Just, and not a syntax error. +/// +/// All methods starting with `parse_*` parse and return a language construct. +pub(crate) struct Parser<'tokens, 'src> { + /// Source tokens + tokens: &'tokens [Token<'src>], + /// Index of the next un-parsed token + next: usize, +} -impl<'a> Parser<'a> { - pub(crate) fn parse(text: &'a str) -> CompilationResult<'a, Justfile> { - let mut tokens = Lexer::lex(text)?; - tokens.retain(|token| token.kind != Whitespace); - let parser = Parser::new(text, tokens); - parser.justfile() +impl<'tokens, 'src> Parser<'tokens, 'src> { + /// Parse `tokens` into a `Module` + pub(crate) fn parse(tokens: &'tokens [Token<'src>]) -> CompilationResult<'src, Module<'src>> { + Self::new(tokens).parse_justfile() } - pub(crate) fn new(text: &'a str, tokens: Vec<Token<'a>>) -> Parser<'a> { - Parser { - tokens: itertools::put_back_n(tokens), - recipes: empty(), - assignments: empty(), - assignment_tokens: empty(), - exports: empty(), - aliases: empty(), - alias_tokens: empty(), - warnings: Vec::new(), - text, - } + /// Construct a new Parser from a token stream + fn new(tokens: &'tokens [Token<'src>]) -> Parser<'tokens, 'src> { + Parser { next: 0, tokens } } - fn peek(&mut self, kind: TokenKind) -> bool { - let next = self.tokens.next().unwrap(); - let result = next.kind == kind; - self.tokens.put_back(next); - result + fn error( + &self, + kind: CompilationErrorKind<'src>, + ) -> CompilationResult<'src,
CompilationError<'src>> { + Ok(self.next()?.error(kind)) } - fn accept(&mut self, kind: TokenKind) -> Option<Token<'a>> { - if self.peek(kind) { - self.tokens.next() - } else { - None - } - } + /// Construct an unexpected token error with the token returned by `Parser::next` + fn unexpected_token( + &self, + expected: &[TokenKind], + ) -> CompilationResult<'src, CompilationError<'src>> { + let mut expected = expected.iter().cloned().collect::<Vec<TokenKind>>(); + expected.sort(); - fn accepted(&mut self, kind: TokenKind) -> bool { - self.accept(kind).is_some() - } - - fn expect(&mut self, kind: TokenKind) -> Option<Token<'a>> { - if self.peek(kind) { - self.tokens.next(); - None - } else { - self.tokens.next() - } - } - - fn expect_eol(&mut self) -> Option<Token<'a>> { - self.accepted(Comment); - if self.peek(Eol) { - self.accept(Eol); - None - } else if self.peek(Eof) { - None - } else { - self.tokens.next() - } - } - - fn unexpected_token(&self, found: &Token<'a>, expected: &[TokenKind]) -> CompilationError<'a> { - found.error(UnexpectedToken { - expected: expected.to_vec(), - found: found.kind, + self.error(CompilationErrorKind::UnexpectedToken { + expected, + found: self.next()?.kind, }) } - fn recipe( - &mut self, - name: &Token<'a>, - doc: Option<Token<'a>>, - quiet: bool, - ) -> CompilationResult<'a, ()> { - if let Some(recipe) = self.recipes.get(name.lexeme()) { - return Err(name.error(DuplicateRecipe { - recipe: recipe.name, - first: recipe.line_number, - })); - } - - let mut parsed_parameter_with_default = false; - let mut parsed_variadic_parameter = false; - let mut parameters: Vec<Parameter> = vec![]; - loop { - let plus = self.accept(Plus); - - let parameter = match self.accept(Name) { - Some(parameter) => parameter, - None => { - if let Some(plus) = plus { - return Err(self.unexpected_token(&plus, &[Name])); - } else { - break; - } - } - }; - - let variadic = plus.is_some(); - - if parsed_variadic_parameter { - return Err(parameter.error(ParameterFollowsVariadicParameter { - parameter: parameter.lexeme(), - })); - }
- - if parameters.iter().any(|p| p.name == parameter.lexeme()) { - return Err(parameter.error(DuplicateParameter { - recipe: name.lexeme(), - parameter: parameter.lexeme(), - })); - } - - let default; - if self.accepted(Equals) { - default = Some(self.value()?); - } else { - default = None - } - - if parsed_parameter_with_default && default.is_none() { - return Err(parameter.error(RequiredParameterFollowsDefaultParameter { - parameter: parameter.lexeme(), - })); - } - - parsed_parameter_with_default |= default.is_some(); - parsed_variadic_parameter = variadic; - - parameters.push(Parameter { - name: parameter.lexeme(), - token: parameter, - default, - variadic, - }); - } - - if let Some(token) = self.expect(Colon) { - // if we haven't accepted any parameters, a := - // would have been fine as part of an assignment - if parameters.is_empty() { - return Err(self.unexpected_token(&token, &[Name, Plus, Colon, ColonEquals])); - } else { - return Err(self.unexpected_token(&token, &[Name, Plus, Colon])); - } - } - - let mut dependencies = vec![]; - let mut dependency_tokens = vec![]; - while let Some(dependency) = self.accept(Name) { - if dependencies.contains(&dependency.lexeme()) { - return Err(dependency.error(DuplicateDependency { - recipe: name.lexeme(), - dependency: dependency.lexeme(), - })); - } - dependencies.push(dependency.lexeme()); - dependency_tokens.push(dependency); - } - - if let Some(token) = self.expect_eol() { - return Err(self.unexpected_token(&token, &[Name, Eol, Eof])); - } - - let mut lines: Vec> = vec![]; - let mut shebang = false; - - if self.accepted(Indent) { - while !self.accepted(Dedent) { - if self.accepted(Eol) { - lines.push(vec![]); - continue; - } - if let Some(token) = self.expect(Line) { - return Err(token.error(Internal { - message: format!("Expected a line but got {}", token.kind), - })); - } - let mut fragments = vec![]; - - while !(self.accepted(Eol) || self.peek(Dedent)) { - if let Some(token) = self.accept(Text) { - if 
fragments.is_empty() { - if lines.is_empty() { - if token.lexeme().starts_with("#!") { - shebang = true; - } - } else if !shebang - && !lines - .last() - .and_then(|line| line.last()) - .map(Fragment::continuation) - .unwrap_or(false) - && (token.lexeme().starts_with(' ') || token.lexeme().starts_with('\t')) - { - return Err(token.error(ExtraLeadingWhitespace)); - } - } - fragments.push(Fragment::Text { text: token }); - } else if let Some(token) = self.expect(InterpolationStart) { - return Err(self.unexpected_token(&token, &[Text, InterpolationStart, Eol])); - } else { - fragments.push(Fragment::Expression { - expression: self.expression()?, - }); - - if let Some(token) = self.expect(InterpolationEnd) { - return Err(self.unexpected_token(&token, &[Plus, InterpolationEnd])); - } - } - } - - lines.push(fragments); - } - } - - while lines.last().map(Vec::is_empty).unwrap_or(false) { - lines.pop(); - } - - self.recipes.insert( - name.lexeme(), - Recipe { - line_number: name.line, - name: name.lexeme(), - doc: doc.map(|t| t.lexeme()[1..].trim()), - private: &name.lexeme()[0..1] == "_", - dependencies, - dependency_tokens, - lines, - parameters, - quiet, - shebang, - }, - ); - - Ok(()) + fn internal_error( + &self, + message: impl Into<String>, + ) -> CompilationResult<'src, CompilationError<'src>> { + self.error(CompilationErrorKind::Internal { + message: message.into(), + }) } - fn value(&mut self) -> CompilationResult<'a, Expression<'a>> { - let first = self.tokens.next().unwrap(); + /// An iterator over the remaining significant tokens + fn rest(&self) -> impl Iterator<Item = Token<'src>> + 'tokens { + self.tokens[self.next..]
+ .into_iter() + .cloned() + .filter(|token| token.kind != Whitespace) + } - match first.kind { - Name => { - if self.peek(ParenL) { - if let Some(token) = self.expect(ParenL) { - return Err(self.unexpected_token(&token, &[ParenL])); + /// The next significant token + fn next(&self) -> CompilationResult<'src, Token<'src>> { + if let Some(token) = self.rest().next() { + Ok(token) + } else { + Err(self.internal_error("`Parser::next()` called after end of token stream")?) + } + } + + /// Check if the next significant token is of kind `kind` + fn next_is(&self, kind: TokenKind) -> bool { + self.next_are(&[kind]) + } + + /// Check if the next significant tokens are of kinds `kinds` + fn next_are(&self, kinds: &[TokenKind]) -> bool { + let mut rest = self.rest(); + for kind in kinds { + match rest.next() { + Some(token) => { + if token.kind != *kind { + return false; } - let arguments = self.arguments()?; - if let Some(token) = self.expect(ParenR) { - return Err(self.unexpected_token(&token, &[Name, StringCooked, ParenR])); + } + None => return false, + } + } + true + } + + /// Get the `n`th next significant token + fn get(&self, n: usize) -> CompilationResult<'src, Token<'src>> { + match self.rest().skip(n).next() { + Some(token) => Ok(token), + None => Err(self.internal_error("`Parser::get()` advanced past end of token stream")?), + } + } + + /// Advance past one significant token + fn advance(&mut self) -> CompilationResult<'src, Token<'src>> { + for skipped in &self.tokens[self.next..] { + self.next += 1; + + if skipped.kind != Whitespace { + return Ok(*skipped); + } + } + + Err(self.internal_error("`Parser::advance()` advanced past end of token stream")?) + } + + /// Return the next token if it is of kind `expected`, otherwise, return an + /// unexpected token error + fn expect(&mut self, expected: TokenKind) -> CompilationResult<'src, Token<'src>> { + if let Some(token) = self.accept(expected)? { + Ok(token) + } else { + Err(self.unexpected_token(&[expected])?) 
+ } + } + + /// Return an unexpected token error if the next token is not an EOL + fn expect_eol(&mut self) -> CompilationResult<'src, ()> { + self.accept(Comment)?; + + if self.next_is(Eof) { + return Ok(()); + } + + self.expect(Eol).map(|_| ()).expected(&[Eof]) + } + + /// Return an internal error if the next token is not of kind `Identifier` with + /// lexeme `lexeme`. + fn presume_name(&mut self, lexeme: &str) -> CompilationResult<'src, ()> { + let next = self.advance()?; + + if next.kind != Identifier { + Err(self.internal_error(format!( + "Presumed next token would have kind {}, but found {}", + Identifier, next.kind + ))?) + } else if next.lexeme() != lexeme { + Err(self.internal_error(format!( + "Presumed next token would have lexeme \"{}\", but found \"{}\"", + lexeme, + next.lexeme(), + ))?) + } else { + Ok(()) + } + } + + /// Return an internal error if the next token is not of kind `kind`. + fn presume(&mut self, kind: TokenKind) -> CompilationResult<'src, Token<'src>> { + let next = self.advance()?; + + if next.kind != kind { + Err(self.internal_error(format!( + "Presumed next token would have kind {:?}, but found {:?}", + kind, next.kind + ))?) + } else { + Ok(next) + } + } + + /// Return an internal error if the next token is not one of kinds `kinds`. + fn presume_any(&mut self, kinds: &[TokenKind]) -> CompilationResult<'src, Token<'src>> { + let next = self.advance()?; + if !kinds.contains(&next.kind) { + Err(self.internal_error(format!( + "Presumed next token would be {}, but found {}", + List::or(kinds), + next.kind + ))?) 
+ } else { + Ok(next) + } + } + + /// Accept and return a token of kind `kind` + fn accept(&mut self, kind: TokenKind) -> CompilationResult<'src, Option<Token<'src>>> { + let next = self.next()?; + if next.kind == kind { + self.advance()?; + Ok(Some(next)) + } else { + Ok(None) + } + } + + /// Accept a token of kind `Identifier` and parse into a `Name` + fn accept_name(&mut self) -> CompilationResult<'src, Option<Name<'src>>> { + if self.next_is(Identifier) { + Ok(Some(self.parse_name()?)) + } else { + Ok(None) + } + } + + /// Accept and return `true` if next token is of kind `kind` + fn accepted(&mut self, kind: TokenKind) -> CompilationResult<'src, bool> { + Ok(self.accept(kind)?.is_some()) + } + + /// Parse a justfile, consumes self + fn parse_justfile(mut self) -> CompilationResult<'src, Module<'src>> { + let mut items = Vec::new(); + let mut warnings = Vec::new(); + + let mut doc = None; + + loop { + let next = self.next()?; + + match next.kind { + Comment => { + doc = Some(next.lexeme()[1..].trim()); + self.expect_eol()?; + } + Eol => { + self.advance()?; + } + Eof => { + self.advance()?; + break; + } + Identifier => match next.lexeme() { + keyword::ALIAS => { + if self.next_are(&[Identifier, Identifier, Equals]) { + warnings.push(Warning::DeprecatedEquals { + equals: self.get(2)?, + }); + items.push(Item::Alias(self.parse_alias()?)); + } else if self.next_are(&[Identifier, Identifier, ColonEquals]) { + items.push(Item::Alias(self.parse_alias()?)); + } else { + items.push(Item::Recipe(self.parse_recipe(doc, false)?)); + } } + keyword::EXPORT => { + if self.next_are(&[Identifier, Identifier, Equals]) { + warnings.push(Warning::DeprecatedEquals { + equals: self.get(2)?, + }); + self.presume_name(keyword::EXPORT)?; + items.push(Item::Assignment(self.parse_assignment(true)?)); + } else if self.next_are(&[Identifier, Identifier, ColonEquals]) { + self.presume_name(keyword::EXPORT)?; + items.push(Item::Assignment(self.parse_assignment(true)?)); + } else { +
items.push(Item::Recipe(self.parse_recipe(doc, false)?)); + } + } + _ => { + if self.next_are(&[Identifier, Equals]) { + warnings.push(Warning::DeprecatedEquals { + equals: self.get(1)?, + }); + items.push(Item::Assignment(self.parse_assignment(false)?)); + } else if self.next_are(&[Identifier, ColonEquals]) { + items.push(Item::Assignment(self.parse_assignment(false)?)); + } else { + items.push(Item::Recipe(self.parse_recipe(doc, false)?)); + } + } + }, + At => { + self.presume(At)?; + items.push(Item::Recipe(self.parse_recipe(doc, true)?)); + } + _ => { + return Err(self.unexpected_token(&[Identifier, At])?); + } + } + + if next.kind != Comment { + doc = None; + } + } + + if self.next != self.tokens.len() { + Err(self.internal_error(format!( + "Parse completed with {} unparsed tokens", + self.tokens.len() - self.next, + ))?) + } else { + Ok(Module { items, warnings }) + } + } + + /// Parse an alias, e.g `alias name := target` + fn parse_alias(&mut self) -> CompilationResult<'src, Alias<'src>> { + self.presume_name(keyword::ALIAS)?; + let name = self.parse_name()?; + self.presume_any(&[Equals, ColonEquals])?; + let target = self.parse_name()?; + self.expect_eol()?; + Ok(Alias { name, target }) + } + + /// Parse an assignment, e.g. `foo := bar` + fn parse_assignment(&mut self, export: bool) -> CompilationResult<'src, Assignment<'src>> { + let name = self.parse_name()?; + self.presume_any(&[Equals, ColonEquals])?; + let expression = self.parse_expression()?; + self.expect_eol()?; + Ok(Assignment { + name, + export, + expression, + }) + } + + /// Parse an expression, e.g. `1 + 2` + fn parse_expression(&mut self) -> CompilationResult<'src, Expression<'src>> { + let value = self.parse_value()?; + + if self.accepted(Plus)? { + let lhs = Box::new(value); + let rhs = Box::new(self.parse_expression()?); + Ok(Expression::Concatination { lhs, rhs }) + } else { + Ok(value) + } + } + + /// Parse a value, e.g. 
`(bar)` + fn parse_value(&mut self) -> CompilationResult<'src, Expression<'src>> { + let next = self.next()?; + + match next.kind { + StringCooked | StringRaw => Ok(Expression::StringLiteral { + string_literal: self.parse_string_literal()?, + }), + Backtick => { + let contents = &next.lexeme()[1..next.lexeme().len() - 1]; + let token = self.advance()?; + Ok(Expression::Backtick { contents, token }) + } + Identifier => { + let name = self.parse_name()?; + + if self.next_is(ParenL) { + let arguments = self.parse_sequence()?; Ok(Expression::Call { - name: first.lexeme(), - token: first, + function: name, arguments, }) } else { - Ok(Expression::Variable { - name: first.lexeme(), - token: first, - }) + Ok(Expression::Variable { name }) } } - Backtick => Ok(Expression::Backtick { - raw: &first.lexeme()[1..first.lexeme().len() - 1], - token: first, - }), - StringRaw | StringCooked => Ok(Expression::String { - cooked_string: StringLiteral::new(&first)?, - }), ParenL => { - let expression = self.expression()?; + self.presume(ParenL)?; + let contents = Box::new(self.parse_expression()?); + self.expect(ParenR)?; + Ok(Expression::Group { contents }) + } + _ => { + return Err(self.unexpected_token(&[ + StringCooked, + StringRaw, + Backtick, + Identifier, + ParenL, + ])?) + } + } + } - if let Some(token) = self.expect(ParenR) { - return Err(self.unexpected_token(&token, &[ParenR])); + /// Parse a string literal, e.g. 
`"FOO"` + fn parse_string_literal(&mut self) -> CompilationResult<'src, StringLiteral<'src>> { + let token = self.presume_any(&[StringRaw, StringCooked])?; + + let raw = &token.lexeme()[1..token.lexeme().len() - 1]; + + match token.kind { + StringRaw => Ok(StringLiteral { + raw, + cooked: Cow::Borrowed(raw), + }), + StringCooked => { + let mut cooked = String::new(); + let mut escape = false; + for c in raw.chars() { + if escape { + match c { + 'n' => cooked.push('\n'), + 'r' => cooked.push('\r'), + 't' => cooked.push('\t'), + '\\' => cooked.push('\\'), + '"' => cooked.push('"'), + other => { + return Err( + token.error(CompilationErrorKind::InvalidEscapeSequence { character: other }), + ); + } + } + escape = false; + } else if c == '\\' { + escape = true; + } else { + cooked.push(c); + } } - - Ok(Expression::Group { - expression: Box::new(expression), + Ok(StringLiteral { + raw, + cooked: Cow::Owned(cooked), }) } - _ => Err(self.unexpected_token(&first, &[Name, StringCooked])), + _ => Err(token.error(CompilationErrorKind::Internal { + message: "`Parser::parse_string_literal` called on non-string token".to_string(), + })), } } - fn expression(&mut self) -> CompilationResult<'a, Expression<'a>> { - let lhs = self.value()?; - - if self.accepted(Plus) { - let rhs = self.expression()?; - - Ok(Expression::Concatination { - lhs: Box::new(lhs), - rhs: Box::new(rhs), - }) - } else { - Ok(lhs) - } + /// Parse a name from an identifier token + fn parse_name(&mut self) -> CompilationResult<'src, Name<'src>> { + self + .expect(Identifier) + .map(|token| Name::from_identifier(token)) } - fn arguments(&mut self) -> CompilationResult<'a, Vec<Expression<'a>>> { - let mut arguments = Vec::new(); + /// Parse sequence of comma-separated expressions + fn parse_sequence(&mut self) -> CompilationResult<'src, Vec<Expression<'src>>> { + self.presume(ParenL)?; - while !self.peek(ParenR) && !self.peek(Eof) && !self.peek(Eol) && !self.peek(InterpolationEnd) { - arguments.push(self.expression()?); - if !self.accepted(Comma)
{ - if self.peek(ParenR) { - break; - } else { - let next = self.tokens.next().unwrap(); - return Err(self.unexpected_token(&next, &[Comma, ParenR])); - } + let mut elements = Vec::new(); + + while !self.next_is(ParenR) { + elements.push(self.parse_expression().expected(&[ParenR])?); + + if !self.accepted(Comma)? { + break; } } - Ok(arguments) + self.expect(ParenR)?; + + Ok(elements) } - fn assignment(&mut self, name: Token<'a>, export: bool) -> CompilationResult<'a, ()> { - if self.assignments.contains_key(name.lexeme()) { - return Err(name.error(DuplicateVariable { - variable: name.lexeme(), - })); - } - if export { - self.exports.insert(name.lexeme()); + /// Parse a recipe + fn parse_recipe( + &mut self, + doc: Option<&'src str>, + quiet: bool, + ) -> CompilationResult<'src, Recipe<'src>> { + let name = self.parse_name()?; + + let mut positional = Vec::new(); + + while self.next_is(Identifier) { + positional.push(self.parse_parameter(false)?); } - let expression = self.expression()?; - if let Some(token) = self.expect_eol() { - return Err(self.unexpected_token(&token, &[Plus, Eol])); - } + let variadic = if self.accepted(Plus)? { + let variadic = self.parse_parameter(true)?; - self.assignments.insert(name.lexeme(), expression); - self.assignment_tokens.insert(name.lexeme(), name); - Ok(()) - } + if let Some(identifier) = self.accept(Identifier)? 
{ + return Err( + identifier.error(CompilationErrorKind::ParameterFollowsVariadicParameter { + parameter: identifier.lexeme(), + }), + ); + } - fn alias(&mut self, name: Token<'a>) -> CompilationResult<'a, ()> { - // Make sure alias doesn't already exist - if let Some(alias) = self.aliases.get(name.lexeme()) { - return Err(name.error(DuplicateAlias { - alias: alias.name, - first: alias.line_number, - })); - } - - // Make sure the next token is of kind Name and keep it - let target = if let Some(next) = self.accept(Name) { - next.lexeme() + Some(variadic) } else { - let unexpected = self.tokens.next().unwrap(); - return Err(self.unexpected_token(&unexpected, &[Name])); + None }; - // Make sure this is where the line or file ends without any unexpected tokens. - if let Some(token) = self.expect_eol() { - return Err(self.unexpected_token(&token, &[Eol, Eof])); + let result = self.expect(Colon); + + if result.is_err() { + let mut alternatives = Vec::new(); + + if variadic.is_none() { + alternatives.push(Identifier); + } + + if !quiet && variadic.is_none() && positional.is_empty() { + alternatives.push(ColonEquals); + } + + if variadic.is_some() || !positional.is_empty() { + alternatives.push(Equals); + } + + if variadic.is_none() { + alternatives.push(Plus); + } + + result.expected(&alternatives)?; } - self.aliases.insert( - name.lexeme(), - Alias { - name: name.lexeme(), - line_number: name.line, - private: name.lexeme().starts_with('_'), - target, - }, - ); - self.alias_tokens.insert(name.lexeme(), name); + let mut dependencies = Vec::new(); - Ok(()) + while let Some(dependency) = self.accept_name()? 
{ + dependencies.push(dependency); + } + + self.expect_eol().expected(&[Identifier])?; + + let body = self.parse_body()?; + + Ok(Recipe { + private: name.lexeme().starts_with('_'), + shebang: body.first().map(Line::is_shebang).unwrap_or(false), + parameters: positional.into_iter().chain(variadic).collect(), + doc, + name, + quiet, + dependencies, + body, + }) } - pub(crate) fn justfile(mut self) -> CompilationResult<'a, Justfile<'a>> { - let mut doc = None; - loop { - match self.tokens.next() { - Some(token) => match token.kind { - Eof => break, - Eol => { - doc = None; - continue; - } - Comment => { - if let Some(token) = self.expect_eol() { - return Err(token.error(Internal { - message: format!("found comment followed by {}", token.kind), - })); - } - doc = Some(token); - } - At => { - if let Some(name) = self.accept(Name) { - self.recipe(&name, doc, true)?; - doc = None; - } else { - let unexpected = &self.tokens.next().unwrap(); - return Err(self.unexpected_token(unexpected, &[Name])); - } - } - Name => { - if token.lexeme() == "export" { - let next = self.tokens.next().unwrap(); - if next.kind == Name && self.peek(Equals) { - self.warnings.push(Warning::DeprecatedEquals { - equals: self.tokens.next().unwrap(), - }); - self.assignment(next, true)?; - doc = None; - } else if next.kind == Name && self.accepted(ColonEquals) { - self.assignment(next, true)?; - doc = None; - } else { - self.tokens.put_back(next); - self.recipe(&token, doc, false)?; - doc = None; - } - } else if token.lexeme() == "alias" { - let next = self.tokens.next().unwrap(); - if next.kind == Name && self.peek(Equals) { - self.warnings.push(Warning::DeprecatedEquals { - equals: self.tokens.next().unwrap(), - }); - self.alias(next)?; - doc = None; - } else if next.kind == Name && self.accepted(ColonEquals) { - self.alias(next)?; - doc = None; - } else { - self.tokens.put_back(next); - self.recipe(&token, doc, false)?; - doc = None; - } - } else if self.peek(Equals) { - 
self.warnings.push(Warning::DeprecatedEquals { - equals: self.tokens.next().unwrap(), - }); - self.assignment(token, false)?; - doc = None; - } else if self.accepted(ColonEquals) { - self.assignment(token, false)?; - doc = None; - } else { - self.recipe(&token, doc, false)?; - doc = None; - } - } - _ => return Err(self.unexpected_token(&token, &[Name, At])), - }, - None => { - return Err(CompilationError { - text: self.text, - offset: 0, - line: 0, - column: 0, - width: 0, - kind: Internal { - message: "unexpected end of token stream".to_string(), - }, - }); - } - } - } + /// Parse a recipe parameter + fn parse_parameter(&mut self, variadic: bool) -> CompilationResult<'src, Parameter<'src>> { + let name = self.parse_name()?; - if let Some(token) = self.tokens.next() { - return Err(token.error(Internal { - message: format!( - "unexpected token remaining after parsing completed: {:?}", - token.kind - ), - })); - } + let default = if self.accepted(Equals)? { + Some(self.parse_value()?) + } else { + None + }; - AssignmentResolver::resolve_assignments(&self.assignments, &self.assignment_tokens)?; - - RecipeResolver::resolve_recipes(&self.recipes, &self.assignments, self.text)?; - - for recipe in self.recipes.values() { - for parameter in &recipe.parameters { - if self.assignments.contains_key(parameter.token.lexeme()) { - return Err(parameter.token.error(ParameterShadowsVariable { - parameter: parameter.token.lexeme(), - })); - } - } - - for dependency in &recipe.dependency_tokens { - if !self.recipes[dependency.lexeme()].parameters.is_empty() { - return Err(dependency.error(DependencyHasParameters { - recipe: recipe.name, - dependency: dependency.lexeme(), - })); - } - } - } - - AliasResolver::resolve_aliases(&self.aliases, &self.recipes, &self.alias_tokens)?; - - Ok(Justfile { - recipes: self.recipes, - assignments: self.assignments, - exports: self.exports, - aliases: self.aliases, - warnings: self.warnings, + Ok(Parameter { + name, + default, + variadic, }) } + + 
/// Parse the body of a recipe + fn parse_body(&mut self) -> CompilationResult<'src, Vec>> { + let mut lines = Vec::new(); + + if self.accepted(Indent)? { + while !self.accepted(Dedent)? { + let line = if self.accepted(Eol)? { + Line { + fragments: Vec::new(), + } + } else { + let mut fragments = Vec::new(); + + while !(self.accepted(Eol)? || self.next_is(Dedent)) { + if let Some(token) = self.accept(Text)? { + fragments.push(Fragment::Text { token }); + } else if self.accepted(InterpolationStart)? { + fragments.push(Fragment::Interpolation { + expression: self.parse_expression()?, + }); + self.expect(InterpolationEnd)?; + } else { + return Err(self.unexpected_token(&[Text, InterpolationStart])?); + } + } + + Line { fragments } + }; + + lines.push(line); + } + } + + while lines.last().map(Line::is_empty).unwrap_or(false) { + lines.pop(); + } + + Ok(lines) + } } #[cfg(test)] -mod test { +mod tests { use super::*; - use crate::testing::parse; - macro_rules! parse_test { - ($name:ident, $input:expr, $expected:expr $(,)*) => { + use testing::unindent; + use CompilationErrorKind::*; + + macro_rules! 
test { + { + name: $name:ident, + text: $text:expr, + tree: $tree:tt, + } => { #[test] fn $name() { - let input = $input; - let expected = $expected; - let justfile = parse(input); - let actual = format!("{:#}", justfile); - use pretty_assertions::assert_eq; - assert_eq!(actual, expected); - println!("Re-parsing..."); - let reparsed = parse(&actual); - let redumped = format!("{:#}", reparsed); - assert_eq!(redumped, actual); + let text: String = $text.into(); + let want = tree!($tree); + test(&text, want); + } + } + } + + fn test(text: &str, want: Tree) { + let unindented = unindent(text); + let tokens = Lexer::lex(&unindented).expect("lexing failed"); + let justfile = Parser::parse(&tokens).expect("parsing failed"); + let have = justfile.tree(); + if have != want { + println!("parsed text: {}", unindented); + println!("expected: {}", want); + println!("but got: {}", have); + println!("tokens: {:?}", tokens); + panic!(); + } + } + + macro_rules! error { + ( + name: $name:ident, + input: $input:expr, + offset: $offset:expr, + line: $line:expr, + column: $column:expr, + width: $width:expr, + kind: $kind:expr, + ) => { + #[test] + fn $name() { + error($input, $offset, $line, $column, $width, $kind); } }; } - parse_test! { - parse_empty, - " + fn error( + src: &str, + offset: usize, + line: usize, + column: usize, + width: usize, + kind: CompilationErrorKind, + ) { + let expected = CompilationError { + src, + offset, + line, + column, + width, + kind, + }; + + let tokens = Lexer::lex(src).expect("Lexing failed in parse test..."); + + match Parser::parse(&tokens) { + Ok(_) => panic!("Parsing succeeded but expected: {}\n{}", expected, src), + Err(actual) => { + assert_eq!(actual, expected); + } + } + } + + test! { + name: empty, + text: "", + tree: (justfile), + } + + test! { + name: empty_multiline, + text: " + + -# hello ", - "", + tree: (justfile), } - parse_test! { - parse_string_default, - r#" - -foo a="b\t": - - - "#, - r#"foo a="b\t":"#, + test! 
{ + name: whitespace, + text: " ", + tree: (justfile), } - parse_test! { - parse_multiple, - r#" -a: -b: -"#, - r#"a: - -b:"#, + test! { + name: alias_single, + text: "alias t := test", + tree: (justfile (alias t test)), } - parse_test! { - parse_variadic, - r#" - -foo +a: - - - "#, - r#"foo +a:"#, + test! { + name: aliases_multiple, + text: "alias t := test\nalias b := build", + tree: ( + justfile + (alias t test) + (alias b build) + ), } - parse_test! { - parse_variadic_string_default, - r#" - -foo +a="Hello": - - - "#, - r#"foo +a="Hello":"#, + test! { + name: alias_equals, + text: "alias t = test", + tree: (justfile + (alias t test) + (warning deprecated_equals) + ), } - parse_test! { - parse_raw_string_default, - r#" - -foo a='b\t': - - - "#, - r#"foo a='b\t':"#, + test! { + name: export, + text: r#"export x := "hello""#, + tree: (justfile (assignment #export x "hello")), } - parse_test! { - parse_export, - r#" -export a := "hello" - - "#, - r#"export a := "hello""#, + test! { + name: export_equals, + text: r#"export x = "hello""#, + tree: (justfile + (assignment #export x "hello") + (warning deprecated_equals) + ), } - parse_test! { - parse_alias_after_target, - r#" -foo: - echo a -alias f := foo -"#, -r#"alias f := foo - -foo: - echo a"# + test! { + name: assignment, + text: r#"x := "hello""#, + tree: (justfile (assignment x "hello")), } - parse_test! { - parse_alias_before_target, - r#" -alias f := foo -foo: - echo a -"#, -r#"alias f := foo - -foo: - echo a"# + test! { + name: assignment_equals, + text: r#"x = "hello""#, + tree: (justfile + (assignment x "hello") + (warning deprecated_equals) + ), } - parse_test! { - parse_alias_with_comment, - r#" -alias f := foo #comment -foo: - echo a -"#, -r#"alias f := foo - -foo: - echo a"# + test! { + name: backtick, + text: "x := `hello`", + tree: (justfile (assignment x (backtick "hello"))), } - parse_test! 
{ - parse_complex, - " -x: -y: -z: -foo := \"xx\" -bar := foo -goodbye := \"y\" -hello a b c : x y z #hello - #! blah - #blarg - {{ foo + bar}}abc{{ goodbye\t + \"x\" }}xyz - 1 - 2 - 3 -", - "bar := foo - -foo := \"xx\" - -goodbye := \"y\" - -hello a b c: x y z - #! blah - #blarg - {{foo + bar}}abc{{goodbye + \"x\"}}xyz - 1 - 2 - 3 - -x: - -y: - -z:" + test! { + name: variable, + text: "x := y", + tree: (justfile (assignment x y)), } - parse_test! { - parse_shebang, - " -practicum := 'hello' -install: -\t#!/bin/sh -\tif [[ -f {{practicum}} ]]; then -\t\treturn -\tfi -", - "practicum := 'hello' - -install: - #!/bin/sh - if [[ -f {{practicum}} ]]; then - \treturn - fi", + test! { + name: group, + text: "x := (y)", + tree: (justfile (assignment x (y))), } - parse_test! { - parse_simple_shebang, - "a:\n #!\n print(1)", - "a:\n #!\n print(1)", + test! { + name: addition_single, + text: "x := a + b", + tree: (justfile (assignment x (+ a b))), } - parse_test! { - parse_assignments, - r#"a := "0" -c := a + b + a + b -b := "1" -"#, - r#"a := "0" - -b := "1" - -c := a + b + a + b"#, + test! { + name: addition_chained, + text: "x := a + b + c", + tree: (justfile (assignment x (+ a (+ b c)))), } - parse_test! { - parse_assignment_backticks, - "a := `echo hello` -c := a + b + a + b -b := `echo goodbye`", - "a := `echo hello` - -b := `echo goodbye` - -c := a + b + a + b", + test! { + name: call_one_arg, + text: "x := foo(y)", + tree: (justfile (assignment x (call foo y))), } - parse_test! { - parse_interpolation_backticks, - r#"a: - echo {{ `echo hello` + "blarg" }} {{ `echo bob` }}"#, - r#"a: - echo {{`echo hello` + "blarg"}} {{`echo bob`}}"#, + test! { + name: call_multiple_args, + text: "x := foo(y, z)", + tree: (justfile (assignment x (call foo y z))), } - parse_test! { - eof_test, - "x:\ny:\nz:\na b c: x y z", - "a b c: x y z\n\nx:\n\ny:\n\nz:", + test! { + name: call_trailing_comma, + text: "x := foo(y,)", + tree: (justfile (assignment x (call foo y))), } - parse_test! 
{ - string_quote_escape, - r#"a := "hello\"""#, - r#"a := "hello\"""#, + test! { + name: recipe, + text: "foo:", + tree: (justfile (recipe foo)), } - parse_test! { - string_escapes, - r#"a := "\n\t\r\"\\""#, - r#"a := "\n\t\r\"\\""#, + test! { + name: recipe_multiple, + text: " + foo: + bar: + baz: + ", + tree: (justfile (recipe foo) (recipe bar) (recipe baz)), } - parse_test! { - parameters, - "a b c: - {{b}} {{c}}", - "a b c: - {{b}} {{c}}", + test! { + name: recipe_quiet, + text: "@foo:", + tree: (justfile (recipe #quiet foo)), } - parse_test! { - unary_functions, - " -x := arch() - -a: - {{os()}} {{os_family()}}", - "x := arch() - -a: - {{os()}} {{os_family()}}", + test! { + name: recipe_parameter_single, + text: "foo bar:", + tree: (justfile (recipe foo (params (bar)))), } - parse_test! { - env_functions, - r#" -x := env_var('foo',) - -a: - {{env_var_or_default('foo' + 'bar', 'baz',)}} {{env_var(env_var("baz"))}}"#, - r#"x := env_var('foo') - -a: - {{env_var_or_default('foo' + 'bar', 'baz')}} {{env_var(env_var("baz"))}}"#, + test! { + name: recipe_parameter_multiple, + text: "foo bar baz:", + tree: (justfile (recipe foo (params (bar) (baz)))), } - parse_test! { - parameter_default_string, - r#" -f x="abc": -"#, - r#"f x="abc":"#, + test! { + name: recipe_default_single, + text: r#"foo bar="baz":"#, + tree: (justfile (recipe foo (params (bar "baz")))), } - parse_test! { - parameter_default_raw_string, - r#" -f x='abc': -"#, - r#"f x='abc':"#, + test! { + name: recipe_default_multiple, + text: r#"foo bar="baz" bob="biz":"#, + tree: (justfile (recipe foo (params (bar "baz") (bob "biz")))), } - parse_test! { - parameter_default_backtick, - r#" -f x=`echo hello`: -"#, - r#"f x=`echo hello`:"#, + test! { + name: recipe_variadic, + text: r#"foo +bar:"#, + tree: (justfile (recipe foo (params +(bar)))), } - parse_test! { - parameter_default_concatination_string, - r#" -f x=(`echo hello` + "foo"): -"#, - r#"f x=(`echo hello` + "foo"):"#, + test! 
{ + name: recipe_variadic_string_default, + text: r#"foo +bar="baz":"#, + tree: (justfile (recipe foo (params +(bar "baz")))), } - parse_test! { - parameter_default_concatination_variable, - r#" -x := "10" -f y=(`echo hello` + x) +z="foo": -"#, - r#"x := "10" - -f y=(`echo hello` + x) +z="foo":"#, + test! { + name: recipe_variadic_variable_default, + text: r#"foo +bar=baz:"#, + tree: (justfile (recipe foo (params +(bar baz)))), } - parse_test! { - parameter_default_multiple, - r#" -x := "10" -f y=(`echo hello` + x) +z=("foo" + "bar"): -"#, - r#"x := "10" - -f y=(`echo hello` + x) +z=("foo" + "bar"):"#, + test! { + name: recipe_variadic_addition_group_default, + text: r#"foo +bar=(baz + bob):"#, + tree: (justfile (recipe foo (params +(bar ((+ baz bob)))))), } - parse_test! { - concatination_in_group, - "x := ('0' + '1')", - "x := ('0' + '1')", + test! { + name: recipe_dependency_single, + text: "foo: bar", + tree: (justfile (recipe foo (deps bar))), } - parse_test! { - string_in_group, - "x := ('0' )", - "x := ('0')", + test! { + name: recipe_dependency_multiple, + text: "foo: bar baz", + tree: (justfile (recipe foo (deps bar baz))), } - #[rustfmt::skip] - parse_test! { - escaped_dos_newlines, - "@spam:\r -\t{ \\\r -\t\tfiglet test; \\\r -\t\tcargo build --color always 2>&1; \\\r -\t\tcargo test --color always -- --color always 2>&1; \\\r -\t} | less\r -", -"@spam: - { \\ - \tfiglet test; \\ - \tcargo build --color always 2>&1; \\ - \tcargo test --color always -- --color always 2>&1; \\ - } | less", + test! { + name: recipe_line_single, + text: "foo:\n bar", + tree: (justfile (recipe foo (body ("bar")))), } - error_test! { - name: duplicate_alias, - input: "alias foo = bar\nalias foo = baz", - offset: 22, - line: 1, - column: 6, - width: 3, - kind: DuplicateAlias { alias: "foo", first: 0 }, + test! { + name: recipe_line_multiple, + text: "foo:\n bar\n baz\n {{\"bob\"}}biz", + tree: (justfile (recipe foo (body ("bar") ("baz") (("bob") "biz")))), } - error_test! 
{ + test! { + name: recipe_line_interpolation, + text: "foo:\n bar{{\"bob\"}}biz", + tree: (justfile (recipe foo (body ("bar" ("bob") "biz")))), + } + + test! { + name: comment, + text: "# foo", + tree: (justfile), + } + + test! { + name: comment_alias, + text: "alias x := y # foo", + tree: (justfile (alias x y)), + } + + test! { + name: comment_assignment, + text: "x := y # foo", + tree: (justfile (assignment x y)), + } + + test! { + name: comment_export, + text: "export x := y # foo", + tree: (justfile (assignment #export x y)), + } + + test! { + name: comment_recipe, + text: "foo: # bar", + tree: (justfile (recipe foo)), + } + + test! { + name: comment_recipe_dependencies, + text: "foo: bar # baz", + tree: (justfile (recipe foo (deps bar))), + } + + test! { + name: doc_comment_single, + text: " + # foo + bar: + ", + tree: (justfile (recipe "foo" bar)), + } + + test! { + name: doc_comment_recipe_clear, + text: " + # foo + bar: + baz: + ", + tree: (justfile (recipe "foo" bar) (recipe baz)), + } + + test! { + name: doc_comment_middle, + text: " + bar: + # foo + baz: + ", + tree: (justfile (recipe bar) (recipe "foo" baz)), + } + + test! { + name: doc_comment_assignment_clear, + text: " + # foo + x := y + bar: + ", + tree: (justfile (assignment x y) (recipe bar)), + } + + test! { + name: doc_comment_empty_line_clear, + text: " + # foo + + bar: + ", + tree: (justfile (recipe bar)), + } + + test! { + name: string_escape_tab, + text: r#"x := "foo\tbar""#, + tree: (justfile (assignment x "foo\tbar")), + } + + test! { + name: string_escape_newline, + text: r#"x := "foo\nbar""#, + tree: (justfile (assignment x "foo\nbar")), + } + + test! { + name: string_escape_carriage_return, + text: r#"x := "foo\rbar""#, + tree: (justfile (assignment x "foo\rbar")), + } + + test! { + name: string_escape_slash, + text: r#"x := "foo\\bar""#, + tree: (justfile (assignment x "foo\\bar")), + } + + test! 
{ + name: string_escape_quote, + text: r#"x := "foo\"bar""#, + tree: (justfile (assignment x "foo\"bar")), + } + + test! { + name: recipe_variadic_with_default_after_default, + text: r#" + f a=b +c=d: + "#, + tree: (justfile (recipe f (params (a b) +(c d)))), + } + + test! { + name: parameter_default_concatination_variable, + text: r#" + x := "10" + + f y=(`echo hello` + x) +z="foo": + "#, + tree: (justfile + (assignment x "10") + (recipe f (params (y ((+ (backtick "echo hello") x))) +(z "foo"))) + ), + } + + test! { + name: parameter_default_multiple, + text: r#" + x := "10" + f y=(`echo hello` + x) +z=("foo" + "bar"): + "#, + tree: (justfile + (assignment x "10") + (recipe f (params (y ((+ (backtick "echo hello") x))) +(z ((+ "foo" "bar"))))) + ), + } + + test! { + name: parse_raw_string_default, + text: r#" + + foo a='b\t': + + + "#, + tree: (justfile (recipe foo (params (a "b\\t")))), + } + + test! { + name: parse_alias_after_target, + text: r" + foo: + echo a + alias f := foo + ", + tree: (justfile + (recipe foo (body ("echo a"))) + (alias f foo) + ), + } + + test! { + name: parse_alias_before_target, + text: " + alias f := foo + foo: + echo a + ", + tree: (justfile + (alias f foo) + (recipe foo (body ("echo a"))) + ), + } + + test! { + name: parse_alias_with_comment, + text: " + alias f := foo #comment + foo: + echo a + ", + tree: (justfile + (alias f foo) + (recipe foo (body ("echo a"))) + ), + } + + test! { + name: parse_assignment_with_comment, + text: " + f := foo #comment + foo: + echo a + ", + tree: (justfile + (assignment f foo) + (recipe foo (body ("echo a"))) + ), + } + + test! { + name: parse_complex, + text: " + x: + y: + z: + foo := \"xx\" + bar := foo + goodbye := \"y\" + hello a b c : x y z #hello + #! 
blah + #blarg + {{ foo + bar}}abc{{ goodbye\t + \"x\" }}xyz + 1 + 2 + 3 + ", + tree: (justfile + (recipe x) + (recipe y) + (recipe z) + (assignment foo "xx") + (assignment bar foo) + (assignment goodbye "y") + (recipe hello + (params (a) (b) (c)) + (deps x y z) + (body + ("#! blah") + ("#blarg") + (((+ foo bar)) "abc" ((+ goodbye "x")) "xyz") + ("1") + ("2") + ("3") + ) + ) + ), + } + + test! { + name: parse_shebang, + text: " + practicum := 'hello' + install: + \t#!/bin/sh + \tif [[ -f {{practicum}} ]]; then + \t\treturn + \tfi + ", + tree: (justfile + (assignment practicum "hello") + (recipe install + (body + ("#!/bin/sh") + ("if [[ -f " (practicum) " ]]; then") + ("\treturn") + ("fi") + ) + ) + ), + } + + test! { + name: parse_simple_shebang, + text: "a:\n #!\n print(1)", + tree: (justfile + (recipe a (body ("#!") (" print(1)"))) + ), + } + + test! { + name: parse_assignments, + text: r#" + a := "0" + c := a + b + a + b + b := "1" + "#, + tree: (justfile + (assignment a "0") + (assignment c (+ a (+ b (+ a b)))) + (assignment b "1") + ), + } + + test! { + name: parse_assignment_backticks, + text: " + a := `echo hello` + c := a + b + a + b + b := `echo goodbye` + ", + tree: (justfile + (assignment a (backtick "echo hello")) + (assignment c (+ a (+ b (+ a b)))) + (assignment b (backtick "echo goodbye")) + ), + } + + test! { + name: parse_interpolation_backticks, + text: r#" + a: + echo {{ `echo hello` + "blarg" }} {{ `echo bob` }} + "#, + tree: (justfile + (recipe a + (body ("echo " ((+ (backtick "echo hello") "blarg")) " " ((backtick "echo bob")))) + ) + ), + } + + test! { + name: eof_test, + text: "x:\ny:\nz:\na b c: x y z", + tree: (justfile + (recipe x) + (recipe y) + (recipe z) + (recipe a (params (b) (c)) (deps x y z)) + ), + } + + test! { + name: string_quote_escape, + text: r#"a := "hello\"""#, + tree: (justfile + (assignment a "hello\"") + ), + } + + test! 
{ + name: string_escapes, + text: r#"a := "\n\t\r\"\\""#, + tree: (justfile (assignment a "\n\t\r\"\\")), + } + + test! { + name: parameters, + text: " + a b c: + {{b}} {{c}} + ", + tree: (justfile (recipe a (params (b) (c)) (body ((b) " " (c))))), + } + + test! { + name: unary_functions, + text: " + x := arch() + + a: + {{os()}} {{os_family()}} + ", + tree: (justfile + (assignment x (call arch)) + (recipe a (body (((call os)) " " ((call os_family))))) + ), + } + + test! { + name: env_functions, + text: r#" + x := env_var('foo',) + + a: + {{env_var_or_default('foo' + 'bar', 'baz',)}} {{env_var(env_var("baz"))}} + "#, + tree: (justfile + (assignment x (call env_var "foo")) + (recipe a + (body + ( + ((call env_var_or_default (+ "foo" "bar") "baz")) + " " + ((call env_var (call env_var "baz"))) + ) + ) + ) + ), + } + + test! { + name: parameter_default_string, + text: r#" + f x="abc": + "#, + tree: (justfile (recipe f (params (x "abc")))), + } + + test! { + name: parameter_default_raw_string, + text: r" + f x='abc': + ", + tree: (justfile (recipe f (params (x "abc")))), + } + + test! { + name: parameter_default_backtick, + text: " + f x=`echo hello`: + ", + tree: (justfile + (recipe f (params (x (backtick "echo hello")))) + ), + } + + test! { + name: parameter_default_concatination_string, + text: r#" + f x=(`echo hello` + "foo"): + "#, + tree: (justfile (recipe f (params (x ((+ (backtick "echo hello") "foo")))))), + } + + test! { + name: concatination_in_group, + text: "x := ('0' + '1')", + tree: (justfile (assignment x ((+ "0" "1")))), + } + + test! { + name: string_in_group, + text: "x := ('0' )", + tree: (justfile (assignment x ("0"))), + } + + test! 
{ + name: escaped_dos_newlines, + text: " + @spam:\r + \t{ \\\r + \t\tfiglet test; \\\r + \t\tcargo build --color always 2>&1; \\\r + \t\tcargo test --color always -- --color always 2>&1; \\\r + \t} | less\r + ", + tree: (justfile + (recipe #quiet spam + (body + ("{ \\") + ("\tfiglet test; \\") + ("\tcargo build --color always 2>&1; \\") + ("\tcargo test --color always -- --color always 2>&1; \\") + ("} | less") + ) + ) + ), + } + + test! { + name: empty_body, + text: "a:", + tree: (justfile (recipe a)), + } + + test! { + name: single_line_body, + text: "a:\n foo", + tree: (justfile (recipe a (body ("foo")))), + } + + test! { + name: trimmed_body, + text: "a:\n foo\n \n \n \nb:\n ", + tree: (justfile (recipe a (body ("foo"))) (recipe b)), + } + + error! { name: alias_syntax_multiple_rhs, input: "alias foo = bar baz", offset: 16, line: 0, column: 16, width: 3, - kind: UnexpectedToken { expected: vec![Eol, Eof], found: Name }, + kind: UnexpectedToken { expected: vec![Eof, Eol], found: Identifier }, } - error_test! { + error! { name: alias_syntax_no_rhs, input: "alias foo = \n", offset: 12, line: 0, column: 12, width: 1, - kind: UnexpectedToken {expected: vec![Name], found:Eol}, + kind: UnexpectedToken {expected: vec![Identifier], found:Eol}, } - error_test! { - name: unknown_alias_target, - input: "alias foo = bar\n", - offset: 6, - line: 0, - column: 6, - width: 3, - kind: UnknownAliasTarget {alias: "foo", target: "bar"}, - } - - error_test! { - name: alias_shadows_recipe_before, - input: "bar: \n echo bar\nalias foo = bar\nfoo:\n echo foo", - offset: 23, - line: 2, - column: 6, - width: 3, - kind: AliasShadowsRecipe {alias: "foo", recipe_line: 3}, - } - - error_test! { - name: alias_shadows_recipe_after, - input: "foo:\n echo foo\nalias foo = bar\nbar:\n echo bar", - offset: 22, - line: 2, - column: 6, - width: 3, - kind: AliasShadowsRecipe { alias: "foo", recipe_line: 0 }, - } - - error_test! { + error! 
{ name: missing_colon, input: "a b c\nd e f", offset: 5, line: 0, column: 5, width: 1, - kind: UnexpectedToken{expected: vec![Name, Plus, Colon], found: Eol}, + kind: UnexpectedToken{expected: vec![Colon, Equals, Identifier, Plus], found: Eol}, } - error_test! { + error! { name: missing_default_eol, input: "hello arg=\n", offset: 10, line: 0, column: 10, width: 1, - kind: UnexpectedToken{expected: vec![Name, StringCooked], found: Eol}, + kind: UnexpectedToken{expected: vec![Backtick, Identifier, ParenL, StringCooked, StringRaw], found: Eol}, } - error_test! { + error! { name: missing_default_eof, input: "hello arg=", offset: 10, line: 0, column: 10, width: 0, - kind: UnexpectedToken{expected: vec![Name, StringCooked], found: Eof}, + kind: UnexpectedToken{expected: vec![Backtick, Identifier, ParenL, StringCooked, StringRaw], found: Eof}, } - error_test! { - name: parameter_after_variadic, - input: "foo +a bbb:", - offset: 7, - line: 0, - column: 7, - width: 3, - kind: ParameterFollowsVariadicParameter{parameter: "bbb"}, - } - - error_test! { - name: required_after_default, - input: "hello arg='foo' bar:", - offset: 16, - line: 0, - column: 16, - width: 3, - kind: RequiredParameterFollowsDefaultParameter{parameter: "bar"}, - } - - error_test! { + error! { name: missing_eol, input: "a b c: z =", offset: 9, line: 0, column: 9, width: 1, - kind: UnexpectedToken{expected: vec![Name, Eol, Eof], found: Equals}, + kind: UnexpectedToken{expected: vec![Eof, Eol, Identifier], found: Equals}, } - error_test! { - name: duplicate_parameter, - input: "a b b:", - offset: 4, - line: 0, - column: 4, - width: 1, - kind: DuplicateParameter{recipe: "a", parameter: "b"}, - } - - error_test! { - name: parameter_shadows_varible, - input: "foo = \"h\"\na foo:", - offset: 12, - line: 1, - column: 2, - width: 3, - kind: ParameterShadowsVariable{parameter: "foo"}, - } - - error_test! 
{ - name: dependency_has_parameters, - input: "foo arg:\nb: foo", - offset: 12, - line: 1, - column: 3, - width: 3, - kind: DependencyHasParameters{recipe: "b", dependency: "foo"}, - } - - error_test! { - name: duplicate_dependency, - input: "a b c: b c z z", - offset: 13, - line: 0, - column: 13, - width: 1, - kind: DuplicateDependency{recipe: "a", dependency: "z"}, - } - - error_test! { - name: duplicate_recipe, - input: "a:\nb:\na:", - offset: 6, - line: 2, - column: 0, - width: 1, - kind: DuplicateRecipe{recipe: "a", first: 0}, - } - - error_test! { - name: duplicate_variable, - input: "a = \"0\"\na = \"0\"", - offset: 8, - line: 1, - column: 0, - width: 1, - kind: DuplicateVariable{variable: "a"}, - } - - error_test! { - name: extra_whitespace, - input: "a:\n blah\n blarg", - offset: 10, - line: 2, - column: 1, - width: 6, - kind: ExtraLeadingWhitespace, - } - - error_test! { + error! { name: interpolation_outside_of_recipe, input: "{{", offset: 0, line: 0, column: 0, width: 2, - kind: UnexpectedToken{expected: vec![Name, At], found: InterpolationStart}, + kind: UnexpectedToken{expected: vec![At, Identifier], found: InterpolationStart}, } - error_test! { + error! { name: unclosed_parenthesis_in_expression, input: "x = foo(", - offset: 8, + offset: 8, line: 0, column: 8, width: 0, - kind: UnexpectedToken{expected: vec![Name, StringCooked, ParenR], found: Eof}, + kind: UnexpectedToken{ + expected: vec![Backtick, Identifier, ParenL, ParenR, StringCooked, StringRaw], + found: Eof, + }, } - error_test! { + error! { name: unclosed_parenthesis_in_interpolation, input: "a:\n echo {{foo(}}", offset: 15, line: 1, column: 12, width: 2, - kind: UnexpectedToken{expected: vec![Name, StringCooked, ParenR], found: InterpolationEnd}, + kind: UnexpectedToken{ + expected: vec![Backtick, Identifier, ParenL, ParenR, StringCooked, StringRaw], + found: InterpolationEnd, + }, } - error_test! { + error! 
{ name: plus_following_parameter, input: "a b c+:", - offset: 5, + offset: 6, line: 0, - column: 5, + column: 6, width: 1, - kind: UnexpectedToken{expected: vec![Name], found: Plus}, + kind: UnexpectedToken{expected: vec![Identifier], found: Colon}, } - error_test! { + error! { + name: invalid_escape_sequence, + input: r#"foo := "\b""#, + offset: 7, + line: 0, + column: 7, + width: 4, + kind: InvalidEscapeSequence{character: 'b'}, + } + + error! { name: bad_export, input: "export a", offset: 8, line: 0, column: 8, width: 0, - kind: UnexpectedToken{expected: vec![Name, Plus, Colon], found: Eof}, + kind: UnexpectedToken{expected: vec![Colon, Equals, Identifier, Plus], found: Eof}, } - #[test] - fn readme_test() { - let mut justfiles = vec![]; - let mut current = None; - - for line in fs::read_to_string("README.adoc").unwrap().lines() { - if let Some(mut justfile) = current { - if line == "```" { - justfiles.push(justfile); - current = None; - } else { - justfile += line; - justfile += "\n"; - current = Some(justfile); - } - } else if line == "```make" { - current = Some(String::new()); - } - } - - for justfile in justfiles { - parse(&justfile); - } + error! { + name: parameter_follows_variadic_parameter, + input: "foo +a b:", + offset: 7, + line: 0, + column: 7, + width: 1, + kind: ParameterFollowsVariadicParameter{parameter: "b"}, } - #[test] - fn empty_recipe_lines() { - let text = "a:"; - let justfile = parse(&text); - - assert_eq!(justfile.recipes["a"].lines.len(), 0); + error! 
{ + name: parameter_after_variadic, + input: "foo +a bbb:", + offset: 7, + line: 0, + column: 7, + width: 3, + kind: ParameterFollowsVariadicParameter{parameter: "bbb"}, } - #[test] - fn simple_recipe_lines() { - let text = "a:\n foo"; - let justfile = parse(&text); - - assert_eq!(justfile.recipes["a"].lines.len(), 1); - } - - #[test] - fn complex_recipe_lines() { - let text = "a: - foo - -b: -"; - - let justfile = parse(&text); - - assert_eq!(justfile.recipes["a"].lines.len(), 1); + error! { + name: concatination_in_default, + input: "foo a=c+d e:", + offset: 10, + line: 0, + column: 10, + width: 1, + kind: ParameterFollowsVariadicParameter{parameter: "e"}, } } diff --git a/src/range_ext.rs b/src/range_ext.rs index dd4296b..8145fd2 100644 --- a/src/range_ext.rs +++ b/src/range_ext.rs @@ -23,7 +23,7 @@ where } #[cfg(test)] -mod test { +mod tests { use super::*; #[test] diff --git a/src/recipe.rs b/src/recipe.rs index 48bcd1a..55291f0 100644 --- a/src/recipe.rs +++ b/src/recipe.rs @@ -22,14 +22,13 @@ fn error_from_signal( } } +/// A recipe, e.g. 
`foo: bar baz` #[derive(PartialEq, Debug)] pub(crate) struct Recipe<'a> { - pub(crate) dependencies: Vec<&'a str>, - pub(crate) dependency_tokens: Vec>, + pub(crate) dependencies: Vec>, pub(crate) doc: Option<&'a str>, - pub(crate) line_number: usize, - pub(crate) lines: Vec>>, - pub(crate) name: &'a str, + pub(crate) body: Vec>, + pub(crate) name: Name<'a>, pub(crate) parameters: Vec>, pub(crate) private: bool, pub(crate) quiet: bool, @@ -57,12 +56,19 @@ impl<'a> Recipe<'a> { } } + pub(crate) fn name(&self) -> &'a str { + self.name.lexeme() + } + + pub(crate) fn line_number(&self) -> usize { + self.name.line + } + pub(crate) fn run( &self, context: &RecipeContext<'a>, arguments: &[&'a str], dotenv: &BTreeMap, - exports: &BTreeSet<&'a str>, ) -> RunResult<'a, ()> { let config = &context.config; @@ -88,7 +94,6 @@ impl<'a> Recipe<'a> { scope: &context.scope, shell: config.shell, dotenv, - exports, }; let mut rest = arguments; @@ -111,13 +116,13 @@ impl<'a> Recipe<'a> { rest = &rest[1..]; value }; - argument_map.insert(parameter.name, value); + argument_map.insert(parameter.name.lexeme(), value); } if self.shebang { let mut evaluated_lines = vec![]; - for line in &self.lines { - evaluated_lines.push(evaluator.evaluate_line(line, &argument_map)?); + for line in &self.body { + evaluated_lines.push(evaluator.evaluate_line(&line.fragments, &argument_map)?); } if config.dry_run || self.quiet { @@ -134,14 +139,14 @@ impl<'a> Recipe<'a> { .prefix("just") .tempdir() .map_err(|error| RuntimeError::TmpdirIoError { - recipe: self.name, + recipe: self.name(), io_error: error, })?; let mut path = tmp.path().to_path_buf(); - path.push(self.name); + path.push(self.name()); { let mut f = fs::File::create(&path).map_err(|error| RuntimeError::TmpdirIoError { - recipe: self.name, + recipe: self.name(), io_error: error, })?; let mut text = String::new(); @@ -151,7 +156,7 @@ impl<'a> Recipe<'a> { // add blank lines so that lines in the generated script // have the same line number as the 
corresponding lines // in the justfile - for _ in 1..(self.line_number + 2) { + for _ in 1..(self.line_number() + 2) { text += "\n" } for line in &evaluated_lines[1..] { @@ -165,14 +170,14 @@ impl<'a> Recipe<'a> { f.write_all(text.as_bytes()) .map_err(|error| RuntimeError::TmpdirIoError { - recipe: self.name, + recipe: self.name(), io_error: error, })?; } // make the script executable Platform::set_execute_permission(&path).map_err(|error| RuntimeError::TmpdirIoError { - recipe: self.name, + recipe: self.name(), io_error: error, })?; @@ -193,12 +198,12 @@ impl<'a> Recipe<'a> { let mut command = Platform::make_shebang_command(&path, interpreter, argument).map_err(|output_error| { RuntimeError::Cygpath { - recipe: self.name, + recipe: self.name(), output_error, } })?; - command.export_environment_variables(&context.scope, dotenv, exports)?; + command.export_environment_variables(&context.scope, dotenv)?; // run it! match InterruptHandler::guard(|| command.status()) { @@ -206,18 +211,18 @@ impl<'a> Recipe<'a> { if let Some(code) = exit_status.code() { if code != 0 { return Err(RuntimeError::Code { - recipe: self.name, + recipe: self.name(), line_number: None, code, }); } } else { - return Err(error_from_signal(self.name, None, exit_status)); + return Err(error_from_signal(self.name(), None, exit_status)); } } Err(io_error) => { return Err(RuntimeError::Shebang { - recipe: self.name, + recipe: self.name(), command: interpreter.to_string(), argument: argument.map(String::from), io_error, @@ -225,8 +230,8 @@ impl<'a> Recipe<'a> { } }; } else { - let mut lines = self.lines.iter().peekable(); - let mut line_number = self.line_number + 1; + let mut lines = self.body.iter().peekable(); + let mut line_number = self.line_number() + 1; loop { if lines.peek().is_none() { break; @@ -238,8 +243,8 @@ impl<'a> Recipe<'a> { } let line = lines.next().unwrap(); line_number += 1; - evaluated += &evaluator.evaluate_line(line, &argument_map)?; - if 
line.last().map(Fragment::continuation).unwrap_or(false) { + evaluated += &evaluator.evaluate_line(&line.fragments, &argument_map)?; + if line.is_continuation() { evaluated.pop(); } else { break; @@ -280,25 +285,29 @@ impl<'a> Recipe<'a> { cmd.stdout(Stdio::null()); } - cmd.export_environment_variables(&context.scope, dotenv, exports)?; + cmd.export_environment_variables(&context.scope, dotenv)?; match InterruptHandler::guard(|| cmd.status()) { Ok(exit_status) => { if let Some(code) = exit_status.code() { if code != 0 { return Err(RuntimeError::Code { - recipe: self.name, + recipe: self.name(), line_number: Some(line_number), code, }); } } else { - return Err(error_from_signal(self.name, Some(line_number), exit_status)); + return Err(error_from_signal( + self.name(), + Some(line_number), + exit_status, + )); } } Err(io_error) => { return Err(RuntimeError::IoError { - recipe: self.name, + recipe: self.name(), io_error, }); } @@ -309,6 +318,12 @@ impl<'a> Recipe<'a> { } } +impl<'src> Keyed<'src> for Recipe<'src> { + fn key(&self) -> &'src str { + self.name.lexeme() + } +} + impl<'a> Display for Recipe<'a> { fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> { if let Some(doc) = self.doc { @@ -329,20 +344,20 @@ impl<'a> Display for Recipe<'a> { write!(f, " {}", dependency)?; } - for (i, pieces) in self.lines.iter().enumerate() { + for (i, line) in self.body.iter().enumerate() { if i == 0 { writeln!(f)?; } - for (j, piece) in pieces.iter().enumerate() { + for (j, fragment) in line.fragments.iter().enumerate() { if j == 0 { write!(f, " ")?; } - match *piece { - Fragment::Text { ref text } => write!(f, "{}", text.lexeme())?, - Fragment::Expression { ref expression, .. } => write!(f, "{{{{{}}}}}", expression)?, + match fragment { + Fragment::Text { token } => write!(f, "{}", token.lexeme())?, + Fragment::Interpolation { expression, .. 
} => write!(f, "{{{{{}}}}}", expression)?, } } - if i + 1 < self.lines.len() { + if i + 1 < self.body.len() { writeln!(f)?; } } diff --git a/src/recipe_context.rs b/src/recipe_context.rs index 42a3fce..76a12eb 100644 --- a/src/recipe_context.rs +++ b/src/recipe_context.rs @@ -2,5 +2,5 @@ use crate::common::*; pub(crate) struct RecipeContext<'a> { pub(crate) config: &'a Config<'a>, - pub(crate) scope: BTreeMap<&'a str, String>, + pub(crate) scope: BTreeMap<&'a str, (bool, String)>, } diff --git a/src/recipe_resolver.rs b/src/recipe_resolver.rs index 2619500..b034a18 100644 --- a/src/recipe_resolver.rs +++ b/src/recipe_resolver.rs @@ -17,22 +17,19 @@ pub(crate) struct RecipeResolver<'a: 'b, 'b> { seen: BTreeSet<&'a str>, resolved: BTreeSet<&'a str>, recipes: &'b BTreeMap<&'a str, Recipe<'a>>, - assignments: &'b BTreeMap<&'a str, Expression<'a>>, - text: &'a str, + assignments: &'b BTreeMap<&'a str, Assignment<'a>>, } impl<'a, 'b> RecipeResolver<'a, 'b> { pub(crate) fn resolve_recipes( recipes: &BTreeMap<&'a str, Recipe<'a>>, - assignments: &BTreeMap<&'a str, Expression<'a>>, - text: &'a str, + assignments: &BTreeMap<&'a str, Assignment<'a>>, ) -> CompilationResult<'a, ()> { let mut resolver = RecipeResolver { seen: empty(), stack: empty(), resolved: empty(), assignments, - text, recipes, }; @@ -48,19 +45,19 @@ impl<'a, 'b> RecipeResolver<'a, 'b> { resolver.resolve_function(function, argc)?; } for variable in expression.variables() { - resolver.resolve_variable(variable, &[])?; + resolver.resolve_variable(&variable, &[])?; } } } - for line in &recipe.lines { - for fragment in line { - if let Fragment::Expression { ref expression, .. } = *fragment { + for line in &recipe.body { + for fragment in &line.fragments { + if let Fragment::Interpolation { expression, .. 
} = fragment { for (function, argc) in expression.functions() { resolver.resolve_function(function, argc)?; } for variable in expression.variables() { - resolver.resolve_variable(variable, &recipe.parameters)?; + resolver.resolve_variable(&variable, &recipe.parameters)?; } } } @@ -70,27 +67,27 @@ impl<'a, 'b> RecipeResolver<'a, 'b> { Ok(()) } - fn resolve_function(&self, function: &Token, argc: usize) -> CompilationResult<'a, ()> { - Function::resolve(function, argc).map_err(|error| CompilationError { + fn resolve_function(&self, function: Token<'a>, argc: usize) -> CompilationResult<'a, ()> { + Function::resolve(&function, argc).map_err(|error| CompilationError { offset: error.offset, line: error.line, column: error.column, width: error.width, kind: UnknownFunction { - function: &self.text[error.offset..error.offset + error.width], + function: &function.src[error.offset..error.offset + error.width], }, - text: self.text, + src: function.src, }) } fn resolve_variable( &self, - variable: &Token, + variable: &Token<'a>, parameters: &[Parameter], ) -> CompilationResult<'a, ()> { let name = variable.lexeme(); let undefined = - !self.assignments.contains_key(name) && !parameters.iter().any(|p| p.name == name); + !self.assignments.contains_key(name) && !parameters.iter().any(|p| p.name.lexeme() == name); if undefined { let error = variable.error(UndefinedVariable { variable: name }); return Err(CompilationError { @@ -99,9 +96,9 @@ impl<'a, 'b> RecipeResolver<'a, 'b> { column: error.column, width: error.width, kind: UndefinedVariable { - variable: &self.text[error.offset..error.offset + error.width], + variable: &variable.src[error.offset..error.offset + error.width], }, - text: self.text, + src: variable.src, }); } @@ -109,25 +106,29 @@ impl<'a, 'b> RecipeResolver<'a, 'b> { } fn resolve_recipe(&mut self, recipe: &Recipe<'a>) -> CompilationResult<'a, ()> { - if self.resolved.contains(recipe.name) { + if self.resolved.contains(recipe.name()) { return Ok(()); } - 
self.stack.push(recipe.name); - self.seen.insert(recipe.name); - for dependency_token in &recipe.dependency_tokens { + self.stack.push(recipe.name()); + self.seen.insert(recipe.name()); + for dependency_token in recipe + .dependencies + .iter() + .map(|dependency| dependency.token()) + { match self.recipes.get(dependency_token.lexeme()) { Some(dependency) => { - if !self.resolved.contains(dependency.name) { - if self.seen.contains(dependency.name) { + if !self.resolved.contains(dependency.name()) { + if self.seen.contains(dependency.name()) { let first = self.stack[0]; self.stack.push(first); return Err( dependency_token.error(CircularRecipeDependency { - recipe: recipe.name, + recipe: recipe.name(), circle: self .stack .iter() - .skip_while(|name| **name != dependency.name) + .skip_while(|name| **name != dependency.name()) .cloned() .collect(), }), @@ -138,23 +139,23 @@ impl<'a, 'b> RecipeResolver<'a, 'b> { } None => { return Err(dependency_token.error(UnknownDependency { - recipe: recipe.name, + recipe: recipe.name(), unknown: dependency_token.lexeme(), })); } } } - self.resolved.insert(recipe.name); + self.resolved.insert(recipe.name()); self.stack.pop(); Ok(()) } } #[cfg(test)] -mod test { +mod tests { use super::*; - error_test! { + analysis_error! { name: circular_recipe_dependency, input: "a: b\nb: a", offset: 8, @@ -164,7 +165,7 @@ mod test { kind: CircularRecipeDependency{recipe: "b", circle: vec!["a", "b", "a"]}, } - error_test! { + analysis_error! { name: self_recipe_dependency, input: "a: a", offset: 3, @@ -174,7 +175,7 @@ mod test { kind: CircularRecipeDependency{recipe: "a", circle: vec!["a", "a"]}, } - error_test! { + analysis_error! { name: unknown_dependency, input: "a: b", offset: 3, @@ -184,7 +185,7 @@ mod test { kind: UnknownDependency{recipe: "a", unknown: "b"}, } - error_test! { + analysis_error! 
{ name: unknown_interpolation_variable, input: "x:\n {{ hello}}", offset: 9, @@ -194,7 +195,7 @@ mod test { kind: UndefinedVariable{variable: "hello"}, } - error_test! { + analysis_error! { name: unknown_second_interpolation_variable, input: "wtf=\"x\"\nx:\n echo\n foo {{wtf}} {{ lol }}", offset: 33, @@ -204,7 +205,7 @@ mod test { kind: UndefinedVariable{variable: "lol"}, } - error_test! { + analysis_error! { name: unknown_function_in_interpolation, input: "a:\n echo {{bar()}}", offset: 11, @@ -214,7 +215,7 @@ mod test { kind: UnknownFunction{function: "bar"}, } - error_test! { + analysis_error! { name: unknown_function_in_default, input: "a f=baz():", offset: 4, @@ -224,7 +225,7 @@ mod test { kind: UnknownFunction{function: "baz"}, } - error_test! { + analysis_error! { name: unknown_variable_in_default, input: "a f=foo:", offset: 4, diff --git a/src/run.rs b/src/run.rs index 9c303e5..b2ea774 100644 --- a/src/run.rs +++ b/src/run.rs @@ -131,7 +131,7 @@ pub fn run() -> Result<(), i32> { } } - let justfile = match Parser::parse(&text) { + let justfile = match Compiler::compile(&text) { Err(error) => { if config.color.stderr().active() { eprintln!("{:#}", error); @@ -177,15 +177,15 @@ pub fn run() -> Result<(), i32> { // Construct a target to alias map. 
let mut recipe_aliases: BTreeMap<&str, Vec<&str>> = BTreeMap::new(); for alias in justfile.aliases.values() { - if alias.private { + if alias.is_private() { continue; } - if !recipe_aliases.contains_key(alias.target) { - recipe_aliases.insert(alias.target, vec![alias.name]); + if !recipe_aliases.contains_key(alias.target.lexeme()) { + recipe_aliases.insert(alias.target.lexeme(), vec![alias.name.lexeme()]); } else { - let aliases = recipe_aliases.get_mut(alias.target).unwrap(); - aliases.push(alias.name); + let aliases = recipe_aliases.get_mut(alias.target.lexeme()).unwrap(); + aliases.push(alias.name.lexeme()); } } @@ -262,7 +262,7 @@ pub fn run() -> Result<(), i32> { if let Subcommand::Show { name } = config.subcommand { if let Some(alias) = justfile.get_alias(name) { - let recipe = justfile.get_recipe(alias.target).unwrap(); + let recipe = justfile.get_recipe(alias.target.lexeme()).unwrap(); println!("{}", alias); println!("{}", recipe); return Ok(()); @@ -291,7 +291,7 @@ pub fn run() -> Result<(), i32> { Count("argument", min_arguments), ); } - vec![recipe.name] + vec![recipe.name()] } else { die!("Justfile contains no recipes."); }; diff --git a/src/runtime_error.rs b/src/runtime_error.rs index 7735b93..c389775 100644 --- a/src/runtime_error.rs +++ b/src/runtime_error.rs @@ -26,7 +26,7 @@ pub(crate) enum RuntimeError<'a> { dotenv_error: dotenv::Error, }, FunctionCall { - token: Token<'a>, + function: Name<'a>, message: String, }, Internal { @@ -91,7 +91,7 @@ impl<'a> Display for RuntimeError<'a> { let message = color.message(); write!(f, "{} {}", error.paint("error:"), message.prefix())?; - let mut error_token = None; + let mut error_token: Option = None; match *self { UnknownRecipes { @@ -235,16 +235,16 @@ impl<'a> Display for RuntimeError<'a> { writeln!(f, "Failed to load .env: {}", dotenv_error)?; } FunctionCall { - ref token, + ref function, ref message, } => { writeln!( f, "Call to function `{}` failed: {}", - token.lexeme(), + function.lexeme(), message 
)?; - error_token = Some(token); + error_token = Some(function.token()); } Shebang { recipe, @@ -332,15 +332,15 @@ impl<'a> Display for RuntimeError<'a> { } => match *output_error { OutputError::Code(code) => { writeln!(f, "Backtick failed with exit code {}", code)?; - error_token = Some(token); + error_token = Some(*token); } OutputError::Signal(signal) => { writeln!(f, "Backtick was terminated by signal {}", signal)?; - error_token = Some(token); + error_token = Some(*token); } OutputError::Unknown => { writeln!(f, "Backtick failed for an unknown reason")?; - error_token = Some(token); + error_token = Some(*token); } OutputError::Io(ref io_error) => { match io_error.kind() { @@ -361,7 +361,7 @@ impl<'a> Display for RuntimeError<'a> { io_error ), }?; - error_token = Some(token); + error_token = Some(*token); } OutputError::Utf8(ref utf8_error) => { writeln!( @@ -369,7 +369,7 @@ impl<'a> Display for RuntimeError<'a> { "Backtick succeeded but stdout was not utf8: {}", utf8_error )?; - error_token = Some(token); + error_token = Some(*token); } }, Internal { ref message } => { @@ -388,7 +388,7 @@ impl<'a> Display for RuntimeError<'a> { write_message_context( f, Color::fmt(f).error(), - token.text, + token.src, token.offset, token.line, token.column, diff --git a/src/shebang.rs b/src/shebang.rs index b6d4bdc..a5e08e9 100644 --- a/src/shebang.rs +++ b/src/shebang.rs @@ -31,7 +31,7 @@ impl<'a> Shebang<'a> { } #[cfg(test)] -mod test { +mod tests { use super::Shebang; #[test] diff --git a/src/string_literal.rs b/src/string_literal.rs index d2de5bd..9d210dd 100644 --- a/src/string_literal.rs +++ b/src/string_literal.rs @@ -1,59 +1,12 @@ use crate::common::*; #[derive(PartialEq, Debug)] -pub(crate) struct StringLiteral<'a> { - pub(crate) raw: &'a str, - pub(crate) cooked: Cow<'a, str>, +pub(crate) struct StringLiteral<'src> { + pub(crate) raw: &'src str, + pub(crate) cooked: Cow<'src, str>, } -impl<'a> StringLiteral<'a> { - pub(crate) fn new(token: &Token<'a>) -> 
CompilationResult<'a, StringLiteral<'a>> { - let raw = &token.lexeme()[1..token.lexeme().len() - 1]; - - if let TokenKind::StringRaw = token.kind { - Ok(StringLiteral { - cooked: Cow::Borrowed(raw), - raw, - }) - } else if let TokenKind::StringCooked = token.kind { - let mut cooked = String::new(); - let mut escape = false; - for c in raw.chars() { - if escape { - match c { - 'n' => cooked.push('\n'), - 'r' => cooked.push('\r'), - 't' => cooked.push('\t'), - '\\' => cooked.push('\\'), - '"' => cooked.push('"'), - other => { - return Err( - token.error(CompilationErrorKind::InvalidEscapeSequence { character: other }), - ); - } - } - escape = false; - continue; - } - if c == '\\' { - escape = true; - continue; - } - cooked.push(c); - } - Ok(StringLiteral { - raw, - cooked: Cow::Owned(cooked), - }) - } else { - Err(token.error(CompilationErrorKind::Internal { - message: "cook_string() called on non-string token".to_string(), - })) - } - } -} - -impl<'a> Display for StringLiteral<'a> { +impl Display for StringLiteral<'_> { fn fmt(&self, f: &mut Formatter) -> fmt::Result { match self.cooked { Cow::Borrowed(raw) => write!(f, "'{}'", raw), diff --git a/src/summary.rs b/src/summary.rs index 6cd60e1..6ceebf8 100644 --- a/src/summary.rs +++ b/src/summary.rs @@ -18,12 +18,19 @@ use std::{ path::Path, }; -use crate::{expression, fragment, justfile::Justfile, parameter, parser::Parser, recipe}; +use crate::compiler::Compiler; + +mod full { + pub(crate) use crate::{ + assignment::Assignment, expression::Expression, fragment::Fragment, justfile::Justfile, + line::Line, parameter::Parameter, recipe::Recipe, + }; +} pub fn summary(path: &Path) -> Result, io::Error> { let text = fs::read_to_string(path)?; - match Parser::parse(&text) { + match Compiler::compile(&text) { Ok(justfile) => Ok(Ok(Summary::new(justfile))), Err(compilation_error) => Ok(Err(compilation_error.to_string())), } @@ -36,14 +43,12 @@ pub struct Summary { } impl Summary { - fn new(justfile: Justfile) -> Summary { 
- let exports = justfile.exports; - + fn new(justfile: full::Justfile) -> Summary { let mut aliases = BTreeMap::new(); for alias in justfile.aliases.values() { aliases - .entry(alias.target) + .entry(alias.target.lexeme()) .or_insert_with(Vec::new) .push(alias.name.to_string()); } @@ -62,12 +67,7 @@ impl Summary { assignments: justfile .assignments .into_iter() - .map(|(name, expression)| { - ( - name.to_string(), - Assignment::new(name, expression, &exports), - ) - }) + .map(|(name, assignment)| (name.to_string(), Assignment::new(assignment))) .collect(), } } @@ -85,13 +85,17 @@ pub struct Recipe { } impl Recipe { - fn new(recipe: recipe::Recipe, aliases: Vec) -> Recipe { + fn new(recipe: full::Recipe, aliases: Vec) -> Recipe { Recipe { private: recipe.private, shebang: recipe.shebang, quiet: recipe.quiet, - dependencies: recipe.dependencies.into_iter().map(str::to_owned).collect(), - lines: recipe.lines.into_iter().map(Line::new).collect(), + dependencies: recipe + .dependencies + .into_iter() + .map(|name| name.lexeme().to_string()) + .collect(), + lines: recipe.body.into_iter().map(Line::new).collect(), parameters: recipe.parameters.into_iter().map(Parameter::new).collect(), aliases, } @@ -106,10 +110,10 @@ pub struct Parameter { } impl Parameter { - fn new(parameter: parameter::Parameter) -> Parameter { + fn new(parameter: full::Parameter) -> Parameter { Parameter { variadic: parameter.variadic, - name: parameter.name.to_owned(), + name: parameter.name.lexeme().to_owned(), default: parameter.default.map(Expression::new), } } @@ -121,9 +125,9 @@ pub struct Line { } impl Line { - fn new(fragments: Vec) -> Line { + fn new(line: full::Line) -> Line { Line { - fragments: fragments.into_iter().map(Fragment::new).collect(), + fragments: line.fragments.into_iter().map(Fragment::new).collect(), } } } @@ -135,12 +139,12 @@ pub enum Fragment { } impl Fragment { - fn new(fragment: fragment::Fragment) -> Fragment { + fn new(fragment: full::Fragment) -> Fragment { match 
fragment { - fragment::Fragment::Text { text } => Fragment::Text { - text: text.lexeme().to_owned(), + full::Fragment::Text { token } => Fragment::Text { + text: token.lexeme().to_owned(), }, - fragment::Fragment::Expression { expression } => Fragment::Expression { + full::Fragment::Interpolation { expression } => Fragment::Expression { expression: Expression::new(expression), }, } @@ -154,10 +158,10 @@ pub struct Assignment { } impl Assignment { - fn new(name: &str, expression: expression::Expression, exports: &BTreeSet<&str>) -> Assignment { + fn new(assignment: full::Assignment) -> Assignment { Assignment { - exported: exports.contains(name), - expression: Expression::new(expression), + exported: assignment.export, + expression: Expression::new(assignment.expression), } } } @@ -184,29 +188,30 @@ pub enum Expression { } impl Expression { - fn new(expression: expression::Expression) -> Expression { - use expression::Expression::*; + fn new(expression: full::Expression) -> Expression { + use full::Expression::*; match expression { - Backtick { raw, .. } => Expression::Backtick { - command: raw.to_owned(), + Backtick { contents, .. } => Expression::Backtick { + command: contents.to_owned(), }, Call { - name, arguments, .. + function, + arguments, } => Expression::Call { - name: name.to_owned(), + name: function.lexeme().to_owned(), arguments: arguments.into_iter().map(Expression::new).collect(), }, Concatination { lhs, rhs } => Expression::Concatination { lhs: Box::new(Expression::new(*lhs)), rhs: Box::new(Expression::new(*rhs)), }, - String { cooked_string } => Expression::String { - text: cooked_string.cooked.to_string(), + StringLiteral { string_literal } => Expression::String { + text: string_literal.cooked.to_string(), }, Variable { name, .. 
} => Expression::Variable { - name: name.to_owned(), + name: name.lexeme().to_owned(), }, - Group { expression } => Expression::new(*expression), + Group { contents } => Expression::new(*contents), } } } diff --git a/src/table.rs b/src/table.rs new file mode 100644 index 0000000..2e98a25 --- /dev/null +++ b/src/table.rs @@ -0,0 +1,46 @@ +use crate::common::*; + +#[derive(Debug, PartialEq)] +pub(crate) struct Table<'key, V: Keyed<'key>> { + map: BTreeMap<&'key str, V>, +} + +impl<'key, V: Keyed<'key>> Table<'key, V> { + pub(crate) fn insert(&mut self, value: V) { + self.map.insert(value.key(), value); + } +} + +impl<'key, V: Keyed<'key>> FromIterator for Table<'key, V> { + fn from_iter>(iter: I) -> Self { + Table { + map: iter.into_iter().map(|value| (value.key(), value)).collect(), + } + } +} + +impl<'key, V: Keyed<'key>> Deref for Table<'key, V> { + type Target = BTreeMap<&'key str, V>; + + fn deref(&self) -> &Self::Target { + &self.map + } +} + +impl<'key, V: Keyed<'key>> IntoIterator for Table<'key, V> { + type Item = (&'key str, V); + type IntoIter = std::collections::btree_map::IntoIter<&'key str, V>; + + fn into_iter(self) -> std::collections::btree_map::IntoIter<&'key str, V> { + self.map.into_iter() + } +} + +impl<'table, V: Keyed<'table> + 'table> IntoIterator for &'table Table<'table, V> { + type Item = (&'table &'table str, &'table V); + type IntoIter = std::collections::btree_map::Iter<'table, &'table str, V>; + + fn into_iter(self) -> std::collections::btree_map::Iter<'table, &'table str, V> { + self.map.iter() + } +} diff --git a/src/testing.rs b/src/testing.rs index db5d643..5af01ea 100644 --- a/src/testing.rs +++ b/src/testing.rs @@ -1,49 +1,81 @@ use crate::common::*; -pub(crate) fn parse(text: &str) -> Justfile { - match Parser::parse(text) { +pub(crate) fn compile(text: &str) -> Justfile { + match Compiler::compile(text) { Ok(justfile) => justfile, - Err(error) => panic!("Expected successful parse but got error:\n {}", error), + Err(error) => 
panic!("Expected successful compilation but got error:\n {}", error), } } pub(crate) use test_utilities::{tempdir, unindent}; -macro_rules! error_test { +macro_rules! analysis_error { ( - name: $name:ident, - input: $input:expr, - offset: $offset:expr, - line: $line:expr, - column: $column:expr, - width: $width:expr, - kind: $kind:expr, - ) => { + name: $name:ident, + input: $input:expr, + offset: $offset:expr, + line: $line:expr, + column: $column:expr, + width: $width:expr, + kind: $kind:expr, + ) => { #[test] fn $name() { - let text: &str = $input; - let offset: usize = $offset; - let column: usize = $column; - let width: usize = $width; - let line: usize = $line; - let kind: CompilationErrorKind = $kind; - - let expected = CompilationError { - text, - offset, - line, - column, - width, - kind, - }; - - match Parser::parse(text) { - Ok(_) => panic!("Compilation succeeded but expected: {}\n{}", expected, text), - Err(actual) => { - use pretty_assertions::assert_eq; - assert_eq!(actual, expected); - } - } + $crate::testing::error($input, $offset, $line, $column, $width, $kind); } }; } + +pub(crate) fn error( + src: &str, + offset: usize, + line: usize, + column: usize, + width: usize, + kind: CompilationErrorKind, +) { + let expected = CompilationError { + src, + offset, + line, + column, + width, + kind, + }; + + let tokens = Lexer::lex(src).expect("Lexing failed in parse test..."); + + let module = Parser::parse(&tokens).expect("Parsing failed in analysis test..."); + + match Analyzer::analyze(module) { + Ok(_) => panic!("Analysis succeeded but expected: {}\n{}", expected, src), + Err(actual) => { + assert_eq!(actual, expected); + } + } +} + +#[test] +fn readme_test() { + let mut justfiles = vec![]; + let mut current = None; + + for line in fs::read_to_string("README.adoc").unwrap().lines() { + if let Some(mut justfile) = current { + if line == "```" { + justfiles.push(justfile); + current = None; + } else { + justfile += line; + justfile += "\n"; + current = 
Some(justfile); + } + } else if line == "```make" { + current = Some(String::new()); + } + } + + for justfile in justfiles { + compile(&justfile); + } +} diff --git a/src/token.rs b/src/token.rs index 2587153..4d05ab3 100644 --- a/src/token.rs +++ b/src/token.rs @@ -1,18 +1,18 @@ use crate::common::*; -#[derive(Debug, PartialEq, Clone)] +#[derive(Debug, PartialEq, Clone, Copy)] pub(crate) struct Token<'a> { pub(crate) offset: usize, pub(crate) length: usize, pub(crate) line: usize, pub(crate) column: usize, - pub(crate) text: &'a str, + pub(crate) src: &'a str, pub(crate) kind: TokenKind, } impl<'a> Token<'a> { pub(crate) fn lexeme(&self) -> &'a str { - &self.text[self.offset..self.offset + self.length] + &self.src[self.offset..self.offset + self.length] } pub(crate) fn error(&self, kind: CompilationErrorKind<'a>) -> CompilationError<'a> { @@ -20,7 +20,7 @@ impl<'a> Token<'a> { column: self.column, offset: self.offset, line: self.line, - text: self.text, + src: self.src, width: self.length, kind, } diff --git a/src/token_kind.rs b/src/token_kind.rs index b74f4e2..0c4211e 100644 --- a/src/token_kind.rs +++ b/src/token_kind.rs @@ -1,6 +1,6 @@ use crate::common::*; -#[derive(Debug, PartialEq, Clone, Copy)] +#[derive(Debug, PartialEq, Clone, Copy, Ord, PartialOrd, Eq)] pub(crate) enum TokenKind { At, Backtick, @@ -12,16 +12,15 @@ pub(crate) enum TokenKind { Eof, Eol, Equals, + Identifier, Indent, InterpolationEnd, InterpolationStart, - Line, - Name, ParenL, ParenR, Plus, - StringRaw, StringCooked, + StringRaw, Text, Whitespace, } @@ -43,16 +42,15 @@ impl Display for TokenKind { Eof => "end of file", Eol => "end of line", Equals => "'='", + Identifier => "identifier", Indent => "indent", InterpolationEnd => "'}}'", InterpolationStart => "'{{'", - Line => "command", - Name => "name", ParenL => "'('", ParenR => "')'", Plus => "'+'", - StringRaw => "raw string", StringCooked => "cooked string", + StringRaw => "raw string", Text => "command text", Whitespace => 
"whitespace", } diff --git a/src/tree.rs b/src/tree.rs new file mode 100644 index 0000000..c0b6b08 --- /dev/null +++ b/src/tree.rs @@ -0,0 +1,130 @@ +use crate::common::*; + +use std::mem; + +/// Construct a `Tree` from a symbolic expression literal. This macro, and the +/// Tree type, are only used in the Parser unit tests, as a concise notation +/// representing the expected results of parsing a given string. +macro_rules! tree { + { + ($($child:tt)*) + } => { + $crate::tree::Tree::List(vec![$(tree!($child),)*]) + }; + + { + $atom:ident + } => { + $crate::tree::Tree::atom(stringify!($atom)) + }; + + { + $atom:literal + } => { + $crate::tree::Tree::atom(format!("\"{}\"", $atom)) + }; + + { + # + } => { + $crate::tree::Tree::atom("#") + }; + + { + + + } => { + $crate::tree::Tree::atom("+") + }; +} + +/// A `Tree` is either… +#[derive(Debug, PartialEq)] +pub(crate) enum Tree<'text> { + /// …an atom containing text, or… + Atom(Cow<'text, str>), + /// …a list containing zero or more `Tree`s. 
+ List(Vec>), +} + +impl<'text> Tree<'text> { + /// Construct an Atom from a text scalar + pub(crate) fn atom(text: impl Into>) -> Tree<'text> { + Tree::Atom(text.into()) + } + + /// Construct a List from an iterable of trees + pub(crate) fn list(children: impl IntoIterator>) -> Tree<'text> { + Tree::List(children.into_iter().collect()) + } + + /// Convenience function to create an atom containing quoted text + pub(crate) fn string(contents: impl AsRef) -> Tree<'text> { + Tree::atom(format!("\"{}\"", contents.as_ref())) + } + + /// Push a child node into self, turning it into a List if it was an Atom + pub(crate) fn push(self, tree: impl Into>) -> Tree<'text> { + match self { + Tree::List(mut children) => { + children.push(tree.into()); + Tree::List(children) + } + Tree::Atom(text) => Tree::List(vec![Tree::Atom(text), tree.into()]), + } + } + + /// Extend a self with a tail of Trees, turning self into a List if it + /// was an Atom + pub(crate) fn extend(self, tail: I) -> Tree<'text> + where + I: IntoIterator, + T: Into>, + { + // Tree::List(children.into_iter().collect()) + let mut head = match self { + Tree::List(children) => children, + Tree::Atom(text) => vec![Tree::Atom(text)], + }; + + for child in tail { + head.push(child.into()); + } + + Tree::List(head) + } + + /// Like `push`, but modify self in-place + pub(crate) fn push_mut(&mut self, tree: impl Into>) { + let tree = mem::replace(self, Tree::List(Vec::new())).push(tree.into()); + mem::replace(self, tree); + } +} + +impl Display for Tree<'_> { + fn fmt(&self, f: &mut Formatter) -> fmt::Result { + match self { + Tree::List(children) => { + write!(f, "(")?; + + for (i, child) in children.iter().enumerate() { + if i > 0 { + write!(f, " ")?; + } + write!(f, "{}", child)?; + } + + write!(f, ")") + } + Tree::Atom(text) => write!(f, "{}", text), + } + } +} + +impl<'text, T> From for Tree<'text> +where + T: Into>, +{ + fn from(text: T) -> Tree<'text> { + Tree::Atom(text.into()) + } +} diff --git 
a/src/variables.rs b/src/variables.rs index 309bd1a..ba626f7 100644 --- a/src/variables.rs +++ b/src/variables.rs @@ -1,32 +1,32 @@ use crate::common::*; -pub(crate) struct Variables<'a> { - stack: Vec<&'a Expression<'a>>, +pub(crate) struct Variables<'expression, 'src> { + stack: Vec<&'expression Expression<'src>>, } -impl<'a> Variables<'a> { - pub(crate) fn new(root: &'a Expression<'a>) -> Variables<'a> { +impl<'expression, 'src> Variables<'expression, 'src> { + pub(crate) fn new(root: &'expression Expression<'src>) -> Variables<'expression, 'src> { Variables { stack: vec![root] } } } -impl<'a> Iterator for Variables<'a> { - type Item = &'a Token<'a>; +impl<'expression, 'src> Iterator for Variables<'expression, 'src> { + type Item = Token<'src>; - fn next(&mut self) -> Option<&'a Token<'a>> { + fn next(&mut self) -> Option> { match self.stack.pop() { None - | Some(Expression::String { .. }) + | Some(Expression::StringLiteral { .. }) | Some(Expression::Backtick { .. }) | Some(Expression::Call { .. }) => None, - Some(Expression::Variable { token, .. }) => Some(token), + Some(Expression::Variable { name, .. 
}) => Some(name.token()), Some(Expression::Concatination { lhs, rhs }) => { self.stack.push(lhs); self.stack.push(rhs); self.next() } - Some(Expression::Group { expression }) => { - self.stack.push(expression); + Some(Expression::Group { contents }) => { + self.stack.push(contents); self.next() } } diff --git a/src/warning.rs b/src/warning.rs index 38d3c52..74a7683 100644 --- a/src/warning.rs +++ b/src/warning.rs @@ -2,13 +2,13 @@ use crate::common::*; use Warning::*; -#[derive(Debug)] -pub(crate) enum Warning<'a> { - DeprecatedEquals { equals: Token<'a> }, +#[derive(Debug, PartialEq)] +pub(crate) enum Warning<'src> { + DeprecatedEquals { equals: Token<'src> }, } -impl Warning<'_> { - fn context(&self) -> Option<&Token> { +impl<'src> Warning<'src> { + fn context(&self) -> Option<&Token<'src>> { match self { DeprecatedEquals { equals } => Some(equals), } @@ -42,7 +42,7 @@ impl Display for Warning<'_> { write_message_context( f, Color::fmt(f).warning(), - token.text, + token.src, token.offset, token.line, token.column, diff --git a/tests/integration.rs b/tests/integration.rs index 5c3aa55..c280ad1 100644 --- a/tests/integration.rs +++ b/tests/integration.rs @@ -1503,7 +1503,7 @@ integration_test! { justfile: "foo: 'bar'", args: ("foo"), stdout: "", - stderr: "error: Expected name, end of line, or end of file, but found raw string + stderr: "error: Expected end of file, end of line, or identifier, but found raw string | 1 | foo: 'bar' | ^^^^^ @@ -1516,7 +1516,7 @@ integration_test! { justfile: "foo 'bar'", args: ("foo"), stdout: "", - stderr: "error: Expected name, '+', ':', or ':=', but found raw string + stderr: "error: Expected ':', ':=', identifier, or '+', but found raw string | 1 | foo 'bar' | ^^^^^