Don't conflate recipes with the same name in different modules (#1825)

Casey Rodarmor 2024-01-08 13:26:33 -08:00 committed by GitHub
parent 0dbd5bf0b6
commit 1ea5e6ac31
19 changed files with 259 additions and 161 deletions
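
The bug: whether a recipe had already run was tracked in a flat BTreeSet keyed by recipe name plus arguments, so a recipe `bar` in the root justfile and a recipe `bar` in module `foo` were conflated and only one of them would run. The diff below gives every recipe a `Namepath` (e.g. `foo::bar`) and replaces the set with a `Ran` table keyed by it. A minimal self-contained sketch of the idea, using simplified stand-in types rather than the crate's own:

use std::collections::{BTreeMap, BTreeSet};

// Stand-in for `Namepath`: ["foo", "bar"] represents `foo::bar`.
type Namepath = Vec<String>;

// Stand-in for `Ran`: argument lists already run, keyed by full recipe path.
#[derive(Default)]
struct Ran(BTreeMap<Namepath, BTreeSet<Vec<String>>>);

impl Ran {
  fn has_run(&self, recipe: &Namepath, arguments: &[String]) -> bool {
    self
      .0
      .get(recipe)
      .map(|ran| ran.contains(arguments))
      .unwrap_or(false)
  }

  fn ran(&mut self, recipe: &Namepath, arguments: Vec<String>) {
    self.0.entry(recipe.clone()).or_default().insert(arguments);
  }
}

fn main() {
  let mut ran = Ran::default();
  let root_bar: Namepath = vec!["bar".into()];
  let module_bar: Namepath = vec!["foo".into(), "bar".into()];
  ran.ran(&root_bar, Vec::new());
  assert!(ran.has_run(&root_bar, &[]));
  // Keyed by full path, `foo::bar` is still eligible to run:
  assert!(!ran.has_run(&module_bar, &[]));
}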

View File

@@ -47,7 +47,7 @@ impl<'src> Analyzer<'src> {
         (*original, name)
       };
-      return Err(redefinition.token().error(Redefinition {
+      return Err(redefinition.token.error(Redefinition {
         first_type,
         second_type,
         name: name.lexeme(),
@@ -83,7 +83,7 @@ impl<'src> Analyzer<'src> {
           if let Some(absolute) = absolute {
             define(*name, "module", false)?;
             modules.insert(
-              name.to_string(),
+              name.lexeme().into(),
               (*name, Self::analyze(loaded, paths, asts, absolute)?),
             );
           }
@@ -160,7 +160,7 @@ impl<'src> Analyzer<'src> {
     for parameter in &recipe.parameters {
       if parameters.contains(parameter.name.lexeme()) {
-        return Err(parameter.name.token().error(DuplicateParameter {
+        return Err(parameter.name.token.error(DuplicateParameter {
           recipe: recipe.name.lexeme(),
           parameter: parameter.name.lexeme(),
         }));
@@ -173,7 +173,7 @@ impl<'src> Analyzer<'src> {
         return Err(
           parameter
             .name
-            .token()
+            .token
             .error(RequiredParameterFollowsDefaultParameter {
               parameter: parameter.name.lexeme(),
             }),
@@ -201,7 +201,7 @@ impl<'src> Analyzer<'src> {
   fn analyze_assignment(&self, assignment: &Assignment<'src>) -> CompileResult<'src> {
     if self.assignments.contains_key(assignment.name.lexeme()) {
-      return Err(assignment.name.token().error(DuplicateVariable {
+      return Err(assignment.name.token.error(DuplicateVariable {
        variable: assignment.name.lexeme(),
      }));
    }
@@ -213,7 +213,7 @@ impl<'src> Analyzer<'src> {
     for attr in &alias.attributes {
       if *attr != Attribute::Private {
-        return Err(alias.name.token().error(AliasInvalidAttribute {
+        return Err(alias.name.token.error(AliasInvalidAttribute {
           alias: name,
           attr: *attr,
         }));
@@ -238,10 +238,9 @@ impl<'src> Analyzer<'src> {
     recipes: &Table<'src, Rc<Recipe<'src>>>,
     alias: Alias<'src, Name<'src>>,
   ) -> CompileResult<'src, Alias<'src>> {
-    let token = alias.name.token();
     // Make sure the alias doesn't conflict with any recipe
     if let Some(recipe) = recipes.get(alias.name.lexeme()) {
-      return Err(token.error(AliasShadowsRecipe {
+      return Err(alias.name.token.error(AliasShadowsRecipe {
         alias: alias.name.lexeme(),
         recipe_line: recipe.line_number(),
       }));
@@ -250,7 +249,7 @@ impl<'src> Analyzer<'src> {
     // Make sure the target recipe exists
     match recipes.get(alias.target.lexeme()) {
       Some(target) => Ok(alias.resolve(Rc::clone(target))),
-      None => Err(token.error(UnknownAliasTarget {
+      None => Err(alias.name.token.error(UnknownAliasTarget {
         alias: alias.name.lexeme(),
         target: alias.target.lexeme(),
       })),

View File

@@ -59,16 +59,19 @@ impl<'src: 'run, 'run> AssignmentResolver<'src, 'run> {
         if self.evaluated.contains(variable) {
           Ok(())
         } else if self.stack.contains(&variable) {
-          let token = self.assignments[variable].name.token();
           self.stack.push(variable);
-          Err(token.error(CircularVariableDependency {
-            variable,
-            circle: self.stack.clone(),
-          }))
+          Err(
+            self.assignments[variable]
+              .name
+              .error(CircularVariableDependency {
+                variable,
+                circle: self.stack.clone(),
+              }),
+          )
         } else if self.assignments.contains_key(variable) {
           self.resolve_assignment(variable)
         } else {
-          Err(name.token().error(UndefinedVariable { variable }))
+          Err(name.token.error(UndefinedVariable { variable }))
         }
       }
       Expression::Call { thunk } => match thunk {

View File

@@ -13,17 +13,17 @@ impl Compiler {
     let mut srcs: HashMap<PathBuf, &str> = HashMap::new();
     let mut loaded = Vec::new();

-    let mut stack: Vec<(PathBuf, u32)> = Vec::new();
-    stack.push((root.into(), 0));
+    let mut stack = Vec::new();
+    stack.push(Source::root(root));

-    while let Some((current, depth)) = stack.pop() {
-      let (relative, src) = loader.load(root, &current)?;
+    while let Some(current) = stack.pop() {
+      let (relative, src) = loader.load(root, &current.path)?;
       loaded.push(relative.into());
       let tokens = Lexer::lex(relative, src)?;
-      let mut ast = Parser::parse(depth, &current, &tokens)?;
+      let mut ast = Parser::parse(&current.path, &current.namepath, current.depth, &tokens)?;

-      paths.insert(current.clone(), relative.into());
-      srcs.insert(current.clone(), src);
+      paths.insert(current.path.clone(), relative.into());
+      srcs.insert(current.path.clone(), src);

       for item in &mut ast.items {
         match item {
@@ -39,7 +39,7 @@ impl Compiler {
               });
             }

-            let parent = current.parent().unwrap();
+            let parent = current.path.parent().unwrap();
             let import = if let Some(relative) = relative {
               let path = parent.join(Self::expand_tilde(&relative.cooked)?);
@@ -55,10 +55,13 @@ impl Compiler {
             if let Some(import) = import {
               if srcs.contains_key(&import) {
-                return Err(Error::CircularImport { current, import });
+                return Err(Error::CircularImport {
+                  current: current.path,
+                  import,
+                });
               }
               *absolute = Some(import.clone());
-              stack.push((import, depth + 1));
+              stack.push(current.module(*name, import));
             } else if !*optional {
               return Err(Error::MissingModuleFile { module: *name });
             }
@@ -70,6 +73,7 @@ impl Compiler {
             path,
           } => {
             let import = current
+              .path
               .parent()
               .unwrap()
               .join(Self::expand_tilde(&relative.cooked)?)
@@ -77,10 +81,13 @@ impl Compiler {
             if import.is_file() {
               if srcs.contains_key(&import) {
-                return Err(Error::CircularImport { current, import });
+                return Err(Error::CircularImport {
+                  current: current.path,
+                  import,
+                });
               }
               *absolute = Some(import.clone());
-              stack.push((import, depth + 1));
+              stack.push(current.import(import));
             } else if !*optional {
               return Err(Error::MissingImportFile { path: *path });
             }
@@ -89,7 +96,7 @@ impl Compiler {
         }
       }

-      asts.insert(current.clone(), ast.clone());
+      asts.insert(current.path, ast.clone());
     }

     let justfile = Analyzer::analyze(&loaded, &paths, &asts, root)?;
@@ -155,7 +162,7 @@ impl Compiler {
   #[cfg(test)]
   pub(crate) fn test_compile(src: &str) -> CompileResult<Justfile> {
     let tokens = Lexer::test_lex(src)?;
-    let ast = Parser::parse(0, &PathBuf::new(), &tokens)?;
+    let ast = Parser::parse(&PathBuf::new(), &Namepath::default(), 0, &tokens)?;
     let root = PathBuf::from("justfile");
     let mut asts: HashMap<PathBuf, Ast> = HashMap::new();
     asts.insert(root.clone(), ast);

View File

@@ -179,11 +179,11 @@ impl<'src> Error<'src> {
   fn context(&self) -> Option<Token<'src>> {
     match self {
       Self::AmbiguousModuleFile { module, .. } | Self::MissingModuleFile { module, .. } => {
-        Some(module.token())
+        Some(module.token)
       }
       Self::Backtick { token, .. } => Some(*token),
       Self::Compile { compile_error } => Some(compile_error.context()),
-      Self::FunctionCall { function, .. } => Some(function.token()),
+      Self::FunctionCall { function, .. } => Some(function.token),
       Self::MissingImportFile { path } => Some(*path),
       _ => None,
     }

View File

@@ -255,7 +255,7 @@ impl<'src, 'run> Evaluator<'src, 'run> {
     config: &'run Config,
     dotenv: &'run BTreeMap<String, String>,
     parameters: &[Parameter<'src>],
-    arguments: &[&str],
+    arguments: &[String],
     scope: &'run Scope<'src, 'run>,
     settings: &'run Settings,
     search: &'run Search,
@@ -289,13 +289,13 @@ impl<'src, 'run> Evaluator<'src, 'run> {
           }
         } else if parameter.kind.is_variadic() {
           for value in rest {
-            positional.push((*value).to_owned());
+            positional.push(value.clone());
           }
           let value = rest.to_vec().join(" ");
           rest = &[];
           value
         } else {
-          let value = rest[0].to_owned();
+          let value = rest[0].clone();
           positional.push(value.clone());
           rest = &rest[1..];
           value

View File

@@ -271,7 +271,7 @@ impl<'src> Justfile<'src> {
       });
     }

-    let mut ran = BTreeSet::new();
+    let mut ran = Ran::default();
     for invocation in invocations {
       let context = RecipeContext {
         settings: invocation.settings,
@@ -283,7 +283,12 @@ impl<'src> Justfile<'src> {
       Self::run_recipe(
         &context,
         invocation.recipe,
-        &invocation.arguments,
+        &invocation
+          .arguments
+          .iter()
+          .copied()
+          .map(str::to_string)
+          .collect::<Vec<String>>(),
         &dotenv,
         search,
         &mut ran,
@@ -399,17 +404,12 @@ impl<'src> Justfile<'src> {
   fn run_recipe(
     context: &RecipeContext<'src, '_>,
     recipe: &Recipe<'src>,
-    arguments: &[&str],
+    arguments: &[String],
     dotenv: &BTreeMap<String, String>,
     search: &Search,
-    ran: &mut BTreeSet<Vec<String>>,
+    ran: &mut Ran<'src>,
   ) -> RunResult<'src> {
-    let mut invocation = vec![recipe.name().to_owned()];
-    for argument in arguments {
-      invocation.push((*argument).to_string());
-    }
-
-    if ran.contains(&invocation) {
+    if ran.has_run(&recipe.namepath, arguments) {
       return Ok(());
     }
@@ -440,20 +440,13 @@ impl<'src> Justfile<'src> {
         .map(|argument| evaluator.evaluate_expression(argument))
         .collect::<RunResult<Vec<String>>>()?;

-      Self::run_recipe(
-        context,
-        recipe,
-        &arguments.iter().map(String::as_ref).collect::<Vec<&str>>(),
-        dotenv,
-        search,
-        ran,
-      )?;
+      Self::run_recipe(context, recipe, &arguments, dotenv, search, ran)?;
     }

     recipe.run(context, dotenv, scope.child(), search, &positional)?;

     {
-      let mut ran = BTreeSet::new();
+      let mut ran = Ran::default();
       for Dependency { recipe, arguments } in recipe.dependencies.iter().skip(recipe.priors) {
         let mut evaluated = Vec::new();
@@ -462,18 +455,11 @@ impl<'src> Justfile<'src> {
           evaluated.push(evaluator.evaluate_expression(argument)?);
         }

-        Self::run_recipe(
-          context,
-          recipe,
-          &evaluated.iter().map(String::as_ref).collect::<Vec<&str>>(),
-          dotenv,
-          search,
-          &mut ran,
-        )?;
+        Self::run_recipe(context, recipe, &evaluated, dotenv, search, &mut ran)?;
       }
     }

-    ran.insert(invocation);
+    ran.ran(&recipe.namepath, arguments.to_vec());

     Ok(())
   }

View File

@@ -25,17 +25,17 @@ pub(crate) use {
     fragment::Fragment, function::Function, function_context::FunctionContext,
     interrupt_guard::InterruptGuard, interrupt_handler::InterruptHandler, item::Item,
     justfile::Justfile, keyed::Keyed, keyword::Keyword, lexer::Lexer, line::Line, list::List,
-    load_dotenv::load_dotenv, loader::Loader, name::Name, ordinal::Ordinal, output::output,
-    output_error::OutputError, parameter::Parameter, parameter_kind::ParameterKind, parser::Parser,
-    platform::Platform, platform_interface::PlatformInterface, position::Position,
-    positional::Positional, range_ext::RangeExt, recipe::Recipe, recipe_context::RecipeContext,
-    recipe_resolver::RecipeResolver, scope::Scope, search::Search, search_config::SearchConfig,
-    search_error::SearchError, set::Set, setting::Setting, settings::Settings, shebang::Shebang,
-    shell::Shell, show_whitespace::ShowWhitespace, string_kind::StringKind,
-    string_literal::StringLiteral, subcommand::Subcommand, suggestion::Suggestion, table::Table,
-    thunk::Thunk, token::Token, token_kind::TokenKind, unresolved_dependency::UnresolvedDependency,
-    unresolved_recipe::UnresolvedRecipe, use_color::UseColor, variables::Variables,
-    verbosity::Verbosity, warning::Warning,
+    load_dotenv::load_dotenv, loader::Loader, name::Name, namepath::Namepath, ordinal::Ordinal,
+    output::output, output_error::OutputError, parameter::Parameter, parameter_kind::ParameterKind,
+    parser::Parser, platform::Platform, platform_interface::PlatformInterface, position::Position,
+    positional::Positional, ran::Ran, range_ext::RangeExt, recipe::Recipe,
+    recipe_context::RecipeContext, recipe_resolver::RecipeResolver, scope::Scope, search::Search,
+    search_config::SearchConfig, search_error::SearchError, set::Set, setting::Setting,
+    settings::Settings, shebang::Shebang, shell::Shell, show_whitespace::ShowWhitespace,
+    source::Source, string_kind::StringKind, string_literal::StringLiteral, subcommand::Subcommand,
+    suggestion::Suggestion, table::Table, thunk::Thunk, token::Token, token_kind::TokenKind,
+    unresolved_dependency::UnresolvedDependency, unresolved_recipe::UnresolvedRecipe,
+    use_color::UseColor, variables::Variables, verbosity::Verbosity, warning::Warning,
   },
   std::{
     cmp,
@@ -47,6 +47,7 @@ pub(crate) use {
     io::{self, Cursor, Write},
     iter::{self, FromIterator},
     mem,
+    ops::Deref,
     ops::{Index, Range, RangeInclusive},
     path::{self, Path, PathBuf},
     process::{self, Command, ExitStatus, Stdio},
@@ -149,6 +150,7 @@ mod list;
 mod load_dotenv;
 mod loader;
 mod name;
+mod namepath;
 mod ordinal;
 mod output;
 mod output_error;
@@ -159,6 +161,7 @@ mod platform;
 mod platform_interface;
 mod position;
 mod positional;
+mod ran;
 mod range_ext;
 mod recipe;
 mod recipe_context;
@@ -174,6 +177,7 @@ mod settings;
 mod shebang;
 mod shell;
 mod show_whitespace;
+mod source;
 mod string_kind;
 mod string_literal;
 mod subcommand;

View File

@@ -1,50 +1,24 @@
 use super::*;

-/// A name. This is effectively just a `Token` of kind `Identifier`, but we give
-/// it its own type for clarity.
+/// A name. This is just a `Token` of kind `Identifier`, but we give it its own
+/// type for clarity.
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd)]
 pub(crate) struct Name<'src> {
-  pub(crate) column: usize,
-  pub(crate) length: usize,
-  pub(crate) line: usize,
-  pub(crate) offset: usize,
-  pub(crate) path: &'src Path,
-  pub(crate) src: &'src str,
+  pub(crate) token: Token<'src>,
 }

 impl<'src> Name<'src> {
-  /// The name's text contents
-  pub(crate) fn lexeme(&self) -> &'src str {
-    &self.src[self.offset..self.offset + self.length]
-  }
-
-  /// Turn this name back into a token
-  pub(crate) fn token(&self) -> Token<'src> {
-    Token {
-      column: self.column,
-      kind: TokenKind::Identifier,
-      length: self.length,
-      line: self.line,
-      offset: self.offset,
-      path: self.path,
-      src: self.src,
-    }
-  }
-
-  pub(crate) fn from_identifier(token: Token<'src>) -> Name {
+  pub(crate) fn from_identifier(token: Token<'src>) -> Self {
     assert_eq!(token.kind, TokenKind::Identifier);
-    Name {
-      column: token.column,
-      length: token.length,
-      line: token.line,
-      offset: token.offset,
-      path: token.path,
-      src: token.src,
-    }
+    Self { token }
   }
+}

-  pub(crate) fn error(&self, kind: CompileErrorKind<'src>) -> CompileError<'src> {
-    self.token().error(kind)
+impl<'src> Deref for Name<'src> {
+  type Target = Token<'src>;
+
+  fn deref(&self) -> &Self::Target {
+    &self.token
   }
 }
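
`Name` is now a newtype over `Token` with a `Deref` impl, which is why call sites throughout this diff can replace `name.token()` with `name.token`, and why `Token` methods like `lexeme()` and `error()` are still callable directly on a `Name`. A simplified standalone sketch of the pattern (reduced field set, not the crate's real types):

use std::ops::Deref;

#[derive(Clone, Copy)]
struct Token<'src> {
  offset: usize,
  length: usize,
  src: &'src str,
}

impl<'src> Token<'src> {
  fn lexeme(&self) -> &'src str {
    &self.src[self.offset..self.offset + self.length]
  }
}

#[derive(Clone, Copy)]
struct Name<'src> {
  token: Token<'src>,
}

impl<'src> Deref for Name<'src> {
  type Target = Token<'src>;

  fn deref(&self) -> &Self::Target {
    &self.token
  }
}

fn main() {
  let src = "build:";
  let name = Name {
    token: Token { offset: 0, length: 5, src },
  };
  // Deref coercion: `lexeme` is defined on `Token`, called through `Name`.
  assert_eq!(name.lexeme(), "build");
}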

src/namepath.rs (new file)
View File

@@ -0,0 +1,28 @@
+use super::*;
+
+#[derive(Default, Clone, Debug, Eq, PartialEq, Ord, PartialOrd)]
+pub(crate) struct Namepath<'src>(Vec<Name<'src>>);
+
+impl<'src> Namepath<'src> {
+  pub(crate) fn join(&self, name: Name<'src>) -> Self {
+    Self(self.0.iter().copied().chain(iter::once(name)).collect())
+  }
+}
+
+impl<'str> Serialize for Namepath<'str> {
+  fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+  where
+    S: Serializer,
+  {
+    let mut path = String::new();
+
+    for (i, name) in self.0.iter().enumerate() {
+      if i > 0 {
+        path.push_str("::");
+      }
+      path.push_str(name.lexeme());
+    }
+
+    serializer.serialize_str(&path)
+  }
+}
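
The `Serialize` impl joins the component names with `::`, which is where the `"namepath": "foo::bar"` values in the JSON dump tests below come from. A tiny standalone sketch of just the joining logic, using plain strings in place of `Name`:

fn serialize_namepath(components: &[&str]) -> String {
  components.join("::")
}

fn main() {
  assert_eq!(serialize_namepath(&["foo", "bar"]), "foo::bar");
  assert_eq!(serialize_namepath(&["bar"]), "bar");
}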

View File

@@ -23,34 +23,31 @@ use {super::*, TokenKind::*};
 /// find it, it adds that token to the set. When the parser accepts a token, the
 /// set is cleared. If the parser finds a token which is unexpected, the
 /// contents of the set is printed in the resultant error message.
-pub(crate) struct Parser<'tokens, 'src> {
-  /// Source tokens
-  tokens: &'tokens [Token<'src>],
-  /// Index of the next un-parsed token
-  next: usize,
-  /// Current expected tokens
-  expected: BTreeSet<TokenKind>,
-  /// Current recursion depth
-  depth: usize,
-  /// Path to the file being parsed
-  path: PathBuf,
-  /// Depth of submodule being parsed
-  submodule: u32,
+pub(crate) struct Parser<'run, 'src> {
+  expected_tokens: BTreeSet<TokenKind>,
+  file_path: &'run Path,
+  module_namepath: &'run Namepath<'src>,
+  next_token: usize,
+  recursion_depth: usize,
+  submodule_depth: u32,
+  tokens: &'run [Token<'src>],
 }

-impl<'tokens, 'src> Parser<'tokens, 'src> {
+impl<'run, 'src> Parser<'run, 'src> {
   /// Parse `tokens` into an `Ast`
   pub(crate) fn parse(
-    submodule: u32,
-    path: &Path,
-    tokens: &'tokens [Token<'src>],
+    file_path: &'run Path,
+    module_namepath: &'run Namepath<'src>,
+    submodule_depth: u32,
+    tokens: &'run [Token<'src>],
   ) -> CompileResult<'src, Ast<'src>> {
-    Parser {
-      depth: 0,
-      expected: BTreeSet::new(),
-      next: 0,
-      path: path.into(),
-      submodule,
+    Self {
+      expected_tokens: BTreeSet::new(),
+      file_path,
+      module_namepath,
+      next_token: 0,
+      recursion_depth: 0,
+      submodule_depth,
       tokens,
     }
     .parse_ast()
@@ -65,7 +62,7 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
   fn unexpected_token(&self) -> CompileResult<'src, CompileError<'src>> {
     self.error(CompileErrorKind::UnexpectedToken {
       expected: self
-        .expected
+        .expected_tokens
         .iter()
         .copied()
         .filter(|kind| *kind != ByteOrderMark)
@@ -81,8 +78,8 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
   }

   /// An iterator over the remaining significant tokens
-  fn rest(&self) -> impl Iterator<Item = Token<'src>> + 'tokens {
-    self.tokens[self.next..]
+  fn rest(&self) -> impl Iterator<Item = Token<'src>> + 'run {
+    self.tokens[self.next_token..]
       .iter()
       .copied()
       .filter(|token| token.kind != Whitespace)
@@ -107,7 +104,7 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
   /// The first token in `kinds` will be added to the expected token set.
   fn next_are(&mut self, kinds: &[TokenKind]) -> bool {
     if let Some(&kind) = kinds.first() {
-      self.expected.insert(kind);
+      self.expected_tokens.insert(kind);
     }

     let mut rest = self.rest();
@@ -126,10 +123,10 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {

   /// Advance past one significant token, clearing the expected token set.
   fn advance(&mut self) -> CompileResult<'src, Token<'src>> {
-    self.expected.clear();
+    self.expected_tokens.clear();

-    for skipped in &self.tokens[self.next..] {
-      self.next += 1;
+    for skipped in &self.tokens[self.next_token..] {
+      self.next_token += 1;
       if skipped.kind != Whitespace {
         return Ok(*skipped);
@@ -419,7 +416,7 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
       }
     }

-    if self.next == self.tokens.len() {
+    if self.next_token == self.tokens.len() {
       Ok(Ast {
         warnings: Vec::new(),
         items,
@@ -427,7 +424,7 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
     } else {
       Err(self.internal_error(format!(
         "Parse completed with {} unparsed tokens",
-        self.tokens.len() - self.next,
+        self.tokens.len() - self.next_token,
       ))?)
     }
   }
@@ -464,7 +461,7 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
   /// Parse an expression, e.g. `1 + 2`
   fn parse_expression(&mut self) -> CompileResult<'src, Expression<'src>> {
-    if self.depth == if cfg!(windows) { 48 } else { 256 } {
+    if self.recursion_depth == if cfg!(windows) { 48 } else { 256 } {
       let token = self.next()?;
       return Err(CompileError::new(
         token,
@@ -472,7 +469,7 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
       ));
     }

-    self.depth += 1;
+    self.recursion_depth += 1;

     let expression = if self.accepted_keyword(Keyword::If)? {
       self.parse_conditional()?
@@ -496,7 +493,7 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
       }
     };

-    self.depth -= 1;
+    self.recursion_depth -= 1;

     Ok(expression)
   }
@@ -740,11 +737,12 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
       doc,
       name,
       parameters: positional.into_iter().chain(variadic).collect(),
-      path: self.path.clone(),
+      file_path: self.file_path.into(),
       priors,
       private: name.lexeme().starts_with('_'),
       quiet,
-      depth: self.submodule,
+      depth: self.submodule_depth,
+      namepath: self.module_namepath.join(name),
     })
   }
@@ -962,7 +960,8 @@ mod tests {
   fn test(text: &str, want: Tree) {
     let unindented = unindent(text);
     let tokens = Lexer::test_lex(&unindented).expect("lexing failed");
-    let justfile = Parser::parse(0, &PathBuf::new(), &tokens).expect("parsing failed");
+    let justfile =
+      Parser::parse(&PathBuf::new(), &Namepath::default(), 0, &tokens).expect("parsing failed");
     let have = justfile.tree();
     if have != want {
       println!("parsed text: {unindented}");
@@ -1000,7 +999,7 @@ mod tests {
   ) {
     let tokens = Lexer::test_lex(src).expect("Lexing failed in parse test...");

-    match Parser::parse(0, &PathBuf::new(), &tokens) {
+    match Parser::parse(&PathBuf::new(), &Namepath::default(), 0, &tokens) {
       Ok(_) => panic!("Parsing unexpectedly succeeded"),
       Err(have) => {
         let want = CompileError {

src/ran.rs (new file)
View File

@@ -0,0 +1,18 @@
+use super::*;
+
+#[derive(Default)]
+pub(crate) struct Ran<'src>(BTreeMap<Namepath<'src>, BTreeSet<Vec<String>>>);
+
+impl<'src> Ran<'src> {
+  pub(crate) fn has_run(&self, recipe: &Namepath<'src>, arguments: &[String]) -> bool {
+    self
+      .0
+      .get(recipe)
+      .map(|ran| ran.contains(arguments))
+      .unwrap_or_default()
+  }
+
+  pub(crate) fn ran(&mut self, recipe: &Namepath<'src>, arguments: Vec<String>) {
+    self.0.entry(recipe.clone()).or_default().insert(arguments);
+  }
+}

View File

@@ -25,17 +25,18 @@ pub(crate) struct Recipe<'src, D = Dependency<'src>> {
   pub(crate) attributes: BTreeSet<Attribute>,
   pub(crate) body: Vec<Line<'src>>,
   pub(crate) dependencies: Vec<D>,
-  pub(crate) doc: Option<&'src str>,
-  pub(crate) name: Name<'src>,
-  pub(crate) parameters: Vec<Parameter<'src>>,
   #[serde(skip)]
-  pub(crate) path: PathBuf,
+  pub(crate) depth: u32,
+  pub(crate) doc: Option<&'src str>,
+  #[serde(skip)]
+  pub(crate) file_path: PathBuf,
+  pub(crate) name: Name<'src>,
+  pub(crate) namepath: Namepath<'src>,
+  pub(crate) parameters: Vec<Parameter<'src>>,
   pub(crate) priors: usize,
   pub(crate) private: bool,
   pub(crate) quiet: bool,
   pub(crate) shebang: bool,
-  #[serde(skip)]
-  pub(crate) depth: u32,
 }
@@ -223,7 +224,7 @@ impl<'src, D> Recipe<'src, D> {
     if self.change_directory() {
       cmd.current_dir(if self.depth > 0 {
-        self.path.parent().unwrap()
+        self.file_path.parent().unwrap()
       } else {
         &context.search.working_directory
       });
@@ -363,7 +364,7 @@ impl<'src, D> Recipe<'src, D> {
         &path,
         if self.change_directory() {
           if self.depth > 0 {
-            Some(self.path.parent().unwrap())
+            Some(self.file_path.parent().unwrap())
           } else {
             Some(&context.search.working_directory)
           }

src/source.rs (new file)
View File

@@ -0,0 +1,33 @@
+use super::*;
+
+pub(crate) struct Source<'src> {
+  pub(crate) path: PathBuf,
+  pub(crate) depth: u32,
+  pub(crate) namepath: Namepath<'src>,
+}
+
+impl<'src> Source<'src> {
+  pub(crate) fn root(path: &Path) -> Self {
+    Self {
+      path: path.into(),
+      depth: 0,
+      namepath: Namepath::default(),
+    }
+  }
+
+  pub(crate) fn import(&self, path: PathBuf) -> Self {
+    Self {
+      depth: self.depth + 1,
+      path,
+      namepath: self.namepath.clone(),
+    }
+  }
+
+  pub(crate) fn module(&self, name: Name<'src>, path: PathBuf) -> Self {
+    Self {
+      path,
+      depth: self.depth + 1,
+      namepath: self.namepath.join(name),
+    }
+  }
+}
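
`Source` replaces the compiler's old `(PathBuf, u32)` stack entries: both `mod` statements and `import` statements increase the depth, but only `module` extends the namepath, so an imported file's recipes land in the importing module's namespace. A simplified standalone sketch of that distinction (string namepaths in place of `Name` tokens):

struct Source {
  depth: u32,
  namepath: Vec<String>,
}

impl Source {
  fn root() -> Self {
    Self { depth: 0, namepath: Vec::new() }
  }

  // A `mod foo` pushes a child source in a new `foo::` namespace.
  fn module(&self, name: &str) -> Self {
    let mut namepath = self.namepath.clone();
    namepath.push(name.into());
    Self { depth: self.depth + 1, namepath }
  }

  // An `import` nests deeper but stays in the current namespace.
  fn import(&self) -> Self {
    Self { depth: self.depth + 1, namepath: self.namepath.clone() }
  }
}

fn main() {
  let root = Source::root();
  assert_eq!(root.module("foo").namepath, ["foo"]);
  assert!(root.import().namepath.is_empty());
}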

View File

@@ -59,7 +59,8 @@ pub(crate) fn analysis_error(
 ) {
   let tokens = Lexer::test_lex(src).expect("Lexing failed in parse test...");

-  let ast = Parser::parse(0, &PathBuf::new(), &tokens).expect("Parsing failed in analysis test...");
+  let ast = Parser::parse(&PathBuf::new(), &Namepath::default(), 0, &tokens)
+    .expect("Parsing failed in analysis test...");

   let root = PathBuf::from("justfile");
   let mut asts: HashMap<PathBuf, Ast> = HashMap::new();

View File

@@ -1,6 +1,6 @@
 use super::*;

-#[derive(Debug, PartialEq, Clone, Copy)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd)]
 pub(crate) struct Token<'src> {
   pub(crate) column: usize,
   pub(crate) kind: TokenKind,

View File

@@ -50,9 +50,10 @@ impl<'src> UnresolvedRecipe<'src> {
       dependencies,
       depth: self.depth,
       doc: self.doc,
+      file_path: self.file_path,
       name: self.name,
+      namepath: self.namepath,
       parameters: self.parameters,
-      path: self.path,
       priors: self.priors,
       private: self.private,
       quiet: self.quiet,

View File

@@ -60,7 +60,7 @@ impl<'expression, 'src> Iterator for Variables<'expression, 'src> {
           self.stack.push(rhs);
           self.stack.push(lhs);
         }
-        Expression::Variable { name, .. } => return Some(name.token()),
+        Expression::Variable { name, .. } => return Some(name.token),
         Expression::Concatenation { lhs, rhs } => {
           self.stack.push(rhs);
           self.stack.push(lhs);

View File

@@ -34,6 +34,7 @@ fn alias() {
         "dependencies": [],
         "doc": null,
         "name": "foo",
+        "namepath": "foo",
         "parameters": [],
         "priors": 0,
         "private": false,
@@ -118,6 +119,7 @@ fn body() {
         "dependencies": [],
         "doc": null,
         "name": "foo",
+        "namepath": "foo",
         "parameters": [],
         "priors": 0,
         "private": false,
@@ -161,6 +163,7 @@ fn dependencies() {
         "attributes": [],
         "doc": null,
         "name": "bar",
+        "namepath": "bar",
         "body": [],
         "dependencies": [{
           "arguments": [],
@@ -177,6 +180,7 @@ fn dependencies() {
         "dependencies": [],
         "doc": null,
         "name": "foo",
+        "namepath": "foo",
         "parameters": [],
         "priors": 0,
         "private": false,
@@ -239,6 +243,7 @@ fn dependency_argument() {
       "bar": {
         "doc": null,
         "name": "bar",
+        "namepath": "bar",
         "body": [],
         "dependencies": [{
           "arguments": [
@@ -267,6 +272,7 @@ fn dependency_argument() {
         "dependencies": [],
         "doc": null,
         "name": "foo",
+        "namepath": "foo",
         "parameters": [
           {
             "name": "args",
@@ -328,6 +334,7 @@ fn duplicate_recipes() {
         "dependencies": [],
         "doc": null,
         "name": "foo",
+        "namepath": "foo",
         "parameters": [
           {
             "name": "bar",
@@ -377,6 +384,7 @@ fn doc_comment() {
         "dependencies": [],
         "doc": "hello",
         "name": "foo",
+        "namepath": "foo",
         "parameters": [],
         "priors": 0,
         "private": false,
@@ -456,6 +464,7 @@ fn parameters() {
         "dependencies": [],
         "doc": null,
         "name": "a",
+        "namepath": "a",
         "parameters": [],
         "priors": 0,
         "private": false,
@@ -467,6 +476,7 @@ fn parameters() {
         "dependencies": [],
         "doc": null,
         "name": "b",
+        "namepath": "b",
         "parameters": [
           {
             "name": "x",
@@ -486,6 +496,7 @@ fn parameters() {
         "dependencies": [],
         "doc": null,
         "name": "c",
+        "namepath": "c",
         "parameters": [
           {
             "name": "x",
@@ -505,6 +516,7 @@ fn parameters() {
         "dependencies": [],
         "doc": null,
         "name": "d",
+        "namepath": "d",
         "parameters": [
           {
             "name": "x",
@@ -524,6 +536,7 @@ fn parameters() {
         "dependencies": [],
         "doc": null,
         "name": "e",
+        "namepath": "e",
         "parameters": [
           {
             "name": "x",
@@ -543,6 +556,7 @@ fn parameters() {
         "dependencies": [],
         "doc": null,
         "name": "f",
+        "namepath": "f",
         "parameters": [
           {
             "name": "x",
@@ -596,6 +610,7 @@ fn priors() {
         "dependencies": [],
         "doc": null,
         "name": "a",
+        "namepath": "a",
         "parameters": [],
         "priors": 0,
         "private": false,
@@ -617,6 +632,7 @@ fn priors() {
         ],
         "doc": null,
         "name": "b",
+        "namepath": "b",
         "private": false,
         "quiet": false,
         "shebang": false,
@@ -629,6 +645,7 @@ fn priors() {
         "dependencies": [],
         "doc": null,
         "name": "c",
+        "namepath": "c",
         "parameters": [],
         "private": false,
         "quiet": false,
@@ -672,6 +689,7 @@ fn private() {
         "dependencies": [],
         "doc": null,
         "name": "_foo",
+        "namepath": "_foo",
         "parameters": [],
         "priors": 0,
         "private": true,
@@ -714,6 +732,7 @@ fn quiet() {
         "dependencies": [],
         "doc": null,
         "name": "foo",
+        "namepath": "foo",
         "parameters": [],
         "priors": 0,
         "private": false,
@@ -767,6 +786,7 @@ fn settings() {
         "dependencies": [],
         "doc": null,
         "name": "foo",
+        "namepath": "foo",
         "parameters": [],
         "priors": 0,
         "private": false,
@@ -815,6 +835,7 @@ fn shebang() {
         "dependencies": [],
         "doc": null,
         "name": "foo",
+        "namepath": "foo",
         "parameters": [],
         "priors": 0,
         "private": false,
@@ -857,6 +878,7 @@ fn simple() {
         "dependencies": [],
         "doc": null,
         "name": "foo",
+        "namepath": "foo",
         "parameters": [],
         "priors": 0,
         "private": false,
@@ -903,6 +925,7 @@ fn attribute() {
         "dependencies": [],
         "doc": null,
         "name": "foo",
+        "namepath": "foo",
         "parameters": [],
         "priors": 0,
         "private": false,
@@ -961,6 +984,7 @@ fn module() {
         "dependencies": [],
         "doc": null,
         "name": "bar",
+        "namepath": "foo::bar",
         "parameters": [],
         "priors": 0,
         "private": false,

View File

@@ -684,3 +684,23 @@ fn module_paths_beginning_with_tilde_are_expanded_to_homdir() {
     .env("HOME", "foobar")
     .run();
 }
+
+#[test]
+fn recipes_with_same_name_are_both_run() {
+  Test::new()
+    .write("foo.just", "bar:\n @echo MODULE")
+    .justfile(
+      "
+        mod foo
+
+        bar:
+          @echo ROOT
+      ",
+    )
+    .test_round_trip(false)
+    .arg("--unstable")
+    .arg("foo::bar")
+    .arg("bar")
+    .stdout("MODULE\nROOT\n")
+    .run();
+}