Run imports in working directory of importer (#1817)

Casey Rodarmor 2024-01-11 19:00:38 -08:00 committed by GitHub
parent ea1b68644a
commit fbd4a437a0
7 changed files with 150 additions and 36 deletions
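In brief, paraphrasing the diff below: `Source` now carries a working directory (the directory containing the root justfile or a `mod` file, with `import`ed files inheriting their importer's), `Parser::parse` takes that directory and records it on every `Recipe`, and `Recipe::run` uses the recorded directory instead of deriving one from the recipe's `file_path`. The new integration tests at the bottom pin the behavior down: a recipe imported into the root justfile runs in the justfile's directory, and a recipe imported into a submodule runs in the submodule's directory.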

View File

@@ -20,7 +20,13 @@ impl Compiler {
       let (relative, src) = loader.load(root, &current.path)?;
       loaded.push(relative.into());
       let tokens = Lexer::lex(relative, src)?;
-      let mut ast = Parser::parse(&current.path, &current.namepath, current.depth, &tokens)?;
+      let mut ast = Parser::parse(
+        &current.path,
+        &current.namepath,
+        current.depth,
+        &tokens,
+        &current.working_directory,
+      )?;

       paths.insert(current.path.clone(), relative.into());
       srcs.insert(current.path.clone(), src);
@@ -162,7 +168,13 @@ impl Compiler {
   #[cfg(test)]
   pub(crate) fn test_compile(src: &str) -> CompileResult<Justfile> {
     let tokens = Lexer::test_lex(src)?;
-    let ast = Parser::parse(&PathBuf::new(), &Namepath::default(), 0, &tokens)?;
+    let ast = Parser::parse(
+      &PathBuf::new(),
+      &Namepath::default(),
+      0,
+      &tokens,
+      &PathBuf::new(),
+    )?;
     let root = PathBuf::from("justfile");
     let mut asts: HashMap<PathBuf, Ast> = HashMap::new();
     asts.insert(root.clone(), ast);

View File

@@ -31,6 +31,7 @@ pub(crate) struct Parser<'run, 'src> {
   recursion_depth: usize,
   submodule_depth: u32,
   tokens: &'run [Token<'src>],
+  working_directory: &'run Path,
 }

 impl<'run, 'src> Parser<'run, 'src> {
@@ -40,6 +41,7 @@ impl<'run, 'src> Parser<'run, 'src> {
     module_namepath: &'run Namepath<'src>,
     submodule_depth: u32,
     tokens: &'run [Token<'src>],
+    working_directory: &'run Path,
   ) -> CompileResult<'src, Ast<'src>> {
     Self {
       expected_tokens: BTreeSet::new(),
@@ -49,6 +51,7 @@ impl<'run, 'src> Parser<'run, 'src> {
       recursion_depth: 0,
       submodule_depth,
       tokens,
+      working_directory,
     }
     .parse_ast()
   }
@@ -734,15 +737,16 @@ impl<'run, 'src> Parser<'run, 'src> {
       attributes,
       body,
       dependencies,
+      depth: self.submodule_depth,
       doc,
-      name,
-      parameters: positional.into_iter().chain(variadic).collect(),
       file_path: self.file_path.into(),
+      name,
+      namepath: self.module_namepath.join(name),
+      parameters: positional.into_iter().chain(variadic).collect(),
       priors,
       private: name.lexeme().starts_with('_'),
       quiet,
-      depth: self.submodule_depth,
-      namepath: self.module_namepath.join(name),
+      working_directory: self.working_directory.into(),
     })
   }
@@ -960,8 +964,14 @@ mod tests {
   fn test(text: &str, want: Tree) {
     let unindented = unindent(text);
     let tokens = Lexer::test_lex(&unindented).expect("lexing failed");
-    let justfile =
-      Parser::parse(&PathBuf::new(), &Namepath::default(), 0, &tokens).expect("parsing failed");
+    let justfile = Parser::parse(
+      &PathBuf::new(),
+      &Namepath::default(),
+      0,
+      &tokens,
+      &PathBuf::new(),
+    )
+    .expect("parsing failed");
     let have = justfile.tree();
     if have != want {
       println!("parsed text: {unindented}");
@@ -999,7 +1009,13 @@ mod tests {
   ) {
     let tokens = Lexer::test_lex(src).expect("Lexing failed in parse test...");

-    match Parser::parse(&PathBuf::new(), &Namepath::default(), 0, &tokens) {
+    match Parser::parse(
+      &PathBuf::new(),
+      &Namepath::default(),
+      0,
+      &tokens,
+      &PathBuf::new(),
+    ) {
       Ok(_) => panic!("Parsing unexpectedly succeeded"),
       Err(have) => {
         let want = CompileError {

View File

@@ -37,6 +37,8 @@ pub(crate) struct Recipe<'src, D = Dependency<'src>> {
   pub(crate) private: bool,
   pub(crate) quiet: bool,
   pub(crate) shebang: bool,
+  #[serde(skip)]
+  pub(crate) working_directory: PathBuf,
 }

 impl<'src, D> Recipe<'src, D> {
@@ -120,6 +122,18 @@ impl<'src, D> Recipe<'src, D> {
     !self.attributes.contains(&Attribute::NoExitMessage)
   }

+  fn working_directory<'a>(&'a self, search: &'a Search) -> Option<&Path> {
+    if self.change_directory() {
+      Some(if self.depth > 0 {
+        &self.working_directory
+      } else {
+        &search.working_directory
+      })
+    } else {
+      None
+    }
+  }
+
   pub(crate) fn run<'run>(
     &self,
     context: &RecipeContext<'src, 'run>,
@@ -222,12 +236,8 @@ impl<'src, D> Recipe<'src, D> {
       let mut cmd = context.settings.shell_command(config);

-      if self.change_directory() {
-        cmd.current_dir(if self.depth > 0 {
-          self.file_path.parent().unwrap()
-        } else {
-          &context.search.working_directory
-        });
+      if let Some(working_directory) = self.working_directory(context.search) {
+        cmd.current_dir(working_directory);
       }

       cmd.arg(command);
@@ -353,26 +363,15 @@ impl<'src, D> Recipe<'src, D> {
       })?;
     }

-    // make the script executable
+    // make script executable
     Platform::set_execute_permission(&path).map_err(|error| Error::TmpdirIo {
       recipe: self.name(),
       io_error: error,
     })?;

-    // create a command to run the script
-    let mut command = Platform::make_shebang_command(
-      &path,
-      if self.change_directory() {
-        if self.depth > 0 {
-          Some(self.file_path.parent().unwrap())
-        } else {
-          Some(&context.search.working_directory)
-        }
-      } else {
-        None
-      },
-      shebang,
-    )
-    .map_err(|output_error| Error::Cygpath {
+    // create command to run script
+    let mut command =
+      Platform::make_shebang_command(&path, self.working_directory(context.search), shebang)
+        .map_err(|output_error| Error::Cygpath {
       recipe: self.name(),
       output_error,
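For readers skimming the hunks above, the decision the new `Recipe::working_directory` helper makes can be stated in isolation. A minimal, self-contained sketch follows; the free function, parameter names, and `main` are mine for illustration, not part of the commit:

use std::path::Path;

// Condensed restatement of the `Recipe::working_directory` helper added above.
fn select_working_directory<'a>(
  change_directory: bool, // false when the recipe opts out of directory changes
  depth: u32,             // submodule depth: 0 only for recipes written in the root justfile
  recipe_dir: &'a Path,   // `working_directory` recorded on the recipe at parse time
  search_dir: &'a Path,   // working directory discovered by justfile search
) -> Option<&'a Path> {
  if !change_directory {
    None // recipe runs wherever the invocation happened
  } else if depth > 0 {
    Some(recipe_dir) // imported/submodule recipes: directory recorded on their Source
  } else {
    Some(search_dir) // recipes in the root justfile: the search working directory
  }
}

fn main() {
  let module_dir = Path::new("foo");
  let search_dir = Path::new(".");
  assert_eq!(select_working_directory(true, 1, module_dir, search_dir), Some(module_dir));
  assert_eq!(select_working_directory(true, 0, module_dir, search_dir), Some(search_dir));
  assert_eq!(select_working_directory(false, 1, module_dir, search_dir), None);
}

In the crate itself this is a method on `Recipe`: `recipe_dir` is the `working_directory` stamped on the recipe by the parser, and `search_dir` is `context.search.working_directory`.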

View File

@@ -4,6 +4,7 @@ pub(crate) struct Source<'src> {
   pub(crate) path: PathBuf,
   pub(crate) depth: u32,
   pub(crate) namepath: Namepath<'src>,
+  pub(crate) working_directory: PathBuf,
 }

 impl<'src> Source<'src> {
@@ -12,6 +13,7 @@ impl<'src> Source<'src> {
       path: path.into(),
       depth: 0,
       namepath: Namepath::default(),
+      working_directory: path.parent().unwrap().into(),
     }
   }
@@ -20,11 +22,13 @@ impl<'src> Source<'src> {
       depth: self.depth + 1,
       path,
       namepath: self.namepath.clone(),
+      working_directory: self.working_directory.clone(),
     }
   }

   pub(crate) fn module(&self, name: Name<'src>, path: PathBuf) -> Self {
     Self {
+      working_directory: path.parent().unwrap().into(),
       path,
       depth: self.depth + 1,
       namepath: self.namepath.join(name),
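The recipe-side choice above only works because of how `Source` fills in `working_directory` here: the root justfile and `mod` files record their own parent directory, while `import`ed files inherit the importer's. A standalone sketch of the three constructors; the struct name, method shapes, and `main` are illustrative, not the crate's exact API:

use std::path::{Path, PathBuf};

// Illustrative mirror of how `Source` propagates a working directory.
struct Src {
  path: PathBuf,
  depth: u32,
  working_directory: PathBuf,
}

impl Src {
  // Root justfile: the working directory is the justfile's own directory.
  fn root(path: &Path) -> Self {
    Self {
      path: path.into(),
      depth: 0,
      working_directory: path.parent().unwrap().into(),
    }
  }

  // `import 'x'`: the imported file inherits the importer's working directory.
  fn import(&self, path: PathBuf) -> Self {
    Self {
      path,
      depth: self.depth + 1,
      working_directory: self.working_directory.clone(),
    }
  }

  // `mod x`: the submodule's working directory is its own file's directory.
  fn module(&self, path: PathBuf) -> Self {
    Self {
      working_directory: path.parent().unwrap().into(),
      path,
      depth: self.depth + 1,
    }
  }
}

fn main() {
  let root = Src::root(Path::new("project/justfile"));
  let imported = root.import("project/foo/import.justfile".into());
  let submodule = root.module("project/foo/mod.just".into());

  assert_eq!(imported.working_directory, Path::new("project")); // importer's directory
  assert_eq!(submodule.working_directory, Path::new("project/foo")); // module's own directory
  assert_eq!(imported.depth, 1);
  assert_eq!(submodule.path, Path::new("project/foo/mod.just"));
}

Together with the `depth > 0` check in `Recipe::working_directory`, this is what makes a recipe imported into the root justfile run in the justfile's directory and a recipe imported inside `mod foo` run in `foo/`, which are exactly the cases the new tests at the end of this commit exercise.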

View File

@@ -59,7 +59,13 @@ pub(crate) fn analysis_error(
 ) {
   let tokens = Lexer::test_lex(src).expect("Lexing failed in parse test...");

-  let ast = Parser::parse(&PathBuf::new(), &Namepath::default(), 0, &tokens)
+  let ast = Parser::parse(
+    &PathBuf::new(),
+    &Namepath::default(),
+    0,
+    &tokens,
+    &PathBuf::new(),
+  )
   .expect("Parsing failed in analysis test...");

   let root = PathBuf::from("justfile");

View File

@@ -58,6 +58,7 @@ impl<'src> UnresolvedRecipe<'src> {
       private: self.private,
       quiet: self.quiet,
       shebang: self.shebang,
+      working_directory: self.working_directory,
     })
   }
 }

View File

@@ -242,3 +242,79 @@ fn optional_imports_dump_correctly() {
     .stdout("import? './import.justfile'\n")
     .run();
 }
+
+#[test]
+fn imports_in_root_run_in_justfile_directory() {
+  Test::new()
+    .write("foo/import.justfile", "bar:\n @cat baz")
+    .write("baz", "BAZ")
+    .justfile(
+      "
+        import 'foo/import.justfile'
+      ",
+    )
+    .test_round_trip(false)
+    .arg("bar")
+    .stdout("BAZ")
+    .run();
+}
+
+#[test]
+fn imports_in_submodules_run_in_submodule_directory() {
+  Test::new()
+    .justfile("mod foo")
+    .write("foo/mod.just", "import 'import.just'")
+    .write("foo/import.just", "bar:\n @cat baz")
+    .write("foo/baz", "BAZ")
+    .test_round_trip(false)
+    .arg("--unstable")
+    .arg("foo")
+    .arg("bar")
+    .stdout("BAZ")
+    .run();
+}
+
+#[test]
+fn nested_import_paths_are_relative_to_containing_submodule() {
+  Test::new()
+    .justfile("import 'foo/import.just'")
+    .write("foo/import.just", "import 'bar.just'")
+    .write("foo/bar.just", "bar:\n @echo BAR")
+    .test_round_trip(false)
+    .arg("bar")
+    .stdout("BAR\n")
+    .run();
+}
+
+#[test]
+fn recipes_in_nested_imports_run_in_parent_module() {
+  Test::new()
+    .justfile("import 'foo/import.just'")
+    .write("foo/import.just", "import 'bar/import.just'")
+    .write("foo/bar/import.just", "bar:\n @cat baz")
+    .write("baz", "BAZ")
+    .test_round_trip(false)
+    .arg("--unstable")
+    .arg("bar")
+    .stdout("BAZ")
+    .run();
+}
+
+#[test]
+fn shebang_recipes_in_imports_in_root_run_in_justfile_directory() {
+  Test::new()
+    .write(
+      "foo/import.justfile",
+      "bar:\n #!/usr/bin/env bash\n cat baz",
+    )
+    .write("baz", "BAZ")
+    .justfile(
+      "
+        import 'foo/import.justfile'
+      ",
+    )
+    .test_round_trip(false)
+    .arg("bar")
+    .stdout("BAZ")
+    .run();
+}