Run imports in working directory of importer (#1817)

Casey Rodarmor 2024-01-11 19:00:38 -08:00 committed by GitHub
parent ea1b68644a
commit fbd4a437a0
7 changed files with 150 additions and 36 deletions
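
The change threads a working directory through source loading: a file pulled in with `import` inherits the working directory of its importer, while a `mod` submodule gets its own directory, and recipes use that stored directory when they run. The following is a minimal sketch of that propagation, not the crate's actual code; it is based on the `Source` changes in the diffs below, with the namepath field and error handling omitted.

```rust
// Simplified sketch of how the working directory propagates through sources:
// imports inherit the importer's directory, modules switch to their own.
use std::path::{Path, PathBuf};

struct Source {
    path: PathBuf,
    depth: u32,
    working_directory: PathBuf,
}

impl Source {
    // the root justfile runs recipes in its own directory
    fn root(path: &Path) -> Self {
        Self {
            path: path.into(),
            depth: 0,
            working_directory: path.parent().unwrap().into(),
        }
    }

    // `import 'foo.just'` keeps the importer's working directory
    fn import(&self, path: PathBuf) -> Self {
        Self {
            path,
            depth: self.depth + 1,
            working_directory: self.working_directory.clone(),
        }
    }

    // `mod foo` switches to the submodule's own directory
    fn module(&self, path: PathBuf) -> Self {
        Self {
            working_directory: path.parent().unwrap().into(),
            path,
            depth: self.depth + 1,
        }
    }
}

fn main() {
    let root = Source::root(Path::new("/project/justfile"));
    let import = root.import(PathBuf::from("/project/foo/import.just"));
    // recipes defined in the import still run in /project
    assert_eq!(import.working_directory, Path::new("/project"));
}
```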

View File

@@ -20,7 +20,13 @@ impl Compiler {
let (relative, src) = loader.load(root, &current.path)?;
loaded.push(relative.into());
let tokens = Lexer::lex(relative, src)?;
let mut ast = Parser::parse(&current.path, &current.namepath, current.depth, &tokens)?;
let mut ast = Parser::parse(
&current.path,
&current.namepath,
current.depth,
&tokens,
&current.working_directory,
)?;
paths.insert(current.path.clone(), relative.into());
srcs.insert(current.path.clone(), src);
@@ -162,7 +168,13 @@ impl Compiler {
#[cfg(test)]
pub(crate) fn test_compile(src: &str) -> CompileResult<Justfile> {
let tokens = Lexer::test_lex(src)?;
let ast = Parser::parse(&PathBuf::new(), &Namepath::default(), 0, &tokens)?;
let ast = Parser::parse(
&PathBuf::new(),
&Namepath::default(),
0,
&tokens,
&PathBuf::new(),
)?;
let root = PathBuf::from("justfile");
let mut asts: HashMap<PathBuf, Ast> = HashMap::new();
asts.insert(root.clone(), ast);

View File

@@ -31,6 +31,7 @@ pub(crate) struct Parser<'run, 'src> {
recursion_depth: usize,
submodule_depth: u32,
tokens: &'run [Token<'src>],
working_directory: &'run Path,
}
impl<'run, 'src> Parser<'run, 'src> {
@@ -40,6 +41,7 @@ impl<'run, 'src> Parser<'run, 'src> {
module_namepath: &'run Namepath<'src>,
submodule_depth: u32,
tokens: &'run [Token<'src>],
working_directory: &'run Path,
) -> CompileResult<'src, Ast<'src>> {
Self {
expected_tokens: BTreeSet::new(),
@@ -49,6 +51,7 @@ impl<'run, 'src> Parser<'run, 'src> {
recursion_depth: 0,
submodule_depth,
tokens,
working_directory,
}
.parse_ast()
}
@@ -734,15 +737,16 @@ impl<'run, 'src> Parser<'run, 'src> {
attributes,
body,
dependencies,
depth: self.submodule_depth,
doc,
name,
parameters: positional.into_iter().chain(variadic).collect(),
file_path: self.file_path.into(),
name,
namepath: self.module_namepath.join(name),
parameters: positional.into_iter().chain(variadic).collect(),
priors,
private: name.lexeme().starts_with('_'),
quiet,
depth: self.submodule_depth,
namepath: self.module_namepath.join(name),
working_directory: self.working_directory.into(),
})
}
@@ -960,8 +964,14 @@ mod tests {
fn test(text: &str, want: Tree) {
let unindented = unindent(text);
let tokens = Lexer::test_lex(&unindented).expect("lexing failed");
let justfile =
Parser::parse(&PathBuf::new(), &Namepath::default(), 0, &tokens).expect("parsing failed");
let justfile = Parser::parse(
&PathBuf::new(),
&Namepath::default(),
0,
&tokens,
&PathBuf::new(),
)
.expect("parsing failed");
let have = justfile.tree();
if have != want {
println!("parsed text: {unindented}");
@@ -999,7 +1009,13 @@ mod tests {
) {
let tokens = Lexer::test_lex(src).expect("Lexing failed in parse test...");
match Parser::parse(&PathBuf::new(), &Namepath::default(), 0, &tokens) {
match Parser::parse(
&PathBuf::new(),
&Namepath::default(),
0,
&tokens,
&PathBuf::new(),
) {
Ok(_) => panic!("Parsing unexpectedly succeeded"),
Err(have) => {
let want = CompileError {

View File

@@ -37,6 +37,8 @@ pub(crate) struct Recipe<'src, D = Dependency<'src>> {
pub(crate) private: bool,
pub(crate) quiet: bool,
pub(crate) shebang: bool,
#[serde(skip)]
pub(crate) working_directory: PathBuf,
}
impl<'src, D> Recipe<'src, D> {
@@ -120,6 +122,18 @@ impl<'src, D> Recipe<'src, D> {
!self.attributes.contains(&Attribute::NoExitMessage)
}
fn working_directory<'a>(&'a self, search: &'a Search) -> Option<&Path> {
if self.change_directory() {
Some(if self.depth > 0 {
&self.working_directory
} else {
&search.working_directory
})
} else {
None
}
}
pub(crate) fn run<'run>(
&self,
context: &RecipeContext<'src, 'run>,
@@ -222,12 +236,8 @@ impl<'src, D> Recipe<'src, D> {
let mut cmd = context.settings.shell_command(config);
if self.change_directory() {
cmd.current_dir(if self.depth > 0 {
self.file_path.parent().unwrap()
} else {
&context.search.working_directory
});
if let Some(working_directory) = self.working_directory(context.search) {
cmd.current_dir(working_directory);
}
cmd.arg(command);
@@ -353,30 +363,19 @@ impl<'src, D> Recipe<'src, D> {
})?;
}
// make the script executable
// make script executable
Platform::set_execute_permission(&path).map_err(|error| Error::TmpdirIo {
recipe: self.name(),
io_error: error,
})?;
// create a command to run the script
let mut command = Platform::make_shebang_command(
&path,
if self.change_directory() {
if self.depth > 0 {
Some(self.file_path.parent().unwrap())
} else {
Some(&context.search.working_directory)
}
} else {
None
},
shebang,
)
.map_err(|output_error| Error::Cygpath {
recipe: self.name(),
output_error,
})?;
// create command to run script
let mut command =
Platform::make_shebang_command(&path, self.working_directory(context.search), shebang)
.map_err(|output_error| Error::Cygpath {
recipe: self.name(),
output_error,
})?;
if context.settings.positional_arguments {
command.args(positional);

View File

@@ -4,6 +4,7 @@ pub(crate) struct Source<'src> {
pub(crate) path: PathBuf,
pub(crate) depth: u32,
pub(crate) namepath: Namepath<'src>,
pub(crate) working_directory: PathBuf,
}
impl<'src> Source<'src> {
@@ -12,6 +13,7 @@ impl<'src> Source<'src> {
path: path.into(),
depth: 0,
namepath: Namepath::default(),
working_directory: path.parent().unwrap().into(),
}
}
@@ -20,11 +22,13 @@ impl<'src> Source<'src> {
depth: self.depth + 1,
path,
namepath: self.namepath.clone(),
working_directory: self.working_directory.clone(),
}
}
pub(crate) fn module(&self, name: Name<'src>, path: PathBuf) -> Self {
Self {
working_directory: path.parent().unwrap().into(),
path,
depth: self.depth + 1,
namepath: self.namepath.join(name),

View File

@@ -59,8 +59,14 @@ pub(crate) fn analysis_error(
) {
let tokens = Lexer::test_lex(src).expect("Lexing failed in parse test...");
let ast = Parser::parse(&PathBuf::new(), &Namepath::default(), 0, &tokens)
.expect("Parsing failed in analysis test...");
let ast = Parser::parse(
&PathBuf::new(),
&Namepath::default(),
0,
&tokens,
&PathBuf::new(),
)
.expect("Parsing failed in analysis test...");
let root = PathBuf::from("justfile");
let mut asts: HashMap<PathBuf, Ast> = HashMap::new();

View File

@@ -58,6 +58,7 @@ impl<'src> UnresolvedRecipe<'src> {
private: self.private,
quiet: self.quiet,
shebang: self.shebang,
working_directory: self.working_directory,
})
}
}

View File

@@ -242,3 +242,79 @@ fn optional_imports_dump_correctly() {
.stdout("import? './import.justfile'\n")
.run();
}
#[test]
fn imports_in_root_run_in_justfile_directory() {
Test::new()
.write("foo/import.justfile", "bar:\n @cat baz")
.write("baz", "BAZ")
.justfile(
"
import 'foo/import.justfile'
",
)
.test_round_trip(false)
.arg("bar")
.stdout("BAZ")
.run();
}
#[test]
fn imports_in_submodules_run_in_submodule_directory() {
Test::new()
.justfile("mod foo")
.write("foo/mod.just", "import 'import.just'")
.write("foo/import.just", "bar:\n @cat baz")
.write("foo/baz", "BAZ")
.test_round_trip(false)
.arg("--unstable")
.arg("foo")
.arg("bar")
.stdout("BAZ")
.run();
}
#[test]
fn nested_import_paths_are_relative_to_containing_submodule() {
Test::new()
.justfile("import 'foo/import.just'")
.write("foo/import.just", "import 'bar.just'")
.write("foo/bar.just", "bar:\n @echo BAR")
.test_round_trip(false)
.arg("bar")
.stdout("BAR\n")
.run();
}
#[test]
fn recipes_in_nested_imports_run_in_parent_module() {
Test::new()
.justfile("import 'foo/import.just'")
.write("foo/import.just", "import 'bar/import.just'")
.write("foo/bar/import.just", "bar:\n @cat baz")
.write("baz", "BAZ")
.test_round_trip(false)
.arg("--unstable")
.arg("bar")
.stdout("BAZ")
.run();
}
#[test]
fn shebang_recipes_in_imports_in_root_run_in_justfile_directory() {
Test::new()
.write(
"foo/import.justfile",
"bar:\n #!/usr/bin/env bash\n cat baz",
)
.write("baz", "BAZ")
.justfile(
"
import 'foo/import.justfile'
",
)
.test_round_trip(false)
.arg("bar")
.stdout("BAZ")
.run();
}