Run recipes with working directory set to submodule directory (#1788)
This commit is contained in:
parent
94b3af6cb7
commit
a1bd70a030
@ -20,7 +20,7 @@ impl Compiler {
|
|||||||
let (relative, src) = loader.load(root, &current)?;
|
let (relative, src) = loader.load(root, &current)?;
|
||||||
loaded.push(relative.into());
|
loaded.push(relative.into());
|
||||||
let tokens = Lexer::lex(relative, src)?;
|
let tokens = Lexer::lex(relative, src)?;
|
||||||
let mut ast = Parser::parse(&tokens)?;
|
let mut ast = Parser::parse(current != root, &current, &tokens)?;
|
||||||
|
|
||||||
paths.insert(current.clone(), relative.into());
|
paths.insert(current.clone(), relative.into());
|
||||||
srcs.insert(current.clone(), src);
|
srcs.insert(current.clone(), src);
|
||||||
@ -120,7 +120,7 @@ impl Compiler {
|
|||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
pub(crate) fn test_compile(src: &str) -> CompileResult<Justfile> {
|
pub(crate) fn test_compile(src: &str) -> CompileResult<Justfile> {
|
||||||
let tokens = Lexer::test_lex(src)?;
|
let tokens = Lexer::test_lex(src)?;
|
||||||
let ast = Parser::parse(&tokens)?;
|
let ast = Parser::parse(false, &PathBuf::new(), &tokens)?;
|
||||||
let root = PathBuf::from("justfile");
|
let root = PathBuf::from("justfile");
|
||||||
let mut asts: HashMap<PathBuf, Ast> = HashMap::new();
|
let mut asts: HashMap<PathBuf, Ast> = HashMap::new();
|
||||||
asts.insert(root.clone(), ast);
|
asts.insert(root.clone(), ast);
|
||||||
|
@ -18,7 +18,6 @@ pub(crate) fn chooser_default(justfile: &Path) -> OsString {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, PartialEq)]
|
#[derive(Debug, PartialEq)]
|
||||||
#[allow(clippy::struct_excessive_bools)]
|
|
||||||
pub(crate) struct Config {
|
pub(crate) struct Config {
|
||||||
pub(crate) check: bool,
|
pub(crate) check: bool,
|
||||||
pub(crate) color: Color,
|
pub(crate) color: Color,
|
||||||
|
@ -299,7 +299,6 @@ impl<'src> Justfile<'src> {
|
|||||||
.or_else(|| self.aliases.get(name).map(|alias| alias.target.as_ref()))
|
.or_else(|| self.aliases.get(name).map(|alias| alias.target.as_ref()))
|
||||||
}
|
}
|
||||||
|
|
||||||
#[allow(clippy::too_many_arguments)]
|
|
||||||
fn invocation<'run>(
|
fn invocation<'run>(
|
||||||
&'run self,
|
&'run self,
|
||||||
depth: usize,
|
depth: usize,
|
||||||
|
@ -4,9 +4,13 @@
|
|||||||
clippy::let_underscore_untyped,
|
clippy::let_underscore_untyped,
|
||||||
clippy::needless_pass_by_value,
|
clippy::needless_pass_by_value,
|
||||||
clippy::similar_names,
|
clippy::similar_names,
|
||||||
|
clippy::struct_excessive_bools,
|
||||||
|
clippy::struct_field_names,
|
||||||
|
clippy::too_many_arguments,
|
||||||
clippy::too_many_lines,
|
clippy::too_many_lines,
|
||||||
clippy::unnecessary_wraps,
|
clippy::unnecessary_wraps,
|
||||||
clippy::wildcard_imports
|
clippy::wildcard_imports,
|
||||||
|
overlapping_range_endpoints
|
||||||
)]
|
)]
|
||||||
|
|
||||||
pub(crate) use {
|
pub(crate) use {
|
||||||
|
@ -32,22 +32,28 @@ pub(crate) struct Parser<'tokens, 'src> {
|
|||||||
expected: BTreeSet<TokenKind>,
|
expected: BTreeSet<TokenKind>,
|
||||||
/// Current recursion depth
|
/// Current recursion depth
|
||||||
depth: usize,
|
depth: usize,
|
||||||
|
/// Path to the file being parsed
|
||||||
|
path: PathBuf,
|
||||||
|
/// Parsing a submodule
|
||||||
|
submodule: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'tokens, 'src> Parser<'tokens, 'src> {
|
impl<'tokens, 'src> Parser<'tokens, 'src> {
|
||||||
/// Parse `tokens` into an `Ast`
|
/// Parse `tokens` into an `Ast`
|
||||||
pub(crate) fn parse(tokens: &'tokens [Token<'src>]) -> CompileResult<'src, Ast<'src>> {
|
pub(crate) fn parse(
|
||||||
Self::new(tokens).parse_ast()
|
submodule: bool,
|
||||||
}
|
path: &Path,
|
||||||
|
tokens: &'tokens [Token<'src>],
|
||||||
/// Construct a new Parser from a token stream
|
) -> CompileResult<'src, Ast<'src>> {
|
||||||
fn new(tokens: &'tokens [Token<'src>]) -> Parser<'tokens, 'src> {
|
|
||||||
Parser {
|
Parser {
|
||||||
next: 0,
|
|
||||||
expected: BTreeSet::new(),
|
|
||||||
tokens,
|
|
||||||
depth: 0,
|
depth: 0,
|
||||||
|
expected: BTreeSet::new(),
|
||||||
|
next: 0,
|
||||||
|
path: path.into(),
|
||||||
|
submodule,
|
||||||
|
tokens,
|
||||||
}
|
}
|
||||||
|
.parse_ast()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn error(&self, kind: CompileErrorKind<'src>) -> CompileResult<'src, CompileError<'src>> {
|
fn error(&self, kind: CompileErrorKind<'src>) -> CompileResult<'src, CompileError<'src>> {
|
||||||
@ -707,16 +713,18 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
|
|||||||
let body = self.parse_body()?;
|
let body = self.parse_body()?;
|
||||||
|
|
||||||
Ok(Recipe {
|
Ok(Recipe {
|
||||||
parameters: positional.into_iter().chain(variadic).collect(),
|
|
||||||
private: name.lexeme().starts_with('_'),
|
|
||||||
shebang: body.first().map_or(false, Line::is_shebang),
|
shebang: body.first().map_or(false, Line::is_shebang),
|
||||||
attributes,
|
attributes,
|
||||||
priors,
|
|
||||||
body,
|
body,
|
||||||
dependencies,
|
dependencies,
|
||||||
doc,
|
doc,
|
||||||
name,
|
name,
|
||||||
|
parameters: positional.into_iter().chain(variadic).collect(),
|
||||||
|
path: self.path.clone(),
|
||||||
|
priors,
|
||||||
|
private: name.lexeme().starts_with('_'),
|
||||||
quiet,
|
quiet,
|
||||||
|
submodule: self.submodule,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -934,7 +942,7 @@ mod tests {
|
|||||||
fn test(text: &str, want: Tree) {
|
fn test(text: &str, want: Tree) {
|
||||||
let unindented = unindent(text);
|
let unindented = unindent(text);
|
||||||
let tokens = Lexer::test_lex(&unindented).expect("lexing failed");
|
let tokens = Lexer::test_lex(&unindented).expect("lexing failed");
|
||||||
let justfile = Parser::parse(&tokens).expect("parsing failed");
|
let justfile = Parser::parse(false, &PathBuf::new(), &tokens).expect("parsing failed");
|
||||||
let have = justfile.tree();
|
let have = justfile.tree();
|
||||||
if have != want {
|
if have != want {
|
||||||
println!("parsed text: {unindented}");
|
println!("parsed text: {unindented}");
|
||||||
@ -972,7 +980,7 @@ mod tests {
|
|||||||
) {
|
) {
|
||||||
let tokens = Lexer::test_lex(src).expect("Lexing failed in parse test...");
|
let tokens = Lexer::test_lex(src).expect("Lexing failed in parse test...");
|
||||||
|
|
||||||
match Parser::parse(&tokens) {
|
match Parser::parse(false, &PathBuf::new(), &tokens) {
|
||||||
Ok(_) => panic!("Parsing unexpectedly succeeded"),
|
Ok(_) => panic!("Parsing unexpectedly succeeded"),
|
||||||
Err(have) => {
|
Err(have) => {
|
||||||
let want = CompileError {
|
let want = CompileError {
|
||||||
|
@ -28,10 +28,14 @@ pub(crate) struct Recipe<'src, D = Dependency<'src>> {
|
|||||||
pub(crate) doc: Option<&'src str>,
|
pub(crate) doc: Option<&'src str>,
|
||||||
pub(crate) name: Name<'src>,
|
pub(crate) name: Name<'src>,
|
||||||
pub(crate) parameters: Vec<Parameter<'src>>,
|
pub(crate) parameters: Vec<Parameter<'src>>,
|
||||||
|
#[serde(skip)]
|
||||||
|
pub(crate) path: PathBuf,
|
||||||
pub(crate) priors: usize,
|
pub(crate) priors: usize,
|
||||||
pub(crate) private: bool,
|
pub(crate) private: bool,
|
||||||
pub(crate) quiet: bool,
|
pub(crate) quiet: bool,
|
||||||
pub(crate) shebang: bool,
|
pub(crate) shebang: bool,
|
||||||
|
#[serde(skip)]
|
||||||
|
pub(crate) submodule: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'src, D> Recipe<'src, D> {
|
impl<'src, D> Recipe<'src, D> {
|
||||||
@ -222,7 +226,11 @@ impl<'src, D> Recipe<'src, D> {
|
|||||||
let mut cmd = context.settings.shell_command(config);
|
let mut cmd = context.settings.shell_command(config);
|
||||||
|
|
||||||
if self.change_directory() {
|
if self.change_directory() {
|
||||||
cmd.current_dir(&context.search.working_directory);
|
cmd.current_dir(if self.submodule {
|
||||||
|
self.path.parent().unwrap()
|
||||||
|
} else {
|
||||||
|
&context.search.working_directory
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
cmd.arg(command);
|
cmd.arg(command);
|
||||||
@ -358,7 +366,11 @@ impl<'src, D> Recipe<'src, D> {
|
|||||||
let mut command = Platform::make_shebang_command(
|
let mut command = Platform::make_shebang_command(
|
||||||
&path,
|
&path,
|
||||||
if self.change_directory() {
|
if self.change_directory() {
|
||||||
Some(&context.search.working_directory)
|
if self.submodule {
|
||||||
|
Some(self.path.parent().unwrap())
|
||||||
|
} else {
|
||||||
|
Some(&context.search.working_directory)
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
None
|
None
|
||||||
},
|
},
|
||||||
|
@ -6,7 +6,6 @@ pub(crate) const WINDOWS_POWERSHELL_SHELL: &str = "powershell.exe";
|
|||||||
pub(crate) const WINDOWS_POWERSHELL_ARGS: &[&str] = &["-NoLogo", "-Command"];
|
pub(crate) const WINDOWS_POWERSHELL_ARGS: &[&str] = &["-NoLogo", "-Command"];
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Serialize, Default)]
|
#[derive(Debug, PartialEq, Serialize, Default)]
|
||||||
#[allow(clippy::struct_excessive_bools)]
|
|
||||||
pub(crate) struct Settings<'src> {
|
pub(crate) struct Settings<'src> {
|
||||||
pub(crate) allow_duplicate_recipes: bool,
|
pub(crate) allow_duplicate_recipes: bool,
|
||||||
pub(crate) dotenv_filename: Option<String>,
|
pub(crate) dotenv_filename: Option<String>,
|
||||||
|
@ -59,7 +59,8 @@ pub(crate) fn analysis_error(
|
|||||||
) {
|
) {
|
||||||
let tokens = Lexer::test_lex(src).expect("Lexing failed in parse test...");
|
let tokens = Lexer::test_lex(src).expect("Lexing failed in parse test...");
|
||||||
|
|
||||||
let ast = Parser::parse(&tokens).expect("Parsing failed in analysis test...");
|
let ast =
|
||||||
|
Parser::parse(false, &PathBuf::new(), &tokens).expect("Parsing failed in analysis test...");
|
||||||
|
|
||||||
let root = PathBuf::from("justfile");
|
let root = PathBuf::from("justfile");
|
||||||
let mut asts: HashMap<PathBuf, Ast> = HashMap::new();
|
let mut asts: HashMap<PathBuf, Ast> = HashMap::new();
|
||||||
|
@ -45,16 +45,18 @@ impl<'src> UnresolvedRecipe<'src> {
|
|||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
Ok(Recipe {
|
Ok(Recipe {
|
||||||
|
attributes: self.attributes,
|
||||||
body: self.body,
|
body: self.body,
|
||||||
|
dependencies,
|
||||||
doc: self.doc,
|
doc: self.doc,
|
||||||
name: self.name,
|
name: self.name,
|
||||||
parameters: self.parameters,
|
parameters: self.parameters,
|
||||||
|
path: self.path,
|
||||||
|
priors: self.priors,
|
||||||
private: self.private,
|
private: self.private,
|
||||||
quiet: self.quiet,
|
quiet: self.quiet,
|
||||||
shebang: self.shebang,
|
shebang: self.shebang,
|
||||||
priors: self.priors,
|
submodule: self.submodule,
|
||||||
attributes: self.attributes,
|
|
||||||
dependencies,
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -493,3 +493,39 @@ fn recipes_may_be_named_mod() {
|
|||||||
.stdout("FOO\n")
|
.stdout("FOO\n")
|
||||||
.run();
|
.run();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn submodule_linewise_recipes_run_in_submodule_directory() {
|
||||||
|
Test::new()
|
||||||
|
.write("foo/bar", "BAR")
|
||||||
|
.write("foo/mod.just", "foo:\n @cat bar")
|
||||||
|
.justfile(
|
||||||
|
"
|
||||||
|
mod foo
|
||||||
|
",
|
||||||
|
)
|
||||||
|
.test_round_trip(false)
|
||||||
|
.arg("--unstable")
|
||||||
|
.arg("foo")
|
||||||
|
.arg("foo")
|
||||||
|
.stdout("BAR")
|
||||||
|
.run();
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn submodule_shebang_recipes_run_in_submodule_directory() {
|
||||||
|
Test::new()
|
||||||
|
.write("foo/bar", "BAR")
|
||||||
|
.write("foo/mod.just", "foo:\n #!/bin/sh\n cat bar")
|
||||||
|
.justfile(
|
||||||
|
"
|
||||||
|
mod foo
|
||||||
|
",
|
||||||
|
)
|
||||||
|
.test_round_trip(false)
|
||||||
|
.arg("--unstable")
|
||||||
|
.arg("foo")
|
||||||
|
.arg("foo")
|
||||||
|
.stdout("BAR")
|
||||||
|
.run();
|
||||||
|
}
|
||||||
|
@ -1,3 +1,5 @@
|
|||||||
|
use super::*;
|
||||||
|
|
||||||
#[cfg(windows)]
|
#[cfg(windows)]
|
||||||
test! {
|
test! {
|
||||||
name: powershell,
|
name: powershell,
|
||||||
@ -41,3 +43,17 @@ default:
|
|||||||
"#,
|
"#,
|
||||||
stdout: "Hello-World\r\n",
|
stdout: "Hello-World\r\n",
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn simple() {
|
||||||
|
Test::new()
|
||||||
|
.justfile(
|
||||||
|
"
|
||||||
|
foo:
|
||||||
|
#!/bin/sh
|
||||||
|
echo bar
|
||||||
|
",
|
||||||
|
)
|
||||||
|
.stdout("bar\n")
|
||||||
|
.run();
|
||||||
|
}
|
||||||
|
Loading…
Reference in New Issue
Block a user