2019-04-11 15:23:14 -07:00
|
|
|
use crate::common::*;
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2018-12-08 14:29:41 -08:00
|
|
|
use TokenKind::*;
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
/// Just language parser
|
|
|
|
///
|
|
|
|
/// The parser is a (hopefully) straightforward recursive descent parser.
|
|
|
|
///
|
|
|
|
/// It uses a few tokens of lookahead to disambiguate different constructs.
|
|
|
|
///
|
|
|
|
/// The `expect_*` and `presume_*` methods are similar in that they assert
|
|
|
|
/// the type of unparsed tokens and consume them. However, upon encountering
|
|
|
|
/// an unexpected token, the `expect_*` methods return an unexpected token
|
|
|
|
/// error, whereas the `presume_*` methods return an internal error.
|
|
|
|
///
|
|
|
|
/// The `presume_*` methods are used when the token stream has been inspected
|
|
|
|
/// in some other way, and thus encountering an unexpected token is a bug in
|
|
|
|
/// Just, and not a syntax error.
|
|
|
|
///
|
|
|
|
/// All methods starting with `parse_*` parse and return a language construct.
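///
/// A sketch of the intended entry point, assuming the `Lexer::lex` lexer
/// used by the tests at the bottom of this file:
///
/// ```ignore
/// let tokens = Lexer::lex(source)?;
/// let module = Parser::parse(&tokens)?;
/// ```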
|
|
|
|
pub(crate) struct Parser<'tokens, 'src> {
|
|
|
|
/// Source tokens
|
|
|
|
tokens: &'tokens [Token<'src>],
|
|
|
|
/// Index of the next un-parsed token
|
|
|
|
next: usize,
|
2017-11-16 23:30:08 -08:00
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
impl<'tokens, 'src> Parser<'tokens, 'src> {
|
|
|
|
/// Parse `tokens` into a `Module`
|
|
|
|
pub(crate) fn parse(tokens: &'tokens [Token<'src>]) -> CompilationResult<'src, Module<'src>> {
|
|
|
|
Self::new(tokens).parse_justfile()
|
2017-11-16 23:30:08 -08:00
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
/// Construct a new `Parser` from a token stream
|
|
|
|
fn new(tokens: &'tokens [Token<'src>]) -> Parser<'tokens, 'src> {
|
|
|
|
Parser { next: 0, tokens }
|
2017-11-16 23:30:08 -08:00
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
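/// Construct a compilation error of kind `kind` with the token returned by `Parser::next`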
fn error(
|
|
|
|
&self,
|
|
|
|
kind: CompilationErrorKind<'src>,
|
|
|
|
) -> CompilationResult<'src, CompilationError<'src>> {
|
|
|
|
Ok(self.next()?.error(kind))
|
2017-11-16 23:30:08 -08:00
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
/// Construct an unexpected token error with the token returned by `Parser::next`
|
|
|
|
fn unexpected_token(
|
|
|
|
&self,
|
|
|
|
expected: &[TokenKind],
|
|
|
|
) -> CompilationResult<'src, CompilationError<'src>> {
|
|
|
|
let mut expected = expected.to_vec();
|
2019-11-07 10:55:15 -08:00
|
|
|
expected.sort();
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
self.error(CompilationErrorKind::UnexpectedToken {
|
|
|
|
expected,
|
|
|
|
found: self.next()?.kind,
|
|
|
|
})
|
2017-11-16 23:30:08 -08:00
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
fn internal_error(
|
|
|
|
&self,
|
|
|
|
message: impl Into<String>,
|
|
|
|
) -> CompilationResult<'src, CompilationError<'src>> {
|
|
|
|
self.error(CompilationErrorKind::Internal {
|
|
|
|
message: message.into(),
|
|
|
|
})
|
2017-11-16 23:30:08 -08:00
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
/// An iterator over the remaining significant tokens
|
|
|
|
fn rest(&self) -> impl Iterator<Item = Token<'src>> + 'tokens {
|
|
|
|
self.tokens[self.next..]
|
|
|
|
.iter()
|
2019-11-07 10:55:15 -08:00
|
|
|
.cloned()
|
|
|
|
.filter(|token| token.kind != Whitespace)
|
2017-11-16 23:30:08 -08:00
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
/// The next significant token
|
|
|
|
fn next(&self) -> CompilationResult<'src, Token<'src>> {
|
|
|
|
if let Some(token) = self.rest().next() {
|
|
|
|
Ok(token)
|
|
|
|
} else {
|
|
|
|
Err(self.internal_error("`Parser::next()` called after end of token stream")?)
|
2017-11-16 23:30:08 -08:00
|
|
|
}
|
2019-11-07 10:55:15 -08:00
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
/// Check if the next significant token is of kind `kind`
|
|
|
|
fn next_is(&self, kind: TokenKind) -> bool {
|
|
|
|
self.next_are(&[kind])
|
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
/// Check if the next significant tokens are of kinds `kinds`
|
|
|
|
fn next_are(&self, kinds: &[TokenKind]) -> bool {
|
|
|
|
let mut rest = self.rest();
|
|
|
|
for kind in kinds {
|
|
|
|
match rest.next() {
|
|
|
|
Some(token) => {
|
|
|
|
if token.kind != *kind {
|
|
|
|
return false;
|
2018-12-08 14:29:41 -08:00
|
|
|
}
|
|
|
|
}
|
2019-11-07 10:55:15 -08:00
|
|
|
None => return false,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
true
|
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
/// Get the `n`th next significant token
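///
/// `n` is zero-indexed, so `get(0)` returns the same token as `Parser::next`.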
|
|
|
|
fn get(&self, n: usize) -> CompilationResult<'src, Token<'src>> {
|
|
|
|
match self.rest().nth(n) {
|
2019-11-07 10:55:15 -08:00
|
|
|
Some(token) => Ok(token),
|
|
|
|
None => Err(self.internal_error("`Parser::get()` advanced past end of token stream")?),
|
|
|
|
}
|
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
/// Advance past one significant token
|
|
|
|
fn advance(&mut self) -> CompilationResult<'src, Token<'src>> {
|
|
|
|
for skipped in &self.tokens[self.next..] {
|
|
|
|
self.next += 1;
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
if skipped.kind != Whitespace {
|
|
|
|
return Ok(*skipped);
|
2017-11-16 23:30:08 -08:00
|
|
|
}
|
2019-11-07 10:55:15 -08:00
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
Err(self.internal_error("`Parser::advance()` advanced past end of token stream")?)
|
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
/// Return the next token if it is of kind `expected`, otherwise return an
|
|
|
|
/// unexpected token error
|
|
|
|
fn expect(&mut self, expected: TokenKind) -> CompilationResult<'src, Token<'src>> {
|
|
|
|
if let Some(token) = self.accept(expected)? {
|
|
|
|
Ok(token)
|
|
|
|
} else {
|
|
|
|
Err(self.unexpected_token(&[expected])?)
|
|
|
|
}
|
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-10 23:17:47 -08:00
|
|
|
/// Return an unexpected token error if the next token is not one of kinds `expected`.
|
|
|
|
fn expect_any(&mut self, expected: &[TokenKind]) -> CompilationResult<'src, Token<'src>> {
|
|
|
|
for expected in expected.iter().cloned() {
|
|
|
|
if let Some(token) = self.accept(expected)? {
|
|
|
|
return Ok(token);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
Err(self.unexpected_token(expected)?)
|
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
/// Return an unexpected token error if the next token is not an EOL
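///
/// A trailing comment, if present, is consumed before the check.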
|
|
|
|
fn expect_eol(&mut self) -> CompilationResult<'src, ()> {
|
|
|
|
self.accept(Comment)?;
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
if self.next_is(Eof) {
|
|
|
|
return Ok(());
|
2017-11-16 23:30:08 -08:00
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
self.expect(Eol).map(|_| ()).expected(&[Eof])
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Return an internal error if the next token is not of kind `Identifier` with
|
|
|
|
/// lexeme `lexeme`.
|
|
|
|
fn presume_name(&mut self, lexeme: &str) -> CompilationResult<'src, ()> {
|
|
|
|
let next = self.advance()?;
|
|
|
|
|
|
|
|
if next.kind != Identifier {
|
|
|
|
Err(self.internal_error(format!(
|
|
|
|
"Presumed next token would have kind {}, but found {}",
|
|
|
|
Identifier, next.kind
|
|
|
|
))?)
|
|
|
|
} else if next.lexeme() != lexeme {
|
|
|
|
Err(self.internal_error(format!(
|
|
|
|
"Presumed next token would have lexeme \"{}\", but found \"{}\"",
|
|
|
|
lexeme,
|
|
|
|
next.lexeme(),
|
|
|
|
))?)
|
|
|
|
} else {
|
|
|
|
Ok(())
|
2017-11-16 23:30:08 -08:00
|
|
|
}
|
2019-11-07 10:55:15 -08:00
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
/// Return an internal error if the next token is not of kind `kind`.
|
|
|
|
fn presume(&mut self, kind: TokenKind) -> CompilationResult<'src, Token<'src>> {
|
|
|
|
let next = self.advance()?;
|
|
|
|
|
|
|
|
if next.kind != kind {
|
|
|
|
Err(self.internal_error(format!(
|
|
|
|
"Presumed next token would have kind {:?}, but found {:?}",
|
|
|
|
kind, next.kind
|
|
|
|
))?)
|
|
|
|
} else {
|
|
|
|
Ok(next)
|
2017-11-16 23:30:08 -08:00
|
|
|
}
|
2019-11-07 10:55:15 -08:00
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
/// Return an internal error if the next token is not one of kinds `kinds`.
|
|
|
|
fn presume_any(&mut self, kinds: &[TokenKind]) -> CompilationResult<'src, Token<'src>> {
|
|
|
|
let next = self.advance()?;
|
|
|
|
if !kinds.contains(&next.kind) {
|
|
|
|
Err(self.internal_error(format!(
|
|
|
|
"Presumed next token would be {}, but found {}",
|
|
|
|
List::or(kinds),
|
|
|
|
next.kind
|
|
|
|
))?)
|
|
|
|
} else {
|
|
|
|
Ok(next)
|
2017-11-16 23:30:08 -08:00
|
|
|
}
|
2019-11-07 10:55:15 -08:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Accept and return a token of kind `kind`
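///
/// Returns `Ok(None)`, without advancing, if the next token is of a different kind.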
|
|
|
|
fn accept(&mut self, kind: TokenKind) -> CompilationResult<'src, Option<Token<'src>>> {
|
|
|
|
let next = self.next()?;
|
|
|
|
if next.kind == kind {
|
|
|
|
self.advance()?;
|
|
|
|
Ok(Some(next))
|
|
|
|
} else {
|
|
|
|
Ok(None)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Accept a token of kind `Identifier` and parse it into a `Name`
|
|
|
|
fn accept_name(&mut self) -> CompilationResult<'src, Option<Name<'src>>> {
|
|
|
|
if self.next_is(Identifier) {
|
|
|
|
Ok(Some(self.parse_name()?))
|
|
|
|
} else {
|
|
|
|
Ok(None)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Accept and return `true` if the next token is of kind `kind`
|
|
|
|
fn accepted(&mut self, kind: TokenKind) -> CompilationResult<'src, bool> {
|
|
|
|
Ok(self.accept(kind)?.is_some())
|
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
/// Parse a justfile, consuming self
|
|
|
|
fn parse_justfile(mut self) -> CompilationResult<'src, Module<'src>> {
|
|
|
|
let mut items = Vec::new();
|
|
|
|
let mut warnings = Vec::new();
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
let mut doc = None;
|
|
|
|
|
|
|
|
loop {
|
|
|
|
let next = self.next()?;
|
|
|
|
|
|
|
|
match next.kind {
|
|
|
|
Comment => {
|
|
|
|
doc = Some(next.lexeme()[1..].trim());
|
|
|
|
self.expect_eol()?;
|
2017-11-16 23:30:08 -08:00
|
|
|
}
|
2019-11-07 10:55:15 -08:00
|
|
|
Eol => {
|
|
|
|
self.advance()?;
|
2017-11-16 23:30:08 -08:00
|
|
|
}
|
2019-11-07 10:55:15 -08:00
|
|
|
Eof => {
|
|
|
|
self.advance()?;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
Identifier => match next.lexeme() {
|
|
|
|
keyword::ALIAS => {
|
|
|
|
if self.next_are(&[Identifier, Identifier, Equals]) {
|
|
|
|
warnings.push(Warning::DeprecatedEquals {
|
|
|
|
equals: self.get(2)?,
|
|
|
|
});
|
|
|
|
items.push(Item::Alias(self.parse_alias()?));
|
|
|
|
} else if self.next_are(&[Identifier, Identifier, ColonEquals]) {
|
|
|
|
items.push(Item::Alias(self.parse_alias()?));
|
|
|
|
} else {
|
|
|
|
items.push(Item::Recipe(self.parse_recipe(doc, false)?));
|
2017-11-16 23:30:08 -08:00
|
|
|
}
|
2019-11-07 10:55:15 -08:00
|
|
|
}
|
|
|
|
keyword::EXPORT => {
|
|
|
|
if self.next_are(&[Identifier, Identifier, Equals]) {
|
|
|
|
warnings.push(Warning::DeprecatedEquals {
|
|
|
|
equals: self.get(2)?,
|
|
|
|
});
|
|
|
|
self.presume_name(keyword::EXPORT)?;
|
|
|
|
items.push(Item::Assignment(self.parse_assignment(true)?));
|
|
|
|
} else if self.next_are(&[Identifier, Identifier, ColonEquals]) {
|
|
|
|
self.presume_name(keyword::EXPORT)?;
|
|
|
|
items.push(Item::Assignment(self.parse_assignment(true)?));
|
|
|
|
} else {
|
|
|
|
items.push(Item::Recipe(self.parse_recipe(doc, false)?));
|
2017-11-16 23:30:08 -08:00
|
|
|
}
|
|
|
|
}
|
2019-11-10 23:17:47 -08:00
|
|
|
keyword::SET => {
|
|
|
|
if self.next_are(&[Identifier, Identifier, ColonEquals]) {
|
|
|
|
items.push(Item::Set(self.parse_set()?));
|
|
|
|
} else {
|
|
|
|
items.push(Item::Recipe(self.parse_recipe(doc, false)?));
|
|
|
|
}
|
|
|
|
}
|
2019-11-07 10:55:15 -08:00
|
|
|
_ => {
|
|
|
|
if self.next_are(&[Identifier, Equals]) {
|
|
|
|
warnings.push(Warning::DeprecatedEquals {
|
|
|
|
equals: self.get(1)?,
|
|
|
|
});
|
|
|
|
items.push(Item::Assignment(self.parse_assignment(false)?));
|
|
|
|
} else if self.next_are(&[Identifier, ColonEquals]) {
|
|
|
|
items.push(Item::Assignment(self.parse_assignment(false)?));
|
|
|
|
} else {
|
|
|
|
items.push(Item::Recipe(self.parse_recipe(doc, false)?));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
},
|
|
|
|
At => {
|
|
|
|
self.presume(At)?;
|
|
|
|
items.push(Item::Recipe(self.parse_recipe(doc, true)?));
|
2017-11-16 23:30:08 -08:00
|
|
|
}
|
2019-11-07 10:55:15 -08:00
|
|
|
_ => {
|
|
|
|
return Err(self.unexpected_token(&[Identifier, At])?);
|
|
|
|
}
|
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
if next.kind != Comment {
|
|
|
|
doc = None;
|
2017-11-16 23:30:08 -08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
if self.next != self.tokens.len() {
|
|
|
|
Err(self.internal_error(format!(
|
|
|
|
"Parse completed with {} unparsed tokens",
|
|
|
|
self.tokens.len() - self.next,
|
|
|
|
))?)
|
|
|
|
} else {
|
|
|
|
Ok(Module { items, warnings })
|
2019-04-11 23:58:08 -07:00
|
|
|
}
|
2019-11-07 10:55:15 -08:00
|
|
|
}
|
2019-04-11 23:58:08 -07:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
/// Parse an alias, e.g. `alias name := target`
|
|
|
|
fn parse_alias(&mut self) -> CompilationResult<'src, Alias<'src>> {
|
|
|
|
self.presume_name(keyword::ALIAS)?;
|
|
|
|
let name = self.parse_name()?;
|
|
|
|
self.presume_any(&[Equals, ColonEquals])?;
|
|
|
|
let target = self.parse_name()?;
|
|
|
|
self.expect_eol()?;
|
|
|
|
Ok(Alias { name, target })
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Parse an assignment, e.g. `foo := bar`
|
|
|
|
fn parse_assignment(&mut self, export: bool) -> CompilationResult<'src, Assignment<'src>> {
|
|
|
|
let name = self.parse_name()?;
|
|
|
|
self.presume_any(&[Equals, ColonEquals])?;
|
|
|
|
let expression = self.parse_expression()?;
|
|
|
|
self.expect_eol()?;
|
|
|
|
Ok(Assignment {
|
|
|
|
name,
|
|
|
|
export,
|
|
|
|
expression,
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Parse an expression, e.g. `1 + 2`
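///
/// Concatenation is parsed with the right-recursive rule
/// `expression : value ('+' expression)?`, so chained `+` groups to the right.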
|
|
|
|
fn parse_expression(&mut self) -> CompilationResult<'src, Expression<'src>> {
|
|
|
|
let value = self.parse_value()?;
|
|
|
|
|
|
|
|
if self.accepted(Plus)? {
|
|
|
|
let lhs = Box::new(value);
|
|
|
|
let rhs = Box::new(self.parse_expression()?);
|
|
|
|
Ok(Expression::Concatination { lhs, rhs })
|
|
|
|
} else {
|
|
|
|
Ok(value)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Parse a value, e.g. `(bar)`
|
|
|
|
fn parse_value(&mut self) -> CompilationResult<'src, Expression<'src>> {
|
|
|
|
let next = self.next()?;
|
|
|
|
|
|
|
|
match next.kind {
|
|
|
|
StringCooked | StringRaw => Ok(Expression::StringLiteral {
|
|
|
|
string_literal: self.parse_string_literal()?,
|
|
|
|
}),
|
|
|
|
Backtick => {
|
|
|
|
let contents = &next.lexeme()[1..next.lexeme().len() - 1];
|
|
|
|
let token = self.advance()?;
|
|
|
|
Ok(Expression::Backtick { contents, token })
|
|
|
|
}
|
|
|
|
Identifier => {
|
|
|
|
let name = self.parse_name()?;
|
|
|
|
|
|
|
|
if self.next_is(ParenL) {
|
|
|
|
let arguments = self.parse_sequence()?;
|
2019-04-11 23:58:08 -07:00
|
|
|
Ok(Expression::Call {
|
2019-11-07 10:55:15 -08:00
|
|
|
function: name,
|
2018-12-08 14:29:41 -08:00
|
|
|
arguments,
|
2019-04-11 23:58:08 -07:00
|
|
|
})
|
2017-12-02 05:37:10 -08:00
|
|
|
} else {
|
2019-11-07 10:55:15 -08:00
|
|
|
Ok(Expression::Variable { name })
|
2017-12-02 05:37:10 -08:00
|
|
|
}
|
|
|
|
}
|
2019-04-11 23:58:08 -07:00
|
|
|
ParenL => {
|
2019-11-07 10:55:15 -08:00
|
|
|
self.presume(ParenL)?;
|
|
|
|
let contents = Box::new(self.parse_expression()?);
|
|
|
|
self.expect(ParenR)?;
|
|
|
|
Ok(Expression::Group { contents })
|
|
|
|
}
|
|
|
|
_ => Err(self.unexpected_token(&[StringCooked, StringRaw, Backtick, Identifier, ParenL])?),
|
2019-11-07 10:55:15 -08:00
|
|
|
}
|
|
|
|
}
|
2019-04-11 23:58:08 -07:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
/// Parse a string literal, e.g. `"FOO"`
|
|
|
|
fn parse_string_literal(&mut self) -> CompilationResult<'src, StringLiteral<'src>> {
|
2019-11-10 23:17:47 -08:00
|
|
|
let token = self.expect_any(&[StringRaw, StringCooked])?;
|
2019-11-07 10:55:15 -08:00
|
|
|
|
|
|
|
let raw = &token.lexeme()[1..token.lexeme().len() - 1];
|
2019-04-11 23:58:08 -07:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
match token.kind {
|
|
|
|
StringRaw => Ok(StringLiteral {
|
|
|
|
raw,
|
|
|
|
cooked: Cow::Borrowed(raw),
|
|
|
|
}),
|
|
|
|
StringCooked => {
|
|
|
|
let mut cooked = String::new();
|
|
|
|
let mut escape = false;
|
|
|
|
for c in raw.chars() {
|
|
|
|
if escape {
|
|
|
|
match c {
|
|
|
|
'n' => cooked.push('\n'),
|
|
|
|
'r' => cooked.push('\r'),
|
|
|
|
't' => cooked.push('\t'),
|
|
|
|
'\\' => cooked.push('\\'),
|
|
|
|
'"' => cooked.push('"'),
|
|
|
|
other => {
|
|
|
|
return Err(
|
|
|
|
token.error(CompilationErrorKind::InvalidEscapeSequence { character: other }),
|
|
|
|
);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
escape = false;
|
|
|
|
} else if c == '\\' {
|
|
|
|
escape = true;
|
|
|
|
} else {
|
|
|
|
cooked.push(c);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
Ok(StringLiteral {
|
|
|
|
raw,
|
|
|
|
cooked: Cow::Owned(cooked),
|
2019-04-11 23:58:08 -07:00
|
|
|
})
|
|
|
|
}
|
2019-11-07 10:55:15 -08:00
|
|
|
_ => Err(token.error(CompilationErrorKind::Internal {
|
|
|
|
message: "`Parser::parse_string_literal` called on non-string token".to_string(),
|
|
|
|
})),
|
2019-04-11 23:58:08 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
/// Parse a name from an identifier token
|
|
|
|
fn parse_name(&mut self) -> CompilationResult<'src, Name<'src>> {
|
|
|
|
self.expect(Identifier).map(Name::from_identifier)
|
2019-11-07 10:55:15 -08:00
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
/// Parse a sequence of comma-separated expressions
|
|
|
|
fn parse_sequence(&mut self) -> CompilationResult<'src, Vec<Expression<'src>>> {
|
|
|
|
self.presume(ParenL)?;
|
2019-04-11 23:58:08 -07:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
let mut elements = Vec::new();
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
while !self.next_is(ParenR) {
|
|
|
|
elements.push(self.parse_expression().expected(&[ParenR])?);
|
2017-12-02 14:59:07 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
if !self.accepted(Comma)? {
|
|
|
|
break;
|
2017-12-02 14:59:07 -08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
self.expect(ParenR)?;
|
|
|
|
|
|
|
|
Ok(elements)
|
2017-12-02 14:59:07 -08:00
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
/// Parse a recipe
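///
/// e.g. a header such as `foo bar +baz='bob': dep1 dep2`,
/// optionally followed by an indented body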
|
|
|
|
fn parse_recipe(
|
|
|
|
&mut self,
|
|
|
|
doc: Option<&'src str>,
|
|
|
|
quiet: bool,
|
|
|
|
) -> CompilationResult<'src, Recipe<'src>> {
|
|
|
|
let name = self.parse_name()?;
|
|
|
|
|
|
|
|
let mut positional = Vec::new();
|
2017-12-02 14:59:07 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
while self.next_is(Identifier) {
|
|
|
|
positional.push(self.parse_parameter(false)?);
|
2017-12-02 14:59:07 -08:00
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
let variadic = if self.accepted(Plus)? {
|
|
|
|
let variadic = self.parse_parameter(true)?;
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
if let Some(identifier) = self.accept(Identifier)? {
|
|
|
|
return Err(
|
|
|
|
identifier.error(CompilationErrorKind::ParameterFollowsVariadicParameter {
|
|
|
|
parameter: identifier.lexeme(),
|
|
|
|
}),
|
|
|
|
);
|
|
|
|
}
|
2019-04-11 12:30:29 -07:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
Some(variadic)
|
2019-04-11 12:30:29 -07:00
|
|
|
} else {
|
2019-11-07 10:55:15 -08:00
|
|
|
None
|
2019-04-11 12:30:29 -07:00
|
|
|
};
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
let result = self.expect(Colon);
|
|
|
|
|
|
|
|
if result.is_err() {
|
|
|
|
let mut alternatives = Vec::new();
|
|
|
|
|
|
|
|
if variadic.is_none() {
|
|
|
|
alternatives.push(Identifier);
|
|
|
|
}
|
|
|
|
|
|
|
|
if !quiet && variadic.is_none() && positional.is_empty() {
|
|
|
|
alternatives.push(ColonEquals);
|
|
|
|
}
|
|
|
|
|
|
|
|
if variadic.is_some() || !positional.is_empty() {
|
|
|
|
alternatives.push(Equals);
|
|
|
|
}
|
|
|
|
|
|
|
|
if variadic.is_none() {
|
|
|
|
alternatives.push(Plus);
|
|
|
|
}
|
|
|
|
|
|
|
|
result.expected(&alternatives)?;
|
2019-04-11 12:30:29 -07:00
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
let mut dependencies = Vec::new();
|
|
|
|
|
|
|
|
while let Some(dependency) = self.accept_name()? {
|
|
|
|
dependencies.push(dependency);
|
|
|
|
}
|
|
|
|
|
|
|
|
self.expect_eol().expected(&[Identifier])?;
|
|
|
|
|
|
|
|
let body = self.parse_body()?;
|
2019-04-11 12:30:29 -07:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
Ok(Recipe {
|
|
|
|
private: name.lexeme().starts_with('_'),
|
|
|
|
shebang: body.first().map(Line::is_shebang).unwrap_or(false),
|
|
|
|
parameters: positional.into_iter().chain(variadic).collect(),
|
|
|
|
doc,
|
|
|
|
name,
|
|
|
|
quiet,
|
|
|
|
dependencies,
|
|
|
|
body,
|
|
|
|
})
|
2019-04-11 12:30:29 -07:00
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
/// Parse a recipe parameter
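///
/// e.g. `bar` or `bar='baz'`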
|
|
|
|
fn parse_parameter(&mut self, variadic: bool) -> CompilationResult<'src, Parameter<'src>> {
|
|
|
|
let name = self.parse_name()?;
|
|
|
|
|
|
|
|
let default = if self.accepted(Equals)? {
|
|
|
|
Some(self.parse_value()?)
|
|
|
|
} else {
|
|
|
|
None
|
|
|
|
};
|
|
|
|
|
|
|
|
Ok(Parameter {
|
|
|
|
name,
|
|
|
|
default,
|
|
|
|
variadic,
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Parse the body of a recipe
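///
/// i.e. the indented lines following the recipe header, each a mix of
/// literal text and `{{...}}` interpolations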
|
|
|
|
fn parse_body(&mut self) -> CompilationResult<'src, Vec<Line<'src>>> {
|
|
|
|
let mut lines = Vec::new();
|
|
|
|
|
|
|
|
if self.accepted(Indent)? {
|
|
|
|
while !self.accepted(Dedent)? {
|
|
|
|
let line = if self.accepted(Eol)? {
|
|
|
|
Line {
|
|
|
|
fragments: Vec::new(),
|
2018-12-08 14:29:41 -08:00
|
|
|
}
|
2019-11-07 10:55:15 -08:00
|
|
|
} else {
|
|
|
|
let mut fragments = Vec::new();
|
|
|
|
|
|
|
|
while !(self.accepted(Eol)? || self.next_is(Dedent)) {
|
|
|
|
if let Some(token) = self.accept(Text)? {
|
|
|
|
fragments.push(Fragment::Text { token });
|
|
|
|
} else if self.accepted(InterpolationStart)? {
|
|
|
|
fragments.push(Fragment::Interpolation {
|
|
|
|
expression: self.parse_expression()?,
|
2019-09-21 18:53:30 -07:00
|
|
|
});
|
2019-11-07 10:55:15 -08:00
|
|
|
self.expect(InterpolationEnd)?;
|
2017-11-16 23:30:08 -08:00
|
|
|
} else {
|
2019-11-07 10:55:15 -08:00
|
|
|
return Err(self.unexpected_token(&[Text, InterpolationStart])?);
|
2017-11-16 23:30:08 -08:00
|
|
|
}
|
2018-12-08 14:29:41 -08:00
|
|
|
}
|
2019-11-07 10:55:15 -08:00
|
|
|
|
|
|
|
Line { fragments }
|
|
|
|
};
|
|
|
|
|
|
|
|
lines.push(line);
|
2017-11-16 23:30:08 -08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
while lines.last().map(Line::is_empty).unwrap_or(false) {
|
|
|
|
lines.pop();
|
2017-11-16 23:30:08 -08:00
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
Ok(lines)
|
|
|
|
}
|
2019-11-10 23:17:47 -08:00
|
|
|
|
|
|
|
/// Parse a setting
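///
/// Currently only the `shell` setting is recognized,
/// e.g. `set shell := ['bash', '-cu']`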
|
|
|
|
fn parse_set(&mut self) -> CompilationResult<'src, Set<'src>> {
|
|
|
|
self.presume_name(keyword::SET)?;
|
|
|
|
let name = Name::from_identifier(self.presume(Identifier)?);
|
|
|
|
self.presume(ColonEquals)?;
|
|
|
|
match name.lexeme() {
|
|
|
|
keyword::SHELL => {
|
|
|
|
self.expect(BracketL)?;
|
|
|
|
|
|
|
|
let command = self.parse_string_literal()?;
|
|
|
|
|
|
|
|
let mut arguments = Vec::new();
|
|
|
|
|
|
|
|
let mut comma = false;
|
|
|
|
|
|
|
|
if self.accepted(Comma)? {
|
|
|
|
comma = true;
|
|
|
|
while !self.next_is(BracketR) {
|
|
|
|
arguments.push(self.parse_string_literal().expected(&[BracketR])?);
|
|
|
|
|
|
|
|
if !self.accepted(Comma)? {
|
|
|
|
comma = false;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
comma = true;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
self
|
|
|
|
.expect(BracketR)
|
|
|
|
.expected(if comma { &[] } else { &[Comma] })?;
|
|
|
|
|
|
|
|
Ok(Set {
|
|
|
|
value: Setting::Shell(setting::Shell { command, arguments }),
|
|
|
|
name,
|
|
|
|
})
|
|
|
|
}
|
|
|
|
_ => Err(name.error(CompilationErrorKind::UnknownSetting {
|
|
|
|
setting: name.lexeme(),
|
|
|
|
})),
|
|
|
|
}
|
|
|
|
}
|
2019-11-07 10:55:15 -08:00
|
|
|
}
|
2019-04-11 23:58:08 -07:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
#[cfg(test)]
|
|
|
|
mod tests {
|
|
|
|
use super::*;
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-10 23:17:47 -08:00
|
|
|
use pretty_assertions::assert_eq;
|
2019-11-07 10:55:15 -08:00
|
|
|
use testing::unindent;
|
|
|
|
use CompilationErrorKind::*;
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
macro_rules! test {
|
|
|
|
{
|
|
|
|
name: $name:ident,
|
|
|
|
text: $text:expr,
|
|
|
|
tree: $tree:tt,
|
|
|
|
} => {
|
|
|
|
#[test]
|
|
|
|
fn $name() {
|
|
|
|
let text: String = $text.into();
|
|
|
|
let want = tree!($tree);
|
|
|
|
test(&text, want);
|
2017-11-16 23:30:08 -08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
fn test(text: &str, want: Tree) {
|
|
|
|
let unindented = unindent(text);
|
|
|
|
let tokens = Lexer::lex(&unindented).expect("lexing failed");
|
|
|
|
let justfile = Parser::parse(&tokens).expect("parsing failed");
|
|
|
|
let have = justfile.tree();
|
|
|
|
if have != want {
|
|
|
|
println!("parsed text: {}", unindented);
|
|
|
|
println!("expected: {}", want);
|
|
|
|
println!("but got: {}", have);
|
|
|
|
println!("tokens: {:?}", tokens);
|
|
|
|
panic!();
|
|
|
|
}
|
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
macro_rules! error {
|
|
|
|
(
|
|
|
|
name: $name:ident,
|
|
|
|
input: $input:expr,
|
|
|
|
offset: $offset:expr,
|
|
|
|
line: $line:expr,
|
|
|
|
column: $column:expr,
|
|
|
|
width: $width:expr,
|
|
|
|
kind: $kind:expr,
|
|
|
|
) => {
|
2017-11-17 17:28:06 -08:00
|
|
|
#[test]
|
|
|
|
fn $name() {
|
2019-11-07 10:55:15 -08:00
|
|
|
error($input, $offset, $line, $column, $width, $kind);
|
2017-11-17 17:28:06 -08:00
|
|
|
}
|
2018-12-08 14:29:41 -08:00
|
|
|
};
|
2017-11-16 23:30:08 -08:00
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
fn error(
|
|
|
|
src: &str,
|
|
|
|
offset: usize,
|
|
|
|
line: usize,
|
|
|
|
column: usize,
|
|
|
|
width: usize,
|
|
|
|
kind: CompilationErrorKind,
|
|
|
|
) {
|
|
|
|
let expected = CompilationError {
|
|
|
|
src,
|
|
|
|
offset,
|
|
|
|
line,
|
|
|
|
column,
|
|
|
|
width,
|
|
|
|
kind,
|
|
|
|
};
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
let tokens = Lexer::lex(src).expect("Lexing failed in parse test...");
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
match Parser::parse(&tokens) {
|
|
|
|
Ok(_) => panic!("Parsing succeeded but expected: {}\n{}", expected, src),
|
|
|
|
Err(actual) => {
|
|
|
|
assert_eq!(actual, expected);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: empty,
|
|
|
|
text: "",
|
|
|
|
tree: (justfile),
|
2017-11-17 17:28:06 -08:00
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: empty_multiline,
|
|
|
|
text: "
|
|
|
|
|
|
|
|
|
2017-11-16 23:30:08 -08:00
|
|
|
|
|
|
|
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
",
|
|
|
|
tree: (justfile),
|
2019-04-11 23:58:08 -07:00
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: whitespace,
|
|
|
|
text: " ",
|
|
|
|
tree: (justfile),
|
|
|
|
}
|
2019-04-11 23:58:08 -07:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: alias_single,
|
|
|
|
text: "alias t := test",
|
|
|
|
tree: (justfile (alias t test)),
|
2017-11-17 17:28:06 -08:00
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: aliases_multiple,
|
|
|
|
text: "alias t := test\nalias b := build",
|
|
|
|
tree: (
|
|
|
|
justfile
|
|
|
|
(alias t test)
|
|
|
|
(alias b build)
|
|
|
|
),
|
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: alias_equals,
|
|
|
|
text: "alias t = test",
|
|
|
|
tree: (justfile
|
|
|
|
(alias t test)
|
|
|
|
(warning deprecated_equals)
|
|
|
|
),
|
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: export,
|
|
|
|
text: r#"export x := "hello""#,
|
|
|
|
tree: (justfile (assignment #export x "hello")),
|
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: export_equals,
|
|
|
|
text: r#"export x = "hello""#,
|
|
|
|
tree: (justfile
|
|
|
|
(assignment #export x "hello")
|
|
|
|
(warning deprecated_equals)
|
|
|
|
),
|
2017-11-17 17:28:06 -08:00
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: assignment,
|
|
|
|
text: r#"x := "hello""#,
|
|
|
|
tree: (justfile (assignment x "hello")),
|
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: assignment_equals,
|
|
|
|
text: r#"x = "hello""#,
|
|
|
|
tree: (justfile
|
|
|
|
(assignment x "hello")
|
|
|
|
(warning deprecated_equals)
|
|
|
|
),
|
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: backtick,
|
|
|
|
text: "x := `hello`",
|
|
|
|
tree: (justfile (assignment x (backtick "hello"))),
|
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: variable,
|
|
|
|
text: "x := y",
|
|
|
|
tree: (justfile (assignment x y)),
|
2017-11-17 17:28:06 -08:00
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: group,
|
|
|
|
text: "x := (y)",
|
|
|
|
tree: (justfile (assignment x (y))),
|
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: addition_single,
|
|
|
|
text: "x := a + b",
|
|
|
|
tree: (justfile (assignment x (+ a b))),
|
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: addition_chained,
|
|
|
|
text: "x := a + b + c",
|
|
|
|
tree: (justfile (assignment x (+ a (+ b c)))),
|
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: call_one_arg,
|
|
|
|
text: "x := foo(y)",
|
|
|
|
tree: (justfile (assignment x (call foo y))),
|
2017-11-17 17:28:06 -08:00
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: call_multiple_args,
|
|
|
|
text: "x := foo(y, z)",
|
|
|
|
tree: (justfile (assignment x (call foo y z))),
|
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: call_trailing_comma,
|
|
|
|
text: "x := foo(y,)",
|
|
|
|
tree: (justfile (assignment x (call foo y))),
|
2017-11-17 17:28:06 -08:00
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: recipe,
|
|
|
|
text: "foo:",
|
|
|
|
tree: (justfile (recipe foo)),
|
|
|
|
}
|
2019-04-11 12:30:29 -07:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: recipe_multiple,
|
|
|
|
text: "
|
|
|
|
foo:
|
|
|
|
bar:
|
|
|
|
baz:
|
|
|
|
",
|
|
|
|
tree: (justfile (recipe foo) (recipe bar) (recipe baz)),
|
2019-04-11 12:30:29 -07:00
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: recipe_quiet,
|
|
|
|
text: "@foo:",
|
|
|
|
tree: (justfile (recipe #quiet foo)),
|
|
|
|
}
|
2019-04-11 12:30:29 -07:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: recipe_parameter_single,
|
|
|
|
text: "foo bar:",
|
|
|
|
tree: (justfile (recipe foo (params (bar)))),
|
2019-04-11 12:30:29 -07:00
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: recipe_parameter_multiple,
|
|
|
|
text: "foo bar baz:",
|
|
|
|
tree: (justfile (recipe foo (params (bar) (baz)))),
|
|
|
|
}
|
2019-04-11 12:30:29 -07:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: recipe_default_single,
|
|
|
|
text: r#"foo bar="baz":"#,
|
|
|
|
tree: (justfile (recipe foo (params (bar "baz")))),
|
2019-04-11 12:30:29 -07:00
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: recipe_default_multiple,
|
|
|
|
text: r#"foo bar="baz" bob="biz":"#,
|
|
|
|
tree: (justfile (recipe foo (params (bar "baz") (bob "biz")))),
|
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: recipe_variadic,
|
|
|
|
text: r#"foo +bar:"#,
|
|
|
|
tree: (justfile (recipe foo (params +(bar)))),
|
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: recipe_variadic_string_default,
|
|
|
|
text: r#"foo +bar="baz":"#,
|
|
|
|
tree: (justfile (recipe foo (params +(bar "baz")))),
|
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: recipe_variadic_variable_default,
|
|
|
|
text: r#"foo +bar=baz:"#,
|
|
|
|
tree: (justfile (recipe foo (params +(bar baz)))),
|
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: recipe_variadic_addition_group_default,
|
|
|
|
text: r#"foo +bar=(baz + bob):"#,
|
|
|
|
tree: (justfile (recipe foo (params +(bar ((+ baz bob)))))),
|
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: recipe_dependency_single,
|
|
|
|
text: "foo: bar",
|
|
|
|
tree: (justfile (recipe foo (deps bar))),
|
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: recipe_dependency_multiple,
|
|
|
|
text: "foo: bar baz",
|
|
|
|
tree: (justfile (recipe foo (deps bar baz))),
|
2017-11-17 17:28:06 -08:00
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: recipe_line_single,
|
|
|
|
text: "foo:\n bar",
|
|
|
|
tree: (justfile (recipe foo (body ("bar")))),
|
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: recipe_line_multiple,
|
|
|
|
text: "foo:\n bar\n baz\n {{\"bob\"}}biz",
|
|
|
|
tree: (justfile (recipe foo (body ("bar") ("baz") (("bob") "biz")))),
|
2017-11-17 17:28:06 -08:00
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: recipe_line_interpolation,
|
|
|
|
text: "foo:\n bar{{\"bob\"}}biz",
|
|
|
|
tree: (justfile (recipe foo (body ("bar" ("bob") "biz")))),
|
2017-11-17 23:59:55 -08:00
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: comment,
|
|
|
|
text: "# foo",
|
|
|
|
tree: (justfile),
|
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: comment_alias,
|
|
|
|
text: "alias x := y # foo",
|
|
|
|
tree: (justfile (alias x y)),
|
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: comment_assignment,
|
|
|
|
text: "x := y # foo",
|
|
|
|
tree: (justfile (assignment x y)),
|
2017-11-17 17:28:06 -08:00
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: comment_export,
|
|
|
|
text: "export x := y # foo",
|
|
|
|
tree: (justfile (assignment #export x y)),
|
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: comment_recipe,
|
|
|
|
text: "foo: # bar",
|
|
|
|
tree: (justfile (recipe foo)),
|
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: comment_recipe_dependencies,
|
|
|
|
text: "foo: bar # baz",
|
|
|
|
tree: (justfile (recipe foo (deps bar))),
|
2017-11-17 17:28:06 -08:00
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: doc_comment_single,
|
|
|
|
text: "
|
|
|
|
# foo
|
|
|
|
bar:
|
|
|
|
",
|
|
|
|
tree: (justfile (recipe "foo" bar)),
|
2017-11-17 17:28:06 -08:00
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: doc_comment_recipe_clear,
|
|
|
|
text: "
|
|
|
|
# foo
|
|
|
|
bar:
|
|
|
|
baz:
|
|
|
|
",
|
|
|
|
tree: (justfile (recipe "foo" bar) (recipe baz)),
|
2017-11-17 17:28:06 -08:00
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: doc_comment_middle,
|
|
|
|
text: "
|
|
|
|
bar:
|
|
|
|
# foo
|
|
|
|
baz:
|
|
|
|
",
|
|
|
|
tree: (justfile (recipe bar) (recipe "foo" baz)),
|
2017-11-17 17:28:06 -08:00
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: doc_comment_assignment_clear,
|
|
|
|
text: "
|
|
|
|
# foo
|
|
|
|
x := y
|
|
|
|
bar:
|
|
|
|
",
|
|
|
|
tree: (justfile (assignment x y) (recipe bar)),
|
2017-11-17 17:28:06 -08:00
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: doc_comment_empty_line_clear,
|
|
|
|
text: "
|
|
|
|
# foo
|
|
|
|
|
|
|
|
bar:
|
|
|
|
",
|
|
|
|
tree: (justfile (recipe bar)),
|
2017-11-17 17:28:06 -08:00
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: string_escape_tab,
|
|
|
|
text: r#"x := "foo\tbar""#,
|
|
|
|
tree: (justfile (assignment x "foo\tbar")),
|
|
|
|
}
|
2017-12-02 14:59:07 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: string_escape_newline,
|
|
|
|
text: r#"x := "foo\nbar""#,
|
|
|
|
tree: (justfile (assignment x "foo\nbar")),
|
|
|
|
}
|
|
|
|
|
|
|
|
test! {
|
|
|
|
name: string_escape_carriage_return,
|
|
|
|
text: r#"x := "foo\rbar""#,
|
|
|
|
tree: (justfile (assignment x "foo\rbar")),
|
|
|
|
}
|
|
|
|
|
|
|
|
test! {
|
|
|
|
name: string_escape_slash,
|
|
|
|
text: r#"x := "foo\\bar""#,
|
|
|
|
tree: (justfile (assignment x "foo\\bar")),
|
|
|
|
}
|
2017-12-02 14:59:07 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: string_escape_quote,
|
|
|
|
text: r#"x := "foo\"bar""#,
|
|
|
|
tree: (justfile (assignment x "foo\"bar")),
|
2017-12-02 14:59:07 -08:00
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: recipe_variadic_with_default_after_default,
|
|
|
|
text: r#"
|
|
|
|
f a=b +c=d:
|
|
|
|
"#,
|
|
|
|
tree: (justfile (recipe f (params (a b) +(c d)))),
|
|
|
|
}
|
2017-12-02 14:59:07 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: parameter_default_concatination_variable,
|
|
|
|
text: r#"
|
|
|
|
x := "10"
|
2017-12-02 14:59:07 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
f y=(`echo hello` + x) +z="foo":
|
|
|
|
"#,
|
|
|
|
tree: (justfile
|
|
|
|
(assignment x "10")
|
|
|
|
(recipe f (params (y ((+ (backtick "echo hello") x))) +(z "foo")))
|
|
|
|
),
|
2019-04-11 23:58:08 -07:00
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: parameter_default_multiple,
|
|
|
|
text: r#"
|
|
|
|
x := "10"
|
|
|
|
f y=(`echo hello` + x) +z=("foo" + "bar"):
|
|
|
|
"#,
|
|
|
|
tree: (justfile
|
|
|
|
(assignment x "10")
|
|
|
|
(recipe f (params (y ((+ (backtick "echo hello") x))) +(z ((+ "foo" "bar")))))
|
|
|
|
),
|
2019-04-11 23:58:08 -07:00
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: parse_raw_string_default,
|
|
|
|
text: r#"
|
|
|
|
|
|
|
|
foo a='b\t':
|
|
|
|
|
|
|
|
|
|
|
|
"#,
|
|
|
|
tree: (justfile (recipe foo (params (a "b\\t")))),
|
2019-04-11 23:58:08 -07:00
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: parse_alias_after_target,
|
|
|
|
text: r"
|
|
|
|
foo:
|
|
|
|
echo a
|
|
|
|
alias f := foo
|
|
|
|
",
|
|
|
|
tree: (justfile
|
|
|
|
(recipe foo (body ("echo a")))
|
|
|
|
(alias f foo)
|
|
|
|
),
|
|
|
|
}
|
|
|
|
|
|
|
|
test! {
|
|
|
|
name: parse_alias_before_target,
|
|
|
|
text: "
|
|
|
|
alias f := foo
|
|
|
|
foo:
|
|
|
|
echo a
|
|
|
|
",
|
|
|
|
tree: (justfile
|
|
|
|
(alias f foo)
|
|
|
|
(recipe foo (body ("echo a")))
|
|
|
|
),
|
|
|
|
}
|
|
|
|
|
|
|
|
test! {
|
|
|
|
name: parse_alias_with_comment,
|
|
|
|
text: "
|
|
|
|
alias f := foo #comment
|
|
|
|
foo:
|
|
|
|
echo a
|
|
|
|
",
|
|
|
|
tree: (justfile
|
|
|
|
(alias f foo)
|
|
|
|
(recipe foo (body ("echo a")))
|
|
|
|
),
|
|
|
|
}
|
|
|
|
|
|
|
|
test! {
|
|
|
|
name: parse_assignment_with_comment,
|
|
|
|
text: "
|
|
|
|
f := foo #comment
|
|
|
|
foo:
|
|
|
|
echo a
|
|
|
|
",
|
|
|
|
tree: (justfile
|
|
|
|
(assignment f foo)
|
|
|
|
(recipe foo (body ("echo a")))
|
|
|
|
),
|
|
|
|
}
|
|
|
|
|
|
|
|
test! {
|
|
|
|
name: parse_complex,
|
|
|
|
text: "
|
|
|
|
x:
|
|
|
|
y:
|
|
|
|
z:
|
|
|
|
foo := \"xx\"
|
|
|
|
bar := foo
|
|
|
|
goodbye := \"y\"
|
|
|
|
hello a b c : x y z #hello
|
|
|
|
#! blah
|
|
|
|
#blarg
|
|
|
|
{{ foo + bar}}abc{{ goodbye\t + \"x\" }}xyz
|
|
|
|
1
|
|
|
|
2
|
|
|
|
3
|
|
|
|
",
|
|
|
|
tree: (justfile
|
|
|
|
(recipe x)
|
|
|
|
(recipe y)
|
|
|
|
(recipe z)
|
|
|
|
(assignment foo "xx")
|
|
|
|
(assignment bar foo)
|
|
|
|
(assignment goodbye "y")
|
|
|
|
(recipe hello
|
|
|
|
(params (a) (b) (c))
|
|
|
|
(deps x y z)
|
|
|
|
(body
|
|
|
|
("#! blah")
|
|
|
|
("#blarg")
|
|
|
|
(((+ foo bar)) "abc" ((+ goodbye "x")) "xyz")
|
|
|
|
("1")
|
|
|
|
("2")
|
|
|
|
("3")
|
|
|
|
)
|
|
|
|
)
|
|
|
|
),
|
|
|
|
}
|
|
|
|
|
|
|
|
test! {
|
|
|
|
name: parse_shebang,
|
|
|
|
text: "
|
|
|
|
practicum := 'hello'
|
|
|
|
install:
|
|
|
|
\t#!/bin/sh
|
|
|
|
\tif [[ -f {{practicum}} ]]; then
|
|
|
|
\t\treturn
|
|
|
|
\tfi
|
|
|
|
",
|
|
|
|
tree: (justfile
|
|
|
|
(assignment practicum "hello")
|
|
|
|
(recipe install
|
|
|
|
(body
|
|
|
|
("#!/bin/sh")
|
|
|
|
("if [[ -f " (practicum) " ]]; then")
|
|
|
|
("\treturn")
|
|
|
|
("fi")
|
|
|
|
)
|
|
|
|
)
|
|
|
|
),
|
|
|
|
}
|
|
|
|
|
|
|
|
test! {
|
|
|
|
name: parse_simple_shebang,
|
|
|
|
text: "a:\n #!\n print(1)",
|
|
|
|
tree: (justfile
|
|
|
|
(recipe a (body ("#!") (" print(1)")))
|
|
|
|
),
|
|
|
|
}
|
|
|
|
|
|
|
|
test! {
|
|
|
|
name: parse_assignments,
|
|
|
|
text: r#"
|
|
|
|
a := "0"
|
|
|
|
c := a + b + a + b
|
|
|
|
b := "1"
|
|
|
|
"#,
|
|
|
|
tree: (justfile
|
|
|
|
(assignment a "0")
|
|
|
|
(assignment c (+ a (+ b (+ a b))))
|
|
|
|
(assignment b "1")
|
|
|
|
),
|
|
|
|
}
|
|
|
|
|
|
|
|
test! {
|
|
|
|
name: parse_assignment_backticks,
|
|
|
|
text: "
|
|
|
|
a := `echo hello`
|
|
|
|
c := a + b + a + b
|
|
|
|
b := `echo goodbye`
|
|
|
|
",
|
|
|
|
tree: (justfile
|
|
|
|
(assignment a (backtick "echo hello"))
|
|
|
|
(assignment c (+ a (+ b (+ a b))))
|
|
|
|
(assignment b (backtick "echo goodbye"))
|
|
|
|
),
|
|
|
|
}
|
|
|
|
|
|
|
|
test! {
|
|
|
|
name: parse_interpolation_backticks,
|
|
|
|
text: r#"
|
|
|
|
a:
|
|
|
|
echo {{ `echo hello` + "blarg" }} {{ `echo bob` }}
|
|
|
|
"#,
|
|
|
|
tree: (justfile
|
|
|
|
(recipe a
|
|
|
|
(body ("echo " ((+ (backtick "echo hello") "blarg")) " " ((backtick "echo bob"))))
|
|
|
|
)
|
|
|
|
),
|
|
|
|
}
|
|
|
|
|
|
|
|
test! {
|
|
|
|
name: eof_test,
|
|
|
|
text: "x:\ny:\nz:\na b c: x y z",
|
|
|
|
tree: (justfile
|
|
|
|
(recipe x)
|
|
|
|
(recipe y)
|
|
|
|
(recipe z)
|
|
|
|
(recipe a (params (b) (c)) (deps x y z))
|
|
|
|
),
|
|
|
|
}
|
|
|
|
|
|
|
|
test! {
|
|
|
|
name: string_quote_escape,
|
|
|
|
text: r#"a := "hello\"""#,
|
|
|
|
tree: (justfile
|
|
|
|
(assignment a "hello\"")
|
|
|
|
),
|
|
|
|
}
|
|
|
|
|
|
|
|
test! {
|
|
|
|
name: string_escapes,
|
|
|
|
text: r#"a := "\n\t\r\"\\""#,
|
|
|
|
tree: (justfile (assignment a "\n\t\r\"\\")),
|
|
|
|
}
|
|
|
|
|
|
|
|
test! {
|
|
|
|
name: parameters,
|
|
|
|
text: "
|
|
|
|
a b c:
|
|
|
|
{{b}} {{c}}
|
|
|
|
",
|
|
|
|
tree: (justfile (recipe a (params (b) (c)) (body ((b) " " (c))))),
|
2019-04-11 23:58:08 -07:00
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: unary_functions,
|
|
|
|
text: "
|
|
|
|
x := arch()
|
|
|
|
|
|
|
|
a:
|
|
|
|
{{os()}} {{os_family()}}
|
|
|
|
",
|
|
|
|
tree: (justfile
|
|
|
|
(assignment x (call arch))
|
|
|
|
(recipe a (body (((call os)) " " ((call os_family)))))
|
|
|
|
),
|
|
|
|
}
|
|
|
|
|
|
|
|
test! {
|
|
|
|
name: env_functions,
|
|
|
|
text: r#"
|
|
|
|
x := env_var('foo',)
|
|
|
|
|
|
|
|
a:
|
|
|
|
{{env_var_or_default('foo' + 'bar', 'baz',)}} {{env_var(env_var("baz"))}}
|
|
|
|
"#,
|
|
|
|
tree: (justfile
|
|
|
|
(assignment x (call env_var "foo"))
|
|
|
|
(recipe a
|
|
|
|
(body
|
|
|
|
(
|
|
|
|
((call env_var_or_default (+ "foo" "bar") "baz"))
|
|
|
|
" "
|
|
|
|
((call env_var (call env_var "baz")))
|
|
|
|
)
|
|
|
|
)
|
|
|
|
)
|
|
|
|
),
|
|
|
|
}
|
|
|
|
|
|
|
|
test! {
|
|
|
|
name: parameter_default_string,
|
|
|
|
text: r#"
|
|
|
|
f x="abc":
|
|
|
|
"#,
|
|
|
|
tree: (justfile (recipe f (params (x "abc")))),
|
|
|
|
}
|
|
|
|
|
|
|
|
test! {
|
|
|
|
name: parameter_default_raw_string,
|
|
|
|
text: r"
|
|
|
|
f x='abc':
|
|
|
|
",
|
|
|
|
tree: (justfile (recipe f (params (x "abc")))),
|
2019-04-11 23:58:08 -07:00
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: parameter_default_backtick,
|
|
|
|
text: "
|
|
|
|
f x=`echo hello`:
|
|
|
|
",
|
|
|
|
tree: (justfile
|
|
|
|
(recipe f (params (x (backtick "echo hello"))))
|
|
|
|
),
|
|
|
|
}
|
2019-04-11 23:58:08 -07:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: parameter_default_concatination_string,
|
|
|
|
text: r#"
|
|
|
|
f x=(`echo hello` + "foo"):
|
|
|
|
"#,
|
|
|
|
tree: (justfile (recipe f (params (x ((+ (backtick "echo hello") "foo")))))),
|
2019-04-11 23:58:08 -07:00
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: concatination_in_group,
|
|
|
|
text: "x := ('0' + '1')",
|
|
|
|
tree: (justfile (assignment x ((+ "0" "1")))),
|
|
|
|
}
|
2019-04-11 23:58:08 -07:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: string_in_group,
|
|
|
|
text: "x := ('0' )",
|
|
|
|
tree: (justfile (assignment x ("0"))),
|
2019-04-11 23:58:08 -07:00
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: escaped_dos_newlines,
|
|
|
|
text: "
|
|
|
|
@spam:\r
|
|
|
|
\t{ \\\r
|
|
|
|
\t\tfiglet test; \\\r
|
|
|
|
\t\tcargo build --color always 2>&1; \\\r
|
|
|
|
\t\tcargo test --color always -- --color always 2>&1; \\\r
|
|
|
|
\t} | less\r
|
|
|
|
",
|
|
|
|
tree: (justfile
|
|
|
|
(recipe #quiet spam
|
|
|
|
(body
|
|
|
|
("{ \\")
|
|
|
|
("\tfiglet test; \\")
|
|
|
|
("\tcargo build --color always 2>&1; \\")
|
|
|
|
("\tcargo test --color always -- --color always 2>&1; \\")
|
|
|
|
("} | less")
|
|
|
|
)
|
|
|
|
)
|
|
|
|
),
|
2019-04-11 23:58:08 -07:00
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: empty_body,
|
|
|
|
text: "a:",
|
|
|
|
tree: (justfile (recipe a)),
|
2017-12-02 14:59:07 -08:00
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: single_line_body,
|
|
|
|
text: "a:\n foo",
|
|
|
|
tree: (justfile (recipe a (body ("foo")))),
|
2019-04-15 22:40:02 -07:00
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
test! {
|
|
|
|
name: trimmed_body,
|
|
|
|
text: "a:\n foo\n \n \n \nb:\n ",
|
|
|
|
tree: (justfile (recipe a (body ("foo"))) (recipe b)),
|
2019-04-11 12:30:29 -07:00
|
|
|
}
|
|
|
|
|
2019-11-10 23:17:47 -08:00
|
|
|
test! {
|
|
|
|
name: set_shell_no_arguments,
|
|
|
|
text: "set shell := ['tclsh']",
|
|
|
|
tree: (justfile (set shell "tclsh")),
|
|
|
|
}
|
|
|
|
|
|
|
|
test! {
|
|
|
|
name: set_shell_no_arguments_cooked,
|
|
|
|
text: "set shell := [\"tclsh\"]",
|
|
|
|
tree: (justfile (set shell "tclsh")),
|
|
|
|
}
|
|
|
|
|
|
|
|
test! {
|
|
|
|
name: set_shell_no_arguments_trailing_comma,
|
|
|
|
text: "set shell := ['tclsh',]",
|
|
|
|
tree: (justfile (set shell "tclsh")),
|
|
|
|
}
|
|
|
|
|
|
|
|
test! {
|
|
|
|
name: set_shell_with_one_argument,
|
|
|
|
text: "set shell := ['bash', '-cu']",
|
|
|
|
tree: (justfile (set shell "bash" "-cu")),
|
|
|
|
}
|
|
|
|
|
|
|
|
test! {
|
|
|
|
name: set_shell_with_one_argument_trailing_comma,
|
|
|
|
text: "set shell := ['bash', '-cu',]",
|
|
|
|
tree: (justfile (set shell "bash" "-cu")),
|
|
|
|
}
|
|
|
|
|
|
|
|
test! {
|
|
|
|
name: set_shell_with_two_arguments,
|
|
|
|
text: "set shell := ['bash', '-cu', '-l']",
|
|
|
|
tree: (justfile (set shell "bash" "-cu" "-l")),
|
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
error! {
|
2019-04-11 12:30:29 -07:00
|
|
|
name: alias_syntax_multiple_rhs,
|
|
|
|
input: "alias foo = bar baz",
|
2019-04-15 22:40:02 -07:00
|
|
|
offset: 16,
|
2019-04-11 12:30:29 -07:00
|
|
|
line: 0,
|
|
|
|
column: 16,
|
2019-04-15 22:40:02 -07:00
|
|
|
width: 3,
|
2019-11-07 10:55:15 -08:00
|
|
|
kind: UnexpectedToken { expected: vec![Eof, Eol], found: Identifier },
|
2019-04-11 12:30:29 -07:00
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
error! {
|
2019-04-11 12:30:29 -07:00
|
|
|
name: alias_syntax_no_rhs,
|
|
|
|
input: "alias foo = \n",
|
2019-04-15 22:40:02 -07:00
|
|
|
offset: 12,
|
2019-04-11 12:30:29 -07:00
|
|
|
line: 0,
|
|
|
|
column: 12,
|
2019-04-15 22:40:02 -07:00
|
|
|
width: 1,
|
2019-11-07 10:55:15 -08:00
|
|
|
kind: UnexpectedToken {expected: vec![Identifier], found:Eol},
|
2019-04-11 12:30:29 -07:00
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
error! {
|
2017-11-17 23:59:55 -08:00
|
|
|
name: missing_colon,
|
|
|
|
input: "a b c\nd e f",
|
2019-04-15 22:40:02 -07:00
|
|
|
offset: 5,
|
2017-11-17 23:59:55 -08:00
|
|
|
line: 0,
|
|
|
|
column: 5,
|
2019-04-15 22:40:02 -07:00
|
|
|
width: 1,
|
2019-11-07 10:55:15 -08:00
|
|
|
kind: UnexpectedToken{expected: vec![Colon, Equals, Identifier, Plus], found: Eol},
|
2017-11-17 23:59:55 -08:00
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
error! {
|
2017-11-17 23:59:55 -08:00
|
|
|
name: missing_default_eol,
|
|
|
|
input: "hello arg=\n",
|
2019-04-15 22:40:02 -07:00
|
|
|
offset: 10,
|
2017-11-17 23:59:55 -08:00
|
|
|
line: 0,
|
|
|
|
column: 10,
|
2019-04-15 22:40:02 -07:00
|
|
|
width: 1,
|
|
|
|
kind: UnexpectedToken {
|
|
|
|
expected: vec![Backtick, Identifier, ParenL, StringCooked, StringRaw],
|
|
|
|
found: Eol
|
|
|
|
},
|
2017-11-17 23:59:55 -08:00
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
error! {
|
2017-11-17 23:59:55 -08:00
|
|
|
name: missing_default_eof,
|
|
|
|
input: "hello arg=",
|
2019-04-15 22:40:02 -07:00
|
|
|
offset: 10,
|
2017-11-17 23:59:55 -08:00
|
|
|
line: 0,
|
|
|
|
column: 10,
|
2019-04-15 22:40:02 -07:00
|
|
|
width: 0,
|
|
|
|
kind: UnexpectedToken {
|
|
|
|
expected: vec![Backtick, Identifier, ParenL, StringCooked, StringRaw],
|
|
|
|
found: Eof,
|
|
|
|
},
|
2017-11-17 23:59:55 -08:00
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
error! {
|
2017-11-17 23:59:55 -08:00
|
|
|
name: missing_eol,
|
|
|
|
input: "a b c: z =",
|
2019-04-15 22:40:02 -07:00
|
|
|
offset: 9,
|
2017-11-17 23:59:55 -08:00
|
|
|
line: 0,
|
|
|
|
column: 9,
|
2019-04-15 22:40:02 -07:00
|
|
|
width: 1,
|
2019-11-07 10:55:15 -08:00
|
|
|
kind: UnexpectedToken{expected: vec![Eof, Eol, Identifier], found: Equals},
|
2017-11-17 23:59:55 -08:00
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
error! {
|
2017-11-17 23:59:55 -08:00
|
|
|
name: interpolation_outside_of_recipe,
|
|
|
|
input: "{{",
|
2019-04-15 22:40:02 -07:00
|
|
|
offset: 0,
|
2017-11-17 23:59:55 -08:00
|
|
|
line: 0,
|
|
|
|
column: 0,
|
2019-04-15 22:40:02 -07:00
|
|
|
width: 2,
|
2019-11-07 10:55:15 -08:00
|
|
|
kind: UnexpectedToken{expected: vec![At, Identifier], found: InterpolationStart},
|
2017-11-17 23:59:55 -08:00
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
error! {
|
2017-12-02 05:37:10 -08:00
|
|
|
name: unclosed_parenthesis_in_expression,
|
|
|
|
input: "x = foo(",
|
2019-11-07 10:55:15 -08:00
|
|
|
offset: 8,
|
2017-12-02 05:37:10 -08:00
|
|
|
line: 0,
|
|
|
|
column: 8,
|
2019-04-15 22:40:02 -07:00
|
|
|
width: 0,
|
2019-11-07 10:55:15 -08:00
|
|
|
kind: UnexpectedToken{
|
|
|
|
expected: vec![Backtick, Identifier, ParenL, ParenR, StringCooked, StringRaw],
|
|
|
|
found: Eof,
|
|
|
|
},
|
2017-12-02 05:37:10 -08:00
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
error! {
|
2017-12-02 05:37:10 -08:00
|
|
|
name: unclosed_parenthesis_in_interpolation,
|
|
|
|
input: "a:\n echo {{foo(}}",
|
2019-04-15 22:40:02 -07:00
|
|
|
offset: 15,
|
2017-12-02 05:37:10 -08:00
|
|
|
line: 1,
|
|
|
|
column: 12,
|
2019-04-15 22:40:02 -07:00
|
|
|
width: 2,
|
2019-11-07 10:55:15 -08:00
|
|
|
kind: UnexpectedToken{
|
|
|
|
expected: vec![Backtick, Identifier, ParenL, ParenR, StringCooked, StringRaw],
|
|
|
|
found: InterpolationEnd,
|
|
|
|
},
|
2017-12-02 05:37:10 -08:00
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
error! {
|
2017-11-17 23:59:55 -08:00
|
|
|
name: plus_following_parameter,
|
|
|
|
input: "a b c+:",
|
2019-11-07 10:55:15 -08:00
|
|
|
offset: 6,
|
2017-11-17 23:59:55 -08:00
|
|
|
line: 0,
|
2019-11-07 10:55:15 -08:00
|
|
|
column: 6,
|
2019-04-15 22:40:02 -07:00
|
|
|
width: 1,
|
2019-11-07 10:55:15 -08:00
|
|
|
kind: UnexpectedToken{expected: vec![Identifier], found: Colon},
|
|
|
|
}
|
|
|
|
|
|
|
|
error! {
|
|
|
|
name: invalid_escape_sequence,
|
|
|
|
input: r#"foo := "\b""#,
|
|
|
|
offset: 7,
|
|
|
|
line: 0,
|
|
|
|
column: 7,
|
|
|
|
width: 4,
|
|
|
|
kind: InvalidEscapeSequence{character: 'b'},
|
2017-11-17 17:28:06 -08:00
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
error! {
|
2018-10-13 02:39:26 -07:00
|
|
|
name: bad_export,
|
|
|
|
input: "export a",
|
2019-04-15 22:40:02 -07:00
|
|
|
offset: 8,
|
2018-10-13 02:39:26 -07:00
|
|
|
line: 0,
|
|
|
|
column: 8,
|
2019-04-15 22:40:02 -07:00
|
|
|
width: 0,
|
2019-11-07 10:55:15 -08:00
|
|
|
kind: UnexpectedToken{expected: vec![Colon, Equals, Identifier, Plus], found: Eof},
|
2017-11-16 23:30:08 -08:00
|
|
|
}
|
2019-04-11 23:58:08 -07:00
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
error! {
|
|
|
|
name: parameter_follows_variadic_parameter,
|
|
|
|
input: "foo +a b:",
|
|
|
|
offset: 7,
|
|
|
|
line: 0,
|
|
|
|
column: 7,
|
|
|
|
width: 1,
|
|
|
|
kind: ParameterFollowsVariadicParameter{parameter: "b"},
|
2019-04-11 23:58:08 -07:00
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
error! {
|
|
|
|
name: parameter_after_variadic,
|
|
|
|
input: "foo +a bbb:",
|
|
|
|
offset: 7,
|
|
|
|
line: 0,
|
|
|
|
column: 7,
|
|
|
|
width: 3,
|
|
|
|
kind: ParameterFollowsVariadicParameter{parameter: "bbb"},
|
2019-04-11 23:58:08 -07:00
|
|
|
}
|
|
|
|
|
2019-11-07 10:55:15 -08:00
|
|
|
error! {
|
|
|
|
name: concatination_in_default,
|
|
|
|
input: "foo a=c+d e:",
|
|
|
|
offset: 10,
|
|
|
|
line: 0,
|
|
|
|
column: 10,
|
|
|
|
width: 1,
|
|
|
|
kind: ParameterFollowsVariadicParameter{parameter: "e"},
|
2019-04-11 23:58:08 -07:00
|
|
|
}
|
2019-11-10 23:17:47 -08:00
|
|
|
|
|
|
|
error! {
|
|
|
|
name: set_shell_empty,
|
|
|
|
input: "set shell := []",
|
|
|
|
offset: 14,
|
|
|
|
line: 0,
|
|
|
|
column: 14,
|
|
|
|
width: 1,
|
|
|
|
kind: UnexpectedToken {
|
|
|
|
expected: vec![StringCooked, StringRaw],
|
|
|
|
found: BracketR,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
error! {
|
|
|
|
name: set_shell_non_literal_first,
|
|
|
|
input: "set shell := ['bar' + 'baz']",
|
|
|
|
offset: 20,
|
|
|
|
line: 0,
|
|
|
|
column: 20,
|
|
|
|
width: 1,
|
|
|
|
kind: UnexpectedToken {
|
|
|
|
expected: vec![BracketR, Comma],
|
|
|
|
found: Plus,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
error! {
|
|
|
|
name: set_shell_non_literal_second,
|
|
|
|
input: "set shell := ['biz', 'bar' + 'baz']",
|
|
|
|
offset: 27,
|
|
|
|
line: 0,
|
|
|
|
column: 27,
|
|
|
|
width: 1,
|
|
|
|
kind: UnexpectedToken {
|
|
|
|
expected: vec![BracketR, Comma],
|
|
|
|
found: Plus,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
error! {
|
|
|
|
name: set_shell_bad_comma,
|
|
|
|
input: "set shell := ['bash',",
|
|
|
|
offset: 21,
|
|
|
|
line: 0,
|
|
|
|
column: 21,
|
|
|
|
width: 0,
|
|
|
|
kind: UnexpectedToken {
|
|
|
|
expected: vec![BracketR, StringCooked, StringRaw],
|
|
|
|
found: Eof,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
error! {
|
|
|
|
name: set_shell_bad,
|
|
|
|
input: "set shell := ['bash'",
|
|
|
|
offset: 20,
|
|
|
|
line: 0,
|
|
|
|
column: 20,
|
|
|
|
width: 0,
|
|
|
|
kind: UnexpectedToken {
|
|
|
|
expected: vec![BracketR, Comma],
|
|
|
|
found: Eof,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
error! {
|
|
|
|
name: set_unknown,
|
|
|
|
input: "set shall := []",
|
|
|
|
offset: 4,
|
|
|
|
line: 0,
|
|
|
|
column: 4,
|
|
|
|
width: 5,
|
|
|
|
kind: UnknownSetting {
|
|
|
|
setting: "shall",
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
error! {
|
|
|
|
name: set_shell_non_string,
|
|
|
|
input: "set shall := []",
|
|
|
|
offset: 4,
|
|
|
|
line: 0,
|
|
|
|
column: 4,
|
|
|
|
width: 5,
|
|
|
|
kind: UnknownSetting {
|
|
|
|
setting: "shall",
|
|
|
|
},
|
|
|
|
}
|
2017-11-16 23:30:08 -08:00
|
|
|
}
|