Refactor some other stuff (#252)
parent 2b6b715528
commit 13a124d659

97 Cargo.lock (generated)
@@ -1,27 +1,9 @@
|
||||
[root]
|
||||
name = "just"
|
||||
version = "0.3.3"
|
||||
dependencies = [
|
||||
"ansi_term 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"atty 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"brev 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"clap 2.26.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"edit-distance 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"itertools 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.32 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"utilities 0.0.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "aho-corasick"
|
||||
version = "0.6.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"memchr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"memchr 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -35,7 +17,7 @@ version = "0.2.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.32 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"termion 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
@@ -61,15 +43,14 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "clap"
|
||||
version = "2.26.2"
|
||||
version = "2.27.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"ansi_term 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"atty 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"bitflags 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"term_size 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"textwrap 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"textwrap 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"vec_map 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
@@ -81,7 +62,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "either"
|
||||
version = "1.2.0"
|
||||
version = "1.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
@@ -110,7 +91,25 @@ name = "itertools"
|
||||
version = "0.6.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"either 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"either 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "just"
|
||||
version = "0.3.3"
|
||||
dependencies = [
|
||||
"ansi_term 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"atty 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"brev 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"clap 2.27.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"edit-distance 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"itertools 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"utilities 0.0.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -124,29 +123,29 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "lazy_static"
|
||||
version = "0.2.9"
|
||||
version = "0.2.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.2.32"
|
||||
version = "0.2.33"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "memchr"
|
||||
version = "1.0.1"
|
||||
version = "1.0.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"libc 0.2.32 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand"
|
||||
version = "0.3.17"
|
||||
version = "0.3.18"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"fuchsia-zircon 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.32 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -168,7 +167,7 @@ version = "0.2.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"aho-corasick 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"memchr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"memchr 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"regex-syntax 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"thread_local 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@@ -189,17 +188,7 @@ name = "tempdir"
|
||||
version = "0.3.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"rand 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "term_size"
|
||||
version = "0.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.32 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -207,17 +196,16 @@ name = "termion"
|
||||
version = "1.5.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"libc 0.2.32 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"redox_syscall 0.1.31 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"redox_termios 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "textwrap"
|
||||
version = "0.8.0"
|
||||
version = "0.9.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"term_size 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
@@ -226,7 +214,7 @@ name = "thread_local"
|
||||
version = "0.3.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"lazy_static 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
@@ -279,27 +267,26 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
"checksum bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "aad18937a628ec6abcd26d1489012cc0e18c21798210f491af69ded9b881106d"
|
||||
"checksum bitflags 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4efd02e230a02e18f92fc2735f44597385ed02ad8f831e7c1c1156ee5e1ab3a5"
|
||||
"checksum brev 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "d85c3b7957223c752ff78ffd20a1806b0c7262d9aef85ed470546f16b56a5bb2"
|
||||
"checksum clap 2.26.2 (registry+https://github.com/rust-lang/crates.io-index)" = "3451e409013178663435d6f15fdb212f14ee4424a3d74f979d081d0a66b6f1f2"
|
||||
"checksum clap 2.27.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1b8c532887f1a292d17de05ae858a8fe50a301e196f9ef0ddb7ccd0d1d00f180"
|
||||
"checksum edit-distance 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6a34f5204fbc13582de418611cf3a7dcdd07c6d312a5b631597ba72c06b9d9c9"
|
||||
"checksum either 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "cbee135e9245416869bf52bd6ccc9b59e2482651510784e089b874272f02a252"
|
||||
"checksum either 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "740178ddf48b1a9e878e6d6509a1442a2d42fd2928aae8e7a6f8a36fb01981b3"
|
||||
"checksum fuchsia-zircon 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f6c0581a4e363262e52b87f59ee2afe3415361c6ec35e665924eb08afe8ff159"
|
||||
"checksum fuchsia-zircon-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "43f3795b4bae048dc6123a6b972cadde2e676f9ded08aef6bb77f5f157684a82"
|
||||
"checksum glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8be18de09a56b60ed0edf84bc9df007e30040691af7acd1c41874faac5895bfb"
|
||||
"checksum itertools 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "d3f2be4da1690a039e9ae5fd575f706a63ad5a2120f161b1d653c9da3930dd21"
|
||||
"checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
|
||||
"checksum lazy_static 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)" = "c9e5e58fa1a4c3b915a561a78a22ee0cac6ab97dca2504428bc1cb074375f8d5"
|
||||
"checksum libc 0.2.32 (registry+https://github.com/rust-lang/crates.io-index)" = "56cce3130fd040c28df6f495c8492e5ec5808fb4c9093c310df02b0c8f030148"
|
||||
"checksum memchr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1dbccc0e46f1ea47b9f17e6d67c5a96bd27030519c519c9c91327e31275a47b4"
|
||||
"checksum rand 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)" = "61efcbcd9fa8d8fbb07c84e34a8af18a1ff177b449689ad38a6e9457ecc7b2ae"
|
||||
"checksum lazy_static 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)" = "236eb37a62591d4a41a89b7763d7de3e06ca02d5ab2815446a8bae5d2f8c2d57"
|
||||
"checksum libc 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)" = "5ba3df4dcb460b9dfbd070d41c94c19209620c191b0340b929ce748a2bcd42d2"
|
||||
"checksum memchr 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "148fab2e51b4f1cfc66da2a7c32981d1d3c083a803978268bb11fe4b86925e7a"
|
||||
"checksum rand 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)" = "6475140dfd8655aeb72e1fd4b7a1cc1c202be65d71669476e392fe62532b9edd"
|
||||
"checksum redox_syscall 0.1.31 (registry+https://github.com/rust-lang/crates.io-index)" = "8dde11f18c108289bef24469638a04dce49da56084f2d50618b226e47eb04509"
|
||||
"checksum redox_termios 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7e891cfe48e9100a70a3b6eb652fef28920c117d366339687bd5576160db0f76"
|
||||
"checksum regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1731164734096285ec2a5ec7fea5248ae2f5485b3feeb0115af4fda2183b2d1b"
|
||||
"checksum regex-syntax 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ad890a5eef7953f55427c50575c680c42841653abd2b028b68cd223d157f62db"
|
||||
"checksum strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b4d15c810519a91cf877e7e36e63fe068815c678181439f2f29e2562147c3694"
|
||||
"checksum tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "87974a6f5c1dfb344d733055601650059a3363de2a6104819293baff662132d6"
|
||||
"checksum term_size 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2b6b55df3198cc93372e85dd2ed817f0e38ce8cc0f22eb32391bfad9c4bf209"
|
||||
"checksum termion 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "689a3bdfaab439fd92bc87df5c4c78417d3cbe537487274e9b0b2dce76e92096"
|
||||
"checksum textwrap 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "df8e08afc40ae3459e4838f303e465aa50d823df8d7f83ca88108f6d3afe7edd"
|
||||
"checksum textwrap 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c0b59b6b4b44d867f1370ef1bd91bfb262bf07bf0ae65c202ea2fbc16153b693"
|
||||
"checksum thread_local 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "1697c4b57aeeb7a536b647165a2825faddffb1d3bad386d507709bd51a90bb14"
|
||||
"checksum unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "bf3a113775714a22dcb774d8ea3655c53a32debae63a063acc00a91cc586245f"
|
||||
"checksum unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "382810877fe448991dfc7f0dd6e3ae5d58088fd0ea5e35189655f84e6814fa56"
|
||||
|
22 Cargo.toml
@@ -8,17 +8,17 @@ homepage = "https://github.com/casey/just"
readme = "crates-io-readme.md"

[dependencies]
ansi_term = "^0.9.0"
atty = "^0.2.1"
brev = "^0.1.6"
clap = "^2.0.0"
edit-distance = "^2.0.0"
itertools = "^0.6.2"
lazy_static = "^0.2.1"
libc = "^0.2.21"
regex = "^0.2.2"
tempdir = "^0.3.5"
unicode-width = "^0.1.3"
ansi_term = "0.9.0"
atty = "0.2.1"
brev = "0.1.6"
clap = "2.0.0"
edit-distance = "2.0.0"
itertools = "0.6.2"
lazy_static = "0.2.1"
libc = "0.2.21"
regex = "0.2.2"
tempdir = "0.3.5"
unicode-width = "0.1.3"

[dev-dependencies.utilities]
path = "utilities"
@@ -507,7 +507,7 @@ If the first argument passed to `just` contains a `/`, then the following occurs

This may seem a little strange, but it's useful if you wish to run a command in a justfile that is in a subdirectory.

For example, if you are in a directory which contains a subdirectory named `foo`, which contains justfile with the recipe `build`, which is also the default recipe, the following are all equivalent:
For example, if you are in a directory which contains a subdirectory named `foo`, which contains a justfile with the recipe `build`, which is also the default recipe, the following are all equivalent:

```sh
$ (cd foo && just build)
@@ -1 +1,3 @@
cyclomatic-complexity-threshold = 1337

doc-valid-idents = ["FreeBSD"]
2 justfile
@@ -62,7 +62,7 @@ install-dev-deps:

# everyone's favorite animate paper clip
clippy: lint
rustup run nightly cargo clippy -- -D clippy
cargo +nightly clippy -- -D clippy

# count non-empty lines of code
sloc:
@@ -7,7 +7,7 @@ pub fn evaluate_assignments<'a>(
overrides: &Map<&str, &str>,
quiet: bool,
shell: &'a str,
) -> Result<Map<&'a str, String>, RuntimeError<'a>> {
) -> RunResult<'a, Map<&'a str, String>> {
let mut evaluator = AssignmentEvaluator {
assignments: assignments,
evaluated: empty(),
@@ -32,7 +32,7 @@ fn run_backtick<'a, 'b>(
exports: &Set<&'a str>,
quiet: bool,
shell: &'b str,
) -> Result<String, RuntimeError<'a>> {
) -> RunResult<'a, String> {
let mut cmd = Command::new(shell);

cmd.export_environment_variables(scope, exports)?;
@@ -46,7 +46,8 @@ fn run_backtick<'a, 'b>(
process::Stdio::inherit()
});

brev::output(cmd).map_err(|output_error| RuntimeError::Backtick{token: token.clone(), output_error})
brev::output(cmd)
.map_err(|output_error| RuntimeError::Backtick{token: token.clone(), output_error})
}

pub struct AssignmentEvaluator<'a: 'b, 'b> {
@@ -64,7 +65,7 @@ impl<'a, 'b> AssignmentEvaluator<'a, 'b> {
&mut self,
line: &[Fragment<'a>],
arguments: &Map<&str, Cow<str>>
) -> Result<String, RuntimeError<'a>> {
) -> RunResult<'a, String> {
let mut evaluated = String::new();
for fragment in line {
match *fragment {
@@ -77,7 +78,7 @@ impl<'a, 'b> AssignmentEvaluator<'a, 'b> {
Ok(evaluated)
}

fn evaluate_assignment(&mut self, name: &'a str) -> Result<(), RuntimeError<'a>> {
fn evaluate_assignment(&mut self, name: &'a str) -> RunResult<'a, ()> {
if self.evaluated.contains_key(name) {
return Ok(());
}
@@ -102,7 +103,7 @@ impl<'a, 'b> AssignmentEvaluator<'a, 'b> {
&mut self,
expression: &Expression<'a>,
arguments: &Map<&str, Cow<str>>
) -> Result<String, RuntimeError<'a>> {
) -> RunResult<'a, String> {
Ok(match *expression {
Expression::Variable{name, ..} => {
if self.evaluated.contains_key(name) {
@@ -141,39 +142,37 @@ mod test {
|
||||
use testing::parse_success;
|
||||
use Configuration;
|
||||
|
||||
#[test]
|
||||
fn backtick_code() {
|
||||
match parse_success("a:\n echo {{`f() { return 100; }; f`}}")
|
||||
.run(&["a"], &Default::default()).unwrap_err() {
|
||||
RuntimeError::Backtick{token, output_error: OutputError::Code(code)} => {
|
||||
assert_eq!(code, 100);
|
||||
assert_eq!(token.lexeme, "`f() { return 100; }; f`");
|
||||
},
|
||||
other => panic!("expected a code run error, but got: {}", other),
|
||||
#[test]
|
||||
fn backtick_code() {
|
||||
match parse_success("a:\n echo {{`f() { return 100; }; f`}}")
|
||||
.run(&["a"], &Default::default()).unwrap_err() {
|
||||
RuntimeError::Backtick{token, output_error: OutputError::Code(code)} => {
|
||||
assert_eq!(code, 100);
|
||||
assert_eq!(token.lexeme, "`f() { return 100; }; f`");
|
||||
},
|
||||
other => panic!("expected a code run error, but got: {}", other),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn export_assignment_backtick() {
|
||||
let text = r#"
|
||||
#[test]
|
||||
fn export_assignment_backtick() {
|
||||
let text = r#"
|
||||
export exported_variable = "A"
|
||||
b = `echo $exported_variable`
|
||||
|
||||
recipe:
|
||||
echo {{b}}
|
||||
"#;
|
||||
let options = Configuration {
|
||||
quiet: true,
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let options = Configuration {
|
||||
quiet: true,
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
match parse_success(text).run(&["recipe"], &options).unwrap_err() {
|
||||
RuntimeError::Backtick{token, output_error: OutputError::Code(_)} => {
|
||||
assert_eq!(token.lexeme, "`echo $exported_variable`");
|
||||
},
|
||||
other => panic!("expected a backtick code errror, but got: {}", other),
|
||||
match parse_success(text).run(&["recipe"], &options).unwrap_err() {
|
||||
RuntimeError::Backtick{token, output_error: OutputError::Code(_)} => {
|
||||
assert_eq!(token.lexeme, "`echo $exported_variable`");
|
||||
},
|
||||
other => panic!("expected a backtick code errror, but got: {}", other),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
@@ -1,9 +1,11 @@
use common::*;

use CompilationErrorKind::*;

pub fn resolve_assignments<'a>(
assignments: &Map<&'a str, Expression<'a>>,
assignment_tokens: &Map<&'a str, Token<'a>>,
) -> Result<(), CompilationError<'a>> {
) -> CompilationResult<'a, ()> {

let mut resolver = AssignmentResolver {
assignments: assignments,
@@ -29,7 +31,7 @@ struct AssignmentResolver<'a: 'b, 'b> {
}

impl<'a: 'b, 'b> AssignmentResolver<'a, 'b> {
fn resolve_assignment(&mut self, name: &'a str) -> Result<(), CompilationError<'a>> {
fn resolve_assignment(&mut self, name: &'a str) -> CompilationResult<'a, ()> {
if self.evaluated.contains(name) {
return Ok(());
}
@@ -48,13 +50,14 @@ impl<'a: 'b, 'b> AssignmentResolver<'a, 'b> {
line: 0,
column: 0,
width: None,
kind: CompilationErrorKind::Internal{message}
kind: Internal{message}
});
}
Ok(())
}

fn resolve_expression(&mut self, expression: &Expression<'a>) -> Result<(), CompilationError<'a>> {
fn resolve_expression(
&mut self, expression: &Expression<'a>) -> CompilationResult<'a, ()> {
match *expression {
Expression::Variable{name, ref token} => {
if self.evaluated.contains(name) {
@@ -62,14 +65,14 @@ impl<'a: 'b, 'b> AssignmentResolver<'a, 'b> {
} else if self.seen.contains(name) {
let token = &self.assignment_tokens[name];
self.stack.push(name);
return Err(token.error(CompilationErrorKind::CircularVariableDependency {
return Err(token.error(CircularVariableDependency {
variable: name,
circle: self.stack.clone(),
}));
} else if self.assignments.contains_key(name) {
self.resolve_assignment(name)?;
} else {
return Err(token.error(CompilationErrorKind::UndefinedVariable{variable: name}));
return Err(token.error(UndefinedVariable{variable: name}));
}
}
Expression::Concatination{ref lhs, ref rhs} => {
@@ -87,43 +90,46 @@ mod test {
use testing::parse_error;
use super::*;

#[test]
fn circular_variable_dependency() {
let text = "a = b\nb = a";
parse_error(text, CompilationError {
text: text,
index: 0,
line: 0,
column: 0,
width: Some(1),
kind: CompilationErrorKind::CircularVariableDependency{variable: "a", circle: vec!["a", "b", "a"]}
});
}
#[test]
fn circular_variable_dependency() {
let text = "a = b\nb = a";
let variable = "a";
let circle = vec!["a", "b", "a"];
parse_error(text, CompilationError {
text: text,
index: 0,
line: 0,
column: 0,
width: Some(1),
kind: CircularVariableDependency{variable, circle}
});
}

#[test]
fn self_variable_dependency() {
let text = "a = a";
let variable = "a";
let circle = vec!["a", "a"];
parse_error(text, CompilationError {
text: text,
index: 0,
line: 0,
column: 0,
width: Some(1),
kind: CircularVariableDependency{variable, circle}
});
}

#[test]
fn self_variable_dependency() {
let text = "a = a";
parse_error(text, CompilationError {
text: text,
index: 0,
line: 0,
column: 0,
width: Some(1),
kind: CompilationErrorKind::CircularVariableDependency{variable: "a", circle: vec!["a", "a"]}
});
}
#[test]
fn unknown_expression_variable() {
let text = "x = yy";
parse_error(text, CompilationError {
text: text,
index: 4,
line: 0,
column: 4,
width: Some(2),
kind: CompilationErrorKind::UndefinedVariable{variable: "yy"},
});
}

#[test]
fn unknown_expression_variable() {
let text = "x = yy";
parse_error(text, CompilationError {
text: text,
index: 4,
line: 0,
column: 4,
width: Some(2),
kind: UndefinedVariable{variable: "yy"},
});
}
}
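The assignment_resolver.rs hunks above apply one small pattern twice: a single `use CompilationErrorKind::*;` so error variants can be named directly at each construction site, and field-init shorthand where a local variable already carries the field's name. Below is a minimal, self-contained sketch of that pattern; `ErrorKind` is a stand-in for the crate's `CompilationErrorKind`, not its real definition.

```rust
// Sketch only: `ErrorKind` stands in for the crate's CompilationErrorKind.
#[derive(Debug, PartialEq)]
enum ErrorKind<'a> {
    Internal { message: String },
    UndefinedVariable { variable: &'a str },
}

// Importing the variants once keeps each construction site short.
use self::ErrorKind::*;

fn undefined<'a>(variable: &'a str) -> ErrorKind<'a> {
    // Field-init shorthand: the local `variable` fills the field `variable`,
    // matching the diff's `CircularVariableDependency{variable, circle}`.
    UndefinedVariable { variable }
}

fn main() {
    assert_eq!(undefined("yy"), UndefinedVariable { variable: "yy" });
    let _ = Internal { message: "unexpected end of token stream".to_string() };
}
```

The trade-off is the usual one for glob imports: shorter construction sites in exchange for variant names that are no longer visibly scoped.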
@@ -5,7 +5,7 @@ pub trait CommandExt {
&mut self,
scope: &Map<&'a str, String>,
exports: &Set<&'a str>
) -> Result<(), RuntimeError<'a>>;
) -> RunResult<'a, ()>;
}

impl CommandExt for Command {
@@ -13,7 +13,7 @@ impl CommandExt for Command {
&mut self,
scope: &Map<&'a str, String>,
exports: &Set<&'a str>
) -> Result<(), RuntimeError<'a>> {
) -> RunResult<'a, ()> {
for name in exports {
if let Some(value) = scope.get(name) {
self.env(name, value);
@@ -2,6 +2,8 @@ use common::*;

use misc::{Or, write_error_context, show_whitespace};

pub type CompilationResult<'a, T> = Result<T, CompilationError<'a>>;

#[derive(Debug, PartialEq)]
pub struct CompilationError<'a> {
pub text: &'a str,
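The `CompilationResult` alias added here is one half of the pattern this commit applies throughout: the runtime-side signatures in the diff use a matching `RunResult` alias, whose definition (presumably in `runtime_error.rs`) is not shown here. A small, self-contained sketch of how such an alias works, using a placeholder error type rather than the crate's real `RuntimeError`:

```rust
// Sketch with placeholder types: `Error<'a>` stands in for RuntimeError<'a>.
#[derive(Debug)]
pub struct Error<'a> {
    pub message: &'a str,
}

// The alias pattern used throughout this commit: RunResult<'a, T> reads the
// same as Result<T, Error<'a>> but avoids repeating the error type.
pub type RunResult<'a, T> = Result<T, Error<'a>>;

// A signature like `) -> Result<(), RuntimeError<'a>>` becomes
// `) -> RunResult<'a, ()>` once the alias exists.
fn check<'a>(input: &'a str) -> RunResult<'a, ()> {
    if input.is_empty() {
        Err(Error { message: "empty input" })
    } else {
        Ok(())
    }
}

fn main() {
    assert!(check("just").is_ok());
    assert!(check("").is_err());
}
```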
@@ -1,6 +1,6 @@
use common::*;

pub const DEFAULT_SHELL: &'static str = "sh";
pub const DEFAULT_SHELL: &str = "sh";

pub struct Configuration<'a> {
pub dry_run: bool,
@@ -7,7 +7,7 @@ pub struct CookedString<'a> {
}

impl<'a> CookedString<'a> {
pub fn new(token: &Token<'a>) -> Result<CookedString<'a>, CompilationError<'a>> {
pub fn new(token: &Token<'a>) -> CompilationResult<'a, CookedString<'a>> {
let raw = &token.lexeme[1..token.lexeme.len()-1];

if let TokenKind::RawString = token.kind {
229 src/justfile.rs
@@ -46,7 +46,7 @@ impl<'a, 'b> Justfile<'a> where 'a: 'b {
&'a self,
arguments: &[&'a str],
options: &Configuration<'a>,
) -> Result<(), RuntimeError<'a>> {
) -> RunResult<'a, ()> {
let unknown_overrides = options.overrides.keys().cloned()
.filter(|name| !self.assignments.contains_key(name))
.collect::<Vec<_>>();
@@ -126,7 +126,7 @@ impl<'a, 'b> Justfile<'a> where 'a: 'b {
scope: &Map<&'c str, String>,
ran: &mut Set<&'a str>,
options: &Configuration<'a>,
) -> Result<(), RuntimeError> {
) -> RunResult<()> {
for dependency_name in &recipe.dependencies {
if !ran.contains(dependency_name) {
self.run_recipe(&self.recipes[dependency_name], &[], scope, ran, options)?;
@@ -179,16 +179,16 @@ mod test {
|
||||
}
|
||||
|
||||
|
||||
#[test]
|
||||
fn run_shebang() {
|
||||
// this test exists to make sure that shebang recipes
|
||||
// run correctly. although this script is still
|
||||
// executed by a shell its behavior depends on the value of a
|
||||
// variable and continuing even though a command fails,
|
||||
// whereas in plain recipes variables are not available
|
||||
// in subsequent lines and execution stops when a line
|
||||
// fails
|
||||
let text = "
|
||||
#[test]
|
||||
fn run_shebang() {
|
||||
// this test exists to make sure that shebang recipes
|
||||
// run correctly. although this script is still
|
||||
// executed by a shell its behavior depends on the value of a
|
||||
// variable and continuing even though a command fails,
|
||||
// whereas in plain recipes variables are not available
|
||||
// in subsequent lines and execution stops when a line
|
||||
// fails
|
||||
let text = "
|
||||
a:
|
||||
#!/usr/bin/env sh
|
||||
code=200
|
||||
@@ -197,15 +197,16 @@ a:
|
||||
x
|
||||
";
|
||||
|
||||
match parse_success(text).run(&["a"], &Default::default()).unwrap_err() {
|
||||
RuntimeError::Code{recipe, line_number, code} => {
|
||||
assert_eq!(recipe, "a");
|
||||
assert_eq!(code, 200);
|
||||
assert_eq!(line_number, None);
|
||||
},
|
||||
other => panic!("expected a code run error, but got: {}", other),
|
||||
match parse_success(text).run(&["a"], &Default::default()).unwrap_err() {
|
||||
RuntimeError::Code{recipe, line_number, code} => {
|
||||
assert_eq!(recipe, "a");
|
||||
assert_eq!(code, 200);
|
||||
assert_eq!(line_number, None);
|
||||
},
|
||||
other => panic!("expected a code run error, but got: {}", other),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn code_error() {
|
||||
match parse_success("fail:\n @exit 100")
|
||||
@ -219,105 +220,105 @@ fn code_error() {
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn run_args() {
|
||||
let text = r#"
|
||||
#[test]
|
||||
fn run_args() {
|
||||
let text = r#"
|
||||
a return code:
|
||||
@x() { {{return}} {{code + "0"}}; }; x"#;
|
||||
|
||||
match parse_success(text).run(&["a", "return", "15"], &Default::default()).unwrap_err() {
|
||||
RuntimeError::Code{recipe, line_number, code} => {
|
||||
assert_eq!(recipe, "a");
|
||||
assert_eq!(code, 150);
|
||||
assert_eq!(line_number, Some(3));
|
||||
},
|
||||
other => panic!("expected a code run error, but got: {}", other),
|
||||
match parse_success(text).run(&["a", "return", "15"], &Default::default()).unwrap_err() {
|
||||
RuntimeError::Code{recipe, line_number, code} => {
|
||||
assert_eq!(recipe, "a");
|
||||
assert_eq!(code, 150);
|
||||
assert_eq!(line_number, Some(3));
|
||||
},
|
||||
other => panic!("expected a code run error, but got: {}", other),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn missing_some_arguments() {
|
||||
match parse_success("a b c d:").run(&["a", "b", "c"], &Default::default()).unwrap_err() {
|
||||
RuntimeError::ArgumentCountMismatch{recipe, found, min, max} => {
|
||||
assert_eq!(recipe, "a");
|
||||
assert_eq!(found, 2);
|
||||
assert_eq!(min, 3);
|
||||
assert_eq!(max, 3);
|
||||
},
|
||||
other => panic!("expected a code run error, but got: {}", other),
|
||||
#[test]
|
||||
fn missing_some_arguments() {
|
||||
match parse_success("a b c d:").run(&["a", "b", "c"], &Default::default()).unwrap_err() {
|
||||
RuntimeError::ArgumentCountMismatch{recipe, found, min, max} => {
|
||||
assert_eq!(recipe, "a");
|
||||
assert_eq!(found, 2);
|
||||
assert_eq!(min, 3);
|
||||
assert_eq!(max, 3);
|
||||
},
|
||||
other => panic!("expected a code run error, but got: {}", other),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn missing_some_arguments_variadic() {
|
||||
match parse_success("a b c +d:").run(&["a", "B", "C"], &Default::default()).unwrap_err() {
|
||||
RuntimeError::ArgumentCountMismatch{recipe, found, min, max} => {
|
||||
assert_eq!(recipe, "a");
|
||||
assert_eq!(found, 2);
|
||||
assert_eq!(min, 3);
|
||||
assert_eq!(max, usize::MAX - 1);
|
||||
},
|
||||
other => panic!("expected a code run error, but got: {}", other),
|
||||
#[test]
|
||||
fn missing_some_arguments_variadic() {
|
||||
match parse_success("a b c +d:").run(&["a", "B", "C"], &Default::default()).unwrap_err() {
|
||||
RuntimeError::ArgumentCountMismatch{recipe, found, min, max} => {
|
||||
assert_eq!(recipe, "a");
|
||||
assert_eq!(found, 2);
|
||||
assert_eq!(min, 3);
|
||||
assert_eq!(max, usize::MAX - 1);
|
||||
},
|
||||
other => panic!("expected a code run error, but got: {}", other),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn missing_all_arguments() {
|
||||
match parse_success("a b c d:\n echo {{b}}{{c}}{{d}}")
|
||||
.run(&["a"], &Default::default()).unwrap_err() {
|
||||
RuntimeError::ArgumentCountMismatch{recipe, found, min, max} => {
|
||||
assert_eq!(recipe, "a");
|
||||
assert_eq!(found, 0);
|
||||
assert_eq!(min, 3);
|
||||
assert_eq!(max, 3);
|
||||
},
|
||||
other => panic!("expected a code run error, but got: {}", other),
|
||||
#[test]
|
||||
fn missing_all_arguments() {
|
||||
match parse_success("a b c d:\n echo {{b}}{{c}}{{d}}")
|
||||
.run(&["a"], &Default::default()).unwrap_err() {
|
||||
RuntimeError::ArgumentCountMismatch{recipe, found, min, max} => {
|
||||
assert_eq!(recipe, "a");
|
||||
assert_eq!(found, 0);
|
||||
assert_eq!(min, 3);
|
||||
assert_eq!(max, 3);
|
||||
},
|
||||
other => panic!("expected a code run error, but got: {}", other),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn missing_some_defaults() {
|
||||
match parse_success("a b c d='hello':").run(&["a", "b"], &Default::default()).unwrap_err() {
|
||||
RuntimeError::ArgumentCountMismatch{recipe, found, min, max} => {
|
||||
assert_eq!(recipe, "a");
|
||||
assert_eq!(found, 1);
|
||||
assert_eq!(min, 2);
|
||||
assert_eq!(max, 3);
|
||||
},
|
||||
other => panic!("expected a code run error, but got: {}", other),
|
||||
#[test]
|
||||
fn missing_some_defaults() {
|
||||
match parse_success("a b c d='hello':").run(&["a", "b"], &Default::default()).unwrap_err() {
|
||||
RuntimeError::ArgumentCountMismatch{recipe, found, min, max} => {
|
||||
assert_eq!(recipe, "a");
|
||||
assert_eq!(found, 1);
|
||||
assert_eq!(min, 2);
|
||||
assert_eq!(max, 3);
|
||||
},
|
||||
other => panic!("expected a code run error, but got: {}", other),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn missing_all_defaults() {
|
||||
match parse_success("a b c='r' d='h':").run(&["a"], &Default::default()).unwrap_err() {
|
||||
RuntimeError::ArgumentCountMismatch{recipe, found, min, max} => {
|
||||
assert_eq!(recipe, "a");
|
||||
assert_eq!(found, 0);
|
||||
assert_eq!(min, 1);
|
||||
assert_eq!(max, 3);
|
||||
},
|
||||
other => panic!("expected a code run error, but got: {}", other),
|
||||
#[test]
|
||||
fn missing_all_defaults() {
|
||||
match parse_success("a b c='r' d='h':").run(&["a"], &Default::default()).unwrap_err() {
|
||||
RuntimeError::ArgumentCountMismatch{recipe, found, min, max} => {
|
||||
assert_eq!(recipe, "a");
|
||||
assert_eq!(found, 0);
|
||||
assert_eq!(min, 1);
|
||||
assert_eq!(max, 3);
|
||||
},
|
||||
other => panic!("expected a code run error, but got: {}", other),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn unknown_overrides() {
|
||||
let mut options: Configuration = Default::default();
|
||||
options.overrides.insert("foo", "bar");
|
||||
options.overrides.insert("baz", "bob");
|
||||
match parse_success("a:\n echo {{`f() { return 100; }; f`}}")
|
||||
.run(&["a"], &options).unwrap_err() {
|
||||
RuntimeError::UnknownOverrides{overrides} => {
|
||||
assert_eq!(overrides, &["baz", "foo"]);
|
||||
},
|
||||
other => panic!("expected a code run error, but got: {}", other),
|
||||
#[test]
|
||||
fn unknown_overrides() {
|
||||
let mut options: Configuration = Default::default();
|
||||
options.overrides.insert("foo", "bar");
|
||||
options.overrides.insert("baz", "bob");
|
||||
match parse_success("a:\n echo {{`f() { return 100; }; f`}}")
|
||||
.run(&["a"], &options).unwrap_err() {
|
||||
RuntimeError::UnknownOverrides{overrides} => {
|
||||
assert_eq!(overrides, &["baz", "foo"]);
|
||||
},
|
||||
other => panic!("expected a code run error, but got: {}", other),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn export_failure() {
|
||||
let text = r#"
|
||||
#[test]
|
||||
fn export_failure() {
|
||||
let text = r#"
|
||||
export foo = "a"
|
||||
baz = "c"
|
||||
export bar = "b"
|
||||
@@ -327,19 +328,17 @@ wut:
|
||||
echo $foo $bar $baz
|
||||
"#;
|
||||
|
||||
let options = Configuration {
|
||||
quiet: true,
|
||||
..Default::default()
|
||||
};
|
||||
let options = Configuration {
|
||||
quiet: true,
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
match parse_success(text).run(&["wut"], &options).unwrap_err() {
|
||||
RuntimeError::Code{code: _, line_number, recipe} => {
|
||||
assert_eq!(recipe, "wut");
|
||||
assert_eq!(line_number, Some(8));
|
||||
},
|
||||
other => panic!("expected a recipe code errror, but got: {}", other),
|
||||
match parse_success(text).run(&["wut"], &options).unwrap_err() {
|
||||
RuntimeError::Code{code: _, line_number, recipe} => {
|
||||
assert_eq!(recipe, "wut");
|
||||
assert_eq!(line_number, Some(8));
|
||||
},
|
||||
other => panic!("expected a recipe code errror, but got: {}", other),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
40 src/main.rs
@@ -10,28 +10,28 @@ extern crate regex;
extern crate tempdir;
extern crate unicode_width;

mod platform;
mod run;
mod color;
mod compilation_error;
mod runtime_error;
mod misc;
mod justfile;
mod recipe;
mod token;
mod parser;
mod tokenizer;
mod cooked_string;
mod recipe_resolver;
mod assignment_resolver;
mod assignment_evaluator;
mod assignment_resolver;
mod color;
mod command_ext;
mod compilation_error;
mod configuration;
mod parameter;
mod cooked_string;
mod expression;
mod fragment;
mod shebang;
mod command_ext;
mod justfile;
mod misc;
mod parameter;
mod parser;
mod platform;
mod range_ext;
mod recipe;
mod recipe_resolver;
mod run;
mod runtime_error;
mod shebang;
mod token;
mod tokenizer;

#[cfg(test)] mod testing;

@@ -54,7 +54,7 @@ mod common {

pub use assignment_evaluator::AssignmentEvaluator;
pub use command_ext::CommandExt;
pub use compilation_error::{CompilationError, CompilationErrorKind};
pub use compilation_error::{CompilationError, CompilationErrorKind, CompilationResult};
pub use configuration::Configuration;
pub use cooked_string::CookedString;
pub use expression::Expression;
@@ -64,14 +64,14 @@ mod common {
pub use parameter::Parameter;
pub use parser::Parser;
pub use recipe::Recipe;
pub use runtime_error::RuntimeError;
pub use runtime_error::{RuntimeError, RunResult};
pub use shebang::Shebang;
pub use token::{Token, TokenKind};
}

use common::*;

fn compile(text: &str) -> Result<Justfile, CompilationError> {
fn compile(text: &str) -> CompilationResult<Justfile> {
let tokens = tokenize(text)?;
let parser = Parser::new(text, tokens);
parser.justfile()
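The `mod common` block re-exported above acts as an internal prelude: each module starts with `use common::*;` and picks up the shared names, now including the `CompilationResult` and `RunResult` aliases. A rough, self-contained sketch of that arrangement, with placeholder items standing in for the crate's real modules:

```rust
// Sketch only: `compilation_error` and its contents are placeholders, not the
// crate's real module.
mod compilation_error {
    pub type CompilationResult<T> = Result<T, String>;
}

// The internal prelude: one module that re-exports whatever most of the crate
// needs, so every other file can simply `use common::*;`.
mod common {
    pub use super::compilation_error::CompilationResult;
}

use self::common::*;

// Mirrors the shape of the `compile` function above, with a placeholder body.
fn compile(text: &str) -> CompilationResult<usize> {
    Ok(text.len())
}

fn main() {
    assert_eq!(compile("a:"), Ok(2));
}
```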
747 src/parser.rs
@@ -1,7 +1,8 @@
use common::*;

use itertools;
use token::TokenKind::*;
use TokenKind::*;
use CompilationErrorKind::*;
use recipe_resolver::resolve_recipes;
use assignment_resolver::resolve_assignments;

@@ -76,7 +77,7 @@ impl<'a> Parser<'a> {
}

fn unexpected_token(&self, found: &Token<'a>, expected: &[TokenKind]) -> CompilationError<'a> {
found.error(CompilationErrorKind::UnexpectedToken {
found.error(UnexpectedToken {
expected: expected.to_vec(),
found: found.kind,
})
@@ -84,12 +85,12 @@ impl<'a> Parser<'a> {

fn recipe(
&mut self,
name: Token<'a>,
name: &Token<'a>,
doc: Option<Token<'a>>,
quiet: bool,
) -> Result<(), CompilationError<'a>> {
) -> CompilationResult<'a, ()> {
if let Some(recipe) = self.recipes.get(name.lexeme) {
return Err(name.error(CompilationErrorKind::DuplicateRecipe {
return Err(name.error(DuplicateRecipe {
recipe: recipe.name,
first: recipe.line_number
}));
@@ -113,13 +114,13 @@ impl<'a> Parser<'a> {
let variadic = plus.is_some();

if parsed_variadic_parameter {
return Err(parameter.error(CompilationErrorKind::ParameterFollowsVariadicParameter {
return Err(parameter.error(ParameterFollowsVariadicParameter {
parameter: parameter.lexeme,
}));
}

if parameters.iter().any(|p| p.name == parameter.lexeme) {
return Err(parameter.error(CompilationErrorKind::DuplicateParameter {
return Err(parameter.error(DuplicateParameter {
recipe: name.lexeme, parameter: parameter.lexeme
}));
}
@@ -137,7 +138,7 @@ impl<'a> Parser<'a> {
}

if parsed_parameter_with_default && default.is_none() {
return Err(parameter.error(CompilationErrorKind::RequiredParameterFollowsDefaultParameter{
return Err(parameter.error(RequiredParameterFollowsDefaultParameter{
parameter: parameter.lexeme,
}));
}
@@ -167,7 +168,7 @@ impl<'a> Parser<'a> {
let mut dependency_tokens = vec![];
while let Some(dependency) = self.accept(Name) {
if dependencies.contains(&dependency.lexeme) {
return Err(dependency.error(CompilationErrorKind::DuplicateDependency {
return Err(dependency.error(DuplicateDependency {
recipe: name.lexeme,
dependency: dependency.lexeme
}));
@@ -190,7 +191,7 @@ impl<'a> Parser<'a> {
continue;
}
if let Some(token) = self.expect(Line) {
return Err(token.error(CompilationErrorKind::Internal{
return Err(token.error(Internal{
message: format!("Expected a line but got {}", token.kind)
}))
}
@@ -207,7 +208,7 @@ impl<'a> Parser<'a> {
&& !lines.last().and_then(|line| line.last())
.map(Fragment::continuation).unwrap_or(false)
&& (token.lexeme.starts_with(' ') || token.lexeme.starts_with('\t')) {
return Err(token.error(CompilationErrorKind::ExtraLeadingWhitespace));
return Err(token.error(ExtraLeadingWhitespace));
}
}
fragments.push(Fragment::Text{text: token});
@@ -243,7 +244,7 @@ impl<'a> Parser<'a> {
Ok(())
}

fn expression(&mut self, interpolation: bool) -> Result<Expression<'a>, CompilationError<'a>> {
fn expression(&mut self, interpolation: bool) -> CompilationResult<'a, Expression<'a>> {
let first = self.tokens.next().unwrap();
let lhs = match first.kind {
Name => Expression::Variable {name: first.lexeme, token: first},
@@ -273,9 +274,9 @@ impl<'a> Parser<'a> {
}
}

fn assignment(&mut self, name: Token<'a>, export: bool) -> Result<(), CompilationError<'a>> {
fn assignment(&mut self, name: Token<'a>, export: bool) -> CompilationResult<'a, ()> {
if self.assignments.contains_key(name.lexeme) {
return Err(name.error(CompilationErrorKind::DuplicateVariable {variable: name.lexeme}));
return Err(name.error(DuplicateVariable {variable: name.lexeme}));
}
if export {
self.exports.insert(name.lexeme);
@@ -286,7 +287,7 @@ impl<'a> Parser<'a> {
Ok(())
}

pub fn justfile(mut self) -> Result<Justfile<'a>, CompilationError<'a>> {
pub fn justfile(mut self) -> CompilationResult<'a, Justfile<'a>> {
let mut doc = None;
loop {
match self.tokens.next() {
@@ -298,14 +299,14 @@ impl<'a> Parser<'a> {
}
Comment => {
if let Some(token) = self.expect_eol() {
return Err(token.error(CompilationErrorKind::Internal {
return Err(token.error(Internal {
message: format!("found comment followed by {}", token.kind),
}));
}
doc = Some(token);
}
At => if let Some(name) = self.accept(Name) {
self.recipe(name, doc, true)?;
self.recipe(&name, doc, true)?;
doc = None;
} else {
let unexpected = &self.tokens.next().unwrap();
@@ -318,14 +319,14 @@ impl<'a> Parser<'a> {
doc = None;
} else {
self.tokens.put_back(next);
self.recipe(token, doc, false)?;
self.recipe(&token, doc, false)?;
doc = None;
}
} else if self.accepted(Equals) {
self.assignment(token, false)?;
doc = None;
} else {
self.recipe(token, doc, false)?;
self.recipe(&token, doc, false)?;
doc = None;
},
_ => return Err(self.unexpected_token(&token, &[Name, At])),
@@ -336,7 +337,7 @@ impl<'a> Parser<'a> {
line: 0,
column: 0,
width: None,
kind: CompilationErrorKind::Internal {
kind: Internal {
message: "unexpected end of token stream".to_string()
}
}),
@@ -344,7 +345,7 @@ impl<'a> Parser<'a> {
}

if let Some(token) = self.tokens.next() {
return Err(token.error(CompilationErrorKind::Internal {
return Err(token.error(Internal {
message: format!("unexpected token remaining after parsing completed: {:?}", token.kind)
}))
}
@@ -354,7 +355,7 @@ impl<'a> Parser<'a> {
for recipe in self.recipes.values() {
for parameter in &recipe.parameters {
if self.assignments.contains_key(parameter.token.lexeme) {
return Err(parameter.token.error(CompilationErrorKind::ParameterShadowsVariable {
return Err(parameter.token.error(ParameterShadowsVariable {
parameter: parameter.token.lexeme
}));
}
@@ -362,7 +363,7 @@ impl<'a> Parser<'a> {

for dependency in &recipe.dependency_tokens {
if !self.recipes[dependency.lexeme].parameters.is_empty() {
return Err(dependency.error(CompilationErrorKind::DependencyHasParameters {
return Err(dependency.error(DependencyHasParameters {
recipe: recipe.name,
dependency: dependency.lexeme,
}));
@@ -387,78 +388,83 @@ mod test {
|
||||
use testing::parse_success;
|
||||
use testing::parse_error;
|
||||
|
||||
fn parse_summary(input: &str, output: &str) {
|
||||
let justfile = parse_success(input);
|
||||
let s = format!("{:#}", justfile);
|
||||
if s != output {
|
||||
println!("got:\n\"{}\"\n", s);
|
||||
println!("\texpected:\n\"{}\"", output);
|
||||
assert_eq!(s, output);
|
||||
macro_rules! summary_test {
|
||||
($name:ident, $input:expr, $expected:expr $(,)*) => {
|
||||
#[test]
|
||||
fn $name() {
|
||||
let input = $input;
|
||||
let expected = $expected;
|
||||
let justfile = parse_success(input);
|
||||
let actual = format!("{:#}", justfile);
|
||||
if actual != expected {
|
||||
println!("got:\n\"{}\"\n", actual);
|
||||
println!("\texpected:\n\"{}\"", expected);
|
||||
assert_eq!(actual, expected);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_empty() {
|
||||
parse_summary("
|
||||
summary_test!{parse_empty,
|
||||
"
|
||||
|
||||
# hello
|
||||
|
||||
|
||||
", "");
|
||||
}
|
||||
",
|
||||
"",
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_string_default() {
|
||||
parse_summary(r#"
|
||||
summary_test!{parse_string_default,
|
||||
r#"
|
||||
|
||||
foo a="b\t":
|
||||
|
||||
|
||||
"#, r#"foo a='b\t':"#);
|
||||
}
|
||||
"#,
|
||||
r#"foo a='b\t':"#,
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_variadic() {
|
||||
parse_summary(r#"
|
||||
summary_test!{parse_variadic,
|
||||
r#"
|
||||
|
||||
foo +a:
|
||||
|
||||
|
||||
"#, r#"foo +a:"#);
|
||||
}
|
||||
"#,
|
||||
r#"foo +a:"#,
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_variadic_string_default() {
|
||||
parse_summary(r#"
|
||||
summary_test!{parse_variadic_string_default,
|
||||
r#"
|
||||
|
||||
foo +a="Hello":
|
||||
|
||||
|
||||
"#, r#"foo +a='Hello':"#);
|
||||
}
|
||||
"#,
|
||||
r#"foo +a='Hello':"#,
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_raw_string_default() {
|
||||
parse_summary(r#"
|
||||
summary_test!{parse_raw_string_default,
|
||||
r#"
|
||||
|
||||
foo a='b\t':
|
||||
|
||||
|
||||
"#, r#"foo a='b\\t':"#);
|
||||
}
|
||||
"#,
|
||||
r#"foo a='b\\t':"#,
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_export() {
|
||||
parse_summary(r#"
|
||||
summary_test!{parse_export,
|
||||
r#"
|
||||
export a = "hello"
|
||||
|
||||
"#, r#"export a = "hello""#);
|
||||
}
|
||||
"#,
|
||||
r#"export a = "hello""#,
|
||||
}
|
||||
|
||||
|
||||
#[test]
|
||||
fn parse_complex() {
|
||||
parse_summary("
|
||||
summary_test!{parse_complex,
|
||||
"
|
||||
x:
|
||||
y:
|
||||
z:
|
||||
@ -472,7 +478,8 @@ hello a b c : x y z #hello
|
||||
1
|
||||
2
|
||||
3
|
||||
", "bar = foo
|
||||
",
|
||||
"bar = foo
|
||||
|
||||
foo = \"xx\"
|
||||
|
||||
@ -490,359 +497,349 @@ x:
|
||||
|
||||
y:
|
||||
|
||||
z:");
|
||||
}
|
||||
z:"
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_shebang() {
|
||||
parse_summary("
|
||||
summary_test!{parse_shebang,
|
||||
"
|
||||
practicum = 'hello'
|
||||
install:
|
||||
\t#!/bin/sh
|
||||
\tif [[ -f {{practicum}} ]]; then
|
||||
\t\treturn
|
||||
\tfi
|
||||
", "practicum = \"hello\"
|
||||
",
|
||||
"practicum = \"hello\"
|
||||
|
||||
install:
|
||||
#!/bin/sh
|
||||
if [[ -f {{practicum}} ]]; then
|
||||
\treturn
|
||||
fi"
|
||||
);
|
||||
}
|
||||
fi",
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_assignments() {
|
||||
parse_summary(
|
||||
r#"a = "0"
|
||||
summary_test!{parse_assignments,
|
||||
r#"a = "0"
|
||||
c = a + b + a + b
|
||||
b = "1"
|
||||
"#,
|
||||
|
||||
r#"a = "0"
|
||||
r#"a = "0"
|
||||
|
||||
b = "1"
|
||||
|
||||
c = a + b + a + b"#);
|
||||
}
|
||||
c = a + b + a + b"#,
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_assignment_backticks() {
|
||||
parse_summary(
|
||||
"a = `echo hello`
|
||||
summary_test!{parse_assignment_backticks,
|
||||
"a = `echo hello`
|
||||
c = a + b + a + b
|
||||
b = `echo goodbye`",
|
||||
|
||||
"a = `echo hello`
|
||||
"a = `echo hello`
|
||||
|
||||
b = `echo goodbye`
|
||||
|
||||
c = a + b + a + b");
|
||||
}
|
||||
c = a + b + a + b",
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_interpolation_backticks() {
|
||||
parse_summary(
|
||||
r#"a:
|
||||
summary_test!{parse_interpolation_backticks,
|
||||
r#"a:
|
||||
echo {{ `echo hello` + "blarg" }} {{ `echo bob` }}"#,
|
||||
r#"a:
|
||||
r#"a:
|
||||
echo {{`echo hello` + "blarg"}} {{`echo bob`}}"#,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn missing_colon() {
|
||||
let text = "a b c\nd e f";
|
||||
parse_error(text, CompilationError {
|
||||
text: text,
|
||||
index: 5,
|
||||
line: 0,
|
||||
column: 5,
|
||||
width: Some(1),
|
||||
kind: CompilationErrorKind::UnexpectedToken{expected: vec![Name, Plus, Colon], found: Eol},
|
||||
});
|
||||
}
|
||||
summary_test!{eof_test,
|
||||
"x:\ny:\nz:\na b c: x y z",
|
||||
"a b c: x y z\n\nx:\n\ny:\n\nz:",
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn missing_default_eol() {
|
||||
let text = "hello arg=\n";
|
||||
parse_error(text, CompilationError {
|
||||
text: text,
|
||||
index: 10,
|
||||
line: 0,
|
||||
column: 10,
|
||||
width: Some(1),
|
||||
kind: CompilationErrorKind::UnexpectedToken{expected: vec![StringToken, RawString], found: Eol},
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn missing_default_eof() {
|
||||
let text = "hello arg=";
|
||||
parse_error(text, CompilationError {
|
||||
text: text,
|
||||
index: 10,
|
||||
line: 0,
|
||||
column: 10,
|
||||
width: Some(0),
|
||||
kind: CompilationErrorKind::UnexpectedToken{expected: vec![StringToken, RawString], found: Eof},
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn missing_default_colon() {
|
||||
let text = "hello arg=:";
|
||||
parse_error(text, CompilationError {
|
||||
text: text,
|
||||
index: 10,
|
||||
line: 0,
|
||||
column: 10,
|
||||
width: Some(1),
|
||||
kind: CompilationErrorKind::UnexpectedToken{expected: vec![StringToken, RawString], found: Colon},
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn missing_default_backtick() {
|
||||
let text = "hello arg=`hello`";
|
||||
parse_error(text, CompilationError {
|
||||
text: text,
|
||||
index: 10,
|
||||
line: 0,
|
||||
column: 10,
|
||||
width: Some(7),
|
||||
kind: CompilationErrorKind::UnexpectedToken{expected: vec![StringToken, RawString], found: Backtick},
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parameter_after_variadic() {
|
||||
let text = "foo +a bbb:";
|
||||
parse_error(text, CompilationError {
|
||||
text: text,
|
||||
index: 7,
|
||||
line: 0,
|
||||
column: 7,
|
||||
width: Some(3),
|
||||
kind: CompilationErrorKind::ParameterFollowsVariadicParameter{parameter: "bbb"}
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn required_after_default() {
|
||||
let text = "hello arg='foo' bar:";
|
||||
parse_error(text, CompilationError {
|
||||
text: text,
|
||||
index: 16,
|
||||
line: 0,
|
||||
column: 16,
|
||||
width: Some(3),
|
||||
kind: CompilationErrorKind::RequiredParameterFollowsDefaultParameter{parameter: "bar"},
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn missing_eol() {
|
||||
let text = "a b c: z =";
|
||||
parse_error(text, CompilationError {
|
||||
text: text,
|
||||
index: 9,
|
||||
line: 0,
|
||||
column: 9,
|
||||
width: Some(1),
|
||||
kind: CompilationErrorKind::UnexpectedToken{expected: vec![Name, Eol, Eof], found: Equals},
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn eof_test() {
|
||||
parse_summary("x:\ny:\nz:\na b c: x y z", "a b c: x y z\n\nx:\n\ny:\n\nz:");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn duplicate_parameter() {
|
||||
let text = "a b b:";
|
||||
parse_error(text, CompilationError {
|
||||
text: text,
|
||||
index: 4,
|
||||
line: 0,
|
||||
column: 4,
|
||||
width: Some(1),
|
||||
kind: CompilationErrorKind::DuplicateParameter{recipe: "a", parameter: "b"}
});
}

#[test]
fn parameter_shadows_varible() {
let text = "foo = \"h\"\na foo:";
parse_error(text, CompilationError {
text: text,
index: 12,
line: 1,
column: 2,
width: Some(3),
kind: CompilationErrorKind::ParameterShadowsVariable{parameter: "foo"}
});
}

#[test]
fn dependency_has_parameters() {
let text = "foo arg:\nb: foo";
parse_error(text, CompilationError {
text: text,
index: 12,
line: 1,
column: 3,
width: Some(3),
kind: CompilationErrorKind::DependencyHasParameters{recipe: "b", dependency: "foo"}
});
}


#[test]
fn duplicate_dependency() {
let text = "a b c: b c z z";
parse_error(text, CompilationError {
text: text,
index: 13,
line: 0,
column: 13,
width: Some(1),
kind: CompilationErrorKind::DuplicateDependency{recipe: "a", dependency: "z"}
});
}

#[test]
fn duplicate_recipe() {
let text = "a:\nb:\na:";
parse_error(text, CompilationError {
text: text,
index: 6,
line: 2,
column: 0,
width: Some(1),
kind: CompilationErrorKind::DuplicateRecipe{recipe: "a", first: 0}
});
}

#[test]
fn duplicate_variable() {
let text = "a = \"0\"\na = \"0\"";
parse_error(text, CompilationError {
text: text,
index: 8,
line: 1,
column: 0,
width: Some(1),
kind: CompilationErrorKind::DuplicateVariable{variable: "a"}
});
}

#[test]
fn string_quote_escape() {
parse_summary(
summary_test!{string_quote_escape,
r#"a = "hello\"""#,
r#"a = "hello\"""#
);
}
r#"a = "hello\"""#,
}

#[test]
fn string_escapes() {
parse_summary(
summary_test!{string_escapes,
r#"a = "\n\t\r\"\\""#,
r#"a = "\n\t\r\"\\""#
);
}
r#"a = "\n\t\r\"\\""#,
}

#[test]
fn parameters() {
parse_summary(
"a b c:
summary_test!{parameters,
"a b c:
{{b}} {{c}}",
"a b c:
"a b c:
{{b}} {{c}}",
);
}
}

#[test]
fn missing_colon() {
let text = "a b c\nd e f";
parse_error(text, CompilationError {
text: text,
index: 5,
line: 0,
column: 5,
width: Some(1),
kind: UnexpectedToken{expected: vec![Name, Plus, Colon], found: Eol},
});
}

#[test]
fn missing_default_eol() {
let text = "hello arg=\n";
let expected = vec![StringToken, RawString];
let found = Eol;
parse_error(text, CompilationError {
text: text,
index: 10,
line: 0,
column: 10,
width: Some(1),
kind: UnexpectedToken{expected, found},
});
}

#[test]
fn extra_whitespace() {
let text = "a:\n blah\n blarg";
parse_error(text, CompilationError {
text: text,
index: 10,
line: 2,
column: 1,
width: Some(6),
kind: CompilationErrorKind::ExtraLeadingWhitespace
});
#[test]
fn missing_default_eof() {
let text = "hello arg=";
let expected = vec![StringToken, RawString];
let found = Eof;
parse_error(text, CompilationError {
text: text,
index: 10,
line: 0,
column: 10,
width: Some(0),
kind: UnexpectedToken{expected, found},
});
}

// extra leading whitespace is okay in a shebang recipe
parse_success("a:\n #!\n print(1)");
}
#[test]
fn interpolation_outside_of_recipe() {
let text = "{{";
parse_error(text, CompilationError {
text: text,
index: 0,
line: 0,
column: 0,
width: Some(2),
kind: CompilationErrorKind::UnexpectedToken{expected: vec![Name, At], found: InterpolationStart},
});
}
#[test]
fn unclosed_interpolation_delimiter() {
let text = "a:\n echo {{ foo";
parse_error(text, CompilationError {
text: text,
index: 15,
line: 1,
column: 12,
width: Some(0),
kind: CompilationErrorKind::UnexpectedToken{expected: vec![Plus, Eol, InterpolationEnd], found: Dedent},
});
}
#[test]
fn missing_default_colon() {
let text = "hello arg=:";
let expected = vec![StringToken, RawString];
let found = Colon;
parse_error(text, CompilationError {
text: text,
index: 10,
line: 0,
column: 10,
width: Some(1),
kind: UnexpectedToken{expected, found},
});
}

#[test]
fn plus_following_parameter() {
let text = "a b c+:";
parse_error(text, CompilationError {
text: text,
index: 5,
line: 0,
column: 5,
width: Some(1),
kind: CompilationErrorKind::UnexpectedToken{expected: vec![Name], found: Plus},
});
}
#[test]
fn missing_default_backtick() {
let text = "hello arg=`hello`";
let expected = vec![StringToken, RawString];
let found = Backtick;
parse_error(text, CompilationError {
text: text,
index: 10,
line: 0,
column: 10,
width: Some(7),
kind: UnexpectedToken{expected, found},
});
}

#[test]
fn readme_test() {
let mut justfiles = vec![];
let mut current = None;
#[test]
fn parameter_after_variadic() {
let text = "foo +a bbb:";
parse_error(text, CompilationError {
text: text,
index: 7,
line: 0,
column: 7,
width: Some(3),
kind: ParameterFollowsVariadicParameter{parameter: "bbb"}
});
}

for line in brev::slurp("README.asc").lines() {
if let Some(mut justfile) = current {
if line == "```" {
justfiles.push(justfile);
current = None;
} else {
justfile += line;
justfile += "\n";
current = Some(justfile);
#[test]
fn required_after_default() {
let text = "hello arg='foo' bar:";
parse_error(text, CompilationError {
text: text,
index: 16,
line: 0,
column: 16,
width: Some(3),
kind: RequiredParameterFollowsDefaultParameter{parameter: "bar"},
});
}

#[test]
fn missing_eol() {
let text = "a b c: z =";
parse_error(text, CompilationError {
text: text,
index: 9,
line: 0,
column: 9,
width: Some(1),
kind: UnexpectedToken{expected: vec![Name, Eol, Eof], found: Equals},
});
}

#[test]
fn duplicate_parameter() {
let text = "a b b:";
parse_error(text, CompilationError {
text: text,
index: 4,
line: 0,
column: 4,
width: Some(1),
kind: DuplicateParameter{recipe: "a", parameter: "b"}
});
}

#[test]
fn parameter_shadows_varible() {
let text = "foo = \"h\"\na foo:";
parse_error(text, CompilationError {
text: text,
index: 12,
line: 1,
column: 2,
width: Some(3),
kind: ParameterShadowsVariable{parameter: "foo"}
});
}

#[test]
fn dependency_has_parameters() {
let text = "foo arg:\nb: foo";
parse_error(text, CompilationError {
text: text,
index: 12,
line: 1,
column: 3,
width: Some(3),
kind: DependencyHasParameters{recipe: "b", dependency: "foo"}
});
}

#[test]
fn duplicate_dependency() {
let text = "a b c: b c z z";
parse_error(text, CompilationError {
text: text,
index: 13,
line: 0,
column: 13,
width: Some(1),
kind: DuplicateDependency{recipe: "a", dependency: "z"}
});
}

#[test]
fn duplicate_recipe() {
let text = "a:\nb:\na:";
parse_error(text, CompilationError {
text: text,
index: 6,
line: 2,
column: 0,
width: Some(1),
kind: DuplicateRecipe{recipe: "a", first: 0}
});
}

#[test]
fn duplicate_variable() {
let text = "a = \"0\"\na = \"0\"";
parse_error(text, CompilationError {
text: text,
index: 8,
line: 1,
column: 0,
width: Some(1),
kind: DuplicateVariable{variable: "a"}
});
}

#[test]
fn extra_whitespace() {
let text = "a:\n blah\n blarg";
parse_error(text, CompilationError {
text: text,
index: 10,
line: 2,
column: 1,
width: Some(6),
kind: ExtraLeadingWhitespace
});
// extra whitespace is okay in a shebang recipe
parse_success("a:\n #!\n print(1)");
}

#[test]
fn interpolation_outside_of_recipe() {
let text = "{{";
let expected = vec![Name, At];
let found = InterpolationStart;
parse_error(text, CompilationError {
text: text,
index: 0,
line: 0,
column: 0,
width: Some(2),
kind: UnexpectedToken{expected, found},
});
}

#[test]
fn unclosed_interpolation_delimiter() {
let text = "a:\n echo {{ foo";
let expected = vec![Plus, Eol, InterpolationEnd];
let found = Dedent;
parse_error(text, CompilationError {
text: text,
index: 15,
line: 1,
column: 12,
width: Some(0),
kind: UnexpectedToken{expected, found},
});
}

#[test]
fn plus_following_parameter() {
let text = "a b c+:";
parse_error(text, CompilationError {
text: text,
index: 5,
line: 0,
column: 5,
width: Some(1),
kind: UnexpectedToken{expected: vec![Name], found: Plus},
});
}

#[test]
fn readme_test() {
let mut justfiles = vec![];
let mut current = None;

for line in brev::slurp("README.asc").lines() {
if let Some(mut justfile) = current {
if line == "```" {
justfiles.push(justfile);
current = None;
} else {
justfile += line;
justfile += "\n";
current = Some(justfile);
}
} else if line == "```make" {
current = Some(String::new());
}
} else if line == "```make" {
current = Some(String::new());
}

for justfile in justfiles {
parse_success(&justfile);
}
}

for justfile in justfiles {
parse_success(&justfile);
}
}

}
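// Field conventions shared by the expected CompilationErrors in these tests,
// inferred from the cases above: `index` is the byte offset of the offending
// token within `text`, `line` and `column` are zero-based, and `width`
// appears to be Some(token width) when the error points at a single token
// and None otherwise (see the tokenizer tests further down).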
@ -54,7 +54,7 @@ impl<'a> Recipe<'a> {
scope: &Map<&'a str, String>,
exports: &Set<&'a str>,
options: &Configuration,
) -> Result<(), RuntimeError<'a>> {
) -> RunResult<'a, ()> {
if options.verbose {
let color = options.color.stderr().banner();
eprintln!("{}===> Running recipe `{}`...{}", color.prefix(), self.name, color.suffix());

@ -150,7 +150,7 @@ impl<'a> Recipe<'a> {

// create a command to run the script
let mut command = Platform::make_shebang_command(&path, interpreter, argument)
.map_err(|output_error| RuntimeError::Cygpath{recipe: self.name, output_error: output_error})?;
.map_err(|output_error| RuntimeError::Cygpath{recipe: self.name, output_error})?;

command.export_environment_variables(scope, exports)?;

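// Illustrative sketch (hypothetical names, not part of this commit) of the
// struct field init shorthand relied on by the map_err change above: a local
// binding with the same name as a field can initialize it without repeating
// the name.
struct DemoError { output_error: String }

fn capture(output_error: String) -> DemoError {
    DemoError { output_error } // same as `output_error: output_error`
}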
@ -1,10 +1,12 @@
use common::*;

use CompilationErrorKind::*;

pub fn resolve_recipes<'a>(
recipes: &Map<&'a str, Recipe<'a>>,
assignments: &Map<&'a str, Expression<'a>>,
text: &'a str,
) -> Result<(), CompilationError<'a>> {
) -> CompilationResult<'a, ()> {
let mut resolver = RecipeResolver {
seen: empty(),
stack: empty(),
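// Assumed reading of the new return type: `CompilationResult<'a, T>` is
// taken to be an alias for `Result<T, CompilationError<'a>>`, mirroring the
// `RunResult` alias over `RuntimeError` introduced further down in this
// commit.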
@ -35,14 +37,14 @@ pub fn resolve_recipes<'a>(
// two lifetime parameters instead of one, with one being the lifetime
// of the struct, and the second being the lifetime of the tokens
// that it contains
let error = variable.error(CompilationErrorKind::UndefinedVariable{variable: name});
let error = variable.error(UndefinedVariable{variable: name});
return Err(CompilationError {
text: text,
index: error.index,
line: error.line,
column: error.column,
width: error.width,
kind: CompilationErrorKind::UndefinedVariable {
kind: UndefinedVariable {
variable: &text[error.index..error.index + error.width.unwrap()],
}
});

@ -64,7 +66,7 @@ struct RecipeResolver<'a: 'b, 'b> {
}

impl<'a, 'b> RecipeResolver<'a, 'b> {
fn resolve(&mut self, recipe: &Recipe<'a>) -> Result<(), CompilationError<'a>> {
fn resolve(&mut self, recipe: &Recipe<'a>) -> CompilationResult<'a, ()> {
if self.resolved.contains(recipe.name) {
return Ok(())
}

@ -76,7 +78,7 @@ impl<'a, 'b> RecipeResolver<'a, 'b> {
if self.seen.contains(dependency.name) {
let first = self.stack[0];
self.stack.push(first);
return Err(dependency_token.error(CompilationErrorKind::CircularRecipeDependency {
return Err(dependency_token.error(CircularRecipeDependency {
recipe: recipe.name,
circle: self.stack.iter()
.skip_while(|name| **name != dependency.name)

@ -85,7 +87,7 @@ impl<'a, 'b> RecipeResolver<'a, 'b> {
}
self.resolve(dependency)?;
},
None => return Err(dependency_token.error(CompilationErrorKind::UnknownDependency {
None => return Err(dependency_token.error(UnknownDependency {
recipe: recipe.name,
unknown: dependency_token.lexeme
})),
@ -102,70 +104,70 @@ mod test {
use super::*;
use testing::parse_error;

#[test]
fn circular_recipe_dependency() {
let text = "a: b\nb: a";
parse_error(text, CompilationError {
text: text,
index: 8,
line: 1,
column: 3,
width: Some(1),
kind: CompilationErrorKind::CircularRecipeDependency{recipe: "b", circle: vec!["a", "b", "a"]}
});
}
#[test]
fn circular_recipe_dependency() {
let text = "a: b\nb: a";
let recipe = "b";
let circle = vec!["a", "b", "a"];
parse_error(text, CompilationError {
text: text,
index: 8,
line: 1,
column: 3,
width: Some(1),
kind: CircularRecipeDependency{recipe, circle}
});
}

#[test]
fn self_recipe_dependency() {
let text = "a: a";
parse_error(text, CompilationError {
text: text,
index: 3,
line: 0,
column: 3,
width: Some(1),
kind: CompilationErrorKind::CircularRecipeDependency{recipe: "a", circle: vec!["a", "a"]}
});
}
#[test]
fn self_recipe_dependency() {
let text = "a: a";
parse_error(text, CompilationError {
text: text,
index: 3,
line: 0,
column: 3,
width: Some(1),
kind: CircularRecipeDependency{recipe: "a", circle: vec!["a", "a"]}
});
}

#[test]
fn unknown_dependency() {
let text = "a: b";
parse_error(text, CompilationError {
text: text,
index: 3,
line: 0,
column: 3,
width: Some(1),
kind: UnknownDependency{recipe: "a", unknown: "b"}
});
}

#[test]
fn unknown_dependency() {
let text = "a: b";
parse_error(text, CompilationError {
text: text,
index: 3,
line: 0,
column: 3,
width: Some(1),
kind: CompilationErrorKind::UnknownDependency{recipe: "a", unknown: "b"}
});
}

#[test]
fn unknown_interpolation_variable() {
let text = "x:\n {{ hello}}";
parse_error(text, CompilationError {
text: text,
index: 9,
line: 1,
column: 6,
width: Some(5),
kind: CompilationErrorKind::UndefinedVariable{variable: "hello"},
});
}

#[test]
fn unknown_second_interpolation_variable() {
let text = "wtf=\"x\"\nx:\n echo\n foo {{wtf}} {{ lol }}";
parse_error(text, CompilationError {
text: text,
index: 33,
line: 3,
column: 16,
width: Some(3),
kind: CompilationErrorKind::UndefinedVariable{variable: "lol"},
});
}
#[test]
fn unknown_interpolation_variable() {
let text = "x:\n {{ hello}}";
parse_error(text, CompilationError {
text: text,
index: 9,
line: 1,
column: 6,
width: Some(5),
kind: UndefinedVariable{variable: "hello"},
});
}

#[test]
fn unknown_second_interpolation_variable() {
let text = "wtf=\"x\"\nx:\n echo\n foo {{wtf}} {{ lol }}";
parse_error(text, CompilationError {
text: text,
index: 33,
line: 3,
column: 16,
width: Some(3),
kind: UndefinedVariable{variable: "lol"},
});
}
}
@ -276,7 +276,7 @@ pub fn run() {
if let Some(doc) = recipe.doc {
print!(" {} {}", doc_color.paint("#"), doc_color.paint(doc));
}
println!("");
println!();
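// `println!()` with no arguments prints only a newline, so the new call is
// equivalent to the old `println!("")` without formatting an empty string.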
}
process::exit(EXIT_SUCCESS);
}
@ -6,6 +6,8 @@ use misc::{And, Or, maybe_s, Tick, ticks, write_error_context};

use self::RuntimeError::*;

pub type RunResult<'a, T> = Result<T, RuntimeError<'a>>;

fn write_token_error_context(f: &mut fmt::Formatter, token: &Token) -> Result<(), fmt::Error> {
write_error_context(
f,
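// Minimal, self-contained sketch (hypothetical names, not part of this
// commit) of the Result-alias pattern that `RunResult` follows: fixing the
// error type in the alias lets signatures such as `Recipe::run` above spell
// out only the success type.
type DemoResult<'a, T> = Result<T, &'a str>;

fn halve(n: i32) -> DemoResult<'static, i32> {
    if n % 2 == 0 { Ok(n / 2) } else { Err("odd number") }
}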
@ -1,6 +1,7 @@
use common::*;

use compile;
use tokenizer::tokenize;

pub fn parse_success(text: &str) -> Justfile {
match compile(text) {

@ -10,7 +11,10 @@ pub fn parse_success(text: &str) -> Justfile {
}

pub fn parse_error(text: &str, expected: CompilationError) {
if let Err(error) = compile(text) {
let tokens = tokenize(text).unwrap();
let parser = Parser::new(text, tokens);

if let Err(error) = parser.justfile() {
assert_eq!(error.text, expected.text);
assert_eq!(error.index, expected.index);
assert_eq!(error.line, expected.line);
404
src/tokenizer.rs
@ -1,6 +1,7 @@
use common::*;

use TokenKind::*;
use CompilationErrorKind::*;

fn re(pattern: &str) -> Regex {
Regex::new(pattern).unwrap()

@ -18,7 +19,7 @@ fn mixed_whitespace(text: &str) -> bool {
!(text.chars().all(|c| c == ' ') || text.chars().all(|c| c == '\t'))
}

pub fn tokenize(text: &str) -> Result<Vec<Token>, CompilationError> {
pub fn tokenize(text: &str) -> CompilationResult<Vec<Token>> {
lazy_static! {
static ref BACKTICK: Regex = token(r"`[^`\n\r]*`" );
static ref COLON: Regex = token(r":" );

@ -84,7 +85,7 @@ pub fn tokenize(text: &str) -> Result<Vec<Token>, CompilationError> {
// indent: was no indentation, now there is
(&State::Start, Some(current)) => {
if mixed_whitespace(current) {
return error!(CompilationErrorKind::MixedLeadingWhitespace{whitespace: current})
return error!(MixedLeadingWhitespace{whitespace: current})
}
//indent = Some(current);
state.push(State::Indent(current));

@ -99,7 +100,7 @@ pub fn tokenize(text: &str) -> Result<Vec<Token>, CompilationError> {
// was indentation and still is, check if the new indentation matches
(&State::Indent(previous), Some(current)) => {
if !current.starts_with(previous) {
return error!(CompilationErrorKind::InconsistentLeadingWhitespace{
return error!(InconsistentLeadingWhitespace{
expected: previous,
found: current
});

@ -108,7 +109,7 @@ pub fn tokenize(text: &str) -> Result<Vec<Token>, CompilationError> {
}
// at column 0 in some other state: this should never happen
(&State::Text, _) | (&State::Interpolation, _) => {
return error!(CompilationErrorKind::Internal {
return error!(Internal {
message: "unexpected state at column 0".to_string()
});
}

@ -143,7 +144,7 @@ pub fn tokenize(text: &str) -> Result<Vec<Token>, CompilationError> {
(column, state.last().unwrap(), LINE.captures(rest)) {
let line = captures.get(0).unwrap().as_str();
if !line.starts_with(indent) {
return error!(CompilationErrorKind::Internal{message: "unexpected indent".to_string()});
return error!(Internal{message: "unexpected indent".to_string()});
}
state.push(State::Text);
(&line[0..indent.len()], "", Line)

@ -161,7 +162,7 @@ pub fn tokenize(text: &str) -> Result<Vec<Token>, CompilationError> {
state.pop();
(captures.get(1).unwrap().as_str(), captures.get(2).unwrap().as_str(), Eol)
} else {
return error!(CompilationErrorKind::Internal {
return error!(Internal {
message: format!("Could not match token in text state: \"{}\"", rest)
});
}

@ -176,7 +177,7 @@ pub fn tokenize(text: &str) -> Result<Vec<Token>, CompilationError> {
(captures.get(1).unwrap().as_str(), captures.get(2).unwrap().as_str(), Name)
} else if let Some(captures) = EOL.captures(rest) {
if state.last().unwrap() == &State::Interpolation {
return error!(CompilationErrorKind::Internal {
return error!(Internal {
message: "hit EOL while still in interpolation state".to_string()
});
}

@ -196,18 +197,18 @@ pub fn tokenize(text: &str) -> Result<Vec<Token>, CompilationError> {
} else if let Some(captures) = RAW_STRING.captures(rest) {
(captures.get(1).unwrap().as_str(), captures.get(2).unwrap().as_str(), RawString)
} else if UNTERMINATED_RAW_STRING.is_match(rest) {
return error!(CompilationErrorKind::UnterminatedString);
return error!(UnterminatedString);
} else if let Some(captures) = STRING.captures(rest) {
let prefix = captures.get(1).unwrap().as_str();
let contents = &rest[prefix.len()+1..];
if contents.is_empty() {
return error!(CompilationErrorKind::UnterminatedString);
return error!(UnterminatedString);
}
let mut len = 0;
let mut escape = false;
for c in contents.chars() {
if c == '\n' || c == '\r' {
return error!(CompilationErrorKind::UnterminatedString);
return error!(UnterminatedString);
} else if !escape && c == '"' {
break;
} else if !escape && c == '\\' {

@ -220,13 +221,13 @@ pub fn tokenize(text: &str) -> Result<Vec<Token>, CompilationError> {
let start = prefix.len();
let content_end = start + len + 1;
if escape || content_end >= rest.len() {
return error!(CompilationErrorKind::UnterminatedString);
return error!(UnterminatedString);
}
(prefix, &rest[start..content_end + 1], StringToken)
} else if rest.starts_with("#!") {
return error!(CompilationErrorKind::OuterShebang)
return error!(OuterShebang)
} else {
return error!(CompilationErrorKind::UnknownStartOfToken)
return error!(UnknownStartOfToken)
};

tokens.push(Token {

@ -245,7 +246,7 @@ pub fn tokenize(text: &str) -> Result<Vec<Token>, CompilationError> {
let last = tokens.last().unwrap();
match last.kind {
Eof => {},
_ => return Err(last.error(CompilationErrorKind::Internal {
_ => return Err(last.error(Internal {
message: format!("zero length token: {:?}", last)
})),
}
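// tokenize() drives a small stack of lexer states: Start (no indentation
// seen yet), Indent (inside an indented recipe body), Text (inside a recipe
// line), and Interpolation (inside {{ ... }}). The Internal error! arms
// above guard state/column combinations that should be unreachable.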
@ -281,35 +282,28 @@ pub fn tokenize(text: &str) -> Result<Vec<Token>, CompilationError> {
}

#[cfg(test)]
mod test {
mod test {
use super::*;
use testing::parse_error;

fn tokenize_success(text: &str, expected_summary: &str) {
let tokens = tokenize(text).unwrap();
let roundtrip = tokens.iter().map(|t| {
let mut s = String::new();
s += t.prefix;
s += t.lexeme;
s
}).collect::<Vec<_>>().join("");
let summary = token_summary(&tokens);
if summary != expected_summary {
panic!("token summary mismatch:\nexpected: {}\ngot: {}\n", expected_summary, summary);
}
assert_eq!(text, roundtrip);
}

fn tokenize_error(text: &str, expected: CompilationError) {
if let Err(error) = tokenize(text) {
assert_eq!(error.text, expected.text);
assert_eq!(error.index, expected.index);
assert_eq!(error.line, expected.line);
assert_eq!(error.column, expected.column);
assert_eq!(error.kind, expected.kind);
assert_eq!(error, expected);
} else {
panic!("tokenize() succeeded but expected: {}\n{}", expected, text);
macro_rules! summary_test {
($name:ident, $input:expr, $expected:expr $(,)*) => {
#[test]
fn $name() {
let input = $input;
let expected = $expected;
let tokens = tokenize(input).unwrap();
let roundtrip = tokens.iter().map(|t| {
let mut s = String::new();
s += t.prefix;
s += t.lexeme;
s
}).collect::<Vec<_>>().join("");
let actual = token_summary(&tokens);
if actual != expected {
panic!("token summary mismatch:\nexpected: {}\ngot: {}\n", expected, actual);
}
assert_eq!(input, roundtrip);
}
}
}

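// Legend for the summary strings in the tests below, inferred from the
// inputs and expected values themselves: N = Name, : = Colon, = = Equals,
// + = Plus, # = Comment, $ = Eol, . = Eof, > = Indent, < = Dedent,
// ^ = Line, _ = Text, { and } = InterpolationStart and InterpolationEnd,
// " = StringToken, ' = RawString, ` = Backtick.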
@ -337,55 +331,83 @@ mod test {
}).collect::<Vec<_>>().join("")
}

#[test]
fn tokanize_strings() {
tokenize_success(
macro_rules! error_test {
(
name: $name:ident,
input: $input:expr,
index: $index:expr,
line: $line:expr,
column: $column:expr,
width: $width:expr,
kind: $kind:expr,
) => {
#[test]
fn $name() {
let input = $input;

let expected = CompilationError {
text: input,
index: $index,
line: $line,
column: $column,
width: $width,
kind: $kind,
};

if let Err(error) = tokenize(input) {
assert_eq!(error.text, expected.text);
assert_eq!(error.index, expected.index);
assert_eq!(error.line, expected.line);
assert_eq!(error.column, expected.column);
assert_eq!(error.kind, expected.kind);
assert_eq!(error, expected);
} else {
panic!("tokenize() succeeded but expected: {}\n{}", expected, input);
}
}
}
}

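// The error_test! macro above generates one #[test] fn per invocation and
// compares each CompilationError field individually before asserting on the
// whole value, so a failing case reports the specific field that diverged.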
summary_test!{tokenize_strings,
r#"a = "'a'" + '"b"' + "'c'" + '"d"'#echo hello"#,
r#"N="+'+"+'#."#
);
}
r#"N="+'+"+'#."#,
}

#[test]
fn tokenize_recipe_interpolation_eol() {
let text = "foo: # some comment
summary_test!{tokenize_recipe_interpolation_eol,
"foo: # some comment
{{hello}}
";
tokenize_success(text, "N:#$>^{N}$<.");
}
",
"N:#$>^{N}$<.",
}

#[test]
fn tokenize_recipe_interpolation_eof() {
let text = "foo: # more comments
summary_test!{tokenize_recipe_interpolation_eof,
"foo: # more comments
{{hello}}
# another comment
";
tokenize_success(text, "N:#$>^{N}$<#$.");
}
",
"N:#$>^{N}$<#$.",
}

#[test]
fn tokenize_recipe_complex_interpolation_expression() {
let text = "foo: #lol\n {{a + b + \"z\" + blarg}}";
tokenize_success(text, "N:#$>^{N+N+\"+N}<.");
}
summary_test!{tokenize_recipe_complex_interpolation_expression,
"foo: #lol\n {{a + b + \"z\" + blarg}}",
"N:#$>^{N+N+\"+N}<.",
}

#[test]
fn tokenize_recipe_multiple_interpolations() {
let text = "foo:#ok\n {{a}}0{{b}}1{{c}}";
tokenize_success(text, "N:#$>^{N}_{N}_{N}<.");
}
summary_test!{tokenize_recipe_multiple_interpolations,
"foo:#ok\n {{a}}0{{b}}1{{c}}",
"N:#$>^{N}_{N}_{N}<.",
}

#[test]
fn tokenize_junk() {
let text = "bob
summary_test!{tokenize_junk,
"bob

hello blah blah blah : a b c #whatever
";
tokenize_success(text, "N$$NNNN:NNN#$.");
}
",
"N$$NNNN:NNN#$.",
}

#[test]
fn tokenize_empty_lines() {
let text = "
summary_test!{tokenize_empty_lines,
"
# this does something
hello:
asdf

@ -396,41 +418,32 @@ hello:
dsdf # whatever

# yolo
";
",
"$#$N:$>^_$^_$$^_$$^_$$<#$.",
}

tokenize_success(text, "$#$N:$>^_$^_$$^_$$^_$$<#$.");
}

#[test]
fn tokenize_comment_before_variable() {
let text = "
summary_test!{tokenize_comment_before_variable,
"
#
A='1'
echo:
echo {{A}}
";
tokenize_success(text, "$#$N='$N:$>^_{N}$<.");
}
",
"$#$N='$N:$>^_{N}$<.",
}

#[test]
fn tokenize_interpolation_backticks() {
tokenize_success(
summary_test!{tokenize_interpolation_backticks,
"hello:\n echo {{`echo hello` + `echo goodbye`}}",
"N:$>^_{`+`}<."
);
}
"N:$>^_{`+`}<.",
}

#[test]
fn tokenize_assignment_backticks() {
tokenize_success(
summary_test!{tokenize_assignment_backticks,
"a = `echo hello` + `echo goodbye`",
"N=`+`."
);
}
"N=`+`.",
}

#[test]
fn tokenize_multiple() {
let text = "
summary_test!{tokenize_multiple,
"
hello:
a
b

@ -442,79 +455,15 @@ hello:
# hello
bob:
frank
";
",

tokenize_success(text, "$N:$>^_$^_$$^_$$^_$$<#$N:$>^_$<.");
}
"$N:$>^_$^_$$^_$$^_$$<#$N:$>^_$<.",
}

summary_test!{tokenize_comment, "a:=#", "N:=#."}

#[test]
fn tokenize_comment() {
tokenize_success("a:=#", "N:=#.")
}

#[test]
fn tokenize_space_then_tab() {
let text = "a:
0
1
\t2
";
tokenize_error(text, CompilationError {
text: text,
index: 9,
line: 3,
column: 0,
width: None,
kind: CompilationErrorKind::InconsistentLeadingWhitespace{expected: " ", found: "\t"},
});
}

#[test]
fn tokenize_tabs_then_tab_space() {
let text = "a:
\t\t0
\t\t 1
\t 2
";
tokenize_error(text, CompilationError {
text: text,
index: 12,
line: 3,
column: 0,
width: None,
kind: CompilationErrorKind::InconsistentLeadingWhitespace{expected: "\t\t", found: "\t "},
});
}

#[test]
fn tokenize_outer_shebang() {
let text = "#!/usr/bin/env bash";
tokenize_error(text, CompilationError {
text: text,
index: 0,
line: 0,
column: 0,
width: None,
kind: CompilationErrorKind::OuterShebang
});
}

#[test]
fn tokenize_unknown() {
let text = "~";
tokenize_error(text, CompilationError {
text: text,
index: 0,
line: 0,
column: 0,
width: None,
kind: CompilationErrorKind::UnknownStartOfToken
});
}
#[test]
fn tokenize_order() {
let text = r"
summary_test!{tokenize_order,
r"
b: a
@mv a b

@ -526,60 +475,95 @@ d: c
@rm c

c: b
@mv b c";
tokenize_success(text, "$N:N$>^_$$<N:$>^_$^_$$<N:N$>^_$$<N:N$>^_<.");
}
@mv b c",
"$N:N$>^_$$<N:$>^_$^_$$<N:N$>^_$$<N:N$>^_<.",
}

#[test]
fn unterminated_string() {
let text = r#"a = ""#;
parse_error(text, CompilationError {
text: text,
error_test! {
name: tokenize_space_then_tab,
input: "a:
0
1
\t2
",
index: 9,
line: 3,
column: 0,
width: None,
kind: InconsistentLeadingWhitespace{expected: " ", found: "\t"},
}

error_test! {
name: tokenize_tabs_then_tab_space,
input: "a:
\t\t0
\t\t 1
\t 2
",
index: 12,
line: 3,
column: 0,
width: None,
kind: InconsistentLeadingWhitespace{expected: "\t\t", found: "\t "},
}

error_test! {
name: tokenize_outer_shebang,
input: "#!/usr/bin/env bash",
index: 0,
line: 0,
column: 0,
width: None,
kind: OuterShebang,
}

error_test! {
name: tokenize_unknown,
input: "~",
index: 0,
line: 0,
column: 0,
width: None,
kind: UnknownStartOfToken,
}

error_test! {
name: unterminated_string,
input: r#"a = ""#,
index: 3,
line: 0,
column: 3,
width: None,
kind: CompilationErrorKind::UnterminatedString,
});
}
kind: UnterminatedString,
}

#[test]
fn unterminated_string_with_escapes() {
let text = r#"a = "\n\t\r\"\\"#;
parse_error(text, CompilationError {
text: text,
error_test! {
name: unterminated_string_with_escapes,
input: r#"a = "\n\t\r\"\\"#,
index: 3,
line: 0,
column: 3,
width: None,
kind: CompilationErrorKind::UnterminatedString,
});
}
#[test]
fn unterminated_raw_string() {
let text = "r a='asdf";
parse_error(text, CompilationError {
text: text,
kind: UnterminatedString,
}

error_test! {
name: unterminated_raw_string,
input: "r a='asdf",
index: 4,
line: 0,
column: 4,
width: None,
kind: CompilationErrorKind::UnterminatedString,
});
}
kind: UnterminatedString,
}

#[test]
fn mixed_leading_whitespace() {
let text = "a:\n\t echo hello";
parse_error(text, CompilationError {
text: text,
error_test! {
name: mixed_leading_whitespace,
input: "a:\n\t echo hello",
index: 3,
line: 1,
column: 0,
width: None,
kind: CompilationErrorKind::MixedLeadingWhitespace{whitespace: "\t "}
});
}

kind: MixedLeadingWhitespace{whitespace: "\t "},
}
}