Fix bang lexing and placate clippy (#821)

Casey Rodarmor 2021-05-07 00:14:38 -07:00 committed by GitHub
parent 8b9a5e6010
commit 7bc9d3986e
20 changed files with 99 additions and 73 deletions

View File

@@ -75,7 +75,7 @@ jobs:
         run: cargo test --all --verbose
       - name: Clippy
-        run: cargo clippy --all
+        run: cargo clippy --all --all-targets --all-features
       - name: Lint
         if: matrix.os != 'windows-2016'

View File

@@ -113,7 +113,7 @@ install-dev-deps-homebrew:
 # everyone's favorite animate paper clip
 clippy:
-  cargo clippy
+  cargo clippy --all --all-targets --all-features
 
 # count non-empty lines of code
 sloc:

View File

@@ -223,6 +223,9 @@ impl Display for CompilationError<'_> {
       UnknownStartOfToken => {
         writeln!(f, "Unknown start of token:")?;
       },
+      UnexpectedEndOfToken { expected } => {
+        writeln!(f, "Expected character `{}` but found end-of-file", expected)?;
+      },
       MismatchedClosingDelimiter {
         open,
         open_line,

View File

@@ -95,6 +95,9 @@ pub(crate) enum CompilationErrorKind<'src> {
   UnexpectedCharacter {
     expected: char,
   },
+  UnexpectedEndOfToken {
+    expected: char,
+  },
   UnknownSetting {
     setting: &'src str,
   },

View File

@@ -340,7 +340,7 @@ impl Config {
     let positional = Positional::from_values(matches.values_of(arg::ARGUMENTS));
 
     for (name, value) in positional.overrides {
-      overrides.insert(name.to_owned(), value.to_owned());
+      overrides.insert(name.clone(), value.clone());
     }
 
     let search_config = {
@@ -798,7 +798,7 @@ impl Config {
     arguments: &[String],
   ) -> Result<(), i32> {
     if let Err(error) = InterruptHandler::install(self.verbosity) {
-      warn!("Failed to set CTRL-C handler: {}", error)
+      warn!("Failed to set CTRL-C handler: {}", error);
     }
 
     let result = justfile.run(&self, search, overrides, arguments);
@@ -851,7 +851,7 @@ impl Config {
         if i > 0 {
           print!(" ");
         }
-        print!("{}", assignment.name)
+        print!("{}", assignment.name);
       }
       println!();
     }
@@ -958,7 +958,7 @@ ARGS:
       $(dry_run: $dry_run,)?
       $(highlight: $highlight,)?
      $(search_config: $search_config,)?
-      $(shell: $shell.to_string(),)?
+      $(shell: $shell.to_owned(),)?
      $(shell_args: $shell_args,)?
      $(shell_present: $shell_present,)?
      $(subcommand: $subcommand,)?
@@ -1411,7 +1411,7 @@ ARGS:
     name: arguments_leading_equals,
     args: ["=foo"],
     subcommand: Subcommand::Run {
-      arguments: vec!["=foo".to_string()],
+      arguments: vec!["=foo".to_owned()],
       overrides: map!{},
     },
   }

View File

@@ -29,6 +29,6 @@ mod tests {
   #[test]
   fn tick() {
-    assert_eq!(Enclosure::tick("foo").to_string(), "`foo`")
+    assert_eq!(Enclosure::tick("foo").to_string(), "`foo`");
   }
 }

View File

@@ -15,7 +15,7 @@ impl<'src> Justfile<'src> {
     for recipe in self.recipes.values() {
       if let Some(first_recipe) = first {
         if recipe.line_number() < first_recipe.line_number() {
-          first = Some(recipe)
+          first = Some(recipe);
         }
       } else {
         first = Some(recipe);
@@ -134,7 +134,7 @@ impl<'src> Justfile<'src> {
       } else {
         return Err(RuntimeError::EvalUnknownVariable {
           suggestion: self.suggest_variable(&variable),
-          variable: variable.to_owned(),
+          variable: variable.clone(),
         });
       }
     } else {
@@ -224,7 +224,7 @@ impl<'src> Justfile<'src> {
     let mut ran = BTreeSet::new();
     for (recipe, arguments) in grouped {
-      self.run_recipe(&context, recipe, arguments, &dotenv, &search, &mut ran)?
+      self.run_recipe(&context, recipe, arguments, &dotenv, &search, &mut ran)?;
     }
 
     Ok(())
@@ -235,13 +235,11 @@ impl<'src> Justfile<'src> {
   }
 
   pub(crate) fn get_recipe(&self, name: &str) -> Option<&Recipe<'src>> {
-    if let Some(recipe) = self.recipes.get(name) {
-      Some(recipe)
-    } else if let Some(alias) = self.aliases.get(name) {
-      Some(alias.target.as_ref())
-    } else {
-      None
-    }
+    self
+      .recipes
+      .get(name)
+      .map(Rc::as_ref)
+      .or_else(|| self.aliases.get(name).map(|alias| alias.target.as_ref()))
   }
 
   fn run_recipe<'run>(
@@ -599,9 +597,9 @@ mod tests {
     "#,
     args: ["--quiet", "wut"],
     error: Code {
-      code: _,
       line_number,
       recipe,
+      ..
     },
     check: {
       assert_eq!(recipe, "wut");
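
The get_recipe rewrite above replaces an if/else ladder with a chain of Option combinators. A rough, self-contained illustration of the same lookup-with-alias-fallback pattern (hypothetical maps and names, not the actual just types, which store Rc<Recipe> and Alias values):

use std::collections::BTreeMap;

// Hypothetical stand-in for Justfile::get_recipe: try the recipe table
// first, then resolve an alias to its target recipe.
fn lookup<'a>(
  recipes: &'a BTreeMap<&'a str, String>,
  aliases: &'a BTreeMap<&'a str, &'a str>,
  name: &str,
) -> Option<&'a String> {
  recipes
    .get(name)
    .or_else(|| aliases.get(name).and_then(|target| recipes.get(target)))
}

fn main() {
  let mut recipes = BTreeMap::new();
  recipes.insert("build", "cargo build".to_owned());

  let mut aliases = BTreeMap::new();
  aliases.insert("b", "build");

  assert_eq!(lookup(&recipes, &aliases, "b"), Some(&"cargo build".to_owned()));
  assert_eq!(lookup(&recipes, &aliases, "missing"), None);
}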

View File

@@ -162,9 +162,14 @@ impl<'src> Lexer<'src> {
     self.next_is('\n') || self.rest_starts_with("\r\n")
   }
 
+  /// Are we at end-of-file?
+  fn at_eof(&self) -> bool {
+    self.rest().is_empty()
+  }
+
   /// Are we at end-of-line or end-of-file?
   fn at_eol_or_eof(&self) -> bool {
-    self.at_eol() || self.rest().is_empty()
+    self.at_eol() || self.at_eof()
   }
 
   /// Get current indentation
@@ -293,11 +298,11 @@ impl<'src> Lexer<'src> {
       match self.next {
         Some(first) => {
           if let Some(&interpolation_start) = self.interpolation_stack.last() {
-            self.lex_interpolation(interpolation_start, first)?
+            self.lex_interpolation(interpolation_start, first)?;
           } else if self.recipe_body {
-            self.lex_body()?
+            self.lex_body()?;
           } else {
-            self.lex_normal(first)?
+            self.lex_normal(first)?;
           };
         },
         None => break,
@@ -558,7 +563,7 @@ impl<'src> Lexer<'src> {
         break Interpolation;
       }
 
-      if self.rest().is_empty() {
+      if self.at_eof() {
         break EndOfFile;
       }
@@ -684,6 +689,11 @@ impl<'src> Lexer<'src> {
     } else {
       // Emit an unspecified token to consume the current character,
       self.token(Unspecified);
+
+      if self.at_eof() {
+        return Err(self.error(UnexpectedEndOfToken { expected: '=' }));
+      }
+
       // …and advance past another character,
       self.advance()?;
       // …so that the error we produce highlights the unexpected character.
@@ -759,7 +769,7 @@ impl<'src> Lexer<'src> {
   /// Lex whitespace: [ \t]+
   fn lex_whitespace(&mut self) -> CompilationResult<'src, ()> {
     while self.next_is_whitespace() {
-      self.advance()?
+      self.advance()?;
     }
 
     self.token(Whitespace);
@@ -870,10 +880,7 @@ mod tests {
       .map(|token| token.kind)
       .collect::<Vec<TokenKind>>();
 
-    let have_lexemes = have
-      .iter()
-      .map(|token| token.lexeme())
-      .collect::<Vec<&str>>();
+    let have_lexemes = have.iter().map(Token::lexeme).collect::<Vec<&str>>();
 
     assert_eq!(have_kinds, want_kinds, "Token kind mismatch");
     assert_eq!(have_lexemes, want_lexemes, "Token lexeme mismatch");
@@ -2226,6 +2233,30 @@ mod tests {
     },
   }
 
+  error! {
+    name: bang_eof,
+    input: "!",
+    offset: 1,
+    line: 0,
+    column: 1,
+    width: 0,
+    kind: UnexpectedEndOfToken {
+      expected: '=',
+    },
+  }
+
+  error! {
+    name: bang_unexpected,
+    input: "!%",
+    offset: 1,
+    line: 0,
+    column: 1,
+    width: 1,
+    kind: UnexpectedCharacter {
+      expected: '=',
+    },
+  }
+
   #[test]
   fn presume_error() {
     assert_matches!(
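
The two new error! cases above pin down how a lone `!` is lexed. A condensed, self-contained sketch of that behavior (hypothetical lex_bang helper and BangError enum, not the actual just lexer, which works on its own token and error types):

// After `!` the lexer expects `=`: any other character is an
// UnexpectedCharacter error, and end-of-file is now reported as
// UnexpectedEndOfToken instead of advancing past the end of input.
#[derive(Debug, PartialEq)]
enum BangError {
  UnexpectedCharacter { expected: char, found: char },
  UnexpectedEndOfToken { expected: char },
}

fn lex_bang(src: &str) -> Result<&'static str, BangError> {
  let mut chars = src.chars();
  assert_eq!(chars.next(), Some('!'));
  match chars.next() {
    Some('=') => Ok("!="),
    Some(found) => Err(BangError::UnexpectedCharacter { expected: '=', found }),
    None => Err(BangError::UnexpectedEndOfToken { expected: '=' }),
  }
}

fn main() {
  // Mirrors the bang_eof and bang_unexpected test cases.
  assert_eq!(lex_bang("!="), Ok("!="));
  assert_eq!(lex_bang("!"), Err(BangError::UnexpectedEndOfToken { expected: '=' }));
  assert_eq!(
    lex_bang("!%"),
    Err(BangError::UnexpectedCharacter { expected: '=', found: '%' })
  );
}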

View File

@@ -2,8 +2,12 @@
 #![allow(
   clippy::blanket_clippy_restriction_lints,
   clippy::comparison_chain,
+  clippy::create_dir,
+  clippy::default_numeric_fallback,
   clippy::else_if_without_else,
   clippy::enum_glob_use,
+  clippy::exhaustive_enums,
+  clippy::exhaustive_structs,
   clippy::expect_used,
   clippy::filter_map,
   clippy::if_not_else,

View File

@@ -62,9 +62,9 @@ impl<'src> Node<'src> for Expression<'src> {
         let mut tree = Tree::atom(Keyword::If.lexeme());
         tree.push_mut(lhs.tree());
         if *inverted {
-          tree.push_mut("!=")
+          tree.push_mut("!=");
         } else {
-          tree.push_mut("==")
+          tree.push_mut("==");
         }
         tree.push_mut(rhs.tree());
         tree.push_mut(then.tree());
@@ -72,9 +72,10 @@ impl<'src> Node<'src> for Expression<'src> {
         tree
       },
       Expression::Call { thunk } => {
+        use Thunk::*;
+
         let mut tree = Tree::atom("call");
-        use Thunk::*;
 
         match thunk {
           Nullary { name, .. } => tree.push_mut(name.lexeme()),
           Unary { name, arg, .. } => {
@@ -157,8 +158,7 @@ impl<'src> Node<'src> for UnresolvedRecipe<'src> {
 
 impl<'src> Node<'src> for Parameter<'src> {
   fn tree(&self) -> Tree<'src> {
-    let mut children = Vec::new();
-    children.push(Tree::atom(self.name.lexeme()));
+    let mut children = vec![Tree::atom(self.name.lexeme())];
 
     if let Some(default) = &self.default {
       children.push(default.tree());
@@ -185,11 +185,11 @@ impl<'src> Node<'src> for Fragment<'src> {
 
 impl<'src> Node<'src> for Set<'src> {
   fn tree(&self) -> Tree<'src> {
+    use Setting::*;
+
     let mut set = Tree::atom(Keyword::Set.lexeme());
     set.push_mut(self.name.lexeme().replace('-', "_"));
 
-    use Setting::*;
-
     match &self.value {
       DotenvLoad(value) | Export(value) | PositionalArguments(value) =>
         set.push_mut(value.to_string()),

View File

@@ -387,8 +387,8 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
     let value = self.parse_expression()?;
     self.expect_eol()?;
     Ok(Assignment {
-      name,
       export,
+      name,
       value,
     })
   }
@@ -530,7 +530,7 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
       unindented
     };
 
-    Ok(StringLiteral { cooked, raw, kind })
+    Ok(StringLiteral { kind, raw, cooked })
   }
 
   /// Parse a name from an identifier token

View File

@@ -116,8 +116,8 @@ mod tests {
         .cloned()
         .map(|(key, value): (&str, &str)| (key.to_owned(), value.to_owned()))
         .collect(),
-      search_directory: $search_directory.map(|dir: &str| dir.to_owned()),
-      arguments: $arguments.iter().cloned().map(|arg: &str| arg.to_owned()).collect(),
+      search_directory: $search_directory.map(str::to_owned),
+      arguments: $arguments.iter().cloned().map(str::to_owned).collect(),
     },
   )
 }

View File

@@ -128,7 +128,7 @@ impl<'src, D> Recipe<'src, D> {
       // add blank lines so that lines in the generated script have the same line
      // number as the corresponding lines in the justfile
       for _ in 1..(self.line_number() + 2) {
-        text += "\n"
+        text += "\n";
       }
       for line in &evaluated_lines[1..] {
         text += line;
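
The comment in this hunk explains why the generated script is padded with newlines; a minimal sketch of that idea, assuming a simplified padded_script helper (not just's actual Recipe code, which also accounts for the shebang line):

// Pad the generated script so that its line numbers match the justfile,
// keeping line numbers in shell error messages meaningful.
fn padded_script(first_body_line: usize, body: &[&str]) -> String {
  let mut text = String::new();
  for _ in 1..first_body_line {
    text += "\n";
  }
  for &line in body {
    text += line;
    text += "\n";
  }
  text
}

fn main() {
  // A recipe body that starts on line 4 of the justfile is preceded by
  // three blank lines, so "echo hello" is line 4 of the script as well.
  let script = padded_script(4, &["echo hello", "echo goodbye"]);
  assert_eq!(script.lines().nth(3), Some("echo hello"));
}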

View File

@@ -23,8 +23,8 @@ impl<'src, 'run> Scope<'src, 'run> {
   pub(crate) fn bind(&mut self, export: bool, name: Name<'src>, value: String) {
     self.bindings.insert(Binding {
-      name,
       export,
+      name,
       value,
     });
   }

View File

@@ -203,9 +203,7 @@ mod tests {
   fn not_found() {
     let tmp = testing::tempdir();
     match Search::justfile(tmp.path()) {
-      Err(SearchError::NotFound) => {
-        assert!(true);
-      },
+      Err(SearchError::NotFound) => {},
       _ => panic!("No justfile found error was expected"),
     }
   }
@@ -218,16 +216,14 @@ mod tests {
     fs::write(&path, "default:\n\techo ok").unwrap();
     path.pop();
     path.push(FILENAME.to_uppercase());
-    if let Ok(_) = fs::File::open(path.as_path()) {
+    if fs::File::open(path.as_path()).is_ok() {
       // We are in case-insensitive file system
       return;
     }
     fs::write(&path, "default:\n\techo ok").unwrap();
     path.pop();
     match Search::justfile(path.as_path()) {
-      Err(SearchError::MultipleCandidates { .. }) => {
-        assert!(true);
-      },
+      Err(SearchError::MultipleCandidates { .. }) => {},
       _ => panic!("Multiple candidates error was expected"),
     }
   }
@@ -239,11 +235,8 @@ mod tests {
     path.push(FILENAME);
     fs::write(&path, "default:\n\techo ok").unwrap();
     path.pop();
-    match Search::justfile(path.as_path()) {
-      Ok(_path) => {
-        assert!(true);
-      },
-      _ => panic!("No errors were expected"),
+    if let Err(err) = Search::justfile(path.as_path()) {
+      panic!("No errors were expected: {}", err);
     }
   }
@@ -265,11 +258,8 @@ mod tests {
     path.push(spongebob_case);
     fs::write(&path, "default:\n\techo ok").unwrap();
     path.pop();
-    match Search::justfile(path.as_path()) {
-      Ok(_path) => {
-        assert!(true);
-      },
-      _ => panic!("No errors were expected"),
+    if let Err(err) = Search::justfile(path.as_path()) {
+      panic!("No errors were expected: {}", err);
     }
   }
@@ -284,11 +274,8 @@ mod tests {
     fs::create_dir(&path).expect("test justfile search: failed to create intermediary directory");
     path.push("b");
     fs::create_dir(&path).expect("test justfile search: failed to create intermediary directory");
-    match Search::justfile(path.as_path()) {
-      Ok(_path) => {
-        assert!(true);
-      },
-      _ => panic!("No errors were expected"),
+    if let Err(err) = Search::justfile(path.as_path()) {
+      panic!("No errors were expected: {}", err);
     }
   }
@@ -312,7 +299,7 @@ mod tests {
         path.push(FILENAME);
         assert_eq!(found_path, path);
       },
-      _ => panic!("No errors were expected"),
+      Err(err) => panic!("No errors were expected: {}", err),
     }
   }

View File

@@ -46,6 +46,6 @@ mod tests {
     assert_eq!(
       error.to_string(),
       "Multiple candidate justfiles found in `/foo`: `justfile` and `JUSTFILE`"
-    )
+    );
   }
 }

View File

@@ -2,8 +2,8 @@ use crate::common::*;
 
 #[derive(Debug, PartialEq, Clone, Copy, Ord, PartialOrd, Eq)]
 pub(crate) struct StringKind {
-  indented: bool,
   delimiter: StringDelimiter,
+  indented: bool,
 }
 
 #[derive(Debug, PartialEq, Clone, Copy, Ord, PartialOrd, Eq)]

View File

@@ -56,7 +56,7 @@ impl Summary {
         .into_iter()
         .map(|(name, recipe)| {
           (
-            name.to_string(),
+            name.to_owned(),
             Recipe::new(&recipe, aliases.remove(name).unwrap_or_default()),
           )
         })
@@ -64,7 +64,7 @@ impl Summary {
       assignments: justfile
         .assignments
         .iter()
-        .map(|(name, assignment)| (name.to_string(), Assignment::new(assignment)))
+        .map(|(name, assignment)| ((*name).to_owned(), Assignment::new(assignment)))
         .collect(),
     }
   }
@@ -213,7 +213,7 @@ impl Expression {
     use full::Expression::*;
     match expression {
       Backtick { contents, .. } => Expression::Backtick {
-        command: (*contents).to_owned(),
+        command: (*contents).clone(),
       },
       Call { thunk } => match thunk {
         full::Thunk::Nullary { name, .. } => Expression::Call {
@@ -249,7 +249,7 @@ impl Expression {
         inverted: *inverted,
       },
       StringLiteral { string_literal } => Expression::String {
-        text: string_literal.cooked.to_string(),
+        text: string_literal.cooked.clone(),
       },
       Variable { name, .. } => Expression::Variable {
         name: name.lexeme().to_owned(),

View File

@@ -25,8 +25,8 @@ pub(crate) fn search(config: &Config) -> Search {
   let justfile = working_directory.join(crate::search::FILENAME);
 
   Search {
-    working_directory,
     justfile,
+    working_directory,
   }
 }

View File

@@ -70,7 +70,7 @@ impl<'src> Token<'src> {
           f,
           "internal error: Error has invalid line number: {}",
           line_number
-        )?
+        )?;
       },
     }
     Ok(())