Mirror of https://github.com/casey/just.git (synced 2024-11-23 02:44:56 +03:00)
Fix bang lexing and placate clippy (#821)
parent 8b9a5e6010
commit 7bc9d3986e
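The bang-lexing half of the change is pinned down by the new `bang_eof` and `bang_unexpected` test cases in src/lexer.rs below: after consuming `!` the lexer expects `=`, and it now reports end-of-file or an unexpected character instead of mis-lexing the input. A minimal, self-contained sketch of that decision, with simplified types rather than the crate's real Lexer API:

    // Illustrative sketch only — simplified types, not just's actual Lexer.
    // After `!` the lexer wants `=`; anything else is an error, and the commit
    // distinguishes end-of-file from an unexpected character.
    #[derive(Debug, PartialEq)]
    enum BangLex {
      BangEquals,                              // lexed `!=`
      UnexpectedEndOfToken { expected: char }, // input "!"  (the bang_eof test)
      UnexpectedCharacter { expected: char },  // input "!%" (the bang_unexpected test)
    }

    fn lex_bang(rest: &str) -> BangLex {
      match rest.chars().next() {
        Some('=') => BangLex::BangEquals,
        Some(_) => BangLex::UnexpectedCharacter { expected: '=' },
        None => BangLex::UnexpectedEndOfToken { expected: '=' },
      }
    }

    fn main() {
      assert_eq!(lex_bang("= 'b'"), BangLex::BangEquals);
      assert_eq!(lex_bang(""), BangLex::UnexpectedEndOfToken { expected: '=' });
      assert_eq!(lex_bang("%"), BangLex::UnexpectedCharacter { expected: '=' });
    }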
.github/workflows/build.yaml (vendored, 2 changes)
@@ -75,7 +75,7 @@ jobs:
       run: cargo test --all --verbose

     - name: Clippy
-      run: cargo clippy --all
+      run: cargo clippy --all --all-targets --all-features

     - name: Lint
       if: matrix.os != 'windows-2016'
justfile (2 changes)
@@ -113,7 +113,7 @@ install-dev-deps-homebrew:

 # everyone's favorite animate paper clip
 clippy:
-  cargo clippy
+  cargo clippy --all --all-targets --all-features

 # count non-empty lines of code
 sloc:
@@ -223,6 +223,9 @@ impl Display for CompilationError<'_> {
       UnknownStartOfToken => {
         writeln!(f, "Unknown start of token:")?;
       },
+      UnexpectedEndOfToken { expected } => {
+        writeln!(f, "Expected character `{}` but found end-of-file", expected)?;
+      },
       MismatchedClosingDelimiter {
         open,
         open_line,
@@ -95,6 +95,9 @@ pub(crate) enum CompilationErrorKind<'src> {
   UnexpectedCharacter {
     expected: char,
   },
+  UnexpectedEndOfToken {
+    expected: char,
+  },
   UnknownSetting {
     setting: &'src str,
   },
@@ -340,7 +340,7 @@ impl Config {
     let positional = Positional::from_values(matches.values_of(arg::ARGUMENTS));

     for (name, value) in positional.overrides {
-      overrides.insert(name.to_owned(), value.to_owned());
+      overrides.insert(name.clone(), value.clone());
     }

     let search_config = {
@@ -798,7 +798,7 @@ impl Config {
     arguments: &[String],
   ) -> Result<(), i32> {
     if let Err(error) = InterruptHandler::install(self.verbosity) {
-      warn!("Failed to set CTRL-C handler: {}", error)
+      warn!("Failed to set CTRL-C handler: {}", error);
     }

     let result = justfile.run(&self, search, overrides, arguments);
@@ -851,7 +851,7 @@ impl Config {
       if i > 0 {
         print!(" ");
       }
-      print!("{}", assignment.name)
+      print!("{}", assignment.name);
     }
     println!();
   }
@@ -958,7 +958,7 @@ ARGS:
       $(dry_run: $dry_run,)?
       $(highlight: $highlight,)?
       $(search_config: $search_config,)?
-      $(shell: $shell.to_string(),)?
+      $(shell: $shell.to_owned(),)?
       $(shell_args: $shell_args,)?
       $(shell_present: $shell_present,)?
       $(subcommand: $subcommand,)?
@@ -1411,7 +1411,7 @@ ARGS:
     name: arguments_leading_equals,
     args: ["=foo"],
     subcommand: Subcommand::Run {
-      arguments: vec!["=foo".to_string()],
+      arguments: vec!["=foo".to_owned()],
       overrides: map!{},
     },
   }
@@ -29,6 +29,6 @@ mod tests {

   #[test]
   fn tick() {
-    assert_eq!(Enclosure::tick("foo").to_string(), "`foo`")
+    assert_eq!(Enclosure::tick("foo").to_string(), "`foo`");
   }
 }
@@ -15,7 +15,7 @@ impl<'src> Justfile<'src> {
     for recipe in self.recipes.values() {
       if let Some(first_recipe) = first {
         if recipe.line_number() < first_recipe.line_number() {
-          first = Some(recipe)
+          first = Some(recipe);
         }
       } else {
         first = Some(recipe);
@@ -134,7 +134,7 @@ impl<'src> Justfile<'src> {
       } else {
         return Err(RuntimeError::EvalUnknownVariable {
           suggestion: self.suggest_variable(&variable),
-          variable: variable.to_owned(),
+          variable: variable.clone(),
         });
       }
     } else {
@@ -224,7 +224,7 @@ impl<'src> Justfile<'src> {

     let mut ran = BTreeSet::new();
     for (recipe, arguments) in grouped {
-      self.run_recipe(&context, recipe, arguments, &dotenv, &search, &mut ran)?
+      self.run_recipe(&context, recipe, arguments, &dotenv, &search, &mut ran)?;
     }

     Ok(())
@@ -235,13 +235,11 @@ impl<'src> Justfile<'src> {
   }

   pub(crate) fn get_recipe(&self, name: &str) -> Option<&Recipe<'src>> {
-    if let Some(recipe) = self.recipes.get(name) {
-      Some(recipe)
-    } else if let Some(alias) = self.aliases.get(name) {
-      Some(alias.target.as_ref())
-    } else {
-      None
-    }
+    self
+      .recipes
+      .get(name)
+      .map(Rc::as_ref)
+      .or_else(|| self.aliases.get(name).map(|alias| alias.target.as_ref()))
   }

   fn run_recipe<'run>(
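The get_recipe rewrite above is behavior-preserving: the Option combinator chain does the same recipe-then-alias lookup as the removed if/else ladder. A standalone sketch with stand-in types (a BTreeMap of Rc values chosen only to mirror the shape of the real code, not the crate's actual structs):

    // Illustrative sketch only: look up a recipe by name, falling back to aliases.
    use std::collections::BTreeMap;
    use std::rc::Rc;

    struct Recipe { name: &'static str }
    struct Alias { target: Rc<Recipe> }

    fn get_recipe<'a>(
      recipes: &'a BTreeMap<&'static str, Rc<Recipe>>,
      aliases: &'a BTreeMap<&'static str, Alias>,
      name: &str,
    ) -> Option<&'a Recipe> {
      recipes
        .get(name)
        .map(Rc::as_ref)
        .or_else(|| aliases.get(name).map(|alias| alias.target.as_ref()))
    }

    fn main() {
      let mut recipes = BTreeMap::new();
      recipes.insert("build", Rc::new(Recipe { name: "build" }));
      let mut aliases = BTreeMap::new();
      aliases.insert("b", Alias { target: Rc::clone(&recipes["build"]) });

      // Direct hit, alias hit, and miss all behave like the old if/else version.
      assert_eq!(get_recipe(&recipes, &aliases, "build").unwrap().name, "build");
      assert_eq!(get_recipe(&recipes, &aliases, "b").unwrap().name, "build");
      assert!(get_recipe(&recipes, &aliases, "missing").is_none());
    }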
@@ -599,9 +597,9 @@ mod tests {
     "#,
     args: ["--quiet", "wut"],
     error: Code {
-      code: _,
       line_number,
       recipe,
+      ..
     },
     check: {
       assert_eq!(recipe, "wut");
src/lexer.rs (51 changes)
@@ -162,9 +162,14 @@ impl<'src> Lexer<'src> {
     self.next_is('\n') || self.rest_starts_with("\r\n")
   }

+  /// Are we at end-of-file?
+  fn at_eof(&self) -> bool {
+    self.rest().is_empty()
+  }
+
   /// Are we at end-of-line or end-of-file?
   fn at_eol_or_eof(&self) -> bool {
-    self.at_eol() || self.rest().is_empty()
+    self.at_eol() || self.at_eof()
   }

   /// Get current indentation
@@ -293,11 +298,11 @@ impl<'src> Lexer<'src> {
       match self.next {
         Some(first) => {
           if let Some(&interpolation_start) = self.interpolation_stack.last() {
-            self.lex_interpolation(interpolation_start, first)?
+            self.lex_interpolation(interpolation_start, first)?;
           } else if self.recipe_body {
-            self.lex_body()?
+            self.lex_body()?;
           } else {
-            self.lex_normal(first)?
+            self.lex_normal(first)?;
           };
         },
         None => break,
@@ -558,7 +563,7 @@ impl<'src> Lexer<'src> {
         break Interpolation;
       }

-      if self.rest().is_empty() {
+      if self.at_eof() {
         break EndOfFile;
       }

@@ -684,6 +689,11 @@ impl<'src> Lexer<'src> {
     } else {
       // Emit an unspecified token to consume the current character,
       self.token(Unspecified);
+
+      if self.at_eof() {
+        return Err(self.error(UnexpectedEndOfToken { expected: '=' }));
+      }
+
       // …and advance past another character,
       self.advance()?;
       // …so that the error we produce highlights the unexpected character.
@@ -759,7 +769,7 @@ impl<'src> Lexer<'src> {
   /// Lex whitespace: [ \t]+
   fn lex_whitespace(&mut self) -> CompilationResult<'src, ()> {
     while self.next_is_whitespace() {
-      self.advance()?
+      self.advance()?;
     }

     self.token(Whitespace);
@@ -870,10 +880,7 @@ mod tests {
       .map(|token| token.kind)
       .collect::<Vec<TokenKind>>();

-    let have_lexemes = have
-      .iter()
-      .map(|token| token.lexeme())
-      .collect::<Vec<&str>>();
+    let have_lexemes = have.iter().map(Token::lexeme).collect::<Vec<&str>>();

     assert_eq!(have_kinds, want_kinds, "Token kind mismatch");
     assert_eq!(have_lexemes, want_lexemes, "Token lexeme mismatch");
@@ -2226,6 +2233,30 @@ mod tests {
     },
   }

+  error! {
+    name: bang_eof,
+    input: "!",
+    offset: 1,
+    line: 0,
+    column: 1,
+    width: 0,
+    kind: UnexpectedEndOfToken {
+      expected: '=',
+    },
+  }
+
+  error! {
+    name: bang_unexpected,
+    input: "!%",
+    offset: 1,
+    line: 0,
+    column: 1,
+    width: 1,
+    kind: UnexpectedCharacter {
+      expected: '=',
+    },
+  }
+
   #[test]
   fn presume_error() {
     assert_matches!(
@@ -2,8 +2,12 @@
 #![allow(
   clippy::blanket_clippy_restriction_lints,
   clippy::comparison_chain,
   clippy::create_dir,
   clippy::default_numeric_fallback,
   clippy::else_if_without_else,
   clippy::enum_glob_use,
   clippy::exhaustive_enums,
   clippy::exhaustive_structs,
   clippy::expect_used,
   clippy::filter_map,
   clippy::if_not_else,
src/node.rs (14 changes)
@@ -62,9 +62,9 @@ impl<'src> Node<'src> for Expression<'src> {
         let mut tree = Tree::atom(Keyword::If.lexeme());
         tree.push_mut(lhs.tree());
         if *inverted {
-          tree.push_mut("!=")
+          tree.push_mut("!=");
         } else {
-          tree.push_mut("==")
+          tree.push_mut("==");
         }
         tree.push_mut(rhs.tree());
         tree.push_mut(then.tree());
@@ -72,9 +72,10 @@ impl<'src> Node<'src> for Expression<'src> {
         tree
       },
       Expression::Call { thunk } => {
+        use Thunk::*;
+
         let mut tree = Tree::atom("call");

-        use Thunk::*;
         match thunk {
           Nullary { name, .. } => tree.push_mut(name.lexeme()),
           Unary { name, arg, .. } => {
|
||||
|
||||
impl<'src> Node<'src> for Parameter<'src> {
|
||||
fn tree(&self) -> Tree<'src> {
|
||||
let mut children = Vec::new();
|
||||
children.push(Tree::atom(self.name.lexeme()));
|
||||
let mut children = vec![Tree::atom(self.name.lexeme())];
|
||||
|
||||
if let Some(default) = &self.default {
|
||||
children.push(default.tree());
|
||||
@@ -185,11 +185,11 @@ impl<'src> Node<'src> for Fragment<'src> {

 impl<'src> Node<'src> for Set<'src> {
   fn tree(&self) -> Tree<'src> {
-    let mut set = Tree::atom(Keyword::Set.lexeme());
+    use Setting::*;
+
+    let mut set = Tree::atom(Keyword::Set.lexeme());
     set.push_mut(self.name.lexeme().replace('-', "_"));

-    use Setting::*;
     match &self.value {
       DotenvLoad(value) | Export(value) | PositionalArguments(value) =>
         set.push_mut(value.to_string()),
@@ -387,8 +387,8 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
     let value = self.parse_expression()?;
     self.expect_eol()?;
     Ok(Assignment {
-      name,
       export,
+      name,
       value,
     })
   }
@@ -530,7 +530,7 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
       unindented
     };

-    Ok(StringLiteral { cooked, raw, kind })
+    Ok(StringLiteral { kind, raw, cooked })
   }

   /// Parse a name from an identifier token
@@ -116,8 +116,8 @@ mod tests {
           .cloned()
           .map(|(key, value): (&str, &str)| (key.to_owned(), value.to_owned()))
           .collect(),
-        search_directory: $search_directory.map(|dir: &str| dir.to_owned()),
-        arguments: $arguments.iter().cloned().map(|arg: &str| arg.to_owned()).collect(),
+        search_directory: $search_directory.map(str::to_owned),
+        arguments: $arguments.iter().cloned().map(str::to_owned).collect(),
       },
     )
   }
@@ -128,7 +128,7 @@ impl<'src, D> Recipe<'src, D> {
       // add blank lines so that lines in the generated script have the same line
       // number as the corresponding lines in the justfile
       for _ in 1..(self.line_number() + 2) {
-        text += "\n"
+        text += "\n";
       }
       for line in &evaluated_lines[1..] {
         text += line;
@@ -23,8 +23,8 @@ impl<'src, 'run> Scope<'src, 'run> {

   pub(crate) fn bind(&mut self, export: bool, name: Name<'src>, value: String) {
     self.bindings.insert(Binding {
-      name,
       export,
+      name,
       value,
     });
   }
@@ -203,9 +203,7 @@ mod tests {
   fn not_found() {
     let tmp = testing::tempdir();
     match Search::justfile(tmp.path()) {
-      Err(SearchError::NotFound) => {
-        assert!(true);
-      },
+      Err(SearchError::NotFound) => {},
       _ => panic!("No justfile found error was expected"),
     }
   }
@@ -218,16 +216,14 @@ mod tests {
     fs::write(&path, "default:\n\techo ok").unwrap();
     path.pop();
     path.push(FILENAME.to_uppercase());
-    if let Ok(_) = fs::File::open(path.as_path()) {
+    if fs::File::open(path.as_path()).is_ok() {
       // We are in case-insensitive file system
       return;
     }
     fs::write(&path, "default:\n\techo ok").unwrap();
     path.pop();
     match Search::justfile(path.as_path()) {
-      Err(SearchError::MultipleCandidates { .. }) => {
-        assert!(true);
-      },
+      Err(SearchError::MultipleCandidates { .. }) => {},
       _ => panic!("Multiple candidates error was expected"),
     }
   }
@@ -239,11 +235,8 @@ mod tests {
     path.push(FILENAME);
     fs::write(&path, "default:\n\techo ok").unwrap();
     path.pop();
-    match Search::justfile(path.as_path()) {
-      Ok(_path) => {
-        assert!(true);
-      },
-      _ => panic!("No errors were expected"),
+    if let Err(err) = Search::justfile(path.as_path()) {
+      panic!("No errors were expected: {}", err);
     }
   }
@@ -265,11 +258,8 @@ mod tests {
     path.push(spongebob_case);
     fs::write(&path, "default:\n\techo ok").unwrap();
     path.pop();
-    match Search::justfile(path.as_path()) {
-      Ok(_path) => {
-        assert!(true);
-      },
-      _ => panic!("No errors were expected"),
+    if let Err(err) = Search::justfile(path.as_path()) {
+      panic!("No errors were expected: {}", err);
     }
   }
@@ -284,11 +274,8 @@ mod tests {
     fs::create_dir(&path).expect("test justfile search: failed to create intermediary directory");
     path.push("b");
     fs::create_dir(&path).expect("test justfile search: failed to create intermediary directory");
-    match Search::justfile(path.as_path()) {
-      Ok(_path) => {
-        assert!(true);
-      },
-      _ => panic!("No errors were expected"),
+    if let Err(err) = Search::justfile(path.as_path()) {
+      panic!("No errors were expected: {}", err);
     }
   }
@@ -312,7 +299,7 @@ mod tests {
         path.push(FILENAME);
         assert_eq!(found_path, path);
       },
-      _ => panic!("No errors were expected"),
+      Err(err) => panic!("No errors were expected: {}", err),
     }
   }

@@ -46,6 +46,6 @@ mod tests {
     assert_eq!(
       error.to_string(),
       "Multiple candidate justfiles found in `/foo`: `justfile` and `JUSTFILE`"
-    )
+    );
   }
 }
@@ -2,8 +2,8 @@ use crate::common::*;

 #[derive(Debug, PartialEq, Clone, Copy, Ord, PartialOrd, Eq)]
 pub(crate) struct StringKind {
-  indented: bool,
   delimiter: StringDelimiter,
+  indented: bool,
 }

 #[derive(Debug, PartialEq, Clone, Copy, Ord, PartialOrd, Eq)]
@@ -56,7 +56,7 @@ impl Summary {
         .into_iter()
         .map(|(name, recipe)| {
           (
-            name.to_string(),
+            name.to_owned(),
             Recipe::new(&recipe, aliases.remove(name).unwrap_or_default()),
           )
         })
@@ -64,7 +64,7 @@ impl Summary {
       assignments: justfile
         .assignments
         .iter()
-        .map(|(name, assignment)| (name.to_string(), Assignment::new(assignment)))
+        .map(|(name, assignment)| ((*name).to_owned(), Assignment::new(assignment)))
         .collect(),
     }
   }
@@ -213,7 +213,7 @@ impl Expression {
     use full::Expression::*;
     match expression {
       Backtick { contents, .. } => Expression::Backtick {
-        command: (*contents).to_owned(),
+        command: (*contents).clone(),
       },
       Call { thunk } => match thunk {
         full::Thunk::Nullary { name, .. } => Expression::Call {
@@ -249,7 +249,7 @@ impl Expression {
         inverted: *inverted,
       },
       StringLiteral { string_literal } => Expression::String {
-        text: string_literal.cooked.to_string(),
+        text: string_literal.cooked.clone(),
       },
       Variable { name, .. } => Expression::Variable {
         name: name.lexeme().to_owned(),
@@ -25,8 +25,8 @@ pub(crate) fn search(config: &Config) -> Search {
   let justfile = working_directory.join(crate::search::FILENAME);

   Search {
-    working_directory,
     justfile,
+    working_directory,
   }
 }

@@ -70,7 +70,7 @@ impl<'src> Token<'src> {
           f,
           "internal error: Error has invalid line number: {}",
           line_number
-        )?
+        )?;
       },
     }
     Ok(())