mirror of https://github.com/casey/just.git synced 2024-11-22 02:09:44 +03:00

Use unstable rustfmt configuration options (#592)

Casey Rodarmor 2020-02-10 20:07:06 -08:00 committed by GitHub
parent aceee3e217
commit 3ec7dea4a3
62 changed files with 569 additions and 550 deletions

.gitattributes
View File

@ -1 +1 @@
Cargo.lock linguist-generated diff=nodiff
* -text

View File

@ -20,7 +20,7 @@ jobs:
runs-on: ${{matrix.os}}
steps:
- uses: actions/checkout@v1
- name: Install
- name: Install Main Toolchain
uses: actions-rs/toolchain@v1
with:
toolchain: stable
@ -42,8 +42,15 @@ jobs:
- name: Lint
if: matrix.os != 'windows-latest'
run: cargo run lint
- name: Install Rustfmt Toolchain
uses: actions-rs/toolchain@v1
with:
toolchain: nightly
target: ${{ matrix.target }}
profile: minimal
components: rustfmt
- name: Format
run: cargo fmt --all -- --check
run: cargo +nightly fmt --all -- --check
- name: Completion Scripts
if: matrix.os != 'windows-latest'
run: |

View File

@ -33,6 +33,9 @@ build:
check:
cargo check
fmt:
cargo +nightly fmt --all
watch +COMMAND='test':
cargo watch --clear --exec "{{COMMAND}}"

View File

@ -1,2 +1,22 @@
tab_spaces = 2
max_width = 100
comment_width = 70
edition = "2018"
error_on_line_overflow = true
error_on_unformatted = true
format_code_in_doc_comments = true
format_macro_bodies = true
format_strings = true
match_arm_blocks = false
match_block_trailing_comma = true
max_width = 100
merge_imports = true
newline_style = "Unix"
normalize_comments = true
overflow_delimited_expr = true
reorder_impl_items = true
required_version = "1.4.11"
struct_field_align_threshold = 20
tab_spaces = 2
unstable_features = true
use_field_init_shorthand = true
use_try_shorthand = true
wrap_comments = true
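
The options above are nightly-only (unstable_features = true, pinned via required_version = "1.4.11"), which is why the justfile recipe and the CI step in this commit invoke rustfmt through the nightly toolchain. A minimal sketch of applying the same configuration locally, assuming rustup is available:

rustup toolchain install nightly --component rustfmt   # one-time setup
cargo +nightly fmt --all                                # rewrite files using rustfmt.toml
cargo +nightly fmt --all -- --check                     # verify only, as the Format CI step does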

View File

@ -3,7 +3,7 @@ use crate::common::*;
/// An alias, e.g. `name := target`
#[derive(Debug, PartialEq)]
pub(crate) struct Alias<'src, T = Rc<Recipe<'src>>> {
pub(crate) name: Name<'src>,
pub(crate) name: Name<'src>,
pub(crate) target: T,
}

View File

@ -3,10 +3,10 @@ use crate::common::*;
use CompilationErrorKind::*;
pub(crate) struct Analyzer<'src> {
recipes: Table<'src, UnresolvedRecipe<'src>>,
recipes: Table<'src, UnresolvedRecipe<'src>>,
assignments: Table<'src, Assignment<'src>>,
aliases: Table<'src, Alias<'src, Name<'src>>>,
sets: Table<'src, Set<'src>>,
aliases: Table<'src, Alias<'src, Name<'src>>>,
sets: Table<'src, Set<'src>>,
}
impl<'src> Analyzer<'src> {
@ -18,10 +18,10 @@ impl<'src> Analyzer<'src> {
pub(crate) fn new() -> Analyzer<'src> {
Analyzer {
recipes: empty(),
recipes: empty(),
assignments: empty(),
aliases: empty(),
sets: empty(),
aliases: empty(),
sets: empty(),
}
}
@ -34,19 +34,19 @@ impl<'src> Analyzer<'src> {
Item::Alias(alias) => {
self.analyze_alias(&alias)?;
self.aliases.insert(alias);
}
},
Item::Assignment(assignment) => {
self.analyze_assignment(&assignment)?;
self.assignments.insert(assignment);
}
},
Item::Recipe(recipe) => {
self.analyze_recipe(&recipe)?;
self.recipes.insert(recipe);
}
},
Item::Set(set) => {
self.analyze_set(&set)?;
self.sets.insert(set);
}
},
}
}
@ -78,7 +78,7 @@ impl<'src> Analyzer<'src> {
Setting::Shell(shell) => {
assert!(settings.shell.is_none());
settings.shell = Some(shell);
}
},
}
}
@ -95,7 +95,7 @@ impl<'src> Analyzer<'src> {
if let Some(original) = self.recipes.get(recipe.name.lexeme()) {
return Err(recipe.name.token().error(DuplicateRecipe {
recipe: original.name(),
first: original.line_number(),
first: original.line_number(),
}));
}
@ -105,7 +105,7 @@ impl<'src> Analyzer<'src> {
for parameter in &recipe.parameters {
if parameters.contains(parameter.name.lexeme()) {
return Err(parameter.name.token().error(DuplicateParameter {
recipe: recipe.name.lexeme(),
recipe: recipe.name.lexeme(),
parameter: parameter.name.lexeme(),
}));
}
@ -169,7 +169,7 @@ impl<'src> Analyzer<'src> {
if let Some(original) = self.sets.get(set.name.lexeme()) {
return Err(set.name.error(DuplicateSet {
setting: original.name.lexeme(),
first: original.name.line,
first: original.name.line,
}));
}
@ -184,7 +184,7 @@ impl<'src> Analyzer<'src> {
// Make sure the alias doesn't conflict with any recipe
if let Some(recipe) = recipes.get(alias.name.lexeme()) {
return Err(token.error(AliasShadowsRecipe {
alias: alias.name.lexeme(),
alias: alias.name.lexeme(),
recipe_line: recipe.line_number(),
}));
}
@ -193,7 +193,7 @@ impl<'src> Analyzer<'src> {
match recipes.get(alias.target.lexeme()) {
Some(target) => Ok(alias.resolve(Rc::clone(target))),
None => Err(token.error(UnknownAliasTarget {
alias: alias.name.lexeme(),
alias: alias.name.lexeme(),
target: alias.target.lexeme(),
})),
}

View File

@ -4,9 +4,9 @@ use CompilationErrorKind::*;
pub(crate) struct AssignmentResolver<'src: 'run, 'run> {
assignments: &'run Table<'src, Assignment<'src>>,
stack: Vec<&'src str>,
seen: BTreeSet<&'src str>,
evaluated: BTreeSet<&'src str>,
stack: Vec<&'src str>,
seen: BTreeSet<&'src str>,
evaluated: BTreeSet<&'src str>,
}
impl<'src: 'run, 'run> AssignmentResolver<'src, 'run> {
@ -41,12 +41,12 @@ impl<'src: 'run, 'run> AssignmentResolver<'src, 'run> {
} else {
let message = format!("attempted to resolve unknown assignment `{}`", name);
let token = Token {
src: "",
src: "",
offset: 0,
line: 0,
line: 0,
column: 0,
length: 0,
kind: TokenKind::Unspecified,
kind: TokenKind::Unspecified,
};
return Err(CompilationError {
kind: Internal { message },
@ -74,19 +74,19 @@ impl<'src: 'run, 'run> AssignmentResolver<'src, 'run> {
} else {
Err(name.token().error(UndefinedVariable { variable }))
}
}
},
Expression::Call { thunk } => match thunk {
Thunk::Nullary { .. } => Ok(()),
Thunk::Unary { arg, .. } => self.resolve_expression(arg),
Thunk::Binary { args: [a, b], .. } => {
self.resolve_expression(a)?;
self.resolve_expression(b)
}
},
},
Expression::Concatination { lhs, rhs } => {
self.resolve_expression(lhs)?;
self.resolve_expression(rhs)
}
},
Expression::StringLiteral { .. } | Expression::Backtick { .. } => Ok(()),
Expression::Group { contents } => self.resolve_expression(contents),
}

View File

@ -6,9 +6,9 @@ pub(crate) struct Binding<'src, V = String> {
/// Export binding as an environment variable to child processes
pub(crate) export: bool,
/// Binding name
pub(crate) name: Name<'src>,
pub(crate) name: Name<'src>,
/// Binding value
pub(crate) value: V,
pub(crate) value: V,
}
impl<'src, V> Keyed<'src> for Binding<'src, V> {

View File

@ -1,14 +1,13 @@
use crate::common::*;
use ansi_term::Color::*;
use ansi_term::{ANSIGenericString, Prefix, Style, Suffix};
use ansi_term::{ANSIGenericString, Color::*, Prefix, Style, Suffix};
use atty::Stream;
#[derive(Copy, Clone, Debug, PartialEq)]
pub(crate) struct Color {
use_color: UseColor,
atty: bool,
style: Style,
atty: bool,
style: Style,
}
impl Color {
@ -129,8 +128,8 @@ impl Default for Color {
fn default() -> Self {
Self {
use_color: UseColor::Auto,
atty: false,
style: Style::new(),
atty: false,
style: Style::new(),
}
}
}

View File

@ -3,7 +3,7 @@ use crate::common::*;
#[derive(Debug, PartialEq)]
pub(crate) struct CompilationError<'src> {
pub(crate) token: Token<'src>,
pub(crate) kind: CompilationErrorKind<'src>,
pub(crate) kind: CompilationErrorKind<'src>,
}
impl Error for CompilationError<'_> {}
@ -25,8 +25,8 @@ impl Display for CompilationError<'_> {
alias,
recipe_line.ordinal(),
)?;
}
CircularRecipeDependency { recipe, ref circle } => {
},
CircularRecipeDependency { recipe, ref circle } =>
if circle.len() == 2 {
writeln!(f, "Recipe `{}` depends on itself", recipe)?;
} else {
@ -36,12 +36,11 @@ impl Display for CompilationError<'_> {
recipe,
circle.join(" -> ")
)?;
}
}
},
CircularVariableDependency {
variable,
ref circle,
} => {
} =>
if circle.len() == 2 {
writeln!(f, "Variable `{}` is defined in terms of itself", variable)?;
} else {
@ -51,8 +50,7 @@ impl Display for CompilationError<'_> {
variable,
circle.join(" -> ")
)?;
}
}
},
InvalidEscapeSequence { character } => {
let representation = match character {
@ -63,23 +61,23 @@ impl Display for CompilationError<'_> {
_ => character.escape_default().collect(),
};
writeln!(f, "`\\{}` is not a valid escape sequence", representation)?;
}
},
DuplicateParameter { recipe, parameter } => {
writeln!(
f,
"Recipe `{}` has duplicate parameter `{}`",
recipe, parameter
)?;
}
},
DuplicateVariable { variable } => {
writeln!(f, "Variable `{}` has multiple definitions", variable)?;
}
},
UnexpectedToken {
ref expected,
found,
} => {
writeln!(f, "Expected {}, but found {}", List::or(expected), found)?;
}
},
DuplicateAlias { alias, first } => {
writeln!(
f,
@ -88,7 +86,7 @@ impl Display for CompilationError<'_> {
first.ordinal(),
self.token.line.ordinal(),
)?;
}
},
DuplicateRecipe { recipe, first } => {
writeln!(
f,
@ -97,7 +95,7 @@ impl Display for CompilationError<'_> {
first.ordinal(),
self.token.line.ordinal()
)?;
}
},
DuplicateSet { setting, first } => {
writeln!(
f,
@ -106,7 +104,7 @@ impl Display for CompilationError<'_> {
first.ordinal(),
self.token.line.ordinal(),
)?;
}
},
DependencyArgumentCountMismatch {
dependency,
found,
@ -129,35 +127,35 @@ impl Display for CompilationError<'_> {
} else {
writeln!(f, "at most {} {}", max, Count("argument", max))?;
}
}
},
ParameterShadowsVariable { parameter } => {
writeln!(
f,
"Parameter `{}` shadows variable of the same name",
parameter
)?;
}
},
RequiredParameterFollowsDefaultParameter { parameter } => {
writeln!(
f,
"Non-default parameter `{}` follows default parameter",
parameter
)?;
}
},
ParameterFollowsVariadicParameter { parameter } => {
writeln!(f, "Parameter `{}` follows variadic parameter", parameter)?;
}
},
MixedLeadingWhitespace { whitespace } => {
writeln!(
f,
"Found a mix of tabs and spaces in leading whitespace: `{}`\n\
Leading whitespace may consist of tabs or spaces, but not both",
"Found a mix of tabs and spaces in leading whitespace: `{}`\nLeading whitespace may \
consist of tabs or spaces, but not both",
ShowWhitespace(whitespace)
)?;
}
},
ExtraLeadingWhitespace => {
writeln!(f, "Recipe line has extra leading whitespace")?;
}
},
FunctionArgumentCountMismatch {
function,
found,
@ -171,50 +169,50 @@ impl Display for CompilationError<'_> {
Count("argument", found),
expected
)?;
}
},
InconsistentLeadingWhitespace { expected, found } => {
writeln!(
f,
"Recipe line has inconsistent leading whitespace. \
Recipe started with `{}` but found line with `{}`",
"Recipe line has inconsistent leading whitespace. Recipe started with `{}` but found \
line with `{}`",
ShowWhitespace(expected),
ShowWhitespace(found)
)?;
}
},
UnknownAliasTarget { alias, target } => {
writeln!(f, "Alias `{}` has an unknown target `{}`", alias, target)?;
}
},
UnknownDependency { recipe, unknown } => {
writeln!(
f,
"Recipe `{}` has unknown dependency `{}`",
recipe, unknown
)?;
}
},
UndefinedVariable { variable } => {
writeln!(f, "Variable `{}` not defined", variable)?;
}
},
UnknownFunction { function } => {
writeln!(f, "Call to unknown function `{}`", function)?;
}
},
UnknownSetting { setting } => {
writeln!(f, "Unknown setting `{}`", setting)?;
}
},
UnknownStartOfToken => {
writeln!(f, "Unknown start of token:")?;
}
},
UnpairedCarriageReturn => {
writeln!(f, "Unpaired carriage return")?;
}
},
UnterminatedInterpolation => {
writeln!(f, "Unterminated interpolation")?;
}
},
UnterminatedString => {
writeln!(f, "Unterminated string")?;
}
},
UnterminatedBacktick => {
writeln!(f, "Unterminated backtick")?;
}
},
Internal { ref message } => {
writeln!(
f,
@ -222,7 +220,7 @@ impl Display for CompilationError<'_> {
consider filing an issue: https://github.com/casey/just/issues/new",
message
)?;
}
},
}
write!(f, "{}", message.suffix())?;

View File

@ -3,7 +3,7 @@ use crate::common::*;
#[derive(Debug, PartialEq)]
pub(crate) enum CompilationErrorKind<'src> {
AliasShadowsRecipe {
alias: &'src str,
alias: &'src str,
recipe_line: usize,
},
CircularRecipeDependency {
@ -12,42 +12,42 @@ pub(crate) enum CompilationErrorKind<'src> {
},
CircularVariableDependency {
variable: &'src str,
circle: Vec<&'src str>,
circle: Vec<&'src str>,
},
DependencyArgumentCountMismatch {
dependency: &'src str,
found: usize,
min: usize,
max: usize,
found: usize,
min: usize,
max: usize,
},
DuplicateAlias {
alias: &'src str,
first: usize,
},
DuplicateParameter {
recipe: &'src str,
recipe: &'src str,
parameter: &'src str,
},
DuplicateRecipe {
recipe: &'src str,
first: usize,
first: usize,
},
DuplicateVariable {
variable: &'src str,
},
DuplicateSet {
setting: &'src str,
first: usize,
first: usize,
},
ExtraLeadingWhitespace,
FunctionArgumentCountMismatch {
function: &'src str,
found: usize,
found: usize,
expected: usize,
},
InconsistentLeadingWhitespace {
expected: &'src str,
found: &'src str,
found: &'src str,
},
Internal {
message: String,
@ -72,14 +72,14 @@ pub(crate) enum CompilationErrorKind<'src> {
},
UnexpectedToken {
expected: Vec<TokenKind>,
found: TokenKind,
found: TokenKind,
},
UnknownAliasTarget {
alias: &'src str,
alias: &'src str,
target: &'src str,
},
UnknownDependency {
recipe: &'src str,
recipe: &'src str,
unknown: &'src str,
},
UnknownFunction {

View File

@ -9,17 +9,17 @@ pub(crate) const INIT_JUSTFILE: &str = "default:\n\techo 'Hello, world!'\n";
#[derive(Debug, PartialEq)]
pub(crate) struct Config {
pub(crate) color: Color,
pub(crate) dry_run: bool,
pub(crate) highlight: bool,
pub(crate) color: Color,
pub(crate) dry_run: bool,
pub(crate) highlight: bool,
pub(crate) invocation_directory: PathBuf,
pub(crate) quiet: bool,
pub(crate) search_config: SearchConfig,
pub(crate) shell: String,
pub(crate) shell_args: Vec<String>,
pub(crate) shell_present: bool,
pub(crate) subcommand: Subcommand,
pub(crate) verbosity: Verbosity,
pub(crate) quiet: bool,
pub(crate) search_config: SearchConfig,
pub(crate) shell: String,
pub(crate) shell_args: Vec<String>,
pub(crate) shell_present: bool,
pub(crate) subcommand: Subcommand,
pub(crate) verbosity: Verbosity,
}
mod cmd {
@ -279,17 +279,15 @@ impl Config {
match (justfile, working_directory) {
(None, None) => SearchConfig::FromInvocationDirectory,
(Some(justfile), None) => SearchConfig::WithJustfile { justfile },
(Some(justfile), Some(working_directory)) => {
(Some(justfile), Some(working_directory)) =>
SearchConfig::WithJustfileAndWorkingDirectory {
justfile,
working_directory,
}
}
(None, Some(_)) => {
},
(None, Some(_)) =>
return Err(ConfigError::internal(
"--working-directory set without --justfile",
))
}
)),
}
}
};
@ -297,26 +295,26 @@ impl Config {
for subcommand in cmd::ARGLESS {
if matches.is_present(subcommand) {
match (!overrides.is_empty(), !positional.arguments.is_empty()) {
(false, false) => {}
(false, false) => {},
(true, false) => {
return Err(ConfigError::SubcommandOverrides {
subcommand: format!("--{}", subcommand.to_lowercase()),
overrides,
});
}
},
(false, true) => {
return Err(ConfigError::SubcommandArguments {
subcommand: format!("--{}", subcommand.to_lowercase()),
arguments: positional.arguments,
arguments: positional.arguments,
});
}
},
(true, true) => {
return Err(ConfigError::SubcommandOverridesAndArguments {
subcommand: format!("--{}", subcommand.to_lowercase()),
arguments: positional.arguments,
overrides,
});
}
},
}
}
}
@ -343,7 +341,7 @@ impl Config {
if !positional.arguments.is_empty() {
return Err(ConfigError::SubcommandArguments {
subcommand: format!("--{}", cmd::EVALUATE.to_lowercase()),
arguments: positional.arguments,
arguments: positional.arguments,
});
}
Subcommand::Evaluate { overrides }
@ -455,14 +453,13 @@ impl Config {
.status();
match error {
Ok(status) => {
Ok(status) =>
if status.success() {
Ok(())
} else {
eprintln!("Editor `{}` failed: {}", editor.to_string_lossy(), status);
Err(status.code().unwrap_or(EXIT_FAILURE))
}
}
},
Err(error) => {
eprintln!(
"Editor `{}` invocation failed: {}",
@ -470,7 +467,7 @@ impl Config {
error
);
Err(EXIT_FAILURE)
}
},
}
}
@ -555,9 +552,10 @@ impl Config {
}
}
// Declaring this outside of the nested loops will probably be more efficient, but
// it creates all sorts of lifetime issues with variables inside the loops.
// If this is inlined like the docs say, it shouldn't make any difference.
// Declaring this outside of the nested loops will probably be more
// efficient, but it creates all sorts of lifetime issues with
// variables inside the loops. If this is inlined like the
// docs say, it shouldn't make any difference.
let print_doc = |doc| {
print!(
" {:padding$}{} {}",
@ -643,13 +641,15 @@ mod tests {
use pretty_assertions::assert_eq;
// This test guards against unintended changes to the argument parser. We should have
// proper tests for all the flags, but this will do for now.
// This test guards against unintended changes to the argument parser.
// We should have proper tests for all the flags, but this will do
// for now.
#[test]
fn help() {
const EXPECTED_HELP: &str = "just v0.5.8
Casey Rodarmor <casey@rodarmor.com>
🤖 Just a command runner - https://github.com/casey/just
🤖 Just a command runner \
- https://github.com/casey/just
USAGE:
just [FLAGS] [OPTIONS] [--] [ARGUMENTS]...
@ -658,8 +658,8 @@ FLAGS:
--clear-shell-args Clear shell arguments
--dry-run Print what just would do without doing it
--dump Print entire justfile
-e, --edit \
Edit justfile with editor given by $VISUAL or $EDITOR, falling back to `vim`
-e, --edit Edit justfile with editor given by $VISUAL or $EDITOR, falling back \
to `vim`
--evaluate Print evaluated variables
--highlight Highlight echoed recipe lines in bold
--init Initialize new justfile in project root
@ -674,22 +674,22 @@ OPTIONS:
Print colorful output [default: auto] [possible values: auto, always, never]
--completions <SHELL>
Print shell completion script for <SHELL> \
[possible values: zsh, bash, fish, powershell, elvish]
Print shell completion script for <SHELL> [possible values: zsh, bash, fish, \
powershell, elvish]
-f, --justfile <JUSTFILE> Use <JUSTFILE> as justfile.
--set <VARIABLE> <VALUE> Override <VARIABLE> with <VALUE>
--shell <SHELL> Invoke <SHELL> to run recipes [default: sh]
--shell-arg <SHELL-ARG>... \
Invoke shell with <SHELL-ARG> as an argument [default: -cu]
--shell-arg <SHELL-ARG>... Invoke shell with <SHELL-ARG> as an argument \
[default: -cu]
-s, --show <RECIPE> Show information about <RECIPE>
-d, --working-directory <WORKING-DIRECTORY>
Use <WORKING-DIRECTORY> as working directory. --justfile must also be set
ARGS:
<ARGUMENTS>... \
Overrides and recipe(s) to run, defaulting to the first recipe in the justfile";
<ARGUMENTS>... Overrides and recipe(s) to run, defaulting to the first recipe in the \
justfile";
let app = Config::app().setting(AppSettings::ColorNever);
let mut buffer = Vec::new();

View File

@ -23,7 +23,7 @@ pub(crate) enum ConfigError {
))]
SubcommandArguments {
subcommand: String,
arguments: Vec<String>,
arguments: Vec<String>,
},
#[snafu(display(
"`{}` used with unexpected overrides: {}; and arguments: {}",
@ -33,8 +33,8 @@ pub(crate) enum ConfigError {
]
SubcommandOverridesAndArguments {
subcommand: String,
overrides: BTreeMap<String, String>,
arguments: Vec<String>,
overrides: BTreeMap<String, String>,
arguments: Vec<String>,
},
#[snafu(display(
"`{}` used with unexpected overrides: {}",
@ -43,7 +43,7 @@ pub(crate) enum ConfigError {
))]
SubcommandOverrides {
subcommand: String,
overrides: BTreeMap<String, String>,
overrides: BTreeMap<String, String>,
},
}

View File

@ -2,7 +2,7 @@ use crate::common::*;
#[derive(PartialEq, Debug)]
pub(crate) struct Dependency<'src> {
pub(crate) recipe: Rc<Recipe<'src>>,
pub(crate) recipe: Rc<Recipe<'src>>,
pub(crate) arguments: Vec<Expression<'src>>,
}

View File

@ -5,7 +5,7 @@ use crate::common::*;
pub struct Enclosure<T: Display> {
enclosure: &'static str,
value: T,
value: T,
}
impl<T: Display> Enclosure<T> {

View File

@ -16,7 +16,7 @@ impl<T, E: Error> ErrorResultExt<T> for Result<T, E> {
}
Err(error.code())
}
},
}
}
}

View File

@ -2,11 +2,11 @@ use crate::common::*;
pub(crate) struct Evaluator<'src: 'run, 'run> {
assignments: Option<&'run Table<'src, Assignment<'src>>>,
config: &'run Config,
dotenv: &'run BTreeMap<String, String>,
scope: Scope<'src, 'run>,
settings: &'run Settings<'run>,
search: &'run Search,
config: &'run Config,
dotenv: &'run BTreeMap<String, String>,
scope: Scope<'src, 'run>,
settings: &'run Settings<'run>,
search: &'run Search,
}
impl<'src, 'run> Evaluator<'src, 'run> {
@ -64,23 +64,22 @@ impl<'src, 'run> Evaluator<'src, 'run> {
message: format!("attempted to evaluate undefined variable `{}`", variable),
})
}
}
},
Expression::Call { thunk } => {
use Thunk::*;
let context = FunctionContext {
dotenv: self.dotenv,
dotenv: self.dotenv,
invocation_directory: &self.config.invocation_directory,
search: self.search,
search: self.search,
};
match thunk {
Nullary { name, function, .. } => {
Nullary { name, function, .. } =>
function(&context).map_err(|message| RuntimeError::FunctionCall {
function: *name,
message,
})
}
}),
Unary {
name,
function,
@ -107,18 +106,16 @@ impl<'src, 'run> Evaluator<'src, 'run> {
message,
}),
}
}
},
Expression::StringLiteral { string_literal } => Ok(string_literal.cooked.to_string()),
Expression::Backtick { contents, token } => {
Expression::Backtick { contents, token } =>
if self.config.dry_run {
Ok(format!("`{}`", contents))
} else {
Ok(self.run_backtick(contents, token)?)
}
}
Expression::Concatination { lhs, rhs } => {
Ok(self.evaluate_expression(lhs)? + &self.evaluate_expression(rhs)?)
}
},
Expression::Concatination { lhs, rhs } =>
Ok(self.evaluate_expression(lhs)? + &self.evaluate_expression(rhs)?),
Expression::Group { contents } => self.evaluate_expression(contents),
}
}
@ -155,7 +152,7 @@ impl<'src, 'run> Evaluator<'src, 'run> {
Fragment::Text { token } => evaluated += token.lexeme(),
Fragment::Interpolation { expression } => {
evaluated += &self.evaluate_expression(expression)?;
}
},
}
}
Ok(evaluated)

View File

@ -11,7 +11,7 @@ pub(crate) enum Expression<'src> {
/// `contents`
Backtick {
contents: &'src str,
token: Token<'src>,
token: Token<'src>,
},
/// `name(arguments)`
Call { thunk: Thunk<'src> },

View File

@ -1,7 +1,7 @@
use crate::common::*;
pub(crate) struct FunctionContext<'run> {
pub(crate) dotenv: &'run BTreeMap<String, String>,
pub(crate) dotenv: &'run BTreeMap<String, String>,
pub(crate) invocation_directory: &'run Path,
pub(crate) search: &'run Search,
pub(crate) search: &'run Search,
}

View File

@ -1,7 +1,7 @@
use crate::common::*;
pub(crate) struct InterruptHandler {
blocks: u32,
blocks: u32,
interrupted: bool,
}
@ -18,20 +18,17 @@ impl InterruptHandler {
match INSTANCE.lock() {
Ok(guard) => guard,
Err(poison_error) => {
eprintln!(
"{}",
RuntimeError::Internal {
message: format!("interrupt handler mutex poisoned: {}", poison_error),
}
);
eprintln!("{}", RuntimeError::Internal {
message: format!("interrupt handler mutex poisoned: {}", poison_error),
});
std::process::exit(EXIT_FAILURE);
}
},
}
}
fn new() -> Self {
Self {
blocks: 0,
blocks: 0,
interrupted: false,
}
}
@ -56,13 +53,9 @@ impl InterruptHandler {
pub(crate) fn unblock(&mut self) {
if self.blocks == 0 {
eprintln!(
"{}",
RuntimeError::Internal {
message: "attempted to unblock interrupt handler, but handler was not blocked"
.to_string(),
}
);
eprintln!("{}", RuntimeError::Internal {
message: "attempted to unblock interrupt handler, but handler was not blocked".to_string(),
});
std::process::exit(EXIT_FAILURE);
}

View File

@ -2,11 +2,11 @@ use crate::common::*;
#[derive(Debug, PartialEq)]
pub(crate) struct Justfile<'src> {
pub(crate) recipes: Table<'src, Rc<Recipe<'src>>>,
pub(crate) recipes: Table<'src, Rc<Recipe<'src>>>,
pub(crate) assignments: Table<'src, Assignment<'src>>,
pub(crate) aliases: Table<'src, Alias<'src>>,
pub(crate) settings: Settings<'src>,
pub(crate) warnings: Vec<Warning<'src>>,
pub(crate) aliases: Table<'src, Alias<'src>>,
pub(crate) settings: Settings<'src>,
pub(crate) warnings: Vec<Warning<'src>>,
}
impl<'src> Justfile<'src> {
@ -140,11 +140,11 @@ impl<'src> Justfile<'src> {
let argument_count = cmp::min(tail.len(), recipe.max_arguments());
if !argument_range.range_contains(&argument_count) {
return Err(RuntimeError::ArgumentCountMismatch {
recipe: recipe.name(),
recipe: recipe.name(),
parameters: recipe.parameters.iter().collect(),
found: tail.len(),
min: recipe.min_arguments(),
max: recipe.max_arguments(),
found: tail.len(),
min: recipe.min_arguments(),
max: recipe.max_arguments(),
});
}
grouped.push((recipe, &tail[0..argument_count]));

View File

@ -8,28 +8,28 @@ use TokenKind::*;
/// The lexer proceeds character-by-character, as opposed to using
/// regular expressions to lex tokens or semi-tokens at a time. As a
/// result, it is verbose and straightforward. Just used to have a
/// regex-based lexer, which was slower and generally godawful. However,
/// this should not be taken as a slight against regular expressions,
/// the lexer was just idiosyncratically bad.
/// regex-based lexer, which was slower and generally godawful.
/// However, this should not be taken as a slight against regular
/// expressions, the lexer was just idiosyncratically bad.
pub(crate) struct Lexer<'src> {
/// Source text
src: &'src str,
src: &'src str,
/// Char iterator
chars: Chars<'src>,
chars: Chars<'src>,
/// Tokens
tokens: Vec<Token<'src>>,
tokens: Vec<Token<'src>>,
/// Current token start
token_start: Position,
token_start: Position,
/// Current token end
token_end: Position,
token_end: Position,
/// Next character to be lexed
next: Option<char>,
next: Option<char>,
/// Next indent will start a recipe body
recipe_body_pending: bool,
/// Inside recipe body
recipe_body: bool,
recipe_body: bool,
/// Indentation stack
indentation: Vec<&'src str>,
indentation: Vec<&'src str>,
/// Current interpolation start token
interpolation_start: Option<Token<'src>>,
}
@ -48,7 +48,7 @@ impl<'src> Lexer<'src> {
let start = Position {
offset: 0,
column: 0,
line: 0,
line: 0,
};
Lexer {
@ -83,7 +83,7 @@ impl<'src> Lexer<'src> {
self.next = self.chars.next();
Ok(())
}
},
None => Err(self.internal_error("Lexer advanced past end of text")),
}
}
@ -158,12 +158,12 @@ impl<'src> Lexer<'src> {
fn internal_error(&self, message: impl Into<String>) -> CompilationError<'src> {
// Use `self.token_end` as the location of the error
let token = Token {
src: self.src,
src: self.src,
offset: self.token_end.offset,
line: self.token_end.line,
line: self.token_end.line,
column: self.token_end.column,
length: 0,
kind: Unspecified,
kind: Unspecified,
};
CompilationError {
kind: CompilationErrorKind::Internal {
@ -177,7 +177,8 @@ impl<'src> Lexer<'src> {
fn error(&self, kind: CompilationErrorKind<'src>) -> CompilationError<'src> {
// Use the in-progress token span as the location of the error.
// The width of the error site to highlight depends on the kind of error:
// The width of the error site to highlight depends on the kind of
// error:
let length = match kind {
// highlight ' or "
UnterminatedString => 1,
@ -202,7 +203,7 @@ impl<'src> Lexer<'src> {
fn unterminated_interpolation_error(interpolation_start: Token<'src>) -> CompilationError<'src> {
CompilationError {
token: interpolation_start,
kind: UnterminatedInterpolation,
kind: UnterminatedInterpolation,
}
}
@ -262,7 +263,7 @@ impl<'src> Lexer<'src> {
} else {
self.lex_normal(first)?
};
}
},
None => break,
}
}
@ -369,7 +370,7 @@ impl<'src> Lexer<'src> {
};
Ok(())
}
},
Continue => {
if !self.indentation().is_empty() {
for _ in self.indentation().chars() {
@ -380,7 +381,7 @@ impl<'src> Lexer<'src> {
}
Ok(())
}
},
Decrease => {
while self.indentation() != whitespace {
self.lex_dedent();
@ -395,14 +396,14 @@ impl<'src> Lexer<'src> {
}
Ok(())
}
},
Mixed { whitespace } => {
for _ in whitespace.chars() {
self.advance()?;
}
Err(self.error(MixedLeadingWhitespace { whitespace }))
}
},
Inconsistent => {
for _ in whitespace.chars() {
self.advance()?;
@ -410,9 +411,9 @@ impl<'src> Lexer<'src> {
Err(self.error(InconsistentLeadingWhitespace {
expected: self.indentation(),
found: whitespace,
found: whitespace,
}))
}
},
Increase => {
while self.next_is_whitespace() {
self.advance()?;
@ -429,7 +430,7 @@ impl<'src> Lexer<'src> {
}
Ok(())
}
},
}
}
@ -454,14 +455,13 @@ impl<'src> Lexer<'src> {
' ' | '\t' => self.lex_whitespace(),
'\'' => self.lex_raw_string(),
'"' => self.lex_cooked_string(),
_ => {
_ =>
if Self::is_identifier_start(start) {
self.lex_identifier()
} else {
self.advance()?;
Err(self.error(UnknownStartOfToken))
}
}
},
}
}
@ -478,7 +478,8 @@ impl<'src> Lexer<'src> {
// Emit interpolation end token
self.lex_double(InterpolationEnd)
} else if self.at_eol_or_eof() {
// Return unterminated interpolation error that highlights the opening {{
// Return unterminated interpolation error that highlights the opening
// {{
Err(Self::unterminated_interpolation_error(interpolation_start))
} else {
// Otherwise lex as per normal
@ -529,7 +530,7 @@ impl<'src> Lexer<'src> {
self.lex_double(InterpolationStart)?;
self.interpolation_start = Some(self.tokens[self.tokens.len() - 1]);
Ok(())
}
},
EndOfFile => Ok(()),
}
}
@ -677,7 +678,7 @@ impl<'src> Lexer<'src> {
match self.next {
Some('\'') => break,
None => return Err(self.error(UnterminatedString)),
_ => {}
_ => {},
}
self.advance()?;
@ -826,9 +827,8 @@ mod tests {
Dedent | Eof => "",
// Variable lexemes
Text | StringCooked | StringRaw | Identifier | Comment | Backtick | Unspecified => {
panic!("Token {:?} has no default lexeme", kind)
}
Text | StringCooked | StringRaw | Identifier | Comment | Backtick | Unspecified =>
panic!("Token {:?} has no default lexeme", kind),
}
}
@ -872,7 +872,7 @@ mod tests {
kind,
};
assert_eq!(have, want);
}
},
}
}

View File

@ -5,21 +5,21 @@ use crate::common::*;
pub struct List<T: Display, I: Iterator<Item = T> + Clone> {
conjunction: &'static str,
values: I,
values: I,
}
impl<T: Display, I: Iterator<Item = T> + Clone> List<T, I> {
pub fn or<II: IntoIterator<Item = T, IntoIter = I>>(values: II) -> List<T, I> {
List {
conjunction: "or",
values: values.into_iter(),
values: values.into_iter(),
}
}
pub fn and<II: IntoIterator<Item = T, IntoIter = I>>(values: II) -> List<T, I> {
List {
conjunction: "and",
values: values.into_iter(),
values: values.into_iter(),
}
}
@ -68,11 +68,11 @@ impl<T: Display, I: Iterator<Item = T> + Clone> Display for List<T, I> {
write!(f, ", {}", c)?;
current = Some(n);
next = values.next();
}
},
(Some(c), None) => {
write!(f, ", {} {}", self.conjunction, c)?;
return Ok(());
}
},
_ => unreachable!("Iterator was fused, but returned Some after None"),
}
}

View File

@ -8,13 +8,12 @@ pub(crate) fn load_dotenv() -> RunResult<'static, BTreeMap<String, String>> {
Ok(iter) => {
let result: dotenv::Result<BTreeMap<String, String>> = iter.collect();
result.map_err(|dotenv_error| RuntimeError::Dotenv { dotenv_error })
}
Err(dotenv_error) => {
},
Err(dotenv_error) =>
if dotenv_error.not_found() {
Ok(BTreeMap::new())
} else {
Err(RuntimeError::Dotenv { dotenv_error })
}
}
},
}
}

View File

@ -1,7 +1,7 @@
use crate::common::*;
pub(crate) struct LoadError<'path> {
pub(crate) path: &'path Path,
pub(crate) path: &'path Path,
pub(crate) io_error: io::Error,
}

View File

@ -1,16 +1,17 @@
use crate::common::*;
/// A module, the top-level type produced by the parser. So-named because
/// although at present, all justfiles consist of a single module, in the
/// future we will likely have multi-module and multi-file justfiles.
/// A module, the top-level type produced by the parser. So-named
/// because although at present, all justfiles consist of a single
/// module, in the future we will likely have multi-module and
/// multi-file justfiles.
///
/// Not all successful parses result in valid justfiles, so additional
/// consistency checks and name resolution are performed by the `Analyzer`,
/// which produces a `Justfile` from a `Module`.
/// consistency checks and name resolution are performed by the
/// `Analyzer`, which produces a `Justfile` from a `Module`.
#[derive(Debug)]
pub(crate) struct Module<'src> {
/// Items in the justfile
pub(crate) items: Vec<Item<'src>>,
pub(crate) items: Vec<Item<'src>>,
/// Non-fatal warnings encountered during parsing
pub(crate) warnings: Vec<Warning<'src>>,
}

View File

@ -1,14 +1,14 @@
use crate::common::*;
/// A name. This is effectively just a `Token` of kind `Identifier`, but we
/// give it its own type for clarity.
/// A name. This is effectively just a `Token` of kind `Identifier`,
/// but we give it its own type for clarity.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd)]
pub(crate) struct Name<'src> {
pub(crate) offset: usize,
pub(crate) length: usize,
pub(crate) line: usize,
pub(crate) line: usize,
pub(crate) column: usize,
pub(crate) src: &'src str,
pub(crate) src: &'src str,
}
impl<'src> Name<'src> {
@ -20,12 +20,12 @@ impl<'src> Name<'src> {
/// Turn this name back into a token
pub(crate) fn token(&self) -> Token<'src> {
Token {
kind: TokenKind::Identifier,
kind: TokenKind::Identifier,
offset: self.offset,
length: self.length,
line: self.line,
line: self.line,
column: self.column,
src: self.src,
src: self.src,
}
}
@ -34,9 +34,9 @@ impl<'src> Name<'src> {
Name {
offset: token.offset,
length: token.length,
line: token.line,
line: token.line,
column: token.column,
src: token.src,
src: token.src,
}
}

View File

@ -1,9 +1,10 @@
use crate::common::*;
/// Methods commmon to all AST nodes. Currently only used in parser unit tests.
/// Methods commmon to all AST nodes. Currently only used in parser
/// unit tests.
pub(crate) trait Node<'src> {
/// Construct an untyped tree of atoms representing this Node. This function,
/// and `Tree` type, are only used in parser unit tests.
/// Construct an untyped tree of atoms representing this Node. This
/// function, and `Tree` type, are only used in parser unit tests.
fn tree(&self) -> Tree<'src>;
}
@ -59,18 +60,18 @@ impl<'src> Node<'src> for Expression<'src> {
Unary { name, arg, .. } => {
tree.push_mut(name.lexeme());
tree.push_mut(arg.tree());
}
},
Binary {
name, args: [a, b], ..
} => {
tree.push_mut(name.lexeme());
tree.push_mut(a.tree());
tree.push_mut(b.tree());
}
},
}
tree
}
},
Expression::Variable { name } => Tree::atom(name.lexeme()),
Expression::StringLiteral {
string_literal: StringLiteral { cooked, .. },
@ -175,7 +176,7 @@ impl<'src> Node<'src> for Set<'src> {
for argument in arguments {
set.push_mut(Tree::string(&argument.cooked));
}
}
},
}
set

View File

@ -28,7 +28,7 @@ pub(crate) fn output(mut command: Command) -> Result<String, OutputError> {
.to_string(),
),
}
}
},
Err(io_error) => Err(OutputError::Io(io_error)),
}
}

View File

@ -4,11 +4,11 @@ use crate::common::*;
#[derive(PartialEq, Debug)]
pub(crate) struct Parameter<'src> {
/// The parameter name
pub(crate) name: Name<'src>,
pub(crate) name: Name<'src>,
/// Parameter is variadic
pub(crate) variadic: bool,
/// An optional default expression
pub(crate) default: Option<Expression<'src>>,
pub(crate) default: Option<Expression<'src>>,
}
impl<'src> Display for Parameter<'src> {

View File

@ -4,25 +4,29 @@ use TokenKind::*;
/// Just language parser
///
/// The parser is a (hopefully) straightforward recursive descent parser.
/// The parser is a (hopefully) straightforward recursive descent
/// parser.
///
/// It uses a few tokens of lookahead to disambiguate different constructs.
/// It uses a few tokens of lookahead to disambiguate different
/// constructs.
///
/// The `expect_*` and `presume_`* methods are similar in that they assert
/// the type of unparsed tokens and consume them. However, upon encountering
/// an unexpected token, the `expect_*` methods return an unexpected token
/// error, whereas the `presume_*` tokens return an internal error.
/// The `expect_*` and `presume_`* methods are similar in that they
/// assert the type of unparsed tokens and consume them. However, upon
/// encountering an unexpected token, the `expect_*` methods return an
/// unexpected token error, whereas the `presume_*` tokens return an
/// internal error.
///
/// The `presume_*` methods are used when the token stream has been inspected
/// in some other way, and thus encountering an unexpected token is a bug in
/// Just, and not a syntax error.
/// The `presume_*` methods are used when the token stream has been
/// inspected in some other way, and thus encountering an unexpected
/// token is a bug in Just, and not a syntax error.
///
/// All methods starting with `parse_*` parse and return a language construct.
/// All methods starting with `parse_*` parse and return a language
/// construct.
pub(crate) struct Parser<'tokens, 'src> {
/// Source tokens
tokens: &'tokens [Token<'src>],
/// Index of the next un-parsed token
next: usize,
next: usize,
}
impl<'tokens, 'src> Parser<'tokens, 'src> {
@ -43,7 +47,8 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
Ok(self.next()?.error(kind))
}
/// Construct an unexpected token error with the token returned by `Parser::next`
/// Construct an unexpected token error with the token returned by
/// `Parser::next`
fn unexpected_token(
&self,
expected: &[TokenKind],
@ -93,11 +98,10 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
let mut rest = self.rest();
for kind in kinds {
match rest.next() {
Some(token) => {
Some(token) =>
if token.kind != *kind {
return false;
}
}
},
None => return false,
}
}
@ -125,8 +129,8 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
Err(self.internal_error("`Parser::advance()` advanced past end of token stream")?)
}
/// Return the next token if it is of kind `expected`, otherwise, return an
/// unexpected token error
/// Return the next token if it is of kind `expected`, otherwise,
/// return an unexpected token error
fn expect(&mut self, expected: TokenKind) -> CompilationResult<'src, Token<'src>> {
if let Some(token) = self.accept(expected)? {
Ok(token)
@ -157,8 +161,8 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
self.expect(Eol).map(|_| ()).expected(&[Eof])
}
/// Return an internal error if the next token is not of kind `Identifier` with
/// lexeme `lexeme`.
/// Return an internal error if the next token is not of kind
/// `Identifier` with lexeme `lexeme`.
fn presume_name(&mut self, lexeme: &str) -> CompilationResult<'src, ()> {
let next = self.advance()?;
@ -178,7 +182,8 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
}
}
/// Return an internal error if the next token is not of kind `kind`.
/// Return an internal error if the next token is not of kind
/// `kind`.
fn presume(&mut self, kind: TokenKind) -> CompilationResult<'src, Token<'src>> {
let next = self.advance()?;
@ -192,7 +197,8 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
}
}
/// Return an internal error if the next token is not one of kinds `kinds`.
/// Return an internal error if the next token is not one of kinds
/// `kinds`.
fn presume_any(&mut self, kinds: &[TokenKind]) -> CompilationResult<'src, Token<'src>> {
let next = self.advance()?;
if !kinds.contains(&next.kind) {
@ -267,16 +273,16 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
Comment => {
doc = Some(next.lexeme()[1..].trim());
self.expect_eol()?;
}
},
Eol => {
self.advance()?;
}
},
Eof => {
self.advance()?;
break;
}
},
Identifier => match next.lexeme() {
keyword::ALIAS => {
keyword::ALIAS =>
if self.next_are(&[Identifier, Identifier, Equals]) {
warnings.push(Warning::DeprecatedEquals {
equals: self.get(2)?,
@ -286,9 +292,8 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
items.push(Item::Alias(self.parse_alias()?));
} else {
items.push(Item::Recipe(self.parse_recipe(doc, false)?));
}
}
keyword::EXPORT => {
},
keyword::EXPORT =>
if self.next_are(&[Identifier, Identifier, Equals]) {
warnings.push(Warning::DeprecatedEquals {
equals: self.get(2)?,
@ -300,16 +305,14 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
items.push(Item::Assignment(self.parse_assignment(true)?));
} else {
items.push(Item::Recipe(self.parse_recipe(doc, false)?));
}
}
keyword::SET => {
},
keyword::SET =>
if self.next_are(&[Identifier, Identifier, ColonEquals]) {
items.push(Item::Set(self.parse_set()?));
} else {
items.push(Item::Recipe(self.parse_recipe(doc, false)?));
}
}
_ => {
},
_ =>
if self.next_are(&[Identifier, Equals]) {
warnings.push(Warning::DeprecatedEquals {
equals: self.get(1)?,
@ -319,16 +322,15 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
items.push(Item::Assignment(self.parse_assignment(false)?));
} else {
items.push(Item::Recipe(self.parse_recipe(doc, false)?));
}
}
},
},
At => {
self.presume(At)?;
items.push(Item::Recipe(self.parse_recipe(doc, true)?));
}
},
_ => {
return Err(self.unexpected_token(&[Identifier, At])?);
}
},
}
if next.kind != Comment {
@ -394,7 +396,7 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
let contents = &next.lexeme()[1..next.lexeme().len() - 1];
let token = self.advance()?;
Ok(Expression::Backtick { contents, token })
}
},
Identifier => {
let name = self.parse_name()?;
@ -406,13 +408,13 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
} else {
Ok(Expression::Variable { name })
}
}
},
ParenL => {
self.presume(ParenL)?;
let contents = Box::new(self.parse_expression()?);
self.expect(ParenR)?;
Ok(Expression::Group { contents })
}
},
_ => Err(self.unexpected_token(&[StringCooked, StringRaw, Backtick, Identifier, ParenL])?),
}
}
@ -443,7 +445,7 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
return Err(
token.error(CompilationErrorKind::InvalidEscapeSequence { character: other }),
);
}
},
}
escape = false;
} else if c == '\\' {
@ -456,7 +458,7 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
raw,
cooked: Cow::Owned(cooked),
})
}
},
_ => Err(token.error(CompilationErrorKind::Internal {
message: "`Parser::parse_string_literal` called on non-string token".to_string(),
})),
@ -656,7 +658,7 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
value: Setting::Shell(setting::Shell { command, arguments }),
name,
})
}
},
_ => Err(name.error(CompilationErrorKind::UnknownSetting {
setting: name.lexeme(),
})),
@ -743,7 +745,7 @@ mod tests {
kind,
};
assert_eq!(have, want);
}
},
}
}

View File

@ -71,8 +71,8 @@ impl PlatformInterface for Platform {
}
fn set_execute_permission(_path: &Path) -> Result<(), io::Error> {
// it is not necessary to set an execute permission on a script on windows,
// so this is a nop
// it is not necessary to set an execute permission on a script on
// windows, so this is a nop
Ok(())
}

View File

@ -1,8 +1,8 @@
use crate::common::*;
pub(crate) trait PlatformInterface {
/// Construct a command equivalent to running the script at `path` with the
/// shebang line `shebang`
/// Construct a command equivalent to running the script at `path`
/// with the shebang line `shebang`
fn make_shebang_command(
path: &Path,
working_directory: &Path,
@ -13,9 +13,11 @@ pub(crate) trait PlatformInterface {
/// Set the execute permission on the file pointed to by `path`
fn set_execute_permission(path: &Path) -> Result<(), io::Error>;
/// Extract the signal from a process exit status, if it was terminated by a signal
/// Extract the signal from a process exit status, if it was
/// terminated by a signal
fn signal_from_exit_status(exit_status: process::ExitStatus) -> Option<i32>;
/// Translate a path from a "native" path to a path the interpreter expects
/// Translate a path from a "native" path to a path the interpreter
/// expects
fn to_shell_path(working_directory: &Path, path: &Path) -> Result<String, String>;
}

View File

@ -3,5 +3,5 @@
pub(crate) struct Position {
pub(crate) offset: usize,
pub(crate) column: usize,
pub(crate) line: usize,
pub(crate) line: usize,
}

View File

@ -10,8 +10,9 @@ use crate::common::*;
///
/// - Overrides are of the form `NAME=.*`
///
/// - After overrides comes a single optional search directory argument.
/// This is either '.', '..', or an argument that contains a `/`.
/// - After overrides comes a single optional search directory
/// argument. This is either '.', '..', or an argument that contains
/// a `/`.
///
/// If the argument contains a `/`, everything before and including
/// the slash is the search directory, and everything after is added
@ -23,20 +24,20 @@ use crate::common::*;
/// being invoked and are a convenient way to override settings.
///
/// For modes that do not take other arguments, the search directory
/// argument determines where to begin searching for the justfile. This
/// allows command lines like `just -l ..` and `just ../build` to find
/// the same justfile.
/// argument determines where to begin searching for the justfile.
/// This allows command lines like `just -l ..` and `just ../build` to
/// find the same justfile.
///
/// For modes that do take other arguments, the search argument is simply
/// prepended to rest.
/// For modes that do take other arguments, the search argument is
/// simply prepended to rest.
#[cfg_attr(test, derive(PartialEq, Debug))]
pub struct Positional {
/// Overrides from values of the form `[a-zA-Z_][a-zA-Z0-9_-]*=.*`
pub overrides: Vec<(String, String)>,
pub overrides: Vec<(String, String)>,
/// An argument equal to '.', '..', or ending with `/`
pub search_directory: Option<String>,
/// Everything else
pub arguments: Vec<String>,
pub arguments: Vec<String>,
}
impl Positional {

View File

@ -2,8 +2,8 @@ use crate::common::*;
use std::process::{ExitStatus, Stdio};
/// Return a `RuntimeError::Signal` if the process was terminated by a signal,
/// otherwise return an `RuntimeError::UnknownFailure`
/// Return a `RuntimeError::Signal` if the process was terminated by a
/// signal, otherwise return an `RuntimeError::UnknownFailure`
fn error_from_signal(
recipe: &str,
line_number: Option<usize>,
@ -26,13 +26,13 @@ fn error_from_signal(
#[derive(PartialEq, Debug)]
pub(crate) struct Recipe<'src, D = Dependency<'src>> {
pub(crate) dependencies: Vec<D>,
pub(crate) doc: Option<&'src str>,
pub(crate) body: Vec<Line<'src>>,
pub(crate) name: Name<'src>,
pub(crate) parameters: Vec<Parameter<'src>>,
pub(crate) private: bool,
pub(crate) quiet: bool,
pub(crate) shebang: bool,
pub(crate) doc: Option<&'src str>,
pub(crate) body: Vec<Line<'src>>,
pub(crate) name: Name<'src>,
pub(crate) parameters: Vec<Parameter<'src>>,
pub(crate) private: bool,
pub(crate) quiet: bool,
pub(crate) shebang: bool,
}
impl<'src, D> Recipe<'src, D> {
@ -106,14 +106,14 @@ impl<'src, D> Recipe<'src, D> {
.prefix("just")
.tempdir()
.map_err(|error| RuntimeError::TmpdirIoError {
recipe: self.name(),
recipe: self.name(),
io_error: error,
})?;
let mut path = tmp.path().to_path_buf();
path.push(self.name());
{
let mut f = fs::File::create(&path).map_err(|error| RuntimeError::TmpdirIoError {
recipe: self.name(),
recipe: self.name(),
io_error: error,
})?;
let mut text = String::new();
@ -137,14 +137,14 @@ impl<'src, D> Recipe<'src, D> {
f.write_all(text.as_bytes())
.map_err(|error| RuntimeError::TmpdirIoError {
recipe: self.name(),
recipe: self.name(),
io_error: error,
})?;
}
// make the script executable
Platform::set_execute_permission(&path).map_err(|error| RuntimeError::TmpdirIoError {
recipe: self.name(),
recipe: self.name(),
io_error: error,
})?;
@ -177,7 +177,7 @@ impl<'src, D> Recipe<'src, D> {
// run it!
match InterruptHandler::guard(|| command.status()) {
Ok(exit_status) => {
Ok(exit_status) =>
if let Some(code) = exit_status.code() {
if code != 0 {
return Err(RuntimeError::Code {
@ -188,8 +188,7 @@ impl<'src, D> Recipe<'src, D> {
}
} else {
return Err(error_from_signal(self.name(), None, exit_status));
}
}
},
Err(io_error) => {
return Err(RuntimeError::Shebang {
recipe: self.name(),
@ -197,7 +196,7 @@ impl<'src, D> Recipe<'src, D> {
argument: argument.map(String::from),
io_error,
});
}
},
};
} else {
let mut lines = self.body.iter().peekable();
@ -260,7 +259,7 @@ impl<'src, D> Recipe<'src, D> {
cmd.export(dotenv, &scope);
match InterruptHandler::guard(|| cmd.status()) {
Ok(exit_status) => {
Ok(exit_status) =>
if let Some(code) = exit_status.code() {
if code != 0 {
return Err(RuntimeError::Code {
@ -275,14 +274,13 @@ impl<'src, D> Recipe<'src, D> {
Some(line_number),
exit_status,
));
}
}
},
Err(io_error) => {
return Err(RuntimeError::IoError {
recipe: self.name(),
io_error,
});
}
},
};
}
}

View File

@ -1,8 +1,8 @@
use crate::common::*;
pub(crate) struct RecipeContext<'src: 'run, 'run> {
pub(crate) config: &'run Config,
pub(crate) scope: Scope<'src, 'run>,
pub(crate) search: &'run Search,
pub(crate) config: &'run Config,
pub(crate) scope: Scope<'src, 'run>,
pub(crate) search: &'run Search,
pub(crate) settings: &'run Settings<'src>,
}

View File

@ -4,8 +4,8 @@ use CompilationErrorKind::*;
pub(crate) struct RecipeResolver<'src: 'run, 'run> {
unresolved_recipes: Table<'src, UnresolvedRecipe<'src>>,
resolved_recipes: Table<'src, Rc<Recipe<'src>>>,
assignments: &'run Table<'src, Assignment<'src>>,
resolved_recipes: Table<'src, Rc<Recipe<'src>>>,
assignments: &'run Table<'src, Assignment<'src>>,
}
impl<'src: 'run, 'run> RecipeResolver<'src, 'run> {
@ -107,7 +107,7 @@ impl<'src: 'run, 'run> RecipeResolver<'src, 'run> {
} else {
// dependency is unknown
return Err(dependency.recipe.error(UnknownDependency {
recipe: recipe.name(),
recipe: recipe.name(),
unknown: name,
}));
}

View File

@ -3,23 +3,23 @@ use crate::common::*;
#[derive(Debug)]
pub(crate) enum RuntimeError<'src> {
ArgumentCountMismatch {
recipe: &'src str,
recipe: &'src str,
parameters: Vec<&'src Parameter<'src>>,
found: usize,
min: usize,
max: usize,
found: usize,
min: usize,
max: usize,
},
Backtick {
token: Token<'src>,
token: Token<'src>,
output_error: OutputError,
},
Code {
recipe: &'src str,
recipe: &'src str,
line_number: Option<usize>,
code: i32,
code: i32,
},
Cygpath {
recipe: &'src str,
recipe: &'src str,
output_error: OutputError,
},
Dotenv {
@ -27,44 +27,44 @@ pub(crate) enum RuntimeError<'src> {
},
FunctionCall {
function: Name<'src>,
message: String,
message: String,
},
Internal {
message: String,
},
IoError {
recipe: &'src str,
recipe: &'src str,
io_error: io::Error,
},
Shebang {
recipe: &'src str,
command: String,
recipe: &'src str,
command: String,
argument: Option<String>,
io_error: io::Error,
},
Signal {
recipe: &'src str,
recipe: &'src str,
line_number: Option<usize>,
signal: i32,
signal: i32,
},
TmpdirIoError {
recipe: &'src str,
recipe: &'src str,
io_error: io::Error,
},
UnknownOverrides {
overrides: Vec<&'src str>,
},
UnknownRecipes {
recipes: Vec<&'src str>,
recipes: Vec<&'src str>,
suggestion: Option<&'src str>,
},
Unknown {
recipe: &'src str,
recipe: &'src str,
line_number: Option<usize>,
},
NoRecipes,
DefaultRecipeRequiresArguments {
recipe: &'src str,
recipe: &'src str,
min_arguments: usize,
},
}
@ -119,7 +119,7 @@ impl<'src> Display for RuntimeError<'src> {
if let Some(suggestion) = *suggestion {
write!(f, "\nDid you mean `{}`?", suggestion)?;
}
}
},
UnknownOverrides { overrides } => {
write!(
f,
@ -127,7 +127,7 @@ impl<'src> Display for RuntimeError<'src> {
Count("Variable", overrides.len()),
List::and_ticked(overrides),
)?;
}
},
ArgumentCountMismatch {
recipe,
parameters,
@ -173,12 +173,12 @@ impl<'src> Display for RuntimeError<'src> {
write!(f, " {}", param)?;
}
}
}
},
Code {
recipe,
line_number,
code,
} => {
} =>
if let Some(n) = line_number {
write!(
f,
@ -187,8 +187,7 @@ impl<'src> Display for RuntimeError<'src> {
)?;
} else {
write!(f, "Recipe `{}` failed with exit code {}", recipe, code)?;
}
}
},
Cygpath {
recipe,
output_error,
@ -196,56 +195,56 @@ impl<'src> Display for RuntimeError<'src> {
OutputError::Code(code) => {
write!(
f,
"Cygpath failed with exit code {} while translating recipe `{}` \
shebang interpreter path",
"Cygpath failed with exit code {} while translating recipe `{}` shebang interpreter \
path",
code, recipe
)?;
}
},
OutputError::Signal(signal) => {
write!(
f,
"Cygpath terminated by signal {} while translating recipe `{}` \
shebang interpreter path",
"Cygpath terminated by signal {} while translating recipe `{}` shebang interpreter \
path",
signal, recipe
)?;
}
},
OutputError::Unknown => {
write!(
f,
"Cygpath experienced an unknown failure while translating recipe `{}` \
shebang interpreter path",
"Cygpath experienced an unknown failure while translating recipe `{}` shebang \
interpreter path",
recipe
)?;
}
},
OutputError::Io(io_error) => {
match io_error.kind() {
io::ErrorKind::NotFound => write!(
f,
"Could not find `cygpath` executable to translate recipe `{}` \
shebang interpreter path:\n{}",
"Could not find `cygpath` executable to translate recipe `{}` shebang interpreter \
path:\n{}",
recipe, io_error
),
io::ErrorKind::PermissionDenied => write!(
f,
"Could not run `cygpath` executable to translate recipe `{}` \
shebang interpreter path:\n{}",
"Could not run `cygpath` executable to translate recipe `{}` shebang interpreter \
path:\n{}",
recipe, io_error
),
_ => write!(f, "Could not run `cygpath` executable:\n{}", io_error),
}?;
}
},
OutputError::Utf8(utf8_error) => {
write!(
f,
"Cygpath successfully translated recipe `{}` shebang interpreter path, \
but output was not utf8: {}",
"Cygpath successfully translated recipe `{}` shebang interpreter path, but output was \
not utf8: {}",
recipe, utf8_error
)?;
}
},
},
Dotenv { dotenv_error } => {
writeln!(f, "Failed to load .env: {}", dotenv_error)?;
}
},
FunctionCall { function, message } => {
writeln!(
f,
@ -253,13 +252,13 @@ impl<'src> Display for RuntimeError<'src> {
function.lexeme(),
message
)?;
}
},
Shebang {
recipe,
command,
argument,
io_error,
} => {
} =>
if let Some(argument) = argument {
write!(
f,
@ -272,13 +271,12 @@ impl<'src> Display for RuntimeError<'src> {
"Recipe `{}` with shebang `#!{}` execution error: {}",
recipe, command, io_error
)?;
}
}
},
Signal {
recipe,
line_number,
signal,
} => {
} =>
if let Some(n) = line_number {
write!(
f,
@ -287,12 +285,11 @@ impl<'src> Display for RuntimeError<'src> {
)?;
} else {
write!(f, "Recipe `{}` was terminated by signal {}", recipe, signal)?;
}
}
},
Unknown {
recipe,
line_number,
} => {
} =>
if let Some(n) = line_number {
write!(
f,
@ -301,8 +298,7 @@ impl<'src> Display for RuntimeError<'src> {
)?;
} else {
write!(f, "Recipe `{}` failed for an unknown reason", recipe)?;
}
}
},
IoError { recipe, io_error } => {
match io_error.kind() {
io::ErrorKind::NotFound => writeln!(
@ -317,28 +313,27 @@ impl<'src> Display for RuntimeError<'src> {
),
_ => writeln!(
f,
"Recipe `{}` could not be run because of an IO error while \
launching `sh`:{}",
"Recipe `{}` could not be run because of an IO error while launching `sh`:{}",
recipe, io_error
),
}?;
}
},
TmpdirIoError { recipe, io_error } => writeln!(
f,
"Recipe `{}` could not be run because of an IO error while trying \
to create a temporary directory or write a file to that directory`:{}",
"Recipe `{}` could not be run because of an IO error while trying to create a temporary \
directory or write a file to that directory`:{}",
recipe, io_error
)?,
Backtick { output_error, .. } => match output_error {
OutputError::Code(code) => {
writeln!(f, "Backtick failed with exit code {}", code)?;
}
},
OutputError::Signal(signal) => {
writeln!(f, "Backtick was terminated by signal {}", signal)?;
}
},
OutputError::Unknown => {
writeln!(f, "Backtick failed for an unknown reason")?;
}
},
OutputError::Io(io_error) => {
match io_error.kind() {
io::ErrorKind::NotFound => write!(
@ -353,23 +348,22 @@ impl<'src> Display for RuntimeError<'src> {
),
_ => write!(
f,
"Backtick could not be run because of an IO \
error while launching `sh`:\n{}",
"Backtick could not be run because of an IO error while launching `sh`:\n{}",
io_error
),
}?;
}
},
OutputError::Utf8(utf8_error) => {
writeln!(
f,
"Backtick succeeded but stdout was not utf8: {}",
utf8_error
)?;
}
},
},
NoRecipes => {
writeln!(f, "Justfile contains no recipes.",)?;
}
},
DefaultRecipeRequiresArguments {
recipe,
min_arguments,
@ -381,7 +375,7 @@ impl<'src> Display for RuntimeError<'src> {
min_arguments,
Count("argument", *min_arguments),
)?;
}
},
Internal { message } => {
write!(
f,
@ -389,7 +383,7 @@ impl<'src> Display for RuntimeError<'src> {
consider filing an issue: https://github.com/casey/just/issues/new",
message
)?;
}
},
}
write!(f, "{}", message.suffix())?;

View File

@ -2,21 +2,21 @@ use crate::common::*;
#[derive(Debug)]
pub(crate) struct Scope<'src: 'run, 'run> {
parent: Option<&'run Scope<'src, 'run>>,
parent: Option<&'run Scope<'src, 'run>>,
bindings: Table<'src, Binding<'src, String>>,
}
impl<'src, 'run> Scope<'src, 'run> {
pub(crate) fn child(parent: &'run Scope<'src, 'run>) -> Scope<'src, 'run> {
Scope {
parent: Some(parent),
parent: Some(parent),
bindings: Table::new(),
}
}
pub(crate) fn new() -> Scope<'src, 'run> {
Scope {
parent: None,
parent: None,
bindings: Table::new(),
}
}

View File

@ -6,7 +6,7 @@ pub(crate) const FILENAME: &str = "justfile";
const PROJECT_ROOT_CHILDREN: &[&str] = &[".bzr", ".git", ".hg", ".svn", "_darcs"];
pub(crate) struct Search {
pub(crate) justfile: PathBuf,
pub(crate) justfile: PathBuf,
pub(crate) working_directory: PathBuf,
}
@ -25,7 +25,7 @@ impl Search {
justfile,
working_directory,
})
}
},
SearchConfig::FromSearchDirectory { search_directory } => {
let search_directory = Self::clean(invocation_directory, search_directory);
@ -38,7 +38,7 @@ impl Search {
justfile,
working_directory,
})
}
},
SearchConfig::WithJustfile { justfile } => {
let justfile = Self::clean(invocation_directory, justfile);
@ -49,13 +49,13 @@ impl Search {
justfile,
working_directory,
})
}
},
SearchConfig::WithJustfileAndWorkingDirectory {
justfile,
working_directory,
} => Ok(Self {
justfile: Self::clean(invocation_directory, justfile),
justfile: Self::clean(invocation_directory, justfile),
working_directory: Self::clean(invocation_directory, working_directory),
}),
}
@ -75,7 +75,7 @@ impl Search {
justfile,
working_directory,
})
}
},
SearchConfig::FromSearchDirectory { search_directory } => {
let search_directory = Self::clean(invocation_directory, search_directory);
@ -88,7 +88,7 @@ impl Search {
justfile,
working_directory,
})
}
},
SearchConfig::WithJustfile { justfile } => {
let justfile = Self::clean(invocation_directory, justfile);
@ -99,13 +99,13 @@ impl Search {
justfile,
working_directory,
})
}
},
SearchConfig::WithJustfileAndWorkingDirectory {
justfile,
working_directory,
} => Ok(Self {
justfile: Self::clean(invocation_directory, justfile),
justfile: Self::clean(invocation_directory, justfile),
working_directory: Self::clean(invocation_directory, working_directory),
}),
}
@ -205,7 +205,7 @@ mod tests {
match Search::justfile(tmp.path()) {
Err(SearchError::NotFound) => {
assert!(true);
}
},
_ => panic!("No justfile found error was expected"),
}
}
@ -227,7 +227,7 @@ mod tests {
match Search::justfile(path.as_path()) {
Err(SearchError::MultipleCandidates { .. }) => {
assert!(true);
}
},
_ => panic!("Multiple candidates error was expected"),
}
}
@ -242,7 +242,7 @@ mod tests {
match Search::justfile(path.as_path()) {
Ok(_path) => {
assert!(true);
}
},
_ => panic!("No errors were expected"),
}
}
@ -268,7 +268,7 @@ mod tests {
match Search::justfile(path.as_path()) {
Ok(_path) => {
assert!(true);
}
},
_ => panic!("No errors were expected"),
}
}
@ -287,7 +287,7 @@ mod tests {
match Search::justfile(path.as_path()) {
Ok(_path) => {
assert!(true);
}
},
_ => panic!("No errors were expected"),
}
}
@ -311,7 +311,7 @@ mod tests {
path.pop();
path.push(FILENAME);
assert_eq!(found_path, path);
}
},
_ => panic!("No errors were expected"),
}
}


@ -15,7 +15,7 @@ pub(crate) enum SearchConfig {
WithJustfile { justfile: PathBuf },
/// Use user-specified justfile and working directory.
WithJustfileAndWorkingDirectory {
justfile: PathBuf,
justfile: PathBuf,
working_directory: PathBuf,
},
}


@ -20,7 +20,7 @@ pub(crate) enum SearchError {
))]
Io {
directory: PathBuf,
io_error: io::Error,
io_error: io::Error,
},
#[snafu(display("No justfile found"))]
NotFound,


@ -2,7 +2,7 @@ use crate::common::*;
#[derive(Debug)]
pub(crate) struct Set<'src> {
pub(crate) name: Name<'src>,
pub(crate) name: Name<'src>,
pub(crate) value: Setting<'src>,
}


@ -7,6 +7,6 @@ pub(crate) enum Setting<'src> {
#[derive(Debug, PartialEq)]
pub(crate) struct Shell<'src> {
pub(crate) command: StringLiteral<'src>,
pub(crate) command: StringLiteral<'src>,
pub(crate) arguments: Vec<StringLiteral<'src>>,
}


@ -1,6 +1,6 @@
pub(crate) struct Shebang<'line> {
pub(crate) interpreter: &'line str,
pub(crate) argument: Option<&'line str>,
pub(crate) argument: Option<&'line str>,
}
impl<'line> Shebang<'line> {


@ -1,6 +1,7 @@
use crate::common::*;
/// String wrapper that uses nonblank characters to display spaces and tabs
/// String wrapper that uses nonblank characters to display spaces and
/// tabs
pub struct ShowWhitespace<'str>(pub &'str str);
impl<'str> Display for ShowWhitespace<'str> {
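
The one-line doc comment above is the only description of this helper, so here is a minimal usage sketch. It is illustrative only and not part of this commit's diff; the exact replacement glyphs are defined in the Display impl that the hunk truncates.

// Illustrative sketch, not from the commit: wrap a line so its tabs and
// spaces show up as visible placeholder characters in diagnostics.
fn show(line: &str) {
  eprintln!("{}", ShowWhitespace(line));
}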


@ -2,7 +2,7 @@ use crate::common::*;
#[derive(PartialEq, Debug)]
pub(crate) struct StringLiteral<'src> {
pub(crate) raw: &'src str,
pub(crate) raw: &'src str,
pub(crate) cooked: Cow<'src, str>,
}


@ -1,12 +1,12 @@
//! Justfile summary creation, for testing purposes only.
//!
//! The contents of this module are not bound by any stability guarantees.
//! Breaking changes may be introduced at any time.
//! The contents of this module are not bound by any stability
//! guarantees. Breaking changes may be introduced at any time.
//!
//! The main entry point into this module is the `summary` function, which
//! parses a justfile at a given path and produces a `Summary` object,
//! which broadly captures the functionality of the parsed justfile, or
//! an error message.
//! The main entry point into this module is the `summary` function,
//! which parses a justfile at a given path and produces a `Summary`
//! object, which broadly captures the functionality of the parsed
//! justfile, or an error message.
//!
//! This functionality is intended to be used with `janus`, a tool for
//! ensuring that changes to just do not inadvertently break or
@ -35,7 +35,7 @@ pub fn summary(path: &Path) -> Result<Result<Summary, String>, io::Error> {
#[derive(Eq, PartialEq, Hash, Ord, PartialOrd, Debug, Clone)]
pub struct Summary {
pub assignments: BTreeMap<String, Assignment>,
pub recipes: BTreeMap<String, Recipe>,
pub recipes: BTreeMap<String, Recipe>,
}
impl Summary {
@ -50,7 +50,7 @@ impl Summary {
}
Summary {
recipes: justfile
recipes: justfile
.recipes
.into_iter()
.map(|(name, recipe)| {
@ -71,13 +71,13 @@ impl Summary {
#[derive(Eq, PartialEq, Hash, Ord, PartialOrd, Debug, Clone)]
pub struct Recipe {
pub aliases: Vec<String>,
pub aliases: Vec<String>,
pub dependencies: Vec<Dependency>,
pub lines: Vec<Line>,
pub private: bool,
pub quiet: bool,
pub shebang: bool,
pub parameters: Vec<Parameter>,
pub lines: Vec<Line>,
pub private: bool,
pub quiet: bool,
pub shebang: bool,
pub parameters: Vec<Parameter>,
}
impl Recipe {
@ -101,16 +101,16 @@ impl Recipe {
#[derive(Eq, PartialEq, Hash, Ord, PartialOrd, Debug, Clone)]
pub struct Parameter {
pub variadic: bool,
pub name: String,
pub default: Option<Expression>,
pub name: String,
pub default: Option<Expression>,
}
impl Parameter {
fn new(parameter: &full::Parameter) -> Parameter {
Parameter {
variadic: parameter.variadic,
name: parameter.name.lexeme().to_owned(),
default: parameter.default.as_ref().map(Expression::new),
name: parameter.name.lexeme().to_owned(),
default: parameter.default.as_ref().map(Expression::new),
}
}
}
@ -149,14 +149,14 @@ impl Fragment {
#[derive(Eq, PartialEq, Hash, Ord, PartialOrd, Debug, Clone)]
pub struct Assignment {
pub exported: bool,
pub exported: bool,
pub expression: Expression,
}
impl Assignment {
fn new(assignment: &full::Assignment) -> Assignment {
Assignment {
exported: assignment.export,
exported: assignment.export,
expression: Expression::new(&assignment.value),
}
}
@ -168,7 +168,7 @@ pub enum Expression {
command: String,
},
Call {
name: String,
name: String,
arguments: Vec<Expression>,
},
Concatination {
@ -192,17 +192,17 @@ impl Expression {
},
Call { thunk } => match thunk {
full::Thunk::Nullary { name, .. } => Expression::Call {
name: name.lexeme().to_owned(),
name: name.lexeme().to_owned(),
arguments: Vec::new(),
},
full::Thunk::Unary { name, arg, .. } => Expression::Call {
name: name.lexeme().to_owned(),
name: name.lexeme().to_owned(),
arguments: vec![Expression::new(arg)],
},
full::Thunk::Binary {
name, args: [a, b], ..
} => Expression::Call {
name: name.lexeme().to_owned(),
name: name.lexeme().to_owned(),
arguments: vec![Expression::new(a), Expression::new(b)],
},
},
@ -223,14 +223,14 @@ impl Expression {
#[derive(Eq, PartialEq, Hash, Ord, PartialOrd, Debug, Clone)]
pub struct Dependency {
pub recipe: String,
pub recipe: String,
pub arguments: Vec<Expression>,
}
impl Dependency {
fn new(dependency: &full::Dependency) -> Dependency {
Dependency {
recipe: dependency.recipe.name().to_owned(),
recipe: dependency.recipe.name().to_owned(),
arguments: dependency.arguments.iter().map(Expression::new).collect(),
}
}
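
To make the module doc above concrete, here is a minimal usage sketch. It is illustrative only, not part of this commit's diff; it assumes the module is exposed as `just::summary` when `just` is built as a library, and it relies only on the `summary` signature and the `Summary` fields shown in the hunks above.

use std::path::Path;

// Illustrative sketch: parse a justfile and print its recipe names. The outer
// Result carries IO errors; the inner Result carries a parse-error message,
// matching the signature shown above.
fn list_recipes(path: &Path) -> std::io::Result<()> {
  match just::summary::summary(path)? {
    Ok(summary) => {
      for name in summary.recipes.keys() {
        println!("{}", name);
      }
    },
    Err(message) => eprintln!("justfile did not parse: {}", message),
  }
  Ok(())
}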


@ -73,8 +73,8 @@ impl<'key, V: Keyed<'key>> Index<&'key str> for Table<'key, V> {
}
impl<'key, V: Keyed<'key>> IntoIterator for Table<'key, V> {
type Item = (&'key str, V);
type IntoIter = btree_map::IntoIter<&'key str, V>;
type Item = (&'key str, V);
fn into_iter(self) -> btree_map::IntoIter<&'key str, V> {
self.map.into_iter()
@ -82,8 +82,8 @@ impl<'key, V: Keyed<'key>> IntoIterator for Table<'key, V> {
}
impl<'table, V: Keyed<'table> + 'table> IntoIterator for &'table Table<'table, V> {
type Item = (&'table &'table str, &'table V);
type IntoIter = btree_map::Iter<'table, &'table str, V>;
type Item = (&'table &'table str, &'table V);
#[must_use]
fn into_iter(self) -> btree_map::Iter<'table, &'table str, V> {


@ -76,7 +76,7 @@ pub(crate) fn analysis_error(
kind,
};
assert_eq!(have, want);
}
},
}
}


@ -4,21 +4,21 @@ use crate::common::*;
#[derivative(Debug, PartialEq = "feature_allow_slow_enum")]
pub(crate) enum Thunk<'src> {
Nullary {
name: Name<'src>,
name: Name<'src>,
#[derivative(Debug = "ignore", PartialEq = "ignore")]
function: fn(&FunctionContext) -> Result<String, String>,
},
Unary {
name: Name<'src>,
name: Name<'src>,
#[derivative(Debug = "ignore", PartialEq = "ignore")]
function: fn(&FunctionContext, &str) -> Result<String, String>,
arg: Box<Expression<'src>>,
arg: Box<Expression<'src>>,
},
Binary {
name: Name<'src>,
name: Name<'src>,
#[derivative(Debug = "ignore", PartialEq = "ignore")]
function: fn(&FunctionContext, &str, &str) -> Result<String, String>,
args: [Box<Expression<'src>>; 2],
args: [Box<Expression<'src>>; 2],
},
}
@ -46,11 +46,11 @@ impl<'src> Thunk<'src> {
args: [a, b],
name,
})
}
},
_ => Err(
name.error(CompilationErrorKind::FunctionArgumentCountMismatch {
function: name.lexeme(),
found: arguments.len(),
found: arguments.len(),
expected: function.argc(),
}),
),


@ -4,10 +4,10 @@ use crate::common::*;
pub(crate) struct Token<'src> {
pub(crate) offset: usize,
pub(crate) length: usize,
pub(crate) line: usize,
pub(crate) line: usize,
pub(crate) column: usize,
pub(crate) src: &'src str,
pub(crate) kind: TokenKind,
pub(crate) src: &'src str,
pub(crate) kind: TokenKind,
}
impl<'src> Token<'src> {
@ -63,16 +63,15 @@ impl<'src> Token<'src> {
space_width,
color.suffix()
)?;
}
None => {
},
None =>
if self.offset != self.src.len() {
write!(
f,
"internal error: Error has invalid line number: {}",
line_number
)?
}
}
},
}
Ok(())
}


@ -31,35 +31,31 @@ pub(crate) enum TokenKind {
impl Display for TokenKind {
fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {
use TokenKind::*;
write!(
f,
"{}",
match *self {
At => "'@'",
Backtick => "backtick",
BracketL => "'['",
BracketR => "']'",
Colon => "':'",
ColonEquals => "':='",
Comma => "','",
Comment => "comment",
Dedent => "dedent",
Eof => "end of file",
Eol => "end of line",
Equals => "'='",
Identifier => "identifier",
Indent => "indent",
InterpolationEnd => "'}}'",
InterpolationStart => "'{{'",
ParenL => "'('",
ParenR => "')'",
Plus => "'+'",
StringCooked => "cooked string",
StringRaw => "raw string",
Text => "command text",
Whitespace => "whitespace",
Unspecified => "unspecified",
}
)
write!(f, "{}", match *self {
At => "'@'",
Backtick => "backtick",
BracketL => "'['",
BracketR => "']'",
Colon => "':'",
ColonEquals => "':='",
Comma => "','",
Comment => "comment",
Dedent => "dedent",
Eof => "end of file",
Eol => "end of line",
Equals => "'='",
Identifier => "identifier",
Indent => "indent",
InterpolationEnd => "'}}'",
InterpolationStart => "'{{'",
ParenL => "'('",
ParenR => "')'",
Plus => "'+'",
StringCooked => "cooked string",
StringRaw => "raw string",
Text => "command text",
Whitespace => "whitespace",
Unspecified => "unspecified",
})
}
}


@ -2,9 +2,10 @@ use crate::common::*;
use std::mem;
/// Construct a `Tree` from a symbolic expression literal. This macro, and the
/// Tree type, are only used in the Parser unit tests, as a concise notation
/// representing the expected results of parsing a given string.
/// Construct a `Tree` from a symbolic expression literal. This macro,
/// and the Tree type, are only used in the Parser unit tests, as a
/// concise notation representing the expected results of parsing a
/// given string.
macro_rules! tree {
{
($($child:tt)*)
@ -62,19 +63,20 @@ impl<'text> Tree<'text> {
Tree::atom(format!("\"{}\"", contents.as_ref()))
}
/// Push a child node into self, turning it into a List if it was an Atom
/// Push a child node into self, turning it into a List if it was an
/// Atom
pub(crate) fn push(self, tree: impl Into<Tree<'text>>) -> Tree<'text> {
match self {
Tree::List(mut children) => {
children.push(tree.into());
Tree::List(children)
}
},
Tree::Atom(text) => Tree::List(vec![Tree::Atom(text), tree.into()]),
}
}
/// Extend a self with a tail of Trees, turning self into a List if it
/// was an Atom
/// Extend a self with a tail of Trees, turning self into a List if
/// it was an Atom
pub(crate) fn extend<I, T>(self, tail: I) -> Tree<'text>
where
I: IntoIterator<Item = T>,
@ -114,7 +116,7 @@ impl Display for Tree<'_> {
}
write!(f, ")")
}
},
Tree::Atom(text) => write!(f, "{}", text),
}
}
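
A small sketch of the push behavior described in the doc comments above. It is illustrative only, not part of this commit's diff, and assumes it sits in the same test module as Tree; it uses only the Tree::Atom and Tree::List variants and the push method shown in these hunks.

// Illustrative only: pushing a child onto an Atom turns it into a List of
// [original atom, new child], as documented above.
#[test]
fn push_turns_atom_into_list() {
  let list = Tree::Atom("justfile".into()).push(Tree::Atom("recipe".into()));
  match list {
    Tree::List(children) => assert_eq!(children.len(), 2),
    Tree::Atom(_) => panic!("expected a list"),
  }
}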


@ -2,6 +2,6 @@ use crate::common::*;
#[derive(PartialEq, Debug)]
pub(crate) struct UnresolvedDependency<'src> {
pub(crate) recipe: Name<'src>,
pub(crate) recipe: Name<'src>,
pub(crate) arguments: Vec<Expression<'src>>,
}


@ -17,9 +17,9 @@ impl<'src> UnresolvedRecipe<'src> {
return Err(unresolved.recipe.error(
CompilationErrorKind::DependencyArgumentCountMismatch {
dependency: unresolved.recipe.lexeme(),
found: unresolved.arguments.len(),
min: resolved.min_arguments(),
max: resolved.max_arguments(),
found: unresolved.arguments.len(),
min: resolved.min_arguments(),
max: resolved.max_arguments(),
},
));
}
@ -30,7 +30,7 @@ impl<'src> UnresolvedRecipe<'src> {
.into_iter()
.zip(resolved)
.map(|(unresolved, resolved)| Dependency {
recipe: resolved,
recipe: resolved,
arguments: unresolved.arguments,
})
.collect();


@ -24,11 +24,11 @@ impl<'expression, 'src> Iterator for Variables<'expression, 'src> {
self.stack.push(lhs);
self.stack.push(rhs);
self.next()
}
},
Some(Expression::Group { contents }) => {
self.stack.push(contents);
self.next()
}
},
}
}
}


@ -32,7 +32,7 @@ impl Display for Warning<'_> {
f,
"Please see this issue for more details: https://github.com/casey/just/issues/379"
)?;
}
},
}
write!(f, "{}", message.suffix())?;


@ -100,7 +100,7 @@ impl Entry {
for (name, entry) in entries {
entry.instantiate(&path.join(name));
}
}
},
}
}
@ -137,7 +137,8 @@ macro_rules! entries {
$($name:tt : $contents:tt,)*
} => {
{
let mut entries: std::collections::HashMap<&'static str, $crate::Entry> = std::collections::HashMap::new();
use std::collections::HashMap;
let mut entries: HashMap<&'static str, $crate::Entry> = HashMap::new();
$(
entries.insert($crate::name!($name), $crate::entry!($contents));


@ -40,24 +40,24 @@ macro_rules! test {
struct Test<'a> {
justfile: &'a str,
args: &'a [&'a str],
stdin: &'a str,
stdout: &'a str,
stderr: &'a str,
status: i32,
shell: bool,
args: &'a [&'a str],
stdin: &'a str,
stdout: &'a str,
stderr: &'a str,
status: i32,
shell: bool,
}
impl<'a> Default for Test<'a> {
fn default() -> Test<'a> {
Test {
justfile: "",
args: &[],
stdin: "",
stdout: "",
stderr: "",
status: EXIT_SUCCESS,
shell: true,
args: &[],
stdin: "",
stdout: "",
stderr: "",
status: EXIT_SUCCESS,
shell: true,
}
}
}
@ -943,7 +943,11 @@ foo A B C='C':
",
args: ("foo", "bar"),
stdout: "",
stderr: "error: Recipe `foo` got 1 argument but takes at least 2\nusage:\n just foo A B C='C'\n",
stderr: "
error: Recipe `foo` got 1 argument but takes at least 2
usage:
just foo A B C='C'
",
status: EXIT_FAILURE,
}


@ -20,8 +20,8 @@ const DATA: &str = "OK";
const WANT: &str = "shebang: OK\nexpression: OK\ndefault: OK\nlinewise: OK\n";
/// Test that just runs with the correct working directory when invoked with
/// `--justfile` but not `--working-directory`
/// Test that just runs with the correct working directory when
/// invoked with `--justfile` but not `--working-directory`
#[test]
fn justfile_without_working_directory() -> Result<(), Box<dyn Error>> {
let tmp = tmptree! {
@ -46,9 +46,9 @@ fn justfile_without_working_directory() -> Result<(), Box<dyn Error>> {
Ok(())
}
/// Test that just runs with the correct working directory when invoked with
/// `--justfile` but not `--working-directory`, and justfile path has no
/// parent
/// Test that just runs with the correct working directory when
/// invoked with `--justfile` but not `--working-directory`, and
/// justfile path has no parent
#[test]
fn justfile_without_working_directory_relative() -> Result<(), Box<dyn Error>> {
let tmp = tmptree! {
@ -74,7 +74,8 @@ fn justfile_without_working_directory_relative() -> Result<(), Box<dyn Error>> {
Ok(())
}
/// Test that just invokes commands from the directory in which the justfile is found
/// Test that just invokes commands from the directory in which the
/// justfile is found
#[test]
fn change_working_directory_to_search_justfile_parent() -> Result<(), Box<dyn Error>> {
let tmp = tmptree! {
@ -99,8 +100,8 @@ fn change_working_directory_to_search_justfile_parent() -> Result<(), Box<dyn Er
Ok(())
}
/// Test that just runs with the correct working directory when invoked with
/// `--justfile` but not `--working-directory`
/// Test that just runs with the correct working directory when
/// invoked with `--justfile` but not `--working-directory`
#[test]
fn justfile_and_working_directory() -> Result<(), Box<dyn Error>> {
let tmp = tmptree! {
@ -129,8 +130,8 @@ fn justfile_and_working_directory() -> Result<(), Box<dyn Error>> {
Ok(())
}
/// Test that just runs with the correct working directory when invoked with
/// `--justfile` but not `--working-directory`
/// Test that just runs with the correct working directory when
/// invoked with `--justfile` but not `--working-directory`
#[test]
fn search_dir_child() -> Result<(), Box<dyn Error>> {
let tmp = tmptree! {
@ -157,8 +158,8 @@ fn search_dir_child() -> Result<(), Box<dyn Error>> {
Ok(())
}
/// Test that just runs with the correct working directory when invoked with
/// `--justfile` but not `--working-directory`
/// Test that just runs with the correct working directory when
/// invoked with `--justfile` but not `--working-directory`
#[test]
fn search_dir_parent() -> Result<(), Box<dyn Error>> {
let tmp = tmptree! {