Mirror of https://github.com/casey/just.git, synced 2024-11-22 02:09:44 +03:00
Merge branch 'master' into feat/working-directory-attribute
Commit: d034fd1510
README.md (42 lines changed)
@@ -1292,19 +1292,19 @@ Available recipes:
 ### Variables and Substitution
 
-Variables, strings, concatenation, path joining, and substitution using `{{…}}`
-are supported:
+Variables, strings, concatenation, path joining, substitution using `{{…}}`, and function calls are supported:
 
 ```just
 tmpdir := `mktemp -d`
 version := "0.2.7"
 tardir := tmpdir / "awesomesauce-" + version
 tarball := tardir + ".tar.gz"
+config := quote(config_dir() / ".project-config")
 
 publish:
   rm -f {{tarball}}
   mkdir {{tardir}}
-  cp README.md *.c {{tardir}}
+  cp README.md *.c {{ config }} {{tardir}}
   tar zcvf {{tarball}} {{tardir}}
   scp {{tarball}} me@server.com:release/
   rm -rf {{tarball}} {{tardir}}
 ```
@@ -1497,8 +1497,8 @@ Done!
 
 ### Functions
 
-`just` provides a few built-in functions that might be useful when writing
-recipes.
+`just` provides many built-in functions for use in expressions, including
+recipe body `{{…}}` substitutions, assignments, and default parameter values.
 
 All functions ending in `_directory` can be abbreviated to `_dir`. So
 `home_directory()` can also be written as `home_dir()`. In addition,
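As an editorial aside on the reworded paragraph above: the three places it names (assignments, default parameter values, and recipe-body `{{…}}` substitutions) can all contain function calls. A minimal, hypothetical sketch, not part of this commit and using invented recipe and variable names, might look like:

```just
# a built-in function call in an assignment
build := justfile_directory() / "build"

# function calls in a default parameter value and in `{{…}}` substitutions
greet name=env("USER", "world"):
  echo "hello {{ uppercase(name) }} from {{ os() }}/{{ arch() }}"
```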
@@ -2077,6 +2077,10 @@ See the [Strings](#strings) section for details on unindenting.
 Backticks may not start with `#!`. This syntax is reserved for a future
 upgrade.
 
+The [`shell(…)` function](#external-commands) provides a more general mechanism
+to invoke external commands, including the ability to execute the contents of a
+variable as a command, and to pass arguments to a command.
+
 ### Conditional Expressions
 
 `if`/`else` expressions evaluate different branches depending on if two
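To make the added `shell(…)` paragraph concrete, here is a small, hypothetical sketch, not part of this commit, with invented variable and recipe names; it assumes `shell(command, args…)` passes its extra arguments to the command as `$1`, `$2`, and so on:

```just
# the command itself lives in a variable; arguments are passed positionally
cmd := 'echo $1 $2'

greeting := shell(cmd, 'hello', 'world')

show:
  @echo '{{ greeting }}'
```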
@@ -3337,7 +3341,33 @@ Imports may be made optional by putting a `?` after the `import` keyword:
 import? 'foo/bar.just'
 ```
 
 Missing source files for optional imports do not produce an error.
 
+Importing the same source file multiple times is not an error<sup>master</sup>.
+This allows importing multiple justfiles, for example `foo.just` and
+`bar.just`, which both import a third justfile containing shared recipes, for
+example `baz.just`, without the duplicate import of `baz.just` being an error:
+
+```mf
+# justfile
+import 'foo.just'
+import 'bar.just'
+```
+
+```mf
+# foo.just
+import 'baz.just'
+
+foo: baz
+```
+
+```mf
+# bar.just
+import 'baz.just'
+
+bar: baz
+```
+
+```just
+# baz
+baz:
+```
+
 ### Modules<sup>1.19.0</sup>
@@ -35,6 +35,7 @@ impl<'run, 'src> Analyzer<'run, 'src> {
     root: &Path,
   ) -> CompileResult<'src, Justfile<'src>> {
     let mut definitions = HashMap::new();
+    let mut imports = HashSet::new();
 
     let mut stack = Vec::new();
     let ast = asts.get(root).unwrap();
@@ -54,7 +55,9 @@ impl<'run, 'src> Analyzer<'run, 'src> {
           Item::Comment(_) => (),
           Item::Import { absolute, .. } => {
            if let Some(absolute) = absolute {
-              stack.push(asts.get(absolute).unwrap());
+              if imports.insert(absolute) {
+                stack.push(asts.get(absolute).unwrap());
+              }
            }
          }
          Item::Module {
@@ -21,7 +21,6 @@ impl Compiler {
       let tokens = Lexer::lex(relative, src)?;
       let mut ast = Parser::parse(
         current.file_depth,
-        &current.path,
         &current.import_offsets,
         &current.namepath,
         &tokens,
@@ -214,14 +213,7 @@ impl Compiler {
   #[cfg(test)]
   pub(crate) fn test_compile(src: &str) -> CompileResult<Justfile> {
     let tokens = Lexer::test_lex(src)?;
-    let ast = Parser::parse(
-      0,
-      &PathBuf::new(),
-      &[],
-      &Namepath::default(),
-      &tokens,
-      &PathBuf::new(),
-    )?;
+    let ast = Parser::parse(0, &[], &Namepath::default(), &tokens, &PathBuf::new())?;
     let root = PathBuf::from("justfile");
     let mut asts: HashMap<PathBuf, Ast> = HashMap::new();
     asts.insert(root.clone(), ast);
@@ -26,7 +26,6 @@ use {super::*, TokenKind::*};
 pub(crate) struct Parser<'run, 'src> {
   expected_tokens: BTreeSet<TokenKind>,
   file_depth: u32,
-  file_path: &'run Path,
   import_offsets: Vec<usize>,
   module_namepath: &'run Namepath<'src>,
   next_token: usize,
@@ -39,7 +38,6 @@ impl<'run, 'src> Parser<'run, 'src> {
   /// Parse `tokens` into an `Ast`
   pub(crate) fn parse(
     file_depth: u32,
-    file_path: &'run Path,
     import_offsets: &[usize],
     module_namepath: &'run Namepath<'src>,
     tokens: &'run [Token<'src>],
@@ -48,7 +46,6 @@ impl<'run, 'src> Parser<'run, 'src> {
     Self {
       expected_tokens: BTreeSet::new(),
       file_depth,
-      file_path,
       import_offsets: import_offsets.to_vec(),
       module_namepath,
       next_token: 0,
@@ -910,7 +907,6 @@ impl<'run, 'src> Parser<'run, 'src> {
       dependencies,
       doc,
       file_depth: self.file_depth,
-      file_path: self.file_path.into(),
       import_offsets: self.import_offsets.clone(),
       name,
       namepath: self.module_namepath.join(name),
@@ -1162,15 +1158,8 @@ mod tests {
   fn test(text: &str, want: Tree) {
     let unindented = unindent(text);
     let tokens = Lexer::test_lex(&unindented).expect("lexing failed");
-    let justfile = Parser::parse(
-      0,
-      &PathBuf::new(),
-      &[],
-      &Namepath::default(),
-      &tokens,
-      &PathBuf::new(),
-    )
-    .expect("parsing failed");
+    let justfile = Parser::parse(0, &[], &Namepath::default(), &tokens, &PathBuf::new())
+      .expect("parsing failed");
     let have = justfile.tree();
     if have != want {
       println!("parsed text: {unindented}");
@@ -1208,14 +1197,7 @@ mod tests {
   ) {
     let tokens = Lexer::test_lex(src).expect("Lexing failed in parse test...");
 
-    match Parser::parse(
-      0,
-      &PathBuf::new(),
-      &[],
-      &Namepath::default(),
-      &tokens,
-      &PathBuf::new(),
-    ) {
+    match Parser::parse(0, &[], &Namepath::default(), &tokens, &PathBuf::new()) {
       Ok(_) => panic!("Parsing unexpectedly succeeded"),
       Err(have) => {
         let want = CompileError {
@@ -26,8 +26,6 @@ pub(crate) struct Recipe<'src, D = Dependency<'src>> {
   #[serde(skip)]
   pub(crate) file_depth: u32,
-  #[serde(skip)]
-  pub(crate) file_path: PathBuf,
   #[serde(skip)]
   pub(crate) import_offsets: Vec<usize>,
   pub(crate) name: Name<'src>,
   pub(crate) namepath: Namepath<'src>,
@@ -59,15 +59,8 @@ pub(crate) fn analysis_error(
 ) {
   let tokens = Lexer::test_lex(src).expect("Lexing failed in parse test...");
 
-  let ast = Parser::parse(
-    0,
-    &PathBuf::new(),
-    &[],
-    &Namepath::default(),
-    &tokens,
-    &PathBuf::new(),
-  )
-  .expect("Parsing failed in analysis test...");
+  let ast = Parser::parse(0, &[], &Namepath::default(), &tokens, &PathBuf::new())
+    .expect("Parsing failed in analysis test...");
 
   let root = PathBuf::from("justfile");
   let mut asts: HashMap<PathBuf, Ast> = HashMap::new();
@@ -50,7 +50,6 @@ impl<'src> UnresolvedRecipe<'src> {
       dependencies,
       doc: self.doc,
       file_depth: self.file_depth,
-      file_path: self.file_path,
       import_offsets: self.import_offsets,
       name: self.name,
       namepath: self.namepath,
@@ -360,3 +360,51 @@ fn reused_import_are_allowed() {
     })
     .run();
 }
+
+#[test]
+fn multiply_imported_items_do_not_conflict() {
+  Test::new()
+    .justfile(
+      "
+        import 'a.just'
+        import 'a.just'
+        foo: bar
+      ",
+    )
+    .write(
+      "a.just",
+      "
+        x := 'y'
+
+        @bar:
+          echo hello
+      ",
+    )
+    .stdout("hello\n")
+    .run();
+}
+
+#[test]
+fn nested_multiply_imported_items_do_not_conflict() {
+  Test::new()
+    .justfile(
+      "
+        import 'a.just'
+        import 'b.just'
+        foo: bar
+      ",
+    )
+    .write("a.just", "import 'c.just'")
+    .write("b.just", "import 'c.just'")
+    .write(
+      "c.just",
+      "
+        x := 'y'
+
+        @bar:
+          echo hello
+      ",
+    )
+    .stdout("hello\n")
+    .run();
+}
@@ -1,4 +1,7 @@
-use {super::*, pretty_assertions::assert_eq};
+use {
+  super::*,
+  pretty_assertions::{assert_eq, StrComparison},
+};
 
 macro_rules! test {
   {
@@ -205,6 +208,14 @@ impl Test {
       equal
     }
 
+    fn compare_string(name: &str, have: &str, want: &str) -> bool {
+      let equal = have == want;
+      if !equal {
+        eprintln!("Bad {name}: {}", StrComparison::new(&have, &want));
+      }
+      equal
+    }
+
     if let Some(justfile) = &self.justfile {
       let justfile = unindent(justfile);
       fs::write(self.justfile_path(), justfile).unwrap();
@@ -266,8 +277,8 @@ impl Test {
     }
 
     if !compare("status", output.status.code(), Some(self.status))
-      | (self.stdout_regex.is_none() && !compare("stdout", output_stdout, &stdout))
-      | (self.stderr_regex.is_none() && !compare("stderr", output_stderr, &stderr))
+      | (self.stdout_regex.is_none() && !compare_string("stdout", output_stdout, &stdout))
+      | (self.stderr_regex.is_none() && !compare_string("stderr", output_stderr, &stderr))
     {
       panic!("Output mismatch.");
     }