Rename --setup to --prepare

sharkdp 2018-01-18 18:13:12 +01:00
parent d5e02ff768
commit a656fbe508
4 changed files with 39 additions and 30 deletions


@@ -2,7 +2,8 @@
A command-line benchmarking tool (*inspired by [bench](https://github.com/Gabriel439/bench)*).
-**Demo**: Benchmarking [`fd`](https://github.com/sharkdp/fd) and [`find`](https://www.gnu.org/software/findutils/):
+**Demo**: Benchmarking [`fd`](https://github.com/sharkdp/fd) and
+[`find`](https://www.gnu.org/software/findutils/):
![hyperfine](https://i.imgur.com/5OqrGWe.gif)
@@ -17,32 +18,40 @@ A command-line benchmarking tool (*inspired by [bench](https://github.com/Gabriel439/bench)*).
### Basic benchmark
-To run a benchmark, you can simply call `hyperfine <command>...` where the argument(s) can be any shell command. For example:
+To run a benchmark, you can simply call `hyperfine <command>...`. The argument(s) can be any
+shell command. For example:
``` bash
> hyperfine 'sleep 0.3'
```
-Hyperfine will automatically determine the number of runs to perform for each command. By default, it will perform *at least* 10 benchmarking runs. To change this, you can use the `-m`/`--min-runs` option:
+Hyperfine will automatically determine the number of runs to perform for each command. By default,
+it will perform *at least* 10 benchmarking runs. To change this, you can use the `-m`/`--min-runs`
+option:
``` bash
> hyperfine --min-runs 5 'sleep 0.2' 'sleep 3.2'
```
### I/O-heavy programs
-If the program execution time is limited by disk I/O, the benchmarking results can be heavily influenced by disk caches and whether they are cold or warm.
+If the program execution time is limited by disk I/O, the benchmarking results can be heavily
+influenced by disk caches and whether they are cold or warm.
-If you want to run the benchmark on a warm cache, you can use the `-w`/`--warmup` option to perform a certain amount of program executions before the actual benchmark:
+If you want to run the benchmark on a warm cache, you can use the `-w`/`--warmup` option to perform
+a certain number of program executions before the actual benchmark:
``` bash
> hyperfine --warmup 3 'grep -R TODO *'
```
-Conversely, if you want to run the benchmark for a cold cache, you can use the `-S`/`--setup` option to run a special command before *each* benchmarking run. For example, to clear disk caches on Linux, you can run
+Conversely, if you want to run the benchmark for a cold cache, you can use the `-p`/`--prepare`
+option to run a special command before *each* timing run. For example, to clear hard disk caches
+on Linux, you can run
``` bash
sync; echo 3 | sudo tee /proc/sys/vm/drop_caches
```
-To use this with Hyperfine, call `sudo echo` to temporarily gain sudo permissions and then call:
+To use this specific command with Hyperfine, call `sudo echo` to temporarily gain sudo permissions
+and then call:
``` bash
-hyperfine -S 'sync; echo 3 | sudo tee /proc/sys/vm/drop_caches' 'grep -R TODO *'
+hyperfine --prepare 'sync; echo 3 | sudo tee /proc/sys/vm/drop_caches' 'grep -R TODO *'
```
## Installation


@@ -133,13 +133,13 @@ pub fn run_benchmark(
// Set up progress bar (and spinner for initial measurement)
let bar = get_progress_bar(options.min_runs, "Initial time measurement");
-// Run setup / cleanup command
-let run_setup_command = || {
-if let Some(ref setup_command) = options.setup_command {
-let _ = time_shell_command(setup_command, options.ignore_failure, None);
+// Run init / cleanup command
+let run_preparation_command = || {
+if let Some(ref preparation_command) = options.preparation_command {
+let _ = time_shell_command(preparation_command, options.ignore_failure, None);
}
};
-run_setup_command();
+run_preparation_command();
// Initial timing run
let res = time_shell_command(cmd, options.ignore_failure, Some(shell_spawning_time))?;
@@ -165,7 +165,7 @@ pub fn run_benchmark(
// Gather statistics
for _ in 0..count_remaining {
-run_setup_command();
+run_preparation_command();
let msg = {
let mean = format_duration(mean(&execution_times), Unit::Auto);
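
To make the renamed code path concrete: the hunk above wraps the optional `--prepare` command in a closure and invokes it once before the initial measurement and once before every subsequent timing run. Here is a minimal, self-contained sketch of that pattern; the `run_shell` helper is a hypothetical stand-in for hyperfine's `time_shell_command`.

``` rust
use std::process::Command;

// Hypothetical stand-in for hyperfine's `time_shell_command`.
fn run_shell(cmd: &str) {
    let _ = Command::new("sh").arg("-c").arg(cmd).status();
}

fn benchmark(cmd: &str, preparation_command: &Option<String>, runs: u64) {
    // Mirrors the closure in the diff: a no-op when --prepare was not given.
    let run_preparation_command = || {
        if let Some(ref prep) = *preparation_command {
            run_shell(prep); // result deliberately discarded, as in `let _ = ...`
        }
    };

    for _ in 0..runs {
        run_preparation_command(); // e.g. drop disk caches before timing
        run_shell(cmd); // the timed run (actual time measurement elided here)
    }
}

fn main() {
    benchmark("grep -R TODO *", &Some("sync".to_string()), 3);
}
```

Note that in the real code, `time_shell_command` both executes and times the command; for the preparation run its timing result is intentionally thrown away.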


@@ -22,8 +22,8 @@ pub struct HyperfineOptions {
/// Whether or not to ignore non-zero exit codes
pub ignore_failure: bool,
-/// Command to run before each benchmark run
-pub setup_command: Option<String>,
+/// Command to run before each timing run
+pub preparation_command: Option<String>,
}
impl Default for HyperfineOptions {
@@ -33,7 +33,7 @@ impl Default for HyperfineOptions {
min_runs: 10,
min_time_sec: 3.0,
ignore_failure: false,
-setup_command: None,
+preparation_command: None,
}
}
}
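
In isolation, the changed parts of this options struct reduce to the following compilable sketch; the remaining fields seen elsewhere in the diff (`warmup_count`, `min_runs`, `min_time_sec`) are elided.

``` rust
pub struct HyperfineOptions {
    /// Whether or not to ignore non-zero exit codes
    pub ignore_failure: bool,
    /// Command to run before each timing run
    pub preparation_command: Option<String>,
}

impl Default for HyperfineOptions {
    fn default() -> HyperfineOptions {
        HyperfineOptions {
            ignore_failure: false,
            preparation_command: None,
        }
    }
}
```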


@@ -52,12 +52,6 @@ fn main() {
.multiple(true)
.empty_values(false),
)
-.arg(
-Arg::with_name("ignore-failure")
-.long("ignore-failure")
-.short("i")
-.help("Ignore non-zero exit codes"),
-)
.arg(
Arg::with_name("warmup")
.long("warmup")
@@ -81,20 +75,24 @@
)),
)
.arg(
Arg::with_name("setup")
.long("setup")
.short("S")
Arg::with_name("prepare")
.long("prepare")
.short("p")
.takes_value(true)
.value_name("CMD")
.help(
"Execute CMD before each benchmark run. This is useful for \
"Execute CMD before each timing run. This is useful for \
clearing disk caches, for example",
),
)
+.arg(
+Arg::with_name("ignore-failure")
+.long("ignore-failure")
+.short("i")
+.help("Ignore non-zero exit codes"),
+)
.get_matches();
-options.ignore_failure = matches.is_present("ignore-failure");
let str_to_u64 = |n| u64::from_str_radix(n, 10).ok();
options.warmup_count = matches
@@ -107,7 +105,9 @@ fn main() {
options.min_runs = cmp::max(2, min_runs);
}
-options.setup_command = matches.value_of("setup").map(String::from);
+options.preparation_command = matches.value_of("prepare").map(String::from);
+options.ignore_failure = matches.is_present("ignore-failure");
let commands = matches.values_of("command").unwrap().collect();
let res = run(&commands, &options);
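
For readers unfamiliar with clap 2.x, here is a compilable sketch of how the renamed flag definition and its later retrieval fit together; the program name and help text are abbreviated, and all other flags are omitted.

``` rust
extern crate clap;

use clap::{App, Arg};

fn main() {
    let matches = App::new("hyperfine-sketch")
        .arg(
            Arg::with_name("prepare")
                .long("prepare")
                .short("p")
                .takes_value(true)
                .value_name("CMD")
                .help("Execute CMD before each timing run"),
        )
        .get_matches();

    // As in the diff: the flag's value lands in an Option<String>.
    let preparation_command: Option<String> =
        matches.value_of("prepare").map(String::from);

    println!("preparation_command = {:?}", preparation_command);
}
```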