Can now test multiple files

John Konecny 2024-08-03 19:51:47 -04:00
parent 9fc6dccb9f
commit 78afd368cc

@@ -264,6 +264,7 @@ pub fn build_app() -> Command {
Arg::new(ROC_FILE)
.help("The .roc file to test")
.value_parser(value_parser!(PathBuf))
.num_args(0..)
.required(false)
.default_value(DEFAULT_ROC_FILENAME)
)
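The hunk above relies on clap's `num_args(0..)` to turn the positional ROC_FILE argument into "zero or more" values while keeping the existing default. A minimal sketch of that behavior (assuming clap 4.x; the command name and file names below are placeholders, not the real CLI):

use clap::{value_parser, Arg, Command};
use std::path::PathBuf;

fn main() {
    let cmd = Command::new("roc-test-sketch").arg(
        Arg::new("ROC_FILE")
            .help("The .roc file to test")
            .value_parser(value_parser!(PathBuf))
            .num_args(0..) // zero or more paths instead of exactly one
            .required(false)
            .default_value("main.roc"),
    );

    // With no positional argument the default value is used; with several,
    // every path is kept and can be read back with get_many.
    let matches = cmd.get_matches_from(["roc-test-sketch", "a.roc", "b.roc"]);
    let paths: Vec<&PathBuf> = matches.get_many::<PathBuf>("ROC_FILE").unwrap().collect();
    for path in &paths {
        println!("would test {}", path.display());
    }
}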
@@ -498,18 +499,20 @@ pub fn test(matches: &ArgMatches, target: Target) -> io::Result<i32> {
Some(n) => Threading::AtMost(*n),
};
let path = matches.get_one::<PathBuf>(ROC_FILE).unwrap();
// let path = matches.get_one::<PathBuf>(ROC_FILE).unwrap();
let paths: Vec<_> = matches.get_many::<PathBuf>(ROC_FILE).unwrap().collect();
// Spawn the root task
if !path.exists() {
let current_dir = env::current_dir().unwrap();
let expected_file_path = current_dir.join(path);
for path in paths.iter() {
// Spawn the root task
if !path.exists() {
let current_dir = env::current_dir().unwrap();
let expected_file_path = current_dir.join(path);
let current_dir_string = current_dir.display();
let expected_file_path_string = expected_file_path.display();
let current_dir_string = current_dir.display();
let expected_file_path_string = expected_file_path.display();
// TODO these should use roc_reporting to display nicer error messages.
match matches.value_source(ROC_FILE) {
// TODO these should use roc_reporting to display nicer error messages.
match matches.value_source(ROC_FILE) {
Some(ValueSource::DefaultValue) => {
eprintln!(
"\nThe current directory ({current_dir_string}) does not contain a {DEFAULT_ROC_FILENAME} file to use as a default.\n\nYou can run `roc help` for more information on how to provide a .roc file.\n"
@@ -518,115 +521,135 @@ pub fn test(matches: &ArgMatches, target: Target) -> io::Result<i32> {
_ => eprintln!("\nThis file was not found: {expected_file_path_string}\n\nYou can run `roc help` for more information on how to provide a .roc file.\n"),
}
process::exit(1);
process::exit(1);
}
}
let arena = &arena;
let function_kind = FunctionKind::from_env();
let mut all_files_total_failed_count = 0;
let mut all_files_total_passed_count = 0;
let opt_main_path = matches.get_one::<PathBuf>(FLAG_MAIN);
for path in paths.iter() {
let arena = &arena;
let function_kind = FunctionKind::from_env();
// Step 1: compile the app and generate the .o file
let load_config = LoadConfig {
target,
function_kind,
// TODO: expose this from CLI?
render: roc_reporting::report::RenderTarget::ColorTerminal,
palette: roc_reporting::report::DEFAULT_PALETTE,
threading,
exec_mode: ExecutionMode::Test,
};
let load_result = roc_load::load_and_monomorphize(
arena,
path.to_path_buf(),
opt_main_path.cloned(),
RocCacheDir::Persistent(cache::roc_cache_dir().as_path()),
load_config,
);
let opt_main_path = matches.get_one::<PathBuf>(FLAG_MAIN);
let mut loaded = match load_result {
Ok(loaded) => loaded,
Err(LoadMonomorphizedError::LoadingProblem(problem)) => {
return handle_loading_problem(problem);
}
Err(LoadMonomorphizedError::ErrorModule(module)) => {
return handle_error_module(module, start_time.elapsed(), path.as_os_str(), false);
}
};
let problems = report_problems_monomorphized(&mut loaded);
let mut expectations = std::mem::take(&mut loaded.expectations);
let interns = loaded.interns.clone();
let sources = loaded.sources.clone();
let (dyn_lib, expects_by_module, layout_interner) =
roc_repl_expect::run::expect_mono_module_to_dylib(
arena,
// Step 1: compile the app and generate the .o file
let load_config = LoadConfig {
target,
loaded,
opt_level,
LlvmBackendMode::CliTest,
)
.unwrap();
// Print warnings before running tests.
{
debug_assert_eq!(
problems.errors, 0,
"if there were errors, we would have already exited."
function_kind,
// TODO: expose this from CLI?
render: roc_reporting::report::RenderTarget::ColorTerminal,
palette: roc_reporting::report::DEFAULT_PALETTE,
threading,
exec_mode: ExecutionMode::Test,
};
let load_result = roc_load::load_and_monomorphize(
arena,
path.to_path_buf(),
opt_main_path.cloned(),
RocCacheDir::Persistent(cache::roc_cache_dir().as_path()),
load_config,
);
if problems.warnings > 0 {
problems.print_error_warning_count(start_time.elapsed());
println!(".\n\nRunning tests…\n\n\x1B[36m{}\x1B[39m", "".repeat(80));
let mut loaded = match load_result {
Ok(loaded) => loaded,
Err(LoadMonomorphizedError::LoadingProblem(problem)) => {
return handle_loading_problem(problem);
}
Err(LoadMonomorphizedError::ErrorModule(module)) => {
return handle_error_module(module, start_time.elapsed(), path.as_os_str(), false);
}
};
let problems = report_problems_monomorphized(&mut loaded);
let mut expectations = std::mem::take(&mut loaded.expectations);
let interns = loaded.interns.clone();
let sources = loaded.sources.clone();
let (dyn_lib, expects_by_module, layout_interner) =
roc_repl_expect::run::expect_mono_module_to_dylib(
arena,
target,
loaded,
opt_level,
LlvmBackendMode::CliTest,
)
.unwrap();
// Print warnings before running tests.
{
debug_assert_eq!(
problems.errors, 0,
"if there were errors, we would have already exited."
);
if problems.warnings > 0 {
problems.print_error_warning_count(start_time.elapsed());
println!(".\n\nRunning tests…\n\n\x1B[36m{}\x1B[39m", "".repeat(80));
}
}
// Run the tests.
let arena = &bumpalo::Bump::new();
let interns = arena.alloc(interns);
let mut writer = std::io::stdout();
let mut total_failed_count = 0;
let mut total_passed_count = 0;
let mut results_by_module = Vec::new();
let global_layout_interner = layout_interner.into_global();
let compilation_duration = start_time.elapsed();
for (module_id, expects) in expects_by_module.into_iter() {
let test_start_time = Instant::now();
let (failed_count, passed_count) = roc_repl_expect::run::run_toplevel_expects(
&mut writer,
roc_reporting::report::RenderTarget::ColorTerminal,
arena,
interns,
&global_layout_interner,
&dyn_lib,
&mut expectations,
expects,
)
.unwrap();
let tests_duration = test_start_time.elapsed();
results_by_module.push(ModuleTestResults {
module_id,
failed_count,
passed_count,
tests_duration,
});
total_failed_count += failed_count;
total_passed_count += passed_count;
}
let total_duration = start_time.elapsed();
all_files_total_failed_count += total_failed_count;
all_files_total_passed_count += total_passed_count;
if total_failed_count == 0 && total_passed_count == 0 {
// TODO print this in a more nicely formatted way!
println!("No expectations were found.");
} else if matches.get_flag(FLAG_VERBOSE) {
println!("Compiled in {} ms.", compilation_duration.as_millis());
for module_test_results in results_by_module {
print_test_results(module_test_results, &sources);
}
} else {
let test_summary_str =
test_summary(total_failed_count, total_passed_count, total_duration);
println!("{test_summary_str}");
}
}
// Run the tests.
let arena = &bumpalo::Bump::new();
let interns = arena.alloc(interns);
let mut writer = std::io::stdout();
let mut total_failed_count = 0;
let mut total_passed_count = 0;
let mut results_by_module = Vec::new();
let global_layout_interner = layout_interner.into_global();
let compilation_duration = start_time.elapsed();
for (module_id, expects) in expects_by_module.into_iter() {
let test_start_time = Instant::now();
let (failed_count, passed_count) = roc_repl_expect::run::run_toplevel_expects(
&mut writer,
roc_reporting::report::RenderTarget::ColorTerminal,
arena,
interns,
&global_layout_interner,
&dyn_lib,
&mut expectations,
expects,
)
.unwrap();
let tests_duration = test_start_time.elapsed();
results_by_module.push(ModuleTestResults {
module_id,
failed_count,
passed_count,
tests_duration,
});
total_failed_count += failed_count;
total_passed_count += passed_count;
}
let total_duration = start_time.elapsed();
if total_failed_count == 0 && total_passed_count == 0 {
if all_files_total_failed_count == 0 && all_files_total_passed_count == 0 {
// TODO print this in a more nicely formatted way!
println!("No expectations were found.");
@@ -637,18 +660,7 @@ pub fn test(matches: &ArgMatches, target: Target) -> io::Result<i32> {
// running tests altogether!
Ok(2)
} else {
if matches.get_flag(FLAG_VERBOSE) {
println!("Compiled in {} ms.", compilation_duration.as_millis());
for module_test_results in results_by_module {
print_test_results(module_test_results, &sources);
}
} else {
let test_summary_str =
test_summary(total_failed_count, total_passed_count, total_duration);
println!("{test_summary_str}");
}
Ok((total_failed_count > 0) as i32)
Ok((all_files_total_failed_count > 0) as i32)
}
}
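The return value is now derived from the cross-file totals: judging from the visible branches, 2 when no expectations were found anywhere, otherwise 1 if any expect failed and 0 if all passed. A sketch of that mapping as read from the diff (the Ok(2) branch is assumed to be the "no expectations found" case):

// Exit-code mapping implied by the diff above.
fn test_exit_code(all_files_failed: usize, all_files_passed: usize) -> i32 {
    if all_files_failed == 0 && all_files_passed == 0 {
        2 // nothing ran at all
    } else {
        (all_files_failed > 0) as i32 // 1 if anything failed, 0 otherwise
    }
}

fn main() {
    assert_eq!(test_exit_code(0, 0), 2);
    assert_eq!(test_exit_code(0, 5), 0);
    assert_eq!(test_exit_code(3, 5), 1);
}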