mirror of https://github.com/AleoHQ/leo.git
test-framework rework and clean up start
parent 3c7de6e389
commit 8972674dec
tests/test-framework/Cargo.toml

@@ -1,7 +1,7 @@
[package]
name = "leo-test-framework"
version = "1.5.3"
-authors = [ "The Aleo Team <hello@aleo.org>" ]
+authors = ["The Aleo Team <hello@aleo.org>"]
description = "Leo testing framework"
homepage = "https://aleo.org"
repository = "https://github.com/AleoHQ/leo"
@@ -10,61 +10,50 @@ keywords = [
  "cryptography",
  "leo",
  "programming-language",
- "zero-knowledge"
+ "zero-knowledge",
]
-categories = [ "cryptography::cryptocurrencies", "web-programming" ]
-include = [ "Cargo.toml", "src", "README.md", "LICENSE.md" ]
+categories = ["cryptography::cryptocurrencies", "web-programming"]
+include = ["Cargo.toml", "src", "README.md", "LICENSE.md"]
license = "GPL-3.0"
edition = "2018"

+[[bench]]
+name = "leo_compiler"
+path = "benches/leo_compiler.rs"
+harness = false
+
[dependencies]
backtrace = "0.3.65"
+walkdir = "2.3.2"

[dependencies.serde]
version = "1.0"
-features = [ "derive" ]
+features = ["derive"]

[dependencies.serde_json]
version = "1.0"
-features = [ "preserve_order" ]
+features = ["preserve_order"]

[dependencies.serde_yaml]
version = "0.8"

-# List of dependencies for tgc binary;
-# disabled for now while porting modules from staging
-# [dependencies.leo-ast]
-# path = "../ast"
-# version = "1.5.2"
-
-# [dependencies.leo-ast-passes]
-# path = "../ast-passes"
-# version = "1.5.2"
-
-# [dependencies.leo-parser]
-# path = "../parser"
-# version = "1.5.2"
-
-# [dependencies.leo-imports]
-# path = "../imports"
-# version = "1.5.2"
-
-# [dependencies.leo-asg]
-# path = "../asg"
-# version = "1.5.2"
-
-# [dependencies.leo-compiler]
-# path = "../compiler"
-# version = "1.5.2"

[dependencies.structopt]
version = "0.3"

# List of dependencies for errcov

[dependencies.leo-errors]
path = "../../leo/errors"
version = "1.5.3"

[dependencies.regex]
version = "1.5"

+[dev-dependencies.criterion]
+version = "0.3"
+
+[dev-dependencies.leo-compiler]
+path = "../../compiler/compiler"
+version = "1.5.3"
+
+[dev-dependencies.leo-span]
+path = "../../leo/span"
+version = "1.5.3"
tests/test-framework/benches/leo_compiler.rs (new file, 193 lines)

@@ -0,0 +1,193 @@
// Copyright (C) 2019-2022 Aleo Systems Inc.
// This file is part of the Leo library.

// The Leo library is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// The Leo library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.

use leo_compiler::Compiler;
use leo_errors::emitter::Handler;
use leo_span::{
    source_map::FileName,
    symbol::{SessionGlobals, SESSION_GLOBALS},
};
use leo_test_framework::{
    runner::{Namespace, ParseType, Runner},
    Test,
};

use criterion::{black_box, criterion_group, criterion_main, Criterion};
use std::{
    path::PathBuf,
    time::{Duration, Instant},
};

macro_rules! sample {
    ($name:expr) => {
        Sample {
            name: $name,
            input: include_str!(concat!("./", $name, ".leo")),
            path: concat!("./", $name, ".leo"),
        }
    };
}

#[derive(Clone, Copy)]
struct Sample {
    name: &'static str,
    input: &'static str,
    path: &'static str,
}

fn new_compiler<'a>(handler: &'a Handler, main_file_path: &str) -> Compiler<'a> {
    Compiler::new(
        handler,
        PathBuf::from(main_file_path),
        PathBuf::from("/tmp/output/"),
        None,
    )
}

impl Sample {
    const SAMPLES: &'static [Sample] = &[sample!("big"), sample!("iteration")];

    fn data(&self) -> (&str, FileName) {
        black_box((self.input, FileName::Custom(self.path.into())))
    }

    fn bench_parse(&self, c: &mut Criterion) {
        c.bench_function(&format!("parse {}", self.name), |b| {
            b.iter_custom(|iters| {
                let mut time = Duration::default();
                for _ in 0..iters {
                    SESSION_GLOBALS.set(&SessionGlobals::default(), || {
                        let handler = Handler::default();
                        let mut compiler = new_compiler(&handler, self.path);
                        let (input, name) = self.data();
                        let start = Instant::now();
                        let out = compiler.parse_program_from_string(input, name);
                        time += start.elapsed();
                        out.expect("Failed to parse program")
                    });
                }
                time
            })
        });
    }

    fn bench_symbol_table(&self, c: &mut Criterion) {
        c.bench_function(&format!("symbol table pass {}", self.name), |b| {
            b.iter_custom(|iters| {
                let mut time = Duration::default();
                for _ in 0..iters {
                    SESSION_GLOBALS.set(&SessionGlobals::default(), || {
                        let handler = Handler::default();
                        let mut compiler = new_compiler(&handler, self.path);
                        let (input, name) = self.data();
                        compiler
                            .parse_program_from_string(input, name)
                            .expect("Failed to parse program");
                        let start = Instant::now();
                        let out = compiler.symbol_table_pass();
                        time += start.elapsed();
                        out.expect("failed to generate symbol table");
                    });
                }
                time
            })
        });
    }

    fn bench_type_checker(&self, c: &mut Criterion) {
        c.bench_function(&format!("type checker pass {}", self.name), |b| {
            b.iter_custom(|iters| {
                let mut time = Duration::default();
                for _ in 0..iters {
                    SESSION_GLOBALS.set(&SessionGlobals::default(), || {
                        let handler = Handler::default();
                        let mut compiler = new_compiler(&handler, self.path);
                        let (input, name) = self.data();
                        compiler
                            .parse_program_from_string(input, name)
                            .expect("Failed to parse program");
                        let mut symbol_table = compiler.symbol_table_pass().expect("failed to generate symbol table");
                        let start = Instant::now();
                        let out = compiler.type_checker_pass(&mut symbol_table);
                        time += start.elapsed();
                        out.expect("failed to run type check pass")
                    });
                }
                time
            })
        });
    }

    fn bench_full(&self, c: &mut Criterion) {
        c.bench_function(&format!("full {}", self.name), |b| {
            b.iter_custom(|iters| {
                let mut time = Duration::default();
                for _ in 0..iters {
                    SESSION_GLOBALS.set(&SessionGlobals::default(), || {
                        let handler = Handler::default();
                        let mut compiler = new_compiler(&handler, self.path);
                        let (input, name) = self.data();
                        let start = Instant::now();
                        compiler
                            .parse_program_from_string(input, name)
                            .expect("Failed to parse program");
                        let mut symbol_table = compiler.symbol_table_pass().expect("failed to generate symbol table");
                        compiler
                            .type_checker_pass(&mut symbol_table)
                            .expect("failed to run type check pass");
                        time += start.elapsed();
                    });
                }
                time
            })
        });
    }
}

fn bench_parse(c: &mut Criterion) {
    for sample in Sample::SAMPLES {
        sample.bench_parse(c);
    }
}

fn bench_symbol_table(c: &mut Criterion) {
    for sample in Sample::SAMPLES {
        sample.bench_symbol_table(c);
    }
}

fn bench_type_checker(c: &mut Criterion) {
    for sample in Sample::SAMPLES {
        sample.bench_type_checker(c);
    }
}

fn bench_full(c: &mut Criterion) {
    for sample in Sample::SAMPLES {
        sample.bench_full(c);
    }
}

criterion_group!(
    name = benches;
    config = Criterion::default().sample_size(200).measurement_time(Duration::from_secs(30)).nresamples(200_000);
    targets =
        bench_parse,
        bench_symbol_table,
        bench_type_checker,
        bench_full
);
criterion_main!(benches);
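The benches above all follow the same shape: iter_custom lets the body time only the pass under measurement with Instant, so per-iteration setup (a fresh SessionGlobals, compiler construction, earlier passes) stays out of the reported numbers. A minimal self-contained sketch of that pattern, with a hypothetical bench_sum that is not part of this commit:

    use criterion::{black_box, criterion_group, criterion_main, Criterion};
    use std::time::{Duration, Instant};

    fn bench_sum(c: &mut Criterion) {
        c.bench_function("sum", |b| {
            b.iter_custom(|iters| {
                let mut time = Duration::default();
                for _ in 0..iters {
                    let data: Vec<u64> = (0..10_000).collect(); // untimed setup
                    let start = Instant::now();
                    black_box(data.iter().sum::<u64>()); // only this is measured
                    time += start.elapsed();
                }
                time
            })
        });
    }

    criterion_group!(benches, bench_sum);
    criterion_main!(benches);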
tests/test-framework/src/fetch.rs

@@ -16,25 +16,31 @@
use std::{fs, path::Path};

-pub fn find_tests<T: AsRef<Path>>(path: T, out: &mut Vec<(String, String)>) {
-    for entry in fs::read_dir(path).expect("fail to read tests") {
-        let entry = entry.expect("fail to read tests").path();
-        if entry.is_dir() {
-            find_tests(entry.as_path(), out);
-            continue;
-        } else if entry.extension().and_then(|x| x.to_str()).unwrap_or_default() != "leo" {
-            continue;
-        }
-        let content = fs::read_to_string(entry.as_path()).expect("failed to read test");
-        out.push((entry.as_path().to_str().unwrap_or_default().to_string(), content));
-    }
+use walkdir::WalkDir;
+
+pub fn find_tests<T: AsRef<Path> + Copy>(path: T, filter: T) -> Vec<(String, String)> {
+    let count = WalkDir::new(path)
+        .into_iter()
+        .flatten()
+        .filter_map(|f| {
+            let path = f.path();
+            if matches!(path.extension(), Some(s) if s == "leo") && !path.starts_with(filter) {
+                let content = fs::read_to_string(path).expect("failed to read test");
+                Some((path.to_str().unwrap_or_default().to_string(), content))
+            } else {
+                None
+            }
+        })
+        .collect::<Vec<(String, String)>>();
+    dbg!("find_tests count {}", count.len());
+    count
}

-pub fn split_tests_oneline(source: &str) -> Vec<&str> {
+pub fn split_tests_one_line(source: &str) -> Vec<&str> {
    source.lines().map(|x| x.trim()).filter(|x| !x.is_empty()).collect()
}

-pub fn split_tests_twoline(source: &str) -> Vec<String> {
+pub fn split_tests_two_line(source: &str) -> Vec<String> {
    let mut out = vec![];
    let mut lines = vec![];
    for line in source.lines() {
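The reworked find_tests drops the hand-rolled recursion in favor of walkdir and now returns its results, taking a second path whose subtree is excluded. A stand-alone sketch of the same idea (collect_with_ext and its paths are hypothetical, not part of the commit):

    use std::path::Path;
    use walkdir::WalkDir;

    fn collect_with_ext(root: &Path, ext: &str, skip: &Path) -> Vec<String> {
        WalkDir::new(root)
            .into_iter()
            .flatten() // drop unreadable entries instead of panicking
            .filter_map(|entry| {
                let path = entry.path();
                // keep files with the wanted extension outside the skipped subtree
                let keep = matches!(path.extension(), Some(s) if s == ext) && !path.starts_with(skip);
                keep.then(|| path.display().to_string())
            })
            .collect()
    }

    fn main() {
        for test in collect_with_ext(Path::new("tests"), "leo", Path::new("tests/expectations")) {
            println!("{test}");
        }
    }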
tests/test-framework/src/runner.rs

@@ -81,51 +81,96 @@ fn take_hook(
    output.map_err(|_| panic_buf.lock().unwrap().take().expect("failed to get panic message"))
}

-pub fn run_tests<T: Runner>(runner: &T, expectation_category: &str) {
-    std::env::remove_var("LEO_BACKTRACE"); // always remove backtrace so it doesn't clog output files
-    std::env::set_var("LEO_TESTFRAMEWORK", "true");
-    let mut pass_categories = 0;
-    let mut pass_tests = 0;
-    let mut fail_tests = 0;
-    let mut fail_categories = Vec::new();
+pub struct TestCases {
+    tests: Vec<(String, String)>,
+    path_prefix: PathBuf,
+    fail_categories: Vec<TestFailure>,
+}

-    let mut tests = Vec::new();
-    let mut test_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
-    test_dir.push("../../tests/");
+impl TestCases {
+    fn new(additional_check: impl Fn(&TestConfig) -> bool) -> (Self, Vec<TestConfig>) {
+        let mut path_prefix = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
+        path_prefix.push("../../tests/");

-    let mut expectation_dir = test_dir.clone();
-    expectation_dir.push("expectations");
+        let mut expectation_dir = path_prefix.clone();
+        expectation_dir.push("expectations");

-    find_tests(&test_dir, &mut tests);
+        let mut new = Self {
+            tests: Vec::new(),
+            path_prefix,
+            fail_categories: Vec::new(),
+        };
+        let tests = new.load_tests(additional_check);
+        (new, tests)
+    }

-    let filter = std::env::var("TEST_FILTER").unwrap_or_default();
-    let filter = filter.trim();
+    fn load_tests(&mut self, additional_check: impl Fn(&TestConfig) -> bool) -> Vec<TestConfig> {
+        let filter = PathBuf::from(std::env::var("TEST_FILTER").unwrap_or_default().trim());
+        let mut configs = Vec::new();

-    let mut outputs = vec![];
+        self.tests = find_tests(&self.path_prefix, &filter)
+            .into_iter()
+            .filter(|(path, content)| {
+                let config = match extract_test_config(content) {
+                    None => {
+                        self.fail_categories.push(TestFailure {
+                            path: path.to_string(),
+                            errors: vec![TestError::MissingTestConfig],
+                        });
+                        return true;
+                    }
+                    Some(cfg) => cfg,
+                };

-    for (path, content) in tests.into_iter() {
-        if !filter.is_empty() && !path.contains(filter) {
-            continue;
+                let res = additional_check(&config);
+                configs.push(config);
+                res
+            })
+            .collect();

+        configs
+    }

+    pub(crate) fn process_tests<P, O>(
+        &mut self,
+        configs: Vec<TestConfig>,
+        expectation_category: &str,
+        mut process: P,
+    ) -> Vec<O>
+    where
+        P: FnMut(&mut Self, (&Path, &str, &str, TestConfig)) -> O,
+    {
+        std::env::remove_var("LEO_BACKTRACE"); // always remove backtrace so it doesn't clog output files
+        std::env::set_var("LEO_TESTFRAMEWORK", "true");

+        let mut output = Vec::new();
+        dbg!("in proccesing: tests {}, configs {}", self.tests.len(), configs.len());
+        for ((path, content), config) in self.tests.clone().iter().zip(configs.into_iter()) {
+            let path = Path::new(&path);
+            let relative_path = path.strip_prefix(&self.path_prefix).expect("path error for test");

+            let test_name = relative_path
+                .file_stem()
+                .expect("no file name for test")
+                .to_str()
+                .unwrap()
+                .to_string();

+            let end_of_header = content.find("*/").expect("failed to find header block in test");
+            let content = &content[end_of_header + 2..];

+            output.push(process(self, (path, content, &test_name, config)));

+            std::env::remove_var("LEO_TESTFRAMEWORK");
+        }
-        let config = extract_test_config(&content);
-        if config.is_none() {
-            //panic!("missing configuration for {}", path);
-            // fail_categories.push(TestFailure {
-            //     path,
-            //     errors: vec![TestError::MissingTestConfig],
-            // });
-            continue;
-        }
-        let config = config.unwrap();
-        let namespace = runner.resolve_namespace(&config.namespace);
-        if namespace.is_none() {
-            continue;
-        }
-        let namespace = namespace.unwrap();
+        output
+    }

+    fn clear_expectations(&self, path: &Path, expectation_category: &str) -> (PathBuf, Option<TestExpectation>) {
-        let path = Path::new(&path);
-        let relative_path = path.strip_prefix(&test_dir).expect("path error for test");
-        let mut expectation_path = expectation_dir.clone();
+        let relative_path = path.strip_prefix(&self.path_prefix).expect("path error for test");
+        let expectation_dir = self.path_prefix.clone();
+        let mut expectation_path = expectation_dir;
        expectation_path.push(expectation_category);
        expectation_path.push(relative_path.parent().expect("no parent dir for test"));
        let mut expectation_name = relative_path
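The new load_tests threads two results out of a single pass: the filter closure decides which (path, content) pairs stay in self.tests while pushing each successfully parsed TestConfig into configs as a side effect. A minimal sketch of that filter-with-side-effect shape (the integer example is hypothetical, not part of the commit):

    fn main() {
        let raw = vec!["1", "x", "2"];
        let mut parsed = Vec::new();
        let kept: Vec<&str> = raw
            .into_iter()
            .filter(|s| match s.parse::<i32>() {
                // keep the item and record its parsed form
                Ok(n) => {
                    parsed.push(n);
                    true
                }
                // drop items that fail to parse
                Err(_) => false,
            })
            .collect();
        assert_eq!(kept, ["1", "2"]);
        assert_eq!(parsed, [1, 2]);
    }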
@@ -137,132 +182,129 @@ pub fn run_tests<T: Runner>(runner: &T, expectation_category: &str) {
        expectation_name += ".out";
        expectation_path.push(&expectation_name);

-        let test_name = relative_path
-            .file_stem()
-            .expect("no file name for test")
-            .to_str()
-            .unwrap()
-            .to_string();

-        let expectations: Option<TestExpectation> = if expectation_path.exists() {
+        if expectation_path.exists() {
            if !std::env::var("CLEAR_LEO_TEST_EXPECTATIONS")
                .unwrap_or_default()
                .trim()
                .is_empty()
            {
-                None
+                (expectation_path, None)
            } else {
                let raw = std::fs::read_to_string(&expectation_path).expect("failed to read expectations file");
-                Some(serde_yaml::from_str(&raw).expect("invalid yaml in expectations file"))
            }
        } else {
-            None
-        };

-        let end_of_header = content.find("*/").expect("failed to find header block in test");
-        let content = &content[end_of_header + 2..];

-        let tests = match namespace.parse_type() {
-            ParseType::Line => crate::fetch::split_tests_oneline(content)
-                .into_iter()
-                .map(|x| x.to_string())
-                .collect(),
-            ParseType::ContinuousLines => crate::fetch::split_tests_twoline(content),
-            ParseType::Whole => vec![content.to_string()],
-        };

-        let mut errors = vec![];
-        if let Some(expectations) = expectations.as_ref() {
-            if tests.len() != expectations.outputs.len() {
-                errors.push(TestError::MismatchedTestExpectationLength);
-            }
-        }

-        let mut new_outputs = vec![];

-        let mut expected_output = expectations.as_ref().map(|x| x.outputs.iter());
-        for (i, test) in tests.into_iter().enumerate() {
-            let expected_output = expected_output.as_mut().and_then(|x| x.next()).cloned();
-            println!("running test {} @ '{}'", test_name, path.to_str().unwrap());
-            let panic_buf = set_hook();
-            let leo_output = panic::catch_unwind(|| {
-                namespace.run_test(Test {
-                    name: test_name.clone(),
-                    content: test.clone(),
-                    path: path.into(),
-                    config: config.extra.clone(),
-                })
-            });
-            let output = take_hook(leo_output, panic_buf);
-            if let Some(error) = emit_errors(&test, &output, &config.expectation, expected_output, i) {
-                fail_tests += 1;
-                errors.push(error);
-            } else {
-                pass_tests += 1;
-                new_outputs.push(
-                    output
-                        .unwrap()
-                        .as_ref()
-                        .map(|x| serde_yaml::to_value(x).expect("serialization failed"))
-                        .unwrap_or_else(|e| Value::String(e.clone())),
-                );
-            }
-        }

-        if errors.is_empty() {
-            if expectations.is_none() {
-                outputs.push((
+                (
                    expectation_path,
-                    TestExpectation {
-                        namespace: config.namespace,
-                        expectation: config.expectation,
-                        outputs: new_outputs,
-                    },
-                ));
+                    Some(serde_yaml::from_str(&raw).expect("invalid yaml in expectations file")),
+                )
            }
-            pass_categories += 1;
        } else {
-            fail_categories.push(TestFailure {
-                path: path.to_str().unwrap().to_string(),
-                errors,
-            })
+            (expectation_path, None)
        }
    }

-    if !fail_categories.is_empty() {
-        for (i, fail) in fail_categories.iter().enumerate() {
-            println!(
-                "\n\n-----------------TEST #{} FAILED (and shouldn't have)-----------------",
-                i + 1
-            );
-            println!("File: {}", fail.path);
-            for error in &fail.errors {
-                println!("{}", error);
-            }
-        }
-        panic!(
-            "failed {}/{} tests in {}/{} categories",
-            pass_tests,
-            fail_tests + pass_tests,
-            fail_categories.len(),
-            fail_categories.len() + pass_categories
-        );
-    } else {
-        for (path, new_expectation) in outputs {
-            std::fs::create_dir_all(path.parent().unwrap()).expect("failed to make test expectation parent directory");
-            std::fs::write(
-                &path,
-                serde_yaml::to_string(&new_expectation).expect("failed to serialize expectation yaml"),
-            )
-            .expect("failed to write expectation file");
-        }
-        println!(
-            "passed {}/{} tests in {}/{} categories",
-            pass_tests,
-            fail_tests + pass_tests,
-            pass_categories,
-            pass_categories
-        );
-    }

-    std::env::remove_var("LEO_TESTFRAMEWORK");
+    pub fn run_tests<T: Runner>(runner: &T, expectation_category: &str) {
+        let (mut cases, configs) = Self::new(|_| false);

+        let mut pass_categories = 0;
+        let mut pass_tests = 0;
+        let mut fail_tests = 0;

+        let mut outputs = vec![];
+        dbg!("running tests");
+        cases.process_tests(
+            configs,
+            expectation_category,
+            |cases, (path, content, test_name, config)| {
+                dbg!("processing tests");
+                let namespace = match runner.resolve_namespace(&config.namespace) {
+                    None => todo!("continue"),
+                    Some(ns) => ns,
+                };

+                let (expectation_path, expectations) = cases.clear_expectations(path, expectation_category);

+                let tests = match namespace.parse_type() {
+                    ParseType::Line => crate::fetch::split_tests_one_line(content)
+                        .into_iter()
+                        .map(|x| x.to_string())
+                        .collect(),
+                    ParseType::ContinuousLines => crate::fetch::split_tests_two_line(content),
+                    ParseType::Whole => vec![content.to_string()],
+                };
+                dbg!("tests len{}", tests.len());

+                let mut errors = vec![];
+                if let Some(expectations) = expectations.as_ref() {
+                    if tests.len() != expectations.outputs.len() {
+                        errors.push(TestError::MismatchedTestExpectationLength);
+                    }
+                }

+                let mut new_outputs = vec![];
+                let mut expected_output = expectations.as_ref().map(|x| x.outputs.iter());
+                for (i, test) in tests.into_iter().enumerate() {
+                    let expected_output = expected_output.as_mut().and_then(|x| x.next()).cloned();
+                    println!("running test {} @ '{}'", test_name, path.to_str().unwrap());
+                    let panic_buf = set_hook();
+                    let leo_output = panic::catch_unwind(|| {
+                        namespace.run_test(Test {
+                            name: test_name.to_string(),
+                            content: test.clone(),
+                            path: path.into(),
+                            config: config.extra.clone(),
+                        })
+                    });
+                    let output = take_hook(leo_output, panic_buf);
+                    if let Some(error) = emit_errors(&test, &output, &config.expectation, expected_output, i) {
+                        fail_tests += 1;
+                        errors.push(error);
+                    } else {
+                        pass_tests += 1;
+                        new_outputs.push(
+                            output
+                                .unwrap()
+                                .as_ref()
+                                .map(|x| serde_yaml::to_value(x).expect("serialization failed"))
+                                .unwrap_or_else(|e| Value::String(e.clone())),
+                        );
+                    }
+                }

+                if errors.is_empty() {
+                    if expectations.is_none() {
+                        outputs.push((
+                            expectation_path,
+                            TestExpectation {
+                                namespace: config.namespace,
+                                expectation: config.expectation,
+                                outputs: new_outputs,
+                            },
+                        ));
+                    }
+                    pass_categories += 1;
+                } else {
+                    cases.fail_categories.push(TestFailure {
+                        path: path.to_str().unwrap().to_string(),
+                        errors,
+                    })
+                }

+                todo!()
+            },
+        );
+    }
+}

+struct Bencher;

+impl Bencher {
+    fn get_benches() -> Vec<(String, String)> {
+        let (mut cases, configs) = TestCases::new(|config| config.expectation == TestExpectationMode::Fail);

+        let expectation_category = "compiler";
+        let tests = cases.process_tests(configs, expectation_category, |_, (_, content, test_name, _)| {
+            (test_name.to_string(), content.to_string())
+        });

+        tests
+    }
+}
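For context, the set_hook/take_hook helpers referenced at the top of this hunk implement the usual trick for capturing panic messages from a test run: install a panic hook that writes the message into a shared buffer, run the test under panic::catch_unwind, then restore the hook and read the buffer back. A self-contained sketch of the pattern (run_capturing_panic is a hypothetical stand-in, not the framework's API):

    use std::panic;
    use std::sync::{Arc, Mutex};

    fn run_capturing_panic<F: FnOnce() -> i32 + panic::UnwindSafe>(f: F) -> Result<i32, String> {
        let buf: Arc<Mutex<Option<String>>> = Arc::new(Mutex::new(None));
        let buf_clone = buf.clone();
        // store the panic message instead of printing it to stderr
        panic::set_hook(Box::new(move |info| {
            *buf_clone.lock().unwrap() = Some(info.to_string());
        }));
        let output = panic::catch_unwind(f);
        let _ = panic::take_hook(); // restore the default hook
        output.map_err(|_| buf.lock().unwrap().take().unwrap_or_default())
    }

    fn main() {
        assert_eq!(run_capturing_panic(|| 42), Ok(42));
        assert!(run_capturing_panic(|| panic!("boom")).is_err());
    }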