integrated benches into test-framework

This commit is contained in:
0rphon 2022-06-08 15:33:12 -07:00
parent 5ce5102a37
commit 32d07583c5
10 changed files with 391 additions and 395 deletions

View File

@@ -8,6 +8,6 @@ input_file: input/dummy.in
function main() -> bool {
const expected: address = aleo1qnr4dkkvkgfqph0vzc3y6z2eu975wnpz2925ntjccd5cfqxtyu8s7pyjh9;
return input.state.root == [0u8; 32]
return input.state.root == [0u8; 32]
&& input.state_leaf.network_id == 0u8;
}

View File

@@ -4,31 +4,31 @@ expectation: Pass
*/
[main]
a: bool = true;
b: u8 = 2u8;
c: field = 0field;
d: group = (0, 1)group;
a: bool = true;
b: u8 = 2u8;
c: field = 0field;
d: group = (0, 1)group;
e: address = aleo1qnr4dkkvkgfqph0vzc3y6z2eu975wnpz2925ntjccd5cfqxtyu8sta57j8;
f: [u8; 32] = [0u8; 32];
g: [[u8; 2]; 3] = [[0u8; 2]; 3];
h: (bool, bool) = (true, false);
f: [u8; 32] = [0u8; 32];
g: [[u8; 2]; 3] = [[0u8; 2]; 3];
h: (bool, bool) = (true, false);
[registers]
r0: bool = true;
r1: u8 = 2u8;
r2: field = 0field;
r3: group = (0, 1)group;
r0: bool = true;
r1: u8 = 2u8;
r2: field = 0field;
r3: group = (0, 1)group;
r4: address = aleo1qnr4dkkvkgfqph0vzc3y6z2eu975wnpz2925ntjccd5cfqxtyu8sta57j8;
r5: [u8; 32] = [0u8; 32];
r6: [[u8; 2]; 3] = [[0u8; 2]; 3];
r7: (bool, bool) = (true, false);
r5: [u8; 32] = [0u8; 32];
r6: [[u8; 2]; 3] = [[0u8; 2]; 3];
r7: (bool, bool) = (true, false);
[constants]
c0: bool = true;
c1: u8 = 2u8;
c2: field = 0field;
c3: group = (0, 1)group;
c0: bool = true;
c1: u8 = 2u8;
c2: field = 0field;
c3: group = (0, 1)group;
c4: address = aleo1qnr4dkkvkgfqph0vzc3y6z2eu975wnpz2925ntjccd5cfqxtyu8sta57j8;
c5: [u8; 32] = [0u8; 32];
c6: [[u8; 2]; 3] = [[0u8; 2]; 3];
c7: (bool, bool) = (true, false);
c5: [u8; 32] = [0u8; 32];
c6: [[u8; 2]; 3] = [[0u8; 2]; 3];
c7: (bool, bool) = (true, false);

View File

@@ -6,10 +6,10 @@ input_file: inputs/branch.in
function main (x: address, y: bool) -> bool {
let z: address = aleo18cw5zdez3zhypev3tnfhmwvhre9ramwle4up947gcyy5rnmjw5yqn93wsr;
let z: address = aleo1fj982yqchhy973kz7e9jk6er7t6qd6jm9anplnlprem507w6lv9spwvfxx;
if y {
z = aleo1f2gs8g0qpumlgzpvmkw3q07y6xrwsdr0lqsu9h9fgnh8d7e44v9qhpgpkj;
z = aleo16s003g206rjms5pm4ak48340f7y4z4dsskuqfrd2gvqz6umh2qfq7lajfp;
}
return z == aleo1f2gs8g0qpumlgzpvmkw3q07y6xrwsdr0lqsu9h9fgnh8d7e44v9qhpgpkj;
return z == aleo1drcl2g8zxhxjzjw63ajp067gzvl94am3z7m7wgrzmr2ecd5sdq8sy66l5k;
}

View File

@@ -7,7 +7,7 @@ input_file:
*/
function main(x: address) -> bool {
const sender: address = aleo1qnr4dkkvkgfqph0vzc3y6z2eu975wnpz2925ntjccd5cfqxtyu8s7pyjh9;
const sender: address = aleo10qerras5799u6k7rjtc9y3hcwxuykr45qra7x7dp6jgnc0923czqm0lgta;
return x == sender;
}

View File

@@ -7,8 +7,8 @@ input_file:
*/
function main(x: address) -> bool {
const sender: address = aleo1qnr4dkkvkgfqph0vzc3y6z2eu975wnpz2925ntjccd5cfqxtyu8s7pyjh9;
const receiver: address = aleo18qgam03qe483tdrcc3fkqwpp38ehff4a2xma6lu7hams6lfpgcpqy3sr3p;
const sender: address = aleo1l7ytv5jqjzpxtjqttl5z9mle8ujcpac9t6tkge5f4haah4pxas8sagzecd;
const receiver: address = aleo1dtpkpg3d653mdlzh6g028937qdgujecn5gw5tzh7ftcvyz7jxvfqw6t8p6;
return x == sender ? receiver == x : sender == x;
}

View File

@@ -19,7 +19,6 @@ edition = "2018"
[[bench]]
name = "leo_compiler"
path = "benches/leo_compiler.rs"
harness = false
[dependencies]
@@ -56,4 +55,4 @@ version = "1.5.3"
[dev-dependencies.leo-span]
path = "../../leo/span"
version = "1.5.3"
version = "1.5.3"

View File

@@ -15,15 +15,12 @@
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
use leo_compiler::Compiler;
use leo_errors::emitter::Handler;
use leo_errors::emitter::{Emitter, Handler};
use leo_span::{
source_map::FileName,
symbol::{SessionGlobals, SESSION_GLOBALS},
};
use leo_test_framework::{
runner::{Namespace, ParseType, Runner},
Test,
};
use leo_test_framework::get_benches;
use criterion::{black_box, criterion_group, criterion_main, Criterion};
use std::{
@@ -31,37 +28,65 @@ use std::{
time::{Duration, Instant},
};
macro_rules! sample {
($name:expr) => {
Sample {
name: $name,
input: include_str!(concat!("./", $name, ".leo")),
path: concat!("./", $name, ".leo"),
}
};
enum BenchMode {
Parse,
Symbol,
Type,
Full,
}
#[derive(Clone, Copy)]
struct BufEmitter;
impl Emitter for BufEmitter {
fn emit_err(&mut self, _: leo_errors::LeoError) {}
fn last_emitted_err_code(&self) -> Option<i32> {
Some(0)
}
fn emit_warning(&mut self, _: leo_errors::LeoWarning) {}
}
impl BufEmitter {
fn new_handler() -> Handler {
Handler::new(Box::new(Self))
}
}
#[derive(Clone)]
struct Sample {
name: &'static str,
input: &'static str,
path: &'static str,
name: String,
input: String,
}
fn new_compiler<'a>(handler: &'a Handler, main_file_path: &str) -> Compiler<'a> {
fn new_compiler(handler: &Handler) -> Compiler<'_> {
Compiler::new(
handler,
PathBuf::from(main_file_path),
PathBuf::from("/tmp/output/"),
PathBuf::from(String::new()),
PathBuf::from(String::new()),
None,
)
}
impl Sample {
const SAMPLES: &'static [Sample] = &[sample!("big"), sample!("iteration")];
fn load_samples() -> Vec<Self> {
get_benches()
.into_iter()
.map(|(name, input)| Self { name, input })
.collect()
}
fn data(&self) -> (&str, FileName) {
black_box((self.input, FileName::Custom(self.path.into())))
black_box((&self.input, FileName::Custom(String::new())))
}
fn bench(&self, c: &mut Criterion, mode: BenchMode) {
match mode {
BenchMode::Parse => self.bench_parse(c),
BenchMode::Symbol => self.bench_symbol_table(c),
BenchMode::Type => self.bench_type_checker(c),
BenchMode::Full => self.bench_full(c),
}
}
fn bench_parse(&self, c: &mut Criterion) {
@@ -70,8 +95,8 @@ impl Sample {
let mut time = Duration::default();
for _ in 0..iters {
SESSION_GLOBALS.set(&SessionGlobals::default(), || {
let handler = Handler::default();
let mut compiler = new_compiler(&handler, self.path);
let handler = BufEmitter::new_handler();
let mut compiler = new_compiler(&handler);
let (input, name) = self.data();
let start = Instant::now();
let out = compiler.parse_program_from_string(input, name);
@@ -90,8 +115,8 @@ impl Sample {
let mut time = Duration::default();
for _ in 0..iters {
SESSION_GLOBALS.set(&SessionGlobals::default(), || {
let handler = Handler::default();
let mut compiler = new_compiler(&handler, self.path);
let handler = BufEmitter::new_handler();
let mut compiler = new_compiler(&handler);
let (input, name) = self.data();
compiler
.parse_program_from_string(input, name)
@@ -113,8 +138,8 @@ impl Sample {
let mut time = Duration::default();
for _ in 0..iters {
SESSION_GLOBALS.set(&SessionGlobals::default(), || {
let handler = Handler::default();
let mut compiler = new_compiler(&handler, self.path);
let handler = BufEmitter::new_handler();
let mut compiler = new_compiler(&handler);
let (input, name) = self.data();
compiler
.parse_program_from_string(input, name)
@@ -137,8 +162,8 @@ impl Sample {
let mut time = Duration::default();
for _ in 0..iters {
SESSION_GLOBALS.set(&SessionGlobals::default(), || {
let handler = Handler::default();
let mut compiler = new_compiler(&handler, self.path);
let handler = BufEmitter::new_handler();
let mut compiler = new_compiler(&handler);
let (input, name) = self.data();
let start = Instant::now();
compiler
@@ -157,37 +182,26 @@ impl Sample {
}
}
fn bench_parse(c: &mut Criterion) {
for sample in Sample::SAMPLES {
sample.bench_parse(c);
}
macro_rules! bench {
($name:ident, $mode:expr) => {
fn $name(c: &mut Criterion) {
Sample::load_samples().into_iter().for_each(|s| s.bench(c, $mode))
}
};
}
fn bench_symbol_table(c: &mut Criterion) {
for sample in Sample::SAMPLES {
sample.bench_symbol_table(c);
}
}
fn bench_type_checker(c: &mut Criterion) {
for sample in Sample::SAMPLES {
sample.bench_type_checker(c);
}
}
fn bench_full(c: &mut Criterion) {
for sample in Sample::SAMPLES {
sample.bench_full(c);
}
}
bench!(bench_parse, BenchMode::Parse);
bench!(bench_symbol, BenchMode::Symbol);
bench!(bench_type, BenchMode::Type);
bench!(bench_full, BenchMode::Full);
criterion_group!(
name = benches;
config = Criterion::default().sample_size(200).measurement_time(Duration::from_secs(30)).nresamples(200_000);
config = Criterion::default().sample_size(200).measurement_time(Duration::from_secs(5)).nresamples(200_000);
targets =
bench_parse,
bench_symbol_table,
bench_type_checker,
bench_symbol,
bench_type,
bench_full
);
criterion_main!(benches);
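
For orientation, the hunks above reduce to roughly the sketch below. It assumes the get_benches, BenchMode, and Sample::bench items exactly as they appear in this diff, and it omits the per-stage bench methods and the criterion_group!/criterion_main! wiring already shown; it is not a drop-in file.

// Sketch only: the new shape of benches/leo_compiler.rs after this commit.
use criterion::Criterion;
use leo_test_framework::get_benches;

struct Sample {
    // Kept for the per-stage bench methods in the hunks above.
    name: String,
    // Benchmark sources now come from the shared test framework instead of
    // .leo files bundled with the bench.
    input: String,
}

impl Sample {
    fn load_samples() -> Vec<Self> {
        get_benches()
            .into_iter()
            .map(|(name, input)| Self { name, input })
            .collect()
    }
}

// One Criterion target per compiler stage; each target runs every sample.
macro_rules! bench {
    ($name:ident, $mode:expr) => {
        fn $name(c: &mut Criterion) {
            Sample::load_samples().into_iter().for_each(|s| s.bench(c, $mode))
        }
    };
}

bench!(bench_parse, BenchMode::Parse);
bench!(bench_symbol, BenchMode::Symbol);
bench!(bench_type, BenchMode::Type);
bench!(bench_full, BenchMode::Full);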

View File

@@ -1,239 +1,241 @@
// Copyright (C) 2019-2021 Aleo Systems Inc.
// This file is part of the Leo library.
// // Copyright (C) 2019-2021 Aleo Systems Inc.
// // This file is part of the Leo library.
// The Leo library is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// // The Leo library is free software: you can redistribute it and/or modify
// // it under the terms of the GNU General Public License as published by
// // the Free Software Foundation, either version 3 of the License, or
// // (at your option) any later version.
// The Leo library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// // The Leo library is distributed in the hope that it will be useful,
// // but WITHOUT ANY WARRANTY; without even the implied warranty of
// // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// // GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
// // You should have received a copy of the GNU General Public License
// // along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
use leo_errors::{AstError, InputError, LeoMessageCode, PackageError, ParserError};
use leo_test_framework::{
fetch::find_tests,
output::TestExpectation,
test::{extract_test_config, TestExpectationMode as Expectation},
};
// use leo_errors::{AstError, InputError, LeoMessageCode, PackageError, ParserError};
// use leo_test_framework::{
// fetch::find_tests,
// output::TestExpectation,
// test::{extract_test_config, TestExpectationMode as Expectation},
// };
use regex::Regex;
use serde_yaml::Value;
use std::collections::{BTreeMap, HashSet};
use std::{error::Error, fs, io, path::PathBuf};
use structopt::{clap::AppSettings, StructOpt};
// use regex::Regex;
// use serde_yaml::Value;
// use std::collections::{BTreeMap, HashSet};
// use std::{error::Error, fs, io, path::PathBuf};
// use structopt::{clap::AppSettings, StructOpt};
#[derive(StructOpt)]
#[structopt(name = "error-coverage", author = "The Aleo Team <hello@aleo.org>", setting = AppSettings::ColoredHelp)]
struct Opt {
#[structopt(
short,
long,
help = "Path to the output file, defaults to stdout.",
parse(from_os_str)
)]
output: Option<PathBuf>,
}
// #[derive(StructOpt)]
// #[structopt(name = "error-coverage", author = "The Aleo Team <hello@aleo.org>", setting = AppSettings::ColoredHelp)]
// struct Opt {
// #[structopt(
// short,
// long,
// help = "Path to the output file, defaults to stdout.",
// parse(from_os_str)
// )]
// output: Option<PathBuf>,
// }
fn main() {
handle_error(run_with_args(Opt::from_args()));
}
// fn main() {
// handle_error(run_with_args(Opt::from_args()));
// }
fn run_with_args(opt: Opt) -> Result<(), Box<dyn Error>> {
// Variable that stores all the tests.
let mut tests = Vec::new();
let mut test_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
test_dir.push("../");
// fn run_with_args(opt: Opt) -> Result<(), Box<dyn Error>> {
// // Variable that stores all the tests.
// let mut tests = Vec::new();
// let mut test_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
// test_dir.push("../");
let mut expectation_dir = test_dir.clone();
expectation_dir.push("expectations");
// let mut expectation_dir = test_dir.clone();
// expectation_dir.push("expectations");
find_tests(&test_dir, &mut tests);
// find_tests(&test_dir, &mut tests);
// Store all covered error codes
let mut found_codes = BTreeMap::new();
let re = Regex::new(r"Error \[(?P<code>.*)\]:.*").unwrap();
// // Store all covered error codes
// let mut found_codes = BTreeMap::new();
// let re = Regex::new(r"Error \[(?P<code>.*)\]:.*").unwrap();
for (path, content) in tests.into_iter() {
if let Some(config) = extract_test_config(&content) {
// Skip passing tests.
if config.expectation == Expectation::Pass {
continue;
}
// for (path, content) in tests.into_iter() {
// if let Some(config) = extract_test_config(&content) {
// // Skip passing tests.
// if config.expectation == Expectation::Pass {
// continue;
// }
let mut expectation_path = expectation_dir.clone();
// let mut expectation_path = expectation_dir.clone();
let path = PathBuf::from(path);
let relative_path = path.strip_prefix(&test_dir).expect("path error for test");
// let path = PathBuf::from(path);
// let relative_path = path.strip_prefix(&test_dir).expect("path error for test");
let mut relative_expectation_path = relative_path.to_str().unwrap().to_string();
relative_expectation_path += ".out";
// let mut relative_expectation_path = relative_path.to_str().unwrap().to_string();
// relative_expectation_path += ".out";
// Add expectation category
if relative_expectation_path.starts_with("compiler") {
expectation_path.push("compiler");
} else {
expectation_path.push("parser");
}
expectation_path.push(&relative_expectation_path);
// // Add expectation category
// if relative_expectation_path.starts_with("compiler") {
// expectation_path.push("compiler");
// } else {
// expectation_path.push("parser");
// }
// expectation_path.push(&relative_expectation_path);
if expectation_path.exists() {
let raw = std::fs::read_to_string(&expectation_path).expect("failed to read expectations file");
let expectation: TestExpectation =
serde_yaml::from_str(&raw).expect("invalid yaml in expectations file");
// if expectation_path.exists() {
// let raw = std::fs::read_to_string(&expectation_path).expect("failed to read expectations file");
// let expectation: TestExpectation =
// serde_yaml::from_str(&raw).expect("invalid yaml in expectations file");
for value in expectation.outputs {
if let serde_yaml::Value::String(message) = value {
if let Some(caps) = re.captures(&message) {
if let Some(code) = caps.name("code") {
let files = found_codes
.entry(code.as_str().to_string())
.or_insert_with(HashSet::new);
let path = expectation_path
.strip_prefix(test_dir.clone())
.expect("invalid prefix for expectation path");
files.insert(PathBuf::from(path));
}
}
}
}
}
}
}
// for value in expectation.outputs {
// if let serde_yaml::Value::String(message) = value {
// if let Some(caps) = re.captures(&message) {
// if let Some(code) = caps.name("code") {
// let files = found_codes
// .entry(code.as_str().to_string())
// .or_insert_with(HashSet::new);
// let path = expectation_path
// .strip_prefix(test_dir.clone())
// .expect("invalid prefix for expectation path");
// files.insert(PathBuf::from(path));
// }
// }
// }
// }
// }
// }
// }
// Collect all defined error codes.
let mut all_codes = HashSet::new();
collect_error_codes(
&mut all_codes,
AstError::message_type(),
AstError::code_identifier(),
AstError::code_mask(),
AstError::num_exit_codes(),
);
collect_error_codes(
&mut all_codes,
InputError::message_type(),
InputError::code_identifier(),
InputError::code_mask(),
InputError::num_exit_codes(),
);
collect_error_codes(
&mut all_codes,
PackageError::message_type(),
PackageError::code_identifier(),
PackageError::code_mask(),
PackageError::num_exit_codes(),
);
collect_error_codes(
&mut all_codes,
ParserError::message_type(),
ParserError::code_identifier(),
ParserError::code_mask(),
ParserError::num_exit_codes(),
);
// // Collect all defined error codes.
// let mut all_codes = HashSet::new();
// collect_error_codes(
// &mut all_codes,
// AstError::message_type(),
// AstError::code_identifier(),
// AstError::code_mask(),
// AstError::num_exit_codes(),
// );
// collect_error_codes(
// &mut all_codes,
// InputError::message_type(),
// InputError::code_identifier(),
// InputError::code_mask(),
// InputError::num_exit_codes(),
// );
// collect_error_codes(
// &mut all_codes,
// PackageError::message_type(),
// PackageError::code_identifier(),
// PackageError::code_mask(),
// PackageError::num_exit_codes(),
// );
// collect_error_codes(
// &mut all_codes,
// ParserError::message_type(),
// ParserError::code_identifier(),
// ParserError::code_mask(),
// ParserError::num_exit_codes(),
// );
// Repackage data into values compatible with serde_yaml
let mut covered_errors = serde_yaml::Mapping::new();
let mut unknown_errors = serde_yaml::Mapping::new();
// // Repackage data into values compatible with serde_yaml
// let mut covered_errors = serde_yaml::Mapping::new();
// let mut unknown_errors = serde_yaml::Mapping::new();
for (code, paths) in found_codes.iter() {
let mut yaml_paths = Vec::with_capacity(paths.len());
for path in paths {
yaml_paths.push(path.to_str().unwrap());
}
yaml_paths.sort_unstable();
let yaml_paths = yaml_paths.iter().map(|s| Value::String(s.to_string())).collect();
// for (code, paths) in found_codes.iter() {
// let mut yaml_paths = Vec::with_capacity(paths.len());
// for path in paths {
// yaml_paths.push(path.to_str().unwrap());
// }
// yaml_paths.sort_unstable();
// let yaml_paths = yaml_paths.iter().map(|s| Value::String(s.to_string())).collect();
if all_codes.contains(code) {
covered_errors.insert(Value::String(code.to_owned()), Value::Sequence(yaml_paths));
} else {
unknown_errors.insert(Value::String(code.to_owned()), Value::Sequence(yaml_paths));
}
all_codes.remove(code);
}
// if all_codes.contains(code) {
// covered_errors.insert(Value::String(code.to_owned()), Value::Sequence(yaml_paths));
// } else {
// unknown_errors.insert(Value::String(code.to_owned()), Value::Sequence(yaml_paths));
// }
// all_codes.remove(code);
// }
let mut codes: Vec<String> = all_codes.drain().collect();
codes.sort();
// let mut codes: Vec<String> = all_codes.drain().collect();
// codes.sort();
let mut uncovered_errors = Vec::new();
for code in codes {
uncovered_errors.push(Value::String(code))
}
// let mut uncovered_errors = Vec::new();
// for code in codes {
// uncovered_errors.push(Value::String(code))
// }
let mut uncovered_information = serde_yaml::Mapping::new();
uncovered_information.insert(
Value::String(String::from("count")),
Value::Number(serde_yaml::Number::from(uncovered_errors.len())),
);
uncovered_information.insert(Value::String(String::from("codes")), Value::Sequence(uncovered_errors));
// let mut uncovered_information = serde_yaml::Mapping::new();
// uncovered_information.insert(
// Value::String(String::from("count")),
// Value::Number(serde_yaml::Number::from(uncovered_errors.len())),
// );
// uncovered_information.insert(Value::String(String::from("codes")), Value::Sequence(uncovered_errors));
let mut covered_information = serde_yaml::Mapping::new();
covered_information.insert(
Value::String(String::from("count")),
Value::Number(serde_yaml::Number::from(covered_errors.len())),
);
covered_information.insert(Value::String(String::from("codes")), Value::Mapping(covered_errors));
// let mut covered_information = serde_yaml::Mapping::new();
// covered_information.insert(
// Value::String(String::from("count")),
// Value::Number(serde_yaml::Number::from(covered_errors.len())),
// );
// covered_information.insert(Value::String(String::from("codes")), Value::Mapping(covered_errors));
let mut unknown_information = serde_yaml::Mapping::new();
unknown_information.insert(
Value::String(String::from("count")),
Value::Number(serde_yaml::Number::from(unknown_errors.len())),
);
unknown_information.insert(Value::String(String::from("codes")), Value::Mapping(unknown_errors));
// let mut unknown_information = serde_yaml::Mapping::new();
// unknown_information.insert(
// Value::String(String::from("count")),
// Value::Number(serde_yaml::Number::from(unknown_errors.len())),
// );
// unknown_information.insert(Value::String(String::from("codes")), Value::Mapping(unknown_errors));
let mut results = serde_yaml::Mapping::new();
results.insert(
Value::String(String::from("uncovered")),
Value::Mapping(uncovered_information),
);
// let mut results = serde_yaml::Mapping::new();
// results.insert(
// Value::String(String::from("uncovered")),
// Value::Mapping(uncovered_information),
// );
results.insert(
Value::String(String::from("covered")),
Value::Mapping(covered_information),
);
results.insert(
Value::String(String::from("unknown")),
Value::Mapping(unknown_information),
);
// results.insert(
// Value::String(String::from("covered")),
// Value::Mapping(covered_information),
// );
// results.insert(
// Value::String(String::from("unknown")),
// Value::Mapping(unknown_information),
// );
// Output error coverage results
if let Some(pathbuf) = opt.output {
let file = fs::File::create(pathbuf).expect("error creating output file");
serde_yaml::to_writer(file, &results).expect("serialization failed for error coverage report");
} else {
serde_yaml::to_writer(io::stdout(), &results).expect("serialization failed for error coverage report");
}
// // Output error coverage results
// if let Some(pathbuf) = opt.output {
// let file = fs::File::create(pathbuf).expect("error creating output file");
// serde_yaml::to_writer(file, &results).expect("serialization failed for error coverage report");
// } else {
// serde_yaml::to_writer(io::stdout(), &results).expect("serialization failed for error coverage report");
// }
Ok(())
}
// Ok(())
// }
fn collect_error_codes(
codes: &mut HashSet<String>,
error_type: String,
code_identifier: i8,
exit_code_mask: i32,
num_exit_codes: i32,
) {
for exit_code in 0..num_exit_codes {
codes.insert(format!(
"E{}{:0>3}{:0>4}",
error_type,
code_identifier,
exit_code_mask + exit_code,
));
}
}
// fn collect_error_codes(
// codes: &mut HashSet<String>,
// error_type: String,
// code_identifier: i8,
// exit_code_mask: i32,
// num_exit_codes: i32,
// ) {
// for exit_code in 0..num_exit_codes {
// codes.insert(format!(
// "E{}{:0>3}{:0>4}",
// error_type,
// code_identifier,
// exit_code_mask + exit_code,
// ));
// }
// }
fn handle_error(res: Result<(), Box<dyn Error>>) {
match res {
Ok(_) => (),
Err(err) => {
eprintln!("Error: {}", err);
std::process::exit(1);
}
}
}
// fn handle_error(res: Result<(), Box<dyn Error>>) {
// match res {
// Ok(_) => (),
// Err(err) => {
// eprintln!("Error: {}", err);
// std::process::exit(1);
// }
// }
// }
fn main() {}

View File

@@ -18,22 +18,20 @@ use std::{fs, path::Path};
use walkdir::WalkDir;
pub fn find_tests<T: AsRef<Path> + Copy>(path: T, filter: T) -> Vec<(String, String)> {
let count = WalkDir::new(path)
pub fn find_tests<T: AsRef<Path> + Copy>(path: T) -> Vec<(String, String)> {
WalkDir::new(path)
.into_iter()
.flatten()
.filter_map(|f| {
let path = f.path();
if matches!(path.extension(), Some(s) if s == "leo") && !path.starts_with(filter) {
if matches!(path.extension(), Some(s) if s == "leo") {
let content = fs::read_to_string(path).expect("failed to read test");
Some((path.to_str().unwrap_or_default().to_string(), content))
} else {
None
}
})
.collect::<Vec<(String, String)>>();
dbg!("find_tests count {}", count.len());
count
.collect::<Vec<(String, String)>>()
}
pub fn split_tests_one_line(source: &str) -> Vec<&str> {

View File

@@ -88,9 +88,13 @@ pub struct TestCases {
}
impl TestCases {
fn new(additional_check: impl Fn(&TestConfig) -> bool) -> (Self, Vec<TestConfig>) {
fn new(expectation_category: &str, additional_check: impl Fn(&TestConfig) -> bool) -> (Self, Vec<TestConfig>) {
let mut path_prefix = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
path_prefix.push("../../tests/");
path_prefix.push(expectation_category);
if let Ok(p) = std::env::var("TEST_FILTER") {
path_prefix.push(p)
}
let mut expectation_dir = path_prefix.clone();
expectation_dir.push("expectations");
@@ -105,10 +109,9 @@ impl TestCases {
}
fn load_tests(&mut self, additional_check: impl Fn(&TestConfig) -> bool) -> Vec<TestConfig> {
let filter = PathBuf::from(std::env::var("TEST_FILTER").unwrap_or_default().trim());
let mut configs = Vec::new();
self.tests = find_tests(&self.path_prefix, &filter)
self.tests = find_tests(&self.path_prefix)
.into_iter()
.filter(|(path, content)| {
let config = match extract_test_config(content) {
@@ -131,12 +134,7 @@ impl TestCases {
configs
}
pub(crate) fn process_tests<P, O>(
&mut self,
configs: Vec<TestConfig>,
expectation_category: &str,
mut process: P,
) -> Vec<O>
pub(crate) fn process_tests<P, O>(&mut self, configs: Vec<TestConfig>, mut process: P) -> Vec<O>
where
P: FnMut(&mut Self, (&Path, &str, &str, TestConfig)) -> O,
{
@@ -144,12 +142,10 @@ impl TestCases {
std::env::set_var("LEO_TESTFRAMEWORK", "true");
let mut output = Vec::new();
dbg!("in proccesing: tests {}, configs {}", self.tests.len(), configs.len());
for ((path, content), config) in self.tests.clone().iter().zip(configs.into_iter()) {
let path = Path::new(&path);
let relative_path = path.strip_prefix(&self.path_prefix).expect("path error for test");
let test_name = relative_path
let test_name = path
.file_stem()
.expect("no file name for test")
.to_str()
@@ -202,107 +198,94 @@ impl TestCases {
}
pub fn run_tests<T: Runner>(runner: &T, expectation_category: &str) {
let (mut cases, configs) = Self::new(|_| false);
let (mut cases, configs) = Self::new(expectation_category, |_| true);
let mut pass_categories = 0;
let mut pass_tests = 0;
let mut fail_tests = 0;
let mut outputs = vec![];
dbg!("running tests");
cases.process_tests(
configs,
expectation_category,
|cases, (path, content, test_name, config)| {
dbg!("processing tests");
let namespace = match runner.resolve_namespace(&config.namespace) {
None => todo!("continue"),
Some(ns) => ns,
};
cases.process_tests(configs, |cases, (path, content, test_name, config)| {
let namespace = match runner.resolve_namespace(&config.namespace) {
Some(ns) => ns,
None => return,
};
let (expectation_path, expectations) = cases.clear_expectations(path, expectation_category);
let (expectation_path, expectations) = cases.clear_expectations(path, expectation_category);
let tests = match namespace.parse_type() {
ParseType::Line => crate::fetch::split_tests_one_line(content)
.into_iter()
.map(|x| x.to_string())
.collect(),
ParseType::ContinuousLines => crate::fetch::split_tests_two_line(content),
ParseType::Whole => vec![content.to_string()],
};
dbg!("tests len{}", tests.len());
let tests = match namespace.parse_type() {
ParseType::Line => crate::fetch::split_tests_one_line(content)
.into_iter()
.map(|x| x.to_string())
.collect(),
ParseType::ContinuousLines => crate::fetch::split_tests_two_line(content),
ParseType::Whole => vec![content.to_string()],
};
let mut errors = vec![];
if let Some(expectations) = expectations.as_ref() {
if tests.len() != expectations.outputs.len() {
errors.push(TestError::MismatchedTestExpectationLength);
}
let mut errors = vec![];
if let Some(expectations) = expectations.as_ref() {
if tests.len() != expectations.outputs.len() {
errors.push(TestError::MismatchedTestExpectationLength);
}
}
let mut new_outputs = vec![];
let mut expected_output = expectations.as_ref().map(|x| x.outputs.iter());
for (i, test) in tests.into_iter().enumerate() {
let expected_output = expected_output.as_mut().and_then(|x| x.next()).cloned();
println!("running test {} @ '{}'", test_name, path.to_str().unwrap());
let panic_buf = set_hook();
let leo_output = panic::catch_unwind(|| {
namespace.run_test(Test {
name: test_name.to_string(),
content: test.clone(),
path: path.into(),
config: config.extra.clone(),
})
});
let output = take_hook(leo_output, panic_buf);
if let Some(error) = emit_errors(&test, &output, &config.expectation, expected_output, i) {
fail_tests += 1;
errors.push(error);
} else {
pass_tests += 1;
new_outputs.push(
output
.unwrap()
.as_ref()
.map(|x| serde_yaml::to_value(x).expect("serialization failed"))
.unwrap_or_else(|e| Value::String(e.clone())),
);
}
}
if errors.is_empty() {
if expectations.is_none() {
outputs.push((
expectation_path,
TestExpectation {
namespace: config.namespace,
expectation: config.expectation,
outputs: new_outputs,
},
));
}
pass_categories += 1;
} else {
cases.fail_categories.push(TestFailure {
path: path.to_str().unwrap().to_string(),
errors,
let mut new_outputs = vec![];
let mut expected_output = expectations.as_ref().map(|x| x.outputs.iter());
for (i, test) in tests.into_iter().enumerate() {
let expected_output = expected_output.as_mut().and_then(|x| x.next()).cloned();
println!("running test {} @ '{}'", test_name, path.to_str().unwrap());
let panic_buf = set_hook();
let leo_output = panic::catch_unwind(|| {
namespace.run_test(Test {
name: test_name.to_string(),
content: test.clone(),
path: path.into(),
config: config.extra.clone(),
})
});
let output = take_hook(leo_output, panic_buf);
if let Some(error) = emit_errors(&test, &output, &config.expectation, expected_output, i) {
fail_tests += 1;
errors.push(error);
} else {
pass_tests += 1;
new_outputs.push(
output
.unwrap()
.as_ref()
.map(|x| serde_yaml::to_value(x).expect("serialization failed"))
.unwrap_or_else(|e| Value::String(e.clone())),
);
}
},
);
}
}
}
struct Bencher;
impl Bencher {
fn get_benches() -> Vec<(String, String)> {
let (mut cases, configs) = TestCases::new(|config| config.expectation == TestExpectationMode::Fail);
let expectation_category = "compiler";
let tests = cases.process_tests(configs, expectation_category, |_, (_, content, test_name, _)| {
(test_name.to_string(), content.to_string())
if errors.is_empty() {
if expectations.is_none() {
outputs.push((
expectation_path,
TestExpectation {
namespace: config.namespace,
expectation: config.expectation,
outputs: new_outputs,
},
));
}
pass_categories += 1;
} else {
cases.fail_categories.push(TestFailure {
path: path.to_str().unwrap().to_string(),
errors,
})
}
});
tests
}
}
/// returns (name, content) for all benchmark samples
pub fn get_benches() -> Vec<(String, String)> {
let (mut cases, configs) = TestCases::new("compiler", |config| config.expectation != TestExpectationMode::Fail);
cases.process_tests(configs, |_, (_, content, test_name, _)| {
(test_name.to_string(), content.to_string())
})
}