doing some recommended changes

gluax 2022-06-12 09:26:14 -07:00
parent 041bd57c5b
commit 3a056220ff
5 changed files with 52 additions and 26 deletions

View File

@@ -32,14 +32,20 @@ use std::path::PathBuf;
use crate::OutputOptions;
/// The primary entry point of the Leo compiler.
#[derive(Clone)]
pub struct Compiler<'a> {
/// The handler is used for error and warning emissions.
handler: &'a Handler,
/// The path to the main Leo file.
main_file_path: PathBuf,
/// The path to where the compiler outputs all generated files.
output_directory: PathBuf,
/// The AST for the program.
pub ast: Ast,
/// The input AST for the program, if it exists.
pub input_ast: Option<InputAst>,
/// Compiler options on some optional output files.
output_options: OutputOptions,
}
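
For orientation, here is a minimal sketch of how a caller might wire these fields together. The exact `Compiler::new` signature is not shown in this diff (its arguments are elided in the benchmark helper further down), so the argument list, the `leo_compiler::OutputOptions` re-export, and the file paths below are assumptions for illustration only.

    use std::path::PathBuf;
    use leo_compiler::Compiler;        // import confirmed by the benchmark file below
    use leo_errors::emitter::Handler;  // import confirmed by the benchmark file below

    // Hypothetical constructor call: assumes `Compiler::new` takes the non-derived
    // fields (handler, main_file_path, output_directory, output_options) in order,
    // and that `OutputOptions` is re-exported as `leo_compiler::OutputOptions`.
    fn make_compiler(handler: &Handler) -> Compiler<'_> {
        Compiler::new(
            handler,
            PathBuf::from("src/main.leo"),          // main_file_path (example path)
            PathBuf::from("outputs/"),              // output_directory (example path)
            leo_compiler::OutputOptions::default(), // output_options
        )
    }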

View File

@@ -16,7 +16,10 @@
#[derive(Clone, Default)]
pub struct OutputOptions {
/// Whether spans are enabled in the output ASTs.
pub spans_enabled: bool,
/// If enabled, writes the AST after parsing.
pub ast_initial: bool,
/// If enabled, writes the input AST after parsing.
pub input_ast_initial: bool,
}
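
Because `Default` is also derived, callers can opt into individual outputs with struct-update syntax. A small illustrative snippet, written as if alongside the struct above (the flag chosen here is arbitrary):

    // Enable only the initial AST snapshot; the other flags keep their default (false).
    fn ast_only_options() -> OutputOptions {
        OutputOptions {
            ast_initial: true,
            ..Default::default()
        }
    }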

View File

@@ -14,6 +14,8 @@
// You should have received a copy of the GNU General Public License
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
//! This file contains tools for benchmarking the Leo compiler and its stages.
use leo_compiler::Compiler;
use leo_errors::emitter::{Emitter, Handler};
use leo_span::{
@@ -28,13 +30,19 @@ use std::{
time::{Duration, Instant},
};
/// An enum to represent the stage of the Compiler we are benchmarking.
enum BenchMode {
/// Benchmarks parsing.
Parse,
/// Benchmarks symbol table generation.
Symbol,
/// Benchmarks type checking.
Type,
/// Benchmarks all the above stages.
Full,
}
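
One hypothetical way the benchmark binary could pick a `BenchMode` at runtime is from an environment variable; the variable name and string values below are assumptions and not part of this diff. The sketch is written as if inside this benchmark file, where `BenchMode` is in scope.

    // Hypothetical selection logic: map an assumed `LEO_BENCH_MODE` env var onto
    // a BenchMode, defaulting to Full when unset or unrecognized.
    fn bench_mode_from_env() -> BenchMode {
        match std::env::var("LEO_BENCH_MODE").as_deref() {
            Ok("parse") => BenchMode::Parse,
            Ok("symbol") => BenchMode::Symbol,
            Ok("type") => BenchMode::Type,
            _ => BenchMode::Full,
        }
    }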
/// A dummy buffer emitter since we only test on valid programs.
struct BufEmitter;
impl Emitter for BufEmitter {
@@ -53,12 +61,14 @@ impl BufEmitter {
}
}
/// The name of the test, and the test content.
#[derive(Clone)]
struct Sample {
name: String,
input: String,
}
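
Since `get_benches()` in the test-framework (see the last file in this diff) returns `(name, source)` pairs as a `Vec<(String, String)>`, `load_samples` below presumably maps them straight into `Sample` values. Its body is truncated in this diff, so the following is only a sketch of that mapping, written as if inside this benchmark module where `Sample` and `get_benches` are already in scope.

    // Sketch of the (elided) mapping: one Sample per (name, source) pair.
    fn load_samples_sketch() -> Vec<Sample> {
        get_benches()
            .into_iter()
            .map(|(name, input)| Sample { name, input })
            .collect()
    }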
/// A helper function to create a Leo Compiler struct.
fn new_compiler(handler: &Handler) -> Compiler<'_> {
Compiler::new(
handler,
@@ -69,6 +79,9 @@ fn new_compiler(handler: &Handler) -> Compiler<'_> {
}
impl Sample {
/// Loads all the benchmark samples.
/// Leverages the test-framework to grab all tests
/// that are passing compiler tests or marked as benchmark tests.
fn load_samples() -> Vec<Self> {
get_benches()
.into_iter()
@@ -91,6 +104,8 @@ impl Sample {
fn bench_parse(&self, c: &mut Criterion) {
c.bench_function(&format!("parse {}", self.name), |b| {
// `iter_custom` is used so we can apply custom timing around the compiler stages.
// This way we time only the necessary stage.
b.iter_custom(|iters| {
let mut time = Duration::default();
for _ in 0..iters {
@@ -111,6 +126,8 @@ impl Sample {
fn bench_symbol_table(&self, c: &mut Criterion) {
c.bench_function(&format!("symbol table pass {}", self.name), |b| {
// `iter_custom` is used so we can apply custom timing around the compiler stages.
// This way we time only the necessary stage.
b.iter_custom(|iters| {
let mut time = Duration::default();
for _ in 0..iters {
@@ -134,6 +151,8 @@ impl Sample {
fn bench_type_checker(&self, c: &mut Criterion) {
c.bench_function(&format!("type checker pass {}", self.name), |b| {
// `iter_custom` is used so we can apply custom timing around the compiler stages.
// This way we time only the necessary stage.
b.iter_custom(|iters| {
let mut time = Duration::default();
for _ in 0..iters {
@@ -158,6 +177,8 @@ impl Sample {
fn bench_full(&self, c: &mut Criterion) {
c.bench_function(&format!("full {}", self.name), |b| {
// `iter_custom` is used so we can apply custom timing around the compiler stages.
// This way we time only the necessary stages.
b.iter_custom(|iters| {
let mut time = Duration::default();
for _ in 0..iters {

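All four benchmark functions above share the `iter_custom` pattern described in their comments: setup runs outside the timer, and only the stage under test contributes to the accumulated `Duration` handed back to Criterion. A condensed, self-contained illustration of that pattern follows; the setup and stage functions here are dummy stand-ins, not Leo compiler calls.

    use criterion::Criterion;
    use std::time::{Duration, Instant};

    // Dummy stand-ins for the untimed setup and the stage being measured.
    fn prepare_input() -> String { "program source".to_string() }
    fn run_stage(input: &str) -> usize { input.len() }

    fn bench_stage_only(c: &mut Criterion) {
        c.bench_function("stage only", |b| {
            b.iter_custom(|iters| {
                let mut time = Duration::default();
                for _ in 0..iters {
                    let input = prepare_input(); // setup stays outside the measurement
                    let start = Instant::now();
                    let _ = run_stage(&input);   // only this call is timed
                    time += start.elapsed();
                }
                time                             // total time for `iters` iterations
            })
        });
    }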
View File

@@ -21,20 +21,16 @@ use std::{
use walkdir::WalkDir;
pub fn find_tests<T: AsRef<Path> + Copy>(path: T) -> Vec<(PathBuf, String)> {
WalkDir::new(path)
.into_iter()
.flatten()
.filter_map(|f| {
let path = f.path();
if matches!(path.extension(), Some(s) if s == "leo") {
let content = fs::read_to_string(path).expect("failed to read test");
Some((path.to_path_buf(), content))
} else {
None
}
pub fn find_tests(path: &'_ Path) -> impl Iterator<Item = (PathBuf, String)> + '_ {
WalkDir::new(path).into_iter().flatten().filter_map(move |f| {
let path = f.path();
path.extension().filter(|s| *s == "leo").map(|_| {
(
path.to_path_buf(),
fs::read_to_string(path).expect("failed to read test"),
)
})
.collect::<Vec<(PathBuf, String)>>()
})
}
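With `find_tests` now returning a lazy iterator that borrows `path` instead of an eagerly collected `Vec`, callers can either stream the results or collect them, as `load_tests` does further down. A small hypothetical usage, written as if in a module where `find_tests` is in scope:

    use std::path::Path;

    // Hypothetical caller: walk the discovered .leo tests without collecting them first.
    fn count_leo_tests(dir: &Path) -> usize {
        find_tests(dir)
            .inspect(|(path, _content)| println!("found test: {}", path.display()))
            .count()
    }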
pub fn split_tests_one_line(source: &str) -> Vec<&str> {

View File

@@ -50,6 +50,10 @@ pub trait Runner {
fn resolve_namespace(&self, name: &str) -> Option<Box<dyn Namespace>>;
}
/// Returns `true` if the given environment variable is set to a non-empty value.
fn is_env_var_set(var: &str) -> bool {
!std::env::var(var).unwrap_or_default().trim().is_empty()
}
fn set_hook() -> Arc<Mutex<Option<String>>> {
let panic_buf = Arc::new(Mutex::new(None));
let thread_id = thread::current().id();
@@ -57,10 +61,7 @@ fn set_hook() -> Arc<Mutex<Option<String>>> {
let panic_buf = panic_buf.clone();
Box::new(move |e| {
if thread::current().id() == thread_id {
if !std::env::var("RUST_BACKTRACE")
.unwrap_or_else(|_| "".to_string())
.is_empty()
{
if is_env_var_set("RUST_BACKTRACE") {
*panic_buf.lock().unwrap() = Some(format!("{:?}", backtrace::Backtrace::new()));
} else {
*panic_buf.lock().unwrap() = Some(e.to_string());
@@ -111,7 +112,7 @@ impl TestCases {
fn load_tests(&mut self, additional_check: impl Fn(&TestConfig) -> bool) -> Vec<TestConfig> {
let mut configs = Vec::new();
self.tests = find_tests(&self.path_prefix)
self.tests = find_tests(&self.path_prefix.clone())
.into_iter()
.filter(|(path, content)| {
let config = match extract_test_config(content) {
@@ -130,6 +131,7 @@ impl TestCases {
res
})
.collect();
dbg!(self.tests.len());
configs
}
@@ -177,11 +179,7 @@ impl TestCases {
expectation_path.push(&expectation_name);
if expectation_path.exists() {
if !std::env::var("CLEAR_LEO_TEST_EXPECTATIONS")
.unwrap_or_default()
.trim()
.is_empty()
{
if is_env_var_set("CLEAR_LEO_TEST_EXPECTATIONS") {
(expectation_path, None)
} else {
let raw = std::fs::read_to_string(&expectation_path).expect("failed to read expectations file");
@@ -284,8 +282,10 @@ pub fn get_benches() -> Vec<(String, String)> {
let (mut cases, configs) = TestCases::new("compiler", |config| {
(&config.namespace == "Bench" && config.expectation == TestExpectationMode::Pass)
|| (&config.namespace == "Compile"
&& config.expectation != TestExpectationMode::Fail
&& config.expectation != TestExpectationMode::Skip)
&& !matches!(
config.expectation,
TestExpectationMode::Fail | TestExpectationMode::Skip
))
});
cases.process_tests(configs, |_, (_, content, test_name, _)| {