Merge remote-tracking branch 'origin/experimental' into experimental

Nicolas Abril 2022-11-30 16:13:03 +01:00
commit f3e33792c2
95 changed files with 2126 additions and 171 deletions

Cargo.lock (generated)

@ -675,8 +675,13 @@ version = "0.1.0"
 dependencies = [
  "kind-checker",
  "kind-driver",
- "kind-query",
+ "kind-parser",
+ "kind-pass",
  "kind-report",
+ "kind-span",
+ "kind-target-hvm",
+ "kind-target-kdl",
+ "kind-tree",
  "ntest",
  "pretty_assertions",
  "walkdir",


@ -17,4 +17,10 @@ members = [
# "crates/kind-lint", # "crates/kind-lint",
# "crates/kind-query", # "crates/kind-query",
# "crates/kind-macros", # "crates/kind-macros",
] ]
[profile.test.package.hvm]
opt-level = 3
[profile.bench.package.hvm]
opt-level = 3


@ -494,14 +494,14 @@ fn codegen_entry(file: &mut lang::File, entry: &desugared::Entry) {
vec_preppend![ vec_preppend![
mk_ctr_name(&entry.name), mk_ctr_name(&entry.name),
mk_var("orig"); mk_var("orig");
base_vars.clone() base_vars
], ],
), ),
rhs: mk_ctr( rhs: mk_ctr(
TermTag::HoasF(entry.name.to_string()).to_string(), TermTag::HoasF(entry.name.to_string()).to_string(),
vec_preppend![ vec_preppend![
mk_var("orig"); mk_var("orig");
base_vars.clone() base_vars
], ],
), ),
}); });
@ -512,7 +512,7 @@ fn codegen_entry(file: &mut lang::File, entry: &desugared::Entry) {
vec_preppend![ vec_preppend![
mk_ctr_name(&entry.name), mk_ctr_name(&entry.name),
mk_var("orig"); mk_var("orig");
base_vars.clone() base_vars
], ],
), ),
rhs: mk_ctr( rhs: mk_ctr(
@ -553,7 +553,7 @@ pub fn codegen_book(book: &Book, functions_to_check: Vec<String>) -> lang::File
let functions_entry = lang::Rule { let functions_entry = lang::Rule {
lhs: mk_ctr("Functions".to_owned(), vec![]), lhs: mk_ctr("Functions".to_owned(), vec![]),
rhs: codegen_vec(functions_to_check.iter().map(|x| mk_ctr_name_from_str(&x))), rhs: codegen_vec(functions_to_check.iter().map(|x| mk_ctr_name_from_str(x))),
}; };
for entry in book.entrs.values() { for entry in book.entrs.values() {


@ -56,12 +56,14 @@ pub fn type_check(
 ) -> bool {
     let file = gen_checker(book, functions_to_check);
-    match eval(&file.to_string(), "Main", false) {
+    match eval(&file, "Main", false) {
         Ok(term) => {
-            let errs = parse_report(&term).expect(&format!(
-                "Internal Error: Cannot parse the report message from the type checker: {}",
-                term
-            ));
+            let errs = parse_report(&term).unwrap_or_else(|_| {
+                panic!(
+                    "Internal Error: Cannot parse the report message from the type checker: {}",
+                    term
+                )
+            });
             let succeeded = errs.is_empty();
             for err in errs {
@ -80,7 +82,7 @@ pub fn type_check(
 pub fn eval_api(book: &Book) -> Box<Term> {
     let file = gen_checker(book, Vec::new());
-    let file = language::syntax::read_file(&file.to_string()).unwrap();
+    let file = language::syntax::read_file(&file).unwrap();
     let book = language::rulebook::gen_rulebook(&file);
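The move from `.expect(&format!(..))` to `.unwrap_or_else(|_| panic!(..))` is the usual fix for clippy's `expect_fun_call`: the argument of `expect` is evaluated eagerly, so the report string was being formatted even when parsing succeeded, while the closure defers that work to the failure path. A minimal sketch of the same pattern, with hypothetical names unrelated to the checker:

    fn report_len(raw: &str) -> usize {
        // Eager version (what `expect(&format!(..))` does): the message is built on every call.
        //     raw.trim().parse().expect(&format!("cannot parse {:?}", raw))
        // Lazy version: the message is only built if parsing actually fails.
        raw.trim()
            .parse()
            .unwrap_or_else(|_| panic!("cannot parse a length out of {:?}", raw))
    }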


@ -17,11 +17,11 @@ impl Diagnostic for DeriveError {
DeriveError::CannotUseNamedVariable(range) => DiagnosticFrame { DeriveError::CannotUseNamedVariable(range) => DiagnosticFrame {
code: 103, code: 103,
severity: Severity::Error, severity: Severity::Error,
title: format!("Cannot use named variable on match derivations"), title: "Cannot use named variable on match derivations".to_string(),
subtitles: vec![], subtitles: vec![],
hints: vec![], hints: vec![],
positions: vec![Marker { positions: vec![Marker {
position: range.clone(), position: *range,
color: Color::Fst, color: Color::Fst,
text: "Here!".to_string(), text: "Here!".to_string(),
no_code: false, no_code: false,


@ -160,7 +160,7 @@ pub fn derive_match(
new_args.push(match arg { new_args.push(match arg {
Binding::Positional(expr) => AppBinding::explicit(expr.clone()), Binding::Positional(expr) => AppBinding::explicit(expr.clone()),
Binding::Named(range, _, expr) => { Binding::Named(range, _, expr) => {
errs.push(Box::new(DeriveError::CannotUseNamedVariable(range.clone()))); errs.push(Box::new(DeriveError::CannotUseNamedVariable(*range)));
AppBinding::explicit(expr.clone()) AppBinding::explicit(expr.clone())
} }
}); });
@ -191,7 +191,7 @@ pub fn derive_match(
types.push(Argument { types.push(Argument {
hidden: false, hidden: false,
erased: false, erased: false,
name: Ident::new_static(&format!("{}_", cons.name.to_string()), range), name: Ident::new_static(&format!("{}_", cons.name), range),
typ: Some(cons_type), typ: Some(cons_type),
range, range,
}); });
@ -209,7 +209,7 @@ pub fn derive_match(
rules: vec![], rules: vec![],
range, range,
attrs: Vec::new(), attrs: Vec::new(),
generated_by: Some(sum.name.to_string().clone()), generated_by: Some(sum.name.to_string()),
}; };
return (entry, errs); return (entry, errs);
} }
@ -262,7 +262,7 @@ pub fn derive_match(
let renames = FxHashMap::from_iter( let renames = FxHashMap::from_iter(
sum.parameters sum.parameters
.extend(&cons.args) .extend(&cons.args)
.map(|x| (x.name.to_string(), format!("{}_", x.name.to_string()))) .map(|x| (x.name.to_string(), format!("{}_", x.name)))
.iter() .iter()
.cloned(), .cloned(),
); );


@ -151,7 +151,7 @@ pub fn derive_open(range: Range, rec: &RecordDecl) -> concrete::Entry {
         range: rec.constructor.range,
     })];
-    let entry = Entry {
+    Entry {
         name,
         docs: Vec::new(),
         args: types,
@ -159,8 +159,6 @@ pub fn derive_open(range: Range, rec: &RecordDecl) -> concrete::Entry {
         rules,
         range: rec.constructor.range,
         attrs: Vec::new(),
-        generated_by: Some(rec.name.to_string().clone()),
-    };
-    entry
+        generated_by: Some(rec.name.to_string())
+    }
 }


@ -105,7 +105,7 @@ impl Diagnostic for DriverError {
DriverError::ThereIsntAMain => DiagnosticFrame { DriverError::ThereIsntAMain => DiagnosticFrame {
code: 103, code: 103,
severity: Severity::Error, severity: Severity::Error,
title: format!("Cannot find 'Main' function to run the file."), title: "Cannot find 'Main' function to run the file.".to_string(),
subtitles: vec![], subtitles: vec![],
hints: vec![], hints: vec![],
positions: vec![], positions: vec![],


@ -1,6 +1,6 @@
use checker::eval; use checker::eval;
use errors::DriverError; use errors::DriverError;
use kind_pass::{desugar, erasure, expand, inline::inline_book}; use kind_pass::{desugar, erasure, inline::inline_book};
use kind_report::report::FileCache; use kind_report::report::FileCache;
use kind_span::SyntaxCtxIndex; use kind_span::SyntaxCtxIndex;
@ -50,8 +50,6 @@ pub fn type_check_book(
pub fn to_book(session: &mut Session, path: &PathBuf) -> Option<concrete::Book> { pub fn to_book(session: &mut Session, path: &PathBuf) -> Option<concrete::Book> {
let mut concrete_book = resolution::parse_and_store_book(session, path)?; let mut concrete_book = resolution::parse_and_store_book(session, path)?;
expand::expand_book(session.diagnostic_sender.clone(), &mut concrete_book);
let failed = resolution::check_unbound_top_level(session, &mut concrete_book); let failed = resolution::check_unbound_top_level(session, &mut concrete_book);
if failed { if failed {


@ -4,6 +4,7 @@
//! depedencies. //! depedencies.
use fxhash::FxHashSet; use fxhash::FxHashSet;
use kind_pass::expand::expand_module;
use kind_pass::expand::uses::expand_uses; use kind_pass::expand::uses::expand_uses;
use std::fs; use std::fs;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
@ -64,10 +65,10 @@ fn ident_to_path(
let mut raw_path = root.to_path_buf(); let mut raw_path = root.to_path_buf();
raw_path.push(PathBuf::from(segments.join("/"))); raw_path.push(PathBuf::from(segments.join("/")));
match accumulate_neighbour_paths(&ident, &raw_path) { match accumulate_neighbour_paths(ident, &raw_path) {
Ok(None) if search_on_parent => { Ok(None) if search_on_parent => {
raw_path.pop(); raw_path.pop();
accumulate_neighbour_paths(&ident, &raw_path) accumulate_neighbour_paths(ident, &raw_path)
} }
rest => rest, rest => rest,
} }
@ -110,7 +111,7 @@ fn module_to_book<'a>(
for cons in &sum.constructors { for cons in &sum.constructors {
let mut cons_ident = sum.name.add_segment(cons.name.to_str()); let mut cons_ident = sum.name.add_segment(cons.name.to_str());
cons_ident.range = cons.name.range.clone(); cons_ident.range = cons.name.range;
if try_to_insert_new_name(failed, session, cons_ident.clone(), book) { if try_to_insert_new_name(failed, session, cons_ident.clone(), book) {
public_names.insert(cons_ident.to_string()); public_names.insert(cons_ident.to_string());
book.count book.count
@ -156,10 +157,10 @@ fn module_to_book<'a>(
public_names public_names
} }
fn parse_and_store_book_by_identifier<'a>( fn parse_and_store_book_by_identifier(
session: &mut Session, session: &mut Session,
ident: &QualifiedIdent, ident: &QualifiedIdent,
book: &'a mut Book, book: &mut Book,
) -> bool { ) -> bool {
if book.entries.contains_key(ident.to_string().as_str()) { if book.entries.contains_key(ident.to_string().as_str()) {
return false; return false;
@ -175,10 +176,10 @@ fn parse_and_store_book_by_identifier<'a>(
} }
} }
fn parse_and_store_book_by_path<'a>( fn parse_and_store_book_by_path(
session: &mut Session, session: &mut Session,
path: &PathBuf, path: &PathBuf,
book: &'a mut Book, book: &mut Book,
) -> bool { ) -> bool {
if !path.exists() { if !path.exists() {
session session
@ -218,6 +219,8 @@ fn parse_and_store_book_by_path<'a>(
expand_uses(&mut module, session.diagnostic_sender.clone()); expand_uses(&mut module, session.diagnostic_sender.clone());
expand_module(session.diagnostic_sender.clone(), &mut module);
let mut state = UnboundCollector::new(session.diagnostic_sender.clone(), false); let mut state = UnboundCollector::new(session.diagnostic_sender.clone(), false);
state.visit_module(&mut module); state.visit_module(&mut module);
@ -227,9 +230,12 @@ fn parse_and_store_book_by_path<'a>(
     }
     module_to_book(&mut failed, session, module, book);
     for idents in state.unbound_top_level.values() {
-        failed |= parse_and_store_book_by_identifier(session, &idents.iter().nth(0).unwrap(), book);
+        let fst = idents.iter().next().unwrap();
+        if !book.names.contains_key(&fst.to_string()) {
+            failed |= parse_and_store_book_by_identifier(session, fst, book);
+        }
     }
     failed
@ -276,7 +282,7 @@ pub fn check_unbound_top_level(session: &mut Session, book: &mut Book) -> bool {
         .map(|x| x.to_ident())
         .collect();
     if !res.is_empty() {
-        unbound_variable(session, &book, &res);
+        unbound_variable(session, book, &res);
         failed = true;
     }
 }


@ -29,7 +29,7 @@ fn is_valid_id(chr: char) -> bool {
} }
fn is_valid_upper_start(chr: char) -> bool { fn is_valid_upper_start(chr: char) -> bool {
matches!(chr, 'A'..='Z') chr.is_ascii_uppercase()
} }
fn is_valid_id_start(chr: char) -> bool { fn is_valid_id_start(chr: char) -> bool {


@ -143,7 +143,7 @@ impl<'a> DesugarState<'a> {
if (fill_hidden && arg_decl.hidden) || arguments[i].is_some() { if (fill_hidden && arg_decl.hidden) || arguments[i].is_some() {
continue; continue;
} }
arguments[i] = Some((v.range, self.desugar_expr(&v))); arguments[i] = Some((v.range, self.desugar_expr(v)));
break; break;
} }
} }


@ -20,13 +20,13 @@ impl<'a> DesugarState<'a> {
} }
fn attr_invalid_argument(&mut self, attr: &Attribute) { fn attr_invalid_argument(&mut self, attr: &Attribute) {
if !attr.value.is_some() { if attr.value.is_none() {
self.send_err(PassError::InvalidAttributeArgument(attr.range)) self.send_err(PassError::InvalidAttributeArgument(attr.range))
}; };
} }
fn attr_expects_a_value(&mut self, attr: &Attribute) { fn attr_expects_a_value(&mut self, attr: &Attribute) {
if !attr.value.is_some() { if attr.value.is_none() {
self.send_err(PassError::AttributeExpectsAValue(attr.range)) self.send_err(PassError::AttributeExpectsAValue(attr.range))
}; };
} }


@ -24,7 +24,7 @@ enum Ambient {
} }
impl Ambient { impl Ambient {
pub fn to_relev(&self) -> Relevance { pub fn as_relevance(&self) -> Relevance {
match self { match self {
Ambient::Irrelevant => Relevance::Irrelevant, Ambient::Irrelevant => Relevance::Irrelevant,
Ambient::Unknown | Ambient::Relevant => Relevance::Relevant, Ambient::Unknown | Ambient::Relevant => Relevance::Relevant,
@ -50,8 +50,8 @@ pub struct ErasureState<'a> {
failed: bool, failed: bool,
} }
pub fn erase_book<'a>( pub fn erase_book(
book: &'a desugared::Book, book: &desugared::Book,
errs: Sender<Box<dyn Diagnostic>>, errs: Sender<Box<dyn Diagnostic>>,
entrypoints: Vec<String>, entrypoints: Vec<String>,
) -> Option<untyped::Book> { ) -> Option<untyped::Book> {
@ -411,7 +411,7 @@ impl<'a> ErasureState<'a> {
Let { name, val, next } => { Let { name, val, next } => {
let backup = self.ctx.clone(); let backup = self.ctx.clone();
self.ctx.insert(name.to_string(), ambient.to_relev()); self.ctx.insert(name.to_string(), ambient.as_relevance());
let val = self.erase_expr(ambient, edge, val); let val = self.erase_expr(ambient, edge, val);
let next = self.erase_expr(ambient, edge, next); let next = self.erase_expr(ambient, edge, next);
@ -470,7 +470,7 @@ impl<'a> ErasureState<'a> {
let var_rev = self let var_rev = self
.ctx .ctx
.get(&name.to_string()) .get(&name.to_string())
.expect(&format!("Uwnraping {}", name)); .unwrap();
if *var_rev == Relevance::Irrelevant && ambient != Ambient::Irrelevant { if *var_rev == Relevance::Irrelevant && ambient != Ambient::Irrelevant {
self.set_relevance(edge, Relevance::Irrelevant, name.range) self.set_relevance(edge, Relevance::Irrelevant, name.range)


@ -537,7 +537,7 @@ impl Diagnostic for PassError {
PassError::AttributeExpectsAValue(place) => DiagnosticFrame { PassError::AttributeExpectsAValue(place) => DiagnosticFrame {
code: 209, code: 209,
severity: Severity::Error, severity: Severity::Error,
title: format!("This attribute expects a value"), title: "This attribute expects a value".to_string(),
subtitles: vec![], subtitles: vec![],
hints: vec![], hints: vec![],
positions: vec![Marker { positions: vec![Marker {
@ -551,7 +551,7 @@ impl Diagnostic for PassError {
PassError::AttributeDoesNotExists(place) => DiagnosticFrame { PassError::AttributeDoesNotExists(place) => DiagnosticFrame {
code: 209, code: 209,
severity: Severity::Error, severity: Severity::Error,
title: format!("This attribute does not exists"), title: "This attribute does not exists".to_string(),
subtitles: vec![], subtitles: vec![],
hints: vec![], hints: vec![],
positions: vec![Marker { positions: vec![Marker {


@ -13,7 +13,8 @@ use kind_derive::setters::derive_setters;
use kind_report::data::Diagnostic; use kind_report::data::Diagnostic;
use kind_span::Locatable; use kind_span::Locatable;
use kind_span::Range; use kind_span::Range;
use kind_tree::concrete::{Attribute, Book, TopLevel}; use kind_tree::concrete::Module;
use kind_tree::concrete::{Attribute, TopLevel};
use crate::errors::PassError; use crate::errors::PassError;
/// Expands sum type and record definitions to a lot of /// Expands sum type and record definitions to a lot of
@ -127,12 +128,12 @@ pub fn expand_derive(
} }
} }
pub fn expand_book(error_channel: Sender<Box<dyn Diagnostic>>, book: &mut Book) -> bool { pub fn expand_module(error_channel: Sender<Box<dyn Diagnostic>>, module: &mut Module) -> bool {
let mut failed = false; let mut failed = false;
let mut entries = FxHashMap::default(); let mut entries = FxHashMap::default();
for entry in book.entries.values() { for entry in &module.entries {
match entry { match entry {
TopLevel::SumType(sum) => { TopLevel::SumType(sum) => {
if let Some(derive) = expand_derive(error_channel.clone(), &sum.attrs) { if let Some(derive) = expand_derive(error_channel.clone(), &sum.attrs) {
@ -196,10 +197,8 @@ pub fn expand_book(error_channel: Sender<Box<dyn Diagnostic>>, book: &mut Book)
         }
     }
-    for (name, (tl, count)) in entries {
-        book.count.insert(name.clone(), count);
-        book.names.insert(name.clone().to_string(), tl.name.clone());
-        book.entries.insert(name.clone(), TopLevel::Entry(tl));
+    for (_, (tl, _)) in entries {
+        module.entries.push(TopLevel::Entry(tl));
     }
     failed


@ -90,9 +90,9 @@ fn get_colorizer<T>(color: &Color) -> &dyn Fn(T) -> Paint<T> {
// TODO: Remove common indentation. // TODO: Remove common indentation.
// TODO: Prioritize inline marcations. // TODO: Prioritize inline marcations.
fn colorize_code<'a, T: Write + Sized>( fn colorize_code<T: Write + Sized>(
markers: &mut [&(Point, Point, &Marker)], markers: &mut [&(Point, Point, &Marker)],
code_line: &'a str, code_line: &str,
modify: &dyn Fn(&str) -> String, modify: &dyn Fn(&str) -> String,
fmt: &mut T, fmt: &mut T,
) -> std::fmt::Result { ) -> std::fmt::Result {


@ -11,7 +11,7 @@ pub fn compile_book(book: untyped::Book, trace: bool) -> File {
smaps: Default::default(), smaps: Default::default(),
}; };
for (_, entry) in book.entrs { for (_, entry) in book.entrs {
compile_entry(&mut file, *entry, trace); compile_entry(&mut file, entry, trace);
} }
file file
} }
@ -45,7 +45,7 @@ pub fn compile_term(expr: &untyped::Expr) -> Box<Term> {
App { fun, args } => args.iter().fold(compile_term(fun), |func, arg| { App { fun, args } => args.iter().fold(compile_term(fun), |func, arg| {
Box::new(Term::App { Box::new(Term::App {
func, func,
argm: compile_term(&arg), argm: compile_term(arg),
}) })
}), }),
Fun { name, args } | Ctr { name, args } => Box::new(Term::Ctr { Fun { name, args } | Ctr { name, args } => Box::new(Term::Ctr {
@ -80,11 +80,11 @@ fn compile_rule(name: String, rule: untyped::Rule) -> Rule {
} }
} }
fn compile_entry(file: &mut File, entry: untyped::Entry, trace: bool) { fn compile_entry(file: &mut File, entry: Box<untyped::Entry>, trace: bool) {
if entry.attrs.trace.is_some() || trace { if entry.attrs.trace.is_some() || trace {
let _with_args = entry.attrs.trace.unwrap_or(false); let _with_args = entry.attrs.trace.unwrap_or(false);
let name_trace = format!("{}__trace", entry.name.to_string()); let name_trace = format!("{}__trace", entry.name);
for rule in entry.rules { for rule in entry.rules {
file.rules.push(compile_rule(name_trace.clone(), rule)) file.rules.push(compile_rule(name_trace.clone(), rule))
} }


@ -65,8 +65,8 @@ fn encode_base64_u8(num: u8) -> char {
fn u128_to_kdl_name(mut num: u128) -> String { fn u128_to_kdl_name(mut num: u128) -> String {
let mut encoded = [0 as char; 12]; let mut encoded = [0 as char; 12];
for i in 0..12 { for item in &mut encoded {
encoded[i] = encode_base64_u8((num & 0x3f) as u8); *item = encode_base64_u8((num & 0x3f) as u8);
num >>= 6; num >>= 6;
} }
encoded.into_iter().collect() encoded.into_iter().collect()
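The `u128_to_kdl_name` change only swaps index-based iteration for iterating over `&mut encoded`; the encoding itself still peels twelve 6-bit chunks off the number, least significant first (12 × 6 = 72 bits). A self-contained sketch of that scheme, using a stand-in digit table since `encode_base64_u8` is defined elsewhere in this file:

    // Hypothetical digit alphabet; the real crate maps 6-bit values with encode_base64_u8.
    const DIGITS: &[u8; 64] =
        b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._";

    fn u128_to_name_like(mut num: u128) -> String {
        let mut encoded = ['\0'; 12];
        // 12 slots * 6 bits = 72 low bits of the number, least significant chunk first.
        for slot in &mut encoded {
            *slot = DIGITS[(num & 0x3f) as usize] as char;
            num >>= 6;
        }
        encoded.into_iter().collect()
    }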
@ -127,7 +127,7 @@ pub fn compile_book(
} }
pub fn compile_rule(ctx: &mut CompileCtx, rule: &untyped::Rule) -> kindelia_lang::ast::Rule { pub fn compile_rule(ctx: &mut CompileCtx, rule: &untyped::Rule) -> kindelia_lang::ast::Rule {
let name = ctx.kdl_names.get(rule.name.to_str()).unwrap().clone(); let name = *ctx.kdl_names.get(rule.name.to_str()).unwrap();
let mut args = Vec::new(); let mut args = Vec::new();
for pat in &rule.pats { for pat in &rule.pats {
let arg = compile_expr(ctx, pat); let arg = compile_expr(ctx, pat);
@ -135,8 +135,8 @@ pub fn compile_rule(ctx: &mut CompileCtx, rule: &untyped::Rule) -> kindelia_lang
} }
let lhs = kdl::Term::fun(name, args); let lhs = kdl::Term::fun(name, args);
let rhs = compile_expr(ctx, &rule.body); let rhs = compile_expr(ctx, &rule.body);
let rule = kdl::Rule { lhs, rhs };
rule kdl::Rule { lhs, rhs }
} }
pub fn err_term() -> kindelia_lang::ast::Term { pub fn err_term() -> kindelia_lang::ast::Term {
@ -152,7 +152,7 @@ pub fn compile_expr(ctx: &mut CompileCtx, expr: &untyped::Expr) -> kindelia_lang
From::App { fun, args } => { From::App { fun, args } => {
let mut expr = compile_expr(ctx, fun); let mut expr = compile_expr(ctx, fun);
for binding in args { for binding in args {
let body = compile_expr(ctx, &binding); let body = compile_expr(ctx, binding);
expr = To::App { expr = To::App {
func: Box::new(expr), func: Box::new(expr),
argm: Box::new(body), argm: Box::new(body),
@ -320,7 +320,7 @@ pub fn compile_expr(ctx: &mut CompileCtx, expr: &untyped::Expr) -> kindelia_lang
} => { } => {
let name = kdl::Name::from_str(param.to_str()); let name = kdl::Name::from_str(param.to_str());
if let Ok(name) = name { if let Ok(name) = name {
let body = Box::new(compile_expr(ctx, &body)); let body = Box::new(compile_expr(ctx, body));
To::Lam { name, body } To::Lam { name, body }
} else { } else {
ctx.send_err(Box::new(KdlError::InvalidVarName(param.range))); ctx.send_err(Box::new(KdlError::InvalidVarName(param.range)));
@ -357,14 +357,14 @@ pub fn compile_expr(ctx: &mut CompileCtx, expr: &untyped::Expr) -> kindelia_lang
} }
From::Str { val } => { From::Str { val } => {
let nil = kdl::Term::Ctr { let nil = kdl::Term::Ctr {
name: ctx.kdl_names.get("String.nil").unwrap().clone(), name: *ctx.kdl_names.get("String.nil").unwrap(),
args: vec![], args: vec![],
}; };
let cons_name = ctx.kdl_names.get("String.cons").unwrap().clone(); let cons_name = *ctx.kdl_names.get("String.cons").unwrap();
let cons = |numb: u128, next| kdl::Term::Ctr { let cons = |numb: u128, next| kdl::Term::Ctr {
name: cons_name.clone(), name: cons_name,
args: vec![ args: vec![
kdl::Term::Num { kdl::Term::Num {
numb: kdl::U120::new(numb).unwrap(), numb: kdl::U120::new(numb).unwrap(),
@ -407,7 +407,7 @@ pub fn compile_entry(ctx: &mut CompileCtx, entry: &untyped::Entry) {
} }
} }
if entry.rules.len() == 0 { if entry.rules.is_empty() {
// Functions with no rules become Ctr // Functions with no rules become Ctr
let sttm = kdl::Statement::Ctr { let sttm = kdl::Statement::Ctr {
name, name,
@ -463,7 +463,7 @@ impl Display for File {
writeln!(f, "{}", ctr.1)?; writeln!(f, "{}", ctr.1)?;
} }
if self.ctrs.len() > 0 && self.funs.len() > 0 { if !self.ctrs.is_empty() && !self.funs.is_empty() {
writeln!(f)?; writeln!(f)?;
} }


@ -170,7 +170,7 @@ fn split_rule(
let new_ctr = Expr::ctr(name.range, name.clone(), new_ctr_args); let new_ctr = Expr::ctr(name.range, name.clone(), new_ctr_args);
subst(&mut new_rule_body, &opat_name, &new_ctr); subst(&mut new_rule_body, opat_name, &new_ctr);
} }
(ExprKind::Var { .. }, _) => { (ExprKind::Var { .. }, _) => {
new_rule_pats.push(other_pat.clone()); new_rule_pats.push(other_pat.clone());
@ -229,7 +229,7 @@ fn flatten_entry(entry: &Entry) -> Vec<Entry> {
let rule = &entry.rules[i]; let rule = &entry.rules[i];
if must_split(rule) { if must_split(rule) {
let (old_rule, split_entries) = let (old_rule, split_entries) =
split_rule(rule, &entry, i, &mut name_count, &mut skip); split_rule(rule, entry, i, &mut name_count, &mut skip);
old_entry_rules.push(old_rule); old_entry_rules.push(old_rule);
new_entries.extend(split_entries); new_entries.extend(split_entries);
} else { } else {
@ -263,7 +263,5 @@ pub fn flatten(book: untyped::Book) -> untyped::Book {
} }
} }
let book = Book { names, entrs }; Book { names, entrs }
book
} }


@ -46,13 +46,13 @@ impl LinearizeCtx {
match arg { match arg {
Term::Var { name } => { Term::Var { name } => {
let new_name = self.create_name(); let new_name = self.create_name();
self.name_table.insert(name.clone(), new_name); self.name_table.insert(*name, new_name);
} }
Term::Ctr { name: _, args } => { Term::Ctr { name: _, args } => {
for arg in args { for arg in args {
if let Term::Var { name } = arg { if let Term::Var { name } = arg {
let new_name = self.create_name(); let new_name = self.create_name();
self.name_table.insert(name.clone(), new_name); self.name_table.insert(*name, new_name);
} else { } else {
unreachable!(); // We expect a flat rule unreachable!(); // We expect a flat rule
} }
@ -128,7 +128,7 @@ pub fn linearize_rule(rule: Rule) -> Rule {
let lhs = linearize_term(&mut ctx, &rule.lhs, true); let lhs = linearize_term(&mut ctx, &rule.lhs, true);
let vals: Vec<Name> = ctx.name_table.values().map(Name::clone).collect(); let vals: Vec<Name> = ctx.name_table.values().map(Name::clone).collect();
for val in vals { for val in vals {
let expr = Box::new(Term::Var { name: val.clone() }); let expr = Box::new(Term::Var { name: val });
rhs = dup_var(&mut ctx, &val, expr, rhs); rhs = dup_var(&mut ctx, &val, expr, rhs);
} }
Rule { Rule {
@ -141,7 +141,7 @@ pub fn linearize_term(ctx: &mut LinearizeCtx, term: &Term, lhs: bool) -> Box<Ter
let term = match term { let term = match term {
Term::Var { name } => { Term::Var { name } => {
if lhs { if lhs {
let mut name = ctx.name_table.get(name).unwrap_or(name).clone(); let mut name = *ctx.name_table.get(name).unwrap_or(name);
rename_erased(ctx, &mut name); rename_erased(ctx, &mut name);
Term::Var { name } Term::Var { name }
} else { } else {
@ -150,7 +150,7 @@ pub fn linearize_term(ctx: &mut LinearizeCtx, term: &Term, lhs: bool) -> Box<Ter
if let Some(name) = ctx.name_table.get(name) { if let Some(name) = ctx.name_table.get(name) {
let used = *ctx let used = *ctx
.uses .uses
.entry(name.clone()) .entry(*name)
.and_modify(|x| *x += 1) .and_modify(|x| *x += 1)
.or_insert(1); .or_insert(1);
let name = Name::from_str(&format!("{}.{}", name, used - 1)).unwrap(); // TODO: Think if this errs or not let name = Name::from_str(&format!("{}.{}", name, used - 1)).unwrap(); // TODO: Think if this errs or not
@ -171,16 +171,16 @@ pub fn linearize_term(ctx: &mut LinearizeCtx, term: &Term, lhs: bool) -> Box<Ter
let expr = linearize_term(ctx, expr, lhs); let expr = linearize_term(ctx, expr, lhs);
let got_0 = ctx.name_table.remove(nam0); let got_0 = ctx.name_table.remove(nam0);
let got_1 = ctx.name_table.remove(nam0); let got_1 = ctx.name_table.remove(nam0);
ctx.name_table.insert(nam0.clone(), new_nam0.clone()); ctx.name_table.insert(*nam0, new_nam0);
ctx.name_table.insert(nam1.clone(), new_nam1.clone()); ctx.name_table.insert(*nam1, new_nam1);
let body = linearize_term(ctx, body, lhs); let body = linearize_term(ctx, body, lhs);
ctx.name_table.remove(nam0); ctx.name_table.remove(nam0);
if let Some(x) = got_0 { if let Some(x) = got_0 {
ctx.name_table.insert(nam0.clone(), x); ctx.name_table.insert(*nam0, x);
} }
ctx.name_table.remove(nam1); ctx.name_table.remove(nam1);
if let Some(x) = got_1 { if let Some(x) = got_1 {
ctx.name_table.insert(nam1.clone(), x); ctx.name_table.insert(*nam1, x);
} }
let nam0 = Name::from_str(&format!("{}{}", new_nam0, ".0")).unwrap(); let nam0 = Name::from_str(&format!("{}{}", new_nam0, ".0")).unwrap();
let nam1 = Name::from_str(&format!("{}{}", new_nam1, ".0")).unwrap(); let nam1 = Name::from_str(&format!("{}{}", new_nam1, ".0")).unwrap();
@ -194,14 +194,14 @@ pub fn linearize_term(ctx: &mut LinearizeCtx, term: &Term, lhs: bool) -> Box<Ter
Term::Lam { name, body } => { Term::Lam { name, body } => {
let mut new_name = ctx.create_name(); let mut new_name = ctx.create_name();
let got_name = ctx.name_table.remove(name); let got_name = ctx.name_table.remove(name);
ctx.name_table.insert(name.clone(), new_name.clone()); ctx.name_table.insert(*name, new_name);
let body = linearize_term(ctx, body, lhs); let body = linearize_term(ctx, body, lhs);
ctx.name_table.remove(name); ctx.name_table.remove(name);
if let Some(x) = got_name { if let Some(x) = got_name {
ctx.name_table.insert(name.clone(), x); ctx.name_table.insert(*name, x);
} }
let expr = Box::new(Term::Var { let expr = Box::new(Term::Var {
name: new_name.clone(), name: new_name,
}); });
let body = dup_var(ctx, &new_name, expr, body); let body = dup_var(ctx, &new_name, expr, body);
rename_erased(ctx, &mut new_name); rename_erased(ctx, &mut new_name);
@ -222,7 +222,7 @@ pub fn linearize_term(ctx: &mut LinearizeCtx, term: &Term, lhs: bool) -> Box<Ter
new_args.push(*arg); new_args.push(*arg);
} }
Term::Ctr { Term::Ctr {
name: name.clone(), name: *name,
args: new_args, args: new_args,
} }
} }
@ -233,7 +233,7 @@ pub fn linearize_term(ctx: &mut LinearizeCtx, term: &Term, lhs: bool) -> Box<Ter
new_args.push(*arg); new_args.push(*arg);
} }
Term::Fun { Term::Fun {
name: name.clone(), name: *name,
args: new_args, args: new_args,
} }
} }
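The edits throughout this linearizer replace `name.clone()` and cloned table lookups with plain copies (`*name`), which suggests `Name` is a `Copy` type here; cloning a `Copy` value is what clippy's `clone_on_copy` warns about. A tiny illustration with a hypothetical `Copy` key type:

    use std::collections::HashMap;

    // Hypothetical stand-in for the interned `Name` used by the linearizer.
    #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
    struct NameId(u64);

    fn remember(table: &mut HashMap<NameId, NameId>, name: &NameId, fresh: NameId) {
        // With a Copy key, `*name` is all that's needed; `name.clone()` compiles
        // but just performs the same copy, which is what clone_on_copy flags.
        table.insert(*name, fresh);
    }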


@ -5,11 +5,18 @@ edition = "2021"
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
-[dependencies]
+[dev-dependencies]
 kind-driver = { path = "../kind-driver" }
-kind-report = { path = "../kind-report" }
+kind-parser = { path = "../kind-parser" }
+kind-tree = { path = "../kind-tree" }
+kind-span = { path = "../kind-span" }
+kind-report = { path = "../kind-report" }
 kind-checker = { path = "../kind-checker" }
-kind-query = { path = "../kind-query" }
+kind-pass = { path = "../kind-pass" }
+kind-target-hvm = { path = "../kind-target-hvm" }
+kind-target-kdl = { path = "../kind-target-kdl" }
 pretty_assertions = "1.3.0"
 ntest = "0.8.1"
 walkdir = "2"


@ -0,0 +1,212 @@
#![feature(test)]
extern crate test;
use std::{fs, path::{PathBuf, Path}};
use driver::{resolution};
use kind_driver::session::Session;
use kind_pass::{expand::{self, uses::expand_uses}, desugar, erasure};
use kind_tree::concrete;
use test::Bencher;
use kind_checker as checker;
use kind_driver as driver;
fn new_session() -> Session {
let (rx, _) = std::sync::mpsc::channel();
let root = PathBuf::from("./suite/lib").canonicalize().unwrap();
Session::new(root, rx)
}
fn get_book(session: &mut Session, path: &str) -> Result<concrete::Book, String> {
let path = PathBuf::from(path);
match resolution::parse_and_store_book(session, &path) {
Some(res) => Ok(res),
None => Err("Cannot parse".to_string())
}
}
fn exp_paths() -> Vec<&'static str> {
vec![
"./suite/eval/Getters.kind2",
"./suite/eval/Setters.kind2",
"./suite/eval/DoNotation.kind2",
"./suite/eval/User.kind2",
]
}
#[bench]
fn bench_exp_pure_parsing(b: &mut Bencher) {
let paths = exp_paths();
let paths: Vec<_> = paths.iter().map(|x| fs::read_to_string(x).unwrap()).collect();
b.iter(|| {
paths.iter().map(|input| {
let session = new_session();
kind_parser::parse_book(session.diagnostic_sender.clone(), 0, &input)
}).fold(0, |n, _| n + 1)
})
}
#[bench]
fn bench_exp_pure_use_expansion(b: &mut Bencher) {
let paths = exp_paths();
let mut paths: Vec<_> = paths.iter().map(|x| {
let input = fs::read_to_string(x).unwrap();
let (rx, _) = std::sync::mpsc::channel();
let (modu, failed) = kind_parser::parse_book(rx, 0, &input);
assert!(!failed);
modu
}).collect();
b.iter(|| {
paths.iter_mut().map(|module| {
let (rx, _) = std::sync::mpsc::channel();
expand_uses(module, rx);
}).fold(0, |n, _| n + 1)
})
}
#[bench]
fn bench_exp_pure_derive_expansion(b: &mut Bencher) {
let paths = exp_paths();
let mut books: Vec<_> = paths.iter().map(|x| {
let input = fs::read_to_string(x).unwrap();
let (rx, _) = std::sync::mpsc::channel();
let (mut module, failed) = kind_parser::parse_book(rx.clone(), 0, &input);
assert!(!failed);
expand_uses(&mut module, rx);
module
}).collect();
b.iter(|| {
books.iter_mut().map(|module| {
let (rx, tx) = std::sync::mpsc::channel();
expand::expand_module(rx, module);
assert!(tx.iter().collect::<Vec<_>>().is_empty())
}).fold(0, |n, _| n + 1)
})
}
#[bench]
fn bench_exp_pure_check_unbound(b: &mut Bencher) {
let paths = exp_paths();
let mut books: Vec<_> = paths.iter().map(|x| {
let mut session = new_session();
let book = resolution::parse_and_store_book(&mut session, &PathBuf::from(x)).unwrap();
(session, book)
}).collect();
b.iter(|| {
books.iter_mut().map(|(session, book)| {
let failed = resolution::check_unbound_top_level(session, book);
assert!(!failed)
}).fold(0, |n, _| n + 1)
})
}
#[bench]
fn bench_exp_pure_desugar(b: &mut Bencher) {
let paths = exp_paths();
let mut books: Vec<_> = paths.iter().map(|x| {
let mut session = new_session();
let mut book = resolution::parse_and_store_book(&mut session, &PathBuf::from(x)).unwrap();
let failed = resolution::check_unbound_top_level(&mut session, &mut book);
assert!(!failed);
(session, book)
}).collect();
b.iter(|| {
books.iter_mut().map(|(session, book)| {
desugar::desugar_book(session.diagnostic_sender.clone(), &book).unwrap()
}).fold(0, |n, _| n + 1)
})
}
#[bench]
fn bench_exp_pure_erase(b: &mut Bencher) {
let paths = exp_paths();
let books: Vec<_> = paths.iter().map(|x| {
let mut session = new_session();
let mut book = resolution::parse_and_store_book(&mut session, &PathBuf::from(x)).unwrap();
let failed = resolution::check_unbound_top_level(&mut session, &mut book);
let book = desugar::desugar_book(session.diagnostic_sender.clone(), &book).unwrap();
assert!(!failed);
(session, book)
}).collect();
b.iter(|| {
books.iter().map(|(session, book)| {
erasure::erase_book(
book,
session.diagnostic_sender.clone(),
vec!["Main".to_string()],
).unwrap();
}).fold(0, |n, _| n + 1)
})
}
#[bench]
fn bench_exp_pure_to_hvm(b: &mut Bencher) {
let paths = exp_paths();
let books: Vec<_> = paths.iter().map(|x| {
let mut session = new_session();
let mut book = resolution::parse_and_store_book(&mut session, &PathBuf::from(x)).unwrap();
let failed = resolution::check_unbound_top_level(&mut session, &mut book);
let book = desugar::desugar_book(session.diagnostic_sender.clone(), &book).unwrap();
assert!(!failed);
let book = erasure::erase_book(
&book,
session.diagnostic_sender.clone(),
vec!["Main".to_string()],
).unwrap();
(session, book)
}).collect();
b.iter(move || {
books.iter().map(move |(_, book)| {
kind_target_hvm::compile_book(book.to_owned(), false)
}).fold(0, |n, _| n + 1)
})
}
#[bench]
fn bench_exp_pure_check_without_the_checker(b: &mut Bencher) {
let mut paths = exp_paths();
let books: Vec<_> = paths.iter().map(|x| {
let mut session = new_session();
let mut book = resolution::parse_and_store_book(&mut session, &PathBuf::from(x)).unwrap();
let failed = resolution::check_unbound_top_level(&mut session, &mut book);
let book = desugar::desugar_book(session.diagnostic_sender.clone(), &book).unwrap();
assert!(!failed);
(session, book)
}).collect();
b.iter(move || {
books.iter().map(move |(session, book)| {
let all = book.entrs.iter().map(|x| x.0).cloned().collect();
let succeeded = checker::type_check(book, session.diagnostic_sender.clone(), all);
assert!(succeeded)
}).fold(0, |n, _| n + 1)
})
}
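These new benchmarks rely on the unstable `test` crate (`#![feature(test)]`, `test::Bencher`), so they only compile on a nightly toolchain and run through `cargo bench`. For reference only, a rough stable-toolchain stand-in (not used by the crate) would time the closures by hand:

    use std::time::Instant;

    // Crude manual timing loop; `test::Bencher` picks iteration counts adaptively,
    // this just runs a fixed number of iterations and averages.
    fn time_it<T>(label: &str, mut f: impl FnMut() -> T) {
        let iters: u32 = 100;
        let start = Instant::now();
        for _ in 0..iters {
            std::hint::black_box(f());
        }
        println!("{label}: {:?} per iteration", start.elapsed() / iters);
    }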


@ -0,0 +1,44 @@
#derive[match]
type Algebra.Laws.Inverse <t> <concat: (t -> t -> t)> <inverse: (t -> t)> <empty: t> {
new (left_inverse: ((x : t) -> (Equal empty (concat x (inverse x))))) (right_inverse: ((x : t) -> (Equal empty (concat (inverse x) x))))
}
type Algebra.Magma <t: Type> {
new (concat: (t -> t -> t))
}
type Algebra.Semigroup <t: Type> {
new (magma: (Algebra.Magma t)) (associativity: (Algebra.Laws.associativity.eta (Algebra.Magma.concat magma)))
}
Algebra.Group.concat <t> (group: (Algebra.Group t)) : (t -> t -> t)
Algebra.Group.concat t (Algebra.Group.new t_ monoid inverse inverse_proof) = (Algebra.Monoid.concat monoid)
Algebra.Laws.associativity.eta <t> (concat: (t -> t -> t)) : Type
Algebra.Laws.associativity.eta t concat = ((a : t) -> (b : t) -> (c : t) -> (Equal (concat (concat a b) c) (concat a (concat b c))))
type Algebra.Laws.Identity <t> <concat: (t -> t -> t)> <empty: t> {
new (left_identity: ((x : t) -> (Equal x (concat empty x)))) (right_identity: ((x : t) -> (Equal x (concat x empty))))
}
Algebra.Monoid.empty <t> (monoid: (Algebra.Monoid t)) : t
Algebra.Monoid.empty t (Algebra.Monoid.new t_ sg empty id) = empty
type Algebra.Monoid <t: Type> {
new (sg: (Algebra.Semigroup t)) (empty: t) (identity: (Algebra.Laws.Identity t (Algebra.Semigroup.concat sg) empty))
}
Algebra.Semigroup.concat <t> (semigroup: (Algebra.Semigroup t)) : (t -> t -> t)
Algebra.Semigroup.concat t (Algebra.Semigroup.new t_ magma assoc) = (Algebra.Magma.concat magma)
Algebra.Monoid.concat <t> (monoid: (Algebra.Monoid t)) : (t -> t -> t)
Algebra.Monoid.concat t (Algebra.Monoid.new t_ sg empty id) = (Algebra.Semigroup.concat sg)
Algebra.Magma.concat <t> (magma: (Algebra.Magma t)) : (t -> t -> t)
Algebra.Magma.concat t (Algebra.Magma.new t_ concat) = concat
Equal <t> (a: t) (b: t) : Type
type Algebra.Group <t> {
new (monoid: (Algebra.Monoid t)) (invert: (t -> t)) (inverse: (Algebra.Laws.Inverse t (Algebra.Monoid.concat monoid) invert (Algebra.Monoid.empty monoid)))
}

File diff suppressed because it is too large.


@ -3,7 +3,7 @@
* Expected: U60 * Expected: U60
/--[tests/suite/checker/derive/Inspection.kind2:3:3] /--[suite/checker/Inspection.kind2:3:3]
| |
2 | Main = 2 | Main =
3 | ? 3 | ?


@ -0,0 +1 @@
Ok!


@ -0,0 +1 @@
Ok!


@ -1,6 +1,6 @@
ERROR The case is not covering all the values inside of it! ERROR The case is not covering all the values inside of it!
/--[tests/suite/checker/derive/fail/IncompleteCase.kind2:12:9] /--[suite/checker/derive/fail/IncompleteCase.kind2:12:9]
| |
11 | let User.new (ttt = e) e .. = User.new 2 4 1 11 | let User.new (ttt = e) e .. = User.new 2 4 1
12 | let User.new (ttt = f) name = User.new 6 7 3 12 | let User.new (ttt = f) name = User.new 6 7 3


@ -1,6 +1,6 @@
ERROR Repeated named variable ERROR Repeated named variable
/--[tests/suite/checker/derive/fail/Repeated.kind2:12:19] /--[suite/checker/derive/fail/Repeated.kind2:12:19]
| |
11 | let User.new (ttt = e) e .. = User.new 2 4 1 11 | let User.new (ttt = e) e .. = User.new 2 4 1
12 | let User.new (ttt = f) ttt = User.new 6 7 3 12 | let User.new (ttt = f) ttt = User.new 6 7 3
@ -12,7 +12,7 @@
ERROR The case is not covering all the values inside of it! ERROR The case is not covering all the values inside of it!
/--[tests/suite/checker/derive/fail/Repeated.kind2:12:9] /--[suite/checker/derive/fail/Repeated.kind2:12:9]
| |
11 | let User.new (ttt = e) e .. = User.new 2 4 1 11 | let User.new (ttt = e) e .. = User.new 2 4 1
12 | let User.new (ttt = f) ttt = User.new 6 7 3 12 | let User.new (ttt = f) ttt = User.new 6 7 3


@ -1,6 +1,6 @@
ERROR Defined multiple times for the same name ERROR Defined multiple times for the same name
/--[tests/suite/checker/derive/fail/RepeatedDef.kind2:2:5] /--[suite/checker/derive/fail/RepeatedDef.kind2:2:5]
| |
1 | type Nat { 1 | type Nat {
2 | zero 2 | zero


@ -4,7 +4,7 @@
* Expected : (Eq _ 123u120 124u120) * Expected : (Eq _ 123u120 124u120)
/--[tests/suite/checker/derive/fail/WrongU120Eq.kind2:12:9] /--[suite/checker/derive/fail/WrongU120Eq.kind2:12:9]
| |
11 | Teste : Eq 123u120 124u120 11 | Teste : Eq 123u120 124u120
12 | Teste = Eq.rfl 12 | Teste = Eq.rfl


@ -0,0 +1,21 @@
ERROR Type mismatch
* Got : ((x_1 : Type) -> (x_2 : Type) -> Type)
* Expected : ((x_1 : t) -> (x_2 : t) -> t)
* Context:
* t : Type
* t_ : Type
* t_ = t
* magma : (Algebra.Magma Type)
* assoc : ((a : _) -> (b : _) -> (c : _) -> (Equal _ (((Algebra.Magma.concat _ magma) (((Algebra.Magma.concat _ magma) a) b)) c) (((Algebra.Magma.concat _ magma) a) (((Algebra.Magma.concat _ magma) b) c))))
/--[suite/checker/fail/MismatchOne.kind2:32:69]
|
31 | Algebra.Semigroup.concat <t> (semigroup: (Algebra.Semigroup t)) : (t -> t -> t)
32 | Algebra.Semigroup.concat t (Algebra.Semigroup.new t_ magma assoc) = (Algebra.Magma.concat magma)
| v---------------------------
| \Here!
33 |


@ -0,0 +1,44 @@
#derive[match]
type Algebra.Laws.Inverse <t> <concat: (t -> t -> t)> <inverse: (t -> t)> <empty: t> {
new (left_inverse: ((x : t) -> (Equal empty (concat x (inverse x))))) (right_inverse: ((x : t) -> (Equal empty (concat (inverse x) x))))
}
type Algebra.Magma <t: Type> {
new (concat: (t -> t -> t))
}
type Algebra.Semigroup <t: Type> {
new (magma: (Algebra.Magma Type)) (associativity: (Algebra.Laws.associativity.eta (Algebra.Magma.concat magma)))
}
Algebra.Group.concat <t> (group: (Algebra.Group t)) : (t -> t -> t)
Algebra.Group.concat t (Algebra.Group.new t_ monoid inverse inverse_proof) = (Algebra.Monoid.concat monoid)
Algebra.Laws.associativity.eta <t> (concat: (t -> t -> t)) : Type
Algebra.Laws.associativity.eta t concat = ((a : t) -> (b : t) -> (c : t) -> (Equal (concat (concat a b) c) (concat a (concat b c))))
type Algebra.Laws.Identity <t> <concat: (t -> t -> t)> <empty: t> {
new (left_identity: ((x : t) -> (Equal x (concat empty x)))) (right_identity: ((x : t) -> (Equal x (concat x empty))))
}
Algebra.Monoid.empty <t> (monoid: (Algebra.Monoid t)) : t
Algebra.Monoid.empty t (Algebra.Monoid.new t_ sg empty id) = empty
type Algebra.Monoid <t: Type> {
new (sg: (Algebra.Semigroup t)) (empty: t) (identity: (Algebra.Laws.Identity t (Algebra.Semigroup.concat sg) empty))
}
Algebra.Semigroup.concat <t> (semigroup: (Algebra.Semigroup t)) : (t -> t -> t)
Algebra.Semigroup.concat t (Algebra.Semigroup.new t_ magma assoc) = (Algebra.Magma.concat magma)
Algebra.Monoid.concat <t> (monoid: (Algebra.Monoid t)) : (t -> t -> t)
Algebra.Monoid.concat t (Algebra.Monoid.new t_ sg empty id) = (Algebra.Semigroup.concat sg)
Algebra.Magma.concat <t> (magma: (Algebra.Magma t)) : (t -> t -> t)
Algebra.Magma.concat t (Algebra.Magma.new t_ concat) = concat
Equal <t> (a: t) (b: t) : Type
type Algebra.Group <t> {
new (monoid: (Algebra.Monoid t)) (invert: (t -> t)) (inverse: (Algebra.Laws.Inverse t (Algebra.Monoid.concat monoid) invert (Algebra.Monoid.empty monoid)))
}


@ -0,0 +1,16 @@
ERROR Type mismatch
* Got : Type
* Expected : U60
* Context:
* a : U60
/--[suite/checker/fail/MismatchTwo.kind2:2:10]
|
1 | Main (a: U60): U60
2 | Main a = Type
| v---
| \Here!


@ -0,0 +1,2 @@
Main (a: U60): U60
Main a = Type


@ -1,6 +1,6 @@
ERROR Required functions are not implemented for this type. ERROR Required functions are not implemented for this type.
/--[tests/suite/eval/NoMatch.kind2:3:5] /--[suite/eval/NoMatch.kind2:3:5]
| |
| / | /
3 | | match NoMatch NoMatch.pudding { 3 | | match NoMatch NoMatch.pudding {


@ -37,11 +37,11 @@ fn test_kind2(path: &Path, run: fn(&Path) -> String) -> Result<(), Error> {
} }
#[test] #[test]
#[timeout(15000)] #[timeout(30000)]
fn test_checker() -> Result<(), Error> { fn test_checker() -> Result<(), Error> {
test_kind2(Path::new("./tests/suite/checker"), |path| { test_kind2(Path::new("./suite/checker"), |path| {
let (rx, tx) = std::sync::mpsc::channel(); let (rx, tx) = std::sync::mpsc::channel();
let root = PathBuf::from("./tests/suite/lib").canonicalize().unwrap(); let root = PathBuf::from("./suite/lib").canonicalize().unwrap();
let mut session = Session::new(root, rx); let mut session = Session::new(root, rx);
let entrypoints = vec!["Main".to_string()]; let entrypoints = vec!["Main".to_string()];
@ -71,9 +71,9 @@ fn test_checker() -> Result<(), Error> {
#[test] #[test]
#[timeout(15000)] #[timeout(15000)]
fn test_eval() -> Result<(), Error> { fn test_eval() -> Result<(), Error> {
test_kind2(Path::new("./tests/suite/eval"), |path| { test_kind2(Path::new("./suite/eval"), |path| {
let (rx, tx) = std::sync::mpsc::channel(); let (rx, tx) = std::sync::mpsc::channel();
let root = PathBuf::from("./tests/suite/lib").canonicalize().unwrap(); let root = PathBuf::from("./suite/lib").canonicalize().unwrap();
let mut session = Session::new(root, rx); let mut session = Session::new(root, rx);
let entrypoints = vec!["Main".to_string()]; let entrypoints = vec!["Main".to_string()];
@ -107,9 +107,9 @@ fn test_eval() -> Result<(), Error> {
#[test] #[test]
#[timeout(15000)] #[timeout(15000)]
fn test_kdl() -> Result<(), Error> { fn test_kdl() -> Result<(), Error> {
test_kind2(Path::new("./tests/suite/kdl"), |path| { test_kind2(Path::new("./suite/kdl"), |path| {
let (rx, tx) = std::sync::mpsc::channel(); let (rx, tx) = std::sync::mpsc::channel();
let root = PathBuf::from("./tests/suite/lib").canonicalize().unwrap(); let root = PathBuf::from("./suite/lib").canonicalize().unwrap();
let mut session = Session::new(root, rx); let mut session = Session::new(root, rx);
let entrypoints = vec!["Main".to_string()]; let entrypoints = vec!["Main".to_string()];
@ -123,7 +123,7 @@ fn test_kdl() -> Result<(), Error> {
match check { match check {
Some(file) if diagnostics.is_empty() => { Some(file) if diagnostics.is_empty() => {
file.to_string() file.to_string()
} },
_ => { _ => {
let mut res_string = String::new(); let mut res_string = String::new();


@ -1,11 +0,0 @@
ERROR Unexpected token ':'.
/--[tests/suite/checker/derive/Algebra.kind2:1:69]
|
1 | Algebra.Group.new <t: Type> (monoid: (Algebra.Monoid t)) (invert: (_: t) t) (inverse: (Algebra.Laws.Inverse t (Algebra.Monoid.concat _ monoid) invert (Algebra.Monoid.empty _ monoid))) : (Algebra.Group t)
| v
| \Here!
2 |
3 | Algebra.Group (t: Type) : Type


@ -1,41 +0,0 @@
Algebra.Group.new <t: Type> (monoid: (Algebra.Monoid t)) (invert: (_: t) t) (inverse: (Algebra.Laws.Inverse t (Algebra.Monoid.concat _ monoid) invert (Algebra.Monoid.empty _ monoid))) : (Algebra.Group t)
Algebra.Group (t: Type) : Type
Algebra.Monoid.concat <t: Type> (monoid: (Algebra.Monoid t)) : (_: t) (_: t) t
Algebra.Monoid.concat t (Algebra.Monoid.new t_ sg empty id) = (Algebra.Semigroup.concat _ sg)
Algebra.Monoid.new <t: Type> (sg: (Algebra.Semigroup t)) (empty: t) (identity: (Algebra.Laws.Identity t (Algebra.Semigroup.concat _ sg) empty)) : (Algebra.Monoid t)
Algebra.Semigroup (t: Type) : Type
Algebra.Laws.Identity (t: Type) (concat: (_: t) (_: t) t) (empty: t) : Type
Algebra.Monoid (t: Type) : Type
Algebra.Semigroup.concat <t: Type> (semigroup: (Algebra.Semigroup t)) : (_: t) (_: t) t
Algebra.Semigroup.concat t (Algebra.Semigroup.new t_ magma assoc) = (Algebra.Magma.concat _ magma)
Algebra.Semigroup.new <t: Type> (magma: (Algebra.Magma t)) (associativity: (Algebra.Laws.associativity.eta _ (Algebra.Magma.concat _ magma))) : (Algebra.Semigroup t)
Algebra.Magma (t: Type) : Type
Algebra.Laws.associativity.eta <t: Type> (concat: (_: t) (_: t) t) : Type
Algebra.Laws.associativity.eta t concat = (a: t) (b: t) (c: t) (Equal _ (concat (concat a b) c) (concat a (concat b c)))
Equal <t: Type> (a: t) (b: t) : Type
Algebra.Magma.concat <t: Type> (magma: (Algebra.Magma t)) : (_: t) (_: t) t
Algebra.Magma.concat t (Algebra.Magma.new t_ concat) = concat
Algebra.Magma.new <t: Type> (concat: (_: t) (_: t) t) : (Algebra.Magma t)
Algebra.Monoid.empty <t: Type> (monoid: (Algebra.Monoid t)) : t
Algebra.Monoid.empty t (Algebra.Monoid.new t_ sg empty id) = empty
Algebra.Laws.Inverse (t: Type) (concat: (_: t) (_: t) t) (inverse: (_: t) t) (empty: t) : Type


@ -359,11 +359,10 @@ impl Display for SttmKind {
write!(f, "{};\n{}", expr, next) write!(f, "{};\n{}", expr, next)
} }
SttmKind::Return(ret) => { SttmKind::Return(ret) => {
write!(f, "return {}\n", ret) writeln!(f, "return {}", ret)
} }
SttmKind::RetExpr(ret) => { SttmKind::RetExpr(ret) => {
write!(f, "{}\n", ret) writeln!(f, "{}", ret)
} }
} }
} }
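`write!(f, "{}\n", ret)` and `writeln!(f, "{}", ret)` print the same thing; the `writeln!` form keeps the newline out of the format string, which is what clippy's `write_with_newline` asks for. Minimal demonstration against a plain `String` (which implements `fmt::Write`):

    use std::fmt::Write;

    fn render_return(ret: &str) -> String {
        let mut out = String::new();
        // Same output as write!(out, "return {}\n", ret), just more idiomatic.
        writeln!(out, "return {}", ret).unwrap();
        out
    }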


@ -178,14 +178,14 @@ impl Expr {
pub fn fun(range: Range, name: QualifiedIdent, args: Vec<Box<Expr>>) -> Box<Expr> { pub fn fun(range: Range, name: QualifiedIdent, args: Vec<Box<Expr>>) -> Box<Expr> {
Box::new(Expr { Box::new(Expr {
range: range.into(), range,
data: ExprKind::Fun { name, args }, data: ExprKind::Fun { name, args },
}) })
} }
pub fn ctr(range: Range, name: QualifiedIdent, args: Vec<Box<Expr>>) -> Box<Expr> { pub fn ctr(range: Range, name: QualifiedIdent, args: Vec<Box<Expr>>) -> Box<Expr> {
Box::new(Expr { Box::new(Expr {
range: range.into(), range,
data: ExprKind::Ctr { name, args }, data: ExprKind::Ctr { name, args },
}) })
} }


@ -107,21 +107,21 @@ impl Expr {
pub fn fun(range: Range, name: QualifiedIdent, args: Vec<Box<Expr>>) -> Box<Expr> { pub fn fun(range: Range, name: QualifiedIdent, args: Vec<Box<Expr>>) -> Box<Expr> {
Box::new(Expr { Box::new(Expr {
range: range.into(), range,
data: ExprKind::Fun { name, args }, data: ExprKind::Fun { name, args },
}) })
} }
pub fn app(range: Range, fun: Box<Expr>, args: Vec<Box<Expr>>) -> Box<Expr> { pub fn app(range: Range, fun: Box<Expr>, args: Vec<Box<Expr>>) -> Box<Expr> {
Box::new(Expr { Box::new(Expr {
range: range.into(), range,
data: ExprKind::App { fun, args }, data: ExprKind::App { fun, args },
}) })
} }
pub fn ctr(range: Range, name: QualifiedIdent, args: Vec<Box<Expr>>) -> Box<Expr> { pub fn ctr(range: Range, name: QualifiedIdent, args: Vec<Box<Expr>>) -> Box<Expr> {
Box::new(Expr { Box::new(Expr {
range: range.into(), range,
data: ExprKind::Ctr { name, args }, data: ExprKind::Ctr { name, args },
}) })
} }