From 10e496dcd21e3b2fc6aac32dc44fcb6cd3fb05b5 Mon Sep 17 00:00:00 2001 From: mjh <61671361+mjh316@users.noreply.github.com> Date: Tue, 4 Oct 2022 00:45:09 -0700 Subject: [PATCH 01/17] Fixed a typo in the `map` function example --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 5508155e..1f0576a3 100644 --- a/README.md +++ b/README.md @@ -22,7 +22,7 @@ Pure functions are defined via equations, as in [Haskell](https://www.haskell.or // Applies a function to every element of a list map (list: List a) (f: a -> b) : List b map a b Nil f = Nil -map a b (Cons head tail) f = Cons (f x) (map tail f) +map a b (Cons head tail) f = Cons (f head) (map tail f) ``` Side-effective programs are written via monadic monads, resembling [Rust](https://www.rust-lang.org/) and [TypeScript](https://www.typescriptlang.org/): From 06a500d5a77b51711e84c9f9211a861a3ee08beb Mon Sep 17 00:00:00 2001 From: Nicolas Abril Date: Mon, 3 Oct 2022 11:04:09 +0200 Subject: [PATCH 02/17] Check if rule patterns are valid after compilation --- Cargo.lock | 2 +- src/codegen/kdl/book.rs | 56 ++++++++++++++++++----- tests/suite/to_kdl/invalid_fun_pat.golden | 1 + tests/suite/to_kdl/invalid_fun_pat.kind2 | 6 +++ 4 files changed, 53 insertions(+), 12 deletions(-) create mode 100644 tests/suite/to_kdl/invalid_fun_pat.golden create mode 100644 tests/suite/to_kdl/invalid_fun_pat.kind2 diff --git a/Cargo.lock b/Cargo.lock index 8cfe6d3e..f6ea18df 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -446,7 +446,7 @@ dependencies = [ [[package]] name = "kind2" -version = "0.2.76" +version = "0.2.79" dependencies = [ "clap", "highlight_error", diff --git a/src/codegen/kdl/book.rs b/src/codegen/kdl/book.rs index a9cc5aa8..f5dc986b 100644 --- a/src/codegen/kdl/book.rs +++ b/src/codegen/kdl/book.rs @@ -101,6 +101,7 @@ pub fn compile_book(book: &Book) -> Result { if let Term::Typ { orig: _ } = &*entry.tipo { continue; } + // TODO: Group errors for all entries let entrs = compile_entry(book, entry)?; for entry in entrs { comp_book.names.push(entry.name.clone()); @@ -112,18 +113,26 @@ pub fn compile_book(book: &Book) -> Result { // Can become multiple entries after flatenning pub fn compile_entry(book: &Book, entry: &Entry) -> Result, String> { - fn compile_rule(book: &Book, entry: &Entry, rule: &Rule) -> CompRule { + fn compile_rule(book: &Book, entry: &Entry, rule: &Rule) -> Result { let name = rule.name.0.clone(); let mut pats = Vec::new(); for (arg, pat) in entry.args.iter().zip(rule.pats.iter()) { if !arg.eras { let pat = erase(book, pat); - // TODO: Check if the pattern has some invalid term (anything other than num, ctr or var) - pats.push(pat); + match is_valid_pattern(&*pat) { + Ok(()) => { + pats.push(pat); + } + Err(err_term) => { + // TODO: Add Display trait for compterms + // TODO: Tell the user exactly why this term is incorrect + return Err(format!("Found invalid term \"{:?}\" in rule pattern matching for entry \"{}\".", err_term, entry.name)); + } + } } } let body = erase(book, &rule.body); - CompRule { name, pats, body } + Ok(CompRule { name, pats, body }) } fn make_u120_new(old_entry: &Entry) -> CompEntry { @@ -208,13 +217,12 @@ pub fn compile_entry(book: &Book, entry: &Entry) -> Result, Strin // high and low are used for type compatibility with u60 "U120.low" => Ok(vec![make_u120_low(&entry)]), _ => { - let new_entry = CompEntry { - name: entry.name.0.clone(), - args: entry.args.iter().filter(|x| !x.eras).map(|x| x.name.0.clone()).collect(), - rules: entry.rules.iter().map(|rule| 
compile_rule(book, entry, rule)).collect(), - attrs: entry.attrs.clone(), - orig: true, - }; + let name = entry.name.0.clone(); + let args = entry.args.iter().filter(|x| !x.eras).map(|x| x.name.0.clone()).collect(); + // TODO: Group all errs together instead of failing on the first one + let rules = entry.rules.iter().map(|rule| compile_rule(book, entry, rule)).collect::, String>>()?; + let attrs = entry.attrs.clone(); + let new_entry = CompEntry { name, args, rules, attrs, orig: true }; // TODO: We probably need to handle U60 separately as well. // Since they compile to U120, it wont overflow as expected and conversion to signed will fail. let new_entry = convert_u120_entry(new_entry)?; @@ -229,6 +237,32 @@ pub fn compile_entry(book: &Book, entry: &Entry) -> Result, Strin } } +// True if the compiled term is a valid rule pattern. +// Rule patterns must be normalized terms with only Ctrs, Nums and Vars (no Lams, Dups or Lets) +pub fn is_valid_pattern(pat: &CompTerm) -> Result<(), &CompTerm> { + let mut check_stack: Vec<&CompTerm> = vec![pat]; + while !check_stack.is_empty() { + let term = check_stack.pop().unwrap(); + match term { + CompTerm::Ctr { args, .. } => { + for arg in args { + check_stack.push(arg); + } + }, + CompTerm::Var { .. } => (), + CompTerm::Num { .. } => (), + CompTerm::Lam { .. } => { return Err(term) } + CompTerm::App { .. } => { return Err(term) } + CompTerm::Dup { .. } => { return Err(term) } + CompTerm::Let { .. } => { return Err(term) } + CompTerm::Fun { .. } => { return Err(term) } + CompTerm::Op2 { .. } => { return Err(term) } + CompTerm::Nil { .. } => { return Err(term) } + }; + } + Ok(()) +} + // Splits an entry with rules with nested cases into multiple entries with flattened rules. pub fn flatten(entry: CompEntry) -> Vec { fn post_inc(n: &mut u64) -> u64 { diff --git a/tests/suite/to_kdl/invalid_fun_pat.golden b/tests/suite/to_kdl/invalid_fun_pat.golden new file mode 100644 index 00000000..9f1adfa8 --- /dev/null +++ b/tests/suite/to_kdl/invalid_fun_pat.golden @@ -0,0 +1 @@ +Found invalid term "Fun { name: "A", args: [Ctr { name: "B.new", args: [Num { numb: 1 }] }, Num { numb: 1 }] }" in rule pattern matching for entry "A". \ No newline at end of file diff --git a/tests/suite/to_kdl/invalid_fun_pat.kind2 b/tests/suite/to_kdl/invalid_fun_pat.kind2 new file mode 100644 index 00000000..0bd0036b --- /dev/null +++ b/tests/suite/to_kdl/invalid_fun_pat.kind2 @@ -0,0 +1,6 @@ +// Fails on the internal (A ...) since function application is not allowed in pattern +A (b : B) (n : U60) : U60 +A (B.new (A (B.new 1) 1)) 1 = 9 + +B : Type +B.new (n : U60) : B \ No newline at end of file From d835522697ef38dd3057ece0880fe5e9c816012d Mon Sep 17 00:00:00 2001 From: Nicolas Abril Date: Tue, 4 Oct 2022 11:09:32 +0200 Subject: [PATCH 03/17] Use Ident type in more places --- src/book.rs | 4 +- src/book/name.rs | 8 ++ src/codegen/kdl.rs | 86 ++++++----- src/codegen/kdl/book.rs | 166 +++++++++++----------- src/driver/loader.rs | 2 +- src/lowering.rs | 2 +- src/lowering/adjust.rs | 5 +- src/lowering/resolve.rs | 4 +- src/parser.rs | 2 +- tests/suite/to_kdl/invalid_fun_pat.golden | 2 +- 10 files changed, 139 insertions(+), 142 deletions(-) diff --git a/src/book.rs b/src/book.rs index 0582459c..e0268aad 100644 --- a/src/book.rs +++ b/src/book.rs @@ -27,7 +27,7 @@ pub struct Attribute { // A book is a collection of entries. 
#[derive(Clone, Debug, Default)] pub struct Book { - pub names: Vec, + pub names: Vec, pub entrs: HashMap>, pub holes: u64, } @@ -181,7 +181,7 @@ impl Display for Entry { impl Display for Book { fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> { for name in &self.names { - writeln!(f, "{}\n", self.entrs.get(&Ident(name.clone())).unwrap())?; + writeln!(f, "{}\n", self.entrs.get(&name).unwrap())?; } Ok(()) } diff --git a/src/book/name.rs b/src/book/name.rs index f9329aeb..ebf5dc6e 100644 --- a/src/book/name.rs +++ b/src/book/name.rs @@ -57,6 +57,10 @@ impl EncodedName { } impl Ident { + pub fn new(name: &str) -> Ident { + Ident(name.to_string()) + } + pub fn encode(&self) -> EncodedName { EncodedName::from_string(&self.0) } @@ -77,6 +81,10 @@ impl Ident { false } } + + pub fn len(&self) -> usize { + self.0.len() + } } impl Path { diff --git a/src/codegen/kdl.rs b/src/codegen/kdl.rs index d940580b..b4472c3e 100644 --- a/src/codegen/kdl.rs +++ b/src/codegen/kdl.rs @@ -9,9 +9,9 @@ use std::collections::HashMap; pub const KDL_NAME_LEN: usize = 12; -pub fn to_kdl_term(kdl_names: &HashMap, term: &CompTerm) -> Result { +pub fn to_kdl_term(kdl_names: &HashMap, term: &CompTerm) -> Result { let term = match term { - CompTerm::Var { name } => name.clone(), + CompTerm::Var { name } => name.to_string(), CompTerm::Lam { name, body } => { let body = to_kdl_term(kdl_names, body)?; format!("@{} {}", name, body) @@ -58,7 +58,7 @@ pub fn to_kdl_term(kdl_names: &HashMap, term: &CompTerm) -> Resu Ok(term) } -pub fn to_kdl_rule(_book: &Book, kdl_names: &HashMap, rule: &CompRule) -> Result { +pub fn to_kdl_rule(_book: &Book, kdl_names: &HashMap, rule: &CompRule) -> Result { let name = &rule.name; let kdl_name = kdl_names.get(name).unwrap(); let mut pats = vec![]; // stringified pattern args @@ -72,43 +72,36 @@ pub fn to_kdl_rule(_book: &Book, kdl_names: &HashMap, rule: &Com Ok(rule) } -pub fn to_kdl_entry(book: &Book, kdl_names: &HashMap, entry: &CompEntry) -> Result { - let entry = match entry.name.as_str() { - _ => { - let kdl_name = kdl_names.get(&entry.name).unwrap(); - let args_names = entry.args.iter().map(|arg| format!(" {}", arg)).collect::(); - // If this entry existed in the original kind code, add some annotations as comments - let kind_entry = book.entrs.get(&Ident(entry.name.clone())); - let is_knd_ent = matches!(kind_entry, Some(_)); - let cmnt = if is_knd_ent { - let kind_entry = kind_entry.unwrap(); - let args_typed = kind_entry - .args - .iter() - .map(|arg| format!(" {}({}: {})", if arg.eras { "-" } else { "" }, arg.name, &arg.tipo)) - .collect::(); - format!("// {}{} : {}\n", entry.name, args_typed, &kind_entry.tipo) - } else { - String::new() - }; - // Entries with no rules become constructors - // Entries with rules become functions - let fun = if entry.rules.is_empty() { - format!("ctr {{{}{}}}\n\n", kdl_name, args_names) - } else { - let mut rules = vec![]; - for rule in &entry.rules { - rules.push(format!("\n {}", to_kdl_rule(book, kdl_names, rule)?)); - } - format!("fun ({}{}) {{{}\n}}\n\n", kdl_name, args_names, rules.join("")) - }; - cmnt + &fun - } +pub fn to_kdl_entry(book: &Book, kdl_names: &HashMap, entry: &CompEntry) -> Result { + let kdl_name = kdl_names.get(&entry.name).unwrap(); + let args_names = entry.args.iter().map(|arg| format!(" {}", arg)).collect::(); + // If this entry existed in the original kind code, add some annotations as comments + let cmnt = if let Some(kind_entry) = book.entrs.get(&entry.name) { + let args_typed = kind_entry + .args + .iter() + .map(|arg| 
format!(" {}({}: {})", if arg.eras { "-" } else { "" }, arg.name, &arg.tipo)) + .collect::(); + format!("// {}{} : {}\n", entry.name, args_typed, &kind_entry.tipo) + } else { + String::new() }; + // Entries with no rules become constructors + // Entries with rules become functions + let fun = if entry.rules.is_empty() { + format!("ctr {{{}{}}}\n\n", kdl_name, args_names) + } else { + let mut rules = vec![]; + for rule in &entry.rules { + rules.push(format!("\n {}", to_kdl_rule(book, kdl_names, rule)?)); + } + format!("fun ({}{}) {{{}\n}}\n\n", kdl_name, args_names, rules.join("")) + }; + let entry = cmnt + &fun; Ok(entry) } -pub fn to_kdl_book(book: &Book, kdl_names: &HashMap, comp_book: &CompBook) -> Result { +pub fn to_kdl_book(book: &Book, kdl_names: &HashMap, comp_book: &CompBook) -> Result { let mut lines = vec![]; let mut run = String::new(); for name in &comp_book.names { @@ -133,30 +126,30 @@ pub fn to_kdl_book(book: &Book, kdl_names: &HashMap, comp_book: // Returns a map of kind names to kindelia names // Returns an err if any of the names can't be converted -pub fn get_kdl_names(book: &CompBook, namespace: &Option) -> Result, String> { +pub fn get_kdl_names(book: &CompBook, namespace: &Option) -> Result, String> { // Fits a name to the max size allowed by kindelia. // If the name is too large, truncates and replaces the last characters by random chars. - fn rand_shorten(name: &String, ns: &str) -> String { + fn rand_shorten(name: &Ident, ns: &str) -> Ident { let max_fn_name = KDL_NAME_LEN - ns.len(); // If the name doesn't fit, truncate and insert some random characters at the end let name = if name.len() > max_fn_name { let n_rnd_chrs = usize::min(3, max_fn_name); - let name_cut = name[..max_fn_name - n_rnd_chrs].to_string(); + let name_cut = name.0[..max_fn_name - n_rnd_chrs].to_string(); let mut rng = rand::thread_rng(); let rnd_chrs = (0..n_rnd_chrs).map(|_| rng.gen_range(0..63)).map(encode_base64).collect::(); - format!("{}{}", name_cut, rnd_chrs) + Ident(format!("{}{}", name_cut, rnd_chrs)) } else { name.clone() }; - format!("{}{}", ns, name) + Ident(format!("{}{}", ns, name)) } - fn get_kdl_name(entry: &CompEntry, ns: &str) -> Result { + fn get_kdl_name(entry: &CompEntry, ns: &str) -> Result { let kind_name = &entry.name; // If the entry uses a kindelia name, use it let kdln = if let Some(kdln_attr) = entry.get_attribute("kdl_name") { - let kdln = kdln_attr.value.unwrap().0; - if !kdln.chars().next().unwrap().is_uppercase() { + let kdln = kdln_attr.value.unwrap(); + if !kdln.0.chars().next().unwrap().is_uppercase() { let err = format!("Kindelia name \"{}\" doesn't start with an uppercase letter.", kdln); return Err(err); } @@ -169,7 +162,7 @@ pub fn get_kdl_names(book: &CompBook, namespace: &Option) -> Result) -> Result, }, App { @@ -20,22 +20,22 @@ pub enum CompTerm { argm: Box, }, Dup { - nam0: String, - nam1: String, + nam0: Ident, + nam1: Ident, expr: Box, body: Box, }, Let { - name: String, + name: Ident, expr: Box, body: Box, }, Ctr { - name: String, + name: Ident, args: Vec>, }, Fun { - name: String, + name: Ident, args: Vec>, }, Num { @@ -51,15 +51,15 @@ pub enum CompTerm { #[derive(Clone, Debug)] pub struct CompRule { - pub name: String, + pub name: Ident, pub pats: Vec>, pub body: Box, } #[derive(Clone, Debug)] pub struct CompEntry { - pub name: String, - pub args: Vec, + pub name: Ident, + pub args: Vec, pub rules: Vec, pub attrs: Vec, pub orig: bool, @@ -67,8 +67,8 @@ pub struct CompEntry { #[derive(Clone, Debug)] pub struct CompBook { - pub names: Vec, - pub 
entrs: HashMap, + pub names: Vec, + pub entrs: HashMap, } impl CompEntry { @@ -88,17 +88,17 @@ pub fn compile_book(book: &Book) -> Result { entrs: HashMap::new(), }; for name in &book.names { - let entry = book.entrs.get(&Ident(name.clone())).unwrap(); + let entry = book.entrs.get(name).unwrap(); // Don't compile primitive U120 operations // TODO: If this compiler eventually gets used for other targets (like HVM), this will need to be separated. // We could do passes of compiler features (like flattening, linearizing, etc) also separately. - if u120_to_oper(&entry.name.0).is_some() { + if u120_to_oper(&entry.name).is_some() { continue; } // Skip over useless entries // TODO: This doesn't cover all cases. We need something like `erase` but for a Book. // Also maybe there are functions of type Type that should be compiled? - if let Term::Typ { orig: _ } = &*entry.tipo { + if let Term::Typ { .. } = &*entry.tipo { continue; } // TODO: Group errors for all entries @@ -114,7 +114,7 @@ pub fn compile_book(book: &Book) -> Result { // Can become multiple entries after flatenning pub fn compile_entry(book: &Book, entry: &Entry) -> Result, String> { fn compile_rule(book: &Book, entry: &Entry, rule: &Rule) -> Result { - let name = rule.name.0.clone(); + let name = rule.name.clone(); let mut pats = Vec::new(); for (arg, pat) in entry.args.iter().zip(rule.pats.iter()) { if !arg.eras { @@ -138,23 +138,26 @@ pub fn compile_entry(book: &Book, entry: &Entry) -> Result, Strin fn make_u120_new(old_entry: &Entry) -> CompEntry { // U120.new hi lo = (+ (<< hi 60) (>> (<< lo 60) 60)) CompEntry { - name: "U120.new".to_string(), - args: vec!["hi".to_string(), "lo".to_string()], + name: Ident::new("U120.new"), + args: vec![Ident::new("hi"), Ident::new("lo")], rules: vec![CompRule { - name: "U120.new".to_string(), - pats: vec![Box::new(CompTerm::Var { name: "hi".to_string() }), Box::new(CompTerm::Var { name: "lo".to_string() })], + name: Ident::new("U120.new"), + pats: vec![ + Box::new(CompTerm::Var { name: Ident::new("hi") }), + Box::new(CompTerm::Var { name: Ident::new("lo") }) + ], body: Box::new(CompTerm::Op2 { oper: Operator::Add, val0: Box::new(CompTerm::Op2 { oper: Operator::Shl, - val0: Box::new(CompTerm::Var { name: "hi".to_string() }), + val0: Box::new(CompTerm::Var { name: Ident::new("hi") }), val1: Box::new(CompTerm::Num { numb: 60 }), }), val1: Box::new(CompTerm::Op2 { oper: Operator::Shr, val0: Box::new(CompTerm::Op2 { oper: Operator::Shl, - val0: Box::new(CompTerm::Var { name: "lo".to_string() }), + val0: Box::new(CompTerm::Var { name: Ident::new("lo") }), val1: Box::new(CompTerm::Num { numb: 60 }), }), val1: Box::new(CompTerm::Num { numb: 60 }), @@ -169,16 +172,16 @@ pub fn compile_entry(book: &Book, entry: &Entry) -> Result, Strin fn make_u120_low(old_entry: &Entry) -> CompEntry { // U120.low n = (>> (<< n 60) 60)) CompEntry { - name: "U120.low".to_string(), - args: vec!["n".to_string()], + name: Ident::new("U120.low"), + args: vec![Ident::new("n")], rules: vec![CompRule { - name: "U120.low".to_string(), - pats: vec![Box::new(CompTerm::Var { name: "n".to_string() })], + name: Ident::new("U120.low"), + pats: vec![Box::new(CompTerm::Var { name: Ident::new("n") })], body: Box::new(CompTerm::Op2 { oper: Operator::Shr, val0: Box::new(CompTerm::Op2 { oper: Operator::Shl, - val0: Box::new(CompTerm::Var { name: "n".to_string() }), + val0: Box::new(CompTerm::Var { name: Ident::new("n") }), val1: Box::new(CompTerm::Num { numb: 60 }), }), val1: Box::new(CompTerm::Num { numb: 60 }), @@ -192,14 +195,14 @@ pub fn 
compile_entry(book: &Book, entry: &Entry) -> Result, Strin fn make_u120_high(old_entry: &Entry) -> CompEntry { // U120.high n = (>> n 60) CompEntry { - name: "U120.high".to_string(), - args: vec!["n".to_string()], + name: Ident::new("U120.high"), + args: vec![Ident::new("n")], rules: vec![CompRule { - name: "U120.high".to_string(), - pats: vec![Box::new(CompTerm::Var { name: "n".to_string() })], + name: Ident::new("U120.high"), + pats: vec![Box::new(CompTerm::Var { name: Ident::new("n") })], body: Box::new(CompTerm::Op2 { oper: Operator::Shr, - val0: Box::new(CompTerm::Var { name: "n".to_string() }), + val0: Box::new(CompTerm::Var { name: Ident::new("n") }), val1: Box::new(CompTerm::Num { numb: 60 }), }), }], @@ -217,8 +220,8 @@ pub fn compile_entry(book: &Book, entry: &Entry) -> Result, Strin // high and low are used for type compatibility with u60 "U120.low" => Ok(vec![make_u120_low(&entry)]), _ => { - let name = entry.name.0.clone(); - let args = entry.args.iter().filter(|x| !x.eras).map(|x| x.name.0.clone()).collect(); + let name = entry.name.clone(); + let args = entry.args.iter().filter(|x| !x.eras).map(|x| x.name.clone()).collect(); // TODO: Group all errs together instead of failing on the first one let rules = entry.rules.iter().map(|rule| compile_rule(book, entry, rule)).collect::, String>>()?; let attrs = entry.attrs.clone(); @@ -321,7 +324,7 @@ pub fn flatten(entry: CompEntry) -> Vec { // Each rule that must be split creates a new entry that inspects one layer of Ctrs // The old rule is rewritten to be flat and call the new entry let n = post_inc(name_count); - let new_entry_name = format!("{}{}_", entry.name, n); + let new_entry_name = Ident(format!("{}{}_", entry.name, n)); let mut new_entry_attrs = entry.attrs.clone(); // If the old rule had a kdl name, create a new kdl name for the split entry for attr in &mut new_entry_attrs { @@ -344,7 +347,7 @@ pub fn flatten(entry: CompEntry) -> Vec { for field in pat_args { let arg = match &**field { CompTerm::Ctr { .. } | CompTerm::Num { .. } => { - let name = format!(".{}", post_inc(&mut var_count)); + let name = Ident(format!(".{}", post_inc(&mut var_count))); Box::new(CompTerm::Var { name }) } CompTerm::Var { .. 
} => field.clone(), @@ -420,7 +423,7 @@ pub fn flatten(entry: CompEntry) -> Vec { let mut new_ctr_args = vec![]; for _ in 0..rule_pat_args.len() { let new_arg = CompTerm::Var { - name: format!(".{}", post_inc(&mut var_count)), + name: Ident(format!(".{}", post_inc(&mut var_count))), }; new_ctr_args.push(Box::new(new_arg.clone())); new_rule_pats.push(Box::new(new_arg)); @@ -459,7 +462,7 @@ pub fn flatten(entry: CompEntry) -> Vec { } } assert!(!new_entry_rules.is_empty()); // There's at least one rule, since rules always match with themselves - let new_entry_args = (0..new_entry_rules[0].pats.len()).map(|n| format!("x{}", n)).collect(); + let new_entry_args = (0..new_entry_rules[0].pats.len()).map(|n| Ident(format!("x{}", n))).collect(); let new_entry = CompEntry { name: new_entry_name, args: new_entry_args, @@ -476,7 +479,6 @@ pub fn flatten(entry: CompEntry) -> Vec { let mut skip: HashSet = HashSet::new(); let mut new_entries: Vec = Vec::new(); let mut old_entry_rules: Vec = Vec::new(); - let old_entry_args: Vec = entry.args.clone(); for i in 0..entry.rules.len() { if !skip.contains(&i) { let rule = &entry.rules[i]; @@ -491,7 +493,7 @@ pub fn flatten(entry: CompEntry) -> Vec { } let old_entry = CompEntry { name: entry.name, - args: old_entry_args, + args: entry.args, rules: old_entry_rules, orig: entry.orig, attrs: entry.attrs, @@ -501,7 +503,7 @@ pub fn flatten(entry: CompEntry) -> Vec { } // Substitute all instances of a variable in a term with another term -pub fn subst(term: &mut CompTerm, sub_name: &str, value: &CompTerm) { +pub fn subst(term: &mut CompTerm, sub_name: &Ident, value: &CompTerm) { match term { CompTerm::Var { name } => { if sub_name == name { @@ -552,16 +554,16 @@ pub fn subst(term: &mut CompTerm, sub_name: &str, value: &CompTerm) { pub fn erase(book: &Book, term: &Term) -> Box { match term { Term::Typ { .. } => Box::new(CompTerm::Nil), - Term::Var { orig: _, name } => { - let name = name.0.clone(); + Term::Var { name, .. } => { + let name = name.clone(); Box::new(CompTerm::Var { name }) } - Term::Lam { orig: _, name, body } => { - let name = name.0.clone(); + Term::Lam { name, body, .. } => { + let name = name.clone(); let body = erase(book, body); Box::new(CompTerm::Lam { name, body }) } - Term::App { orig: _, func, argm } => { + Term::App { func, argm, .. } => { let func = erase(book, func); let argm = erase(book, argm); Box::new(CompTerm::App { func, argm }) @@ -572,23 +574,17 @@ pub fn erase(book: &Book, term: &Term) -> Box { tipo: _, body: _, } => Box::new(CompTerm::Nil), - Term::Let { orig: _, name, expr, body } => { - let name = name.0.clone(); + Term::Let { name, expr, body, .. } => { + let name = name.clone(); let expr = erase(book, expr); let body = erase(book, body); Box::new(CompTerm::Let { name, expr, body }) } - Term::Ann { orig: _, expr, tipo: _ } => erase(book, expr), - Term::Sub { - orig: _, - expr, - name: _, - indx: _, - redx: _, - } => erase(book, expr), - Term::Ctr { orig: _, name, args: term_args } => { - let name = name.0.clone(); - let entr = book.entrs.get(&Ident(name.clone())).unwrap(); + Term::Ann { expr, .. } => erase(book, expr), + Term::Sub { expr, .. } => erase(book, expr), + Term::Ctr { name, args: term_args, .. 
} => { + let name = name.clone(); + let entr = book.entrs.get(&name).unwrap(); let mut args = vec![]; for (idx, arg) in term_args.iter().enumerate() { if !entr.args[idx].eras { @@ -597,9 +593,9 @@ pub fn erase(book: &Book, term: &Term) -> Box { } Box::new(CompTerm::Ctr { name, args }) } - Term::Fun { orig: _, name, args: term_args } => { - let name = name.0.clone(); - let entr = book.entrs.get(&Ident(name.clone())).unwrap(); + Term::Fun { name, args: term_args, .. } => { + let name = name.clone(); + let entr = book.entrs.get(&name).unwrap(); let mut args = vec![]; for (idx, arg) in term_args.iter().enumerate() { if !entr.args[idx].eras { @@ -608,26 +604,26 @@ pub fn erase(book: &Book, term: &Term) -> Box { } Box::new(CompTerm::Fun { name, args }) } - Term::Hlp { orig: _ } => Box::new(CompTerm::Nil), - Term::U60 { orig: _ } => Box::new(CompTerm::Nil), - Term::Num { orig: _, numb } => { + Term::Hlp { .. } => Box::new(CompTerm::Nil), + Term::U60 { .. } => Box::new(CompTerm::Nil), + Term::Num { numb, .. } => { let numb = *numb as u128; Box::new(CompTerm::Num { numb }) } - Term::Op2 { orig: _, oper, val0, val1 } => { + Term::Op2 { oper, val0, val1, .. } => { let oper = *oper; let val0 = erase(book, val0); let val1 = erase(book, val1); Box::new(CompTerm::Op2 { oper, val0, val1 }) } - Term::Hol { orig: _, numb: _ } => Box::new(CompTerm::Nil), + Term::Hol { .. } => Box::new(CompTerm::Nil), Term::Mat { .. } => Box::new(CompTerm::Nil), Term::Open { .. } => Box::new(CompTerm::Nil), } } // Counts usages of a name in an erased term -pub fn count_uses(term: &CompTerm, count_name: &str) -> usize { +pub fn count_uses(term: &CompTerm, count_name: &Ident) -> usize { match term { CompTerm::Var { name } => { if name == count_name { @@ -654,28 +650,28 @@ pub fn count_uses(term: &CompTerm, count_name: &str) -> usize { let body_count = if name == count_name { 0 } else { count_uses(body, count_name) }; expr_count + body_count } - CompTerm::Ctr { name: _, args } => { + CompTerm::Ctr { args, .. } => { let mut sum = 0; for arg in args { sum += count_uses(arg, count_name); } sum } - CompTerm::Fun { name: _, args } => { + CompTerm::Fun { args, .. } => { let mut sum = 0; for arg in args { sum += count_uses(arg, count_name); } sum } - CompTerm::Op2 { oper: _, val0, val1 } => count_uses(val0, count_name) + count_uses(val1, count_name), + CompTerm::Op2 { val0, val1, .. } => count_uses(val0, count_name) + count_uses(val1, count_name), CompTerm::Num { .. } => 0, CompTerm::Nil => 0, } } // Renames a target variable using the fresh names in a vector -pub fn rename_clones(term: &mut CompTerm, target: &str, names: &mut Vec) { +pub fn rename_clones(term: &mut CompTerm, target: &Ident, names: &mut Vec) { match term { CompTerm::Var { name } => { if name == target { @@ -703,17 +699,17 @@ pub fn rename_clones(term: &mut CompTerm, target: &str, names: &mut Vec) rename_clones(body, target, names); } } - CompTerm::Ctr { name: _, args } => { + CompTerm::Ctr { args, .. } => { for arg in args { rename_clones(arg, target, names); } } - CompTerm::Fun { name: _, args } => { + CompTerm::Fun { args, .. } => { for arg in args { rename_clones(arg, target, names); } } - CompTerm::Op2 { oper: _, val0, val1 } => { + CompTerm::Op2 { val0, val1, .. 
} => { rename_clones(val0, target, names); rename_clones(val1, target, names); } @@ -724,7 +720,7 @@ pub fn rename_clones(term: &mut CompTerm, target: &str, names: &mut Vec) pub fn linearize_rule(rule: &mut CompRule) { // Returns left-hand side variables - fn collect_lhs_vars<'a>(term: &'a mut CompTerm, vars: &mut HashMap) { + fn collect_lhs_vars<'a>(term: &'a mut CompTerm, vars: &mut HashMap) { match term { CompTerm::Var { name } => { vars.insert(name.clone(), term); @@ -749,11 +745,11 @@ pub fn linearize_rule(rule: &mut CompRule) { // ---------------------------------------------------------------- // dup x0 x1 = x; dup x2 x3 = x0; dup x4 x5 = x1; (Foo x2 x3 x4 x5) // Returns the number of times the variable was used in the body. - pub fn linearize_name(body: &mut CompTerm, name: &mut String, fresh: &mut u64) -> usize { - fn fresh_name(fresh: &mut u64) -> String { + pub fn linearize_name(body: &mut CompTerm, name: &mut Ident, fresh: &mut u64) -> usize { + fn fresh_name(fresh: &mut u64) -> Ident { let name = format!("_{}", fresh); *fresh += 1; - name + Ident(name) } let uses = count_uses(body, name); if uses > 1 { @@ -771,7 +767,7 @@ pub fn linearize_rule(rule: &mut CompRule) { let nam0 = names[i * 2].clone(); let nam1 = names[i * 2 + 1].clone(); let expr = Box::new(CompTerm::Var { - name: if i == 0 { name.to_string() } else { names[i - 1].clone() }, + name: if i == 0 { name.clone() } else { names[i - 1].clone() }, }); let new_body = CompTerm::Dup { nam0, @@ -785,7 +781,7 @@ pub fn linearize_rule(rule: &mut CompRule) { } } } else if uses == 0 { - *name = String::from("~") + *name = Ident::new("~"); } uses } @@ -852,7 +848,7 @@ pub fn linearize_rule(rule: &mut CompRule) { let uses = linearize_name(&mut rule.body, &mut name, &mut fresh); if uses == 0 { if let CompTerm::Var { name } = var { - *name = String::from("~"); + *name = Ident::new("~"); } } // The reason why we don't simply pass a real mutable reference to our variable @@ -885,7 +881,7 @@ pub fn convert_u120_term(term: &CompTerm, rhs: bool) -> Result, St let term = Box::new(match term { // Swap U120.new by a number CompTerm::Ctr { name, args } => { - if name == "U120.new" { + if name.0 == "U120.new" { if let (CompTerm::Num { numb: num1 }, CompTerm::Num { numb: num2 }) = (&*args[0], &*args[1]) { CompTerm::Num { numb: (num1 << 60) + num2 } } else if rhs { @@ -950,8 +946,8 @@ pub fn convert_u120_term(term: &CompTerm, rhs: bool) -> Result, St // Converts a U120 function name to the corresponding primitive operation // None if the name is not of an operation -pub fn u120_to_oper(name: &str) -> Option { - match name { +pub fn u120_to_oper(name: &Ident) -> Option { + match name.0.as_str() { "U120.add" => Some(Operator::Add), "U120.sub" => Some(Operator::Sub), "U120.mul" => Some(Operator::Mul), diff --git a/src/driver/loader.rs b/src/driver/loader.rs index df68a5a3..ead818cc 100644 --- a/src/driver/loader.rs +++ b/src/driver/loader.rs @@ -119,7 +119,7 @@ pub fn load_entry(config: &Config, name: &str, load: &mut Load) -> Result<(), St load.file.push(file); for name in &new_book.names { load.book.names.push(name.clone()); - load.book.entrs.insert(Ident(name.clone()), new_book.entrs.get(&Ident(name.to_string())).unwrap().clone()); + load.book.entrs.insert(name.clone(), new_book.entrs.get(&name).unwrap().clone()); } for unbound in &new_book.get_unbounds(config) { diff --git a/src/lowering.rs b/src/lowering.rs index f6865d36..e73624a4 100644 --- a/src/lowering.rs +++ b/src/lowering.rs @@ -206,7 +206,7 @@ impl Book { pub fn get_unbounds(&self, 
config: &Config) -> HashSet { let mut state = UnboundState::new(HashMap::new(), config); for name in &self.names { - let entry = self.entrs.get(&Ident(name.clone())).unwrap(); + let entry = self.entrs.get(&name).unwrap(); entry.fill_unbound(false, &mut state); } state.unbound diff --git a/src/lowering/adjust.rs b/src/lowering/adjust.rs index 7400b750..fe169666 100644 --- a/src/lowering/adjust.rs +++ b/src/lowering/adjust.rs @@ -561,10 +561,9 @@ impl Book { let mut state = AdjustState::new(self, config); for name in &self.names { - let ident = Ident(name.clone()); - let entry = self.entrs.get(&ident).unwrap(); + let entry = self.entrs.get(&name).unwrap(); names.push(name.clone()); - entrs.insert(ident, Box::new(entry.adjust(false, &mut state)?)); + entrs.insert(name.clone(), Box::new(entry.adjust(false, &mut state)?)); } Ok(Book { names, entrs, holes: state.holes }) diff --git a/src/lowering/resolve.rs b/src/lowering/resolve.rs index d4311bfc..3d9cc26a 100644 --- a/src/lowering/resolve.rs +++ b/src/lowering/resolve.rs @@ -168,9 +168,9 @@ impl Resolve for Book { } // Just to change the order of each name. for name in &self.names { - let mut new_name = Ident(name.clone()); + let mut new_name = name.clone(); new_name.resolve(current, Span::Generated, map)?; - new_names.push(new_name.0); + new_names.push(new_name); } self.entrs = new_entrs; self.names = new_names; diff --git a/src/parser.rs b/src/parser.rs index 8cab5580..adfffee2 100644 --- a/src/parser.rs +++ b/src/parser.rs @@ -212,7 +212,7 @@ pub fn parse_book(state: parser::State) -> parser::Answer<(Box, HashMap Date: Wed, 5 Oct 2022 21:44:02 +0200 Subject: [PATCH 04/17] Add kdl_state attribute for initial state for kindelia function --- src/codegen/kdl.rs | 36 +++++++++-- src/driver/loader.rs | 4 ++ src/lowering/adjust.rs | 4 ++ src/lowering/attributes.rs | 70 ++++++++++++++++++++-- tests/suite/to_kdl/fun_with_state.golden | 7 +++ tests/suite/to_kdl/fun_with_state.kind2 | 8 +++ tests/suite/to_kdl/non_inline_state.golden | 1 + tests/suite/to_kdl/non_inline_state.kind2 | 6 ++ tests/suite/to_kdl/state_with_args.golden | 1 + tests/suite/to_kdl/state_with_args.kind2 | 7 +++ tests/suite/to_kdl/state_with_attr.golden | 1 + tests/suite/to_kdl/state_with_attr.kind2 | 9 +++ 12 files changed, 143 insertions(+), 11 deletions(-) create mode 100644 tests/suite/to_kdl/fun_with_state.golden create mode 100644 tests/suite/to_kdl/fun_with_state.kind2 create mode 100644 tests/suite/to_kdl/non_inline_state.golden create mode 100644 tests/suite/to_kdl/non_inline_state.kind2 create mode 100644 tests/suite/to_kdl/state_with_args.golden create mode 100644 tests/suite/to_kdl/state_with_args.kind2 create mode 100644 tests/suite/to_kdl/state_with_attr.golden create mode 100644 tests/suite/to_kdl/state_with_attr.kind2 diff --git a/src/codegen/kdl.rs b/src/codegen/kdl.rs index b4472c3e..b24dcf19 100644 --- a/src/codegen/kdl.rs +++ b/src/codegen/kdl.rs @@ -5,7 +5,7 @@ use crate::book::Book; pub use crate::codegen::kdl::book::*; use rand::Rng; -use std::collections::HashMap; +use std::collections::{HashMap, HashSet}; pub const KDL_NAME_LEN: usize = 12; @@ -58,7 +58,7 @@ pub fn to_kdl_term(kdl_names: &HashMap, term: &CompTerm) -> Result Ok(term) } -pub fn to_kdl_rule(_book: &Book, kdl_names: &HashMap, rule: &CompRule) -> Result { +pub fn to_kdl_rule(kdl_names: &HashMap, rule: &CompRule) -> Result { let name = &rule.name; let kdl_name = kdl_names.get(name).unwrap(); let mut pats = vec![]; // stringified pattern args @@ -72,7 +72,7 @@ pub fn to_kdl_rule(_book: &Book, 
kdl_names: &HashMap, rule: &CompR Ok(rule) } -pub fn to_kdl_entry(book: &Book, kdl_names: &HashMap, entry: &CompEntry) -> Result { +pub fn to_kdl_entry(book: &Book, kdl_names: &HashMap, comp_book: &CompBook, entry: &CompEntry) -> Result { let kdl_name = kdl_names.get(&entry.name).unwrap(); let args_names = entry.args.iter().map(|arg| format!(" {}", arg)).collect::(); // If this entry existed in the original kind code, add some annotations as comments @@ -93,9 +93,22 @@ pub fn to_kdl_entry(book: &Book, kdl_names: &HashMap, entry: &Comp } else { let mut rules = vec![]; for rule in &entry.rules { - rules.push(format!("\n {}", to_kdl_rule(book, kdl_names, rule)?)); + rules.push(format!("\n {}", to_kdl_rule(kdl_names, rule)?)); + } + match entry.get_attribute("kdl_state") { + // If the function has an initial state, compile the state together with it + Some(attr) => { + let state_fn_name = attr.value.unwrap(); + let state_fn = comp_book.entrs.get(&state_fn_name).ok_or(format!("Initial state function \"{}\" for function \"{}\" not found.", state_fn_name, entry.name))?; + let state_term = state_fn.rules[0].body.clone(); // This is checked when validating the attributes + let init_state = to_kdl_term(kdl_names, &*state_term)?; + format!("fun ({}{}) {{{}\n}} with {{\n {}\n}}\n\n", kdl_name, args_names, rules.join(""), init_state) + } + // Otherwise just compile the function as normal + None => { + format!("fun ({}{}) {{{}\n}}\n\n", kdl_name, args_names, rules.join("")) + } } - format!("fun ({}{}) {{{}\n}}\n\n", kdl_name, args_names, rules.join("")) }; let entry = cmnt + &fun; Ok(entry) @@ -104,6 +117,13 @@ pub fn to_kdl_entry(book: &Book, kdl_names: &HashMap, entry: &Comp pub fn to_kdl_book(book: &Book, kdl_names: &HashMap, comp_book: &CompBook) -> Result { let mut lines = vec![]; let mut run = String::new(); + let mut init_funs: HashSet = HashSet::new(); // Functions that are the initial state to some other function + for name in &comp_book.names { + let entry = comp_book.entrs.get(name).unwrap(); + if let Some(attr) = entry.get_attribute("kdl_state") { + init_funs.insert(attr.value.unwrap()); + } + } for name in &comp_book.names { let entry = comp_book.entrs.get(name).unwrap(); // Functions with attribute "kdl_erase" are not compiled @@ -116,7 +136,11 @@ pub fn to_kdl_book(book: &Book, kdl_names: &HashMap, comp_book: &C run.push_str(&stmnt); continue; } - lines.push(to_kdl_entry(book, kdl_names, entry)?); + // Initial state for functions is compiled by the function itself + if init_funs.contains(name) { + continue; + } + lines.push(to_kdl_entry(book, kdl_names, comp_book, entry)?); } Ok(lines.join("") + &run) } diff --git a/src/driver/loader.rs b/src/driver/loader.rs index ead818cc..6a16fe0d 100644 --- a/src/driver/loader.rs +++ b/src/driver/loader.rs @@ -53,6 +53,10 @@ pub fn render_error(config: &Config, files: &[File], err: AdjustError) -> String AdjustErrorKind::AttributeWithoutArgs { name } => format!("You should not put arguments on the attribute '{}'.\n{}", name, high_line), AdjustErrorKind::AttributeMissingArg { name } => format!("Attribute '{}' needs to be given a value.\n{}", name, high_line), AdjustErrorKind::WrongTargetAttribute { name, target } => format!("The attribute '{}' only works in the target '{}'.\n{}", name, target, high_line), + AdjustErrorKind::NotInlineable { fn_name, attr_name } => format!("Function '{}' must have exactly one rule with only variable patterns to be '{}'.\n{}", fn_name, attr_name, high_line), + AdjustErrorKind::FunctionHasArgs { fn_name, attr_name } => 
format!("Function '{}' must not have any arguments to be '{}'.\n{}", fn_name, attr_name, high_line), + AdjustErrorKind::FunctionNotFound { name } => format!("Function '{}' was not found.\n{}", name, high_line), + AdjustErrorKind::HasKdlAttrs { name } => format!("Function '{}' must not have any kdl attributes.\n{}", name, high_line), }; } diff --git a/src/lowering/adjust.rs b/src/lowering/adjust.rs index fe169666..fbe95efb 100644 --- a/src/lowering/adjust.rs +++ b/src/lowering/adjust.rs @@ -24,6 +24,10 @@ pub enum AdjustErrorKind { AttributeWithoutArgs { name: String }, AttributeMissingArg { name: String }, WrongTargetAttribute { name: String, target: Target }, + NotInlineable { fn_name: String, attr_name: String }, + FunctionHasArgs { fn_name: String, attr_name: String }, + FunctionNotFound { name: String }, + HasKdlAttrs { name: String }, UseOpenInstead, UseMatchInstead, RepeatedVariable, diff --git a/src/lowering/attributes.rs b/src/lowering/attributes.rs index 3b82eec5..6bc65729 100644 --- a/src/lowering/attributes.rs +++ b/src/lowering/attributes.rs @@ -1,4 +1,4 @@ -use crate::book::{span::Span, Attribute, Book, Entry}; +use crate::book::{span::{Span, Localized}, Attribute, Book, Entry, term::Term, name::Ident}; use crate::driver::config::{Config, Target}; use super::adjust::{AdjustError, AdjustErrorKind}; @@ -9,6 +9,7 @@ pub fn adjust_err(orig: Span, kind: AdjustErrorKind) -> Result Result<(), AdjustError> { match &attr.value { Some(_) => adjust_err(attr.orig, AdjustErrorKind::AttributeWithoutArgs { name: attr.name.0.clone() }), @@ -16,6 +17,7 @@ pub fn without_args(attr: &Attribute) -> Result<(), AdjustError> { } } +// Checks that an attribute has args pub fn with_args(attr: &Attribute) -> Result<(), AdjustError> { match &attr.value { Some(_) => Ok(()), @@ -23,6 +25,7 @@ pub fn with_args(attr: &Attribute) -> Result<(), AdjustError> { } } +// Checks that the function is being processed in the correct target pub fn only_target(config: &Config, attr: &Attribute, target: Target) -> Result<(), AdjustError> { if config.target == target || config.target == Target::All { Ok(()) @@ -31,11 +34,59 @@ pub fn only_target(config: &Config, attr: &Attribute, target: Target) -> Result< } } +// Checks that the function can be inlined +// A function is inlineable if it has only one rule and all its patterns are variables +pub fn is_inlineable(entry: &Entry, attr: &Attribute) -> Result<(), AdjustError> { + if entry.rules.len() != 1 { + let fn_name = entry.name.0.clone(); + let attr_name = attr.name.0.clone(); + adjust_err(entry.orig, AdjustErrorKind::NotInlineable { fn_name, attr_name } ) + } else { + for pat in &entry.rules[0].pats { + if !matches!(&**pat, Term::Var { .. 
}) { + let fn_name = entry.name.0.clone(); + let attr_name = attr.name.0.clone(); + return adjust_err((&**pat).get_origin(), AdjustErrorKind::NotInlineable { fn_name, attr_name } ); + } + } + Ok(()) + } +} + +// Checks that a function has no args +pub fn no_fn_args(entry: &Entry, attr: &Attribute) -> Result<(), AdjustError> { + if let Some(arg) = entry.args.iter().filter(|x| !x.eras).next() { + let fn_name = entry.name.0.clone(); + let attr_name = attr.name.0.clone(); + adjust_err(arg.orig, AdjustErrorKind::FunctionHasArgs { fn_name, attr_name }) + } else { + Ok(()) + } +} + +pub fn fn_exists<'a>(book: &'a Book, attr: &Attribute, entry_name: &Ident) -> Result<&'a Entry, AdjustError> { + if let Some(entry) = book.entrs.get(entry_name) { + Ok(entry) + } else { + adjust_err(attr.orig, AdjustErrorKind::FunctionNotFound { name: entry_name.0.clone() }) + } +} + +pub fn no_kdl_attrs(entry: &Entry) -> Result<(), AdjustError> { + let kdl_attrs = ["kdl_erase", "kdl_run", "kdl_name", "kdl_state"]; + for attr_name in kdl_attrs { + if let Some(attr) = entry.get_attribute(attr_name) { + return adjust_err(attr.orig, AdjustErrorKind::HasKdlAttrs { name: entry.name.0.clone() }); + } + } + Ok(()) +} + // Main functions // Attributes are just for compiler magic so // they have no specification so we should check then. -pub fn check_attribute(config: &Config, attr: &Attribute) -> Result<(), AdjustError> { +pub fn check_attribute(config: &Config, book: &Book, attr: &Attribute) -> Result<(), AdjustError> { match attr.name.0.as_str() { "kdl_erase" => without_args(attr), "kdl_run" => { @@ -43,22 +94,31 @@ pub fn check_attribute(config: &Config, attr: &Attribute) -> Result<(), AdjustEr only_target(config, attr, Target::Kdl) } "kdl_name" => with_args(attr), + "kdl_state" => { + with_args(attr)?; + // TODO: The state function shouldnt be called anywhere + // TODO: We need to put this function in the book even though its not called anywhere + let state_fn = fn_exists(book, attr, attr.value.as_ref().unwrap())?; + no_kdl_attrs(state_fn)?; + no_fn_args(state_fn, attr)?; + is_inlineable(state_fn, attr) + } _ => adjust_err(attr.orig, AdjustErrorKind::InvalidAttribute { name: attr.name.0.clone() }), } } // Just checks all the attributes before they're expanded // in the other parts of the code. -pub fn check_entry_attributes(config: &Config, entry: &Entry) -> Result<(), AdjustError> { +pub fn check_entry_attributes(config: &Config, book: &Book, entry: &Entry) -> Result<(), AdjustError> { for attr in &entry.attrs { - check_attribute(config, attr)? + check_attribute(config, book, attr)? 
} Ok(()) } pub fn check_attributes(config: &Config, book: &Book) -> Result<(), AdjustError> { for entry in book.entrs.values() { - check_entry_attributes(config, entry)?; + check_entry_attributes(config, book, entry)?; } Ok(()) } diff --git a/tests/suite/to_kdl/fun_with_state.golden b/tests/suite/to_kdl/fun_with_state.golden new file mode 100644 index 00000000..b09c1a16 --- /dev/null +++ b/tests/suite/to_kdl/fun_with_state.golden @@ -0,0 +1,7 @@ +// MyFn : U60 +fun (MyFn) { + (MyFn) = #1 +} with { + #2 +} + diff --git a/tests/suite/to_kdl/fun_with_state.kind2 b/tests/suite/to_kdl/fun_with_state.kind2 new file mode 100644 index 00000000..d4fb6099 --- /dev/null +++ b/tests/suite/to_kdl/fun_with_state.kind2 @@ -0,0 +1,8 @@ +#kdl_state = MyFn.state +MyFn : U60 { + 1 +} + +MyFn.state : U60 { + 2 +} \ No newline at end of file diff --git a/tests/suite/to_kdl/non_inline_state.golden b/tests/suite/to_kdl/non_inline_state.golden new file mode 100644 index 00000000..31156b1b --- /dev/null +++ b/tests/suite/to_kdl/non_inline_state.golden @@ -0,0 +1 @@ +Function 'MyFn.state' must have exactly one rule with only variable patterns to be 'kdl_state'. diff --git a/tests/suite/to_kdl/non_inline_state.kind2 b/tests/suite/to_kdl/non_inline_state.kind2 new file mode 100644 index 00000000..d94e77b5 --- /dev/null +++ b/tests/suite/to_kdl/non_inline_state.kind2 @@ -0,0 +1,6 @@ +#kdl_state = MyFn.state +MyFn : U60 { + 1 +} + +MyFn.state : U60 \ No newline at end of file diff --git a/tests/suite/to_kdl/state_with_args.golden b/tests/suite/to_kdl/state_with_args.golden new file mode 100644 index 00000000..580a46e5 --- /dev/null +++ b/tests/suite/to_kdl/state_with_args.golden @@ -0,0 +1 @@ +Function 'MyFn.state' must not have any arguments to be 'kdl_state'. diff --git a/tests/suite/to_kdl/state_with_args.kind2 b/tests/suite/to_kdl/state_with_args.kind2 new file mode 100644 index 00000000..2679bd38 --- /dev/null +++ b/tests/suite/to_kdl/state_with_args.kind2 @@ -0,0 +1,7 @@ +#kdl_state = MyFn.state +MyFn : U60 { + 1 +} + +MyFn.state (a: U60): U60 +MyFn.state a = a \ No newline at end of file diff --git a/tests/suite/to_kdl/state_with_attr.golden b/tests/suite/to_kdl/state_with_attr.golden new file mode 100644 index 00000000..4d61f8db --- /dev/null +++ b/tests/suite/to_kdl/state_with_attr.golden @@ -0,0 +1 @@ +Function 'MyFn.state' must not have any kdl attributes. diff --git a/tests/suite/to_kdl/state_with_attr.kind2 b/tests/suite/to_kdl/state_with_attr.kind2 new file mode 100644 index 00000000..e307640e --- /dev/null +++ b/tests/suite/to_kdl/state_with_attr.kind2 @@ -0,0 +1,9 @@ +#kdl_state = MyFn.state +MyFn : U60 { + 1 +} + +#kdl_run +MyFn.state : U60 { + 0 +} \ No newline at end of file From 7782961b5b9aabe1046e1eb61c5fb6ea8937e542 Mon Sep 17 00:00:00 2001 From: Nicolas Abril Date: Thu, 6 Oct 2022 09:53:00 +0200 Subject: [PATCH 05/17] Fix test for name shortening --- tests/suite/checker/names.kind2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/suite/checker/names.kind2 b/tests/suite/checker/names.kind2 index 88831270..a90d987c 100644 --- a/tests/suite/checker/names.kind2 +++ b/tests/suite/checker/names.kind2 @@ -1,4 +1,4 @@ -use A as Maybe +use Maybe as A /Test : A/ U60 /Test = A/pure 2 \ No newline at end of file From 4566c26ce7c72bfa6bf33c714a102015fda6dc68 Mon Sep 17 00:00:00 2001 From: Nicolas Abril Date: Thu, 6 Oct 2022 09:54:37 +0200 Subject: [PATCH 06/17] Compile kind lambdas to lambdas with ! 
in kdl --- src/codegen/kdl.rs | 2 +- tests/suite/to_kdl/lam.golden | 10 ++++++++++ tests/suite/to_kdl/lam.kind2 | 8 ++++++++ 3 files changed, 19 insertions(+), 1 deletion(-) create mode 100644 tests/suite/to_kdl/lam.golden create mode 100644 tests/suite/to_kdl/lam.kind2 diff --git a/src/codegen/kdl.rs b/src/codegen/kdl.rs index b24dcf19..6359191f 100644 --- a/src/codegen/kdl.rs +++ b/src/codegen/kdl.rs @@ -19,7 +19,7 @@ pub fn to_kdl_term(kdl_names: &HashMap, term: &CompTerm) -> Result CompTerm::App { func, argm } => { let func = to_kdl_term(kdl_names, func)?; let argm = to_kdl_term(kdl_names, argm)?; - format!("({} {})", func, argm) + format!("(!{} {})", func, argm) } CompTerm::Dup { nam0, nam1, expr, body } => { let expr = to_kdl_term(kdl_names, expr)?; diff --git a/tests/suite/to_kdl/lam.golden b/tests/suite/to_kdl/lam.golden new file mode 100644 index 00000000..0611f361 --- /dev/null +++ b/tests/suite/to_kdl/lam.golden @@ -0,0 +1,10 @@ +// CoolFn : (_: U60) U60 +fun (CoolFn) { + (CoolFn) = @x (* #2 x) +} + +// CoolFnApp (n: U60) : U60 +fun (CoolFnApp n) { + (CoolFnApp n) = let lam = @x (!(CoolFn) x); (!lam n) +} + diff --git a/tests/suite/to_kdl/lam.kind2 b/tests/suite/to_kdl/lam.kind2 new file mode 100644 index 00000000..d63a0fee --- /dev/null +++ b/tests/suite/to_kdl/lam.kind2 @@ -0,0 +1,8 @@ +CoolFn : U60 -> U60 { + (x: U60) => (* 2 x) +} + +CoolFnApp (n: U60) : U60 { + let lam = (x: U60) => ((CoolFn) x) + (lam n) +} \ No newline at end of file From bfa7f284019728a5ca3e1a73c85393cd0b61ff2f Mon Sep 17 00:00:00 2001 From: Nicolas Abril Date: Thu, 6 Oct 2022 10:05:42 +0200 Subject: [PATCH 07/17] Slightly improve genereated kdl indentation --- src/codegen/kdl.rs | 6 +++--- tests/suite/to_kdl/change_name.golden | 3 ++- .../suite/to_kdl/erase_still_callable.golden | 3 ++- tests/suite/to_kdl/erased_arg.golden | 10 ++++++--- tests/suite/to_kdl/flatten_u60.golden | 6 ++++-- tests/suite/to_kdl/flattening.golden | 21 ++++++++++++------- tests/suite/to_kdl/kdl_erased.golden | 6 ++++-- tests/suite/to_kdl/lam.golden | 7 +++++-- tests/suite/to_kdl/operators.golden | 2 +- tests/suite/to_kdl/remove_names.golden | 7 ++++++- tests/suite/to_kdl/run.golden | 7 ++++--- tests/suite/to_kdl/u60.golden | 2 +- 12 files changed, 53 insertions(+), 27 deletions(-) diff --git a/src/codegen/kdl.rs b/src/codegen/kdl.rs index 6359191f..94b0f60b 100644 --- a/src/codegen/kdl.rs +++ b/src/codegen/kdl.rs @@ -29,7 +29,7 @@ pub fn to_kdl_term(kdl_names: &HashMap, term: &CompTerm) -> Result CompTerm::Let { name, expr, body } => { let expr = to_kdl_term(kdl_names, expr)?; let body = to_kdl_term(kdl_names, body)?; - format!("let {} = {}; {}", name, expr, body) + format!("let {} = {};\n {}", name, expr, body) } CompTerm::Ctr { name, args } => { let kdl_name = kdl_names.get(name).unwrap_or_else(|| panic!("{}", name)); @@ -68,7 +68,7 @@ pub fn to_kdl_rule(kdl_names: &HashMap, rule: &CompRule) -> Result pats.push(pat); } let body = to_kdl_term(kdl_names, &rule.body)?; - let rule = format!("({}{}) = {}", kdl_name, pats.join(""), body); + let rule = format!("({}{}) =\n {}", kdl_name, pats.join(""), body); Ok(rule) } @@ -132,7 +132,7 @@ pub fn to_kdl_book(book: &Book, kdl_names: &HashMap, comp_book: &C continue; } if entry.get_attribute("kdl_run").is_some() { - let stmnt = format!("run {{\n {}\n}}\n\n", to_kdl_term(kdl_names, &*entry.rules[0].body)?); + let stmnt = format!("run {{\n {}\n}}\n\n", to_kdl_term(kdl_names, &*entry.rules[0].body)?); run.push_str(&stmnt); continue; } diff --git a/tests/suite/to_kdl/change_name.golden 
b/tests/suite/to_kdl/change_name.golden index d086935d..14edd282 100644 --- a/tests/suite/to_kdl/change_name.golden +++ b/tests/suite/to_kdl/change_name.golden @@ -1,5 +1,6 @@ // Jonathan.Joestar : U60 fun (JOJO) { - (JOJO) = #42 + (JOJO) = + #42 } diff --git a/tests/suite/to_kdl/erase_still_callable.golden b/tests/suite/to_kdl/erase_still_callable.golden index 119af8bc..b888f34c 100644 --- a/tests/suite/to_kdl/erase_still_callable.golden +++ b/tests/suite/to_kdl/erase_still_callable.golden @@ -1,5 +1,6 @@ // FnA : _ fun (A) { - (A) = (+ (B) #1) + (A) = + (+ (B) #1) } diff --git a/tests/suite/to_kdl/erased_arg.golden b/tests/suite/to_kdl/erased_arg.golden index 8057c3c4..4f11a44d 100644 --- a/tests/suite/to_kdl/erased_arg.golden +++ b/tests/suite/to_kdl/erased_arg.golden @@ -1,14 +1,18 @@ // Arity3 -(e: U60) -(f: U60) -(g: Type) -(h: Type) -(i: Type) (d: U60) : U60 fun (Arity3 d) { - (Arity3 d) = d + (Arity3 d) = + d } // ArityB -(e: U60) -(g: Type) (d: U60) -(f: U60) -(h: Type) -(h: U60) -(i: Type) : U60 fun (ArityB d) { - (ArityB d) = d + (ArityB d) = + d } run { - let ~ = (+ (+ (Arity3 #3) (Arity3 #6)) #5); let b = (+ (+ (ArityB #2) (ArityB #3)) #5); b + let ~ = (+ (+ (Arity3 #3) (Arity3 #6)) #5); + let b = (+ (+ (ArityB #2) (ArityB #3)) #5); + b } diff --git a/tests/suite/to_kdl/flatten_u60.golden b/tests/suite/to_kdl/flatten_u60.golden index c139573e..afef07eb 100644 --- a/tests/suite/to_kdl/flatten_u60.golden +++ b/tests/suite/to_kdl/flatten_u60.golden @@ -1,10 +1,12 @@ fun (A0_ x0) { - (A0_ #2) = #9 + (A0_ #2) = + #9 } // A (b: (B)) (n: U60) : U60 fun (A b n) { - (A {B_new .0} #1) = (A0_ .0) + (A {B_new .0} #1) = + (A0_ .0) } // B.new (n: U60) : (B) diff --git a/tests/suite/to_kdl/flattening.golden b/tests/suite/to_kdl/flattening.golden index a7534738..9de97f01 100644 --- a/tests/suite/to_kdl/flattening.golden +++ b/tests/suite/to_kdl/flattening.golden @@ -6,25 +6,32 @@ ctr {List_cons head tail} // Ora.Ora.ora -(a: Type) (h: (List a)) : (List a) fun (Ora_Ora_ora h) { - (Ora_Ora_ora {List_nil}) = {List_nil} - (Ora_Ora_ora {List_cons x xs}) = {List_cons x xs} + (Ora_Ora_ora {List_nil}) = + {List_nil} + (Ora_Ora_ora {List_cons x xs}) = + {List_cons x xs} } fun (Null0_0_ x0 x1 x2 x3) { - (Null0_0_ ~ ~ ~ {List_cons ~ ~}) = {List_nil} + (Null0_0_ ~ ~ ~ {List_cons ~ ~}) = + {List_nil} } fun (Null0_1_ x0 x1) { - (Null0_1_ {List_cons ~ ~} ~) = {List_nil} + (Null0_1_ {List_cons ~ ~} ~) = + {List_nil} } fun (Null0_ x0 x1) { - (Null0_ {List_cons y ys} {List_cons x .0}) = (Null0_0_ y ys x .0) - (Null0_ {List_nil} {List_cons .0 xs}) = (Null0_1_ .0 xs) + (Null0_ {List_cons y ys} {List_cons x .0}) = + (Null0_0_ y ys x .0) + (Null0_ {List_nil} {List_cons .0 xs}) = + (Null0_1_ .0 xs) } // Null -(a: Type) (h: (List (List a))) : (List a) fun (Null h) { - (Null {List_cons .0 .1}) = (Null0_ .0 .1) + (Null {List_cons .0 .1}) = + (Null0_ .0 .1) } diff --git a/tests/suite/to_kdl/kdl_erased.golden b/tests/suite/to_kdl/kdl_erased.golden index 04a7b165..d4ce1673 100644 --- a/tests/suite/to_kdl/kdl_erased.golden +++ b/tests/suite/to_kdl/kdl_erased.golden @@ -1,10 +1,12 @@ // Ata : U60 fun (A) { - (A) = #2 + (A) = + #2 } // Ce : U60 fun (C) { - (C) = #4 + (C) = + #4 } diff --git a/tests/suite/to_kdl/lam.golden b/tests/suite/to_kdl/lam.golden index 0611f361..bda1934a 100644 --- a/tests/suite/to_kdl/lam.golden +++ b/tests/suite/to_kdl/lam.golden @@ -1,10 +1,13 @@ // CoolFn : (_: U60) U60 fun (CoolFn) { - (CoolFn) = @x (* #2 x) + (CoolFn) = + @x (* #2 x) } // CoolFnApp (n: U60) : U60 fun (CoolFnApp n) { - 
(CoolFnApp n) = let lam = @x (!(CoolFn) x); (!lam n) + (CoolFnApp n) = + let lam = @x (!(CoolFn) x); + (!lam n) } diff --git a/tests/suite/to_kdl/operators.golden b/tests/suite/to_kdl/operators.golden index c251ceb0..83b2bd24 100644 --- a/tests/suite/to_kdl/operators.golden +++ b/tests/suite/to_kdl/operators.golden @@ -1,4 +1,4 @@ run { - (+ #2 (- #3 (* #4 (/ #5 (% #6 (& #2 (| #8 (^ #9 (<< #10 (>> #23 (< #2 (>= #4 (<= (== #4 #4) (> #3 (!= #5 #3))))))))))))))) + (+ #2 (- #3 (* #4 (/ #5 (% #6 (& #2 (| #8 (^ #9 (<< #10 (>> #23 (< #2 (>= #4 (<= (== #4 #4) (> #3 (!= #5 #3))))))))))))))) } diff --git a/tests/suite/to_kdl/remove_names.golden b/tests/suite/to_kdl/remove_names.golden index 2f7159dd..1a8677a2 100644 --- a/tests/suite/to_kdl/remove_names.golden +++ b/tests/suite/to_kdl/remove_names.golden @@ -6,6 +6,11 @@ ctr {List_cons head tail} // Ora.Ora.ora (h: (List U60)) : (List U60) fun (Ora_Ora_ora h) { - (Ora_Ora_ora {List_cons ~ xs}) = let a = #2; let b = #3; let c = #4; let ~ = (+ c b); {List_cons a xs} + (Ora_Ora_ora {List_cons ~ xs}) = + let a = #2; + let b = #3; + let c = #4; + let ~ = (+ c b); + {List_cons a xs} } diff --git a/tests/suite/to_kdl/run.golden b/tests/suite/to_kdl/run.golden index 2246a020..885b9372 100644 --- a/tests/suite/to_kdl/run.golden +++ b/tests/suite/to_kdl/run.golden @@ -1,13 +1,14 @@ // Ata : U60 fun (A) { - (A) = #2 + (A) = + #2 } run { - #3 + #3 } run { - #4 + #4 } diff --git a/tests/suite/to_kdl/u60.golden b/tests/suite/to_kdl/u60.golden index 6ba642b1..52dabc78 100644 --- a/tests/suite/to_kdl/u60.golden +++ b/tests/suite/to_kdl/u60.golden @@ -1,4 +1,4 @@ run { - #2 + #2 } From 01937a919ffa3b2c3279a1761daa8adf8706033a Mon Sep 17 00:00:00 2001 From: Nicolas Abril Date: Fri, 7 Oct 2022 08:09:57 +0200 Subject: [PATCH 08/17] Fix indentation for initial state --- src/codegen/kdl.rs | 2 +- tests/suite/to_kdl/fun_with_state.golden | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/src/codegen/kdl.rs b/src/codegen/kdl.rs index 94b0f60b..28673072 100644 --- a/src/codegen/kdl.rs +++ b/src/codegen/kdl.rs @@ -102,7 +102,7 @@ pub fn to_kdl_entry(book: &Book, kdl_names: &HashMap, comp_book: & let state_fn = comp_book.entrs.get(&state_fn_name).ok_or(format!("Initial state function \"{}\" for function \"{}\" not found.", state_fn_name, entry.name))?; let state_term = state_fn.rules[0].body.clone(); // This is checked when validating the attributes let init_state = to_kdl_term(kdl_names, &*state_term)?; - format!("fun ({}{}) {{{}\n}} with {{\n {}\n}}\n\n", kdl_name, args_names, rules.join(""), init_state) + format!("fun ({}{}) {{{}\n}} with {{\n {}\n}}\n\n", kdl_name, args_names, rules.join(""), init_state) } // Otherwise just compile the function as normal None => { diff --git a/tests/suite/to_kdl/fun_with_state.golden b/tests/suite/to_kdl/fun_with_state.golden index b09c1a16..ff6de9e6 100644 --- a/tests/suite/to_kdl/fun_with_state.golden +++ b/tests/suite/to_kdl/fun_with_state.golden @@ -1,7 +1,8 @@ // MyFn : U60 fun (MyFn) { - (MyFn) = #1 + (MyFn) = + #1 } with { - #2 + #2 } From d82ed4d61a3f90f722229c53533b06a59f168220 Mon Sep 17 00:00:00 2001 From: Nicolas Abril Date: Wed, 12 Oct 2022 18:01:18 +0200 Subject: [PATCH 09/17] feat: add inline attr, separate kdl compiler passes Add an inline attribute that substitutes a function application for the body of the rule that matches. This is still limited and rejects many easily rewritable programs. 
We would need to compile the intermediate terms to HVM and normalize them then to properly and efficiently handle all valid inlining cases. I also separated each of the passes of the kdl compiler to separate functions that go over the whole AST. This is probably less efficient, but makes it easier to reuse this code for other targets and to develop new passes. This commit also does some light refactoring of other things surrounding the kdl compiler. --- src/codegen/kdl.rs | 55 +- src/codegen/kdl/book.rs | 890 +------------ src/codegen/kdl/passes.rs | 1147 +++++++++++++++++ src/driver.rs | 10 +- src/driver/loader.rs | 2 +- src/lowering/adjust.rs | 2 +- src/lowering/attributes.rs | 59 +- tests/mod.rs | 4 +- tests/suite/to_kdl/bad_inline.golden | 1 + tests/suite/to_kdl/bad_inline.kind2 | 17 + tests/suite/to_kdl/inline.golden | 13 + tests/suite/to_kdl/inline.kind2 | 32 + tests/suite/to_kdl/inline_with_arg.golden | 1 + tests/suite/to_kdl/inline_with_arg.kind2 | 4 + ...args.golden => kdl_state_with_args.golden} | 0 ...h_args.kind2 => kdl_state_with_args.kind2} | 0 ...attr.golden => kdl_state_with_attr.golden} | 0 ...h_attr.kind2 => kdl_state_with_attr.kind2} | 0 tests/suite/to_kdl/non_inline_state.golden | 2 +- 19 files changed, 1291 insertions(+), 948 deletions(-) create mode 100644 src/codegen/kdl/passes.rs create mode 100644 tests/suite/to_kdl/bad_inline.golden create mode 100644 tests/suite/to_kdl/bad_inline.kind2 create mode 100644 tests/suite/to_kdl/inline.golden create mode 100644 tests/suite/to_kdl/inline.kind2 create mode 100644 tests/suite/to_kdl/inline_with_arg.golden create mode 100644 tests/suite/to_kdl/inline_with_arg.kind2 rename tests/suite/to_kdl/{state_with_args.golden => kdl_state_with_args.golden} (100%) rename tests/suite/to_kdl/{state_with_args.kind2 => kdl_state_with_args.kind2} (100%) rename tests/suite/to_kdl/{state_with_attr.golden => kdl_state_with_attr.golden} (100%) rename tests/suite/to_kdl/{state_with_attr.kind2 => kdl_state_with_attr.kind2} (100%) diff --git a/src/codegen/kdl.rs b/src/codegen/kdl.rs index 28673072..ec5913f7 100644 --- a/src/codegen/kdl.rs +++ b/src/codegen/kdl.rs @@ -1,4 +1,5 @@ mod book; +pub mod passes; use crate::book::name::Ident; use crate::book::Book; @@ -9,37 +10,37 @@ use std::collections::{HashMap, HashSet}; pub const KDL_NAME_LEN: usize = 12; -pub fn to_kdl_term(kdl_names: &HashMap, term: &CompTerm) -> Result { +pub fn stringify_kdl_term(kdl_names: &HashMap, term: &CompTerm) -> Result { let term = match term { CompTerm::Var { name } => name.to_string(), CompTerm::Lam { name, body } => { - let body = to_kdl_term(kdl_names, body)?; + let body = stringify_kdl_term(kdl_names, body)?; format!("@{} {}", name, body) } CompTerm::App { func, argm } => { - let func = to_kdl_term(kdl_names, func)?; - let argm = to_kdl_term(kdl_names, argm)?; + let func = stringify_kdl_term(kdl_names, func)?; + let argm = stringify_kdl_term(kdl_names, argm)?; format!("(!{} {})", func, argm) } CompTerm::Dup { nam0, nam1, expr, body } => { - let expr = to_kdl_term(kdl_names, expr)?; - let body = to_kdl_term(kdl_names, body)?; + let expr = stringify_kdl_term(kdl_names, expr)?; + let body = stringify_kdl_term(kdl_names, body)?; format!("dup {} {} = {}; {}", nam0, nam1, expr, body) } CompTerm::Let { name, expr, body } => { - let expr = to_kdl_term(kdl_names, expr)?; - let body = to_kdl_term(kdl_names, body)?; + let expr = stringify_kdl_term(kdl_names, expr)?; + let body = stringify_kdl_term(kdl_names, body)?; format!("let {} = {};\n {}", name, expr, body) } CompTerm::Ctr 
{ name, args } => { let kdl_name = kdl_names.get(name).unwrap_or_else(|| panic!("{}", name)); - let args = args.iter().map(|x| to_kdl_term(kdl_names, x)).collect::, String>>()?; + let args = args.iter().map(|x| stringify_kdl_term(kdl_names, x)).collect::, String>>()?; let args = args.iter().map(|x| format!(" {}", x)).collect::(); format!("{{{}{}}}", kdl_name, args) } CompTerm::Fun { name, args } => { let kdl_name = kdl_names.get(name).unwrap_or_else(|| panic!("{}", name)); - let args = args.iter().map(|x| to_kdl_term(kdl_names, x)).collect::, String>>()?; + let args = args.iter().map(|x| stringify_kdl_term(kdl_names, x)).collect::, String>>()?; let args = args.iter().map(|x| format!(" {}", x)).collect::(); format!("({}{})", kdl_name, args) } @@ -47,8 +48,8 @@ pub fn to_kdl_term(kdl_names: &HashMap, term: &CompTerm) -> Result format!("#{}", numb) } CompTerm::Op2 { oper, val0, val1 } => { - let val0 = to_kdl_term(kdl_names, val0)?; - let val1 = to_kdl_term(kdl_names, val1)?; + let val0 = stringify_kdl_term(kdl_names, val0)?; + let val1 = stringify_kdl_term(kdl_names, val1)?; format!("({} {} {})", oper, val0, val1) } CompTerm::Nil => { @@ -58,21 +59,21 @@ pub fn to_kdl_term(kdl_names: &HashMap, term: &CompTerm) -> Result Ok(term) } -pub fn to_kdl_rule(kdl_names: &HashMap, rule: &CompRule) -> Result { +pub fn stringify_kdl_rule(kdl_names: &HashMap, rule: &CompRule) -> Result { let name = &rule.name; let kdl_name = kdl_names.get(name).unwrap(); let mut pats = vec![]; // stringified pattern args for pat in rule.pats.iter() { - let pat = to_kdl_term(kdl_names, pat)?; + let pat = stringify_kdl_term(kdl_names, pat)?; pats.push(" ".to_string()); pats.push(pat); } - let body = to_kdl_term(kdl_names, &rule.body)?; + let body = stringify_kdl_term(kdl_names, &rule.body)?; let rule = format!("({}{}) =\n {}", kdl_name, pats.join(""), body); Ok(rule) } -pub fn to_kdl_entry(book: &Book, kdl_names: &HashMap, comp_book: &CompBook, entry: &CompEntry) -> Result { +pub fn stringify_kdl_entry(book: &Book, kdl_names: &HashMap, comp_book: &CompBook, entry: &CompEntry) -> Result { let kdl_name = kdl_names.get(&entry.name).unwrap(); let args_names = entry.args.iter().map(|arg| format!(" {}", arg)).collect::(); // If this entry existed in the original kind code, add some annotations as comments @@ -93,7 +94,7 @@ pub fn to_kdl_entry(book: &Book, kdl_names: &HashMap, comp_book: & } else { let mut rules = vec![]; for rule in &entry.rules { - rules.push(format!("\n {}", to_kdl_rule(kdl_names, rule)?)); + rules.push(format!("\n {}", stringify_kdl_rule(kdl_names, rule)?)); } match entry.get_attribute("kdl_state") { // If the function has an initial state, compile the state together with it @@ -101,7 +102,7 @@ pub fn to_kdl_entry(book: &Book, kdl_names: &HashMap, comp_book: & let state_fn_name = attr.value.unwrap(); let state_fn = comp_book.entrs.get(&state_fn_name).ok_or(format!("Initial state function \"{}\" for function \"{}\" not found.", state_fn_name, entry.name))?; let state_term = state_fn.rules[0].body.clone(); // This is checked when validating the attributes - let init_state = to_kdl_term(kdl_names, &*state_term)?; + let init_state = stringify_kdl_term(kdl_names, &*state_term)?; format!("fun ({}{}) {{{}\n}} with {{\n {}\n}}\n\n", kdl_name, args_names, rules.join(""), init_state) } // Otherwise just compile the function as normal @@ -114,7 +115,7 @@ pub fn to_kdl_entry(book: &Book, kdl_names: &HashMap, comp_book: & Ok(entry) } -pub fn to_kdl_book(book: &Book, kdl_names: &HashMap, comp_book: &CompBook) -> Result { 
+pub fn stringify_kdl_book(book: &Book, kdl_names: &HashMap, comp_book: &CompBook) -> Result { let mut lines = vec![]; let mut run = String::new(); let mut init_funs: HashSet = HashSet::new(); // Functions that are the initial state to some other function @@ -132,7 +133,7 @@ pub fn to_kdl_book(book: &Book, kdl_names: &HashMap, comp_book: &C continue; } if entry.get_attribute("kdl_run").is_some() { - let stmnt = format!("run {{\n {}\n}}\n\n", to_kdl_term(kdl_names, &*entry.rules[0].body)?); + let stmnt = format!("run {{\n {}\n}}\n\n", stringify_kdl_term(kdl_names, &*entry.rules[0].body)?); run.push_str(&stmnt); continue; } @@ -140,11 +141,23 @@ pub fn to_kdl_book(book: &Book, kdl_names: &HashMap, comp_book: &C if init_funs.contains(name) { continue; } - lines.push(to_kdl_entry(book, kdl_names, comp_book, entry)?); + lines.push(stringify_kdl_entry(book, kdl_names, comp_book, entry)?); } Ok(lines.join("") + &run) } +pub fn to_kdl_book(book: Book, namespace: &Option) -> Result { + let book = passes::erase_funs(book)?; + let comp_book = passes::erase_terms(&book) + .and_then(passes::inline) + .and_then(passes::remove_u120_opers) + .and_then(passes::convert_u120_uses) + .and_then(passes::flatten) + .and_then(passes::linearize_rules)?; + let kdl_names = get_kdl_names(&comp_book, namespace)?; + stringify_kdl_book(&book, &kdl_names, &comp_book) +} + // Utils // ----- diff --git a/src/codegen/kdl/book.rs b/src/codegen/kdl/book.rs index d85a8f8e..d55d306b 100644 --- a/src/codegen/kdl/book.rs +++ b/src/codegen/kdl/book.rs @@ -1,10 +1,8 @@ use crate::book::name::Ident; -use crate::book::term::{Operator, Term}; -use crate::book::{Attribute, Entry, Rule}; -use crate::codegen::kdl::Book; +use crate::book::term::Operator; +use crate::book::Attribute; use std::collections::HashMap; -use std::collections::HashSet; #[derive(Clone, Debug)] pub enum CompTerm { @@ -81,887 +79,3 @@ impl CompEntry { None } } - -pub fn compile_book(book: &Book) -> Result { - let mut comp_book = CompBook { - names: Vec::new(), - entrs: HashMap::new(), - }; - for name in &book.names { - let entry = book.entrs.get(name).unwrap(); - // Don't compile primitive U120 operations - // TODO: If this compiler eventually gets used for other targets (like HVM), this will need to be separated. - // We could do passes of compiler features (like flattening, linearizing, etc) also separately. - if u120_to_oper(&entry.name).is_some() { - continue; - } - // Skip over useless entries - // TODO: This doesn't cover all cases. We need something like `erase` but for a Book. - // Also maybe there are functions of type Type that should be compiled? - if let Term::Typ { .. 
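// Illustration of the pass-based design this patch introduces: in the new `to_kdl_book`,
// each pass is a plain function of roughly the shape `fn(CompBook) -> Result<CompBook, String>`
// chained with `and_then`, so a new pass can be slotted into the pipeline without touching the
// others. A minimal sketch only; the pass name `simplify_lets` below is hypothetical and not
// part of this patch:
//
//     let comp_book = passes::erase_terms(&book)
//         .and_then(passes::inline)
//         .and_then(passes::simplify_lets) // hypothetical extra pass
//         .and_then(passes::remove_u120_opers)
//         .and_then(passes::convert_u120_uses)
//         .and_then(passes::flatten)
//         .and_then(passes::linearize_rules)?;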
} = &*entry.tipo { - continue; - } - // TODO: Group errors for all entries - let entrs = compile_entry(book, entry)?; - for entry in entrs { - comp_book.names.push(entry.name.clone()); - comp_book.entrs.insert(entry.name.clone(), entry); - } - } - Ok(comp_book) -} - -// Can become multiple entries after flatenning -pub fn compile_entry(book: &Book, entry: &Entry) -> Result, String> { - fn compile_rule(book: &Book, entry: &Entry, rule: &Rule) -> Result { - let name = rule.name.clone(); - let mut pats = Vec::new(); - for (arg, pat) in entry.args.iter().zip(rule.pats.iter()) { - if !arg.eras { - let pat = erase(book, pat); - match is_valid_pattern(&*pat) { - Ok(()) => { - pats.push(pat); - } - Err(err_term) => { - // TODO: Add Display trait for compterms - // TODO: Tell the user exactly why this term is incorrect - return Err(format!("Found invalid term \"{:?}\" in rule pattern matching for entry \"{}\".", err_term, entry.name)); - } - } - } - } - let body = erase(book, &rule.body); - Ok(CompRule { name, pats, body }) - } - - fn make_u120_new(old_entry: &Entry) -> CompEntry { - // U120.new hi lo = (+ (<< hi 60) (>> (<< lo 60) 60)) - CompEntry { - name: Ident::new("U120.new"), - args: vec![Ident::new("hi"), Ident::new("lo")], - rules: vec![CompRule { - name: Ident::new("U120.new"), - pats: vec![ - Box::new(CompTerm::Var { name: Ident::new("hi") }), - Box::new(CompTerm::Var { name: Ident::new("lo") }) - ], - body: Box::new(CompTerm::Op2 { - oper: Operator::Add, - val0: Box::new(CompTerm::Op2 { - oper: Operator::Shl, - val0: Box::new(CompTerm::Var { name: Ident::new("hi") }), - val1: Box::new(CompTerm::Num { numb: 60 }), - }), - val1: Box::new(CompTerm::Op2 { - oper: Operator::Shr, - val0: Box::new(CompTerm::Op2 { - oper: Operator::Shl, - val0: Box::new(CompTerm::Var { name: Ident::new("lo") }), - val1: Box::new(CompTerm::Num { numb: 60 }), - }), - val1: Box::new(CompTerm::Num { numb: 60 }), - }), - }), - }], - orig: true, - attrs: old_entry.attrs.clone(), - } - } - - fn make_u120_low(old_entry: &Entry) -> CompEntry { - // U120.low n = (>> (<< n 60) 60)) - CompEntry { - name: Ident::new("U120.low"), - args: vec![Ident::new("n")], - rules: vec![CompRule { - name: Ident::new("U120.low"), - pats: vec![Box::new(CompTerm::Var { name: Ident::new("n") })], - body: Box::new(CompTerm::Op2 { - oper: Operator::Shr, - val0: Box::new(CompTerm::Op2 { - oper: Operator::Shl, - val0: Box::new(CompTerm::Var { name: Ident::new("n") }), - val1: Box::new(CompTerm::Num { numb: 60 }), - }), - val1: Box::new(CompTerm::Num { numb: 60 }), - }), - }], - orig: true, - attrs: old_entry.attrs.clone(), - } - } - - fn make_u120_high(old_entry: &Entry) -> CompEntry { - // U120.high n = (>> n 60) - CompEntry { - name: Ident::new("U120.high"), - args: vec![Ident::new("n")], - rules: vec![CompRule { - name: Ident::new("U120.high"), - pats: vec![Box::new(CompTerm::Var { name: Ident::new("n") })], - body: Box::new(CompTerm::Op2 { - oper: Operator::Shr, - val0: Box::new(CompTerm::Var { name: Ident::new("n") }), - val1: Box::new(CompTerm::Num { numb: 60 }), - }), - }], - orig: true, - attrs: old_entry.attrs.clone(), - } - } - - match entry.name.0.as_str() { - // Some U120 functions should have a special compilation - "U120.new" => Ok(vec![make_u120_new(&entry)]), - // U120.new becomes a special function that joins two numbers as if they were U60s - // TODO: We could rewrite these both to not need this workaround, but it would become rather slow on normal HVM (~100 rewrites instead of 1) - "U120.high" => 
Ok(vec![make_u120_high(&entry)]), - // high and low are used for type compatibility with u60 - "U120.low" => Ok(vec![make_u120_low(&entry)]), - _ => { - let name = entry.name.clone(); - let args = entry.args.iter().filter(|x| !x.eras).map(|x| x.name.clone()).collect(); - // TODO: Group all errs together instead of failing on the first one - let rules = entry.rules.iter().map(|rule| compile_rule(book, entry, rule)).collect::, String>>()?; - let attrs = entry.attrs.clone(); - let new_entry = CompEntry { name, args, rules, attrs, orig: true }; - // TODO: We probably need to handle U60 separately as well. - // Since they compile to U120, it wont overflow as expected and conversion to signed will fail. - let new_entry = convert_u120_entry(new_entry)?; - let mut new_entrs = flatten(new_entry); - for entry in &mut new_entrs { - for rule in &mut entry.rules { - linearize_rule(rule); - } - } - Ok(new_entrs) - } - } -} - -// True if the compiled term is a valid rule pattern. -// Rule patterns must be normalized terms with only Ctrs, Nums and Vars (no Lams, Dups or Lets) -pub fn is_valid_pattern(pat: &CompTerm) -> Result<(), &CompTerm> { - let mut check_stack: Vec<&CompTerm> = vec![pat]; - while !check_stack.is_empty() { - let term = check_stack.pop().unwrap(); - match term { - CompTerm::Ctr { args, .. } => { - for arg in args { - check_stack.push(arg); - } - }, - CompTerm::Var { .. } => (), - CompTerm::Num { .. } => (), - CompTerm::Lam { .. } => { return Err(term) } - CompTerm::App { .. } => { return Err(term) } - CompTerm::Dup { .. } => { return Err(term) } - CompTerm::Let { .. } => { return Err(term) } - CompTerm::Fun { .. } => { return Err(term) } - CompTerm::Op2 { .. } => { return Err(term) } - CompTerm::Nil { .. } => { return Err(term) } - }; - } - Ok(()) -} - -// Splits an entry with rules with nested cases into multiple entries with flattened rules. -pub fn flatten(entry: CompEntry) -> Vec { - fn post_inc(n: &mut u64) -> u64 { - let old_n = *n; - *n += 1; - old_n - } - - fn must_split(rule: &CompRule) -> bool { - for pat in &rule.pats { - if let CompTerm::Ctr { args, .. } = &**pat { - for arg in args { - if matches!(&**arg, CompTerm::Ctr { .. } | CompTerm::Num { .. }) { - return true; - } - } - } - } - false - } - - // return true on the first if both rules always match together - fn matches_together(a: &CompRule, b: &CompRule) -> (bool, bool) { - let mut same_shape = true; - for (a_pat, b_pat) in a.pats.iter().zip(&b.pats) { - match (&**a_pat, &**b_pat) { - (CompTerm::Ctr { name: a_name, .. }, CompTerm::Ctr { name: b_name, .. }) => { - if a_name != b_name { - return (false, false); - } - } - (CompTerm::Num { numb: a_numb }, CompTerm::Num { numb: b_numb }) => { - if a_numb != b_numb { - return (false, false); - } - } - (CompTerm::Ctr { .. }, CompTerm::Num { .. }) => { - return (false, false); - } - (CompTerm::Num { .. }, CompTerm::Ctr { .. }) => { - return (false, false); - } - (CompTerm::Ctr { .. }, CompTerm::Var { .. }) => { - same_shape = false; - } - (CompTerm::Num { .. }, CompTerm::Var { .. 
}) => { - same_shape = false; - } - _ => {} - } - } - (true, same_shape) - } - - fn split_rule(rule: &CompRule, entry: &CompEntry, i: usize, name_count: &mut u64, skip: &mut HashSet) -> (CompRule, Vec) { - // Each rule that must be split creates a new entry that inspects one layer of Ctrs - // The old rule is rewritten to be flat and call the new entry - let n = post_inc(name_count); - let new_entry_name = Ident(format!("{}{}_", entry.name, n)); - let mut new_entry_attrs = entry.attrs.clone(); - // If the old rule had a kdl name, create a new kdl name for the split entry - for attr in &mut new_entry_attrs { - if attr.name.0 == "kdl_name" { - let old_kdln = attr.value.as_ref().unwrap(); // Checked before in adjust step - let new_kdln = Ident(format!("{}{}_", old_kdln, n)); - attr.value = Some(new_kdln); - break; - } - } - let mut new_entry_rules: Vec = Vec::new(); - // Rewrite the old rule to be flat and point to the new entry - let mut old_rule_pats: Vec> = Vec::new(); - let mut old_rule_body_args: Vec> = Vec::new(); - let mut var_count = 0; - for pat in &rule.pats { - match &**pat { - CompTerm::Ctr { name: pat_name, args: pat_args } => { - let mut new_pat_args = Vec::new(); - for field in pat_args { - let arg = match &**field { - CompTerm::Ctr { .. } | CompTerm::Num { .. } => { - let name = Ident(format!(".{}", post_inc(&mut var_count))); - Box::new(CompTerm::Var { name }) - } - CompTerm::Var { .. } => field.clone(), - _ => { - panic!("?"); - } - }; - new_pat_args.push(arg.clone()); - old_rule_body_args.push(arg); - } - old_rule_pats.push(Box::new(CompTerm::Ctr { - name: pat_name.clone(), - args: new_pat_args, - })); - } - CompTerm::Var { name } => { - old_rule_pats.push(pat.clone()); - old_rule_body_args.push(Box::new(CompTerm::Var { name: name.clone() })); - } - CompTerm::Num { .. } => { - old_rule_pats.push(pat.clone()); - } - _ => { - panic!("Found invalid pattern \"{:?}\" while flattening entry \"{}\".", pat, entry.name); - } - } - } - let old_rule_body = Box::new(CompTerm::Fun { - name: new_entry_name.clone(), - args: old_rule_body_args, - }); - let old_rule = CompRule { - name: entry.name.clone(), - pats: old_rule_pats, - body: old_rule_body, - }; - //(Foo Tic (Bar a b) (Haz c d)) = A - //(Foo Tic x y) = B - //--------------------------------- - //(Foo Tic (Bar a b) (Haz c d)) = B[x <- (Bar a b), y <- (Haz c d)] - // - //(Foo.0 a b c d) = ... - - // Check the rules to see if there's any that will be covered by the new entry, including the rule itself. - // Skips previously checked rules to avoid duplication. - // For each unique matching rule, creates a new flattening rule for the entry. - // Ex: (Fun (Ctr1 (Ctr2))) and (Fun (Ctr1 (Ctr3))) will both flatten to (Fun (Ctr1 .0)) and can be merged - for (j, other) in entry.rules.iter().enumerate().skip(i) { - let (compatible, same_shape) = matches_together(rule, other); - if compatible { - // (Foo a (B x P) (C y0 y1)) = F - // (Foo (A k) (B x Q) y ) = G - // ----------------------------- - // (Foo a (B x u) (C y0 y1)) = (Foo.0 a x u y0 y1) - // (Foo.0 a x P y0 y1) = F - // (Foo.0 (A k) x Q f0 f1) = G [y <- (C f0 f1)] // f0 and f1 are fresh - - // Skip identical rules - if same_shape { - skip.insert(j); - } - let mut new_rule_pats = Vec::new(); - let mut new_rule_body = other.body.clone(); - for (rule_pat, other_pat) in rule.pats.iter().zip(&other.pats) { - match (&**rule_pat, &**other_pat) { - (CompTerm::Ctr { .. }, CompTerm::Ctr { args: other_pat_args, .. 
}) => { - // Bring the arguments of a constructor outside - new_rule_pats.extend(other_pat_args.clone()); - } - ( - CompTerm::Ctr { - name: rule_pat_name, - args: rule_pat_args, - }, - CompTerm::Var { name: other_pat_name }, - ) => { - let mut new_ctr_args = vec![]; - for _ in 0..rule_pat_args.len() { - let new_arg = CompTerm::Var { - name: Ident(format!(".{}", post_inc(&mut var_count))), - }; - new_ctr_args.push(Box::new(new_arg.clone())); - new_rule_pats.push(Box::new(new_arg)); - } - let new_ctr = CompTerm::Ctr { - name: rule_pat_name.clone(), - args: new_ctr_args, - }; - subst(&mut new_rule_body, other_pat_name, &new_ctr); - } - (CompTerm::Var { .. }, _) => { - new_rule_pats.push(other_pat.clone()); - } - // Nums are like Ctr with no args, so nothing to bring out - (CompTerm::Num { .. }, CompTerm::Num { .. }) => (), - (CompTerm::Num { .. }, CompTerm::Var { name: other_pat_name }) => { - subst(&mut new_rule_body, other_pat_name, rule_pat); - } - _ => { - panic!("Internal error. Please report."); // not possible since it matches - } - } - } - let new_rule = CompRule { - name: new_entry_name.clone(), - pats: new_rule_pats, - body: new_rule_body, - }; - new_entry_rules.push(new_rule); - } - } - assert!(!new_entry_rules.is_empty()); // There's at least one rule, since rules always match with themselves - let new_entry_args = (0..new_entry_rules[0].pats.len()).map(|n| Ident(format!("x{}", n))).collect(); - let new_entry = CompEntry { - name: new_entry_name, - args: new_entry_args, - rules: new_entry_rules, - attrs: new_entry_attrs, - orig: false, - }; - let new_split_entries = flatten(new_entry); - (old_rule, new_split_entries) - } - - let mut name_count = 0; - - let mut skip: HashSet = HashSet::new(); - let mut new_entries: Vec = Vec::new(); - let mut old_entry_rules: Vec = Vec::new(); - for i in 0..entry.rules.len() { - if !skip.contains(&i) { - let rule = &entry.rules[i]; - if must_split(rule) { - let (old_rule, split_entries) = split_rule(rule, &entry, i, &mut name_count, &mut skip); - old_entry_rules.push(old_rule); - new_entries.extend(split_entries); - } else { - old_entry_rules.push(entry.rules[i].clone()); - } - } - } - let old_entry = CompEntry { - name: entry.name, - args: entry.args, - rules: old_entry_rules, - orig: entry.orig, - attrs: entry.attrs, - }; - new_entries.push(old_entry); - new_entries -} - -// Substitute all instances of a variable in a term with another term -pub fn subst(term: &mut CompTerm, sub_name: &Ident, value: &CompTerm) { - match term { - CompTerm::Var { name } => { - if sub_name == name { - *term = value.clone(); - } - } - CompTerm::Dup { nam0, nam1, expr, body } => { - subst(&mut *expr, sub_name, value); - if nam0 != sub_name && nam1 != sub_name { - subst(&mut *body, sub_name, value); - } - } - CompTerm::Let { name, expr, body } => { - subst(&mut *expr, sub_name, value); - if name != sub_name { - subst(&mut *body, sub_name, value); - } - } - CompTerm::Lam { name, body } => { - if name != sub_name { - subst(&mut *body, sub_name, value); - } - } - CompTerm::App { func, argm } => { - subst(&mut *func, sub_name, value); - subst(&mut *argm, sub_name, value); - } - CompTerm::Ctr { args, .. } => { - for arg in args { - subst(&mut *arg, sub_name, value); - } - } - CompTerm::Fun { args, .. } => { - for arg in args { - subst(&mut *arg, sub_name, value); - } - } - CompTerm::Num { .. } => {} - CompTerm::Op2 { val0, val1, .. 
} => { - subst(&mut *val0, sub_name, value); - subst(&mut *val1, sub_name, value); - } - CompTerm::Nil => {} - } -} - -// Removes proof-irrelevant parts of the term -pub fn erase(book: &Book, term: &Term) -> Box { - match term { - Term::Typ { .. } => Box::new(CompTerm::Nil), - Term::Var { name, .. } => { - let name = name.clone(); - Box::new(CompTerm::Var { name }) - } - Term::Lam { name, body, .. } => { - let name = name.clone(); - let body = erase(book, body); - Box::new(CompTerm::Lam { name, body }) - } - Term::App { func, argm, .. } => { - let func = erase(book, func); - let argm = erase(book, argm); - Box::new(CompTerm::App { func, argm }) - } - Term::All { - orig: _, - name: _, - tipo: _, - body: _, - } => Box::new(CompTerm::Nil), - Term::Let { name, expr, body, .. } => { - let name = name.clone(); - let expr = erase(book, expr); - let body = erase(book, body); - Box::new(CompTerm::Let { name, expr, body }) - } - Term::Ann { expr, .. } => erase(book, expr), - Term::Sub { expr, .. } => erase(book, expr), - Term::Ctr { name, args: term_args, .. } => { - let name = name.clone(); - let entr = book.entrs.get(&name).unwrap(); - let mut args = vec![]; - for (idx, arg) in term_args.iter().enumerate() { - if !entr.args[idx].eras { - args.push(erase(book, arg)); - } - } - Box::new(CompTerm::Ctr { name, args }) - } - Term::Fun { name, args: term_args, .. } => { - let name = name.clone(); - let entr = book.entrs.get(&name).unwrap(); - let mut args = vec![]; - for (idx, arg) in term_args.iter().enumerate() { - if !entr.args[idx].eras { - args.push(erase(book, arg)); - } - } - Box::new(CompTerm::Fun { name, args }) - } - Term::Hlp { .. } => Box::new(CompTerm::Nil), - Term::U60 { .. } => Box::new(CompTerm::Nil), - Term::Num { numb, .. } => { - let numb = *numb as u128; - Box::new(CompTerm::Num { numb }) - } - Term::Op2 { oper, val0, val1, .. } => { - let oper = *oper; - let val0 = erase(book, val0); - let val1 = erase(book, val1); - Box::new(CompTerm::Op2 { oper, val0, val1 }) - } - Term::Hol { .. } => Box::new(CompTerm::Nil), - Term::Mat { .. } => Box::new(CompTerm::Nil), - Term::Open { .. } => Box::new(CompTerm::Nil), - } -} - -// Counts usages of a name in an erased term -pub fn count_uses(term: &CompTerm, count_name: &Ident) -> usize { - match term { - CompTerm::Var { name } => { - if name == count_name { - 1 - } else { - 0 - } - } - CompTerm::Lam { name, body } => { - if name == count_name { - 0 - } else { - count_uses(body, count_name) - } - } - CompTerm::App { func, argm } => count_uses(func, count_name) + count_uses(argm, count_name), - CompTerm::Dup { nam0, nam1, expr, body } => { - let expr_count = count_uses(expr, count_name); - let body_count = if nam0 == count_name || nam1 == count_name { 0 } else { count_uses(body, count_name) }; - expr_count + body_count - } - CompTerm::Let { name, expr, body } => { - let expr_count = count_uses(expr, count_name); - let body_count = if name == count_name { 0 } else { count_uses(body, count_name) }; - expr_count + body_count - } - CompTerm::Ctr { args, .. } => { - let mut sum = 0; - for arg in args { - sum += count_uses(arg, count_name); - } - sum - } - CompTerm::Fun { args, .. } => { - let mut sum = 0; - for arg in args { - sum += count_uses(arg, count_name); - } - sum - } - CompTerm::Op2 { val0, val1, .. } => count_uses(val0, count_name) + count_uses(val1, count_name), - CompTerm::Num { .. 
} => 0, - CompTerm::Nil => 0, - } -} - -// Renames a target variable using the fresh names in a vector -pub fn rename_clones(term: &mut CompTerm, target: &Ident, names: &mut Vec) { - match term { - CompTerm::Var { name } => { - if name == target { - *name = names.pop().unwrap(); - } - } - CompTerm::Lam { name, body } => { - if name != target { - rename_clones(body, target, names); - } - } - CompTerm::App { func, argm } => { - rename_clones(func, target, names); - rename_clones(argm, target, names); - } - CompTerm::Dup { nam0, nam1, expr, body } => { - rename_clones(expr, target, names); - if nam0 != target && nam1 != target { - rename_clones(body, target, names); - } - } - CompTerm::Let { name, expr, body } => { - rename_clones(expr, target, names); - if name != target { - rename_clones(body, target, names); - } - } - CompTerm::Ctr { args, .. } => { - for arg in args { - rename_clones(arg, target, names); - } - } - CompTerm::Fun { args, .. } => { - for arg in args { - rename_clones(arg, target, names); - } - } - CompTerm::Op2 { val0, val1, .. } => { - rename_clones(val0, target, names); - rename_clones(val1, target, names); - } - CompTerm::Num { .. } => {} - CompTerm::Nil => {} - } -} - -pub fn linearize_rule(rule: &mut CompRule) { - // Returns left-hand side variables - fn collect_lhs_vars<'a>(term: &'a mut CompTerm, vars: &mut HashMap) { - match term { - CompTerm::Var { name } => { - vars.insert(name.clone(), term); - } - CompTerm::App { func, argm } => { - collect_lhs_vars(func, vars); - collect_lhs_vars(argm, vars); - } - CompTerm::Ctr { args, .. } => { - for arg in args { - collect_lhs_vars(arg, vars); - } - } - CompTerm::Num { .. } => {} - _ => { - panic!("Invalid left-hand side."); - } - } - } - - // linearize_name (Foo x x x x) 'x' 0 - // ---------------------------------------------------------------- - // dup x0 x1 = x; dup x2 x3 = x0; dup x4 x5 = x1; (Foo x2 x3 x4 x5) - // Returns the number of times the variable was used in the body. - pub fn linearize_name(body: &mut CompTerm, name: &mut Ident, fresh: &mut u64) -> usize { - fn fresh_name(fresh: &mut u64) -> Ident { - let name = format!("_{}", fresh); - *fresh += 1; - Ident(name) - } - let uses = count_uses(body, name); - if uses > 1 { - let mut names = vec![]; - for _ in 0..(uses - 1) * 2 { - names.push(fresh_name(fresh)); - } - //println!("-> uses is {}, names is {:?}", uses, names); - let mut renames = vec![]; - for rename in names[names.len() - uses..].iter().rev() { - renames.push(rename.clone()); - } - rename_clones(body, name, &mut renames); - for i in (0..uses - 1).rev() { - let nam0 = names[i * 2].clone(); - let nam1 = names[i * 2 + 1].clone(); - let expr = Box::new(CompTerm::Var { - name: if i == 0 { name.clone() } else { names[i - 1].clone() }, - }); - let new_body = CompTerm::Dup { - nam0, - nam1, - expr, - body: Box::new(CompTerm::Nil), - }; - let old_body = std::mem::replace(body, new_body); - if let CompTerm::Dup { ref mut body, .. 
} = body { - let _ = std::mem::replace(body, Box::new(old_body)); - } - } - } else if uses == 0 { - *name = Ident::new("~"); - } - uses - } - - // Linearies an erased term, replacing cloned variables by dups - pub fn linearize_term(term: &mut CompTerm, fresh: &mut u64) { - //println!("Linearizing: {:?}", term); - match term { - CompTerm::Var { name: _ } => {} - CompTerm::Lam { ref mut name, body } => { - linearize_term(body, fresh); - linearize_name(body, name, fresh); - } - CompTerm::App { func, argm } => { - linearize_term(func, fresh); - linearize_term(argm, fresh); - } - CompTerm::Let { ref mut name, expr, body } => { - linearize_term(expr, fresh); - linearize_term(body, fresh); - linearize_name(body, name, fresh); - } - CompTerm::Ctr { name: _, args } => { - for arg in args { - linearize_term(arg, fresh); - } - } - CompTerm::Fun { name: _, args } => { - for arg in args { - linearize_term(arg, fresh); - } - } - CompTerm::Op2 { oper: _, val0, val1 } => { - linearize_term(val0, fresh); - linearize_term(val1, fresh); - } - CompTerm::Dup { - ref mut nam0, - ref mut nam1, - expr, - body, - .. - } => { - // should be unreachable under normal usage, but I made it anyway - linearize_term(expr, fresh); - linearize_term(body, fresh); - linearize_name(body, nam0, fresh); - linearize_name(body, nam1, fresh); - } - CompTerm::Num { .. } => {} - CompTerm::Nil => {} - } - } - - let mut vars = HashMap::new(); // rule pattern vars - for pat in &mut rule.pats { - collect_lhs_vars(&mut **pat, &mut vars); - } - let mut fresh = 0; - for (mut name, var) in vars.drain() { - // linearizes rule pattern vars - // The &mut here doesn't do anything because we're dropping var immediately afterwards. - // To linearize rule variables, we'll have to replace all LHS occurrences by ~ if the amount of uses is zero - let uses = linearize_name(&mut rule.body, &mut name, &mut fresh); - if uses == 0 { - if let CompTerm::Var { name } = var { - *name = Ident::new("~"); - } - } - // The reason why we don't simply pass a real mutable reference to our variable - // (instead of a mutable reference of a clone) - // to linearize_name is because since `var` is in `body`, we would - // be borrowing `var` mutably twice, which is not allowed. - - // The reason why linearize_name takes in a mutable reference is - // to replace unused vars by ~. This is useful, for example, in - // lambdas. 
(@x0 #0 should be linearized to @~ #0) - } - linearize_term(&mut rule.body, &mut fresh); // linearizes internal bound vars -} - -// Swaps u120 numbers and functions for primitive operations for kindelia compilation -pub fn convert_u120_entry(entry: CompEntry) -> Result { - let mut new_rules = Vec::new(); - for CompRule { name, pats, body } in entry.rules { - let body = convert_u120_term(&body, true)?; - let mut new_pats = Vec::new(); - for pat in pats { - new_pats.push(convert_u120_term(&pat, false)?); - } - new_rules.push(CompRule { name, pats: new_pats, body }); - } - Ok(CompEntry { rules: new_rules, ..entry }) -} - -pub fn convert_u120_term(term: &CompTerm, rhs: bool) -> Result, String> { - let term = Box::new(match term { - // Swap U120.new by a number - CompTerm::Ctr { name, args } => { - if name.0 == "U120.new" { - if let (CompTerm::Num { numb: num1 }, CompTerm::Num { numb: num2 }) = (&*args[0], &*args[1]) { - CompTerm::Num { numb: (num1 << 60) + num2 } - } else if rhs { - let args = args.iter().map(|x| convert_u120_term(x, rhs)).collect::>, String>>()?; - CompTerm::Fun { name: name.clone(), args } - } else { - return Err("Can't compile pattern match on U120 to kindelia".to_string()); - } - } else { - let args = args.iter().map(|x| convert_u120_term(x, rhs)).collect::>, String>>()?; - CompTerm::Ctr { name: name.clone(), args } - } - } - // Swap U120 functions by primitive operations - CompTerm::Fun { name, args } => { - if let Some(oper) = u120_to_oper(name) { - let val0 = convert_u120_term(&*args[0], rhs)?; - let val1 = convert_u120_term(&*args[1], rhs)?; - CompTerm::Op2 { oper, val0, val1 } - } else { - let args = args.iter().map(|x| convert_u120_term(x, rhs)).collect::>, String>>()?; - CompTerm::Fun { name: name.clone(), args } - } - } - CompTerm::Var { name: _ } => term.clone(), - CompTerm::Lam { name, body } => { - let body = convert_u120_term(body, rhs)?; - CompTerm::Lam { name: name.clone(), body } - } - CompTerm::App { func, argm } => { - let func = convert_u120_term(func, rhs)?; - let argm = convert_u120_term(argm, rhs)?; - CompTerm::App { func, argm } - } - CompTerm::Dup { nam0, nam1, expr, body } => { - let expr = convert_u120_term(expr, rhs)?; - let body = convert_u120_term(body, rhs)?; - CompTerm::Dup { - nam0: nam0.clone(), - nam1: nam1.clone(), - expr, - body, - } - } - CompTerm::Let { name, expr, body } => { - let expr = convert_u120_term(expr, rhs)?; - let body = convert_u120_term(body, rhs)?; - CompTerm::Let { name: name.clone(), expr, body } - } - CompTerm::Num { numb: _ } => term.clone(), - CompTerm::Op2 { oper, val0, val1 } => { - let val0 = convert_u120_term(val0, rhs)?; - let val1 = convert_u120_term(val1, rhs)?; - CompTerm::Op2 { oper: *oper, val0, val1 } - } - CompTerm::Nil => { - return Err("Found nil term during compilation".to_string()); - } - }); - Ok(term) -} - -// Converts a U120 function name to the corresponding primitive operation -// None if the name is not of an operation -pub fn u120_to_oper(name: &Ident) -> Option { - match name.0.as_str() { - "U120.add" => Some(Operator::Add), - "U120.sub" => Some(Operator::Sub), - "U120.mul" => Some(Operator::Mul), - "U120.div" => Some(Operator::Div), - "U120.mod" => Some(Operator::Mod), - "U120.bitwise_and" => Some(Operator::And), - "U120.bitwise_or" => Some(Operator::Or), - "U120.bitwise_xor" => Some(Operator::Xor), - "U120.shift_left" => Some(Operator::Shl), - "U120.shift_right" => Some(Operator::Shr), - "U120.num_less_than" => Some(Operator::Ltn), - "U120.num_less_equal" => Some(Operator::Lte), - 
"U120.num_greater_than" => Some(Operator::Gtn), - "U120.num_greater_equal" => Some(Operator::Gte), - "U120.num_equal" => Some(Operator::Eql), - "U120.num_not_equal" => Some(Operator::Neq), - _ => None, - } -} diff --git a/src/codegen/kdl/passes.rs b/src/codegen/kdl/passes.rs new file mode 100644 index 00000000..23230d29 --- /dev/null +++ b/src/codegen/kdl/passes.rs @@ -0,0 +1,1147 @@ +use std::collections::{HashMap, HashSet}; + +use crate::book::{ + Book, + Entry, + name::Ident, + Rule, + term::Operator, + term::Term +}; +use crate::codegen::kdl::book::{CompBook, CompEntry, CompRule, CompTerm}; + +// Removes proof-irrelevant parts of functions +pub fn erase_terms(book: &Book) -> Result { + // True if the compiled term is a valid rule pattern. + // Rule patterns must be normalized terms with only Ctrs, Nums and Vars (no Lams, Dups or Lets) + pub fn is_valid_pattern(pat: &CompTerm) -> Result<(), &CompTerm> { + let mut check_stack: Vec<&CompTerm> = vec![pat]; + while !check_stack.is_empty() { + let term = check_stack.pop().unwrap(); + match term { + CompTerm::Ctr { args, .. } => { + for arg in args { + check_stack.push(arg); + } + }, + CompTerm::Var { .. } => (), + CompTerm::Num { .. } => (), + CompTerm::Lam { .. } => { return Err(term) } + CompTerm::App { .. } => { return Err(term) } + CompTerm::Dup { .. } => { return Err(term) } + CompTerm::Let { .. } => { return Err(term) } + CompTerm::Fun { .. } => { return Err(term) } + CompTerm::Op2 { .. } => { return Err(term) } + CompTerm::Nil { .. } => { return Err(term) } + }; + } + Ok(()) + } + + fn erase_term(book: &Book, term: &Term) -> Box { + match term { + Term::Typ { .. } => Box::new(CompTerm::Nil), + Term::Var { name, .. } => { + let name = name.clone(); + Box::new(CompTerm::Var { name }) + } + Term::Lam { name, body, .. } => { + let name = name.clone(); + let body = erase_term(book, body); + Box::new(CompTerm::Lam { name, body }) + } + Term::App { func, argm, .. } => { + let func = erase_term(book, func); + let argm = erase_term(book, argm); + Box::new(CompTerm::App { func, argm }) + } + Term::All { + orig: _, + name: _, + tipo: _, + body: _, + } => Box::new(CompTerm::Nil), + Term::Let { name, expr, body, .. } => { + let name = name.clone(); + let expr = erase_term(book, expr); + let body = erase_term(book, body); + Box::new(CompTerm::Let { name, expr, body }) + } + Term::Ann { expr, .. } => erase_term(book, expr), + Term::Sub { expr, .. } => erase_term(book, expr), + Term::Ctr { name, args: term_args, .. } => { + let name = name.clone(); + let entr = book.entrs.get(&name).unwrap(); + let mut args = vec![]; + for (idx, arg) in term_args.iter().enumerate() { + if !entr.args[idx].eras { + args.push(erase_term(book, arg)); + } + } + Box::new(CompTerm::Ctr { name, args }) + } + Term::Fun { name, args: term_args, .. } => { + let name = name.clone(); + let entr = book.entrs.get(&name).unwrap(); + let mut args = vec![]; + for (idx, arg) in term_args.iter().enumerate() { + if !entr.args[idx].eras { + args.push(erase_term(book, arg)); + } + } + Box::new(CompTerm::Fun { name, args }) + } + Term::Hlp { .. } => Box::new(CompTerm::Nil), + Term::U60 { .. } => Box::new(CompTerm::Nil), + Term::Num { numb, .. } => { + let numb = *numb as u128; + Box::new(CompTerm::Num { numb }) + } + Term::Op2 { oper, val0, val1, .. } => { + let oper = *oper; + let val0 = erase_term(book, val0); + let val1 = erase_term(book, val1); + Box::new(CompTerm::Op2 { oper, val0, val1 }) + } + Term::Hol { .. } => Box::new(CompTerm::Nil), + Term::Mat { .. 
} => Box::new(CompTerm::Nil), + Term::Open { .. } => Box::new(CompTerm::Nil), + } + } + + fn erase_rule(book: &Book, entry: &Entry, rule: &Rule) -> Result { + let name = rule.name.clone(); + let mut pats = Vec::new(); + let mut errs = Vec::new(); + for (arg, pat) in entry.args.iter().zip(rule.pats.iter()) { + if !arg.eras { + let pat = erase_term(book, pat); + if let Err(err_term) = is_valid_pattern(&*pat) { + // TODO: Add Display trait for compterms + // TODO: Tell the user exactly why this term is incorrect + let err = format!("Found invalid term \"{:?}\" in rule pattern matching for entry \"{}\".", err_term, entry.name); + errs.push(err); + } else { + pats.push(pat); + } + } + } + if errs.is_empty() { + let body = erase_term(book, &rule.body); + Ok(CompRule { name, pats, body }) + } else { + Err(errs.join("\n")) + } + } + + fn erase_entry(book: &Book, entry: &Entry) -> Result { + let name = entry.name.clone(); + let args = entry.args.iter().filter(|x| !x.eras).map(|x| x.name.clone()).collect(); + let mut rules = Vec::new(); + let mut errs = Vec::new(); + for rule in &entry.rules { + match erase_rule(book, entry, rule) { + Ok(rule) => rules.push(rule), + Err(err) => errs.push(err), + } + } + if errs.is_empty() { + let attrs = entry.attrs.clone(); + let entry = CompEntry { name, args, rules, attrs, orig: true }; + Ok(entry) + } else { + Err(errs.join("\n")) + } + } + + let mut names = Vec::new(); + let mut entrs = HashMap::new(); + let mut errs = Vec::new(); + for name in &book.names { + let entry = book.entrs.get(&name).unwrap(); + names.push(name.clone()); + match erase_entry(book, entry) { + Ok(entry) => { entrs.insert(name.clone(), entry); } + Err(err) => { errs.push(err); } + } + } + if errs.is_empty() { + Ok(CompBook { names, entrs }) + } else { + Err(errs.join("\n")) + } +} + +pub fn erase_funs(book: Book) -> Result { + let mut book = book; + let mut names = Vec::new(); + let mut entrs = HashMap::new(); + for name in book.names { + let entry = book.entrs.remove(&name).unwrap(); + if matches!(&*entry.tipo, Term::Typ { .. }) { + continue; + } + entrs.insert(name.clone(), entry); + names.push(name); + } + let book = Book { entrs, names, holes: book.holes }; + Ok(book) +} + +// Splits an entry with rules with nested cases into multiple entries with flattened rules. +pub fn flatten(book: CompBook) -> Result { + fn post_inc(n: &mut u64) -> u64 { + let old_n = *n; + *n += 1; + old_n + } + + fn must_split(rule: &CompRule) -> bool { + for pat in &rule.pats { + if let CompTerm::Ctr { args, .. } = &**pat { + for arg in args { + if matches!(&**arg, CompTerm::Ctr { .. } | CompTerm::Num { .. }) { + return true; + } + } + } + } + false + } + + // return true on the first if both rules always match together + fn matches_together(a: &CompRule, b: &CompRule) -> (bool, bool) { + let mut same_shape = true; + for (a_pat, b_pat) in a.pats.iter().zip(&b.pats) { + match (&**a_pat, &**b_pat) { + (CompTerm::Ctr { name: a_name, .. }, CompTerm::Ctr { name: b_name, .. }) => { + if a_name != b_name { + return (false, false); + } + } + (CompTerm::Num { numb: a_numb }, CompTerm::Num { numb: b_numb }) => { + if a_numb != b_numb { + return (false, false); + } + } + (CompTerm::Ctr { .. }, CompTerm::Num { .. }) => { + return (false, false); + } + (CompTerm::Num { .. }, CompTerm::Ctr { .. }) => { + return (false, false); + } + (CompTerm::Ctr { .. }, CompTerm::Var { .. }) => { + same_shape = false; + } + (CompTerm::Num { .. }, CompTerm::Var { .. 
}) => { + same_shape = false; + } + _ => {} + } + } + (true, same_shape) + } + + fn split_rule(rule: &CompRule, entry: &CompEntry, i: usize, name_count: &mut u64, skip: &mut HashSet) -> (CompRule, Vec) { + // Each rule that must be split creates a new entry that inspects one layer of Ctrs + // The old rule is rewritten to be flat and call the new entry + let n = post_inc(name_count); + let new_entry_name = Ident(format!("{}{}_", entry.name, n)); + let mut new_entry_attrs = entry.attrs.clone(); + // If the old rule had a kdl name, create a new kdl name for the split entry + for attr in &mut new_entry_attrs { + if attr.name.0 == "kdl_name" { + let old_kdln = attr.value.as_ref().unwrap(); // Checked before in adjust step + let new_kdln = Ident(format!("{}{}_", old_kdln, n)); + attr.value = Some(new_kdln); + break; + } + } + let mut new_entry_rules: Vec = Vec::new(); + // Rewrite the old rule to be flat and point to the new entry + let mut old_rule_pats: Vec> = Vec::new(); + let mut old_rule_body_args: Vec> = Vec::new(); + let mut var_count = 0; + for pat in &rule.pats { + match &**pat { + CompTerm::Ctr { name: pat_name, args: pat_args } => { + let mut new_pat_args = Vec::new(); + for field in pat_args { + let arg = match &**field { + CompTerm::Ctr { .. } | CompTerm::Num { .. } => { + let name = Ident(format!(".{}", post_inc(&mut var_count))); + Box::new(CompTerm::Var { name }) + } + CompTerm::Var { .. } => field.clone(), + _ => { + panic!("?"); + } + }; + new_pat_args.push(arg.clone()); + old_rule_body_args.push(arg); + } + old_rule_pats.push(Box::new(CompTerm::Ctr { + name: pat_name.clone(), + args: new_pat_args, + })); + } + CompTerm::Var { name } => { + old_rule_pats.push(pat.clone()); + old_rule_body_args.push(Box::new(CompTerm::Var { name: name.clone() })); + } + CompTerm::Num { .. } => { + old_rule_pats.push(pat.clone()); + } + _ => { + panic!("Found invalid pattern \"{:?}\" while flattening entry \"{}\".", pat, entry.name); + } + } + } + let old_rule_body = Box::new(CompTerm::Fun { + name: new_entry_name.clone(), + args: old_rule_body_args, + }); + let old_rule = CompRule { + name: entry.name.clone(), + pats: old_rule_pats, + body: old_rule_body, + }; + //(Foo Tic (Bar a b) (Haz c d)) = A + //(Foo Tic x y) = B + //--------------------------------- + //(Foo Tic (Bar a b) (Haz c d)) = B[x <- (Bar a b), y <- (Haz c d)] + // + //(Foo.0 a b c d) = ... + + // Check the rules to see if there's any that will be covered by the new entry, including the rule itself. + // Skips previously checked rules to avoid duplication. + // For each unique matching rule, creates a new flattening rule for the entry. + // Ex: (Fun (Ctr1 (Ctr2))) and (Fun (Ctr1 (Ctr3))) will both flatten to (Fun (Ctr1 .0)) and can be merged + for (j, other) in entry.rules.iter().enumerate().skip(i) { + let (compatible, same_shape) = matches_together(rule, other); + if compatible { + // (Foo a (B x P) (C y0 y1)) = F + // (Foo (A k) (B x Q) y ) = G + // ----------------------------- + // (Foo a (B x u) (C y0 y1)) = (Foo.0 a x u y0 y1) + // (Foo.0 a x P y0 y1) = F + // (Foo.0 (A k) x Q f0 f1) = G [y <- (C f0 f1)] // f0 and f1 are fresh + + // Skip identical rules + if same_shape { + skip.insert(j); + } + let mut new_rule_pats = Vec::new(); + let mut new_rule_body = other.body.clone(); + for (rule_pat, other_pat) in rule.pats.iter().zip(&other.pats) { + match (&**rule_pat, &**other_pat) { + (CompTerm::Ctr { .. }, CompTerm::Ctr { args: other_pat_args, .. 
}) => { + // Bring the arguments of a constructor outside + new_rule_pats.extend(other_pat_args.clone()); + } + ( + CompTerm::Ctr { + name: rule_pat_name, + args: rule_pat_args, + }, + CompTerm::Var { name: other_pat_name }, + ) => { + let mut new_ctr_args = vec![]; + for _ in 0..rule_pat_args.len() { + let new_arg = CompTerm::Var { + name: Ident(format!(".{}", post_inc(&mut var_count))), + }; + new_ctr_args.push(Box::new(new_arg.clone())); + new_rule_pats.push(Box::new(new_arg)); + } + let new_ctr = CompTerm::Ctr { + name: rule_pat_name.clone(), + args: new_ctr_args, + }; + subst(&mut new_rule_body, other_pat_name, &new_ctr); + } + (CompTerm::Var { .. }, _) => { + new_rule_pats.push(other_pat.clone()); + } + // Nums are like Ctr with no args, so nothing to bring out + (CompTerm::Num { .. }, CompTerm::Num { .. }) => (), + (CompTerm::Num { .. }, CompTerm::Var { name: other_pat_name }) => { + subst(&mut new_rule_body, other_pat_name, rule_pat); + } + _ => { + panic!("Internal error. Please report."); // not possible since it matches + } + } + } + let new_rule = CompRule { + name: new_entry_name.clone(), + pats: new_rule_pats, + body: new_rule_body, + }; + new_entry_rules.push(new_rule); + } + } + assert!(!new_entry_rules.is_empty()); // There's at least one rule, since rules always match with themselves + let new_entry_args = (0..new_entry_rules[0].pats.len()).map(|n| Ident(format!("x{}", n))).collect(); + let new_entry = CompEntry { + name: new_entry_name, + args: new_entry_args, + rules: new_entry_rules, + attrs: new_entry_attrs, + orig: false, + }; + let new_split_entries = flatten_entry(new_entry); + (old_rule, new_split_entries) + } + + fn flatten_entry(entry: CompEntry) -> Vec { + let mut name_count = 0; + + let mut skip: HashSet = HashSet::new(); + let mut new_entries: Vec = Vec::new(); + let mut old_entry_rules: Vec = Vec::new(); + for i in 0..entry.rules.len() { + if !skip.contains(&i) { + let rule = &entry.rules[i]; + if must_split(rule) { + let (old_rule, split_entries) = split_rule(rule, &entry, i, &mut name_count, &mut skip); + old_entry_rules.push(old_rule); + new_entries.extend(split_entries); + } else { + old_entry_rules.push(entry.rules[i].clone()); + } + } + } + let old_entry = CompEntry { + name: entry.name, + args: entry.args, + rules: old_entry_rules, + orig: entry.orig, + attrs: entry.attrs, + }; + new_entries.push(old_entry); + new_entries + } + + let mut book = book; + let mut names = Vec::new(); + let mut entrs = HashMap::new(); + for name in book.names { + let entry = book.entrs.remove(&name).unwrap(); + for entry in flatten_entry(entry) { + names.push(entry.name.clone()); + entrs.insert(entry.name.clone(), entry); + } + } + let book = CompBook { names, entrs }; + Ok(book) +} + +pub fn linearize_rules(book: CompBook) -> Result { + // Returns left-hand side variables + fn collect_lhs_vars<'a>(term: &'a mut CompTerm, vars: &mut HashMap) { + match term { + CompTerm::Var { name } => { + vars.insert(name.clone(), term); + } + CompTerm::App { func, argm } => { + collect_lhs_vars(func, vars); + collect_lhs_vars(argm, vars); + } + CompTerm::Ctr { args, .. } => { + for arg in args { + collect_lhs_vars(arg, vars); + } + } + CompTerm::Num { .. 
} => {} + _ => { + panic!("Invalid left-hand side."); + } + } + } + + // Renames a target variable using the fresh names in a vector + pub fn rename_clones(term: &mut CompTerm, target: &Ident, names: &mut Vec) { + match term { + CompTerm::Var { name } => { + if name == target { + *name = names.pop().unwrap(); + } + } + CompTerm::Lam { name, body } => { + if name != target { + rename_clones(body, target, names); + } + } + CompTerm::App { func, argm } => { + rename_clones(func, target, names); + rename_clones(argm, target, names); + } + CompTerm::Dup { nam0, nam1, expr, body } => { + rename_clones(expr, target, names); + if nam0 != target && nam1 != target { + rename_clones(body, target, names); + } + } + CompTerm::Let { name, expr, body } => { + rename_clones(expr, target, names); + if name != target { + rename_clones(body, target, names); + } + } + CompTerm::Ctr { args, .. } => { + for arg in args { + rename_clones(arg, target, names); + } + } + CompTerm::Fun { args, .. } => { + for arg in args { + rename_clones(arg, target, names); + } + } + CompTerm::Op2 { val0, val1, .. } => { + rename_clones(val0, target, names); + rename_clones(val1, target, names); + } + CompTerm::Num { .. } => {} + CompTerm::Nil => {} + } + } + + // Counts usages of a name in an erased term + pub fn count_uses(term: &CompTerm, count_name: &Ident) -> usize { + match term { + CompTerm::Var { name } => { + if name == count_name { + 1 + } else { + 0 + } + } + CompTerm::Lam { name, body } => { + if name == count_name { + 0 + } else { + count_uses(body, count_name) + } + } + CompTerm::App { func, argm } => count_uses(func, count_name) + count_uses(argm, count_name), + CompTerm::Dup { nam0, nam1, expr, body } => { + let expr_count = count_uses(expr, count_name); + let body_count = if nam0 == count_name || nam1 == count_name { 0 } else { count_uses(body, count_name) }; + expr_count + body_count + } + CompTerm::Let { name, expr, body } => { + let expr_count = count_uses(expr, count_name); + let body_count = if name == count_name { 0 } else { count_uses(body, count_name) }; + expr_count + body_count + } + CompTerm::Ctr { args, .. } => { + let mut sum = 0; + for arg in args { + sum += count_uses(arg, count_name); + } + sum + } + CompTerm::Fun { args, .. } => { + let mut sum = 0; + for arg in args { + sum += count_uses(arg, count_name); + } + sum + } + CompTerm::Op2 { val0, val1, .. } => count_uses(val0, count_name) + count_uses(val1, count_name), + CompTerm::Num { .. } => 0, + CompTerm::Nil => 0, + } + } + + // linearize_name (Foo x x x x) 'x' 0 + // ---------------------------------------------------------------- + // dup x0 x1 = x; dup x2 x3 = x0; dup x4 x5 = x1; (Foo x2 x3 x4 x5) + // Returns the number of times the variable was used in the body. 
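// Note: an HVM-style dup splits a value into exactly two copies, so a variable used `uses`
// times needs `uses - 1` chained dups; that is why the function below generates
// 2 * (uses - 1) fresh names and wraps the body in `uses - 1` nested dup nodes.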
+ pub fn linearize_name(body: &mut CompTerm, name: &mut Ident, fresh: &mut u64) -> usize { + fn fresh_name(fresh: &mut u64) -> Ident { + let name = format!("_{}", fresh); + *fresh += 1; + Ident(name) + } + let uses = count_uses(body, name); + if uses > 1 { + let mut names = vec![]; + for _ in 0..(uses - 1) * 2 { + names.push(fresh_name(fresh)); + } + //println!("-> uses is {}, names is {:?}", uses, names); + let mut renames = vec![]; + for rename in names[names.len() - uses..].iter().rev() { + renames.push(rename.clone()); + } + rename_clones(body, name, &mut renames); + for i in (0..uses - 1).rev() { + let nam0 = names[i * 2].clone(); + let nam1 = names[i * 2 + 1].clone(); + let expr = Box::new(CompTerm::Var { + name: if i == 0 { name.clone() } else { names[i - 1].clone() }, + }); + let new_body = CompTerm::Dup { + nam0, + nam1, + expr, + body: Box::new(CompTerm::Nil), + }; + let old_body = std::mem::replace(body, new_body); + if let CompTerm::Dup { ref mut body, .. } = body { + let _ = std::mem::replace(body, Box::new(old_body)); + } + } + } else if uses == 0 { + *name = Ident::new("~"); + } + uses + } + + // Linearies an erased term, replacing cloned variables by dups + pub fn linearize_term(term: &mut CompTerm, fresh: &mut u64) { + //println!("Linearizing: {:?}", term); + match term { + CompTerm::Var { name: _ } => {} + CompTerm::Lam { ref mut name, body } => { + linearize_term(body, fresh); + linearize_name(body, name, fresh); + } + CompTerm::App { func, argm } => { + linearize_term(func, fresh); + linearize_term(argm, fresh); + } + CompTerm::Let { ref mut name, expr, body } => { + linearize_term(expr, fresh); + linearize_term(body, fresh); + linearize_name(body, name, fresh); + } + CompTerm::Ctr { name: _, args } => { + for arg in args { + linearize_term(arg, fresh); + } + } + CompTerm::Fun { name: _, args } => { + for arg in args { + linearize_term(arg, fresh); + } + } + CompTerm::Op2 { oper: _, val0, val1 } => { + linearize_term(val0, fresh); + linearize_term(val1, fresh); + } + CompTerm::Dup { + ref mut nam0, + ref mut nam1, + expr, + body, + .. + } => { + // should be unreachable under normal usage, but I made it anyway + linearize_term(expr, fresh); + linearize_term(body, fresh); + linearize_name(body, nam0, fresh); + linearize_name(body, nam1, fresh); + } + CompTerm::Num { .. } => {} + CompTerm::Nil => {} + } + } + + pub fn linearize_rule(rule: &mut CompRule) { + let mut vars = HashMap::new(); // rule pattern vars + for pat in &mut rule.pats { + collect_lhs_vars(&mut **pat, &mut vars); + } + let mut fresh = 0; + for (mut name, var) in vars.drain() { + // linearizes rule pattern vars + // The &mut here doesn't do anything because we're dropping var immediately afterwards. + // To linearize rule variables, we'll have to replace all LHS occurrences by ~ if the amount of uses is zero + let uses = linearize_name(&mut rule.body, &mut name, &mut fresh); + if uses == 0 { + if let CompTerm::Var { name } = var { + *name = Ident::new("~"); + } + } + // The reason why we don't simply pass a real mutable reference to our variable + // (instead of a mutable reference of a clone) + // to linearize_name is because since `var` is in `body`, we would + // be borrowing `var` mutably twice, which is not allowed. + + // The reason why linearize_name takes in a mutable reference is + // to replace unused vars by ~. This is useful, for example, in + // lambdas. 
(@x0 #0 should be linearized to @~ #0) + } + linearize_term(&mut rule.body, &mut fresh); // linearizes internal bound vars + } + + let mut book = book; + for name in &book.names { + let entry = book.entrs.get_mut(name).unwrap(); + for rule in &mut entry.rules { + linearize_rule(rule); + } + } + Ok(book) +} + +pub fn inline(book: CompBook) -> Result { + fn replace_inlines(book: &CompBook, term: &CompTerm) -> Result, String> { + let new_term = match term { + CompTerm::Fun { name, args } => { + // First we substitute nested inline applications + // This expands the number of inline functions we can accept + // This is also inefficient since we are going over the tree more times than needed + let mut new_args = Vec::new(); + for arg in args { + new_args.push(replace_inlines(book, arg)?); + } + let fn_entry = book.entrs.get(name).unwrap(); + if fn_entry.get_attribute("inline").is_some() { + // Substitute an inlined function application directly by the rewrite on compilation + let new_term = subst_inline_term(fn_entry, &name, &new_args)?; + // The substituted term could still have nested inline functions, so continue recursing + replace_inlines(book, &*new_term)? + } else { + // Non inlined functions are just copied like other terms + Box::new(CompTerm::Fun { name: name.clone(), args: new_args }) + } + } + CompTerm::Var { name } => Box::new(CompTerm::Var { name: name.clone() }), + CompTerm::Lam { name, body } => Box::new(CompTerm::Lam { + name: name.clone(), + body: replace_inlines(book, body)?, + }), + CompTerm::App { func, argm } => Box::new(CompTerm::App { + func: replace_inlines(book, func)?, + argm: replace_inlines(book, argm)?, + }), + CompTerm::Dup { nam0, nam1, expr, body } => Box::new(CompTerm::Dup { + nam0: nam0.clone(), + nam1: nam1.clone(), + expr: replace_inlines(book, expr)?, + body: replace_inlines(book, body)?, + }), + CompTerm::Let { name, expr, body } => Box::new(CompTerm::Let { + name: name.clone(), + expr: replace_inlines(book, expr)?, + body: replace_inlines(book, body)?, + }), + CompTerm::Ctr { name, args } => { + let mut new_args = Vec::new(); + for arg in args { + new_args.push(replace_inlines(book, arg)?); + } + Box::new(CompTerm::Ctr { + name: name.clone(), + args: new_args, + }) + } + CompTerm::Num { numb } => Box::new(CompTerm::Num { numb: numb.clone() }), + CompTerm::Op2 { oper, val0, val1 } => Box::new(CompTerm::Op2 { + oper: oper.clone(), + val0: replace_inlines(book, val0)?, + val1: replace_inlines(book, val1)?, + }), + CompTerm::Nil => Box::new(CompTerm::Nil), + }; + Ok(new_term) + } + + fn subst_inline_term(entry: &CompEntry, name: &Ident, args: &[Box]) -> Result, String> { + let mut new_term = Box::new(CompTerm::Nil); // ugly + let mut found_match = false; + for rule in &entry.rules { + if fun_matches_rule(args, rule) { + // Clone the rule body and for each variable in the pats, subst in the body + // This is the new inlined term + new_term = rule.body.clone(); + let mut subst_stack: Vec<(&Box, &Box)> = + args.iter().zip(rule.pats.iter()).collect(); + while !subst_stack.is_empty() { + let (arg, pat) = subst_stack.pop().unwrap(); + match (&**arg, &**pat) { + (CompTerm::Ctr { args: arg_args, .. 
}, CompTerm::Ctr { args: pat_args, ..}) => { + let to_sub: Vec<(&Box, &Box)> = + arg_args.iter().zip(pat_args.iter()).collect(); + subst_stack.extend(to_sub); + } + (arg, CompTerm::Var { name }) => { + subst(&mut *new_term, &name, arg); + } + _ => () + } + } + found_match = true; + break; + } + } + if found_match { + Ok(new_term) + } else { + let term = CompTerm::Fun { name: name.clone(), args: args.to_vec() }; + Err(format!("Unable to match term {:?} to any of the function's rules", term)) + } + } + + // Inlining before flattening avoids some complexity + // TODO: Could be much faster to start inlining from leaves to avoid repeating work on nested inlined functions + // TODO: Currently breaks on cyclic recursion of inlined functions. We need to add a check for this. + let mut names = Vec::new(); + let mut entrs = HashMap::new(); + for name in &book.names { + let entry = book.entrs.get(name).unwrap(); + // Inlined functions can be removed from the book since they wont be called anywhere + if entry.get_attribute("inline").is_some() { + continue; + } + let mut rules = Vec::new(); + for rule in &entry.rules { + let body = replace_inlines(&book, &*rule.body)?; + let rule = CompRule { body, ..rule.clone() }; + rules.push(rule); + } + let entry = CompEntry { rules, ..entry.clone() }; + names.push(name.clone()); + entrs.insert(name.clone(), entry); + } + let book = CompBook { names, entrs }; + Ok(book) +} + +pub fn remove_u120_opers(book: CompBook) -> Result { + // opers and new/high/low + fn make_u120_new(old_entry: &CompEntry) -> CompEntry { + // U120.new hi lo = (+ (<< hi 60) (>> (<< lo 60) 60)) + CompEntry { + name: Ident::new("U120.new"), + args: vec![Ident::new("hi"), Ident::new("lo")], + rules: vec![CompRule { + name: Ident::new("U120.new"), + pats: vec![ + Box::new(CompTerm::Var { name: Ident::new("hi") }), + Box::new(CompTerm::Var { name: Ident::new("lo") }) + ], + body: Box::new(CompTerm::Op2 { + oper: Operator::Add, + val0: Box::new(CompTerm::Op2 { + oper: Operator::Shl, + val0: Box::new(CompTerm::Var { name: Ident::new("hi") }), + val1: Box::new(CompTerm::Num { numb: 60 }), + }), + val1: Box::new(CompTerm::Op2 { + oper: Operator::Shr, + val0: Box::new(CompTerm::Op2 { + oper: Operator::Shl, + val0: Box::new(CompTerm::Var { name: Ident::new("lo") }), + val1: Box::new(CompTerm::Num { numb: 60 }), + }), + val1: Box::new(CompTerm::Num { numb: 60 }), + }), + }), + }], + orig: true, + attrs: old_entry.attrs.clone(), + } + } + + fn make_u120_low(old_entry: &CompEntry) -> CompEntry { + // U120.low n = (>> (<< n 60) 60)) + CompEntry { + name: Ident::new("U120.low"), + args: vec![Ident::new("n")], + rules: vec![CompRule { + name: Ident::new("U120.low"), + pats: vec![Box::new(CompTerm::Var { name: Ident::new("n") })], + body: Box::new(CompTerm::Op2 { + oper: Operator::Shr, + val0: Box::new(CompTerm::Op2 { + oper: Operator::Shl, + val0: Box::new(CompTerm::Var { name: Ident::new("n") }), + val1: Box::new(CompTerm::Num { numb: 60 }), + }), + val1: Box::new(CompTerm::Num { numb: 60 }), + }), + }], + orig: true, + attrs: old_entry.attrs.clone(), + } + } + + fn make_u120_high(old_entry: &CompEntry) -> CompEntry { + // U120.high n = (>> n 60) + CompEntry { + name: Ident::new("U120.high"), + args: vec![Ident::new("n")], + rules: vec![CompRule { + name: Ident::new("U120.high"), + pats: vec![Box::new(CompTerm::Var { name: Ident::new("n") })], + body: Box::new(CompTerm::Op2 { + oper: Operator::Shr, + val0: Box::new(CompTerm::Var { name: Ident::new("n") }), + val1: Box::new(CompTerm::Num { numb: 60 }), + }), 
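// Worked example for the U120 helpers defined here (illustrative, assuming kindelia's
// 120-bit wrap-around arithmetic): with hi = 1 and lo = 2, U120.new produces
// (1 << 60) + 2 = 0x1000000000000002; U120.high recovers 1 via (>> n 60), and U120.low
// recovers 2 via (>> (<< n 60) 60), since the left shift pushes the high word out of the
// 120-bit range before shifting back.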
+ }], + orig: true, + attrs: old_entry.attrs.clone(), + } + } + + // Remove functions that correspond to primitive u120 operators + let mut book = book; + let mut names = Vec::new(); + let mut entrs = HashMap::new(); + for name in book.names { + if u120_to_oper(&name).is_some() { + continue; + } + let entry = book.entrs.remove(&name).unwrap(); + entrs.insert(name.clone(), entry); + names.push(name); + } + let mut book = CompBook { names, entrs }; + + // These basic U120 functions need a special compilation for kdl + // U120.new becomes a special function that joins two numbers as if they were U60s + if let Some(entry) = book.entrs.get(&Ident::new("U120.new")) { + book.entrs.insert(Ident::new("U120.new"), make_u120_new(entry)); + } + // high and low are used for type compatibility with u60 + // TODO: We could rewrite these both to not need this workaround, but it would become rather slow on normal HVM (~100 rewrites instead of 1) + if let Some(entry) = book.entrs.get(&Ident::new("U120.low")) { + book.entrs.insert(Ident::new("U120.low"), make_u120_low(entry)); + } + if let Some(entry) = book.entrs.get(&Ident::new("U120.high")) { + book.entrs.insert(Ident::new("U120.high"), make_u120_high(entry)); + } + + Ok(book) +} + +// TODO: We probably need to handle U60 separately as well. +// Since they compile to U120, it wont overflow as expected and conversion to signed will fail. +pub fn convert_u120_uses(book: CompBook) -> Result { + // Swaps u120 numbers and functions for primitive operations for kindelia compilation + pub fn convert_u120_entry(entry: CompEntry) -> Result { + let mut new_rules = Vec::new(); + for CompRule { name, pats, body } in entry.rules { + let body = convert_u120_term(&body, true)?; + let mut new_pats = Vec::new(); + for pat in pats { + new_pats.push(convert_u120_term(&pat, false)?); + } + new_rules.push(CompRule { name, pats: new_pats, body }); + } + Ok(CompEntry { rules: new_rules, ..entry }) + } + + pub fn convert_u120_term(term: &CompTerm, rhs: bool) -> Result, String> { + let term = Box::new(match term { + // Swap U120.new by a number + CompTerm::Ctr { name, args } => { + if name.0 == "U120.new" { + if let (CompTerm::Num { numb: num1 }, CompTerm::Num { numb: num2 }) = (&*args[0], &*args[1]) { + CompTerm::Num { numb: (num1 << 60) + num2 } + } else if rhs { + let args = args.iter().map(|x| convert_u120_term(x, rhs)).collect::>, String>>()?; + CompTerm::Fun { name: name.clone(), args } + } else { + return Err("Can't compile pattern match on U120 to kindelia".to_string()); + } + } else { + let args = args.iter().map(|x| convert_u120_term(x, rhs)).collect::>, String>>()?; + CompTerm::Ctr { name: name.clone(), args } + } + } + // Swap U120 functions by primitive operations + CompTerm::Fun { name, args } => { + if let Some(oper) = u120_to_oper(name) { + let val0 = convert_u120_term(&*args[0], rhs)?; + let val1 = convert_u120_term(&*args[1], rhs)?; + CompTerm::Op2 { oper, val0, val1 } + } else { + let args = args.iter().map(|x| convert_u120_term(x, rhs)).collect::>, String>>()?; + CompTerm::Fun { name: name.clone(), args } + } + } + CompTerm::Var { name: _ } => term.clone(), + CompTerm::Lam { name, body } => { + let body = convert_u120_term(body, rhs)?; + CompTerm::Lam { name: name.clone(), body } + } + CompTerm::App { func, argm } => { + let func = convert_u120_term(func, rhs)?; + let argm = convert_u120_term(argm, rhs)?; + CompTerm::App { func, argm } + } + CompTerm::Dup { nam0, nam1, expr, body } => { + let expr = convert_u120_term(expr, rhs)?; + let body = 
convert_u120_term(body, rhs)?; + CompTerm::Dup { + nam0: nam0.clone(), + nam1: nam1.clone(), + expr, + body, + } + } + CompTerm::Let { name, expr, body } => { + let expr = convert_u120_term(expr, rhs)?; + let body = convert_u120_term(body, rhs)?; + CompTerm::Let { name: name.clone(), expr, body } + } + CompTerm::Num { numb: _ } => term.clone(), + CompTerm::Op2 { oper, val0, val1 } => { + let val0 = convert_u120_term(val0, rhs)?; + let val1 = convert_u120_term(val1, rhs)?; + CompTerm::Op2 { oper: *oper, val0, val1 } + } + CompTerm::Nil => { + return Err("Found nil term during compilation".to_string()); + } + }); + Ok(term) + } + + let mut book = book; + let mut names = Vec::new(); + let mut entrs = HashMap::new(); + let mut errs = Vec::new(); + for name in book.names { + let entry = book.entrs.remove(&name).unwrap(); + match convert_u120_entry(entry) { + Ok(entry) => { + entrs.insert(name.clone(), entry); + names.push(name); + } + Err(err) => { + errs.push(err); + } + } + } + if errs.is_empty() { + let book = CompBook { entrs, names }; + Ok(book) + } else { + Err(errs.join(" ")) + } +} + + +// Utils +// ----- + +// Converts a U120 function name to the corresponding primitive operation +// None if the name is not of an operation +pub fn u120_to_oper(name: &Ident) -> Option { + match name.0.as_str() { + "U120.add" => Some(Operator::Add), + "U120.sub" => Some(Operator::Sub), + "U120.mul" => Some(Operator::Mul), + "U120.div" => Some(Operator::Div), + "U120.mod" => Some(Operator::Mod), + "U120.bitwise_and" => Some(Operator::And), + "U120.bitwise_or" => Some(Operator::Or), + "U120.bitwise_xor" => Some(Operator::Xor), + "U120.shift_left" => Some(Operator::Shl), + "U120.shift_right" => Some(Operator::Shr), + "U120.num_less_than" => Some(Operator::Ltn), + "U120.num_less_equal" => Some(Operator::Lte), + "U120.num_greater_than" => Some(Operator::Gtn), + "U120.num_greater_equal" => Some(Operator::Gte), + "U120.num_equal" => Some(Operator::Eql), + "U120.num_not_equal" => Some(Operator::Neq), + _ => None, + } +} + +// Substitute all instances of a variable in a term with another term +pub fn subst(term: &mut CompTerm, sub_name: &Ident, value: &CompTerm) { + match term { + CompTerm::Var { name } => { + if sub_name == name { + *term = value.clone(); + } + } + CompTerm::Dup { nam0, nam1, expr, body } => { + subst(&mut *expr, sub_name, value); + if nam0 != sub_name && nam1 != sub_name { + subst(&mut *body, sub_name, value); + } + } + CompTerm::Let { name, expr, body } => { + subst(&mut *expr, sub_name, value); + if name != sub_name { + subst(&mut *body, sub_name, value); + } + } + CompTerm::Lam { name, body } => { + if name != sub_name { + subst(&mut *body, sub_name, value); + } + } + CompTerm::App { func, argm } => { + subst(&mut *func, sub_name, value); + subst(&mut *argm, sub_name, value); + } + CompTerm::Ctr { args, .. } => { + for arg in args { + subst(&mut *arg, sub_name, value); + } + } + CompTerm::Fun { args, .. } => { + for arg in args { + subst(&mut *arg, sub_name, value); + } + } + CompTerm::Num { .. } => {} + CompTerm::Op2 { val0, val1, .. 
} => { + subst(&mut *val0, sub_name, value); + subst(&mut *val1, sub_name, value); + } + CompTerm::Nil => {} + } +} + +pub fn fun_matches_rule (args: &[Box], rule: &CompRule) -> bool { + for (arg, pat) in args.iter().zip(rule.pats.iter()) { + let matches = term_matches_pattern(arg, pat); + if !matches { + return false; + } + } + true +} + +pub fn term_matches_pattern (term: &CompTerm, pat: &CompTerm) -> bool { + let mut check_stack = vec![(term, pat)]; + while !check_stack.is_empty() { + let (term, pat) = check_stack.pop().unwrap(); + match (term, pat) { + // For Ctr, check that the args also match + (CompTerm::Ctr { args: term_args, .. }, CompTerm::Ctr { args: pat_args, .. }) => { + for (arg, pat) in term_args.iter().zip(pat_args.iter()) { + check_stack.push((arg, pat)); + } + } + // Nums need to be the same + (CompTerm::Num { numb: term_numb }, CompTerm::Num { numb: pat_numb }) => { + if term_numb != pat_numb { + return false + } + } + // If the pattern is a variable we accept it if the term is weak head normal + (CompTerm::Ctr { .. }, CompTerm::Var { .. }) => (), + (CompTerm::Num { .. }, CompTerm::Var { .. }) => (), + (CompTerm::Lam { .. }, CompTerm::Var { .. }) => (), + // TODO: Unless we actually reduce the terms, we can only do a very simple 1-to-1 matching + // The only exception is with inlined functions, but we do them separately before. + // So, if we need to do some rewriting, we fail since we can't know the right rule + _ => return false, + } + } + true +} \ No newline at end of file diff --git a/src/driver.rs b/src/driver.rs index e69b6a6a..9de49f6d 100644 --- a/src/driver.rs +++ b/src/driver.rs @@ -9,7 +9,6 @@ use crate::codegen; use crate::derive; use crate::driver::loader::{load, File}; use crate::parser::new_type; -use crate::codegen::kdl::KDL_NAME_LEN; use crate::driver::config::Config; @@ -218,14 +217,13 @@ pub fn cmd_run_main(config: &Config, path: &str) -> Result<(), String> { pub fn cmd_to_kdl(config: &Config, path: &str, namespace: &Option) -> Result<(), String> { if let Some(ns) = namespace { - if ns.len() > KDL_NAME_LEN - 2 { - return Err(format!("Given namespace \"{}\"has more than {} characters.", ns, KDL_NAME_LEN - 2)); + const MAX_NS_LEN: usize = codegen::kdl::KDL_NAME_LEN - 2; + if ns.len() > MAX_NS_LEN { + return Err(format!("Given namespace \"{}\"has more than {} characters.", ns, MAX_NS_LEN)); } } let loaded = load(config, path)?; - let comp_book = codegen::kdl::compile_book(&loaded.book)?; - let kdl_names = codegen::kdl::get_kdl_names(&comp_book, namespace)?; - let result = codegen::kdl::to_kdl_book(&loaded.book, &kdl_names, &comp_book)?; + let result = codegen::kdl::to_kdl_book(loaded.book, namespace)?; print!("{}", result); Ok(()) } diff --git a/src/driver/loader.rs b/src/driver/loader.rs index 6a16fe0d..bb9ce17d 100644 --- a/src/driver/loader.rs +++ b/src/driver/loader.rs @@ -53,7 +53,7 @@ pub fn render_error(config: &Config, files: &[File], err: AdjustError) -> String AdjustErrorKind::AttributeWithoutArgs { name } => format!("You should not put arguments on the attribute '{}'.\n{}", name, high_line), AdjustErrorKind::AttributeMissingArg { name } => format!("Attribute '{}' needs to be given a value.\n{}", name, high_line), AdjustErrorKind::WrongTargetAttribute { name, target } => format!("The attribute '{}' only works in the target '{}'.\n{}", name, target, high_line), - AdjustErrorKind::NotInlineable { fn_name, attr_name } => format!("Function '{}' must have exactly one rule with only variable patterns to be '{}'.\n{}", fn_name, attr_name, high_line), 
+ AdjustErrorKind::NeedsRules { fn_name, attr_name } => format!("Attribute '{}' requires '{}' to have at least one rule.\n{}", attr_name, fn_name, high_line), AdjustErrorKind::FunctionHasArgs { fn_name, attr_name } => format!("Function '{}' must not have any arguments to be '{}'.\n{}", fn_name, attr_name, high_line), AdjustErrorKind::FunctionNotFound { name } => format!("Function '{}' was not found.\n{}", name, high_line), AdjustErrorKind::HasKdlAttrs { name } => format!("Function '{}' must not have any kdl attributes.\n{}", name, high_line), diff --git a/src/lowering/adjust.rs b/src/lowering/adjust.rs index fbe95efb..949fbe41 100644 --- a/src/lowering/adjust.rs +++ b/src/lowering/adjust.rs @@ -24,7 +24,7 @@ pub enum AdjustErrorKind { AttributeWithoutArgs { name: String }, AttributeMissingArg { name: String }, WrongTargetAttribute { name: String, target: Target }, - NotInlineable { fn_name: String, attr_name: String }, + NeedsRules { fn_name: String, attr_name: String }, FunctionHasArgs { fn_name: String, attr_name: String }, FunctionNotFound { name: String }, HasKdlAttrs { name: String }, diff --git a/src/lowering/attributes.rs b/src/lowering/attributes.rs index 6bc65729..4ab83117 100644 --- a/src/lowering/attributes.rs +++ b/src/lowering/attributes.rs @@ -1,4 +1,4 @@ -use crate::book::{span::{Span, Localized}, Attribute, Book, Entry, term::Term, name::Ident}; +use crate::book::{span::Span, Attribute, Book, Entry, name::Ident}; use crate::driver::config::{Config, Target}; use super::adjust::{AdjustError, AdjustErrorKind}; @@ -30,26 +30,8 @@ pub fn only_target(config: &Config, attr: &Attribute, target: Target) -> Result< if config.target == target || config.target == Target::All { Ok(()) } else { - adjust_err(attr.orig, AdjustErrorKind::WrongTargetAttribute { name: attr.name.0.clone(), target }) - } -} - -// Checks that the function can be inlined -// A function is inlineable if it has only one rule and all its patterns are variables -pub fn is_inlineable(entry: &Entry, attr: &Attribute) -> Result<(), AdjustError> { - if entry.rules.len() != 1 { - let fn_name = entry.name.0.clone(); - let attr_name = attr.name.0.clone(); - adjust_err(entry.orig, AdjustErrorKind::NotInlineable { fn_name, attr_name } ) - } else { - for pat in &entry.rules[0].pats { - if !matches!(&**pat, Term::Var { .. 
}) { - let fn_name = entry.name.0.clone(); - let attr_name = attr.name.0.clone(); - return adjust_err((&**pat).get_origin(), AdjustErrorKind::NotInlineable { fn_name, attr_name } ); - } - } - Ok(()) + let name = attr.name.0.clone(); + adjust_err(attr.orig, AdjustErrorKind::WrongTargetAttribute { name, target }) } } @@ -68,15 +50,27 @@ pub fn fn_exists<'a>(book: &'a Book, attr: &Attribute, entry_name: &Ident) -> Re if let Some(entry) = book.entrs.get(entry_name) { Ok(entry) } else { - adjust_err(attr.orig, AdjustErrorKind::FunctionNotFound { name: entry_name.0.clone() }) + let name = entry_name.0.clone(); + adjust_err(attr.orig, AdjustErrorKind::FunctionNotFound { name }) } } +pub fn has_rules(entry: &Entry, attr: &Attribute) -> Result<(), AdjustError> { + if !entry.rules.is_empty() { + Ok(()) + } else { + let fn_name = entry.name.0.clone(); + let attr_name = attr.name.0.clone(); + adjust_err(entry.orig, AdjustErrorKind::NeedsRules { fn_name, attr_name }) + } +} + pub fn no_kdl_attrs(entry: &Entry) -> Result<(), AdjustError> { let kdl_attrs = ["kdl_erase", "kdl_run", "kdl_name", "kdl_state"]; for attr_name in kdl_attrs { if let Some(attr) = entry.get_attribute(attr_name) { - return adjust_err(attr.orig, AdjustErrorKind::HasKdlAttrs { name: entry.name.0.clone() }); + let name = entry.name.0.clone(); + return adjust_err(attr.orig, AdjustErrorKind::HasKdlAttrs { name }); } } Ok(()) @@ -88,12 +82,19 @@ pub fn no_kdl_attrs(entry: &Entry) -> Result<(), AdjustError> { // they have no specification so we should check then. pub fn check_attribute(config: &Config, book: &Book, attr: &Attribute) -> Result<(), AdjustError> { match attr.name.0.as_str() { - "kdl_erase" => without_args(attr), + "inline" => { + without_args(attr) + } + "kdl_erase" => { + without_args(attr) + } "kdl_run" => { without_args(attr)?; only_target(config, attr, Target::Kdl) } - "kdl_name" => with_args(attr), + "kdl_name" => { + with_args(attr) + } "kdl_state" => { with_args(attr)?; // TODO: The state function shouldnt be called anywhere @@ -101,9 +102,13 @@ pub fn check_attribute(config: &Config, book: &Book, attr: &Attribute) -> Result let state_fn = fn_exists(book, attr, attr.value.as_ref().unwrap())?; no_kdl_attrs(state_fn)?; no_fn_args(state_fn, attr)?; - is_inlineable(state_fn, attr) + has_rules(state_fn, attr) + } + _ => { + let name = attr.name.0.clone(); + let err = AdjustErrorKind::InvalidAttribute { name }; + adjust_err(attr.orig, err) } - _ => adjust_err(attr.orig, AdjustErrorKind::InvalidAttribute { name: attr.name.0.clone() }), } } diff --git a/tests/mod.rs b/tests/mod.rs index 97e70e2b..4cef6317 100644 --- a/tests/mod.rs +++ b/tests/mod.rs @@ -36,9 +36,7 @@ fn test_kind2(path: &Path, run: fn(&Path) -> String) -> Result<(), Error> { fn compile_kdl(config: &Config, path: &str) -> Result { let loaded = driver::loader::load(&config, path)?; - let comp_book = codegen::kdl::compile_book(&loaded.book)?; - let kdl_names = codegen::kdl::get_kdl_names(&comp_book, &None)?; - let result = codegen::kdl::to_kdl_book(&loaded.book, &kdl_names, &comp_book)?; + let result = codegen::kdl::to_kdl_book(loaded.book, &None)?; Ok(result) } diff --git a/tests/suite/to_kdl/bad_inline.golden b/tests/suite/to_kdl/bad_inline.golden new file mode 100644 index 00000000..c71b8f0e --- /dev/null +++ b/tests/suite/to_kdl/bad_inline.golden @@ -0,0 +1 @@ +Unable to match term Fun { name: Ident("InlineFn"), args: [App { func: Lam { name: Ident("x"), body: Var { name: Ident("x") } }, argm: Num { numb: 1 } }] } to any of the function's rules \ No newline 
at end of file diff --git a/tests/suite/to_kdl/bad_inline.kind2 b/tests/suite/to_kdl/bad_inline.kind2 new file mode 100644 index 00000000..84904e18 --- /dev/null +++ b/tests/suite/to_kdl/bad_inline.kind2 @@ -0,0 +1,17 @@ +NormalFn (a: U60) : U60 +NormalFn a = (+ a a) + +#inline +InlineFn (a: U60) : U60 +InlineFn 0 = (- 0 1) +InlineFn 1 = (+ 2 2) +InlineFn n = n + +BadInline : U60 { + let bad_arg = 1 + let a = InlineFn 1 + let b = InlineFn ((x => x) 1) + let c = InlineFn (NormalFn 0) + let d = InlineFn (- 2 1) + (+ a (+ b (+ c d))) +} \ No newline at end of file diff --git a/tests/suite/to_kdl/inline.golden b/tests/suite/to_kdl/inline.golden new file mode 100644 index 00000000..205c7124 --- /dev/null +++ b/tests/suite/to_kdl/inline.golden @@ -0,0 +1,13 @@ +// MyStruct.new -(t: Type) (a: U60) (b: t) : (MyStruct t) +ctr {MyStruct_new a b} + +// SadFn : U60 +fun (SadFn) { + (SadFn) = + let a0 = #2; + let a1 = #40; + let a2 = #400; + let a3 = #2000; + (+ (+ (+ a0 a1) a2) a3) +} + diff --git a/tests/suite/to_kdl/inline.kind2 b/tests/suite/to_kdl/inline.kind2 new file mode 100644 index 00000000..d09e2a8e --- /dev/null +++ b/tests/suite/to_kdl/inline.kind2 @@ -0,0 +1,32 @@ +MyStruct (t: Type) : Type +MyStruct.new (a: U60) (b: t) : MyStruct t + +#inline +MyStruct.get_b (x: MyStruct t) : t +MyStruct.get_b (MyStruct.new a b) = b + +#inline +MyStruct.make (n: U60) (x: t) (y: t) : MyStruct t +MyStruct.make 0 x y = MyStruct.new 0 x +MyStruct.make n x y = MyStruct.new 1 y + +#inline +InlineFn (n: U60) (b: MyStruct t) (c: MyStruct t) : t +InlineFn 0 (MyStruct.new 0 b) (MyStruct.new 0 y) = (MyStruct.get_b (MyStruct.make 0 b y)) +InlineFn 0 (MyStruct.new a b) (MyStruct.new x y) = b +InlineFn 1 (MyStruct.new a b) (MyStruct.new x y) = y +InlineFn 2 b c = MyStruct.get_b (MyStruct.get_b (MyStruct.make 3 b c)) +InlineFn n b c = MyStruct.get_b (MyStruct.get_b (MyStruct.make n c b)) + +#inline +CoolFn : U60 { + let a0 = InlineFn 0 (MyStruct.new 1 2) (MyStruct.new 3 4) + let a1 = InlineFn 1 (MyStruct.new 10 20) (MyStruct.new 30 40) + let a2 = InlineFn 2 (MyStruct.new 100 200) (MyStruct.new 300 400) + let a3 = InlineFn 3 (MyStruct.new 1000 2000) (MyStruct.new 3000 4000) + (+ (+ (+ a0 a1) a2) a3) +} + +SadFn : U60 { + CoolFn +} \ No newline at end of file diff --git a/tests/suite/to_kdl/inline_with_arg.golden b/tests/suite/to_kdl/inline_with_arg.golden new file mode 100644 index 00000000..759c6dde --- /dev/null +++ b/tests/suite/to_kdl/inline_with_arg.golden @@ -0,0 +1 @@ +You should not put arguments on the attribute 'inline'. 
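// A minimal sketch of the behaviour the #inline tests above exercise (illustrative only, assuming
// the same kind2 surface syntax as those tests; `Inc` and `SomeFn` are hypothetical names that are
// not part of the test suite). A call to an #inline function is rewritten at compile time by
// substituting its arguments into the body of a matching rule; for functions that pattern match on
// their arguments, the call's arguments must already match one of the rules without reducing any
// subterm (see fun_matches_rule and term_matches_pattern), otherwise compilation fails as in
// bad_inline above.
//
//   #inline
//   Inc (n: U60) : U60
//   Inc 0 = 1
//   Inc n = (+ n 1)
//
//   Good : U60 { Inc 41 }           // 41 matches the `Inc n` rule, so the call inlines to (+ 41 1)
//   Bad  : U60 { Inc (SomeFn 2) }   // unreduced call as argument: no rule matches, compilation fails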
diff --git a/tests/suite/to_kdl/inline_with_arg.kind2 b/tests/suite/to_kdl/inline_with_arg.kind2 new file mode 100644 index 00000000..13ffa289 --- /dev/null +++ b/tests/suite/to_kdl/inline_with_arg.kind2 @@ -0,0 +1,4 @@ +#inline = Oi +FunWithBadAttr : U60 { + 1 +} \ No newline at end of file diff --git a/tests/suite/to_kdl/state_with_args.golden b/tests/suite/to_kdl/kdl_state_with_args.golden similarity index 100% rename from tests/suite/to_kdl/state_with_args.golden rename to tests/suite/to_kdl/kdl_state_with_args.golden diff --git a/tests/suite/to_kdl/state_with_args.kind2 b/tests/suite/to_kdl/kdl_state_with_args.kind2 similarity index 100% rename from tests/suite/to_kdl/state_with_args.kind2 rename to tests/suite/to_kdl/kdl_state_with_args.kind2 diff --git a/tests/suite/to_kdl/state_with_attr.golden b/tests/suite/to_kdl/kdl_state_with_attr.golden similarity index 100% rename from tests/suite/to_kdl/state_with_attr.golden rename to tests/suite/to_kdl/kdl_state_with_attr.golden diff --git a/tests/suite/to_kdl/state_with_attr.kind2 b/tests/suite/to_kdl/kdl_state_with_attr.kind2 similarity index 100% rename from tests/suite/to_kdl/state_with_attr.kind2 rename to tests/suite/to_kdl/kdl_state_with_attr.kind2 diff --git a/tests/suite/to_kdl/non_inline_state.golden b/tests/suite/to_kdl/non_inline_state.golden index 31156b1b..b5d37eeb 100644 --- a/tests/suite/to_kdl/non_inline_state.golden +++ b/tests/suite/to_kdl/non_inline_state.golden @@ -1 +1 @@ -Function 'MyFn.state' must have exactly one rule with only variable patterns to be 'kdl_state'. +Attribute 'kdl_state' requires 'MyFn.state' to have at least one rule. From 6fcfca03086ceef6cb8b34473519583c6fd9d165 Mon Sep 17 00:00:00 2001 From: Nicolas Abril Date: Fri, 14 Oct 2022 10:54:25 +0200 Subject: [PATCH 10/17] feat: indent dup terms in generated kdl files --- src/codegen/kdl.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/codegen/kdl.rs b/src/codegen/kdl.rs index ec5913f7..49d3a501 100644 --- a/src/codegen/kdl.rs +++ b/src/codegen/kdl.rs @@ -25,7 +25,7 @@ pub fn stringify_kdl_term(kdl_names: &HashMap, term: &CompTerm) -> CompTerm::Dup { nam0, nam1, expr, body } => { let expr = stringify_kdl_term(kdl_names, expr)?; let body = stringify_kdl_term(kdl_names, body)?; - format!("dup {} {} = {}; {}", nam0, nam1, expr, body) + format!("dup {} {} = {};\n {}", nam0, nam1, expr, body) } CompTerm::Let { name, expr, body } => { let expr = stringify_kdl_term(kdl_names, expr)?; From 0337096aff2b17a255572ae13c05574c0c8b7020 Mon Sep 17 00:00:00 2001 From: Nicolas Abril Date: Fri, 14 Oct 2022 11:02:01 +0200 Subject: [PATCH 11/17] refactor: Add description to kdl compilation functions --- src/codegen/kdl/passes.rs | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/src/codegen/kdl/passes.rs b/src/codegen/kdl/passes.rs index 23230d29..75deb520 100644 --- a/src/codegen/kdl/passes.rs +++ b/src/codegen/kdl/passes.rs @@ -172,12 +172,16 @@ pub fn erase_terms(book: &Book) -> Result { } } +// Removes functions that shouldn't exist on runtime from the book +// These are any functions that return type information, like type declarations pub fn erase_funs(book: Book) -> Result { let mut book = book; let mut names = Vec::new(); let mut entrs = HashMap::new(); for name in book.names { let entry = book.entrs.remove(&name).unwrap(); + // TODO: Do a better job of finding functions that return types + // We need a more general algorithm to get things like (Type -> MyType) or (MyType -> Type) if matches!(&*entry.tipo, Term::Typ { .. 
}) { continue; } @@ -437,6 +441,7 @@ pub fn flatten(book: CompBook) -> Result { Ok(book) } +// Unbinds any unused variables and inserts dups for vars used more than once pub fn linearize_rules(book: CompBook) -> Result { // Returns left-hand side variables fn collect_lhs_vars<'a>(term: &'a mut CompTerm, vars: &mut HashMap) { @@ -689,6 +694,7 @@ pub fn linearize_rules(book: CompBook) -> Result { Ok(book) } +// Substitute all inlined function applications in the Book pub fn inline(book: CompBook) -> Result { fn replace_inlines(book: &CompBook, term: &CompTerm) -> Result, String> { let new_term = match term { @@ -813,6 +819,7 @@ pub fn inline(book: CompBook) -> Result { Ok(book) } +// Remove entries corresponding to primitive U120 operations from the book pub fn remove_u120_opers(book: CompBook) -> Result { // opers and new/high/low fn make_u120_new(old_entry: &CompEntry) -> CompEntry { @@ -922,6 +929,8 @@ pub fn remove_u120_opers(book: CompBook) -> Result { Ok(book) } +// Substitute U120.new by a Num term +// and functions that correspond to a primitive U120 operation by an Op2. // TODO: We probably need to handle U60 separately as well. // Since they compile to U120, it wont overflow as expected and conversion to signed will fail. pub fn convert_u120_uses(book: CompBook) -> Result { @@ -1106,6 +1115,7 @@ pub fn subst(term: &mut CompTerm, sub_name: &Ident, value: &CompTerm) { } } +// Return true if a function call matches with a rule of said function without reducing any subterm pub fn fun_matches_rule (args: &[Box], rule: &CompRule) -> bool { for (arg, pat) in args.iter().zip(rule.pats.iter()) { let matches = term_matches_pattern(arg, pat); @@ -1116,6 +1126,7 @@ pub fn fun_matches_rule (args: &[Box], rule: &CompRule) -> bool { true } +// Return true if we can match a term to a pattern without reducing anything pub fn term_matches_pattern (term: &CompTerm, pat: &CompTerm) -> bool { let mut check_stack = vec![(term, pat)]; while !check_stack.is_empty() { From eb0cc9a7683c83a6b1357b5e32a65cb8430eac91 Mon Sep 17 00:00:00 2001 From: Nicolas Abril Date: Fri, 14 Oct 2022 11:03:07 +0200 Subject: [PATCH 12/17] feat: Accept more cases of inlined functions --- src/codegen/kdl/passes.rs | 101 ++++++++++++++++-------- tests/suite/to_kdl/inline_simple.golden | 28 +++++++ tests/suite/to_kdl/inline_simple.kind2 | 22 ++++++ 3 files changed, 117 insertions(+), 34 deletions(-) create mode 100644 tests/suite/to_kdl/inline_simple.golden create mode 100644 tests/suite/to_kdl/inline_simple.kind2 diff --git a/src/codegen/kdl/passes.rs b/src/codegen/kdl/passes.rs index 75deb520..0ba4ea19 100644 --- a/src/codegen/kdl/passes.rs +++ b/src/codegen/kdl/passes.rs @@ -699,20 +699,32 @@ pub fn inline(book: CompBook) -> Result { fn replace_inlines(book: &CompBook, term: &CompTerm) -> Result, String> { let new_term = match term { CompTerm::Fun { name, args } => { - // First we substitute nested inline applications - // This expands the number of inline functions we can accept - // This is also inefficient since we are going over the tree more times than needed - let mut new_args = Vec::new(); - for arg in args { - new_args.push(replace_inlines(book, arg)?); - } - let fn_entry = book.entrs.get(name).unwrap(); - if fn_entry.get_attribute("inline").is_some() { - // Substitute an inlined function application directly by the rewrite on compilation - let new_term = subst_inline_term(fn_entry, &name, &new_args)?; + let inlined_fn = book.entrs.get(name).unwrap(); + if inlined_fn.get_attribute("inline").is_some() { + let new_term = 
+            if is_simple_fun(inlined_fn) {
+              // For simple functions, we know we can just subst all vars directly.
+              // "Simple" here means 1 rule and only vars as patterns
+              // TODO: Maybe also consider functions that just destructure a record
+              let rule = &inlined_fn.rules[0];
+              subst_inline_term(rule, &args)
+            } else {
+              // For functions that need to do some pattern matching,
+              // we first try to resolve nested inlines.
+              // With this we can increase the number of inlineable functions
+              // without having to do complete rule rewriting.
+              let mut new_args = Vec::new();
+              for arg in args {
+                new_args.push(replace_inlines(book, arg)?);
+              }
+              match_and_subst_inline_term(inlined_fn, &name, &new_args)?
+            };
           // The substituted term could still have nested inline functions, so continue recursing
           replace_inlines(book, &*new_term)?
         } else {
+          let mut new_args = Vec::new();
+          for arg in args {
+            new_args.push(replace_inlines(book, arg)?);
+          }
           // Non inlined functions are just copied like other terms
           Box::new(CompTerm::Fun { name: name.clone(), args: new_args })
         }
@@ -758,36 +770,46 @@ pub fn inline(book: CompBook) -> Result<CompBook, String> {
     Ok(new_term)
   }
 
-  fn subst_inline_term(entry: &CompEntry, name: &Ident, args: &[Box<CompTerm>]) -> Result<Box<CompTerm>, String> {
-    let mut new_term = Box::new(CompTerm::Nil); // ugly
+  // Substitute a function application by the body of the given rule
+  // This doesn't check if the rule chosen is actually the correct one.
+  fn subst_inline_term(rule: &CompRule, args: &[Box<CompTerm>]) -> Box<CompTerm> {
+    // Clone the rule body and for each variable in the pats, subst in the body
+    // This is the new inlined term
+    let mut new_term = rule.body.clone();
+    let mut subst_stack: Vec<(&Box<CompTerm>, &Box<CompTerm>)> =
+      args.iter().zip(rule.pats.iter()).collect();
+    while !subst_stack.is_empty() {
+      let (arg, pat) = subst_stack.pop().unwrap();
+      match (&**arg, &**pat) {
+        (CompTerm::Ctr { args: arg_args, ..
}, CompTerm::Ctr { args: pat_args, ..}) => { - let to_sub: Vec<(&Box, &Box)> = - arg_args.iter().zip(pat_args.iter()).collect(); - subst_stack.extend(to_sub); - } - (arg, CompTerm::Var { name }) => { - subst(&mut *new_term, &name, arg); - } - _ => () - } - } + new_term = Some(subst_inline_term(rule, args)); found_match = true; break; } } if found_match { - Ok(new_term) + Ok(new_term.unwrap()) } else { let term = CompTerm::Fun { name: name.clone(), args: args.to_vec() }; Err(format!("Unable to match term {:?} to any of the function's rules", term)) @@ -1155,4 +1177,15 @@ pub fn term_matches_pattern (term: &CompTerm, pat: &CompTerm) -> bool { } } true -} \ No newline at end of file +} + +// A function is considered "simple" when it has only one rule and all patterns are variables +pub fn is_simple_fun(entry: &CompEntry) -> bool { + let has_1_rule = entry.rules.len() == 1; + if has_1_rule { + let all_vars = entry.rules[0].pats.iter().all(|x| matches!(&**x, CompTerm::Var { .. })); + all_vars + } else { + false + } +} diff --git a/tests/suite/to_kdl/inline_simple.golden b/tests/suite/to_kdl/inline_simple.golden new file mode 100644 index 00000000..ba63cd80 --- /dev/null +++ b/tests/suite/to_kdl/inline_simple.golden @@ -0,0 +1,28 @@ +// MyType.new -(t: Type) -(u: Type) (a: U60) (b: U60) (c: t) (d: u) : (MyType t u) +ctr {MyType_new a b c d} + +// MyType.swap -(t: Type) -(u: Type) (a: (MyType t u)) : (MyType u t) +fun (MyType_swap a) { + (MyType_swap {MyType_new a b c d}) = + {MyType_new b a d c} +} + +// MyType.mix -(t: Type) -(u: Type) (a: (MyType t u)) (b: (MyType t u)) : (MyType t u) +fun (MyType_mix a b) { + (MyType_mix {MyType_new aa ~ ac ~} {MyType_new ~ bb ~ bd}) = + {MyType_new aa bb ac bd} +} + +// MyType.get_c -(t: Type) -(u: Type) (a: (MyType t u)) : t +fun (MyType_get_c a) { + (MyType_get_c {MyType_new ~ ~ c ~}) = + c +} + +// Main : _ +fun (Main) { + (Main) = + let a = {MyType_new #0 #1 #2 #3}; + (MyType_get_c (MyType_mix a (MyType_swap (!@x {MyType_new x #5 #6 #7} #4)))) +} + diff --git a/tests/suite/to_kdl/inline_simple.kind2 b/tests/suite/to_kdl/inline_simple.kind2 new file mode 100644 index 00000000..321e773a --- /dev/null +++ b/tests/suite/to_kdl/inline_simple.kind2 @@ -0,0 +1,22 @@ +MyType (t: Type) (u: Type) : Type +MyType.new (a: U60) (b: U60) (c: t) (d: u) : MyType t u + +MyType.swap (a: MyType t u) : MyType u t +MyType.swap (MyType.new a b c d) = MyType.new b a d c + +MyType.mix (a: MyType t u) (b: MyType t u) : MyType t u +MyType.mix (MyType.new aa ab ac ad) (MyType.new ba bb bc bd) = + MyType.new aa bb ac bd + +MyType.get_c (a: MyType t u) : t +MyType.get_c (MyType.new a b c d) = c + +#inline +SimpleFn (a: MyType t u) (b: MyType u t) : t { + MyType.get_c (MyType.mix a (MyType.swap b)) +} + +Main { + let a = (MyType.new 0 1 2 3) + SimpleFn a ((x => MyType.new x 5 6 7) 4) +} \ No newline at end of file From 88f6ac73b0730c418577fe50a2f6a0fba8101d6b Mon Sep 17 00:00:00 2001 From: Nicolas Abril Date: Wed, 26 Oct 2022 14:29:46 +0200 Subject: [PATCH 13/17] feat: Use hash of name instead of random chars for long kdl names --- Cargo.lock | 64 +++++++------------------- Cargo.toml | 2 +- src/codegen/kdl.rs | 41 +++++++++++------ tests/suite/to_kdl/name_shorten.golden | 6 +++ tests/suite/to_kdl/name_shorten.kind2 | 3 ++ 5 files changed, 54 insertions(+), 62 deletions(-) create mode 100644 tests/suite/to_kdl/name_shorten.golden create mode 100644 tests/suite/to_kdl/name_shorten.kind2 diff --git a/Cargo.lock b/Cargo.lock index f6ea18df..5f479894 100644 --- a/Cargo.lock +++ 
b/Cargo.lock @@ -119,6 +119,12 @@ version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc" +[[package]] +name = "crunchy" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" + [[package]] name = "ctor" version = "0.1.23" @@ -237,17 +243,6 @@ dependencies = [ "slab", ] -[[package]] -name = "getrandom" -version = "0.2.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4eb1a864a501629691edf6c15a593b7a51eebaa1e8468e9ddc623de7c9b58ec6" -dependencies = [ - "cfg-if", - "libc", - "wasi", -] - [[package]] name = "h2" version = "0.3.14" @@ -453,7 +448,7 @@ dependencies = [ "hvm", "ntest", "pretty_assertions", - "rand", + "tiny-keccak", "walkdir", ] @@ -661,12 +656,6 @@ version = "0.3.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1df8c4ec4b0627e53bdf214615ad287367e482558cf84b109250b37464dc03ae" -[[package]] -name = "ppv-lite86" -version = "0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb9f9e6e233e5c4a35559a617bf40a4ec447db2e84c20b55a6f83167b7e57872" - [[package]] name = "pretty_assertions" version = "1.3.0" @@ -732,36 +721,6 @@ dependencies = [ "proc-macro2", ] -[[package]] -name = "rand" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" -dependencies = [ - "libc", - "rand_chacha", - "rand_core", -] - -[[package]] -name = "rand_chacha" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" -dependencies = [ - "ppv-lite86", - "rand_core", -] - -[[package]] -name = "rand_core" -version = "0.6.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" -dependencies = [ - "getrandom", -] - [[package]] name = "redox_syscall" version = "0.2.16" @@ -996,6 +955,15 @@ dependencies = [ "syn", ] +[[package]] +name = "tiny-keccak" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" +dependencies = [ + "crunchy", +] + [[package]] name = "tinyvec" version = "1.6.0" diff --git a/Cargo.toml b/Cargo.toml index 3408480b..c456d65e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -14,7 +14,7 @@ hvm = "0.1.81" #hvm = { path = "../hvm" } highlight_error = "0.1.1" clap = { version = "3.1.8", features = ["derive"] } -rand = "0.8.5" +tiny-keccak = { version = "2.0.2", features = ["keccak"] } [dev-dependencies] pretty_assertions = "1.3.0" diff --git a/src/codegen/kdl.rs b/src/codegen/kdl.rs index 49d3a501..793709d6 100644 --- a/src/codegen/kdl.rs +++ b/src/codegen/kdl.rs @@ -5,8 +5,8 @@ use crate::book::name::Ident; use crate::book::Book; pub use crate::codegen::kdl::book::*; -use rand::Rng; use std::collections::{HashMap, HashSet}; +use tiny_keccak::Hasher; pub const KDL_NAME_LEN: usize = 12; @@ -165,22 +165,28 @@ pub fn to_kdl_book(book: Book, namespace: &Option) -> Result) -> Result, String> { // Fits a name to the max size allowed by kindelia. - // If the name is too large, truncates and replaces the last characters by random chars. 
- fn rand_shorten(name: &Ident, ns: &str) -> Ident { + // If the name is too large, uses the hash of the name instead + fn hash_shorten(name: &Ident, ns: &str) -> Ident { let max_fn_name = KDL_NAME_LEN - ns.len(); - // If the name doesn't fit, truncate and insert some random characters at the end let name = if name.len() > max_fn_name { - let n_rnd_chrs = usize::min(3, max_fn_name); - let name_cut = name.0[..max_fn_name - n_rnd_chrs].to_string(); - let mut rng = rand::thread_rng(); - let rnd_chrs = (0..n_rnd_chrs).map(|_| rng.gen_range(0..63)).map(encode_base64).collect::(); - Ident(format!("{}{}", name_cut, rnd_chrs)) + let name_hash = keccak128(name.0.as_bytes()); + let name_hash = u128::from_le_bytes(name_hash); + let name_hash = u128_to_kdl_name(name_hash); + name_hash[..max_fn_name].to_string() } else { - name.clone() + name.0.clone() }; Ident(format!("{}{}", ns, name)) } + fn keccak128(data: &[u8]) -> [u8; 16] { + let mut hasher = tiny_keccak::Keccak::v256(); + let mut output = [0u8; 16]; + hasher.update(data); + hasher.finalize(&mut output); + output + } + fn get_kdl_name(entry: &CompEntry, ns: &str) -> Result { let kind_name = &entry.name; // If the entry uses a kindelia name, use it @@ -202,18 +208,18 @@ pub fn get_kdl_names(book: &CompBook, namespace: &Option) -> Result char { + fn encode_base64_u8(num: u8) -> char { match num { 0..=9 => (num + b'0') as char, 10..=35 => (num - 10 + b'A') as char, @@ -222,6 +228,15 @@ pub fn get_kdl_names(book: &CompBook, namespace: &Option) -> Result String { + let mut encoded = [0 as char; 12]; + for i in 0..12 { + encoded[i] = encode_base64_u8((num & 0x3f) as u8); + num >>= 6; + } + encoded.into_iter().collect() + } + let mut errors = Vec::new(); let mut kdl_names = HashMap::new(); let ns = namespace.as_ref().map_or(String::new(), |ns| format!("{}.", ns)); diff --git a/tests/suite/to_kdl/name_shorten.golden b/tests/suite/to_kdl/name_shorten.golden new file mode 100644 index 00000000..313f20d3 --- /dev/null +++ b/tests/suite/to_kdl/name_shorten.golden @@ -0,0 +1,6 @@ +// FunctionWithAVeryLongName : U60 +fun (NDFMHGgkiOr2) { + (NDFMHGgkiOr2) = + #0 +} + diff --git a/tests/suite/to_kdl/name_shorten.kind2 b/tests/suite/to_kdl/name_shorten.kind2 new file mode 100644 index 00000000..7f0c6436 --- /dev/null +++ b/tests/suite/to_kdl/name_shorten.kind2 @@ -0,0 +1,3 @@ +FunctionWithAVeryLongName : U60 { + 0 +} \ No newline at end of file From 4a85dee0272d5fcda4f20b9c37af9ddef671d138 Mon Sep 17 00:00:00 2001 From: Nicolas Abril Date: Thu, 10 Nov 2022 15:13:52 +0100 Subject: [PATCH 14/17] feat: Add Xu120 syntax for U120 numbers --- src/parser/term.rs | 45 ++++++++++++++++++++++++++++++++++++++++----- 1 file changed, 40 insertions(+), 5 deletions(-) diff --git a/src/parser/term.rs b/src/parser/term.rs index 34608d0a..0a688389 100644 --- a/src/parser/term.rs +++ b/src/parser/term.rs @@ -12,6 +12,44 @@ type TermPrefix = Box) -> Box>; type TermComplete = Box Box>; +pub fn parse_num(state: State) -> Answer>> { + parser::guard( + Box::new(|state| { + let (state, head) = parser::get_char(state)?; + Ok((state, head.is_ascii_digit())) + }), + Box::new(|state| { + let (state, init) = get_init_index(state)?; + let (state, name) = parse_path_str(state)?; + let (state, last) = get_last_index(state)?; + let orig = Span::new_off(init, last); + + let parts = name.split("u120").collect::>(); + if parts.len() == 2 && parts[1] == "" { + if let Ok(num120) = parts[0].parse::() { + let term = Box::new(Term::Ctr { + orig, + name: Ident::new_path("U120", "new"), + args: vec![ + 
Box::new(Term::Num { orig, numb: (num120 >> 60) as u64 }), + Box::new(Term::Num { orig, numb: (num120 & 0xFFFFFFFFFFFFFFF) as u64 })], + }); + Ok((state, term)) + } else { + parser::expected("U120 number", name.len(), state) + } + } else { + if let Ok(numb) = name.parse::() { + Ok((state, Box::new(Term::Num { orig, numb }))) + } else { + parser::expected("U60 number", name.len(), state) + } + } + }), + state, + ) +} + pub fn parse_var(state: State) -> Answer>> { parser::guard( Box::new(|state| Ok((state, true))), @@ -20,11 +58,7 @@ pub fn parse_var(state: State) -> Answer>> { let (state, name) = parse_path_str(state)?; let (state, last) = get_last_index(state)?; let orig = Span::new_off(init, last); - if let Ok(numb) = name.parse::() { - Ok((state, Box::new(Term::Num { orig, numb }))) - } else { - Ok((state, Box::new(Term::Var { orig, name: Ident(name) }))) - } + Ok((state, Box::new(Term::Var { orig, name: Ident(name) }))) }), state, ) @@ -186,6 +220,7 @@ pub fn parse_term_prefix(state: State) -> Answer> { Box::new(parse_do), // `do ` Box::new(parse_hlp), // `?` Box::new(parse_hol), // `_` + Box::new(parse_num), // `01234` or `01234u120` Box::new(parse_var), // x Box::new(|state| Ok((state, None))), ], From a4ae8db8c3adf4fb9da94be74bf1174787e8fe35 Mon Sep 17 00:00:00 2001 From: Nicolas Abril Date: Thu, 10 Nov 2022 17:49:53 +0100 Subject: [PATCH 15/17] fix: Remove copy/paste leftover in syntax.md --- SYNTAX.md | 8 -------- 1 file changed, 8 deletions(-) diff --git a/SYNTAX.md b/SYNTAX.md index 0dc5d366..cefd88ad 100644 --- a/SYNTAX.md +++ b/SYNTAX.md @@ -465,14 +465,6 @@ List.nil) But underneath the hood, what an implicit argument actually does is automatically put holes in these places. -Moreover, single holes can be shortened as `!`. So it can also be written as: - -``` -List.cons!(List.cons!(1, List.cons!(2, List.nil!)), -List.cons!(List.cons!(3, List.cons!(4, List.nil!)), -List.nil!)) -``` - Of course, in this particular example, we can just use the list notation directly: ``` From c3f5b3bdda2d00e1499da468c97bfe319258d6c9 Mon Sep 17 00:00:00 2001 From: Nicolas Abril Date: Tue, 15 Nov 2022 10:03:11 +0100 Subject: [PATCH 16/17] feat: Remove capitalized kdl name restriction --- src/codegen/kdl.rs | 4 ---- 1 file changed, 4 deletions(-) diff --git a/src/codegen/kdl.rs b/src/codegen/kdl.rs index 793709d6..9a03d0de 100644 --- a/src/codegen/kdl.rs +++ b/src/codegen/kdl.rs @@ -192,10 +192,6 @@ pub fn get_kdl_names(book: &CompBook, namespace: &Option) -> Result max_len { From f54ebb901cfd4d2439c6e0e7c7d87f29c25a453a Mon Sep 17 00:00:00 2001 From: felipegchi Date: Wed, 30 Nov 2022 13:40:11 -0300 Subject: [PATCH 17/17] style: changed code style a little bit --- crates/kind-cli/Cargo.toml | 4 +- crates/kind-cli/src/main.rs | 31 +++-- crates/kind-driver/Cargo.toml | 1 + crates/kind-driver/src/errors.rs | 2 +- crates/kind-driver/src/lib.rs | 63 +++++----- crates/kind-driver/src/resolution.rs | 133 ++++++++++++---------- crates/kind-parser/src/lexer/state.rs | 2 +- crates/kind-parser/src/top_level/mod.rs | 9 +- crates/kind-pass/Cargo.toml | 3 +- crates/kind-pass/src/desugar/mod.rs | 6 +- crates/kind-pass/src/desugar/top_level.rs | 1 - crates/kind-pass/src/erasure/mod.rs | 8 +- crates/kind-pass/src/errors.rs | 13 +++ crates/kind-target-kdl/src/compile.rs | 6 +- crates/kind-target-kdl/src/lib.rs | 15 ++- crates/kind-tests/benches/pure.rs | 16 +-- 16 files changed, 186 insertions(+), 127 deletions(-) diff --git a/crates/kind-cli/Cargo.toml b/crates/kind-cli/Cargo.toml index b73f573b..02d8a582 100644 --- 
a/crates/kind-cli/Cargo.toml +++ b/crates/kind-cli/Cargo.toml @@ -19,4 +19,6 @@ kind-report = { path = "../kind-report" } kind-checker = { path = "../kind-checker" } kind-query = { path = "../kind-query" } -clap = { version = "4.0.10", features = ["derive"] } \ No newline at end of file +clap = { version = "4.0.10", features = ["derive"] } +anyhow = "1.0.66" +exitcode = "1.1.2" \ No newline at end of file diff --git a/crates/kind-cli/src/main.rs b/crates/kind-cli/src/main.rs index 1a01a809..514382bc 100644 --- a/crates/kind-cli/src/main.rs +++ b/crates/kind-cli/src/main.rs @@ -3,6 +3,7 @@ use std::time::Instant; use std::{fmt, io}; use clap::{Parser, Subcommand}; +use driver::resolution::ResolutionError; use kind_driver::session::Session; use kind_report::data::{Diagnostic, Log}; use kind_report::report::{FileCache, Report}; @@ -33,7 +34,7 @@ pub struct Cli { pub no_color: bool, /// How much concurrency in HVM - #[arg(short, long)] + #[arg(long)] pub tids: Option, /// Prints all of the functions and their evaluation @@ -129,8 +130,8 @@ pub fn compile_in_session( root: PathBuf, file: String, compiled: bool, - fun: &mut dyn FnMut(&mut Session) -> Result, -) -> Result { + fun: &mut dyn FnMut(&mut Session) -> anyhow::Result, +) -> anyhow::Result { let (rx, tx) = std::sync::mpsc::channel(); let mut session = Session::new(root, rx); @@ -149,7 +150,8 @@ pub fn compile_in_session( let diagnostics = tx.try_iter().collect::>>(); - if diagnostics.is_empty() && res.is_ok() { + if diagnostics.is_empty() { + render_to_stderr( &render_config, &session, @@ -159,19 +161,27 @@ pub fn compile_in_session( Log::Checked(start.elapsed()) }, ); + eprintln!(); - Ok(res.unwrap()) + + res } else { render_to_stderr(&render_config, &session, &Log::Failed(start.elapsed())); eprintln!(); + for diagnostic in diagnostics { render_to_stderr(&render_config, &session, &diagnostic) } - Err(()) + + match res { + Ok(_) => Err(ResolutionError.into()), + Err(res) => Err(res) + } } } -pub fn run_cli(config: Cli) -> Result<(), ()> { +pub fn run_cli(config: Cli) -> anyhow::Result<()> { + kind_report::check_if_colors_are_supported(config.no_color); let render_config = kind_report::check_if_utf8_is_supported(config.ascii, 2); @@ -262,6 +272,9 @@ pub fn run_cli(config: Cli) -> Result<(), ()> { Ok(()) } -pub fn main() -> Result<(), ()> { - run_cli(Cli::parse()) +pub fn main() { + match run_cli(Cli::parse()) { + Ok(_) => std::process::exit(0), + Err(_) => std::process::exit(1), + } } diff --git a/crates/kind-driver/Cargo.toml b/crates/kind-driver/Cargo.toml index 3e25b1be..ffcc4efe 100644 --- a/crates/kind-driver/Cargo.toml +++ b/crates/kind-driver/Cargo.toml @@ -18,6 +18,7 @@ kind-target-kdl = { path = "../kind-target-kdl" } hvm = { git = "https://github.com/Kindelia/HVM.git" } +anyhow = "1.0.66" strsim = "0.10.0" fxhash = "0.2.1" dashmap = "5.4.0" \ No newline at end of file diff --git a/crates/kind-driver/src/errors.rs b/crates/kind-driver/src/errors.rs index 0e952adc..0b502846 100644 --- a/crates/kind-driver/src/errors.rs +++ b/crates/kind-driver/src/errors.rs @@ -56,7 +56,7 @@ impl Diagnostic for DriverError { DriverError::MultiplePaths(ident, paths) => DiagnosticFrame { code: 101, severity: Severity::Error, - title: "Multiple definitions for the same name".to_string(), + title: "Ambiguous definition location for the same name".to_string(), subtitles: paths .iter() .map(|path| Subtitle::Phrase(Color::Fst, vec![Word::White(path.display().to_string())])) diff --git a/crates/kind-driver/src/lib.rs b/crates/kind-driver/src/lib.rs index 
9179c4d1..dd12398a 100644 --- a/crates/kind-driver/src/lib.rs +++ b/crates/kind-driver/src/lib.rs @@ -5,6 +5,7 @@ use kind_report::report::FileCache; use kind_span::SyntaxCtxIndex; use kind_tree::{backend, concrete, desugared, untyped}; +use resolution::ResolutionError; use session::Session; use std::path::PathBuf; @@ -25,17 +26,22 @@ pub fn type_check_book( session: &mut Session, path: &PathBuf, entrypoints: Vec, - tids: Option -) -> Result { + tids: Option, +) -> anyhow::Result { let concrete_book = to_book(session, path)?; let desugared_book = desugar::desugar_book(session.diagnostic_sender.clone(), &concrete_book)?; let all = desugared_book.entrs.iter().map(|x| x.0).cloned().collect(); - let succeeded = checker::type_check(&desugared_book, session.diagnostic_sender.clone(), all, tids); + let succeeded = checker::type_check( + &desugared_book, + session.diagnostic_sender.clone(), + all, + tids, + ); if !succeeded { - return Err(()); + return Err(ResolutionError.into()); } let mut book = erasure::erase_book( @@ -48,14 +54,10 @@ pub fn type_check_book( Ok(book) } -pub fn to_book(session: &mut Session, path: &PathBuf) -> Result { +pub fn to_book(session: &mut Session, path: &PathBuf) -> anyhow::Result { let mut concrete_book = resolution::parse_and_store_book(session, path)?; - let failed = resolution::check_unbound_top_level(session, &mut concrete_book); - - if failed { - return Err(()); - } + resolution::check_unbound_top_level(session, &mut concrete_book)?; Ok(concrete_book) } @@ -64,24 +66,29 @@ pub fn erase_book( session: &mut Session, path: &PathBuf, entrypoints: Vec, -) -> Result { +) -> anyhow::Result { let concrete_book = to_book(session, path)?; let desugared_book = desugar::desugar_book(session.diagnostic_sender.clone(), &concrete_book)?; + let mut book = erasure::erase_book( &desugared_book, session.diagnostic_sender.clone(), entrypoints, )?; + inline_book(&mut book); Ok(book) } -pub fn desugar_book(session: &mut Session, path: &PathBuf) -> Result { +pub fn desugar_book(session: &mut Session, path: &PathBuf) -> anyhow::Result { let concrete_book = to_book(session, path)?; desugar::desugar_book(session.diagnostic_sender.clone(), &concrete_book) } -pub fn check_erasure_book(session: &mut Session, path: &PathBuf) -> Result { +pub fn check_erasure_book( + session: &mut Session, + path: &PathBuf, +) -> anyhow::Result { let concrete_book = to_book(session, path)?; desugar::desugar_book(session.diagnostic_sender.clone(), &concrete_book) } @@ -95,9 +102,10 @@ pub fn compile_book_to_kdl( session: &mut Session, namespace: &str, entrypoints: Vec, -) -> Result { +) -> anyhow::Result { let concrete_book = to_book(session, path)?; let desugared_book = desugar::desugar_book(session.diagnostic_sender.clone(), &concrete_book)?; + let mut book = erasure::erase_book( &desugared_book, session.diagnostic_sender.clone(), @@ -106,28 +114,29 @@ pub fn compile_book_to_kdl( inline_book(&mut book); - kind_target_kdl::compile_book(book, session.diagnostic_sender.clone(), namespace) + let res = kind_target_kdl::compile_book(book, session.diagnostic_sender.clone(), namespace)?; + + Ok(res) } -pub fn check_main_entry(session: &mut Session, book: &untyped::Book) -> Result<(), ()> { +pub fn check_main_entry(session: &mut Session, book: &untyped::Book) -> anyhow::Result<()> { if !book.entrs.contains_key("Main") { - session - .diagnostic_sender - .send(Box::new(DriverError::ThereIsntAMain)) - .unwrap(); - Err(()) + let err = Box::new(DriverError::ThereIsntAMain); + session.diagnostic_sender.send(err).unwrap(); + 
Err(ResolutionError.into()) } else { Ok(()) } } -pub fn check_main_desugared_entry(session: &mut Session, book: &desugared::Book) -> Result<(), ()> { +pub fn check_main_desugared_entry( + session: &mut Session, + book: &desugared::Book, +) -> anyhow::Result<()> { if !book.entrs.contains_key("Main") { - session - .diagnostic_sender - .send(Box::new(DriverError::ThereIsntAMain)) - .unwrap(); - Err(()) + let err = Box::new(DriverError::ThereIsntAMain); + session.diagnostic_sender.send(err).unwrap(); + Err(ResolutionError.into()) } else { Ok(()) } diff --git a/crates/kind-driver/src/resolution.rs b/crates/kind-driver/src/resolution.rs index 45a81c00..6b021499 100644 --- a/crates/kind-driver/src/resolution.rs +++ b/crates/kind-driver/src/resolution.rs @@ -3,9 +3,11 @@ //! it returns a desugared book of all of the //! depedencies. +use core::fmt; use fxhash::FxHashSet; use kind_pass::expand::expand_module; use kind_pass::expand::uses::expand_uses; +use std::error::Error; use std::fs; use std::path::{Path, PathBuf}; use std::rc::Rc; @@ -19,6 +21,17 @@ use kind_tree::symbol::{Ident, QualifiedIdent}; use crate::{errors::DriverError, session::Session}; +#[derive(Debug)] +pub struct ResolutionError; + +impl fmt::Display for ResolutionError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "resolution error") + } +} + +impl Error for ResolutionError {} + /// The extension of kind2 files. const EXT: &str = "kind2"; @@ -30,10 +43,11 @@ fn accumulate_neighbour_paths( raw_path: &Path, ) -> Result, Box> { let mut canon_path = raw_path.to_path_buf(); - let mut dir_file_path = raw_path.to_path_buf(); - let dir_path = raw_path.to_path_buf(); + let mut dir_file_path = canon_path.clone(); + let dir_path = canon_path.clone(); canon_path.set_extension(EXT); + dir_file_path.push("_"); dir_file_path.set_extension(EXT); @@ -81,13 +95,12 @@ fn try_to_insert_new_name<'a>( book: &'a mut Book, ) -> bool { if let Some(first_occorence) = book.names.get(ident.to_string().as_str()) { - session - .diagnostic_sender - .send(Box::new(DriverError::DefinedMultipleTimes( - first_occorence.clone(), - ident, - ))) - .unwrap(); + let err = Box::new(DriverError::DefinedMultipleTimes( + first_occorence.clone(), + ident, + )); + + session.diagnostic_sender.send(err).unwrap(); *failed = true; false } else { @@ -107,29 +120,30 @@ fn module_to_book<'a>( for entry in module.entries { match entry { TopLevel::SumType(sum) => { - public_names.insert(sum.name.to_string()); + let name = sum.name.to_string(); + + public_names.insert(name.clone()); for cons in &sum.constructors { let mut cons_ident = sum.name.add_segment(cons.name.to_str()); cons_ident.range = cons.name.range; if try_to_insert_new_name(failed, session, cons_ident.clone(), book) { - public_names.insert(cons_ident.to_string()); - book.count - .insert(cons_ident.to_string(), cons.extract_book_info(&sum)); + let cons_name = cons_ident.to_string(); + public_names.insert(cons_name.clone()); + book.count.insert(cons_name, cons.extract_book_info(&sum)); } } if try_to_insert_new_name(failed, session, sum.name.clone(), book) { - book.count - .insert(sum.name.to_string(), sum.extract_book_info()); - book.entries - .insert(sum.name.to_string(), TopLevel::SumType(sum)); + book.count.insert(name.clone(), sum.extract_book_info()); + book.entries.insert(name, TopLevel::SumType(sum)); } } TopLevel::RecordType(rec) => { - public_names.insert(rec.name.to_string()); - book.count - .insert(rec.name.to_string(), rec.extract_book_info()); + let name = rec.name.to_string(); + 
public_names.insert(name.clone()); + book.count.insert(name.clone(), rec.extract_book_info()); + try_to_insert_new_name(failed, session, rec.name.clone(), book); let cons_ident = rec.name.add_segment(rec.constructor.to_str()); @@ -138,18 +152,18 @@ fn module_to_book<'a>( cons_ident.to_string(), rec.extract_book_info_of_constructor(), ); + try_to_insert_new_name(failed, session, cons_ident, book); - book.entries - .insert(rec.name.to_string(), TopLevel::RecordType(rec)); + book.entries.insert(name.clone(), TopLevel::RecordType(rec)); } TopLevel::Entry(entr) => { + let name = entr.name.to_string(); + try_to_insert_new_name(failed, session, entr.name.clone(), book); - public_names.insert(entr.name.to_string()); - book.count - .insert(entr.name.to_string(), entr.extract_book_info()); - book.entries - .insert(entr.name.to_string(), TopLevel::Entry(entr)); + public_names.insert(name.clone()); + book.count.insert(name.clone(), entr.extract_book_info()); + book.entries.insert(name, TopLevel::Entry(entr)); } } } @@ -176,25 +190,19 @@ fn parse_and_store_book_by_identifier( } } -fn parse_and_store_book_by_path( - session: &mut Session, - path: &PathBuf, - book: &mut Book, -) -> bool { +fn parse_and_store_book_by_path(session: &mut Session, path: &PathBuf, book: &mut Book) -> bool { if !path.exists() { - session - .diagnostic_sender - .send(Box::new(DriverError::CannotFindFile( - path.to_str().unwrap().to_string(), - ))) - .unwrap(); + let err = Box::new(DriverError::CannotFindFile( + path.to_str().unwrap().to_string(), + )); + + session.diagnostic_sender.send(err).unwrap(); return true; } - if session - .loaded_paths_map - .contains_key(&fs::canonicalize(path).unwrap()) - { + let canon_path = &fs::canonicalize(path).unwrap(); + + if session.loaded_paths_map.contains_key(canon_path) { return false; } @@ -213,15 +221,14 @@ fn parse_and_store_book_by_path( let ctx_id = session.book_counter; session.add_path(Rc::new(fs::canonicalize(path).unwrap()), input.clone()); + let tx = session.diagnostic_sender.clone(); - let (mut module, mut failed) = - kind_parser::parse_book(session.diagnostic_sender.clone(), ctx_id, &input); + let (mut module, mut failed) = kind_parser::parse_book(tx.clone(), ctx_id, &input); - expand_uses(&mut module, session.diagnostic_sender.clone()); + expand_uses(&mut module, tx.clone()); + expand_module(tx.clone(), &mut module); - expand_module(session.diagnostic_sender.clone(), &mut module); - - let mut state = UnboundCollector::new(session.diagnostic_sender.clone(), false); + let mut state = UnboundCollector::new(tx.clone(), false); state.visit_module(&mut module); for idents in state.unbound.values() { @@ -230,7 +237,7 @@ fn parse_and_store_book_by_path( } module_to_book(&mut failed, session, module, book); - + for idents in state.unbound_top_level.values() { let fst = idents.iter().next().unwrap(); if !book.names.contains_key(&fst.to_string()) { @@ -251,41 +258,45 @@ fn unbound_variable(session: &mut Session, book: &Book, idents: &[Ident]) { similar_names.sort_by(|x, y| x.0.total_cmp(&y.0)); - session - .diagnostic_sender - .send(Box::new(DriverError::UnboundVariable( - idents.to_vec(), - similar_names.iter().take(5).map(|x| x.1.clone()).collect(), - ))) - .unwrap(); + let err = Box::new(DriverError::UnboundVariable( + idents.to_vec(), + similar_names.iter().take(5).map(|x| x.1.clone()).collect(), + )); + + session.diagnostic_sender.send(err).unwrap(); } -pub fn parse_and_store_book(session: &mut Session, path: &PathBuf) -> Result { +pub fn parse_and_store_book(session: &mut Session, 
     let mut book = Book::default();
     if parse_and_store_book_by_path(session, path, &mut book) {
-        Err(())
+        Err(ResolutionError.into())
     } else {
         Ok(book)
     }
 }
-pub fn check_unbound_top_level(session: &mut Session, book: &mut Book) -> bool {
+pub fn check_unbound_top_level(session: &mut Session, book: &mut Book) -> anyhow::Result<()> {
     let mut failed = false;
     let (_, unbound_tops) =
         unbound::get_book_unbound(session.diagnostic_sender.clone(), book, true);
-    for (_, unbound) in unbound_tops {
+    for unbound in unbound_tops.values() {
         let res: Vec<Ident> = unbound
             .iter()
             .filter(|x| !x.generated)
             .map(|x| x.to_ident())
             .collect();
+
         if !res.is_empty() {
             unbound_variable(session, book, &res);
             failed = true;
         }
     }
-    failed
+    if failed {
+        Err(ResolutionError.into())
+    } else {
+        Ok(())
+    }
 }
diff --git a/crates/kind-parser/src/lexer/state.rs b/crates/kind-parser/src/lexer/state.rs
index edf71591..6f033154 100644
--- a/crates/kind-parser/src/lexer/state.rs
+++ b/crates/kind-parser/src/lexer/state.rs
@@ -96,8 +96,8 @@ impl<'a> Lexer<'a> {
         Some(str)
     }
-    #[inline]
     /// Useful as entrypoint
+    #[inline]
     pub fn lex_next(&mut self) -> (Token, Range) {
         self.lex_token()
     }
diff --git a/crates/kind-parser/src/top_level/mod.rs b/crates/kind-parser/src/top_level/mod.rs
index 7a2e5bb6..8e7ae367 100644
--- a/crates/kind-parser/src/top_level/mod.rs
+++ b/crates/kind-parser/src/top_level/mod.rs
@@ -1,11 +1,8 @@
 use fxhash::FxHashMap;
 use kind_tree::concrete::expr::Expr;
 use kind_tree::concrete::pat::{Pat, PatIdent, PatKind};
-/// Parses all of the top level structures
-/// like Book, Entry, Rule and Argument.
-use kind_tree::concrete::{
-    Argument, Attribute, Entry, ExprKind, Module, Rule, Telescope, TopLevel,
-};
+
+use kind_tree::concrete::*;
 use kind_tree::symbol::QualifiedIdent;
 use crate::errors::SyntaxDiagnostic;
@@ -85,6 +82,7 @@ impl<'a> Parser<'a> {
     fn parse_rule(&mut self, name: String) -> Result<Box<Rule>, SyntaxDiagnostic> {
         let start = self.range();
         let ident;
+
         if let Token::UpperId(name_id, ext) = self.get() {
             let qual = QualifiedIdent::new_static(name_id.as_str(), ext.clone(), start);
             if qual.to_string() == name {
@@ -95,6 +93,7 @@ impl<'a> Parser<'a> {
         } else {
             return self.fail(vec![]);
         }
+
         let mut pats = Vec::new();
         while !self.get().same_variant(&Token::Eq) && !self.get().same_variant(&Token::Eof) {
             pats.push(self.parse_pat()?);
diff --git a/crates/kind-pass/Cargo.toml b/crates/kind-pass/Cargo.toml
index 123161f8..84c63af3 100644
--- a/crates/kind-pass/Cargo.toml
+++ b/crates/kind-pass/Cargo.toml
@@ -13,4 +13,5 @@ kind-derive = { path = "../kind-derive" }
 linked-hash-map = "0.5.6"
 fxhash = "0.2.1"
-im-rc = "15.1.0"
\ No newline at end of file
+im-rc = "15.1.0"
+anyhow = "1.0.66"
\ No newline at end of file
diff --git a/crates/kind-pass/src/desugar/mod.rs b/crates/kind-pass/src/desugar/mod.rs
index e73391d7..19fa050e 100644
--- a/crates/kind-pass/src/desugar/mod.rs
+++ b/crates/kind-pass/src/desugar/mod.rs
@@ -16,7 +16,7 @@ use kind_tree::{
     symbol::Ident,
 };
-use crate::errors::PassError;
+use crate::errors::{PassError, GenericPassError};
 pub mod app;
 pub mod attributes;
@@ -35,7 +35,7 @@ pub struct DesugarState<'a> {
 pub fn desugar_book(
     errors: Sender<Box<dyn Diagnostic>>,
     book: &concrete::Book,
-) -> Result<desugared::Book, ()> {
+) -> anyhow::Result<desugared::Book> {
     let mut state = DesugarState {
         errors,
         old_book: book,
@@ -45,7 +45,7 @@ pub fn desugar_book(
     };
     state.desugar_book(book);
     if state.failed {
-        Err(())
+        Err(GenericPassError.into())
     } else {
         Ok(state.new_book)
     }
diff --git a/crates/kind-pass/src/desugar/top_level.rs b/crates/kind-pass/src/desugar/top_level.rs
index 3ff6cc3b..97d24fb2 100644
--- a/crates/kind-pass/src/desugar/top_level.rs
+++ b/crates/kind-pass/src/desugar/top_level.rs
@@ -245,7 +245,6 @@ impl<'a> DesugarState<'a> {
         use concrete::pat::PatKind;
         match &pat.data {
             PatKind::App(head, spine) => {
-                // TODO: Fix lol
                 let entry = self
                     .old_book
                     .count
diff --git a/crates/kind-pass/src/erasure/mod.rs b/crates/kind-pass/src/erasure/mod.rs
index 86034c61..60d28b15 100644
--- a/crates/kind-pass/src/erasure/mod.rs
+++ b/crates/kind-pass/src/erasure/mod.rs
@@ -9,7 +9,7 @@ use kind_tree::symbol::QualifiedIdent;
 use kind_tree::untyped::{self};
 use kind_tree::Number;
-use crate::errors::PassError;
+use crate::errors::{PassError, GenericPassError};
 #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
 enum Relevance {
@@ -55,7 +55,7 @@ pub fn erase_book(
     book: &desugared::Book,
     errs: Sender<Box<dyn Diagnostic>>,
     entrypoints: Vec,
-) -> Result<untyped::Book, ()> {
+) -> anyhow::Result<untyped::Book> {
     let mut state = ErasureState {
         errs,
         book,
@@ -100,7 +100,7 @@ impl<'a> ErasureState<'a> {
         &mut self,
         book: &'a desugared::Book,
         named_entrypoints: Vec,
-    ) -> Result<untyped::Book, ()> {
+    ) -> anyhow::Result<untyped::Book> {
         let mut vals = FxHashMap::default();
         let mut entrypoints = Vec::new();
@@ -190,7 +190,7 @@ impl<'a> ErasureState<'a> {
         }
         if self.failed {
-            Err(())
+            Err(GenericPassError.into())
         } else {
             Ok(new_book)
         }
diff --git a/crates/kind-pass/src/errors.rs b/crates/kind-pass/src/errors.rs
index e2e3e08d..82cfe50f 100644
--- a/crates/kind-pass/src/errors.rs
+++ b/crates/kind-pass/src/errors.rs
@@ -1,7 +1,20 @@
+use std::{fmt::Display, error::Error};
+
 use kind_report::data::{Color, Diagnostic, DiagnosticFrame, Marker, Severity};
 use kind_span::{Range, SyntaxCtxIndex};
 use kind_tree::symbol::Ident;
+#[derive(Debug)]
+pub struct GenericPassError;
+
+impl Display for GenericPassError {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "generic pass error")
+    }
+}
+
+impl Error for GenericPassError { }
+
 pub enum Sugar {
     DoNotation,
     List,
diff --git a/crates/kind-target-kdl/src/compile.rs b/crates/kind-target-kdl/src/compile.rs
index d005912f..a58e4d74 100644
--- a/crates/kind-target-kdl/src/compile.rs
+++ b/crates/kind-target-kdl/src/compile.rs
@@ -8,7 +8,7 @@ use tiny_keccak::Hasher;
 pub use kindelia_lang::ast as kdl;
-use crate::errors::KdlError;
+use crate::{errors::KdlError, GenericCompilationToHVMError};
 pub const KDL_NAME_LEN: usize = 12;
 const U60_MAX: kdl::U120 = kdl::U120(0xFFFFFFFFFFFFFFF);
@@ -97,7 +97,7 @@ pub fn compile_book(
     book: &untyped::Book,
     sender: Sender<Box<dyn Diagnostic>>,
     namespace: &str,
-) -> Result<kdl::File, ()> {
+) -> Result<kdl::File, GenericCompilationToHVMError> {
     let mut ctx = CompileCtx::new(book, sender);
     for (name, entry) in &book.entrs {
@@ -120,7 +120,7 @@ pub fn compile_book(
     }
     if ctx.failed {
-        return Err(());
+        return Err(GenericCompilationToHVMError);
     }
     Ok(ctx.file)
diff --git a/crates/kind-target-kdl/src/lib.rs b/crates/kind-target-kdl/src/lib.rs
index 6bdaba39..bb48e0f2 100644
--- a/crates/kind-target-kdl/src/lib.rs
+++ b/crates/kind-target-kdl/src/lib.rs
@@ -1,4 +1,4 @@
-use std::sync::mpsc::Sender;
+use std::{sync::mpsc::Sender, fmt::Display, error::Error};
 use flatten::flatten;
 use kind_report::data::Diagnostic;
@@ -12,11 +12,22 @@ mod flatten;
 mod linearize;
 mod subst;
+#[derive(Debug)]
+pub struct GenericCompilationToHVMError;
+
+impl Display for GenericCompilationToHVMError {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "generic compilation to hvm error")
+    }
+}
+
+impl Error for GenericCompilationToHVMError { }
+
 pub fn compile_book(
     book: untyped::Book,
     sender: Sender<Box<dyn Diagnostic>>,
     namespace: &str,
-) -> Result {
+) -> Result {
     // TODO: Remove kdl_states (maybe check if they're ever called?)
     // TODO: Convert to some sort of Kindelia.Contract
     let flattened = flatten(book);
diff --git a/crates/kind-tests/benches/pure.rs b/crates/kind-tests/benches/pure.rs
index 632a2238..e1f8aaf2 100644
--- a/crates/kind-tests/benches/pure.rs
+++ b/crates/kind-tests/benches/pure.rs
@@ -97,8 +97,8 @@ fn bench_exp_pure_check_unbound(b: &mut Bencher) {
     b.iter(|| {
         books.iter_mut().map(|(session, book)| {
-            let failed = resolution::check_unbound_top_level(session, book);
-            assert!(!failed)
+            let result = resolution::check_unbound_top_level(session, book);
+            assert!(result.is_ok());
         }).fold(0, |n, _| n + 1)
     })
 }
@@ -110,8 +110,8 @@ fn bench_exp_pure_desugar(b: &mut Bencher) {
     let mut books: Vec<_> = paths.iter().map(|x| {
         let mut session = new_session();
         let mut book = resolution::parse_and_store_book(&mut session, &PathBuf::from(x)).unwrap();
-        let failed = resolution::check_unbound_top_level(&mut session, &mut book);
-        assert!(!failed);
+        let result = resolution::check_unbound_top_level(&mut session, &mut book);
+        assert!(result.is_ok());
         (session, book)
     }).collect();
@@ -130,9 +130,9 @@ fn bench_exp_pure_erase(b: &mut Bencher) {
     let books: Vec<_> = paths.iter().map(|x| {
         let mut session = new_session();
         let mut book = resolution::parse_and_store_book(&mut session, &PathBuf::from(x)).unwrap();
-        let failed = resolution::check_unbound_top_level(&mut session, &mut book);
+        let result = resolution::check_unbound_top_level(&mut session, &mut book);
         let book = desugar::desugar_book(session.diagnostic_sender.clone(), &book).unwrap();
-        assert!(!failed);
+        assert!(result.is_ok());
         (session, book)
     }).collect();
@@ -156,9 +156,9 @@ fn bench_exp_pure_to_hvm(b: &mut Bencher) {
     let books: Vec<_> = paths.iter().map(|x| {
         let mut session = new_session();
         let mut book = resolution::parse_and_store_book(&mut session, &PathBuf::from(x)).unwrap();
-        let failed = resolution::check_unbound_top_level(&mut session, &mut book);
+        let result = resolution::check_unbound_top_level(&mut session, &mut book);
         let book = desugar::desugar_book(session.diagnostic_sender.clone(), &book).unwrap();
-        assert!(!failed);
+        assert!(result.is_ok());
         let book = erasure::erase_book(
             &book,