diff --git a/Cargo.lock b/Cargo.lock index 4cb5d064..d582a094 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3,15 +3,12 @@ version = 3 [[package]] -name = "ahash" -version = "0.8.11" +name = "TSPL" +version = "0.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" +checksum = "5a9423b1e6e2d6c0bbc03660f58f9c30f55359e13afea29432e6e767c0f7dc25" dependencies = [ - "cfg-if", - "once_cell", - "version_check", - "zerocopy", + "highlight_error", ] [[package]] @@ -68,12 +65,6 @@ version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" -[[package]] -name = "beef" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a8241f3ebb85c056b509d4327ad0358fbbba6ffb340bf388f26350aeda225b1" - [[package]] name = "cc" version = "1.0.92" @@ -86,16 +77,6 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" -[[package]] -name = "chumsky" -version = "1.0.0-alpha.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc3172a80699de358070dd99f80ea8badc6cdf8ac2417cb5a96e6d81bf5fe06d" -dependencies = [ - "hashbrown 0.13.2", - "stacker", -] - [[package]] name = "clap" version = "4.5.4" @@ -172,21 +153,6 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" -[[package]] -name = "fnv" -version = "1.0.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" - -[[package]] -name = "hashbrown" -version = "0.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" -dependencies = [ - "ahash", -] - [[package]] name = "hashbrown" version = "0.14.3" @@ -208,8 +174,9 @@ checksum = "809e18805660d7b6b2e2b9f316a5099521b5998d5cba4dda11b5157a21aaef03" [[package]] name = "hvm-core" version = "0.2.24" -source = "git+https://github.com/HigherOrderCO/hvm-core.git#54cf65f53cf15add868bc504c89bbdd5686d7160" +source = "git+https://github.com/HigherOrderCO/hvm-core.git#40febf34ce6c6b09a249553574da9835cb1ca976" dependencies = [ + "TSPL", "arrayvec", "clap", "nohash-hasher", @@ -221,7 +188,7 @@ dependencies = [ name = "hvm-lang" version = "0.1.0" dependencies = [ - "chumsky", + "TSPL", "clap", "highlight_error", "hvm-core", @@ -229,7 +196,6 @@ dependencies = [ "insta", "interner", "itertools", - "logos", "stacker", "stdext", "walkdir", @@ -242,7 +208,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" dependencies = [ "equivalent", - "hashbrown 0.14.3", + "hashbrown", ] [[package]] @@ -290,51 +256,12 @@ version = "0.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" -[[package]] -name = "logos" -version = "0.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "161971eb88a0da7ae0c333e1063467c5b5727e7fb6b710b8db4814eade3a42e8" -dependencies = [ - "logos-derive", -] - -[[package]] -name = "logos-codegen" -version = "0.14.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e31badd9de5131fdf4921f6473d457e3dd85b11b7f091ceb50e4df7c3eeb12a" -dependencies = [ - "beef", - "fnv", - "lazy_static", - "proc-macro2", - "quote", - "regex-syntax", - "syn", -] - -[[package]] -name = "logos-derive" -version = "0.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c2a69b3eb68d5bd595107c9ee58d7e07fe2bb5e360cc85b0f084dedac80de0a" -dependencies = [ - "logos-codegen", -] - [[package]] name = "nohash-hasher" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2bf50223579dc7cdcfb3bfcacf7069ff68243f8c363f62ffa99cf000a6b9c451" -[[package]] -name = "once_cell" -version = "1.19.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" - [[package]] name = "proc-macro2" version = "1.0.79" @@ -355,19 +282,13 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.35" +version = "1.0.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" +checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" dependencies = [ "proc-macro2", ] -[[package]] -name = "regex-syntax" -version = "0.8.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adad44e29e4c806119491a7f06f03de4d1af22c3a680dd47f1e6e179439d1f56" - [[package]] name = "same-file" version = "1.0.6" @@ -451,12 +372,6 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" -[[package]] -name = "version_check" -version = "0.9.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" - [[package]] name = "walkdir" version = "2.5.0" @@ -563,23 +478,3 @@ name = "windows_x86_64_msvc" version = "0.52.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8" - -[[package]] -name = "zerocopy" -version = "0.7.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74d4d3961e53fa4c9a25a8637fc2bfaf2595b3d3ae34875568a5cf64787716be" -dependencies = [ - "zerocopy-derive", -] - -[[package]] -name = "zerocopy-derive" -version = "0.7.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] diff --git a/Cargo.toml b/Cargo.toml index f9f8ac15..9f933e8c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -20,15 +20,13 @@ default = ["cli"] cli = ["dep:clap"] [dependencies] -# Later versions of the alpha contains bugs that changes the parser errors -chumsky = { version = "= 1.0.0-alpha.4", features = ["label"] } +TSPL = "0.0.9" clap = { version = "4.4.1", features = ["derive"], optional = true } highlight_error = "0.1.1" hvm-core = { git = "https://github.com/HigherOrderCO/hvm-core.git" } indexmap = "2.2.3" interner = "0.2.1" itertools = "0.11.0" -logos = "0.14.0" stacker = "0.1" [dev-dependencies] diff --git a/README.md b/README.md index 0cd1b0cc..3d9a1c8f 100644 --- a/README.md +++ b/README.md @@ -138,7 +138,7 @@ StrEx2 = (String.cons 'H' (String.cons 'e', (String.cons 'l' (String.cons 'l', ( Characters are delimited by `'` `'` and 
support Unicode escape sequences. They have a numeric value associated with them. ``` -main = '\u4242' +main = '\u{4242}' ``` Lists are delimited by `[` `]` and elements can be optionally separated by `,`. diff --git a/cspell.json b/cspell.json index f225b5c1..33e205a6 100644 --- a/cspell.json +++ b/cspell.json @@ -11,6 +11,7 @@ "builtins", "callcc", "chumsky", + "codepoint", "combinators", "concat", "ctrs", @@ -24,6 +25,7 @@ "effectful", "foldl", "hasher", + "hexdigit", "hvm's", "hvmc", "hvml", @@ -80,6 +82,7 @@ "succ", "supercombinator", "supercombinators", + "TSPL", "tunr", "unbounds", "vectorize", diff --git a/examples/gen_tree.hvm b/examples/gen_tree.hvm new file mode 100644 index 00000000..80d607a0 --- /dev/null +++ b/examples/gen_tree.hvm @@ -0,0 +1,9 @@ +Tree.leaf = λnode λleaf leaf +Tree.node = λval λlft λrgt λnode λleaf (node val lft rgt) + +Tree.gen n x = switch n { + 0: Tree.leaf + _: (Tree.node x (Tree.gen n-1 (+ (* x 2) 1)) (Tree.gen n-1 (+ (* x 2) 2))) +} + +main = (Tree.gen 8 2) diff --git a/examples/gen_tree_kind2.hvm b/examples/gen_tree_kind2.hvm deleted file mode 100644 index 8d78ccac..00000000 --- a/examples/gen_tree_kind2.hvm +++ /dev/null @@ -1,17 +0,0 @@ -_Char = 0 -_List = λ_T 0 -_List.cons = λ_head λ_tail λ_P λ_cons λ_nil ((_cons _head) _tail) -_List.nil = λ_P λ_cons λ_nil _nil -_Nat = 0 -_Nat.succ = λ_n λ_P λ_succ λ_zero (_succ _n) -_Nat.zero = λ_P λ_succ λ_zero _zero -_String = (_List _Char) -_String.cons = λ_head λ_tail λ_P λ_cons λ_nil ((_cons _head) _tail) -_String.nil = λ_P λ_cons λ_nil _nil -_Tree = λ_A 0 -_Tree.gen = λ_n λ_x switch _n = _n { 0: _Tree.leaf _: let _n-1 = _n-1 (((_Tree.node _x) ((_Tree.gen _n-1) (+ (* _x 2) 1))) ((_Tree.gen _n-1) (+ (* _x 2) 2))) } -_Tree.leaf = λ_P λ_node λ_leaf _leaf -_Tree.node = λ_val λ_lft λ_rgt λ_P λ_node λ_leaf (((_node _val) _lft) _rgt) -_main = ((_Tree.gen 8) 2) - -main = _main diff --git a/src/lib.rs b/src/lib.rs index 57adb893..15035767 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -486,3 +486,10 @@ pub struct RunStats { pub used: usize, pub run_time: f64, } + +fn maybe_grow(f: F) -> R +where + F: FnOnce() -> R, +{ + stacker::maybe_grow(1024 * 32, 1024 * 1024, f) +} diff --git a/src/main.rs b/src/main.rs index ca7bd70f..f8a0f525 100644 --- a/src/main.rs +++ b/src/main.rs @@ -19,7 +19,7 @@ struct Cli { pub verbose: bool, #[arg(short = 'e', long, global = true, help = "Use other entrypoint rather than main or Main")] - pub entrypoint: Option, + pub entrypoint: Option, } #[derive(Subcommand, Clone, Debug)] @@ -96,11 +96,7 @@ enum Mode { #[arg(help = "Path to the input file")] path: PathBuf, - #[arg(value_parser = |arg: &str| hvml::term::parser::parse_term(arg) - .map_err(|e| match e[0].reason() { - chumsky::error::RichReason::Many(errs) => format!("{}", &errs[0]), - _ => format!("{}", e[0].reason()), - }))] + #[arg(value_parser = |arg: &str| hvml::term::parser::TermParser::new_term(arg))] arguments: Option>, }, /// Runs the lambda-term level desugaring passes. 
@@ -292,7 +288,7 @@ fn execute_cli_mode(mut cli: Cli) -> Result<(), Diagnostics> { let load_book = |path: &Path| -> Result { let mut book = load_file_to_book(path)?; - book.entrypoint = entrypoint; + book.entrypoint = entrypoint.map(Name::new); if arg_verbose { println!("{book}"); diff --git a/src/term/builtins.rs b/src/term/builtins.rs index 13c82d1d..03bebdd4 100644 --- a/src/term/builtins.rs +++ b/src/term/builtins.rs @@ -1,4 +1,5 @@ -use super::{parser::parse_book, Book, Name, Pattern, Term}; +use super::{parser::TermParser, Book, Name, Pattern, Term}; +use crate::maybe_grow; const BUILTINS: &str = include_str!(concat!(env!("CARGO_MANIFEST_DIR"), "/src/term/builtins.hvm")); @@ -23,7 +24,8 @@ pub const NAT_ZERO: &str = "Nat.zero"; impl Book { pub fn builtins() -> Book { - parse_book(BUILTINS, Book::default, true).expect("Error parsing builtin file, this should not happen") + TermParser::new_book(BUILTINS, Book::default(), true) + .expect("Error parsing builtin file, this should not happen") } pub fn encode_builtins(&mut self) { @@ -38,7 +40,7 @@ impl Book { impl Term { fn encode_builtins(&mut self) { - Term::recursive_call(move || match self { + maybe_grow(|| match self { Term::Lst { els } => *self = Term::encode_list(std::mem::take(els)), Term::Str { val } => *self = Term::encode_str(val), Term::Nat { val } => *self = Term::encode_nat(*val), @@ -91,20 +93,20 @@ impl Pattern { } fn encode_list(elements: Vec) -> Pattern { - let lnil = Pattern::Ctr(Name::from(LNIL), vec![]); + let lnil = Pattern::Ctr(Name::new(LNIL), vec![]); elements.into_iter().rfold(lnil, |acc, mut nxt| { nxt.encode_builtins(); - Pattern::Ctr(Name::from(LCONS), vec![nxt, acc]) + Pattern::Ctr(Name::new(LCONS), vec![nxt, acc]) }) } fn encode_str(str: &str) -> Pattern { - let lnil = Pattern::Ctr(Name::from(SNIL), vec![]); + let lnil = Pattern::Ctr(Name::new(SNIL), vec![]); str.chars().rfold(lnil, |tail, head| { let head = Pattern::Num(head as u64); - Pattern::Ctr(Name::from(SCONS), vec![head, tail]) + Pattern::Ctr(Name::new(SCONS), vec![head, tail]) }) } } diff --git a/src/term/check/set_entrypoint.rs b/src/term/check/set_entrypoint.rs index 35e21273..347a7649 100644 --- a/src/term/check/set_entrypoint.rs +++ b/src/term/check/set_entrypoint.rs @@ -43,7 +43,7 @@ impl Ctx<'_> { } (None, None, None) => { - let entrypoint = self.book.entrypoint.clone().unwrap_or(Name::from(ENTRY_POINT)); + let entrypoint = self.book.entrypoint.clone().unwrap_or(Name::new(ENTRY_POINT)); self.info.add_book_error(EntryErr::NotFound(entrypoint)) } } @@ -59,8 +59,8 @@ fn validate_entry_point(entry: &Definition) -> Result { impl Book { fn get_possible_entry_points(&self) -> (Option<&Definition>, Option<&Definition>, Option<&Definition>) { let custom = self.entrypoint.as_ref().and_then(|e| self.defs.get(e)); - let main = self.defs.get(&Name::from(ENTRY_POINT)); - let hvm1_main = self.defs.get(&Name::from(HVM1_ENTRY_POINT)); + let main = self.defs.get(&Name::new(ENTRY_POINT)); + let hvm1_main = self.defs.get(&Name::new(HVM1_ENTRY_POINT)); (custom, main, hvm1_main) } } diff --git a/src/term/check/unbound_vars.rs b/src/term/check/unbound_vars.rs index f783ac14..9380eb5e 100644 --- a/src/term/check/unbound_vars.rs +++ b/src/term/check/unbound_vars.rs @@ -1,5 +1,6 @@ use crate::{ diagnostics::{Diagnostics, ToStringVerbose}, + maybe_grow, term::{Ctx, Name, Term}, }; use std::collections::{hash_map::Entry, HashMap}; @@ -62,7 +63,7 @@ pub fn check_uses<'a>( globals: &mut HashMap<&'a Name, (usize, usize)>, errs: &mut Vec, ) { - Term::recursive_call(move || 
match term { + maybe_grow(move || match term { Term::Var { nam } => { if !scope.contains_key(nam) { errs.push(UnboundVarErr::Local(nam.clone())); diff --git a/src/term/display.rs b/src/term/display.rs index fa46b3df..999d30d3 100644 --- a/src/term/display.rs +++ b/src/term/display.rs @@ -1,4 +1,5 @@ use super::{Book, Definition, Name, NumCtr, Pattern, Rule, Tag, Term}; +use crate::maybe_grow; use std::{fmt, ops::Deref}; /* Some aux structures for things that are not so simple to display */ @@ -41,7 +42,7 @@ macro_rules! display { impl fmt::Display for Term { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - Term::recursive_call(move || match self { + maybe_grow(|| match self { Term::Lam { tag, nam, bod } => { write!(f, "{}λ{} {}", tag.display_padded(), var_as_str(nam), bod) } diff --git a/src/term/load_book.rs b/src/term/load_book.rs index f5fc4413..0950bb77 100644 --- a/src/term/load_book.rs +++ b/src/term/load_book.rs @@ -1,8 +1,4 @@ -use super::{ - parser::{parse_book, parser::error_to_msg}, - Book, -}; -use itertools::Itertools; +use super::{parser::TermParser, Book}; use std::path::Path; /// Reads a file and parses to a definition book. @@ -12,8 +8,5 @@ pub fn load_file_to_book(path: &Path) -> Result { } pub fn do_parse_book(code: &str, path: &Path) -> Result { - match parse_book(code, Book::builtins, false) { - Ok(book) => Ok(book), - Err(errs) => Err(errs.iter().map(|e| error_to_msg(e, code, path)).join("\n")), - } + TermParser::new_book(code, Book::builtins(), false).map_err(|e| format!("In {} :\n{}", path.display(), e)) } diff --git a/src/term/mod.rs b/src/term/mod.rs index 7c3fcedf..5ae8e1b8 100644 --- a/src/term/mod.rs +++ b/src/term/mod.rs @@ -1,6 +1,7 @@ -use self::parser::lexer::STRINGS; +use self::parser::STRINGS; use crate::{ diagnostics::{Diagnostics, DiagnosticsConfig}, + maybe_grow, term::builtins::*, ENTRY_POINT, }; @@ -303,7 +304,7 @@ impl Tag { impl Clone for Term { fn clone(&self) -> Self { - Self::recursive_call(move || match self { + maybe_grow(|| match self { Self::Lam { tag, nam, bod } => Self::Lam { tag: tag.clone(), nam: nam.clone(), bod: bod.clone() }, Self::Var { nam } => Self::Var { nam: nam.clone() }, Self::Chn { tag, nam, bod } => Self::Chn { tag: tag.clone(), nam: nam.clone(), bod: bod.clone() }, @@ -416,7 +417,7 @@ impl Term { } pub fn r#ref(name: &str) -> Self { - Term::Ref { nam: name.into() } + Term::Ref { nam: Name::new(name) } } pub fn str(str: &str) -> Self { @@ -654,12 +655,6 @@ impl Term { } /* Common checks and transformations */ - pub fn recursive_call(f: F) -> R - where - F: FnOnce() -> R, - { - stacker::maybe_grow(1024 * 32, 1024 * 1024, f) - } /// Substitute the occurrences of a variable in a term with the given term. /// @@ -669,7 +664,7 @@ impl Term { /// Expects var bind information to be properly stored in match expressions, /// so it must run AFTER `fix_match_terms`. pub fn subst(&mut self, from: &Name, to: &Term) { - Term::recursive_call(|| { + maybe_grow(|| { for (child, binds) in self.children_mut_with_binds() { if !binds.flat_map(|b| b.as_ref()).contains(from) { child.subst(from, to); @@ -686,7 +681,7 @@ impl Term { /// Substitute the occurrence of an unscoped variable with the given term. pub fn subst_unscoped(&mut self, from: &Name, to: &Term) { - Term::recursive_call(|| { + maybe_grow(|| { // We don't check the unscoped binds because there can be only one bind of an unscoped var. // TODO: potentially there could be some situation where this causes an incorrect program to compile? 
for child in self.children_mut() { @@ -705,7 +700,7 @@ impl Term { /// and the number of times each var is used pub fn free_vars(&self) -> HashMap { fn go(term: &Term, free_vars: &mut HashMap) { - Term::recursive_call(move || { + maybe_grow(|| { if let Term::Var { nam } = term { *free_vars.entry(nam.clone()).or_default() += 1; } @@ -731,7 +726,7 @@ impl Term { /// Returns the set of declared and the set of used unscoped variables pub fn unscoped_vars(&self) -> (IndexSet, IndexSet) { fn go(term: &Term, decls: &mut IndexSet, uses: &mut IndexSet) { - Term::recursive_call(move || { + maybe_grow(|| { match term { Term::Chn { nam: Some(nam), .. } => { decls.insert(nam.clone()); @@ -865,25 +860,19 @@ impl Name { impl Default for Name { fn default() -> Self { - Self::from("") - } -} - -impl From<&str> for Name { - fn from(value: &str) -> Self { - Name(STRINGS.get(value)) + Self::new("") } } impl From for Name { fn from(value: u64) -> Self { - num_to_name(value).as_str().into() + Name::new(num_to_name(value).as_str()) } } impl From for Name { fn from(value: u32) -> Self { - num_to_name(value as u64).as_str().into() + Name::new(num_to_name(value as u64).as_str()) } } diff --git a/src/term/net_to_term.rs b/src/term/net_to_term.rs index 67fc2952..e9662f45 100644 --- a/src/term/net_to_term.rs +++ b/src/term/net_to_term.rs @@ -1,5 +1,6 @@ use crate::{ diagnostics::{DiagnosticOrigin, Diagnostics, Severity}, + maybe_grow, net::{INet, NodeId, NodeKind::*, Port, SlotId, ROOT}, term::{num_to_name, term_to_net::Labels, Book, Name, Tag, Term}, }; @@ -68,10 +69,10 @@ pub struct Reader<'a> { impl Reader<'_> { fn read_term(&mut self, next: Port) -> Term { - Term::recursive_call(move || { + maybe_grow(|| { if self.dup_paths.is_none() && !self.seen.insert(next) { self.error(ReadbackError::Cyclic); - return Term::Var { nam: Name::from("...") }; + return Term::Var { nam: Name::new("...") }; } let node = next.node(); @@ -122,7 +123,7 @@ impl Reader<'_> { Term::Lam { nam, bod, .. } => { // Extract non-var args so we can refer to the pred. let (arg, bind) = if let Term::Var { nam } = &mut arg { - (std::mem::replace(nam, Name::from("")), None) + (std::mem::take(nam), None) } else { (self.namegen.unique(), Some(arg)) }; @@ -341,7 +342,7 @@ impl Term { /// This has the effect of inserting the split at the lowest common ancestor /// of all of the uses of `fst` and `snd`. fn insert_split(&mut self, split: &mut Split, threshold: usize) -> Option { - Term::recursive_call(move || { + maybe_grow(|| { let mut n = match self { Term::Var { nam } => usize::from(split.fst == *nam || split.snd == *nam), _ => 0, @@ -374,7 +375,7 @@ impl Term { } } - Term::recursive_call(move || match self { + maybe_grow(|| match self { Term::Ref { nam: def_name } => { if def_name.is_generated() { let def = book.defs.get(def_name).unwrap(); diff --git a/src/term/parser.rs b/src/term/parser.rs new file mode 100644 index 00000000..99caef86 --- /dev/null +++ b/src/term/parser.rs @@ -0,0 +1,813 @@ +use crate::{ + maybe_grow, + term::{ + display::DisplayFn, Adt, Book, Definition, IntOp, MatchRule, Name, NumCtr, Op, OpType, Pattern, Rule, + SwitchRule, Tag, Term, + }, +}; +use highlight_error::highlight_error; +use interner::global::GlobalPool; +use TSPL::Parser; + +pub static STRINGS: GlobalPool = GlobalPool::new(); + +// hvml grammar description: +// ::= ( | )* +// ::= "data" "=" ( | "(" ()* ")" )+ +// ::= ("(" * ")" | *) "=" +// ::= "(" * ")" | | | "(" ("," )+ ")" +// ::= +// | | | | | | | | +// | | | | | | | | +// ::= ? ("λ"|"@") +// ::= ? 
("λ"|"@") "$" +// ::= "(" ")" +// ::= "(" ("," )+ ")" +// ::= ? "(" ()+ ")" +// ::= "(" ")" +// ::= "use" "=" ";"? +// ::= "let" "=" ";"? +// ::= "let" "(" ("," )+ ")" "=" ";"? +// ::= "let" ? "{" (","? )+ "}" "=" ";"? +// ::= "[" ( ","?)* "]" +// ::= "\"" (escape sequence | [^"])* "\"" +// ::= "'" (escape sequence | [^']) "'" +// ::= "match" ( | "=" ) ("with" (","? )*)? "{" + "}" +// ::= "|"? ":" ";"? +// ::= "match" ( | "=" ) ("with" (","? )*)? "{" + "}" +// ::= "|"? (|"_") ":" ";"? +// ::= +// ::= "$" +// ::= | "*" +// ::= "*" +// ::= "#" +// ::= [_\-./a-zA-Z0-9]+ +// ::= ([0-9]+ | "0x"[0-9a-fA-F]+ | "0b"[01]+) +// ::= ( "+" | "-" | "*" | "/" | "%" | "==" | "!=" | "<<" | ">>" | "<=" | ">=" | "<" | ">" | "^" ) + +TSPL::new_parser!(TermParser); + +impl<'a> TermParser<'a> { + // TODO: Since TSPL doesn't expose `new` we need something that creates the parser. + pub fn new_book(input: &'a str, default_book: Book, builtin: bool) -> Result { + Self::new(input).parse_book(default_book, builtin) + } + + pub fn new_term(input: &'a str) -> Result { + Self::new(input).parse_term() + } + + /* AST parsing functions */ + + fn parse_book(&mut self, default_book: Book, builtin: bool) -> Result { + let mut book = default_book; + self.skip_trivia(); + while !self.is_eof() { + let ini_idx = *self.index(); + if self.skip_starts_with("data") { + // adt declaration + let (nam, adt) = self.parse_datatype(builtin)?; + let end_idx = *self.index(); + book.add_adt(nam, adt).map_err(|e| add_ctx(&e, ini_idx, end_idx, self.input()))?; + } else { + // function declaration rule + let (name, rule) = self.parse_rule()?; + book.add_rule(name, rule, builtin); + } + self.skip_trivia(); + } + + Ok(book) + } + + fn parse_datatype(&mut self, builtin: bool) -> Result<(Name, Adt), String> { + // data name = ctr (| ctr)* + self.consume("data")?; + let name = self.labelled(|p| p.parse_hvml_name(), "datatype name")?; + self.consume("=")?; + let mut ctrs = vec![self.parse_datatype_ctr()?]; + while self.try_consume("|") { + ctrs.push(self.parse_datatype_ctr()?); + } + let ctrs = ctrs.into_iter().collect(); + let adt = Adt { ctrs, builtin }; + Ok((name, adt)) + } + + fn parse_datatype_ctr(&mut self) -> Result<(Name, Vec), String> { + if self.skip_starts_with("(") { + // (name field*) + let field_parser = |p: &mut Self| p.labelled(|p| p.parse_hvml_name(), "datatype constructor field"); + let mut els = self.list_like(field_parser, "(", ")", "", false, 1)?; + let fields = els.split_off(1); + let name = els.into_iter().next().unwrap(); + Ok((name, fields)) + } else { + // name + let name = self.labelled(|p| p.parse_hvml_name(), "datatype constructor name")?; + Ok((name, vec![])) + } + } + + fn parse_rule(&mut self) -> Result<(Name, Rule), String> { + let (name, pats) = if self.try_consume("(") { + let name = self.labelled(|p| p.parse_hvml_name(), "function name")?; + let pats = self.list_like(|p| p.parse_rule_pattern(), "", ")", "", false, 0)?; + (name, pats) + } else { + let name = self.labelled(|p| p.parse_hvml_name(), "top-level definition")?; + let mut pats = vec![]; + while !self.skip_starts_with("=") { + pats.push(self.parse_rule_pattern()?); + } + (name, pats) + }; + + self.consume("=")?; + + let body = self.parse_term()?; + + let rule = Rule { pats, body }; + Ok((name, rule)) + } + + fn parse_rule_pattern(&mut self) -> Result { + maybe_grow(|| { + let Some(head) = self.skip_peek_one() else { return self.expected("pattern-matching pattern") }; + let pat = match head { + // Ctr or Tup + '(' => { + self.consume("(")?; + let 
head_ini_idx = *self.index(); + let head = self.parse_rule_pattern()?; + let head_end_idx = *self.index(); + + if self.try_consume(",") { + // Tup + let mut els = self.list_like(|p| p.parse_rule_pattern(), "", ")", ",", true, 1)?; + els.insert(0, head); + Pattern::Tup(els) + } else { + // Ctr + let Pattern::Var(Some(name)) = head else { + return self.expected_spanned("constructor name", head_ini_idx, head_end_idx); + }; + let els = self.list_like(|p| p.parse_rule_pattern(), "", ")", "", false, 0)?; + Pattern::Ctr(name, els) + } + } + // List + '[' => { + let els = self.list_like(|p| p.parse_rule_pattern(), "[", "]", ",", false, 0)?; + Pattern::Lst(els) + } + // String + '\"' => { + let str = self.parse_quoted_string()?; + Pattern::Str(STRINGS.get(str)) + } + // Char + '\'' => { + let char = self.parse_quoted_char()?; + Pattern::Num(char as u64) + } + // Number + c if c.is_numeric() => { + let num = self.parse_u64()?; + Pattern::Num(num) + } + // Var + _ => { + let name = self.parse_name_or_era()?; + Pattern::Var(name) + } + }; + Ok(pat) + }) + } + + fn parse_term(&mut self) -> Result { + maybe_grow(|| { + let Some(head) = self.skip_peek_one() else { return self.expected("term") }; + let term = match head { + // Lambda, unscoped lambda + 'λ' | '@' => self.parse_lambda(Tag::Static)?, + // App, Tup, Num Op + '(' => { + self.consume("(")?; + let starts_with_oper = self.skip_peek_one().map_or(false, |c| "+-*/%&|<>^=!".contains(c)); + if starts_with_oper { + let opr = self.parse_oper()?; + if self.skip_starts_with(",") + && let Op { ty: _, op: IntOp::Mul } = opr + { + // jk, actually a tuple + let mut els = vec![Term::Era]; + while self.try_consume(",") { + els.push(self.parse_term()?); + } + self.consume(")")?; + Term::Tup { els } + } else { + let fst = self.parse_term()?; + let snd = self.parse_term()?; + self.consume(")")?; + Term::Opx { opr, fst: Box::new(fst), snd: Box::new(snd) } + } + } else { + // Tup or App + let head = self.parse_term()?; + if self.skip_starts_with(",") { + // Tup + let mut els = vec![head]; + while self.try_consume(",") { + els.push(self.parse_term()?); + } + self.consume(")")?; + Term::Tup { els } + } else { + // App + let els = self.list_like(|p| p.parse_term(), "", ")", "", false, 0)?; + els.into_iter().fold(head, |fun, arg| Term::App { + tag: Tag::Static, + fun: Box::new(fun), + arg: Box::new(arg), + }) + } + } + } + // List + '[' => { + let els = self.list_like(|p| p.parse_term(), "[", "]", ",", false, 0)?; + Term::Lst { els } + } + // Sup + '{' => { + let els = self.list_like(|p| p.parse_term(), "{", "}", ",", false, 2)?; + Term::Sup { tag: Tag::Auto, els } + } + // Unscoped var + '$' => { + self.consume("$")?; + let nam = self.parse_hvml_name()?; + Term::Lnk { nam } + } + // Era + '*' => { + self.consume("*")?; + Term::Era + } + // Nat, tagged lambda, tagged sup, tagged app + '#' => { + let Some(head) = self.peek_many(2) else { return self.expected("tagged term or nat") }; + let head = head.chars().collect::>(); + if head[1].is_numeric() { + // Nat + self.consume("#")?; + let val = self.parse_u64()?; + Term::Nat { val } + } else { + // Tagged term + let tag = self.parse_tag()?; + let Some(head) = self.skip_peek_one() else { return self.expected("tagged term") }; + match head { + // Tagged app + '(' => { + let els = self.list_like(|p| p.parse_term(), "(", ")", "", false, 2)?; + els + .into_iter() + .reduce(|fun, arg| Term::App { tag: tag.clone(), fun: Box::new(fun), arg: Box::new(arg) }) + .unwrap() + } + // Tagged sup + '{' => { + let els = self.list_like(|p| 
p.parse_term(), "{", "}", ",", false, 2)?; + Term::Sup { tag, els } + } + // Tagged lambda + 'λ' | '@' => self.parse_lambda(tag)?, + _ => return self.expected("tagged term"), + } + } + } + // String + '"' => { + let val = self.parse_quoted_string()?; + Term::Str { val: STRINGS.get(val) } + } + // Char + '\'' => { + let chr = self.parse_quoted_char()?; + Term::Num { val: chr as u64 } + } + // Native num + c if c.is_numeric() => { + let val = self.parse_u64()?; + Term::Num { val } + } + _ => { + if self.try_consume("use") { + // Use + let nam = self.parse_hvml_name()?; + self.consume("=")?; + let val = self.parse_term()?; + self.try_consume(";"); + let nxt = self.parse_term()?; + Term::Use { nam: Some(nam), val: Box::new(val), nxt: Box::new(nxt) } + } else if self.try_consume("let") { + // Let, let tup, dup, tagged dup + let Some(head) = self.skip_peek_one() else { return self.expected("let bind") }; + match head { + // tagged dup + '#' => { + let tag = self.parse_tag()?; + let bnd = self.list_like(|p| p.parse_name_or_era(), "{", "}", ",", false, 2)?; + self.consume("=")?; + let val = self.parse_term()?; + self.try_consume(";"); + let nxt = self.parse_term()?; + Term::Dup { tag, bnd, val: Box::new(val), nxt: Box::new(nxt) } + } + // dup + '{' => { + let bnd = self.list_like(|p| p.parse_name_or_era(), "{", "}", ",", false, 2)?; + self.consume("=")?; + let val = self.parse_term()?; + self.try_consume(";"); + let nxt = self.parse_term()?; + Term::Dup { tag: Tag::Auto, bnd, val: Box::new(val), nxt: Box::new(nxt) } + } + // Let tup + '(' => { + let bnd = self.list_like(|p| p.parse_name_or_era(), "(", ")", ",", true, 2)?; + self.consume("=")?; + let val = self.parse_term()?; + self.try_consume(";"); + let nxt = self.parse_term()?; + Term::Ltp { bnd, val: Box::new(val), nxt: Box::new(nxt) } + } + // let + _ => { + let nam = self.parse_name_or_era()?; + self.consume("=")?; + let val = self.parse_term()?; + self.try_consume(";"); + let nxt = self.parse_term()?; + Term::Let { nam, val: Box::new(val), nxt: Box::new(nxt) } + } + } + } else if self.try_consume("match") { + // match + let arg_ini_idx = *self.index(); + let mut arg = self.parse_term()?; + let arg_end_idx = *self.index(); + + let (bnd, arg) = if self.skip_starts_with("=") { + if let Term::Var { nam } = &mut arg { + self.consume("=")?; + let term = self.parse_term()?; + (Some(std::mem::take(nam)), term) + } else { + return self.expected_spanned("var", arg_ini_idx, arg_end_idx); + } + } else { + (None, arg) + }; + let with = if self.try_consume("with") { + let mut with = vec![self.parse_hvml_name()?]; + while !self.skip_starts_with("{") { + self.try_consume(","); + with.push(self.parse_hvml_name()?); + } + with + } else { + vec![] + }; + let rules = self.list_like(|p| p.parse_match_arm(), "{", "}", ";", false, 1)?; + if let Some(bnd) = bnd { + Term::Let { + nam: Some(bnd.clone()), + val: Box::new(arg), + nxt: Box::new(Term::Mat { arg: Box::new(Term::Var { nam: bnd }), with, rules }), + } + } else { + Term::Mat { arg: Box::new(arg), with, rules } + } + } else if self.try_consume("switch") { + // switch + let arg_ini_idx = *self.index(); + let mut arg = self.parse_term()?; + let arg_end_idx = *self.index(); + + let (bnd, arg) = if self.skip_starts_with("=") { + if let Term::Var { nam } = &mut arg { + self.consume("=")?; + let term = self.parse_term()?; + (Some(std::mem::take(nam)), term) + } else { + return self.expected_spanned("var", arg_ini_idx, arg_end_idx); + } + } else { + (None, arg) + }; + let with = if self.try_consume("with") { + let 
mut with = vec![self.parse_hvml_name()?]; + while !self.skip_starts_with("{") { + self.try_consume(","); + with.push(self.parse_hvml_name()?); + } + with + } else { + vec![] + }; + // TODO: we could enforce correct switches at the parser level to get a spanned error. + let rules = self.list_like(|p| p.parse_switch_arm(), "{", "}", ";", false, 1)?; + if let Some(bnd) = bnd { + Term::Let { + nam: Some(bnd.clone()), + val: Box::new(arg), + nxt: Box::new(Term::Swt { arg: Box::new(Term::Var { nam: bnd }), with, rules }), + } + } else { + Term::Swt { arg: Box::new(arg), with, rules } + } + } else { + // var + let nam = self.labelled(|p| p.parse_hvml_name(), "term")?; + Term::Var { nam } + } + } + }; + Ok(term) + }) + } + + fn parse_oper(&mut self) -> Result { + let opr = if self.try_consume("+") { + Op { ty: OpType::U60, op: IntOp::Add } + } else if self.try_consume("-") { + Op { ty: OpType::U60, op: IntOp::Sub } + } else if self.try_consume("*") { + Op { ty: OpType::U60, op: IntOp::Mul } + } else if self.try_consume("/") { + Op { ty: OpType::U60, op: IntOp::Div } + } else if self.try_consume("%") { + Op { ty: OpType::U60, op: IntOp::Rem } + } else if self.try_consume("<<") { + Op { ty: OpType::U60, op: IntOp::Shl } + } else if self.try_consume(">>") { + Op { ty: OpType::U60, op: IntOp::Shr } + } else if self.try_consume("<=") { + Op { ty: OpType::U60, op: IntOp::Le } + } else if self.try_consume(">=") { + Op { ty: OpType::U60, op: IntOp::Ge } + } else if self.try_consume("<") { + Op { ty: OpType::U60, op: IntOp::Lt } + } else if self.try_consume(">") { + Op { ty: OpType::U60, op: IntOp::Gt } + } else if self.try_consume("==") { + Op { ty: OpType::U60, op: IntOp::Eq } + } else if self.try_consume("!=") { + Op { ty: OpType::U60, op: IntOp::Ne } + } else if self.try_consume("&") { + Op { ty: OpType::U60, op: IntOp::And } + } else if self.try_consume("|") { + Op { ty: OpType::U60, op: IntOp::Or } + } else if self.try_consume("^") { + Op { ty: OpType::U60, op: IntOp::Xor } + } else { + return self.expected("numeric operator"); + }; + Ok(opr) + } + + fn parse_lambda(&mut self, tag: Tag) -> Result { + self.advance_one().unwrap(); + let term = if self.try_consume("$") { + // unscoped lambda + let nam = self.parse_hvml_name()?; + let bod = self.parse_term()?; + Term::Chn { tag, nam: Some(nam), bod: Box::new(bod) } + } else { + // normal lambda + let nam = self.parse_name_or_era()?; + let bod = self.parse_term()?; + Term::Lam { tag, nam, bod: Box::new(bod) } + }; + Ok(term) + } + + fn parse_hvml_name(&mut self) -> Result { + let nam = self.parse_name()?; + Ok(Name::new(nam)) + } + + fn parse_name_or_era(&mut self) -> Result, String> { + self.labelled( + |p| { + if p.try_consume("*") { + Ok(None) + } else { + let nam = p.parse_hvml_name()?; + Ok(Some(nam)) + } + }, + "name or '*'", + ) + } + + fn parse_tag(&mut self) -> Result { + self.consume("#")?; + let nam = self.labelled(|p| p.parse_hvml_name(), "tag name")?; + Ok(Tag::Named(nam)) + } + + fn parse_match_arm(&mut self) -> Result { + self.try_consume("|"); + let nam = self.parse_name_or_era()?; + self.consume(":")?; + let bod = self.parse_term()?; + Ok((nam, vec![], bod)) + } + + fn parse_switch_arm(&mut self) -> Result { + self.try_consume("|"); + let Some(head) = self.skip_peek_one() else { return self.expected("switch pattern") }; + let ctr = match head { + '_' => { + self.consume("_")?; + NumCtr::Succ(None) + } + c if c.is_numeric() => { + let val = self.parse_u64()?; + NumCtr::Num(val) + } + _ => return self.expected("switch pattern"), + }; + 
self.consume(":")?; + let bod = self.parse_term()?; + Ok((ctr, bod)) + } + + /* Utils */ + + /// Checks if the next characters in the input start with the given string. + /// Skips trivia. + fn skip_starts_with(&mut self, text: &str) -> bool { + self.skip_trivia(); + self.starts_with(text) + } + + fn skip_peek_one(&mut self) -> Option { + self.skip_trivia(); + self.peek_one() + } + + /// Parses a list-like structure like "[x1, x2, x3,]". + /// + /// `parser` is a function that parses an element of the list. + /// + /// If `hard_sep` the separator between elements is mandatory. + /// Always accepts trailing separators. + /// + /// `min_els` determines how many elements must be parsed at minimum. + fn list_like( + &mut self, + parser: impl Fn(&mut Self) -> Result, + start: &str, + end: &str, + sep: &str, + hard_sep: bool, + min_els: usize, + ) -> Result, String> { + self.consume(start)?; + let mut els = vec![]; + for i in 0 .. min_els { + els.push(parser(self)?); + if hard_sep && !(i == min_els - 1 && self.skip_starts_with(end)) { + self.consume(sep)?; + } else { + self.try_consume(sep); + } + } + + while !self.try_consume(end) { + els.push(parser(self)?); + if hard_sep && !self.skip_starts_with(end) { + self.consume(sep)?; + } else { + self.try_consume(sep); + } + } + Ok(els) + } + + fn labelled( + &mut self, + parser: impl Fn(&mut Self) -> Result, + label: &str, + ) -> Result { + match parser(self) { + Ok(val) => Ok(val), + Err(_) => self.expected(label), + } + } + + /* Overrides */ + + /// Generates an error message for parsing failures, including the highlighted context. + /// + /// Override to have our own error message. + fn expected(&mut self, exp: &str) -> Result { + let ini_idx = *self.index(); + let end_idx = *self.index() + 1; + self.expected_spanned(exp, ini_idx, end_idx) + } + + fn expected_spanned(&mut self, exp: &str, ini_idx: usize, end_idx: usize) -> Result { + let ctx = highlight_error(ini_idx, end_idx, self.input()); + let is_eof = self.is_eof(); + let detected = DisplayFn(|f| if is_eof { write!(f, " end of input") } else { write!(f, "\n{ctx}") }); + Err(format!("\x1b[1m- expected:\x1b[0m {}\n\x1b[1m- detected:\x1b[0m{}", exp, detected)) + } + + /// Consumes an instance of the given string, erroring if it is not found. + /// + /// Override to have our own error message. + fn consume(&mut self, text: &str) -> Result<(), String> { + self.skip_trivia(); + if self.input().get(*self.index() ..).unwrap_or_default().starts_with(text) { + *self.index() += text.len(); + Ok(()) + } else { + self.expected(format!("'{text}'").as_str()) + } + } + + /// Consumes text if the input starts with it. Otherwise, do nothing. + fn try_consume(&mut self, text: &str) -> bool { + self.skip_trivia(); + if self.starts_with(text) { + self.consume(text).unwrap(); + true + } else { + false + } + } + + /// Parses a name from the input, supporting alphanumeric characters, underscores, periods, and hyphens. + /// + /// Override to call our own `expected`. + fn parse_name(&mut self) -> Result { + self.skip_trivia(); + let name = self.take_while(|c| c.is_ascii_alphanumeric() || c == '_' || c == '.' || c == '-' || c == '/'); + if name.is_empty() { self.expected("name") } else { Ok(name.to_owned()) } + } + + // TODO: Override because the lib has a bug where it will error on '_' . + /// Parses a u64 from the input, supporting dec, hex (0xNUM), and bin (0bNUM). 
+ fn parse_u64(&mut self) -> Result<u64, String> { + self.skip_trivia(); + let radix = match self.peek_many(2) { + Some("0x") => { + self.advance_many(2); + 16 + } + Some("0b") => { + self.advance_many(2); + 2 + } + _ => 10, + }; + let num_str = self.take_while(move |c| c.is_digit(radix) || c == '_'); + let num_str = num_str.chars().filter(|c| *c != '_').collect::<String>(); + if num_str.is_empty() { + self.expected("numeric digit") + } else { + u64::from_str_radix(&num_str, radix).map_err(|e| e.to_string()) + } + } + + // TODO: Override to accept more escape sequences + /// Parses a single unicode character, supporting escape sequences. + fn parse_char(&mut self) -> Result<char, String> { + match self.advance_one() { + Some('\\') => match self.advance_one() { + Some('u' | 'U') => { + self.consume("{")?; + let codepoint_str = self.take_while(|c| c.is_ascii_hexdigit()); + self.consume("}")?; + u32::from_str_radix(codepoint_str, 16) + .ok() + .and_then(std::char::from_u32) + .ok_or_else(|| self.expected::<char>("unicode-codepoint").unwrap_err()) + } + Some('n') => Ok('\n'), + Some('r') => Ok('\r'), + Some('t') => Ok('\t'), + Some('\'') => Ok('\''), + Some('\"') => Ok('\"'), + Some('\\') => Ok('\\'), + Some('0') => Ok('\0'), + Some(chr) => self.expected(&format!("\\{}", chr)), + None => self.expected("escaped-char"), + }, + Some(other) => Ok(other), + None => self.expected("char"), + } + } + + // TODO: Override to accept more escape sequences + /// Parses a quoted character, like 'x'. + fn parse_quoted_char(&mut self) -> Result<char, String> { + self.skip_trivia(); + self.consume("'")?; + let chr = self.parse_char()?; + self.consume("'")?; + Ok(chr) + } + + // TODO: Override to accept more escape sequences + /// Parses a quoted string, like "foobar". + fn parse_quoted_string(&mut self) -> Result<String, String> { + self.skip_trivia(); + self.consume("\"")?; + let mut result = String::new(); + while let Some(chr) = self.peek_one() { + if chr == '"' { + break; + } else { + result.push(self.parse_char()?); + } + } + self.consume("\"")?; + Ok(result) + } + + // TODO: override to avoid looping on ending in comment without \n + /// Skips whitespace and line comments in the input.
+ fn skip_trivia(&mut self) { + while let Some(c) = self.peek_one() { + if c.is_ascii_whitespace() { + self.advance_one(); + continue; + } + if c == '/' && self.input().get(*self.index() ..).unwrap_or_default().starts_with("//") { + loop { + if let Some(c) = self.peek_one() { + if c != '\n' { + self.advance_one(); + } else { + break; + } + } else { + break; + } + } + self.advance_one(); // Skip the newline character as well + continue; + } + break; + } + } +} + +impl Book { + fn add_adt(&mut self, nam: Name, adt: Adt) -> Result<(), String> { + if let Some(adt) = self.adts.get(&nam) { + if adt.builtin { + return Err(format!("{} is a built-in datatype and should not be overridden.", nam)); + } else { + return Err(format!("Repeated datatype '{}'", nam)); + } + } else { + for ctr in adt.ctrs.keys() { + match self.ctrs.entry(ctr.clone()) { + indexmap::map::Entry::Vacant(e) => _ = e.insert(nam.clone()), + indexmap::map::Entry::Occupied(e) => { + if self.adts.get(e.get()).is_some_and(|adt| adt.builtin) { + return Err(format!("{} is a built-in constructor and should not be overridden.", e.key())); + } else { + return Err(format!("Repeated constructor '{}'", e.key())); + } + } + } + } + self.adts.insert(nam.clone(), adt); + } + Ok(()) + } + + fn add_rule(&mut self, name: Name, rule: Rule, builtin: bool) { + if let Some(def) = self.defs.get_mut(&name) { + def.rules.push(rule); + } else { + self.defs.insert(name.clone(), Definition { name, rules: vec![rule], builtin }); + } + } +} + +fn add_ctx(msg: &str, ini_idx: usize, end_idx: usize, file: &str) -> String { + let ctx = highlight_error(ini_idx, end_idx, file); + format!("{msg}\n{ctx}") +} diff --git a/src/term/parser/lexer.rs b/src/term/parser/lexer.rs deleted file mode 100644 index 6548880f..00000000 --- a/src/term/parser/lexer.rs +++ /dev/null @@ -1,315 +0,0 @@ -use interner::global::{GlobalPool, GlobalString}; -use logos::{FilterResult, Lexer, Logos}; -use std::{fmt, num::ParseIntError}; - -pub static STRINGS: GlobalPool = GlobalPool::new(); - -#[derive(Logos, Debug, PartialEq, Clone)] -#[logos(error=LexingError)] -#[logos(skip r"[ \t\r\n\f]+")] -pub enum Token { - #[regex("[_.a-zA-Z][_.a-zA-Z0-9-]*", |lex| lex.slice().parse().ok().map(|s: String| STRINGS.get(s)))] - Name(GlobalString), - - #[regex("@|λ")] - Lambda, - - #[token("$")] - Dollar, - - #[token("let")] - Let, - - #[token("use")] - Use, - - #[token("match")] - Match, - - #[token("switch")] - Switch, - - #[token("=")] - Equals, - - #[regex("0[bB][0-9a-zA-Z_]+", |lex| from_radix(2, lex))] - #[regex("0[xX][0-9a-zA-Z_]+", |lex| from_radix(16, lex))] - #[regex("[0-9][0-9a-zA-Z_]*", |lex| from_radix(10, lex))] - Num(u64), - - #[regex(r#""([^"\\]|\\[0tunr'"\\])*""#, |lex| normalized_string(lex).ok())] - #[regex(r#"`([^`\\]|\\[0tunr`'"\\])*`"#, |lex| normalized_string(lex).ok())] - Str(GlobalString), - - #[regex(r#"'\\U[0-9a-fA-F]{1,8}'"#, normalized_char, priority = 2)] - // Since '.' 
is just covering any ascii char, we need to make the - // regex match any possible character of the unicode general category - #[regex( - r#"'(\p{L}|\p{M}|\p{N}|\p{P}|\p{S}|\p{Z}|\p{C}|\p{Emoji}|\\u[0-9a-fA-F]{1,4}|\\[0tunr`'"\\])'"#, - normalized_char - )] - Char(u64), - - #[token("#")] - Hash, - - #[token("+")] - Add, - - #[token("-")] - Sub, - - #[token("*")] - Asterisk, - - #[token("/")] - Div, - - #[token("%")] - Mod, - - #[token("~")] - Tilde, - - #[token("&")] - And, - - #[token("|")] - Or, - - #[token("^")] - Xor, - - #[token("<<")] - Shl, - - #[token(">>")] - Shr, - - #[token("<")] - Ltn, - - #[token(">")] - Gtn, - - #[token("<=")] - Lte, - - #[token(">=")] - Gte, - - #[token("==")] - EqualsEquals, - - #[token("!=")] - NotEquals, - - #[token(";")] - Semicolon, - - #[token(":")] - Colon, - - #[token(",")] - Comma, - - #[token("(")] - LParen, - - #[token(")")] - RParen, - - #[token("{")] - LBracket, - - #[token("}")] - RBracket, - - #[token("[")] - LBrace, - - #[token("]")] - RBrace, - - #[regex("//.*", logos::skip)] - SingleLineComment, - - #[token("/*", comment)] - MultiLineComment, - - Error(LexingError), -} - -fn from_radix(radix: u32, lexer: &mut Lexer) -> Result { - let slice = if radix == 10 { lexer.slice() } else { &lexer.slice()[2 ..] }; - let slice = &slice.replace('_', ""); - u64::from_str_radix(slice, radix) -} - -#[derive(Default, Debug, PartialEq, Clone)] -pub enum LexingError { - UnclosedComment, - - #[default] - InvalidCharacter, - - InvalidNumberLiteral, -} - -impl From for LexingError { - fn from(_: ParseIntError) -> Self { - LexingError::InvalidNumberLiteral - } -} - -// Lexer for nested multi-line comments -#[derive(Logos)] -pub enum MultiLineComment { - #[token("/*")] - Open, - - #[token("*/")] - Close, - - #[regex("(?s).")] - Other, -} - -fn comment(lexer: &mut Lexer<'_, Token>) -> FilterResult<(), LexingError> { - let start = lexer.remainder(); - let mut comment = MultiLineComment::lexer(start); - let mut depth = 1; // Already matched an Open token, so count it - loop { - if let Some(token) = comment.next() { - match token { - Ok(MultiLineComment::Open) => depth += 1, - Ok(MultiLineComment::Close) => depth -= 1, - Ok(MultiLineComment::Other) => {} - Err(()) => unreachable!(), - } - } else { - // Unclosed comment - return FilterResult::Error(LexingError::UnclosedComment); - } - if depth <= 0 { - break; - } - } - let end = comment.remainder(); - let span = (end as *const str as *const () as usize) - (start as *const str as *const () as usize); - lexer.bump(span); - FilterResult::Skip -} - -fn normalized_string(lexer: &mut Lexer) -> Result { - let slice = lexer.slice(); - let slice = &slice[1 .. slice.len() - 1]; - - let mut s = String::new(); - let chars = &mut slice.chars(); - - while let Some(char) = chars.next() { - match char { - '\\' => match chars.next() { - Some('\\') => s.push('\\'), - Some('`') => s.push('`'), - Some('\'') => s.push('\''), - Some('\"') => s.push('\"'), - Some('n') => s.push('\n'), - Some('r') => s.push('\r'), - Some('t') => s.push('\t'), - Some('u') | Some('U') => { - let hex = chars.take(8).collect::(); - let hex_val = u32::from_str_radix(&hex, 16)?; - let char = char::from_u32(hex_val).unwrap_or(char::REPLACEMENT_CHARACTER); - s.push(char); - } - Some('0') => s.push('\0'), - Some(other) => { - s.push('\\'); - s.push(other); - } - None => s.push('\\'), - }, - other => s.push(other), - } - } - Ok(STRINGS.get(s)) -} - -fn normalized_char(lexer: &mut Lexer) -> Option { - let slice = lexer.slice(); - let slice = &slice[1 .. 
slice.len() - 1]; - let chars = &mut slice.chars(); - let c = match chars.next()? { - '\\' => match chars.next() { - Some('\\') => '\\', - Some('`') => '`', - Some('\"') => '\"', - Some('\'') => '\'', - Some('n') => '\n', - Some('r') => '\r', - Some('t') => '\t', - Some('u') | Some('U') => { - let hex = chars.take(8).collect::(); - let hex_val = u32::from_str_radix(&hex, 16).unwrap(); - char::from_u32(hex_val).unwrap_or(char::REPLACEMENT_CHARACTER) - } - Some('0') => '\0', - Some(..) => return None, - None => '\\', - }, - other => other, - }; - Some(u64::from(c)) -} - -impl fmt::Display for Token { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - Self::Name(s) => write!(f, "{}", s), - Self::Lambda => write!(f, "λ"), - Self::Dollar => write!(f, "$"), - Self::Let => write!(f, "let"), - Self::Use => write!(f, "use"), - Self::Match => write!(f, "match"), - Self::Switch => write!(f, "switch"), - Self::Equals => write!(f, "="), - Self::Num(num) => write!(f, "{num}"), - Self::Str(s) => write!(f, "\"{s}\""), - Self::Char(c) => write!(f, "'{c}'"), - Self::Hash => write!(f, "#"), - Self::Add => write!(f, "+"), - Self::Sub => write!(f, "-"), - Self::Asterisk => write!(f, "*"), - Self::Div => write!(f, "/"), - Self::Mod => write!(f, "%"), - Self::Tilde => write!(f, "~"), - Self::And => write!(f, "&"), - Self::Or => write!(f, "|"), - Self::Xor => write!(f, "^"), - Self::Shl => write!(f, "<<"), - Self::Shr => write!(f, ">>"), - Self::Ltn => write!(f, "<"), - Self::Gtn => write!(f, ">"), - Self::Lte => write!(f, "<="), - Self::Gte => write!(f, ">="), - Self::NotEquals => write!(f, "!="), - Self::EqualsEquals => write!(f, "=="), - Self::Colon => write!(f, ":"), - Self::Comma => write!(f, ","), - Self::Semicolon => write!(f, ";"), - Self::LParen => write!(f, "("), - Self::RParen => write!(f, ")"), - Self::LBracket => write!(f, "{{"), - Self::RBracket => write!(f, "}}"), - Self::LBrace => write!(f, "["), - Self::RBrace => write!(f, "]"), - Self::SingleLineComment => write!(f, ""), - Self::MultiLineComment => write!(f, ""), - Self::Error(LexingError::InvalidNumberLiteral) => write!(f, ""), - Self::Error(LexingError::InvalidCharacter) => write!(f, ""), - Self::Error(LexingError::UnclosedComment) => write!(f, ""), - } - } -} diff --git a/src/term/parser/mod.rs b/src/term/parser/mod.rs deleted file mode 100644 index a8267ae3..00000000 --- a/src/term/parser/mod.rs +++ /dev/null @@ -1,5 +0,0 @@ -pub mod lexer; -#[allow(clippy::module_inception)] -pub mod parser; - -pub use parser::{parse_book, parse_term}; diff --git a/src/term/parser/parser.rs b/src/term/parser/parser.rs deleted file mode 100644 index bdfacde2..00000000 --- a/src/term/parser/parser.rs +++ /dev/null @@ -1,612 +0,0 @@ -use crate::term::{ - parser::lexer::{LexingError, Token}, - Adt, Book, Definition, IntOp, Name, NumCtr, Op, OpType, Pattern, Rule, Tag, Term, LNIL, SNIL, -}; -use chumsky::{ - error::{Error, RichReason}, - extra, - input::{Emitter, SpannedInput, Stream, ValueInput}, - prelude::{Input, Rich}, - primitive::{any, choice, just}, - recursive::recursive, - select, - span::SimpleSpan, - util::MaybeRef, - IterParser, Parser, -}; -use indexmap::map::Entry; -use logos::{Logos, SpannedIter}; -use std::{iter::Map, ops::Range, path::Path}; - -use super::lexer::STRINGS; - -// hvml grammar description: -// ::= * -// ::= ( | ) -// ::= ()* -// ::= "data" "=" ( | "(" ()* ")")+ -// ::= ("(" * ")" | *) "=" ( | ) -// ::= "(" * ")" | | -// ::= | | | | | | | | | | -// ::= ("λ"|"@") -// ::= ("λ"|"@") "$" -// ::= "dup" ? 
"=" ";" -// ::= "(" "," ")" -// ::= "let" "=" ";" -// ::= | "(" "," ")" -// ::= "match" ( | "=" ) "{" + "}" -// ::= "|"? ":" ";"? -// ::= "(" ")" -// ::= "(" ()* ")" -// ::= -// ::= "$" -// ::= | "*" -// ::= // [_a-zA-Z][_a-zA-Z0-9]{0..7} -// ::= // [0-9]+ -// ::= "#" - -pub fn parse_book( - code: &str, - default_book: impl Fn() -> Book, - builtin: bool, -) -> Result>> { - book(default_book, builtin).parse(token_stream(code)).into_result() -} - -pub fn parse_term(code: &str) -> Result>> { - // TODO: Make a function that calls a parser. I couldn't figure out how to type it correctly. - term().parse(token_stream(code)).into_result() -} - -/// Converts a Chumsky parser error into a message. -pub fn error_to_msg(err: &Rich<'_, Token>, code: &str, path: &Path) -> String { - let Range { start, end } = err.span().into_range(); - let (lin, col) = line_and_col_of_byte(start, code); - let reason = match err.reason() { - // When many reasons, the first one is the most relevant. - // Otherwise we just get 'multiple errors'. - RichReason::Many(errs) => &errs[0], - _ => err.reason(), - }; - let path = format!("{}:{lin}:{col}", path.display()); - format!("At {}: {}\n{}", path, reason, highlight_error::highlight_error(usize::min(start, end), end, code)) -} - -fn line_and_col_of_byte(until: usize, src: &str) -> (usize, usize) { - // Line and column numbers starts at 1. - let mut line = 1; - let mut col = 1; - let mut gone = 0; - for char in src.chars() { - if gone >= until { - break; - } - let char_len = char.len_utf8(); - gone += char_len; - if char == '\n' { - line += 1; - col = 1; - } else { - col += char_len; - } - } - (line, col) -} - -fn token_stream( - code: &str, -) -> SpannedInput< - Token, - SimpleSpan, - Stream< - Map, impl FnMut((Result, Range)) -> (Token, SimpleSpan)>, - >, -> { - // TODO: Maybe change to just using chumsky. - let token_iter = Token::lexer(code).spanned().map(|(token, span)| match token { - Ok(t) => (t, SimpleSpan::from(span)), - Err(e) => (Token::Error(e), SimpleSpan::from(span)), - }); - Stream::from_iter(token_iter).spanned(SimpleSpan::from(code.len() .. code.len())) -} - -// Parsers - -fn soft_keyword<'a, I>(keyword: &'static str) -> impl Parser<'a, I, (), extra::Err>> -where - I: ValueInput<'a, Token = Token, Span = SimpleSpan>, -{ - any().filter(move |t| matches!(t, Token::Name(n) if n == keyword)).to(()).labelled(keyword) -} - -fn name<'a, I>() -> impl Parser<'a, I, Name, extra::Err>> -where - I: ValueInput<'a, Token = Token, Span = SimpleSpan>, -{ - // FIXME: bug with chumsky when using with `.repeated` - // select!(Token::Name(name) => Name::from(name)).labelled("") - - any() - .filter(|t| matches!(t, Token::Name(_))) - .map(|t| { - let Token::Name(n) = t else { unreachable!() }; - Name(n) - }) - .labelled("") -} - -/// A top level name that not accepts `-`. 
-fn tl_name<'a, I>() -> impl Parser<'a, I, Name, extra::Err>> -where - I: ValueInput<'a, Token = Token, Span = SimpleSpan>, -{ - any() - .filter(|t| matches!(t, Token::Name(n) if n != "data")) - .map(|t| { - let Token::Name(name) = t else { unreachable!() }; - name - }) - .validate(|out, span, emitter| { - if out.contains('-') { - emitter.emit(Rich::custom(span, "Names with '-' are not supported at top level.")); - } - Name(out) - }) - .labelled("") -} - -fn tag<'a, I>(default: Tag) -> impl Parser<'a, I, Tag, extra::Err>> -where - I: ValueInput<'a, Token = Token, Span = SimpleSpan>, -{ - just(Token::Hash).ignore_then(name()).or_not().map(move |x| x.map_or_else(|| default.clone(), Tag::Named)) -} - -fn name_or_era<'a, I>() -> impl Parser<'a, I, Option, extra::Err>> -where - I: ValueInput<'a, Token = Token, Span = SimpleSpan>, -{ - choice((any().filter(|a| matches!(a, Token::Asterisk)).to(None), name().map(Some))) -} - -fn num_oper<'a, I>() -> impl Parser<'a, I, Op, extra::Err>> -where - I: ValueInput<'a, Token = Token, Span = SimpleSpan>, -{ - select! { - Token::Add => Op{ ty: OpType::U60, op: IntOp::Add }, - Token::Sub => Op{ ty: OpType::U60, op: IntOp::Sub }, - Token::Asterisk => Op{ ty: OpType::U60, op: IntOp::Mul }, - Token::Div => Op{ ty: OpType::U60, op: IntOp::Div }, - Token::Mod => Op{ ty: OpType::U60, op: IntOp::Rem }, - Token::EqualsEquals => Op{ ty: OpType::U60, op: IntOp::Eq }, - Token::NotEquals => Op{ ty: OpType::U60, op: IntOp::Ne }, - Token::Ltn => Op{ ty: OpType::U60, op: IntOp::Lt }, - Token::Gtn => Op{ ty: OpType::U60, op: IntOp::Gt }, - Token::Lte => Op{ ty: OpType::U60, op: IntOp::Le }, - Token::Gte => Op{ ty: OpType::U60, op: IntOp::Ge }, - Token::And => Op{ ty: OpType::U60, op: IntOp::And }, - Token::Or => Op{ ty: OpType::U60, op: IntOp::Or }, - Token::Xor => Op{ ty: OpType::U60, op: IntOp::Xor }, - Token::Shl => Op{ ty: OpType::U60, op: IntOp::Shl }, - Token::Shr => Op{ ty: OpType::U60, op: IntOp::Shr }, - } -} - -fn term<'a, I>() -> impl Parser<'a, I, Term, extra::Err>> -where - I: ValueInput<'a, Token = Token, Span = SimpleSpan>, -{ - let var = name().map(|name| Term::Var { nam: name }).boxed(); - let unscoped_var = just(Token::Dollar).ignore_then(name()).map(|name| Term::Lnk { nam: name }).boxed(); - - let number = select!(Token::Num(num) => num).or( - select!(Token::Error(LexingError::InvalidNumberLiteral) => ()).validate(|_, span, emit| { - emit.emit(Rich::custom(span, "found invalid number literal expected number")); - 0 - }), - ); - - let nat: chumsky::Boxed>> = just(Token::Hash) - .ignore_then(select!(Token::Num(num) => Term::Nat { val: num }).or( - select!(Token::Error(LexingError::InvalidNumberLiteral) => ()).validate(|_, span, emit| { - emit.emit(Rich::custom(span, "found invalid nat literal expected number")); - Term::Nat { val: 0 } - }), - )) - .boxed(); - - let num_term = number.map(|val| Term::Num { val }); - - let term_sep = just(Token::Semicolon).or_not(); - let list_sep = just(Token::Comma).or_not(); - - recursive(|term| { - // * - let era = just(Token::Asterisk).to(Term::Era).boxed(); - - // #tag? λx body - let lam = tag(Tag::Static) - .then_ignore(just(Token::Lambda)) - .then(name_or_era()) - .then(term.clone()) - .map(|((tag, nam), bod)| Term::Lam { tag, nam, bod: Box::new(bod) }) - .boxed(); - - // #tag? 
λ$x body - let unscoped_lam = tag(Tag::Static) - .then_ignore(just(Token::Lambda)) - .then(just(Token::Dollar).ignore_then(name_or_era())) - .then(term.clone()) - .map(|((tag, nam), bod)| Term::Chn { tag, nam, bod: Box::new(bod) }) - .boxed(); - - // #tag {fst snd} - let sup = tag(Tag::Auto) - .then_ignore(just(Token::LBracket)) - .then(term.clone().separated_by(list_sep.clone()).at_least(2).allow_trailing().collect()) - .then_ignore(just(Token::RBracket)) - .map(|(tag, els)| Term::Sup { tag, els }) - .boxed(); - - // let #tag? {x1 x2} = body; next - let dup = just(Token::Let) - .ignore_then(tag(Tag::Auto)) - .then_ignore(just(Token::LBracket)) - .then(name_or_era().separated_by(list_sep.clone()).at_least(2).allow_trailing().collect()) - .then_ignore(just(Token::RBracket)) - .then_ignore(just(Token::Equals)) - .then(term.clone()) - .then_ignore(term_sep.clone()) - .then(term.clone()) - .map(|(((tag, bnd), val), next)| Term::Dup { tag, bnd, val: Box::new(val), nxt: Box::new(next) }) - .boxed(); - - // let nam = term; term - let let_ = just(Token::Let) - .ignore_then(name_or_era()) - .then_ignore(just(Token::Equals)) - .then(term.clone()) - .then_ignore(term_sep.clone()) - .then(term.clone()) - .map(|((nam, val), nxt)| Term::Let { nam, val: Box::new(val), nxt: Box::new(nxt) }) - .boxed(); - - // use a = val ';'? nxt - let use_ = just(Token::Use) - .ignore_then(name()) - .then_ignore(just(Token::Equals)) - .then(term.clone()) - .then_ignore(term_sep.clone()) - .then(term.clone()) - .map(|((nam, val), nxt)| Term::Use { nam: Some(nam), val: Box::new(val), nxt: Box::new(nxt) }) - .boxed(); - - // (name '=')? term - let match_arg = name().then_ignore(just(Token::Equals)).or_not().then(term.clone()).boxed(); - - let with = soft_keyword("with") - .ignore_then(name().separated_by(list_sep.clone()).at_least(1).allow_trailing().collect()) - .boxed(); - - let lnil = just(Token::LBrace) - .ignore_then(just(Token::RBrace)) - .ignored() - .map(|_| Some(Name::from(LNIL))) - .labelled("List.nil"); - let snil = - select!(Token::Str(s) if s.is_empty() => ()).map(|_| Some(Name::from(SNIL))).labelled("String.nil"); - let match_pat = choice((name_or_era(), lnil, snil)); - - // '|'? name: term - let match_rule = just(Token::Or) - .or_not() - .ignore_then(match_pat) - .labelled("") - .then_ignore(just(Token::Colon)) - .then(term.clone()) - .map(|(nam, body)| (nam, vec![], body)); - let match_rules = match_rule.separated_by(term_sep.clone()).at_least(1).allow_trailing().collect(); - - // match ((scrutinee | = value),?)+ { pat+: term;... 
} - let match_ = just(Token::Match) - .ignore_then(match_arg.clone()) - .then(with.clone().or_not()) - .then_ignore(just(Token::LBracket)) - .then(match_rules) - .then_ignore(just(Token::RBracket)) - .map(|(((bind, arg), with), rules)| { - let with = with.unwrap_or_default(); - if let Some(bind) = bind { - Term::Let { - nam: Some(bind.clone()), - val: Box::new(arg), - nxt: Box::new(Term::Mat { arg: Box::new(Term::Var { nam: bind }), with, rules }), - } - } else { - Term::Mat { arg: Box::new(arg), with, rules } - } - }) - .boxed(); - - let switch_ctr = choice((number.map(NumCtr::Num), soft_keyword("_").map(|_| NumCtr::Succ(None)))) - .labelled(""); - - let switch_rule = - just(Token::Or).or_not().ignore_then(switch_ctr).then_ignore(just(Token::Colon)).then(term.clone()); - let switch_rules = switch_rule.separated_by(term_sep.clone()).at_least(1).allow_trailing().collect(); - - let switch = just(Token::Switch) - .ignore_then(match_arg) - .then(with.clone().or_not()) - .then_ignore(just(Token::LBracket)) - .then(switch_rules) - .then_ignore(just(Token::RBracket)) - .map(|(((bind, arg), with), rules)| { - let with = with.unwrap_or_default(); - if let Some(bind) = bind { - Term::Let { - nam: Some(bind.clone()), - val: Box::new(arg), - nxt: Box::new(Term::Swt { arg: Box::new(Term::Var { nam: bind }), with, rules }), - } - } else { - Term::Swt { arg: Box::new(arg), with, rules } - } - }) - .boxed(); - - // #tag? (f arg1 arg2 ...) - let app = tag(Tag::Static) - .then_ignore(just(Token::LParen)) - .then(term.clone()) - .foldl(term.clone().repeated(), |(tag, fun), arg| { - (tag.clone(), Term::App { tag, fun: Box::new(fun), arg: Box::new(arg) }) - }) - .then_ignore(just(Token::RParen)) - .map(|(_, app)| app) - .boxed(); - - let num_op = num_oper() - .then(term.clone()) - .then(term.clone()) - .delimited_by(just(Token::LParen), just(Token::RParen)) - .map(|((opr, fst), snd)| Term::Opx { opr, fst: Box::new(fst), snd: Box::new(snd) }) - .boxed(); - - // (x, ..n) - let tup = term - .clone() - .separated_by(just(Token::Comma)) - .at_least(2) - .collect::>() - .delimited_by(just(Token::LParen), just(Token::RParen)) - .map(|els| Term::Tup { els }) - .boxed(); - - // let (x, ..n) = term; term - let let_tup = just(Token::Let) - .ignore_then(just(Token::LParen)) - .ignore_then(name_or_era().separated_by(just(Token::Comma)).at_least(2).collect()) - .then_ignore(just(Token::RParen)) - .then_ignore(just(Token::Equals)) - .then(term.clone()) - .then_ignore(term_sep.clone()) - .then(term.clone()) - .map(|((bnd, val), next)| Term::Ltp { bnd, val: Box::new(val), nxt: Box::new(next) }) - .boxed(); - - let str = select!(Token::Str(s) => Term::Str { val: s }).boxed(); - let chr = select!(Token::Char(c) => Term::Num { val: c }).boxed(); - - let list = term - .clone() - .separated_by(just(Token::Comma).or_not()) - .collect() - .delimited_by(just(Token::LBrace), just(Token::RBrace)) - .map(|els| Term::Lst { els }) - .boxed(); - - choice(( - num_op, - app, - tup, - unscoped_var, - var, - nat, - num_term, - list, - str, - chr, - sup, - unscoped_lam, - lam, - dup, - use_, - let_tup, - let_, - match_, - switch, - era, - )) - .labelled("term") - }) -} - -fn rule_pattern<'a, I>() -> impl Parser<'a, I, Pattern, extra::Err>> -where - I: ValueInput<'a, Token = Token, Span = SimpleSpan>, -{ - recursive(|pattern| { - let var = name_or_era().map(Pattern::Var).boxed(); - - let ctr = tl_name() - .then(pattern.clone().repeated().collect()) - .map(|(nam, xs)| Pattern::Ctr(nam, xs)) - .delimited_by(just(Token::LParen), just(Token::RParen)) 
- .boxed(); - - let tup = pattern - .clone() - .separated_by(just(Token::Comma)) - .at_least(2) - .collect::>() - .delimited_by(just(Token::LParen), just(Token::RParen)) - .map(Pattern::Tup) - .boxed(); - - let list = pattern - .clone() - .separated_by(just(Token::Comma).or_not()) - .collect() - .delimited_by(just(Token::LBrace), just(Token::RBrace)) - .map(Pattern::Lst) - .boxed(); - - let num_val = any().filter(|t| matches!(t, Token::Num(_))).map(|t| { - let Token::Num(n) = t else { unreachable!() }; - n - }); - - let num = num_val.map(Pattern::Num).labelled(""); - - let chr = select!(Token::Char(c) => Pattern::Num(c)).labelled("").boxed(); - - let str = select!(Token::Str(s) => Pattern::Str(s)).labelled("").boxed(); - - choice((num, chr, str, var, ctr, list, tup)) - }) - .labelled("") -} - -fn rule_lhs<'a, I>() -> impl Parser<'a, I, (Name, Vec), extra::Err>> -where - I: ValueInput<'a, Token = Token, Span = SimpleSpan>, -{ - let lhs = tl_name().then(rule_pattern().repeated().collect()).boxed(); - - let just_lhs = lhs.clone().then_ignore(just(Token::Equals).map_err(|err: Rich<'a, Token>| { - Error::::expected_found( - [ - Some(MaybeRef::Val(Token::Add)), - Some(MaybeRef::Val(Token::LParen)), - Some(MaybeRef::Val(Token::LBrace)), - Some(MaybeRef::Val(Token::Equals)), - ], - None, - *err.span(), - ) - })); - - let paren_lhs = just(Token::LParen) - .ignore_then(lhs.clone().map_err(|err| map_unexpected_eof(err, Token::Name(STRINGS.get(""))))) - .then_ignore(just(Token::RParen)) - .then_ignore(just(Token::Equals).map_err(|err| map_unexpected_eof(err, Token::Equals))); - - choice((just_lhs, paren_lhs)) -} - -fn rule<'a, I>() -> impl Parser<'a, I, TopLevel, extra::Err>> -where - I: ValueInput<'a, Token = Token, Span = SimpleSpan>, -{ - rule_lhs().then(term()).map(move |((name, pats), body)| TopLevel::Rule((name, Rule { pats, body }))) -} - -fn datatype<'a, I>() -> impl Parser<'a, I, TopLevel, extra::Err>> -where - I: ValueInput<'a, Token = Token, Span = SimpleSpan>, -{ - let arity_0 = tl_name().map_with_span(|nam, span| ((nam, vec![]), span)); - let arity_n = tl_name() - .then(name().repeated().collect::>()) - .delimited_by(just(Token::LParen), just(Token::RParen)) - .map_with_span(|(nam, args), span| ((nam, args), span)); - - let ctrs = arity_0.or(arity_n).separated_by(just(Token::Or)).at_least(1).collect(); - let data_name = tl_name().map_with_span(|name, span| (name, span)); - - soft_keyword("data") - .ignore_then(data_name.map_err(|err| map_unexpected_eof(err, Token::Name(STRINGS.get(""))))) - .then_ignore(just(Token::Equals)) - .then(ctrs.map_err(|err| map_unexpected_eof(err, Token::Name(STRINGS.get("constructor"))))) - .map(move |(name, ctrs)| TopLevel::Adt(name, ctrs)) -} - -fn map_unexpected_eof(err: Rich, expected_token: Token) -> Rich { - if err.found().is_none() { - // Not using Error::expected_found here to not merge with other expected_found errors - Rich::custom(*err.span(), format!("found end of input expected {}", expected_token)) - } else { - err - } -} - -fn book<'a, I>( - default_book: impl Fn() -> Book, - builtin: bool, -) -> impl Parser<'a, I, Book, extra::Err>> -where - I: ValueInput<'a, Token = Token, Span = SimpleSpan>, -{ - choice((datatype(), rule())) - .repeated() - .collect() - .validate(move |program, _, emit| collect_book(default_book(), program, builtin, emit)) -} - -/// Collect rules and adts into a book -fn collect_book( - mut book: Book, - program: Vec, - builtin: bool, - emit: &mut Emitter>, -) -> Book { - for top_level in program { - match top_level { - 
TopLevel::Rule((name, rule)) => { - if let Some(def) = book.defs.get_mut(&name) { - def.rules.push(rule); - } else { - book.defs.insert(name.clone(), Definition { name, rules: vec![rule], builtin }); - } - } - TopLevel::Adt((nam, nam_span), adt) => match book.adts.get(&nam) { - None => { - let (ctrs, spans): (Vec<(_, _)>, Vec<_>) = adt.into_iter().unzip(); - - for ((ctr, _), span) in ctrs.iter().zip(spans.into_iter()) { - match book.ctrs.entry(ctr.clone()) { - Entry::Vacant(e) => _ = e.insert(nam.clone()), - Entry::Occupied(e) => emit.emit(Rich::custom( - span, - if book.adts.get(e.get()).is_some_and(|adt| adt.builtin) { - format!("{} is a built-in constructor and should not be overridden.", e.key()) - } else { - format!("Repeated constructor '{}'", e.key()) - }, - )), - } - } - - let adt = Adt { ctrs: ctrs.into_iter().collect(), builtin }; - book.adts.insert(nam.clone(), adt); - } - Some(adt) => emit.emit(Rich::custom( - nam_span, - if adt.builtin { - format!("{} is a built-in datatype and should not be overridden.", nam) - } else { - format!("Repeated datatype '{}'", nam) - }, - )), - }, - } - } - book -} - -enum TopLevel { - Rule((Name, Rule)), - Adt((Name, SimpleSpan), Vec<((Name, Vec), SimpleSpan)>), -} diff --git a/src/term/term_to_net.rs b/src/term/term_to_net.rs index 0a7907e3..e9802689 100644 --- a/src/term/term_to_net.rs +++ b/src/term/term_to_net.rs @@ -1,4 +1,5 @@ use crate::{ + maybe_grow, net::{ INet, NodeKind::{self, *}, @@ -68,7 +69,7 @@ impl EncodeTermState<'_> { /// `global_vars` has the same information for global lambdas. Must be linked outside this function. /// Expects variables to be affine, refs to be stored as Refs and all names to be bound. fn encode_term(&mut self, term: &Term, up: Port) -> Option { - Term::recursive_call(move || { + maybe_grow(|| { match term { // A lambda becomes to a con node. Ports: // - 0: points to where the lambda occurs. diff --git a/src/term/transform/apply_use.rs b/src/term/transform/apply_use.rs index ebefb2fb..e80f7040 100644 --- a/src/term/transform/apply_use.rs +++ b/src/term/transform/apply_use.rs @@ -1,4 +1,7 @@ -use crate::term::{Book, Term}; +use crate::{ + maybe_grow, + term::{Book, Term}, +}; impl Book { /// Inline copies of the declared bind in the `use` expression. @@ -22,7 +25,7 @@ impl Book { impl Term { pub fn apply_use(&mut self) { - Term::recursive_call(|| { + maybe_grow(|| { for children in self.children_mut() { children.apply_use(); } diff --git a/src/term/transform/definition_merge.rs b/src/term/transform/definition_merge.rs index 3bcee8b5..5181e18d 100644 --- a/src/term/transform/definition_merge.rs +++ b/src/term/transform/definition_merge.rs @@ -1,4 +1,7 @@ -use crate::term::{Book, Definition, Name, Rule, Term}; +use crate::{ + maybe_grow, + term::{Book, Definition, Name, Rule, Term}, +}; use indexmap::{IndexMap, IndexSet}; use itertools::Itertools; use std::collections::BTreeMap; @@ -86,7 +89,7 @@ impl Term { /// Performs reference substitution within a term replacing any references found in /// `ref_map` with their corresponding targets. 
pub fn subst_ref_to_ref(term: &mut Term, ref_map: &BTreeMap) -> bool { - Term::recursive_call(move || match term { + maybe_grow(|| match term { Term::Ref { nam: def_name } => { if let Some(target_name) = ref_map.get(def_name) { *def_name = target_name.clone(); diff --git a/src/term/transform/definition_pruning.rs b/src/term/transform/definition_pruning.rs index a8a22c2d..3f6431c6 100644 --- a/src/term/transform/definition_pruning.rs +++ b/src/term/transform/definition_pruning.rs @@ -107,10 +107,10 @@ impl Book { None => self.insert_used(def_name, used, uses, adt_encoding), }, Term::Lst { .. } => { - self.insert_ctrs_used(&Name::from(LIST), uses, adt_encoding); + self.insert_ctrs_used(&Name::new(LIST), uses, adt_encoding); } Term::Str { .. } => { - self.insert_ctrs_used(&Name::from(STRING), uses, adt_encoding); + self.insert_ctrs_used(&Name::new(STRING), uses, adt_encoding); } _ => {} } diff --git a/src/term/transform/desugar_match_defs.rs b/src/term/transform/desugar_match_defs.rs index 3b90eb9f..41c5b0fa 100644 --- a/src/term/transform/desugar_match_defs.rs +++ b/src/term/transform/desugar_match_defs.rs @@ -499,8 +499,8 @@ impl Pattern { } Pattern::Tup(args) => Type::Tup(args.len()), Pattern::Num(_) => Type::Num, - Pattern::Lst(..) => Type::Adt(builtins::LIST.into()), - Pattern::Str(..) => Type::Adt(builtins::STRING.into()), + Pattern::Lst(..) => Type::Adt(Name::new(builtins::LIST)), + Pattern::Str(..) => Type::Adt(Name::new(builtins::STRING)), } } } diff --git a/src/term/transform/encode_match_terms.rs b/src/term/transform/encode_match_terms.rs index d625623e..0d5ddc6a 100644 --- a/src/term/transform/encode_match_terms.rs +++ b/src/term/transform/encode_match_terms.rs @@ -1,4 +1,7 @@ -use crate::term::{AdtEncoding, Book, Constructors, MatchRule, Name, NumCtr, SwitchRule, Tag, Term}; +use crate::{ + maybe_grow, + term::{AdtEncoding, Book, Constructors, MatchRule, Name, NumCtr, SwitchRule, Tag, Term}, +}; impl Book { /// Encodes pattern matching expressions in the book into their @@ -20,7 +23,7 @@ impl Book { impl Term { pub fn encode_matches(&mut self, ctrs: &Constructors, adt_encoding: AdtEncoding) { - Term::recursive_call(move || { + maybe_grow(|| { for child in self.children_mut() { child.encode_matches(ctrs, adt_encoding) } @@ -73,7 +76,7 @@ fn encode_match(arg: Term, rules: Vec, ctrs: &Constructors, adt_encod fn encode_switch(arg: Term, mut rules: Vec) -> Term { let last_rule = rules.pop().unwrap(); - let match_var = Name::from("%x"); + let match_var = Name::new("%x"); // @n-2 (C n-2) let NumCtr::Succ(last_var) = last_rule.0 else { unreachable!() }; diff --git a/src/term/transform/fix_match_terms.rs b/src/term/transform/fix_match_terms.rs index e36c7ee6..788df4de 100644 --- a/src/term/transform/fix_match_terms.rs +++ b/src/term/transform/fix_match_terms.rs @@ -1,5 +1,6 @@ use crate::{ diagnostics::{Diagnostics, ToStringVerbose, WarningType}, + maybe_grow, term::{Adts, Constructors, Ctx, MatchRule, Name, NumCtr, Term}, }; use std::collections::HashMap; @@ -76,7 +77,7 @@ impl Ctx<'_> { impl Term { fn fix_match_terms(&mut self, ctrs: &Constructors, adts: &Adts) -> Vec { - Term::recursive_call(move || { + maybe_grow(|| { let mut errs = Vec::new(); for child in self.children_mut() { @@ -182,7 +183,7 @@ fn extract_match_arg(arg: &mut Term) -> (Name, Option) { if let Term::Var { nam } = arg { (nam.clone(), None) } else { - let nam = Name::from("%matched"); + let nam = Name::new("%matched"); let arg = std::mem::replace(arg, Term::Var { nam: nam.clone() }); (nam, Some(arg)) } diff --git 
a/src/term/transform/float_combinators.rs b/src/term/transform/float_combinators.rs index 58a9a745..06848aa7 100644 --- a/src/term/transform/float_combinators.rs +++ b/src/term/transform/float_combinators.rs @@ -1,7 +1,7 @@ use indexmap::IndexSet; use crate::{ - multi_iterator, + maybe_grow, multi_iterator, term::{Book, Definition, Name, Rule, Term}, }; use std::collections::BTreeMap; @@ -85,7 +85,7 @@ impl Term { /// - A safe Lambda, e.g. a nullary constructor or a lambda with safe body. /// - A Reference with safe body. pub fn is_safe(&self, book: &Book, seen: &mut IndexSet) -> bool { - Term::recursive_call(move || match self { + maybe_grow(|| match self { Term::Num { .. } | Term::Era => true, Term::Tup { els } | Term::Sup { els, .. } => els.iter().all(|e| Term::is_safe(e, book, seen)), diff --git a/src/term/transform/linearize_matches.rs b/src/term/transform/linearize_matches.rs index 89909222..97af207e 100644 --- a/src/term/transform/linearize_matches.rs +++ b/src/term/transform/linearize_matches.rs @@ -1,4 +1,7 @@ -use crate::term::{Book, Name, NumCtr, Tag, Term}; +use crate::{ + maybe_grow, + term::{Book, Name, NumCtr, Tag, Term}, +}; use std::collections::{BTreeSet, HashSet}; impl Book { @@ -63,7 +66,7 @@ impl Term { /// } /// ``` pub fn linearize_match_lambdas(&mut self) { - Term::recursive_call(move || match self { + maybe_grow(|| match self { Term::Lam { .. } => { let mut term_owned = std::mem::take(self); let mut term = &mut term_owned; @@ -139,7 +142,7 @@ impl Term { } fn linearize_matches(&mut self) { - Term::recursive_call(move || { + maybe_grow(|| { for child in self.children_mut() { child.linearize_matches(); } @@ -151,7 +154,7 @@ impl Term { } fn linearize_match_with(&mut self) { - Term::recursive_call(|| { + maybe_grow(|| { for child in self.children_mut() { child.linearize_match_with(); } diff --git a/src/term/transform/linearize_vars.rs b/src/term/transform/linearize_vars.rs index d1181862..fe6dc1db 100644 --- a/src/term/transform/linearize_vars.rs +++ b/src/term/transform/linearize_vars.rs @@ -1,4 +1,7 @@ -use crate::term::{Book, Name, Tag, Term}; +use crate::{ + maybe_grow, + term::{Book, Name, Tag, Term}, +}; use std::collections::HashMap; /// Erases variables that weren't used, dups the ones that were used more than once. 
@@ -35,7 +38,7 @@ impl Term { } fn term_to_affine(term: &mut Term, var_uses: &mut HashMap) { - Term::recursive_call(move || match term { + maybe_grow(|| match term { Term::Let { nam: Some(nam), val, nxt } => { // TODO: This is swapping the order of how the bindings are // used, since it's not following the usual AST order (first diff --git a/src/term/transform/resolve_refs.rs b/src/term/transform/resolve_refs.rs index cd939da0..fcc6f6c3 100644 --- a/src/term/transform/resolve_refs.rs +++ b/src/term/transform/resolve_refs.rs @@ -1,6 +1,7 @@ use crate::{ builtins::CORE_BUILTINS, diagnostics::{Diagnostics, ToStringVerbose}, + maybe_grow, term::{Ctx, Name, Pattern, Term}, }; use std::collections::{HashMap, HashSet}; @@ -45,7 +46,7 @@ impl Term { main: Option<&Name>, scope: &mut HashMap<&'a Name, usize>, ) -> Result<(), ReferencedMainErr> { - Term::recursive_call(move || { + maybe_grow(move || { if let Term::Var { nam } = self && is_var_in_scope(nam, scope) { diff --git a/src/term/transform/resugar_adts.rs b/src/term/transform/resugar_adts.rs index 471b2fc6..15ae0d1a 100644 --- a/src/term/transform/resugar_adts.rs +++ b/src/term/transform/resugar_adts.rs @@ -2,6 +2,7 @@ use std::collections::VecDeque; use crate::{ diagnostics::ToStringVerbose, + maybe_grow, term::{Adt, AdtEncoding, Book, Name, Tag, Term}, }; @@ -23,7 +24,7 @@ impl Term { } fn resugar_tagged_scott(&mut self, book: &Book, errs: &mut Vec) { - Term::recursive_call(move || match self { + maybe_grow(|| match self { Term::Lam { tag: Tag::Named(adt_name), bod, .. } | Term::Chn { tag: Tag::Named(adt_name), bod, .. } => { if let Some((adt_name, adt)) = book.adts.get_key_value(adt_name) { self.resugar_ctr_tagged_scott(book, adt, adt_name, errs); @@ -245,7 +246,7 @@ impl Term { let (arg, bind) = if let Term::Var { nam } = cur { (nam.clone(), None) } else { - (Name::from("%matched"), Some(std::mem::take(cur))) + (Name::new("%matched"), Some(std::mem::take(cur))) }; // Subst the unique readback names for the field names. diff --git a/src/term/transform/resugar_builtins.rs b/src/term/transform/resugar_builtins.rs index 847e04e8..d543417f 100644 --- a/src/term/transform/resugar_builtins.rs +++ b/src/term/transform/resugar_builtins.rs @@ -1,4 +1,7 @@ -use crate::term::{Term, LCONS, LNIL, NAT_SUCC, NAT_ZERO, SCONS, SNIL}; +use crate::{ + maybe_grow, + term::{Term, LCONS, LNIL, NAT_SUCC, NAT_ZERO, SCONS, SNIL}, +}; impl Term { pub fn resugar_builtins(&mut self) { @@ -8,7 +11,7 @@ impl Term { } pub fn resugar_nats(&mut self) { - Term::recursive_call(move || match self { + maybe_grow(|| match self { // (Nat.succ pred) Term::App { fun: box Term::Ref { nam: ctr }, arg: box pred, .. } => { pred.resugar_nats(); @@ -35,7 +38,7 @@ impl Term { /// Rebuilds the String syntax sugar, converting `(Cons 97 Nil)` into `"a"`. pub fn resugar_strings(&mut self) { - Term::recursive_call(move || match self { + maybe_grow(|| match self { // (String.cons Num tail) Term::App { fun: box Term::App { fun: box Term::Ref { nam: ctr }, arg: box head, .. }, @@ -80,7 +83,7 @@ impl Term { /// Rebuilds the List syntax sugar, converting `(Cons head Nil)` into `[head]`. pub fn resugar_lists(&mut self) { - Term::recursive_call(move || match self { + maybe_grow(|| match self { // (List.cons el tail) Term::App { fun: box Term::App { fun: box Term::Ref { nam: ctr }, arg: box head, .. 
}, diff --git a/src/term/transform/unique_names.rs b/src/term/transform/unique_names.rs index 16174863..46adcc1c 100644 --- a/src/term/transform/unique_names.rs +++ b/src/term/transform/unique_names.rs @@ -1,6 +1,9 @@ // Pass to give all variables in a definition unique names. -use crate::term::{Book, Name, Term}; +use crate::{ + maybe_grow, + term::{Book, Name, Term}, +}; use std::collections::HashMap; impl Book { @@ -31,7 +34,7 @@ pub struct UniqueNameGenerator { impl UniqueNameGenerator { // Recursively assign an id to each variable in the term, then convert each id into a unique name. pub fn unique_names_in_term(&mut self, term: &mut Term) { - Term::recursive_call(move || match term { + maybe_grow(|| match term { Term::Var { nam } => *nam = self.use_var(nam), _ => { for (child, binds) in term.children_mut_with_binds_mut() { diff --git a/tests/golden_tests.rs b/tests/golden_tests.rs index 00f74f8d..f2268937 100644 --- a/tests/golden_tests.rs +++ b/tests/golden_tests.rs @@ -4,8 +4,8 @@ use hvml::{ net::{hvmc_to_net::hvmc_to_net, net_to_hvmc::net_to_hvmc}, run_book, term::{ - load_book::do_parse_book, net_to_term::net_to_term, parser::parse_term, term_to_compat_net, - term_to_net::Labels, AdtEncoding, Book, Ctx, Name, Term, + load_book::do_parse_book, net_to_term::net_to_term, parser::TermParser, term_to_compat_net, + term_to_net::Labels, AdtEncoding, Book, Ctx, Name, }, CompileOpts, RunOpts, }; @@ -25,14 +25,6 @@ fn format_output(output: std::process::Output) -> String { format!("{}{}", String::from_utf8_lossy(&output.stderr), String::from_utf8_lossy(&output.stdout)) } -fn do_parse_term(code: &str) -> Result { - parse_term(code).map_err(|errs| errs.into_iter().map(|e| e.to_string()).join("\n")) -} - -fn do_parse_net(code: &str) -> Result { - hvmc::ast::Net::from_str(code) -} - const TESTS_PATH: &str = "/tests/golden_tests/"; type RunFn = dyn Fn(&str, &Path) -> Result; @@ -103,7 +95,7 @@ fn run_golden_test_dir_multiple(test_name: &str, run: &[&RunFn]) { #[test] fn compile_term() { run_golden_test_dir(function_name!(), &|code, _| { - let mut term = do_parse_term(code)?; + let mut term = TermParser::new_term(code)?; let mut vec = Vec::new(); term.check_unbound_vars(&mut HashMap::new(), &mut vec); @@ -226,7 +218,7 @@ fn run_lazy() { #[test] fn readback_lnet() { run_golden_test_dir(function_name!(), &|code, _| { - let net = do_parse_net(code)?; + let net = hvmc::ast::Net::from_str(code)?; let book = Book::default(); let compat_net = hvmc_to_net(&net); let mut diags = Diagnostics::default(); @@ -349,7 +341,7 @@ fn hangs() { fn compile_entrypoint() { run_golden_test_dir(function_name!(), &|code, path| { let mut book = do_parse_book(code, path)?; - book.entrypoint = Some(Name::from("foo")); + book.entrypoint = Some(Name::new("foo")); let diagnostics_cfg = DiagnosticsConfig::new(Severity::Error, true); let res = compile_book(&mut book, CompileOpts::default_strict(), diagnostics_cfg, None)?; Ok(format!("{}{}", res.diagnostics, res.core_book)) @@ -360,7 +352,7 @@ fn compile_entrypoint() { fn run_entrypoint() { run_golden_test_dir(function_name!(), &|code, path| { let mut book = do_parse_book(code, path)?; - book.entrypoint = Some(Name::from("foo")); + book.entrypoint = Some(Name::new("foo")); let compile_opts = CompileOpts::default_strict().set_all(); let diagnostics_cfg = DiagnosticsConfig::new(Severity::Error, true); let (res, info) = run_book(book, None, RunOpts::default(), compile_opts, diagnostics_cfg, None)?; diff --git a/tests/golden_tests/encode_pattern_match/match_list_sugar.hvm 
b/tests/golden_tests/encode_pattern_match/match_list_sugar.hvm deleted file mode 100644 index 12ab9982..00000000 --- a/tests/golden_tests/encode_pattern_match/match_list_sugar.hvm +++ /dev/null @@ -1,5 +0,0 @@ -main = - match List.nil { - | []: 0 - | *: 1 - } diff --git a/tests/golden_tests/run_file/chars.hvm b/tests/golden_tests/run_file/chars.hvm index fa2b2713..1dc5d950 100644 --- a/tests/golden_tests/run_file/chars.hvm +++ b/tests/golden_tests/run_file/chars.hvm @@ -1 +1 @@ -main = (String.cons '\u1234' (String.cons '!' (String.cons '7' String.nil))) \ No newline at end of file +main = (String.cons '\u{1234}' (String.cons '!' (String.cons '7' String.nil))) \ No newline at end of file diff --git a/tests/golden_tests/run_file/escape_sequences.hvm b/tests/golden_tests/run_file/escape_sequences.hvm index 560f39b3..5a68fd38 100644 --- a/tests/golden_tests/run_file/escape_sequences.hvm +++ b/tests/golden_tests/run_file/escape_sequences.hvm @@ -1,4 +1,4 @@ -String.from_list [] = `` +String.from_list [] = "" String.from_list (List.cons x xs) = (String.cons x (String.from_list xs)) (Concat String.nil ys) = ys @@ -8,5 +8,5 @@ String.from_list (List.cons x xs) = (String.cons x (String.from_list xs)) (Join (List.cons x xs)) = (Concat x (Join xs)) main = - ((String.from_list ['\n', '\r', '\t', '\0', '\"', '\'', '\uAFE', '\`']) - , (Join ["\n", "\r", "\t", "\0", "\"", "\'", "\uAFE", `\``])) + ((String.from_list ['\n', '\r', '\t', '\0', '\"', '\'', '\u{AFE}', '\\']) + , (Join ["\n", "\r", "\t", "\0", "\"", "\'", "\u{AFE}", "\\"])) diff --git a/tests/golden_tests/run_file/names_hyphen_toplevel.hvm b/tests/golden_tests/run_file/names_hyphen_toplevel.hvm index 2b02bebd..0d89a794 100644 --- a/tests/golden_tests/run_file/names_hyphen_toplevel.hvm +++ b/tests/golden_tests/run_file/names_hyphen_toplevel.hvm @@ -1,5 +1,5 @@ -this-is-not-allowed = 1 +data Foo-Bar = (Baz-Qux field-hyph) -data Foo-Bar = Baz-Qux +fun-with-hyphen = 1 -main = (this-is-not-allowed Baz-Qux) +main = (Baz-Qux fun-with-hyphen) diff --git a/tests/golden_tests/run_file/str_backtick.hvm b/tests/golden_tests/run_file/str_backtick.hvm deleted file mode 100644 index 8abe880b..00000000 --- a/tests/golden_tests/run_file/str_backtick.hvm +++ /dev/null @@ -1,4 +0,0 @@ -(Concat String.nil ys) = ys -(Concat (String.cons x xs) ys) = (String.cons x (Concat xs ys)) - -Main = (Concat "abc369*`" `asdf"asdf`) diff --git a/tests/golden_tests/run_file/world.hvm b/tests/golden_tests/run_file/world.hvm index 51456c73..c3cd97d6 100644 --- a/tests/golden_tests/run_file/world.hvm +++ b/tests/golden_tests/run_file/world.hvm @@ -1 +1 @@ -main = (String.cons '\U1F30E' String.nil) \ No newline at end of file +main = (String.cons '\U{1F30E}' String.nil) \ No newline at end of file diff --git a/tests/golden_tests/run_lazy/chars.hvm b/tests/golden_tests/run_lazy/chars.hvm index fa2b2713..1dc5d950 100644 --- a/tests/golden_tests/run_lazy/chars.hvm +++ b/tests/golden_tests/run_lazy/chars.hvm @@ -1 +1 @@ -main = (String.cons '\u1234' (String.cons '!' (String.cons '7' String.nil))) \ No newline at end of file +main = (String.cons '\u{1234}' (String.cons '!' 
(String.cons '7' String.nil))) \ No newline at end of file diff --git a/tests/golden_tests/run_lazy/lam_op2_nested.hvm b/tests/golden_tests/run_lazy/lam_op2_nested.hvm index 10498658..64b1918e 100644 --- a/tests/golden_tests/run_lazy/lam_op2_nested.hvm +++ b/tests/golden_tests/run_lazy/lam_op2_nested.hvm @@ -1,7 +1 @@ -/* -main = λx (+ (* x x) (+ (+ 2 x) 3)) - -FIXME: panicked at 'not yet implemented' on hvmc::run::NetFields `if next.is_op1() { todo!(); } // FIXME` -*/ - -main = * \ No newline at end of file +main = λx (+ (* x x) (+ (+ 2 x) 3)) \ No newline at end of file diff --git a/tests/golden_tests/run_lazy/world.hvm b/tests/golden_tests/run_lazy/world.hvm index 51456c73..c3cd97d6 100644 --- a/tests/golden_tests/run_lazy/world.hvm +++ b/tests/golden_tests/run_lazy/world.hvm @@ -1 +1 @@ -main = (String.cons '\U1F30E' String.nil) \ No newline at end of file +main = (String.cons '\U{1F30E}' String.nil) \ No newline at end of file diff --git a/tests/snapshots/compile_file__error_messages.hvm.snap b/tests/snapshots/compile_file__error_messages.hvm.snap index 768d39f9..742dde12 100644 --- a/tests/snapshots/compile_file__error_messages.hvm.snap +++ b/tests/snapshots/compile_file__error_messages.hvm.snap @@ -3,5 +3,8 @@ source: tests/golden_tests.rs input_file: tests/golden_tests/compile_file/error_messages.hvm --- Errors: -At tests/golden_tests/compile_file/error_messages.hvm:3:10: Repeated constructor 'B' - 3 | data C = (B) +In tests/golden_tests/compile_file/error_messages.hvm : +Repeated constructor 'B' + 3 | data C = (B) + 4 |  + 5 | Foo (C) = * diff --git a/tests/snapshots/compile_file__just_a_name.hvm.snap b/tests/snapshots/compile_file__just_a_name.hvm.snap index e30831e4..2270b6bf 100644 --- a/tests/snapshots/compile_file__just_a_name.hvm.snap +++ b/tests/snapshots/compile_file__just_a_name.hvm.snap @@ -3,5 +3,6 @@ source: tests/golden_tests.rs input_file: tests/golden_tests/compile_file/just_a_name.hvm --- Errors: -At tests/golden_tests/compile_file/just_a_name.hvm:1:1: found end of input expected '+', '(', '[', or '=' - 1 | asdf +In tests/golden_tests/compile_file/just_a_name.hvm : +- expected: pattern-matching pattern +- detected: end of input diff --git a/tests/snapshots/compile_file__just_data.hvm.snap b/tests/snapshots/compile_file__just_data.hvm.snap index 17b54de0..7dd33ad7 100644 --- a/tests/snapshots/compile_file__just_data.hvm.snap +++ b/tests/snapshots/compile_file__just_data.hvm.snap @@ -3,5 +3,6 @@ source: tests/golden_tests.rs input_file: tests/golden_tests/compile_file/just_data.hvm --- Errors: -At tests/golden_tests/compile_file/just_data.hvm:1:5: found end of input expected - 1 | data +In tests/golden_tests/compile_file/just_data.hvm : +- expected: datatype name +- detected: end of input diff --git a/tests/snapshots/compile_file__just_paren.hvm.snap b/tests/snapshots/compile_file__just_paren.hvm.snap index 9e4de9f1..4bfa9cc6 100644 --- a/tests/snapshots/compile_file__just_paren.hvm.snap +++ b/tests/snapshots/compile_file__just_paren.hvm.snap @@ -3,5 +3,6 @@ source: tests/golden_tests.rs input_file: tests/golden_tests/compile_file/just_paren.hvm --- Errors: -At tests/golden_tests/compile_file/just_paren.hvm:2:1: found end of input expected - 2 | ( +In tests/golden_tests/compile_file/just_paren.hvm : +- expected: function name +- detected: end of input diff --git a/tests/snapshots/compile_file__just_rule_paren.hvm.snap b/tests/snapshots/compile_file__just_rule_paren.hvm.snap index 321091c4..ea464fb1 100644 --- a/tests/snapshots/compile_file__just_rule_paren.hvm.snap 
+++ b/tests/snapshots/compile_file__just_rule_paren.hvm.snap @@ -3,5 +3,6 @@ source: tests/golden_tests.rs input_file: tests/golden_tests/compile_file/just_rule_paren.hvm --- Errors: -At tests/golden_tests/compile_file/just_rule_paren.hvm:1:6: found end of input expected = - 1 | (rule) +In tests/golden_tests/compile_file/just_rule_paren.hvm : +- expected: '=' +- detected: end of input diff --git a/tests/snapshots/compile_file__missing_adt_eq.hvm.snap b/tests/snapshots/compile_file__missing_adt_eq.hvm.snap index 12669988..136d09c0 100644 --- a/tests/snapshots/compile_file__missing_adt_eq.hvm.snap +++ b/tests/snapshots/compile_file__missing_adt_eq.hvm.snap @@ -3,5 +3,6 @@ source: tests/golden_tests.rs input_file: tests/golden_tests/compile_file/missing_adt_eq.hvm --- Errors: -At tests/golden_tests/compile_file/missing_adt_eq.hvm:1:9: found end of input expected '=' - 1 | data Adt +In tests/golden_tests/compile_file/missing_adt_eq.hvm : +- expected: '=' +- detected: end of input diff --git a/tests/snapshots/compile_file__missing_ctrs.hvm.snap b/tests/snapshots/compile_file__missing_ctrs.hvm.snap index a5d37015..7d665d5d 100644 --- a/tests/snapshots/compile_file__missing_ctrs.hvm.snap +++ b/tests/snapshots/compile_file__missing_ctrs.hvm.snap @@ -3,5 +3,6 @@ source: tests/golden_tests.rs input_file: tests/golden_tests/compile_file/missing_ctrs.hvm --- Errors: -At tests/golden_tests/compile_file/missing_ctrs.hvm:1:12: found end of input expected constructor - 1 | data Adt =  +In tests/golden_tests/compile_file/missing_ctrs.hvm : +- expected: datatype constructor name +- detected: end of input diff --git a/tests/snapshots/compile_file__missing_pat.hvm.snap b/tests/snapshots/compile_file__missing_pat.hvm.snap index ec0beb56..0bee1365 100644 --- a/tests/snapshots/compile_file__missing_pat.hvm.snap +++ b/tests/snapshots/compile_file__missing_pat.hvm.snap @@ -3,5 +3,7 @@ source: tests/golden_tests.rs input_file: tests/golden_tests/compile_file/missing_pat.hvm --- Errors: -At tests/golden_tests/compile_file/missing_pat.hvm:2:3: found ':' expected +In tests/golden_tests/compile_file/missing_pat.hvm : +- expected: name or '*' +- detected:  2 | : * diff --git a/tests/snapshots/compile_file__unexpected_top_char.hvm.snap b/tests/snapshots/compile_file__unexpected_top_char.hvm.snap index 6cc24dce..b021b476 100644 --- a/tests/snapshots/compile_file__unexpected_top_char.hvm.snap +++ b/tests/snapshots/compile_file__unexpected_top_char.hvm.snap @@ -3,5 +3,7 @@ source: tests/golden_tests.rs input_file: tests/golden_tests/compile_file/unexpected_top_char.hvm --- Errors: -At tests/golden_tests/compile_file/unexpected_top_char.hvm:1:1: found end of input expected data, , or '(' +In tests/golden_tests/compile_file/unexpected_top_char.hvm : +- expected: top-level definition +- detected:  1 | * diff --git a/tests/snapshots/compile_file_o_all__adt_string.hvm.snap b/tests/snapshots/compile_file_o_all__adt_string.hvm.snap index 6ca16cdd..8fa20c55 100644 --- a/tests/snapshots/compile_file_o_all__adt_string.hvm.snap +++ b/tests/snapshots/compile_file_o_all__adt_string.hvm.snap @@ -3,5 +3,8 @@ source: tests/golden_tests.rs input_file: tests/golden_tests/compile_file_o_all/adt_string.hvm --- Errors: -At tests/golden_tests/compile_file_o_all/adt_string.hvm:1:6: String is a built-in datatype and should not be overridden. - 1 | data String = S +In tests/golden_tests/compile_file_o_all/adt_string.hvm : +String is a built-in datatype and should not be overridden. 
+ 1 | data String = S + 2 |  + 3 | main = S diff --git a/tests/snapshots/compile_term__wrong_nums.hvm.snap b/tests/snapshots/compile_term__wrong_nums.hvm.snap index 81cae79c..798b1f1e 100644 --- a/tests/snapshots/compile_term__wrong_nums.hvm.snap +++ b/tests/snapshots/compile_term__wrong_nums.hvm.snap @@ -3,5 +3,6 @@ source: tests/golden_tests.rs input_file: tests/golden_tests/compile_term/wrong_nums.hvm --- Errors: -found invalid number literal expected number -found invalid number literal expected number +- expected: ')' +- detected: + 1 | (+ 0b0123456789 0FA) diff --git a/tests/snapshots/encode_pattern_match__match_list_sugar.hvm.snap b/tests/snapshots/encode_pattern_match__match_list_sugar.hvm.snap deleted file mode 100644 index 72f9969f..00000000 --- a/tests/snapshots/encode_pattern_match__match_list_sugar.hvm.snap +++ /dev/null @@ -1,17 +0,0 @@ ---- -source: tests/golden_tests.rs -input_file: tests/golden_tests/encode_pattern_match/match_list_sugar.hvm ---- -TaggedScott: -(main) = #List (List.nil #List λ* #List λ* 1 0) - -(List.cons) = λa λb #List λc #List λ* #List (c a b) - -(List.nil) = #List λ* #List λb b - -Scott: -(main) = (List.nil λ* λ* 1 0) - -(List.cons) = λa λb λc λ* (c a b) - -(List.nil) = λ* λb b diff --git a/tests/snapshots/examples__all_tree.hvm.snap b/tests/snapshots/examples__all_tree.hvm.snap index 8d7c49e2..b6a3ed2a 100644 --- a/tests/snapshots/examples__all_tree.hvm.snap +++ b/tests/snapshots/examples__all_tree.hvm.snap @@ -1,5 +1,5 @@ --- source: tests/golden_tests.rs -input_file: tests/golden_tests/examples/all_tree.hvm +input_file: examples/all_tree.hvm --- True diff --git a/tests/snapshots/examples__alloc_small_tree.hvm.snap b/tests/snapshots/examples__alloc_small_tree.hvm.snap index e28c02f8..54b2e495 100644 --- a/tests/snapshots/examples__alloc_small_tree.hvm.snap +++ b/tests/snapshots/examples__alloc_small_tree.hvm.snap @@ -1,5 +1,5 @@ --- source: tests/golden_tests.rs -input_file: tests/golden_tests/examples/alloc_small_tree.hvm +input_file: examples/alloc_small_tree.hvm --- λa λ* a diff --git a/tests/snapshots/examples__fib.hvm.snap b/tests/snapshots/examples__fib.hvm.snap index ad6dac9e..f22eb999 100644 --- a/tests/snapshots/examples__fib.hvm.snap +++ b/tests/snapshots/examples__fib.hvm.snap @@ -1,5 +1,5 @@ --- source: tests/golden_tests.rs -input_file: tests/golden_tests/examples/fib.hvm +input_file: examples/fib.hvm --- 1346269 diff --git a/tests/snapshots/examples__gen_tree.hvm.snap b/tests/snapshots/examples__gen_tree.hvm.snap new file mode 100644 index 00000000..b298ce0a --- /dev/null +++ b/tests/snapshots/examples__gen_tree.hvm.snap @@ -0,0 +1,5 @@ +--- +source: tests/golden_tests.rs +input_file: examples/gen_tree.hvm +--- +λa λ* (a 2 λb λ* (b 5 λc λ* (c 11 λd λ* (d 23 λe λ* (e 47 λf λ* (f 95 λg λ* (g 191 λh λ* (h 383 λ* λi i λ* λj j) λk λ* (k 384 λ* λl l λ* λm m)) λn λ* (n 192 λo λ* (o 385 λ* λp p λ* λq q) λr λ* (r 386 λ* λs s λ* λt t))) λu λ* (u 96 λv λ* (v 193 λw λ* (w 387 λ* λx x λ* λy y) λz λ* (z 388 λ* λab ab λ* λbb bb)) λcb λ* (cb 194 λdb λ* (db 389 λ* λeb eb λ* λfb fb) λgb λ* (gb 390 λ* λhb hb λ* λib ib)))) λjb λ* (jb 48 λkb λ* (kb 97 λlb λ* (lb 195 λmb λ* (mb 391 λ* λnb nb λ* λob ob) λpb λ* (pb 392 λ* λqb qb λ* λrb rb)) λsb λ* (sb 196 λtb λ* (tb 393 λ* λub ub λ* λvb vb) λwb λ* (wb 394 λ* λxb xb λ* λyb yb))) λzb λ* (zb 98 λac λ* (ac 197 λbc λ* (bc 395 λ* λcc cc λ* λdc dc) λec λ* (ec 396 λ* λfc fc λ* λgc gc)) λhc λ* (hc 198 λic λ* (ic 397 λ* λjc jc λ* λkc kc) λlc λ* (lc 398 λ* λmc mc λ* λnc nc))))) λoc λ* (oc 24 λpc λ* (pc 49 λqc λ* (qc 99 
λrc λ* (rc 199 λsc λ* (sc 399 λ* λtc tc λ* λuc uc) λvc λ* (vc 400 λ* λwc wc λ* λxc xc)) λyc λ* (yc 200 λzc λ* (zc 401 λ* λad ad λ* λbd bd) λcd λ* (cd 402 λ* λdd dd λ* λed ed))) λfd λ* (fd 100 λgd λ* (gd 201 λhd λ* (hd 403 λ* λid id λ* λjd jd) λkd λ* (kd 404 λ* λld ld λ* λmd md)) λnd λ* (nd 202 λod λ* (od 405 λ* λpd pd λ* λqd qd) λrd λ* (rd 406 λ* λsd sd λ* λtd td)))) λud λ* (ud 50 λvd λ* (vd 101 λwd λ* (wd 203 λxd λ* (xd 407 λ* λyd yd λ* λzd zd) λae λ* (ae 408 λ* λbe be λ* λce ce)) λde λ* (de 204 λee λ* (ee 409 λ* λfe fe λ* λge ge) λhe λ* (he 410 λ* λie ie λ* λje je))) λke λ* (ke 102 λle λ* (le 205 λme λ* (me 411 λ* λne ne λ* λoe oe) λpe λ* (pe 412 λ* λqe qe λ* λre re)) λse λ* (se 206 λte λ* (te 413 λ* λue ue λ* λve ve) λwe λ* (we 414 λ* λxe xe λ* λye ye)))))) λze λ* (ze 12 λaf λ* (af 25 λbf λ* (bf 51 λcf λ* (cf 103 λdf λ* (df 207 λef λ* (ef 415 λ* λff ff λ* λgf gf) λhf λ* (hf 416 λ* λif if λ* λjf jf)) λkf λ* (kf 208 λlf λ* (lf 417 λ* λmf mf λ* λnf nf) λof λ* (of 418 λ* λpf pf λ* λqf qf))) λrf λ* (rf 104 λsf λ* (sf 209 λtf λ* (tf 419 λ* λuf uf λ* λvf vf) λwf λ* (wf 420 λ* λxf xf λ* λyf yf)) λzf λ* (zf 210 λag λ* (ag 421 λ* λbg bg λ* λcg cg) λdg λ* (dg 422 λ* λeg eg λ* λfg fg)))) λgg λ* (gg 52 λhg λ* (hg 105 λig λ* (ig 211 λjg λ* (jg 423 λ* λkg kg λ* λlg lg) λmg λ* (mg 424 λ* λng ng λ* λog og)) λpg λ* (pg 212 λqg λ* (qg 425 λ* λrg rg λ* λsg sg) λtg λ* (tg 426 λ* λug ug λ* λvg vg))) λwg λ* (wg 106 λxg λ* (xg 213 λyg λ* (yg 427 λ* λzg zg λ* λah ah) λbh λ* (bh 428 λ* λch ch λ* λdh dh)) λeh λ* (eh 214 λfh λ* (fh 429 λ* λgh gh λ* λhh hh) λih λ* (ih 430 λ* λjh jh λ* λkh kh))))) λlh λ* (lh 26 λmh λ* (mh 53 λnh λ* (nh 107 λoh λ* (oh 215 λph λ* (ph 431 λ* λqh qh λ* λrh rh) λsh λ* (sh 432 λ* λth th λ* λuh uh)) λvh λ* (vh 216 λwh λ* (wh 433 λ* λxh xh λ* λyh yh) λzh λ* (zh 434 λ* λai ai λ* λbi bi))) λci λ* (ci 108 λdi λ* (di 217 λei λ* (ei 435 λ* λfi fi λ* λgi gi) λhi λ* (hi 436 λ* λii ii λ* λji ji)) λki λ* (ki 218 λli λ* (li 437 λ* λmi mi λ* λni ni) λoi λ* (oi 438 λ* λpi pi λ* λqi qi)))) λri λ* (ri 54 λsi λ* (si 109 λti λ* (ti 219 λui λ* (ui 439 λ* λvi vi λ* λwi wi) λxi λ* (xi 440 λ* λyi yi λ* λzi zi)) λaj λ* (aj 220 λbj λ* (bj 441 λ* λcj cj λ* λdj dj) λej λ* (ej 442 λ* λfj fj λ* λgj gj))) λhj λ* (hj 110 λij λ* (ij 221 λjj λ* (jj 443 λ* λkj kj λ* λlj lj) λmj λ* (mj 444 λ* λnj nj λ* λoj oj)) λpj λ* (pj 222 λqj λ* (qj 445 λ* λrj rj λ* λsj sj) λtj λ* (tj 446 λ* λuj uj λ* λvj vj))))))) λwj λ* (wj 6 λxj λ* (xj 13 λyj λ* (yj 27 λzj λ* (zj 55 λak λ* (ak 111 λbk λ* (bk 223 λck λ* (ck 447 λ* λdk dk λ* λek ek) λfk λ* (fk 448 λ* λgk gk λ* λhk hk)) λik λ* (ik 224 λjk λ* (jk 449 λ* λkk kk λ* λlk lk) λmk λ* (mk 450 λ* λnk nk λ* λok ok))) λpk λ* (pk 112 λqk λ* (qk 225 λrk λ* (rk 451 λ* λsk sk λ* λtk tk) λuk λ* (uk 452 λ* λvk vk λ* λwk wk)) λxk λ* (xk 226 λyk λ* (yk 453 λ* λzk zk λ* λal al) λbl λ* (bl 454 λ* λcl cl λ* λdl dl)))) λel λ* (el 56 λfl λ* (fl 113 λgl λ* (gl 227 λhl λ* (hl 455 λ* λil il λ* λjl jl) λkl λ* (kl 456 λ* λll ll λ* λml ml)) λnl λ* (nl 228 λol λ* (ol 457 λ* λpl pl λ* λql ql) λrl λ* (rl 458 λ* λsl sl λ* λtl tl))) λul λ* (ul 114 λvl λ* (vl 229 λwl λ* (wl 459 λ* λxl xl λ* λyl yl) λzl λ* (zl 460 λ* λam am λ* λbm bm)) λcm λ* (cm 230 λdm λ* (dm 461 λ* λem em λ* λfm fm) λgm λ* (gm 462 λ* λhm hm λ* λim im))))) λjm λ* (jm 28 λkm λ* (km 57 λlm λ* (lm 115 λmm λ* (mm 231 λnm λ* (nm 463 λ* λom om λ* λpm pm) λqm λ* (qm 464 λ* λrm rm λ* λsm sm)) λtm λ* (tm 232 λum λ* (um 465 λ* λvm vm λ* λwm wm) λxm λ* (xm 466 λ* λym ym λ* λzm zm))) λan λ* (an 116 λbn λ* (bn 233 λcn λ* (cn 467 λ* λdn dn λ* λen en) λfn λ* (fn 468 
λ* λgn gn λ* λhn hn)) λin λ* (in 234 λjn λ* (jn 469 λ* λkn kn λ* λln ln) λmn λ* (mn 470 λ* λnn nn λ* λon on)))) λpn λ* (pn 58 λqn λ* (qn 117 λrn λ* (rn 235 λsn λ* (sn 471 λ* λtn tn λ* λun un) λvn λ* (vn 472 λ* λwn wn λ* λxn xn)) λyn λ* (yn 236 λzn λ* (zn 473 λ* λao ao λ* λbo bo) λco λ* (co 474 λ* λdo do λ* λeo eo))) λfo λ* (fo 118 λgo λ* (go 237 λho λ* (ho 475 λ* λio io λ* λjo jo) λko λ* (ko 476 λ* λlo lo λ* λmo mo)) λno λ* (no 238 λoo λ* (oo 477 λ* λpo po λ* λqo qo) λro λ* (ro 478 λ* λso so λ* λto to)))))) λuo λ* (uo 14 λvo λ* (vo 29 λwo λ* (wo 59 λxo λ* (xo 119 λyo λ* (yo 239 λzo λ* (zo 479 λ* λap ap λ* λbp bp) λcp λ* (cp 480 λ* λdp dp λ* λep ep)) λfp λ* (fp 240 λgp λ* (gp 481 λ* λhp hp λ* λip ip) λjp λ* (jp 482 λ* λkp kp λ* λlp lp))) λmp λ* (mp 120 λnp λ* (np 241 λop λ* (op 483 λ* λpp pp λ* λqp qp) λrp λ* (rp 484 λ* λsp sp λ* λtp tp)) λup λ* (up 242 λvp λ* (vp 485 λ* λwp wp λ* λxp xp) λyp λ* (yp 486 λ* λzp zp λ* λaq aq)))) λbq λ* (bq 60 λcq λ* (cq 121 λdq λ* (dq 243 λeq λ* (eq 487 λ* λfq fq λ* λgq gq) λhq λ* (hq 488 λ* λiq iq λ* λjq jq)) λkq λ* (kq 244 λlq λ* (lq 489 λ* λmq mq λ* λnq nq) λoq λ* (oq 490 λ* λpq pq λ* λqq qq))) λrq λ* (rq 122 λsq λ* (sq 245 λtq λ* (tq 491 λ* λuq uq λ* λvq vq) λwq λ* (wq 492 λ* λxq xq λ* λyq yq)) λzq λ* (zq 246 λar λ* (ar 493 λ* λbr br λ* λcr cr) λdr λ* (dr 494 λ* λer er λ* λfr fr))))) λgr λ* (gr 30 λhr λ* (hr 61 λir λ* (ir 123 λjr λ* (jr 247 λkr λ* (kr 495 λ* λlr lr λ* λmr mr) λnr λ* (nr 496 λ* λor or λ* λpr pr)) λqr λ* (qr 248 λrr λ* (rr 497 λ* λsr sr λ* λtr tr) λur λ* (ur 498 λ* λvr vr λ* λwr wr))) λxr λ* (xr 124 λyr λ* (yr 249 λzr λ* (zr 499 λ* λas as λ* λbs bs) λcs λ* (cs 500 λ* λds ds λ* λes es)) λfs λ* (fs 250 λgs λ* (gs 501 λ* λhs hs λ* λis is) λjs λ* (js 502 λ* λks ks λ* λls ls)))) λms λ* (ms 62 λns λ* (ns 125 λos λ* (os 251 λps λ* (ps 503 λ* λqs qs λ* λrs rs) λss λ* (ss 504 λ* λts ts λ* λus us)) λvs λ* (vs 252 λws λ* (ws 505 λ* λxs xs λ* λys ys) λzs λ* (zs 506 λ* λat at λ* λbt bt))) λct λ* (ct 126 λdt λ* (dt 253 λet λ* (et 507 λ* λft ft λ* λgt gt) λht λ* (ht 508 λ* λit it λ* λjt jt)) λkt λ* (kt 254 λlt λ* (lt 509 λ* λmt mt λ* λnt nt) λot λ* (ot 510 λ* λpt pt λ* λqt qt)))))))) diff --git a/tests/snapshots/examples__gen_tree_kind2.hvm.snap b/tests/snapshots/examples__gen_tree_kind2.hvm.snap deleted file mode 100644 index b93fb020..00000000 --- a/tests/snapshots/examples__gen_tree_kind2.hvm.snap +++ /dev/null @@ -1,5 +0,0 @@ ---- -source: tests/golden_tests.rs -input_file: tests/golden_tests/examples/gen_tree_kind2.hvm ---- -λ* λa λ* (a 2 λ* λb λ* (b 5 λ* λc λ* (c 11 λ* λd λ* (d 23 λ* λe λ* (e 47 λ* λf λ* (f 95 λ* λg λ* (g 191 λ* λh λ* (h 383 λ* λ* λi i λ* λ* λj j) λ* λk λ* (k 384 λ* λ* λl l λ* λ* λm m)) λ* λn λ* (n 192 λ* λo λ* (o 385 λ* λ* λp p λ* λ* λq q) λ* λr λ* (r 386 λ* λ* λs s λ* λ* λt t))) λ* λu λ* (u 96 λ* λv λ* (v 193 λ* λw λ* (w 387 λ* λ* λx x λ* λ* λy y) λ* λz λ* (z 388 λ* λ* λab ab λ* λ* λbb bb)) λ* λcb λ* (cb 194 λ* λdb λ* (db 389 λ* λ* λeb eb λ* λ* λfb fb) λ* λgb λ* (gb 390 λ* λ* λhb hb λ* λ* λib ib)))) λ* λjb λ* (jb 48 λ* λkb λ* (kb 97 λ* λlb λ* (lb 195 λ* λmb λ* (mb 391 λ* λ* λnb nb λ* λ* λob ob) λ* λpb λ* (pb 392 λ* λ* λqb qb λ* λ* λrb rb)) λ* λsb λ* (sb 196 λ* λtb λ* (tb 393 λ* λ* λub ub λ* λ* λvb vb) λ* λwb λ* (wb 394 λ* λ* λxb xb λ* λ* λyb yb))) λ* λzb λ* (zb 98 λ* λac λ* (ac 197 λ* λbc λ* (bc 395 λ* λ* λcc cc λ* λ* λdc dc) λ* λec λ* (ec 396 λ* λ* λfc fc λ* λ* λgc gc)) λ* λhc λ* (hc 198 λ* λic λ* (ic 397 λ* λ* λjc jc λ* λ* λkc kc) λ* λlc λ* (lc 398 λ* λ* λmc mc λ* λ* λnc nc))))) λ* λoc λ* (oc 24 λ* λpc λ* (pc 49 λ* λqc λ* (qc 99 
λ* λrc λ* (rc 199 λ* λsc λ* (sc 399 λ* λ* λtc tc λ* λ* λuc uc) λ* λvc λ* (vc 400 λ* λ* λwc wc λ* λ* λxc xc)) λ* λyc λ* (yc 200 λ* λzc λ* (zc 401 λ* λ* λad ad λ* λ* λbd bd) λ* λcd λ* (cd 402 λ* λ* λdd dd λ* λ* λed ed))) λ* λfd λ* (fd 100 λ* λgd λ* (gd 201 λ* λhd λ* (hd 403 λ* λ* λid id λ* λ* λjd jd) λ* λkd λ* (kd 404 λ* λ* λld ld λ* λ* λmd md)) λ* λnd λ* (nd 202 λ* λod λ* (od 405 λ* λ* λpd pd λ* λ* λqd qd) λ* λrd λ* (rd 406 λ* λ* λsd sd λ* λ* λtd td)))) λ* λud λ* (ud 50 λ* λvd λ* (vd 101 λ* λwd λ* (wd 203 λ* λxd λ* (xd 407 λ* λ* λyd yd λ* λ* λzd zd) λ* λae λ* (ae 408 λ* λ* λbe be λ* λ* λce ce)) λ* λde λ* (de 204 λ* λee λ* (ee 409 λ* λ* λfe fe λ* λ* λge ge) λ* λhe λ* (he 410 λ* λ* λie ie λ* λ* λje je))) λ* λke λ* (ke 102 λ* λle λ* (le 205 λ* λme λ* (me 411 λ* λ* λne ne λ* λ* λoe oe) λ* λpe λ* (pe 412 λ* λ* λqe qe λ* λ* λre re)) λ* λse λ* (se 206 λ* λte λ* (te 413 λ* λ* λue ue λ* λ* λve ve) λ* λwe λ* (we 414 λ* λ* λxe xe λ* λ* λye ye)))))) λ* λze λ* (ze 12 λ* λaf λ* (af 25 λ* λbf λ* (bf 51 λ* λcf λ* (cf 103 λ* λdf λ* (df 207 λ* λef λ* (ef 415 λ* λ* λff ff λ* λ* λgf gf) λ* λhf λ* (hf 416 λ* λ* λif if λ* λ* λjf jf)) λ* λkf λ* (kf 208 λ* λlf λ* (lf 417 λ* λ* λmf mf λ* λ* λnf nf) λ* λof λ* (of 418 λ* λ* λpf pf λ* λ* λqf qf))) λ* λrf λ* (rf 104 λ* λsf λ* (sf 209 λ* λtf λ* (tf 419 λ* λ* λuf uf λ* λ* λvf vf) λ* λwf λ* (wf 420 λ* λ* λxf xf λ* λ* λyf yf)) λ* λzf λ* (zf 210 λ* λag λ* (ag 421 λ* λ* λbg bg λ* λ* λcg cg) λ* λdg λ* (dg 422 λ* λ* λeg eg λ* λ* λfg fg)))) λ* λgg λ* (gg 52 λ* λhg λ* (hg 105 λ* λig λ* (ig 211 λ* λjg λ* (jg 423 λ* λ* λkg kg λ* λ* λlg lg) λ* λmg λ* (mg 424 λ* λ* λng ng λ* λ* λog og)) λ* λpg λ* (pg 212 λ* λqg λ* (qg 425 λ* λ* λrg rg λ* λ* λsg sg) λ* λtg λ* (tg 426 λ* λ* λug ug λ* λ* λvg vg))) λ* λwg λ* (wg 106 λ* λxg λ* (xg 213 λ* λyg λ* (yg 427 λ* λ* λzg zg λ* λ* λah ah) λ* λbh λ* (bh 428 λ* λ* λch ch λ* λ* λdh dh)) λ* λeh λ* (eh 214 λ* λfh λ* (fh 429 λ* λ* λgh gh λ* λ* λhh hh) λ* λih λ* (ih 430 λ* λ* λjh jh λ* λ* λkh kh))))) λ* λlh λ* (lh 26 λ* λmh λ* (mh 53 λ* λnh λ* (nh 107 λ* λoh λ* (oh 215 λ* λph λ* (ph 431 λ* λ* λqh qh λ* λ* λrh rh) λ* λsh λ* (sh 432 λ* λ* λth th λ* λ* λuh uh)) λ* λvh λ* (vh 216 λ* λwh λ* (wh 433 λ* λ* λxh xh λ* λ* λyh yh) λ* λzh λ* (zh 434 λ* λ* λai ai λ* λ* λbi bi))) λ* λci λ* (ci 108 λ* λdi λ* (di 217 λ* λei λ* (ei 435 λ* λ* λfi fi λ* λ* λgi gi) λ* λhi λ* (hi 436 λ* λ* λii ii λ* λ* λji ji)) λ* λki λ* (ki 218 λ* λli λ* (li 437 λ* λ* λmi mi λ* λ* λni ni) λ* λoi λ* (oi 438 λ* λ* λpi pi λ* λ* λqi qi)))) λ* λri λ* (ri 54 λ* λsi λ* (si 109 λ* λti λ* (ti 219 λ* λui λ* (ui 439 λ* λ* λvi vi λ* λ* λwi wi) λ* λxi λ* (xi 440 λ* λ* λyi yi λ* λ* λzi zi)) λ* λaj λ* (aj 220 λ* λbj λ* (bj 441 λ* λ* λcj cj λ* λ* λdj dj) λ* λej λ* (ej 442 λ* λ* λfj fj λ* λ* λgj gj))) λ* λhj λ* (hj 110 λ* λij λ* (ij 221 λ* λjj λ* (jj 443 λ* λ* λkj kj λ* λ* λlj lj) λ* λmj λ* (mj 444 λ* λ* λnj nj λ* λ* λoj oj)) λ* λpj λ* (pj 222 λ* λqj λ* (qj 445 λ* λ* λrj rj λ* λ* λsj sj) λ* λtj λ* (tj 446 λ* λ* λuj uj λ* λ* λvj vj))))))) λ* λwj λ* (wj 6 λ* λxj λ* (xj 13 λ* λyj λ* (yj 27 λ* λzj λ* (zj 55 λ* λak λ* (ak 111 λ* λbk λ* (bk 223 λ* λck λ* (ck 447 λ* λ* λdk dk λ* λ* λek ek) λ* λfk λ* (fk 448 λ* λ* λgk gk λ* λ* λhk hk)) λ* λik λ* (ik 224 λ* λjk λ* (jk 449 λ* λ* λkk kk λ* λ* λlk lk) λ* λmk λ* (mk 450 λ* λ* λnk nk λ* λ* λok ok))) λ* λpk λ* (pk 112 λ* λqk λ* (qk 225 λ* λrk λ* (rk 451 λ* λ* λsk sk λ* λ* λtk tk) λ* λuk λ* (uk 452 λ* λ* λvk vk λ* λ* λwk wk)) λ* λxk λ* (xk 226 λ* λyk λ* (yk 453 λ* λ* λzk zk λ* λ* λal al) λ* λbl λ* (bl 454 λ* λ* λcl cl λ* λ* λdl dl)))) λ* λel λ* (el 56 λ* λfl λ* (fl 113 λ* 
λgl λ* (gl 227 λ* λhl λ* (hl 455 λ* λ* λil il λ* λ* λjl jl) λ* λkl λ* (kl 456 λ* λ* λll ll λ* λ* λml ml)) λ* λnl λ* (nl 228 λ* λol λ* (ol 457 λ* λ* λpl pl λ* λ* λql ql) λ* λrl λ* (rl 458 λ* λ* λsl sl λ* λ* λtl tl))) λ* λul λ* (ul 114 λ* λvl λ* (vl 229 λ* λwl λ* (wl 459 λ* λ* λxl xl λ* λ* λyl yl) λ* λzl λ* (zl 460 λ* λ* λam am λ* λ* λbm bm)) λ* λcm λ* (cm 230 λ* λdm λ* (dm 461 λ* λ* λem em λ* λ* λfm fm) λ* λgm λ* (gm 462 λ* λ* λhm hm λ* λ* λim im))))) λ* λjm λ* (jm 28 λ* λkm λ* (km 57 λ* λlm λ* (lm 115 λ* λmm λ* (mm 231 λ* λnm λ* (nm 463 λ* λ* λom om λ* λ* λpm pm) λ* λqm λ* (qm 464 λ* λ* λrm rm λ* λ* λsm sm)) λ* λtm λ* (tm 232 λ* λum λ* (um 465 λ* λ* λvm vm λ* λ* λwm wm) λ* λxm λ* (xm 466 λ* λ* λym ym λ* λ* λzm zm))) λ* λan λ* (an 116 λ* λbn λ* (bn 233 λ* λcn λ* (cn 467 λ* λ* λdn dn λ* λ* λen en) λ* λfn λ* (fn 468 λ* λ* λgn gn λ* λ* λhn hn)) λ* λin λ* (in 234 λ* λjn λ* (jn 469 λ* λ* λkn kn λ* λ* λln ln) λ* λmn λ* (mn 470 λ* λ* λnn nn λ* λ* λon on)))) λ* λpn λ* (pn 58 λ* λqn λ* (qn 117 λ* λrn λ* (rn 235 λ* λsn λ* (sn 471 λ* λ* λtn tn λ* λ* λun un) λ* λvn λ* (vn 472 λ* λ* λwn wn λ* λ* λxn xn)) λ* λyn λ* (yn 236 λ* λzn λ* (zn 473 λ* λ* λao ao λ* λ* λbo bo) λ* λco λ* (co 474 λ* λ* λdo do λ* λ* λeo eo))) λ* λfo λ* (fo 118 λ* λgo λ* (go 237 λ* λho λ* (ho 475 λ* λ* λio io λ* λ* λjo jo) λ* λko λ* (ko 476 λ* λ* λlo lo λ* λ* λmo mo)) λ* λno λ* (no 238 λ* λoo λ* (oo 477 λ* λ* λpo po λ* λ* λqo qo) λ* λro λ* (ro 478 λ* λ* λso so λ* λ* λto to)))))) λ* λuo λ* (uo 14 λ* λvo λ* (vo 29 λ* λwo λ* (wo 59 λ* λxo λ* (xo 119 λ* λyo λ* (yo 239 λ* λzo λ* (zo 479 λ* λ* λap ap λ* λ* λbp bp) λ* λcp λ* (cp 480 λ* λ* λdp dp λ* λ* λep ep)) λ* λfp λ* (fp 240 λ* λgp λ* (gp 481 λ* λ* λhp hp λ* λ* λip ip) λ* λjp λ* (jp 482 λ* λ* λkp kp λ* λ* λlp lp))) λ* λmp λ* (mp 120 λ* λnp λ* (np 241 λ* λop λ* (op 483 λ* λ* λpp pp λ* λ* λqp qp) λ* λrp λ* (rp 484 λ* λ* λsp sp λ* λ* λtp tp)) λ* λup λ* (up 242 λ* λvp λ* (vp 485 λ* λ* λwp wp λ* λ* λxp xp) λ* λyp λ* (yp 486 λ* λ* λzp zp λ* λ* λaq aq)))) λ* λbq λ* (bq 60 λ* λcq λ* (cq 121 λ* λdq λ* (dq 243 λ* λeq λ* (eq 487 λ* λ* λfq fq λ* λ* λgq gq) λ* λhq λ* (hq 488 λ* λ* λiq iq λ* λ* λjq jq)) λ* λkq λ* (kq 244 λ* λlq λ* (lq 489 λ* λ* λmq mq λ* λ* λnq nq) λ* λoq λ* (oq 490 λ* λ* λpq pq λ* λ* λqq qq))) λ* λrq λ* (rq 122 λ* λsq λ* (sq 245 λ* λtq λ* (tq 491 λ* λ* λuq uq λ* λ* λvq vq) λ* λwq λ* (wq 492 λ* λ* λxq xq λ* λ* λyq yq)) λ* λzq λ* (zq 246 λ* λar λ* (ar 493 λ* λ* λbr br λ* λ* λcr cr) λ* λdr λ* (dr 494 λ* λ* λer er λ* λ* λfr fr))))) λ* λgr λ* (gr 30 λ* λhr λ* (hr 61 λ* λir λ* (ir 123 λ* λjr λ* (jr 247 λ* λkr λ* (kr 495 λ* λ* λlr lr λ* λ* λmr mr) λ* λnr λ* (nr 496 λ* λ* λor or λ* λ* λpr pr)) λ* λqr λ* (qr 248 λ* λrr λ* (rr 497 λ* λ* λsr sr λ* λ* λtr tr) λ* λur λ* (ur 498 λ* λ* λvr vr λ* λ* λwr wr))) λ* λxr λ* (xr 124 λ* λyr λ* (yr 249 λ* λzr λ* (zr 499 λ* λ* λas as λ* λ* λbs bs) λ* λcs λ* (cs 500 λ* λ* λds ds λ* λ* λes es)) λ* λfs λ* (fs 250 λ* λgs λ* (gs 501 λ* λ* λhs hs λ* λ* λis is) λ* λjs λ* (js 502 λ* λ* λks ks λ* λ* λls ls)))) λ* λms λ* (ms 62 λ* λns λ* (ns 125 λ* λos λ* (os 251 λ* λps λ* (ps 503 λ* λ* λqs qs λ* λ* λrs rs) λ* λss λ* (ss 504 λ* λ* λts ts λ* λ* λus us)) λ* λvs λ* (vs 252 λ* λws λ* (ws 505 λ* λ* λxs xs λ* λ* λys ys) λ* λzs λ* (zs 506 λ* λ* λat at λ* λ* λbt bt))) λ* λct λ* (ct 126 λ* λdt λ* (dt 253 λ* λet λ* (et 507 λ* λ* λft ft λ* λ* λgt gt) λ* λht λ* (ht 508 λ* λ* λit it λ* λ* λjt jt)) λ* λkt λ* (kt 254 λ* λlt λ* (lt 509 λ* λ* λmt mt λ* λ* λnt nt) λ* λot λ* (ot 510 λ* λ* λpt pt λ* λ* λqt qt)))))))) diff --git a/tests/snapshots/examples__neg_fusion.hvm.snap 
b/tests/snapshots/examples__neg_fusion.hvm.snap index 80d6b5f0..8d8c3d27 100644 --- a/tests/snapshots/examples__neg_fusion.hvm.snap +++ b/tests/snapshots/examples__neg_fusion.hvm.snap @@ -1,5 +1,5 @@ --- source: tests/golden_tests.rs -input_file: tests/golden_tests/examples/neg_fusion.hvm +input_file: examples/neg_fusion.hvm --- λa λ* a diff --git a/tests/snapshots/parse_file__repeated_adt_name.hvm.snap b/tests/snapshots/parse_file__repeated_adt_name.hvm.snap index d5ffd6ea..926b6ccb 100644 --- a/tests/snapshots/parse_file__repeated_adt_name.hvm.snap +++ b/tests/snapshots/parse_file__repeated_adt_name.hvm.snap @@ -3,5 +3,7 @@ source: tests/golden_tests.rs input_file: tests/golden_tests/parse_file/repeated_adt_name.hvm --- Errors: -At tests/golden_tests/parse_file/repeated_adt_name.hvm:2:6: Repeated datatype 'Foo' - 2 | data Foo = B +In tests/golden_tests/parse_file/repeated_adt_name.hvm : +Repeated datatype 'Foo' + 2 | data Foo = B + diff --git a/tests/snapshots/run_file__escape_sequences.hvm.snap b/tests/snapshots/run_file__escape_sequences.hvm.snap index c122166f..846bbf57 100644 --- a/tests/snapshots/run_file__escape_sequences.hvm.snap +++ b/tests/snapshots/run_file__escape_sequences.hvm.snap @@ -3,7 +3,7 @@ source: tests/golden_tests.rs input_file: tests/golden_tests/run_file/escape_sequences.hvm --- Lazy mode: -("\n\r\t\0\"'\u{afe}`", "\n\r\t\0\"'\u{afe}`") +("\n\r\t\0\"'\u{afe}\\", "\n\r\t\0\"'\u{afe}\\") Strict mode: -("\n\r\t\0\"'\u{afe}`", "\n\r\t\0\"'\u{afe}`") +("\n\r\t\0\"'\u{afe}\\", "\n\r\t\0\"'\u{afe}\\") diff --git a/tests/snapshots/run_file__names_hyphen_toplevel.hvm.snap b/tests/snapshots/run_file__names_hyphen_toplevel.hvm.snap index 5e5b6930..9e2b318c 100644 --- a/tests/snapshots/run_file__names_hyphen_toplevel.hvm.snap +++ b/tests/snapshots/run_file__names_hyphen_toplevel.hvm.snap @@ -3,20 +3,7 @@ source: tests/golden_tests.rs input_file: tests/golden_tests/run_file/names_hyphen_toplevel.hvm --- Lazy mode: -Errors: -At tests/golden_tests/run_file/names_hyphen_toplevel.hvm:1:1: Names with '-' are not supported at top level. - 1 | this-is-not-allowed = 1 -At tests/golden_tests/run_file/names_hyphen_toplevel.hvm:3:6: Names with '-' are not supported at top level. - 3 | data Foo-Bar = Baz-Qux -At tests/golden_tests/run_file/names_hyphen_toplevel.hvm:3:16: Names with '-' are not supported at top level. - 3 | data Foo-Bar = Baz-Qux - +(Baz-Qux 1) Strict mode: -Errors: -At tests/golden_tests/run_file/names_hyphen_toplevel.hvm:1:1: Names with '-' are not supported at top level. - 1 | this-is-not-allowed = 1 -At tests/golden_tests/run_file/names_hyphen_toplevel.hvm:3:6: Names with '-' are not supported at top level. - 3 | data Foo-Bar = Baz-Qux -At tests/golden_tests/run_file/names_hyphen_toplevel.hvm:3:16: Names with '-' are not supported at top level. - 3 | data Foo-Bar = Baz-Qux +(Baz-Qux 1) diff --git a/tests/snapshots/run_file__override_list_ctr.hvm.snap b/tests/snapshots/run_file__override_list_ctr.hvm.snap index a61617b7..7fc474d6 100644 --- a/tests/snapshots/run_file__override_list_ctr.hvm.snap +++ b/tests/snapshots/run_file__override_list_ctr.hvm.snap @@ -4,11 +4,19 @@ input_file: tests/golden_tests/run_file/override_list_ctr.hvm --- Lazy mode: Errors: -At tests/golden_tests/run_file/override_list_ctr.hvm:2:5: List.nil is a built-in constructor and should not be overridden. - 2 | = List.nil +In tests/golden_tests/run_file/override_list_ctr.hvm : +List.nil is a built-in constructor and should not be overridden. 
+ 1 | data Override + 2 |  = List.nil + 3 |  + 4 | main = [λz λk z] Strict mode: Errors: -At tests/golden_tests/run_file/override_list_ctr.hvm:2:5: List.nil is a built-in constructor and should not be overridden. - 2 | = List.nil +In tests/golden_tests/run_file/override_list_ctr.hvm : +List.nil is a built-in constructor and should not be overridden. + 1 | data Override + 2 |  = List.nil + 3 |  + 4 | main = [λz λk z] diff --git a/tests/snapshots/run_file__override_str_ctr.hvm.snap b/tests/snapshots/run_file__override_str_ctr.hvm.snap index 57b8a6bf..da497adc 100644 --- a/tests/snapshots/run_file__override_str_ctr.hvm.snap +++ b/tests/snapshots/run_file__override_str_ctr.hvm.snap @@ -4,11 +4,19 @@ input_file: tests/golden_tests/run_file/override_str_ctr.hvm --- Lazy mode: Errors: -At tests/golden_tests/run_file/override_str_ctr.hvm:2:5: String.cons is a built-in constructor and should not be overridden. - 2 | = (String.cons any) +In tests/golden_tests/run_file/override_str_ctr.hvm : +String.cons is a built-in constructor and should not be overridden. + 1 | data Override + 2 |  = (String.cons any) + 3 |  + 4 | main = (String.cons "any") Strict mode: Errors: -At tests/golden_tests/run_file/override_str_ctr.hvm:2:5: String.cons is a built-in constructor and should not be overridden. - 2 | = (String.cons any) +In tests/golden_tests/run_file/override_str_ctr.hvm : +String.cons is a built-in constructor and should not be overridden. + 1 | data Override + 2 |  = (String.cons any) + 3 |  + 4 | main = (String.cons "any") diff --git a/tests/snapshots/run_file__str_backtick.hvm.snap b/tests/snapshots/run_file__str_backtick.hvm.snap deleted file mode 100644 index 94dc3aa2..00000000 --- a/tests/snapshots/run_file__str_backtick.hvm.snap +++ /dev/null @@ -1,9 +0,0 @@ ---- -source: tests/golden_tests.rs -input_file: tests/golden_tests/run_file/str_backtick.hvm ---- -Lazy mode: -"abc369*`asdf\"asdf" - -Strict mode: -"abc369*`asdf\"asdf" diff --git a/tests/snapshots/run_lazy__lam_op2_nested.hvm.snap b/tests/snapshots/run_lazy__lam_op2_nested.hvm.snap index 7ea3b9d8..5c822cd9 100644 --- a/tests/snapshots/run_lazy__lam_op2_nested.hvm.snap +++ b/tests/snapshots/run_lazy__lam_op2_nested.hvm.snap @@ -2,4 +2,4 @@ source: tests/golden_tests.rs input_file: tests/golden_tests/run_lazy/lam_op2_nested.hvm --- -* +λa (+ (* a a) (+ (+ a 2) 3))
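
Note on the maybe_grow helper used throughout the src/term hunks above: its definition is not part of this diff. Below is a minimal sketch of what such a helper could look like, assuming it is a thin wrapper over stacker::maybe_grow (the stacker dependency is kept in Cargo.toml); the red-zone and stack-segment constants are illustrative, not taken from the source.

  // Hypothetical sketch, not part of this diff: grow the stack on demand so the
  // deeply recursive Term traversals refactored above do not overflow on large programs.
  pub fn maybe_grow<R>(f: impl FnOnce() -> R) -> R {
    // Illustrative constants: keep a 32 KiB red zone, allocate 1 MiB extra segments as needed.
    stacker::maybe_grow(32 * 1024, 1024 * 1024, f)
  }

Call sites then wrap each recursive step, e.g. maybe_grow(|| { for child in self.children_mut() { child.apply_use(); } }), mirroring the Term::recursive_call replacements in the hunks above.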