Fix parser hanging with large generated file

This commit is contained in:
LunaAmora 2024-02-22 12:12:26 -03:00
parent 4eb45c54f0
commit 79ee00125e
29 changed files with 68705 additions and 99 deletions

7
Cargo.lock generated
View File

@ -220,6 +220,7 @@ dependencies = [
"hvm-core",
"indexmap",
"insta",
"interner",
"itertools",
"logos",
"stdext",
@ -249,6 +250,12 @@ dependencies = [
"yaml-rust",
]
[[package]]
name = "interner"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8c60687056b35a996f2213287048a7092d801b61df5fee3bd5bd9bf6f17a2d0"
[[package]]
name = "itertools"
version = "0.11.0"

View File

@ -26,6 +26,7 @@ clap = { version = "4.4.1", features = ["derive"], optional = true }
highlight_error = "0.1.1"
hvm-core = { git = "https://github.com/HigherOrderCO/hvm-core" }
indexmap = "2.2.3"
interner = "0.2.1"
itertools = "0.11.0"
logos = "0.14.0"

View File

@ -26,7 +26,7 @@ impl hvmc::run::AsDef for InertDef {
///
/// It does not reduce interactions that use builtin defs, as they are
/// assumed to be side-effectful
pub fn pre_reduce_book(book: &mut Book, entrypoint: String) -> Result<(), String> {
pub fn pre_reduce_book(book: &mut Book, entrypoint: &str) -> Result<(), String> {
/// Maximum amount of rewrites that pre-reduction is allowed to perform
const MAX_RWTS: usize = 100_000;
// Create a host

View File

@ -75,7 +75,7 @@ pub fn compile_book(book: &mut Book, opts: CompileOpts) -> Result<CompileResult,
pre_reduce_book(&mut core_book, book.hvmc_entrypoint())?;
}
if opts.prune {
prune_defs(&mut core_book, book.hvmc_entrypoint());
prune_defs(&mut core_book, book.hvmc_entrypoint().to_string());
}
Ok(CompileResult { core_book, labels, warns })
}

View File

@ -1,4 +1,5 @@
use super::{INet, INode, INodes, NodeId, NodeKind::*, Port, SlotId, ROOT};
use crate::term::Name;
use hvmc::ast::{Net, Tree};
pub fn hvmc_to_net(net: &Net) -> INet {
@ -75,7 +76,7 @@ fn tree_to_inodes(tree: &Tree, tree_root: String, net_root: &str, n_vars: &mut N
}
Tree::Var { .. } => unreachable!(),
Tree::Ref { nam } => {
let kind = Ref { def_name: nam.clone().into() };
let kind = Ref { def_name: Name::new(nam.clone()) };
let var = new_var(n_vars);
inodes.push(INode { kind, ports: [subtree_root, var.clone(), var] });
}

View File

@ -90,11 +90,11 @@ impl Pattern {
}
fn encode_list(elements: Vec<Pattern>) -> Pattern {
let lnil = Pattern::Var(Some(Name::new(LNIL)));
let lnil = Pattern::Var(Some(Name::from(LNIL)));
elements.into_iter().rfold(lnil, |acc, mut nxt| {
nxt.encode_builtins();
Pattern::Ctr(Name::new(LCONS), vec![nxt, acc])
Pattern::Ctr(Name::from(LCONS), vec![nxt, acc])
})
}
}

View File

@ -69,7 +69,7 @@ fn check_pattern(
// TODO: Should check if it's a constructor type and use Pattern::is_flat_subset_of.
let rules_matching_ctrs = match pat_type {
// We can skip non pattern matching arguments
Type::Any => IndexMap::from([(Name::new("_"), rules_to_check)]),
Type::Any => IndexMap::from([(Name::from("_"), rules_to_check)]),
Type::Adt(adt_nam) => {
let adt = &adts[adt_nam];
// For each constructor, which rules do we need to check.
@ -87,19 +87,19 @@ fn check_pattern(
}
next_rules_to_check
}
Type::Tup => IndexMap::from([(Name::new("(_,_)"), rules_to_check)]),
Type::Tup => IndexMap::from([(Name::from("(_,_)"), rules_to_check)]),
Type::Num => {
let mut next_rules_to_check: IndexMap<Name, Vec<usize>> =
IndexMap::from([(Name::new("0"), vec![]), (Name::new("+"), vec![])]);
IndexMap::from([(Name::from("0"), vec![]), (Name::from("+"), vec![])]);
for rule_idx in rules_to_check {
let pat = &rules[rule_idx].pats[match_path.len()];
match pat {
Pattern::Var(_) => next_rules_to_check.values_mut().for_each(|x| x.push(rule_idx)),
Pattern::Num(MatchNum::Zero) => {
next_rules_to_check.get_mut(&Name::new("0")).unwrap().push(rule_idx);
next_rules_to_check.get_mut(&Name::from("0")).unwrap().push(rule_idx);
}
Pattern::Num(MatchNum::Succ { .. }) => {
next_rules_to_check.get_mut(&Name::new("+")).unwrap().push(rule_idx);
next_rules_to_check.get_mut(&Name::from("+")).unwrap().push(rule_idx);
}
_ => unreachable!(),
}

View File

@ -60,7 +60,7 @@ impl<'book> Ctx<'book> {
}
(None, None, None) => {
self.info.error(EntryErr::NotFound(self.book.entrypoint.clone().unwrap_or(Name::new(ENTRY_POINT))))
self.info.error(EntryErr::NotFound(self.book.entrypoint.clone().unwrap_or(Name::from(ENTRY_POINT))))
}
}
@ -81,8 +81,8 @@ fn validate_entry_point(entry: &Definition) -> Result<Name, EntryErr> {
impl Book {
fn get_possible_entry_points(&self) -> (Option<&Definition>, Option<&Definition>, Option<&Definition>) {
let custom = self.entrypoint.as_ref().map(|e| self.defs.get(e)).flatten();
let main = self.defs.get(&Name::new(ENTRY_POINT));
let hvm1_main = self.defs.get(&Name::new(HVM1_ENTRY_POINT));
let main = self.defs.get(&Name::from(ENTRY_POINT));
let hvm1_main = self.defs.get(&Name::from(HVM1_ENTRY_POINT));
(custom, main, hvm1_main)
}
}

View File

@ -1,6 +1,7 @@
use indexmap::{IndexMap, IndexSet};
use interner::global::GlobalString;
use itertools::Itertools;
use std::{collections::HashMap, ops::Deref, sync::Arc};
use std::{collections::HashMap, ops::Deref};
pub mod builtins;
pub mod check;
@ -16,6 +17,8 @@ pub use term_to_net::{book_to_nets, term_to_compat_net};
use crate::{diagnostics::Info, term::builtins::*, ENTRY_POINT};
use self::parser::lexer::STRINGS;
#[derive(Debug)]
pub struct Ctx<'book> {
pub book: &'book mut Book,
@ -201,8 +204,14 @@ pub enum AdtEncoding {
TaggedScott,
}
#[derive(Debug, PartialEq, Eq, Clone, Hash, PartialOrd, Ord)]
pub struct Name(pub Arc<str>);
#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct Name(GlobalString);
impl PartialEq<str> for Name {
fn eq(&self, other: &str) -> bool {
&**self == other
}
}
pub fn num_to_name(mut num: u64) -> String {
let mut name = String::new();
@ -223,7 +232,7 @@ impl Tag {
}
pub fn adt_field(adt: &Name, ctr: &Name, field: &Name) -> Self {
Self::Named(format!("{adt}.{ctr}.{field}").into())
Self::Named(Name::new(format!("{adt}.{ctr}.{field}")))
}
}
@ -267,7 +276,7 @@ impl Term {
}
pub fn r#ref(name: &str) -> Self {
Term::Ref { nam: name.to_string().into() }
Term::Ref { nam: name.into() }
}
/// Substitute the occurrences of a variable in a term with the given term.
@ -486,7 +495,7 @@ impl Term {
if let Term::Var { nam } = &scrutinee {
if let Some(label) = &succ_label {
let new_label = Name::from(format!("{}-1", nam));
let new_label = Name::new(format!("{}-1", nam));
succ_term.subst(label, &Term::Var { nam: new_label.clone() });
succ_label = Some(new_label);
}
@ -498,7 +507,7 @@ impl Term {
Some(succ) => {
let match_bind = succ.clone();
let new_label = Name::from(format!("{}-1", succ));
let new_label = Name::new(format!("{}-1", succ));
succ_term.subst(&succ, &Term::Var { nam: new_label.clone() });
succ_label = Some(new_label);
@ -580,20 +589,20 @@ impl Pattern {
pats.iter().for_each(|pat| go(pat, set));
}
Pattern::Lst(pats) => {
set.push(builtins::LCONS.to_string().into());
set.push(builtins::LNIL.to_string().into());
set.push(builtins::LCONS.into());
set.push(builtins::LNIL.into());
pats.iter().for_each(|pat| go(pat, set))
}
Pattern::Tup(fst, snd) => {
set.push("(,)".to_string().into());
set.push("(,)".into());
go(fst, set);
go(snd, set);
}
Pattern::Num(MatchNum::Zero) => {
set.push("0".to_string().into());
set.push("0".into());
}
Pattern::Num(MatchNum::Succ(_)) => {
set.push("+".to_string().into());
set.push("+".into());
}
Pattern::Var(_) => {}
}
@ -636,7 +645,7 @@ impl Pattern {
}
Pattern::Tup(..) => Type::Tup,
Pattern::Num(..) => Type::Num,
Pattern::Lst(..) => Type::Adt(builtins::LIST.to_string().into()),
Pattern::Lst(..) => Type::Adt(builtins::LIST.into()),
}
}
@ -727,11 +736,11 @@ impl Type {
Type::None => vec![],
Type::Any => vec![],
Type::Tup => vec![Pattern::Tup(
Box::new(Pattern::Var(Some(Name::new("%fst")))),
Box::new(Pattern::Var(Some(Name::new("%snd")))),
Box::new(Pattern::Var(Some("%fst".into()))),
Box::new(Pattern::Var(Some("%snd".into()))),
)],
Type::Num => {
vec![Pattern::Num(MatchNum::Zero), Pattern::Num(MatchNum::Succ(Some(Some(Name::new("%pred")))))]
vec![Pattern::Num(MatchNum::Zero), Pattern::Num(MatchNum::Succ(Some(Some("%pred".into()))))]
}
Type::Adt(adt) => {
// TODO: Should return just a ref to ctrs and not clone.
@ -757,8 +766,10 @@ impl Type {
}
impl Name {
pub fn new(value: &str) -> Self {
Name::from(value.to_string())
/// For constructing a name when you have an owned String.
/// If you have an `&str` prefer to use `Name::from` (the `From<&str>` impl)
pub fn new(value: String) -> Name {
Name(STRINGS.get(value))
}
pub fn is_generated(&self) -> bool {
@ -767,21 +778,21 @@ impl Name {
}
}
impl From<String> for Name {
fn from(value: String) -> Self {
Name(Arc::from(value))
impl From<&str> for Name {
fn from(value: &str) -> Self {
Name(STRINGS.get(value))
}
}
impl From<u64> for Name {
fn from(value: u64) -> Self {
num_to_name(value).into()
num_to_name(value).as_str().into()
}
}
impl From<u32> for Name {
fn from(value: u32) -> Self {
num_to_name(value as u64).into()
num_to_name(value as u64).as_str().into()
}
}
@ -789,21 +800,15 @@ impl Deref for Name {
type Target = str;
fn deref(&self) -> &Self::Target {
self.0.deref()
}
}
impl AsRef<str> for Name {
fn as_ref(&self) -> &str {
self.0.as_ref()
&self.0
}
}
impl Book {
pub fn hvmc_entrypoint(&self) -> String {
match self.entrypoint.as_ref().map(|e| e.0.as_ref()) {
Some("main" | "Main") | None => ENTRY_POINT.to_string(),
Some(nam) => nam.to_string(),
pub fn hvmc_entrypoint(&self) -> &str {
match self.entrypoint.as_ref().map(|e| e.as_ref()) {
Some("main" | "Main") | None => ENTRY_POINT,
Some(nam) => nam,
}
}
}

View File

@ -60,7 +60,7 @@ impl<'a> Reader<'a> {
fn read_term(&mut self, next: Port) -> Term {
if self.dup_paths.is_none() && !self.seen.insert(next) {
self.error(ReadbackError::Cyclic);
return Term::Var { nam: Name::new("...") };
return Term::Var { nam: Name::from("...") };
}
let node = next.node();
@ -339,7 +339,7 @@ impl Term {
pub fn fix_names(&mut self, id_counter: &mut u64, book: &Book) {
fn fix_name(nam: &mut Option<Name>, id_counter: &mut u64, bod: &mut Term) {
if let Some(nam) = nam {
let name = Name::from(num_to_name(*id_counter));
let name = Name::new(num_to_name(*id_counter));
*id_counter += 1;
bod.subst(nam, &Term::Var { nam: name.clone() });
*nam = name;

View File

@ -1,12 +1,15 @@
use interner::global::{GlobalPool, GlobalString};
use logos::{FilterResult, Lexer, Logos};
use std::{fmt, num::ParseIntError};
pub static STRINGS: GlobalPool<String> = GlobalPool::new();
#[derive(Logos, Debug, PartialEq, Clone)]
#[logos(error=LexingError)]
#[logos(skip r"[ \t\n\f]+")]
pub enum Token {
#[regex("[_.a-zA-Z][_.a-zA-Z0-9-]*", |lex| lex.slice().parse().ok())]
Name(String),
#[regex("[_.a-zA-Z][_.a-zA-Z0-9-]*", |lex| lex.slice().parse().ok().map(|s: String| STRINGS.get(s)))]
Name(GlobalString),
#[regex("@|λ")]
Lambda,

View File

@ -18,6 +18,8 @@ use indexmap::{map::Entry, IndexMap};
use logos::{Logos, SpannedIter};
use std::{iter::Map, ops::Range, path::Path};
use super::lexer::STRINGS;
// hvml grammar description:
// <Book> ::= <TopLevel>*
// <TopLevel> ::= (<Def> | <Data>)
@ -114,7 +116,7 @@ fn soft_keyword<'a, I>(keyword: &'static str) -> impl Parser<'a, I, (), extra::E
where
I: ValueInput<'a, Token = Token, Span = SimpleSpan>,
{
any().filter(move |t| matches!(t, Token::Name(n) if n.as_str() == keyword)).to(()).labelled(keyword)
any().filter(move |t| matches!(t, Token::Name(n) if n == keyword)).to(()).labelled(keyword)
}
fn name<'a, I>() -> impl Parser<'a, I, Name, extra::Err<Rich<'a, Token>>>
@ -127,8 +129,8 @@ where
any()
.filter(|t| matches!(t, Token::Name(_)))
.map(|t| {
let Token::Name(name) = t else { unreachable!() };
Name::from(name)
let Token::Name(n) = t else { unreachable!() };
Name(n)
})
.labelled("<Name>")
}
@ -139,7 +141,7 @@ where
I: ValueInput<'a, Token = Token, Span = SimpleSpan>,
{
any()
.filter(|t| matches!(t, Token::Name(n) if n.as_str() != "data"))
.filter(|t| matches!(t, Token::Name(n) if n != "data"))
.map(|t| {
let Token::Name(name) = t else { unreachable!() };
name
@ -148,7 +150,7 @@ where
if out.contains('-') {
emitter.emit(Rich::custom(span, "Names with '-' are not supported at top level."));
}
Name::from(out)
Name(out)
})
.labelled("<Name>")
}
@ -335,7 +337,10 @@ where
.boxed();
choice((
global_var, var, number, list, str, chr, sup, tup, global_lam, lam, dup, let_, match_, num_op, app, era,
// OBS: num_op has to be before app, idk why?
// OBS: app has to be before `tup` to not overflow on huge app terms
// TODO: What happens on huge `tup` terms?
num_op, app, tup, global_var, var, number, list, str, chr, sup, global_lam, lam, dup, let_, match_, era,
))
})
}
@ -414,7 +419,7 @@ where
}));
let paren_lhs = just(Token::LParen)
.ignore_then(lhs.clone().map_err(|err| map_unexpected_eof::<I>(err, Token::Name("<Name>".into()))))
.ignore_then(lhs.clone().map_err(|err| map_unexpected_eof::<I>(err, Token::Name(STRINGS.get("<Name>")))))
.then_ignore(just(Token::RParen))
.then_ignore(just(Token::Equals).map_err(|err| map_unexpected_eof::<I>(err, Token::Equals)));
@ -442,9 +447,9 @@ where
let data_name = tl_name().map_with_span(|name, span| (name, span));
soft_keyword("data")
.ignore_then(data_name.map_err(|err| map_unexpected_eof::<I>(err, Token::Name("<Name>".to_string()))))
.ignore_then(data_name.map_err(|err| map_unexpected_eof::<I>(err, Token::Name(STRINGS.get("<Name>")))))
.then_ignore(just(Token::Equals))
.then(ctrs.map_err(|err| map_unexpected_eof::<I>(err, Token::Name("constructor".to_string()))))
.then(ctrs.map_err(|err| map_unexpected_eof::<I>(err, Token::Name(STRINGS.get("constructor")))))
.map(move |(name, ctrs)| TopLevel::Adt(name, ctrs))
}

View File

@ -15,7 +15,7 @@ pub fn book_to_nets(book: &Book) -> (HashMap<String, INet>, Labels) {
let net = term_to_compat_net(&rule.body, &mut labels);
let name = if def.name == *main {
book.hvmc_entrypoint()
book.hvmc_entrypoint().to_string()
} else {
def.name.0.to_string()
};

View File

@ -28,7 +28,7 @@ impl Book {
for (term, equal_defs) in equal_terms {
// def1_$_def2_$_def3
let new_name = Name::from(equal_defs.iter().join("_$_"));
let new_name = Name::new(equal_defs.iter().join("_$_"));
// Builtin origin takes precedence
let builtin = equal_defs.iter().any(|nam| self.defs[nam].builtin);

View File

@ -114,13 +114,13 @@ impl Book {
}
}
Term::Lst { els } => {
self.insert_ctrs_used(&Name::new(LIST), uses, adt_encoding);
self.insert_ctrs_used(&Name::from(LIST), uses, adt_encoding);
for term in els {
to_find.push(term);
}
}
Term::Str { .. } => {
self.insert_ctrs_used(&Name::new(STRING), uses, adt_encoding);
self.insert_ctrs_used(&Name::from(STRING), uses, adt_encoding);
}
Term::Var { .. } | Term::Lnk { .. } | Term::Num { .. } | Term::Era | Term::Err => (),
}

View File

@ -23,7 +23,7 @@ impl Term {
} else {
let Term::Mat { matched, arms } = std::mem::take(term) else { unreachable!() };
let nam = Name::new("%matched");
let nam = Name::from("%matched");
*term = Term::Let {
pat: Pattern::Var(Some(nam.clone())),
@ -49,7 +49,7 @@ impl Term {
// Implicit ctr args
*pat_args = ctr_args
.iter()
.map(|field| Pattern::Var(Some(format!("{matched}.{field}").into())))
.map(|field| Pattern::Var(Some(Name::new(format!("{matched}.{field}")))))
.collect();
}
}
@ -57,7 +57,7 @@ impl Term {
Pattern::Num(MatchNum::Succ(Some(_))) => (),
Pattern::Num(MatchNum::Succ(p @ None)) => {
// Implicit num arg
*p = Some(Some(format!("{matched}-1").into()));
*p = Some(Some(Name::new(format!("{matched}-1"))));
}
Pattern::Tup(_, _) => (),
Pattern::Lst(..) => unreachable!(),

View File

@ -51,7 +51,7 @@ impl Term {
*self = if let Term::Var { .. } = val.as_ref() {
Term::Mat { matched: val, arms }
} else {
let nam = Name::new("%temp%scrutinee");
let nam = Name::from("%temp%scrutinee");
let pat = Pattern::Var(Some(nam.clone()));
let scrutinee = Box::new(Term::Var { nam });
Term::Let { pat, val, nxt: Box::new(Term::Mat { matched: scrutinee, arms }) }

View File

@ -65,7 +65,7 @@ impl<'d> TermInfo<'d> {
}
fn detach_term(&mut self, term: &mut Term) {
let comb_name = Name::from(format!("{}$S{}", self.def_name, self.counter));
let comb_name = Name::new(format!("{}$S{}", self.def_name, self.counter));
self.counter += 1;
let comb_var = Term::Ref { nam: comb_name.clone() };

View File

@ -113,7 +113,7 @@ fn make_match_case(
// Create the subfunctions
let mut next_cases = vec![];
let next_ctrs = if next_type.is_var_type() {
vec![Pattern::Var(Some(Name::new("x")))]
vec![Pattern::Var(Some(Name::from("x")))]
} else {
next_type.ctrs(&book.adts)
};
@ -132,7 +132,7 @@ fn make_match_case(
let (old_args, new_args) = args_from_match_path(&match_path);
// Encode the current pattern matching, calling the subfunctions
let match_var = Name::new("x");
let match_var = Name::from("x");
// The match term itself
let term = encode_match(next_type, &match_var, next_cases.into_iter(), adt_encoding);
// The calls to the args of previous matches
@ -157,12 +157,12 @@ fn encode_match(
// let (%fst, %snd) = x; (arm[0] %fst %snd)
Type::Tup => Term::Let {
pat: Pattern::Tup(
Box::new(Pattern::Var(Some(Name::new("%fst")))),
Box::new(Pattern::Var(Some(Name::new("%snd")))),
Box::new(Pattern::Var(Some(Name::from("%fst")))),
Box::new(Pattern::Var(Some(Name::from("%snd")))),
),
val: Box::new(Term::Var { nam: match_var.clone() }),
nxt: Box::new(Term::call(arms.next().unwrap(), [Term::Var { nam: Name::new("%fst") }, Term::Var {
nam: Name::new("%snd"),
nxt: Box::new(Term::call(arms.next().unwrap(), [Term::Var { nam: Name::from("%fst") }, Term::Var {
nam: Name::from("%snd"),
}])),
},
// match x {0: arm[0]; +: arm[1]}
@ -268,10 +268,10 @@ fn args_from_match_path(match_path: &[Pattern]) -> (Vec<Name>, Vec<Name>) {
.iter()
.flat_map(|pat| pat.vars())
.enumerate()
.map(|(i, _)| format!("%x{i}").into())
.map(|(i, _)| Name::new(format!("%x{i}")))
.collect();
let new_args: Vec<Name> = new_match
.map(|pat| pat.vars().enumerate().map(|(i, _)| format!("%y{i}").into()).collect())
.map(|pat| pat.vars().enumerate().map(|(i, _)| Name::new(format!("%y{i}"))).collect())
.unwrap_or(vec![]);
(old_args, new_args)
}
@ -280,7 +280,7 @@ fn args_from_match_path(match_path: &[Pattern]) -> (Vec<Name>, Vec<Name>) {
/// Name for a variable to be substituted with the rule body.
fn rule_body_subst_var(rule_idx: usize) -> Name {
format!("%rule_subst_{rule_idx}").into()
Name::new(format!("%rule_subst_{rule_idx}"))
}
/// Beta reduces a term.

View File

@ -161,7 +161,7 @@ fn match_to_def(
match_count: usize,
) -> Term {
let rules = arms.iter().map(|(pat, term)| Rule { pats: vec![pat.clone()], body: term.clone() }).collect();
let new_name = Name::from(format!("{def_name}$match${match_count}"));
let new_name = Name::new(format!("{def_name}$match${match_count}"));
let def = Definition { name: new_name.clone(), rules, builtin };
new_defs.push((new_name.clone(), def));

View File

@ -34,7 +34,7 @@ fn flatten_def(def: &Definition) -> Vec<Definition> {
if must_split {
// Create the entry for the new definition name
let old_name = &def.name;
let new_name = Name::from(format!("{}$F{}", old_name, new_defs.len()));
let new_name = Name::new(format!("{}$F{}", old_name, new_defs.len()));
// Create the rule that replaces the one being flattened.
// Destructs one layer of the nested patterns and calls the following, forwarding the extracted fields.
@ -237,5 +237,5 @@ fn make_split_rule(old_rule: &Rule, other_rule: &Rule) -> Rule {
fn make_var_name(var_count: &mut usize) -> Name {
let nam = format!("%x{var_count}");
*var_count += 1;
nam.into()
Name::new(nam)
}

View File

@ -179,12 +179,12 @@ fn duplicate_let(nam: &Name, nxt: &mut Term, uses: u64, let_body: &mut Term) {
fn dup_name(nam: &Name, uses: u64) -> Name {
if uses == 1 {
format!("{nam}").into()
Name::new(format!("{nam}"))
} else {
format!("{nam}_{uses}").into()
Name::new(format!("{nam}_{uses}"))
}
}
fn internal_dup_name(nam: &Name, uses: u64) -> Name {
format!("{}_dup", dup_name(nam, uses)).into()
}
Name::new(format!("{}_dup", dup_name(nam, uses)))
}

View File

@ -108,12 +108,12 @@ pub fn linearize_match_unscoped_vars(match_term: &mut Term) -> Result<&mut Term,
let (decls, uses) = arm.unscoped_vars();
// Not allowed to declare unscoped var and not use it since we need to extract the match arm.
if let Some(var) = decls.difference(&uses).next() {
return Err(MatchErr::Linearize(format!("λ${var}").into()));
return Err(MatchErr::Linearize(Name::new(format!("λ${var}"))));
}
// Change unscoped var to normal scoped var if it references something outside this match arm.
let arm_free_vars = uses.difference(&decls);
for var in arm_free_vars.clone() {
arm.subst_unscoped(var, &Term::Var { nam: format!("%match%unscoped%{var}").into() });
arm.subst_unscoped(var, &Term::Var { nam: Name::new(format!("%match%unscoped%{var}")) });
}
free_vars.extend(arm_free_vars.cloned());
}
@ -124,7 +124,7 @@ pub fn linearize_match_unscoped_vars(match_term: &mut Term) -> Result<&mut Term,
*body = free_vars
.iter()
.rev()
.fold(old_body, |body, var| Term::named_lam(format!("%match%unscoped%{var}").into(), body));
.fold(old_body, |body, var| Term::named_lam(Name::new(format!("%match%unscoped%{var}")), body));
}
// Add apps to the match

View File

@ -127,13 +127,13 @@ fn normalize_num_match(term: &mut Term) -> Result<(), MatchErr> {
pat: Pattern::Var(Some(var.clone())),
val: Box::new(Term::Opx {
op: Op::ADD,
fst: Box::new(Term::Var { nam: Name::new("%pred") }),
fst: Box::new(Term::Var { nam: Name::from("%pred") }),
snd: Box::new(Term::Num { val: 1 }),
}),
nxt: Box::new(std::mem::take(body)),
};
let body = Term::named_lam(Name::new("%pred"), body);
let body = Term::named_lam(Name::from("%pred"), body);
succ_arm = Some((Pattern::Num(MatchNum::Succ(None)), body));
break;
}
@ -150,10 +150,10 @@ fn normalize_num_match(term: &mut Term) -> Result<(), MatchErr> {
}
let Some(zero_arm) = zero_arm else {
return Err(MatchErr::Missing(["0".to_string().into()].into()));
return Err(MatchErr::Missing(["0".into()].into()));
};
let Some(succ_arm) = succ_arm else {
return Err(MatchErr::Missing(["+".to_string().into()].into()));
return Err(MatchErr::Missing(["+".into()].into()));
};
*arms = vec![zero_arm, succ_arm];
Ok(())

View File

@ -203,7 +203,7 @@ impl Term {
errs.push(ReadbackError::UnexpectedTag(expected_tag.clone(), tag.clone()));
}
let arg = Name::from(format!("{ctr}.{field}"));
let arg = Name::new(format!("{ctr}.{field}"));
args.push(Pattern::Var(Some(arg.clone())));
*arm = Term::tagged_app(expected_tag, std::mem::take(arm), Term::Var { nam: arg });
}

View File

@ -20,7 +20,7 @@ impl Term {
let head = std::mem::take(head);
let mut tail = std::mem::take(tail);
if ctr.as_ref() == SCONS
if ctr == SCONS
&& let Term::Num { val } = head
&& let Term::Str { val: tail } = tail
{
@ -39,7 +39,7 @@ impl Term {
}
}
// (String.nil)
Term::Ref { nam: def_name } if def_name.as_ref() == SNIL => *self = Term::Str { val: String::new() },
Term::Ref { nam: def_name } if def_name == SNIL => *self = Term::Str { val: String::new() },
Term::Mat { matched, arms } => {
matched.resugar_strings();
@ -88,7 +88,7 @@ impl Term {
let head = std::mem::take(head);
let tail = std::mem::take(tail);
if ctr.as_ref() == LCONS
if ctr == LCONS
&& let Term::Lst { els: tail } = tail
{
// If well formed list, cons the next element to the list being formed
@ -100,7 +100,7 @@ impl Term {
}
}
// (List.nil)
Term::Ref { nam: def_name } if def_name.as_ref() == LNIL => *self = Term::Lst { els: vec![] },
Term::Ref { nam: def_name } if def_name == LNIL => *self = Term::Lst { els: vec![] },
Term::Mat { matched, arms } => {
matched.resugar_lists();

View File

@ -266,7 +266,7 @@ fn hangs() {
fn compile_entrypoint() {
run_golden_test_dir(function_name!(), &|code, path| {
let mut book = do_parse_book(code, path)?;
book.entrypoint = Some(Name::new("foo"));
book.entrypoint = Some(Name::from("foo"));
let compiled = compile_book(&mut book, CompileOpts::light())?;
Ok(format!("{:?}", compiled))
})
@ -276,7 +276,7 @@ fn compile_entrypoint() {
fn run_entrypoint() {
run_golden_test_dir(function_name!(), &|code, path| {
let mut book = do_parse_book(code, path)?;
book.entrypoint = Some(Name::new("foo"));
book.entrypoint = Some(Name::from("foo"));
// 1 million nodes for the test runtime. Smaller doesn't seem to make it any faster
let (res, info) =
run_book(book, 1 << 20, RunOpts::default(), WarningOpts::deny_all(), CompileOpts::heavy())?;

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff