From 570804dbe1a158e77e64381c04d4382a8bce47d5 Mon Sep 17 00:00:00 2001
From: Anton-4 <17049058+Anton-4@users.noreply.github.com>
Date: Fri, 29 Oct 2021 14:50:44 +0200
Subject: [PATCH 1/8] no need to install earthly on self-hosted runner

---
 .github/workflows/nightly.yml | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml
index d77c742035..a7977235f7 100644
--- a/.github/workflows/nightly.yml
+++ b/.github/workflows/nightly.yml
@@ -1,6 +1,6 @@
 on:
   schedule:
-    - cron: '0 0 * * *'
+    - cron: '0 9 * * *'

 name: Nightly Release Build

@@ -13,8 +13,6 @@ jobs:
       FORCE_COLOR: 1 # for earthly logging
     steps:
       - uses: actions/checkout@v2
-      - name: install earthly
-        run: "sudo /bin/sh -c 'wget https://github.com/earthly/earthly/releases/latest/download/earthly-linux-amd64 -O /usr/local/bin/earthly && chmod +x /usr/local/bin/earthly && /usr/local/bin/earthly bootstrap --with-autocomplete'"
       - name: Earthly print version
         run: earthly --version
       - name: install dependencies, build, run tests, build release

From e0fac60ddb762cc3627527aef69e6a8c203bb94f Mon Sep 17 00:00:00 2001
From: Anton-4 <17049058+Anton-4@users.noreply.github.com>
Date: Fri, 29 Oct 2021 20:10:57 +0200
Subject: [PATCH 2/8] fixed undefined _dl_catch_error_ptr

---
 compiler/build/src/link.rs | 55 ++++++++++++++++++++++++--------------
 nix/sources.json           |  6 ++---
 nix/zig.nix                | 41 ----------------------------
 shell.nix                  |  6 ++---
 4 files changed, 40 insertions(+), 68 deletions(-)
 delete mode 100644 nix/zig.nix

diff --git a/compiler/build/src/link.rs b/compiler/build/src/link.rs
index 8296c11db8..6f8a8ca478 100644
--- a/compiler/build/src/link.rs
+++ b/compiler/build/src/link.rs
@@ -529,15 +529,12 @@ pub fn rebuild_host(
     }
 }

-fn nixos_path() -> String {
-    env::var("NIXOS_GLIBC_PATH").unwrap_or_else(|_| {
-        panic!(
-            "We couldn't find glibc! We tried looking for NIXOS_GLIBC_PATH
-to find it via Nix, but that didn't work either. Please file a bug report.
-
-This will only be an issue until we implement surgical linking.",
-        )
-    })
+fn nix_path_opt() -> Option<String> {
+    if let Some(path) = env::var_os("NIX_GLIBC_PATH") {
+        Some(path.into_string().unwrap())
+    } else {
+        None
+    }
 }

 fn library_path<const N: usize>(segments: [&str; N]) -> Option<PathBuf> {
@@ -586,21 +583,39 @@ fn link_linux(
         ));
     }

-    let libcrt_path = library_path(["/usr", "lib", &architecture])
-        .or_else(|| library_path(["/usr", "lib"]))
-        .or_else(|| library_path([&nixos_path()]))
-        .unwrap();
+    let libcrt_path =
+        // give preference to nix_path if it's defined, this prevents bugs
+        if let Some(nix_path) = nix_path_opt() {
+            library_path([&nix_path])
+                .unwrap()
+        } else {
+            library_path(["/usr", "lib", &architecture])
+                .or_else(|| library_path(["/usr", "lib"]))
+                .unwrap()
+        };

     let libgcc_name = "libgcc_s.so.1";
-    let libgcc_path = library_path(["/lib", &architecture, libgcc_name])
-        .or_else(|| library_path(["/usr", "lib", &architecture, libgcc_name]))
-        .or_else(|| library_path(["/usr", "lib", libgcc_name]))
-        .or_else(|| library_path([&nixos_path(), libgcc_name]))
-        .unwrap();
+    let libgcc_path =
+        // give preference to nix_path if it's defined, this prevents bugs
+        if let Some(nix_path) = nix_path_opt() {
+            library_path([&nix_path, libgcc_name])
+                .unwrap()
+        } else {
+            library_path(["/lib", &architecture, libgcc_name])
+                .or_else(|| library_path(["/usr", "lib", &architecture, libgcc_name]))
+                .or_else(|| library_path(["/usr", "lib", libgcc_name]))
+                .unwrap()
+        };

     let ld_linux = match target.architecture {
-        Architecture::X86_64 => library_path(["/lib64", "ld-linux-x86-64.so.2"])
-            .or_else(|| library_path([&nixos_path(), "ld-linux-x86-64.so.2"])),
+        Architecture::X86_64 => {
+            // give preference to nix_path if it's defined, this prevents bugs
+            if let Some(nix_path) = nix_path_opt() {
+                library_path([&nix_path, "ld-linux-x86-64.so.2"])
+            } else {
+                library_path(["/lib64", "ld-linux-x86-64.so.2"])
+            }
+        },
         Architecture::Aarch64(_) => library_path(["/lib", "ld-linux-aarch64.so.1"]),
         _ => panic!(
             "TODO gracefully handle unsupported linux architecture: {:?}",
diff --git a/nix/sources.json b/nix/sources.json
index 7f02e9546f..1d90464d5d 100644
--- a/nix/sources.json
+++ b/nix/sources.json
@@ -17,10 +17,10 @@
         "homepage": "",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "1441fa74d213d7cc120d9d7d49e540c1fc59bc58",
-        "sha256": "152qb7ch0r4bidik33zd0a9wl0929zr0dqs5l5ksm7vh3assc7sc",
+        "rev": "51acb65b302551ac7993b437cc6863fe9fa8ae50",
+        "sha256": "0si8s2ji4prp614q3050x4sp282wxgp0mm5q50slcf5f75jw5yhh",
         "type": "tarball",
-        "url": "https://github.com/NixOS/nixpkgs/archive/1441fa74d213d7cc120d9d7d49e540c1fc59bc58.tar.gz",
+        "url": "https://github.com/NixOS/nixpkgs/archive/51acb65b302551ac7993b437cc6863fe9fa8ae50.tar.gz",
         "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
     }
 }
diff --git a/nix/zig.nix b/nix/zig.nix
deleted file mode 100644
index 55fd1680e8..0000000000
--- a/nix/zig.nix
+++ /dev/null
@@ -1,41 +0,0 @@
-{ pkgs }:
-
-let
-  version = "0.8.0";
-
-  osName = if pkgs.stdenv.isDarwin then "macos" else "linux";
-
-  splitSystem = builtins.split "-" builtins.currentSystem;
-  arch = builtins.elemAt splitSystem 0;
-  isAarch64 = arch == "aarch64";
-
-  archiveName = "zig-${osName}-${arch}-${version}";
-
-  # If your system is not aarch64, we assume it's x86_64
-  sha256 = if pkgs.stdenv.isDarwin then
-    if isAarch64 then
-      "b32d13f66d0e1ff740b3326d66a469ee6baddbd7211fa111c066d3bd57683111"
-    else
-      "279f9360b5cb23103f0395dc4d3d0d30626e699b1b4be55e98fd985b62bc6fbe"
-  else if isAarch64 then
-    "ee204ca2c2037952cf3f8b10c609373a08a291efa4af7b3c73be0f2b27720470"
-  else
-    "502625d3da3ae595c5f44a809a87714320b7a40e6dff4a895b5fa7df3391d01e";
-in pkgs.stdenv.mkDerivation {
-  pname = "zig";
-  version = version;
-  src = pkgs.fetchurl {
-    inherit sha256;
-    name = "${archiveName}.tar.xz";
-    url = "https://ziglang.org/download/${version}/${archiveName}.tar.xz";
-  };
-  phases = [ "unpackPhase" ];
-  unpackPhase = ''
-    mkdir -p $out/bin
-    tar -xf $src
-    cp ${archiveName}/zig $out/zig
-    cp -r ${archiveName}/lib $out/lib
-    ln -s "$out/zig" "$out/bin/zig"
-    chmod +x $out/bin/zig
-  '';
-}
diff --git a/shell.nix b/shell.nix
index c72505a858..204f3836bd 100644
--- a/shell.nix
+++ b/shell.nix
@@ -17,7 +17,6 @@ let

   linuxInputs = with pkgs;
     lib.optionals stdenv.isLinux [
-      glibc_multi
       valgrind
       vulkan-headers
       vulkan-loader
@@ -33,7 +32,6 @@ let

   llvmPkgs = pkgs.llvmPackages_12;

-  zig = import ./nix/zig.nix { inherit pkgs; };
   debugir = import ./nix/debugir.nix { inherit pkgs; };

   inputs = with pkgs; [
@@ -72,9 +70,9 @@ in pkgs.mkShell {
   # Additional Env vars

   LLVM_SYS_120_PREFIX = "${llvmPkgs.llvm.dev}";
-  NIXOS_GLIBC_PATH =
-    if pkgs.stdenv.isLinux then "${pkgs.glibc_multi.out}/lib" else "";
   LD_LIBRARY_PATH = with pkgs;
     lib.makeLibraryPath
     ([ pkg-config stdenv.cc.cc.lib libffi ncurses zlib ] ++ linuxInputs);
+  NIX_GLIBC_PATH =
+    if pkgs.stdenv.isLinux then "${pkgs.glibc_multi.out}/lib" else "";
 }

From 5487f8e7cbbb2bae97fbc9c0caa2037df190e17a Mon Sep 17 00:00:00 2001
From: Anton-4 <17049058+Anton-4@users.noreply.github.com>
Date: Fri, 29 Oct 2021 20:13:31 +0200
Subject: [PATCH 3/8] fmt+clippy

---
 compiler/build/src/link.rs | 12 ++++--------
 1 file changed, 4 insertions(+), 8 deletions(-)

diff --git a/compiler/build/src/link.rs b/compiler/build/src/link.rs
index 6f8a8ca478..fa3966eac6 100644
--- a/compiler/build/src/link.rs
+++ b/compiler/build/src/link.rs
@@ -530,11 +530,7 @@ pub fn rebuild_host(
 }

 fn nix_path_opt() -> Option<String> {
-    if let Some(path) = env::var_os("NIX_GLIBC_PATH") {
-        Some(path.into_string().unwrap())
-    } else {
-        None
-    }
+    env::var_os("NIX_GLIBC_PATH").map(|path| path.into_string().unwrap())
 }

 fn library_path<const N: usize>(segments: [&str; N]) -> Option<PathBuf> {
@@ -583,7 +579,7 @@ fn link_linux(
         ));
     }

-    let libcrt_path = 
+    let libcrt_path =
         // give preference to nix_path if it's defined, this prevents bugs
         if let Some(nix_path) = nix_path_opt() {
             library_path([&nix_path])
                 .unwrap()
@@ -595,7 +591,7 @@ fn link_linux(
         };

     let libgcc_name = "libgcc_s.so.1";
-    let libgcc_path = 
+    let libgcc_path =
         // give preference to nix_path if it's defined, this prevents bugs
         if let Some(nix_path) = nix_path_opt() {
             library_path([&nix_path, libgcc_name])
                 .unwrap()
@@ -615,7 +611,7 @@ fn link_linux(
             } else {
                 library_path(["/lib64", "ld-linux-x86-64.so.2"])
             }
-        },
+        }
         Architecture::Aarch64(_) => library_path(["/lib", "ld-linux-aarch64.so.1"]),
         _ => panic!(
             "TODO gracefully handle unsupported linux architecture: {:?}",

From 30e7f7b335d27658278ad76f20ad22f840bf7254 Mon Sep 17 00:00:00 2001
From: Folkert
Date: Fri, 29 Oct 2021 21:24:40 +0200
Subject: [PATCH 4/8] make solved type hash use an explicit stack (no recursion)

---
 compiler/types/src/solved_types.rs | 150 ++++++++++++++---------------
 1 file changed, 74 insertions(+), 76 deletions(-)

diff --git a/compiler/types/src/solved_types.rs b/compiler/types/src/solved_types.rs
index f796e127fb..a1768f6e7f 100644
--- a/compiler/types/src/solved_types.rs
+++ b/compiler/types/src/solved_types.rs
@@ -54,104 +54,102 @@ impl PartialEq for SolvedType {
 }

 fn hash_solved_type_help<H: Hasher>(
-    solved_type: &SolvedType,
+    initial: &SolvedType,
     flex_vars: &mut Vec<VarId>,
     state: &mut H,
 ) {
     use SolvedType::*;

-    match solved_type {
-        Flex(var_id) => {
-            var_id_hash_help(*var_id, flex_vars, state);
-        }
-        Wildcard => "wildcard".hash(state),
-        EmptyRecord => "empty_record".hash(state),
-        EmptyTagUnion => "empty_tag_union".hash(state),
-        Error => "error".hash(state),
-        Func(arguments, closure, result) => {
-            for x in arguments {
-                hash_solved_type_help(x, flex_vars, state);
-            }
+    let mut stack = Vec::with_capacity(63);

-            hash_solved_type_help(closure, flex_vars, state);
-            hash_solved_type_help(result, flex_vars, state);
-        }
-        Apply(name, arguments) => {
-            name.hash(state);
-            for x in arguments {
-                hash_solved_type_help(x, flex_vars, state);
-            }
-        }
-        Rigid(name) => name.hash(state),
-        Erroneous(problem) => problem.hash(state),
+    stack.push(initial);

-        Record { fields, ext } => {
-            for (name, x) in fields {
+    while let Some(solved_type) = stack.pop() {
+        match solved_type {
+            Flex(var_id) => {
+                var_id_hash_help(*var_id, flex_vars, state);
+            }
+            Wildcard => "wildcard".hash(state),
+            EmptyRecord => "empty_record".hash(state),
+            EmptyTagUnion => "empty_tag_union".hash(state),
+            Error => "error".hash(state),
+            Func(arguments, closure, result) => {
+                stack.extend(arguments);
+
+                stack.push(closure);
+                stack.push(result);
+            }
+            Apply(name, arguments) => {
                 name.hash(state);
-                "record_field".hash(state);
-                hash_solved_type_help(x.as_inner(), flex_vars, state);
+                stack.extend(arguments);
             }
-            hash_solved_type_help(ext, flex_vars, state);
-        }
+            Rigid(name) => name.hash(state),
+            Erroneous(problem) => problem.hash(state),

-        TagUnion(tags, ext) => {
-            for (name, arguments) in tags {
-                name.hash(state);
-                for x in arguments {
-                    hash_solved_type_help(x, flex_vars, state);
+            Record { fields, ext } => {
+                for (name, x) in fields {
+                    name.hash(state);
+                    "record_field".hash(state);
+                    stack.push(x.as_inner());
                 }
+                stack.push(ext);
             }
-            hash_solved_type_help(ext, flex_vars, state);
-        }

-        FunctionOrTagUnion(_, _, ext) => {
-            hash_solved_type_help(ext, flex_vars, state);
-        }
-
-        RecursiveTagUnion(rec, tags, ext) => {
-            var_id_hash_help(*rec, flex_vars, state);
-            for (name, arguments) in tags {
-                name.hash(state);
-                for x in arguments {
-                    hash_solved_type_help(x, flex_vars, state);
+            TagUnion(tags, ext) => {
+                for (name, arguments) in tags {
+                    name.hash(state);
+                    stack.extend(arguments);
                 }
+                stack.push(ext);
             }
-            hash_solved_type_help(ext, flex_vars, state);
-        }

-        Alias(name, arguments, solved_lambda_sets, actual) => {
-            name.hash(state);
-            for (name, x) in arguments {
+            FunctionOrTagUnion(_, _, ext) => {
+                stack.push(ext);
+            }
+
+            RecursiveTagUnion(rec, tags, ext) => {
+                var_id_hash_help(*rec, flex_vars, state);
+                for (name, arguments) in tags {
+                    name.hash(state);
+                    stack.extend(arguments);
+                }
+                stack.push(ext);
+            }
+
+            Alias(name, arguments, solved_lambda_sets, actual) => {
                 name.hash(state);
-                hash_solved_type_help(x, flex_vars, state);
+                for (name, x) in arguments {
+                    name.hash(state);
+                    stack.push(x);
+                }
+
+                for set in solved_lambda_sets {
+                    stack.push(&set.0);
+                }
+
+                stack.push(actual);
             }

-            for set in solved_lambda_sets {
-                hash_solved_type_help(&set.0, flex_vars, state);
-            }
-
-            hash_solved_type_help(actual, flex_vars, state);
-        }
-
-        HostExposedAlias {
-            name,
-            arguments,
-            lambda_set_variables: solved_lambda_sets,
-            actual,
-            actual_var,
-        } => {
-            name.hash(state);
-            for (name, x) in arguments {
+            HostExposedAlias {
+                name,
+                arguments,
+                lambda_set_variables: solved_lambda_sets,
+                actual,
+                actual_var,
+            } => {
                 name.hash(state);
-                hash_solved_type_help(x, flex_vars, state);
-            }
+                for (name, x) in arguments {
+                    name.hash(state);
+                    stack.push(x);
+                }

-            for set in solved_lambda_sets {
-                hash_solved_type_help(&set.0, flex_vars, state);
-            }
+                for set in solved_lambda_sets {
+                    stack.push(&set.0);
+                }

-            hash_solved_type_help(actual, flex_vars, state);
-            var_id_hash_help(*actual_var, flex_vars, state);
+                stack.push(actual);
+                var_id_hash_help(*actual_var, flex_vars, state);
+            }
         }
     }
 }

From e10cd0714307ff9db2aa785d23cddb0f862e74e3 Mon Sep 17 00:00:00 2001
From: Folkert
Date: Fri, 29 Oct 2021 21:25:11 +0200
Subject: [PATCH 5/8] use Entry in symbol.rs

---
 compiler/module/src/symbol.rs | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/compiler/module/src/symbol.rs b/compiler/module/src/symbol.rs
index c4961522c8..ef3e57ad73 100644
--- a/compiler/module/src/symbol.rs
+++ b/compiler/module/src/symbol.rs
@@ -562,15 +562,17 @@ impl IdentIds {
     }

     pub fn get_or_insert(&mut self, name: &Ident) -> IdentId {
-        match self.by_ident.get(name) {
-            Some(id) => *id,
-            None => {
+        use std::collections::hash_map::Entry;
+
+        match self.by_ident.entry(name.clone()) {
+            Entry::Occupied(occupied) => *occupied.get(),
+            Entry::Vacant(vacant) => {
                 let by_id = &mut self.by_id;
                 let ident_id = IdentId(by_id.len() as u32);

                 by_id.push(name.clone());

-                self.by_ident.insert(name.clone(), ident_id);
+                vacant.insert(ident_id);

                 ident_id
             }

From eda904e63ed1f749a8d9eec999f62af1e4e8dc77 Mon Sep 17 00:00:00 2001
From: Folkert
Date: Fri, 29 Oct 2021 21:26:36 +0200
Subject: [PATCH 6/8] don't register debug idents while doing work (it happens
 afterwards)

---
 compiler/mono/src/expand_rc.rs   | 4 ----
 compiler/mono/src/reset_reuse.rs | 2 --
 2 files changed, 6 deletions(-)

diff --git a/compiler/mono/src/expand_rc.rs b/compiler/mono/src/expand_rc.rs
index 5b70006c84..2812252285 100644
--- a/compiler/mono/src/expand_rc.rs
+++ b/compiler/mono/src/expand_rc.rs
@@ -179,16 +179,12 @@ impl<'a, 'i> Env<'a, 'i> {
     pub fn unique_symbol(&mut self) -> Symbol {
         let ident_id = self.ident_ids.gen_unique();

-        self.home.register_debug_idents(self.ident_ids);
-
         Symbol::new(self.home, ident_id)
     }
     #[allow(dead_code)]
     fn manual_unique_symbol(home: ModuleId, ident_ids: &mut IdentIds) -> Symbol {
         let ident_id = ident_ids.gen_unique();

-        home.register_debug_idents(ident_ids);
-
         Symbol::new(home, ident_id)
     }
 }
diff --git a/compiler/mono/src/reset_reuse.rs b/compiler/mono/src/reset_reuse.rs
index 490314c885..64fde943c5 100644
--- a/compiler/mono/src/reset_reuse.rs
+++ b/compiler/mono/src/reset_reuse.rs
@@ -58,8 +58,6 @@ impl<'a, 'i> Env<'a, 'i> {
     fn unique_symbol(&mut self) -> Symbol {
         let ident_id = self.ident_ids.gen_unique();

-        self.home.register_debug_idents(self.ident_ids);
-
         Symbol::new(self.home, ident_id)
     }
 }

From 05b5cd3429102c59033482733eb694d62194fc24 Mon Sep 17 00:00:00 2001
From: Folkert
Date: Fri, 29 Oct 2021 21:27:26 +0200
Subject: [PATCH 7/8] specializations requested in file.rs are specializations
 for the host

---
 compiler/load/src/file.rs | 29 ++++++++++++-----------------
 1 file changed, 12 insertions(+), 17 deletions(-)

diff --git a/compiler/load/src/file.rs b/compiler/load/src/file.rs
index 267b354c51..584dbbebdc 100644
--- a/compiler/load/src/file.rs
+++ b/compiler/load/src/file.rs
@@ -2067,7 +2067,7 @@ fn update<'a>(
             log!("found specializations for {:?}", module_id);
             let subs = solved_subs.into_inner();

-            for (symbol, specs) in &procs_base.pending_specializations {
+            for (symbol, specs) in &procs_base.specializations_for_host {
                 let existing = match state.all_pending_specializations.entry(*symbol) {
                     Vacant(entry) => entry.insert(MutMap::default()),
                     Occupied(entry) => entry.into_mut(),
@@ -3970,13 +3970,16 @@ fn make_specializations<'a>(
         &mut mono_env,
         procs,
         specializations_we_must_make,
-        procs_base.pending_specializations,
+        procs_base.specializations_for_host,
        &mut layout_cache,
     );

     let external_specializations_requested = procs.externals_we_need.clone();
     let procedures = procs.get_specialized_procs_without_rc(&mut mono_env);

+    // Turn `Bytes.Decode.IdentId(238)` into `Bytes.Decode.238`, we rely on this in mono tests
+    mono_env.home.register_debug_idents(mono_env.ident_ids);
+
     let make_specializations_end = SystemTime::now();
     module_timing.make_specializations = make_specializations_end
         .duration_since(make_specializations_start)
@@ -3998,20 +4001,21 @@
 struct ProcsBase<'a> {
     partial_procs: BumpMap<Symbol, PartialProc<'a>>,
     module_thunks: &'a [Symbol],
-    pending_specializations: BumpMap<Symbol, MutMap<ProcLayout<'a>, PendingSpecialization<'a>>>,
+    /// A host-exposed function must be specialized; it's a seed for subsequent specializations
+    specializations_for_host: BumpMap<Symbol, MutMap<ProcLayout<'a>, PendingSpecialization<'a>>>,
     runtime_errors: BumpMap<Symbol, &'a str>,
     imported_module_thunks: &'a [Symbol],
 }

 impl<'a> ProcsBase<'a> {
-    fn add_pending(
+    fn add_specialization_for_host(
         &mut self,
         symbol: Symbol,
         layout: ProcLayout<'a>,
         pending: PendingSpecialization<'a>,
     ) {
         let all_pending = self
-            .pending_specializations
+            .specializations_for_host
             .entry(symbol)
             .or_insert_with(|| HashMap::with_capacity_and_hasher(1, default_hasher()));
@@ -4040,7 +4044,7 @@ fn build_pending_specializations<'a>(
     let mut procs_base = ProcsBase {
         partial_procs: BumpMap::default(),
         module_thunks: &[],
-        pending_specializations: BumpMap::default(),
+        specializations_for_host: BumpMap::default(),
         runtime_errors: BumpMap::default(),
         imported_module_thunks,
     };
@@ -4146,15 +4150,6 @@ fn add_def_to_module<'a>(
                     // never gets called by Roc code, it will never
                     // get specialized!
                     if is_exposed {
-                        let mut pattern_vars = bumpalo::collections::Vec::with_capacity_in(
-                            loc_args.len(),
-                            mono_env.arena,
-                        );
-
-                        for (var, _) in loc_args.iter() {
-                            pattern_vars.push(*var);
-                        }
-
                         let layout = match layout_cache.raw_from_var(
                             mono_env.arena,
                             annotation,
@@ -4185,7 +4180,7 @@ fn add_def_to_module<'a>(
                             annotation,
                         );

-                        procs.add_pending(
+                        procs.add_specialization_for_host(
                             symbol,
                             ProcLayout::from_raw(mono_env.arena, layout),
                             pending,
@@ -4249,7 +4244,7 @@ fn add_def_to_module<'a>(
                         annotation,
                     );

-                    procs.add_pending(symbol, top_level, pending);
+                    procs.add_specialization_for_host(symbol, top_level, pending);
                 }

                 let proc = PartialProc {

From d35686f3a862eebf8e4b0b9c0b3b3b4a368f0da9 Mon Sep 17 00:00:00 2001
From: Folkert
Date: Fri, 29 Oct 2021 21:28:11 +0200
Subject: [PATCH 8/8] hash less

---
 compiler/mono/src/ir.rs | 56 +++++++++++++++++++++--------------------
 1 file changed, 29 insertions(+), 27 deletions(-)

diff --git a/compiler/mono/src/ir.rs b/compiler/mono/src/ir.rs
index b598eb48a9..379a26fa58 100644
--- a/compiler/mono/src/ir.rs
+++ b/compiler/mono/src/ir.rs
@@ -9,7 +9,7 @@ use crate::layout::{
 use bumpalo::collections::Vec;
 use bumpalo::Bump;
 use hashbrown::hash_map::Entry;
-use roc_collections::all::{default_hasher, BumpMap, BumpMapDefault, MutMap, MutSet};
+use roc_collections::all::{default_hasher, BumpMap, BumpMapDefault, MutMap};
 use roc_module::ident::{ForeignSymbol, Lowercase, TagName};
 use roc_module::low_level::LowLevel;
 use roc_module::symbol::{IdentIds, ModuleId, Symbol};
@@ -378,7 +378,8 @@ impl<'a> Proc<'a> {

 #[derive(Clone, Debug)]
 pub struct ExternalSpecializations<'a> {
-    pub specs: BumpMap<Symbol, MutSet<SolvedType>>,
+    /// Not a bumpalo vec because bumpalo is not thread safe
+    pub specs: BumpMap<Symbol, std::vec::Vec<SolvedType>>,
     _lifetime: std::marker::PhantomData<&'a u8>,
 }

@@ -394,11 +395,11 @@ impl<'a> ExternalSpecializations<'a> {
         use hashbrown::hash_map::Entry::{Occupied, Vacant};

         let existing = match self.specs.entry(symbol) {
-            Vacant(entry) => entry.insert(MutSet::default()),
+            Vacant(entry) => entry.insert(std::vec::Vec::new()),
             Occupied(entry) => entry.into_mut(),
         };

-        existing.insert(typ);
+        existing.push(typ);
     }

     pub fn extend(&mut self, other: Self) {
@@ -406,7 +407,7 @@ impl<'a> ExternalSpecializations<'a> {

         for (symbol, solved_types) in other.specs {
             let existing = match self.specs.entry(symbol) {
-                Vacant(entry) => entry.insert(MutSet::default()),
+                Vacant(entry) => entry.insert(std::vec::Vec::new()),
                 Occupied(entry) => entry.into_mut(),
             };
@@ -514,9 +515,12 @@ impl<'a> Procs<'a> {
         // by the surrounding context, so we can add pending specializations
         // for them immediately.

-        let tuple = (symbol, top_level);
-        let already_specialized = self.specialized.contains_key(&tuple);
-        let (symbol, layout) = tuple;
+        let already_specialized = self
+            .specialized
+            .keys()
+            .any(|(s, t)| *s == symbol && *t == top_level);
+
+        let layout = top_level;

         // if we've already specialized this one, no further work is needed.
         if !already_specialized {
@@ -778,8 +782,6 @@ impl<'a, 'i> Env<'a, 'i> {
     pub fn unique_symbol(&mut self) -> Symbol {
         let ident_id = self.ident_ids.gen_unique();

-        self.home.register_debug_idents(self.ident_ids);
-
         Symbol::new(self.home, ident_id)
     }

@@ -1695,19 +1697,16 @@ pub fn specialize_all<'a>(
     env: &mut Env<'a, '_>,
     mut procs: Procs<'a>,
     externals_others_need: ExternalSpecializations<'a>,
-    pending_specializations: BumpMap<Symbol, MutMap<ProcLayout<'a>, PendingSpecialization<'a>>>,
+    specializations_for_host: BumpMap<Symbol, MutMap<ProcLayout<'a>, PendingSpecialization<'a>>>,
     layout_cache: &mut LayoutCache<'a>,
 ) -> Procs<'a> {
-    specialize_all_help(env, &mut procs, externals_others_need, layout_cache);
+    specialize_externals_others_need(env, &mut procs, externals_others_need, layout_cache);

     // When calling from_can, pending_specializations should be unavailable.
     // This must be a single pass, and we must not add any more entries to it!
-
-    // observation: specialize_all_help does add to pending_specializations, but does not reference
-    // any existing values in it.
     let opt_pending_specializations = std::mem::replace(&mut procs.pending_specializations, None);

-    let it = pending_specializations
+    let it = specializations_for_host
         .into_iter()
         .chain(opt_pending_specializations.into_iter().flatten());
@@ -1774,33 +1773,35 @@ pub fn specialize_all<'a>(
     procs
 }

-fn specialize_all_help<'a>(
+fn specialize_externals_others_need<'a>(
     env: &mut Env<'a, '_>,
     procs: &mut Procs<'a>,
     externals_others_need: ExternalSpecializations<'a>,
     layout_cache: &mut LayoutCache<'a>,
 ) {
     for (symbol, solved_types) in externals_others_need.specs.iter() {
-        // for some unclear reason, the MutSet does not deduplicate according to the hash
-        // instance. So we do it manually here
+        // de-duplicate by the Hash instance (set only deduplicates by Eq instance)
         use std::collections::hash_map::DefaultHasher;
        use std::hash::{Hash, Hasher};

+        let mut seen_hashes = Vec::with_capacity_in(solved_types.len(), env.arena);
+
         let hash_the_thing = |x: &SolvedType| {
             let mut hasher = DefaultHasher::new();
             x.hash(&mut hasher);
             hasher.finish()
         };

-        let mut as_vec = Vec::from_iter_in(
-            solved_types.iter().map(|x| (hash_the_thing(x), x)),
-            env.arena,
-        );
+        for solved_type in solved_types {
+            let hash = hash_the_thing(solved_type);

-        as_vec.sort_by_key(|(k, _)| *k);
-        as_vec.dedup_by_key(|(k, _)| *k);
+            if seen_hashes.iter().any(|h| *h == hash) {
+                // we've seen this one already
+                continue;
+            }
+
+            seen_hashes.push(hash);

-        for (_, solved_type) in as_vec {
             let name = *symbol;

             let partial_proc = match procs.partial_procs.get(&name) {
@@ -6535,7 +6536,8 @@ fn call_by_name_help<'a>(
     // If we've already specialized this one, no further work is needed.
     if procs
         .specialized
-        .contains_key(&(proc_name, top_level_layout))
+        .keys()
+        .any(|x| x == &(proc_name, top_level_layout))
     {
         debug_assert_eq!(
             argument_layouts.len(),