Merge branch 'trunk' of github.com:rtfeldman/roc into fix-compilation-nix-macos

This commit is contained in:
Anton-4 2021-10-30 19:29:12 +02:00
commit 9f1d61714a
11 changed files with 158 additions and 200 deletions

View File

@ -1,6 +1,6 @@
on:
schedule:
- cron: '0 0 * * *'
- cron: '0 9 * * *'
name: Nightly Release Build
@ -13,8 +13,6 @@ jobs:
FORCE_COLOR: 1 # for earthly logging
steps:
- uses: actions/checkout@v2
- name: install earthly
run: "sudo /bin/sh -c 'wget https://github.com/earthly/earthly/releases/latest/download/earthly-linux-amd64 -O /usr/local/bin/earthly && chmod +x /usr/local/bin/earthly && /usr/local/bin/earthly bootstrap --with-autocomplete'"
- name: Earthly print version
run: earthly --version
- name: install dependencies, build, run tests, build release

View File

@ -529,15 +529,8 @@ pub fn rebuild_host(
}
}
fn nixos_path() -> String {
env::var("NIXOS_GLIBC_PATH").unwrap_or_else(|_| {
panic!(
"We couldn't find glibc! We tried looking for NIXOS_GLIBC_PATH
to find it via Nix, but that didn't work either. Please file a bug report.
This will only be an issue until we implement surgical linking.",
)
})
fn nix_path_opt() -> Option<String> {
env::var_os("NIX_GLIBC_PATH").map(|path| path.into_string().unwrap())
}
fn library_path<const N: usize>(segments: [&str; N]) -> Option<PathBuf> {
@ -586,21 +579,39 @@ fn link_linux(
));
}
let libcrt_path = library_path(["/usr", "lib", &architecture])
.or_else(|| library_path(["/usr", "lib"]))
.or_else(|| library_path([&nixos_path()]))
.unwrap();
let libcrt_path =
// Give preference to nix_path when it's defined; this prevents bugs caused by mixing Nix-provided and system libraries.
if let Some(nix_path) = nix_path_opt() {
library_path([&nix_path])
.unwrap()
} else {
library_path(["/usr", "lib", &architecture])
.or_else(|| library_path(["/usr", "lib"]))
.unwrap()
};
let libgcc_name = "libgcc_s.so.1";
let libgcc_path = library_path(["/lib", &architecture, libgcc_name])
.or_else(|| library_path(["/usr", "lib", &architecture, libgcc_name]))
.or_else(|| library_path(["/usr", "lib", libgcc_name]))
.or_else(|| library_path([&nixos_path(), libgcc_name]))
.unwrap();
let libgcc_path =
// Give preference to nix_path when it's defined; this prevents bugs caused by mixing Nix-provided and system libraries.
if let Some(nix_path) = nix_path_opt() {
library_path([&nix_path, libgcc_name])
.unwrap()
} else {
library_path(["/lib", &architecture, libgcc_name])
.or_else(|| library_path(["/usr", "lib", &architecture, libgcc_name]))
.or_else(|| library_path(["/usr", "lib", libgcc_name]))
.unwrap()
};
let ld_linux = match target.architecture {
Architecture::X86_64 => library_path(["/lib64", "ld-linux-x86-64.so.2"])
.or_else(|| library_path([&nixos_path(), "ld-linux-x86-64.so.2"])),
Architecture::X86_64 => {
// Give preference to nix_path when it's defined; this prevents bugs caused by mixing Nix-provided and system libraries.
if let Some(nix_path) = nix_path_opt() {
library_path([&nix_path, "ld-linux-x86-64.so.2"])
} else {
library_path(["/lib64", "ld-linux-x86-64.so.2"])
}
}
Architecture::Aarch64(_) => library_path(["/lib", "ld-linux-aarch64.so.1"]),
_ => panic!(
"TODO gracefully handle unsupported linux architecture: {:?}",

View File

@ -2067,7 +2067,7 @@ fn update<'a>(
log!("found specializations for {:?}", module_id);
let subs = solved_subs.into_inner();
for (symbol, specs) in &procs_base.pending_specializations {
for (symbol, specs) in &procs_base.specializations_for_host {
let existing = match state.all_pending_specializations.entry(*symbol) {
Vacant(entry) => entry.insert(MutMap::default()),
Occupied(entry) => entry.into_mut(),
@ -3970,13 +3970,16 @@ fn make_specializations<'a>(
&mut mono_env,
procs,
specializations_we_must_make,
procs_base.pending_specializations,
procs_base.specializations_for_host,
&mut layout_cache,
);
let external_specializations_requested = procs.externals_we_need.clone();
let procedures = procs.get_specialized_procs_without_rc(&mut mono_env);
// Turn `Bytes.Decode.IdentId(238)` into `Bytes.Decode.238`, we rely on this in mono tests
mono_env.home.register_debug_idents(mono_env.ident_ids);
let make_specializations_end = SystemTime::now();
module_timing.make_specializations = make_specializations_end
.duration_since(make_specializations_start)
@ -3998,20 +4001,21 @@ fn make_specializations<'a>(
struct ProcsBase<'a> {
partial_procs: BumpMap<Symbol, PartialProc<'a>>,
module_thunks: &'a [Symbol],
pending_specializations: BumpMap<Symbol, MutMap<ProcLayout<'a>, PendingSpecialization<'a>>>,
/// A host-exposed function must be specialized; it's a seed for subsequent specializations
specializations_for_host: BumpMap<Symbol, MutMap<ProcLayout<'a>, PendingSpecialization<'a>>>,
runtime_errors: BumpMap<Symbol, &'a str>,
imported_module_thunks: &'a [Symbol],
}
impl<'a> ProcsBase<'a> {
fn add_pending(
fn add_specialization_for_host(
&mut self,
symbol: Symbol,
layout: ProcLayout<'a>,
pending: PendingSpecialization<'a>,
) {
let all_pending = self
.pending_specializations
.specializations_for_host
.entry(symbol)
.or_insert_with(|| HashMap::with_capacity_and_hasher(1, default_hasher()));
@ -4040,7 +4044,7 @@ fn build_pending_specializations<'a>(
let mut procs_base = ProcsBase {
partial_procs: BumpMap::default(),
module_thunks: &[],
pending_specializations: BumpMap::default(),
specializations_for_host: BumpMap::default(),
runtime_errors: BumpMap::default(),
imported_module_thunks,
};
@ -4146,15 +4150,6 @@ fn add_def_to_module<'a>(
// never gets called by Roc code, it will never
// get specialized!
if is_exposed {
let mut pattern_vars = bumpalo::collections::Vec::with_capacity_in(
loc_args.len(),
mono_env.arena,
);
for (var, _) in loc_args.iter() {
pattern_vars.push(*var);
}
let layout = match layout_cache.raw_from_var(
mono_env.arena,
annotation,
@ -4185,7 +4180,7 @@ fn add_def_to_module<'a>(
annotation,
);
procs.add_pending(
procs.add_specialization_for_host(
symbol,
ProcLayout::from_raw(mono_env.arena, layout),
pending,
@ -4249,7 +4244,7 @@ fn add_def_to_module<'a>(
annotation,
);
procs.add_pending(symbol, top_level, pending);
procs.add_specialization_for_host(symbol, top_level, pending);
}
let proc = PartialProc {

View File

@ -562,15 +562,17 @@ impl IdentIds {
}
pub fn get_or_insert(&mut self, name: &Ident) -> IdentId {
match self.by_ident.get(name) {
Some(id) => *id,
None => {
use std::collections::hash_map::Entry;
match self.by_ident.entry(name.clone()) {
Entry::Occupied(occupied) => *occupied.get(),
Entry::Vacant(vacant) => {
let by_id = &mut self.by_id;
let ident_id = IdentId(by_id.len() as u32);
by_id.push(name.clone());
self.by_ident.insert(name.clone(), ident_id);
vacant.insert(ident_id);
ident_id
}

View File

@ -179,16 +179,12 @@ impl<'a, 'i> Env<'a, 'i> {
pub fn unique_symbol(&mut self) -> Symbol {
let ident_id = self.ident_ids.gen_unique();
self.home.register_debug_idents(self.ident_ids);
Symbol::new(self.home, ident_id)
}
#[allow(dead_code)]
fn manual_unique_symbol(home: ModuleId, ident_ids: &mut IdentIds) -> Symbol {
let ident_id = ident_ids.gen_unique();
home.register_debug_idents(ident_ids);
Symbol::new(home, ident_id)
}
}

View File

@ -9,7 +9,7 @@ use crate::layout::{
use bumpalo::collections::Vec;
use bumpalo::Bump;
use hashbrown::hash_map::Entry;
use roc_collections::all::{default_hasher, BumpMap, BumpMapDefault, MutMap, MutSet};
use roc_collections::all::{default_hasher, BumpMap, BumpMapDefault, MutMap};
use roc_module::ident::{ForeignSymbol, Lowercase, TagName};
use roc_module::low_level::LowLevel;
use roc_module::symbol::{IdentIds, ModuleId, Symbol};
@ -378,7 +378,8 @@ impl<'a> Proc<'a> {
#[derive(Clone, Debug)]
pub struct ExternalSpecializations<'a> {
pub specs: BumpMap<Symbol, MutSet<SolvedType>>,
/// Not a bumpalo vec because bumpalo is not thread safe
pub specs: BumpMap<Symbol, std::vec::Vec<SolvedType>>,
_lifetime: std::marker::PhantomData<&'a u8>,
}
@ -394,11 +395,11 @@ impl<'a> ExternalSpecializations<'a> {
use hashbrown::hash_map::Entry::{Occupied, Vacant};
let existing = match self.specs.entry(symbol) {
Vacant(entry) => entry.insert(MutSet::default()),
Vacant(entry) => entry.insert(std::vec::Vec::new()),
Occupied(entry) => entry.into_mut(),
};
existing.insert(typ);
existing.push(typ);
}
pub fn extend(&mut self, other: Self) {
@ -406,7 +407,7 @@ impl<'a> ExternalSpecializations<'a> {
for (symbol, solved_types) in other.specs {
let existing = match self.specs.entry(symbol) {
Vacant(entry) => entry.insert(MutSet::default()),
Vacant(entry) => entry.insert(std::vec::Vec::new()),
Occupied(entry) => entry.into_mut(),
};
@ -514,9 +515,12 @@ impl<'a> Procs<'a> {
// by the surrounding context, so we can add pending specializations
// for them immediately.
let tuple = (symbol, top_level);
let already_specialized = self.specialized.contains_key(&tuple);
let (symbol, layout) = tuple;
let already_specialized = self
.specialized
.keys()
.any(|(s, t)| *s == symbol && *t == top_level);
let layout = top_level;
// if we've already specialized this one, no further work is needed.
if !already_specialized {
@ -778,8 +782,6 @@ impl<'a, 'i> Env<'a, 'i> {
pub fn unique_symbol(&mut self) -> Symbol {
let ident_id = self.ident_ids.gen_unique();
self.home.register_debug_idents(self.ident_ids);
Symbol::new(self.home, ident_id)
}
@ -1695,19 +1697,16 @@ pub fn specialize_all<'a>(
env: &mut Env<'a, '_>,
mut procs: Procs<'a>,
externals_others_need: ExternalSpecializations<'a>,
pending_specializations: BumpMap<Symbol, MutMap<ProcLayout<'a>, PendingSpecialization<'a>>>,
specializations_for_host: BumpMap<Symbol, MutMap<ProcLayout<'a>, PendingSpecialization<'a>>>,
layout_cache: &mut LayoutCache<'a>,
) -> Procs<'a> {
specialize_all_help(env, &mut procs, externals_others_need, layout_cache);
specialize_externals_others_need(env, &mut procs, externals_others_need, layout_cache);
// When calling from_can, pending_specializations should be unavailable.
// This must be a single pass, and we must not add any more entries to it!
// observation: specialize_all_help does add to pending_specializations, but does not reference
// any existing values in it.
let opt_pending_specializations = std::mem::replace(&mut procs.pending_specializations, None);
let it = pending_specializations
let it = specializations_for_host
.into_iter()
.chain(opt_pending_specializations.into_iter().flatten());
@ -1774,33 +1773,35 @@ pub fn specialize_all<'a>(
procs
}
fn specialize_all_help<'a>(
fn specialize_externals_others_need<'a>(
env: &mut Env<'a, '_>,
procs: &mut Procs<'a>,
externals_others_need: ExternalSpecializations<'a>,
layout_cache: &mut LayoutCache<'a>,
) {
for (symbol, solved_types) in externals_others_need.specs.iter() {
// for some unclear reason, the MutSet does not deduplicate according to the hash
// instance. So we do it manually here
// De-duplicate by the Hash instance (the set only deduplicates by its Eq instance).
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
let mut seen_hashes = Vec::with_capacity_in(solved_types.len(), env.arena);
let hash_the_thing = |x: &SolvedType| {
let mut hasher = DefaultHasher::new();
x.hash(&mut hasher);
hasher.finish()
};
let mut as_vec = Vec::from_iter_in(
solved_types.iter().map(|x| (hash_the_thing(x), x)),
env.arena,
);
for solved_type in solved_types {
let hash = hash_the_thing(solved_type);
as_vec.sort_by_key(|(k, _)| *k);
as_vec.dedup_by_key(|(k, _)| *k);
if seen_hashes.iter().any(|h| *h == hash) {
// we've seen this one already
continue;
}
seen_hashes.push(hash);
for (_, solved_type) in as_vec {
let name = *symbol;
let partial_proc = match procs.partial_procs.get(&name) {
@ -6535,7 +6536,8 @@ fn call_by_name_help<'a>(
// If we've already specialized this one, no further work is needed.
if procs
.specialized
.contains_key(&(proc_name, top_level_layout))
.keys()
.any(|x| x == &(proc_name, top_level_layout))
{
debug_assert_eq!(
argument_layouts.len(),

View File

@ -58,8 +58,6 @@ impl<'a, 'i> Env<'a, 'i> {
fn unique_symbol(&mut self) -> Symbol {
let ident_id = self.ident_ids.gen_unique();
self.home.register_debug_idents(self.ident_ids);
Symbol::new(self.home, ident_id)
}
}

View File

@ -54,104 +54,102 @@ impl PartialEq for SolvedType {
}
fn hash_solved_type_help<H: Hasher>(
solved_type: &SolvedType,
initial: &SolvedType,
flex_vars: &mut Vec<VarId>,
state: &mut H,
) {
use SolvedType::*;
match solved_type {
Flex(var_id) => {
var_id_hash_help(*var_id, flex_vars, state);
}
Wildcard => "wildcard".hash(state),
EmptyRecord => "empty_record".hash(state),
EmptyTagUnion => "empty_tag_union".hash(state),
Error => "error".hash(state),
Func(arguments, closure, result) => {
for x in arguments {
hash_solved_type_help(x, flex_vars, state);
}
let mut stack = Vec::with_capacity(63);
hash_solved_type_help(closure, flex_vars, state);
hash_solved_type_help(result, flex_vars, state);
}
Apply(name, arguments) => {
name.hash(state);
for x in arguments {
hash_solved_type_help(x, flex_vars, state);
}
}
Rigid(name) => name.hash(state),
Erroneous(problem) => problem.hash(state),
stack.push(initial);
Record { fields, ext } => {
for (name, x) in fields {
while let Some(solved_type) = stack.pop() {
match solved_type {
Flex(var_id) => {
var_id_hash_help(*var_id, flex_vars, state);
}
Wildcard => "wildcard".hash(state),
EmptyRecord => "empty_record".hash(state),
EmptyTagUnion => "empty_tag_union".hash(state),
Error => "error".hash(state),
Func(arguments, closure, result) => {
stack.extend(arguments);
stack.push(closure);
stack.push(result);
}
Apply(name, arguments) => {
name.hash(state);
"record_field".hash(state);
hash_solved_type_help(x.as_inner(), flex_vars, state);
stack.extend(arguments);
}
hash_solved_type_help(ext, flex_vars, state);
}
Rigid(name) => name.hash(state),
Erroneous(problem) => problem.hash(state),
TagUnion(tags, ext) => {
for (name, arguments) in tags {
name.hash(state);
for x in arguments {
hash_solved_type_help(x, flex_vars, state);
Record { fields, ext } => {
for (name, x) in fields {
name.hash(state);
"record_field".hash(state);
stack.push(x.as_inner());
}
stack.push(ext);
}
hash_solved_type_help(ext, flex_vars, state);
}
FunctionOrTagUnion(_, _, ext) => {
hash_solved_type_help(ext, flex_vars, state);
}
RecursiveTagUnion(rec, tags, ext) => {
var_id_hash_help(*rec, flex_vars, state);
for (name, arguments) in tags {
name.hash(state);
for x in arguments {
hash_solved_type_help(x, flex_vars, state);
TagUnion(tags, ext) => {
for (name, arguments) in tags {
name.hash(state);
stack.extend(arguments);
}
stack.push(ext);
}
hash_solved_type_help(ext, flex_vars, state);
}
Alias(name, arguments, solved_lambda_sets, actual) => {
name.hash(state);
for (name, x) in arguments {
FunctionOrTagUnion(_, _, ext) => {
stack.push(ext);
}
RecursiveTagUnion(rec, tags, ext) => {
var_id_hash_help(*rec, flex_vars, state);
for (name, arguments) in tags {
name.hash(state);
stack.extend(arguments);
}
stack.push(ext);
}
Alias(name, arguments, solved_lambda_sets, actual) => {
name.hash(state);
hash_solved_type_help(x, flex_vars, state);
for (name, x) in arguments {
name.hash(state);
stack.push(x);
}
for set in solved_lambda_sets {
stack.push(&set.0);
}
stack.push(actual);
}
for set in solved_lambda_sets {
hash_solved_type_help(&set.0, flex_vars, state);
}
hash_solved_type_help(actual, flex_vars, state);
}
HostExposedAlias {
name,
arguments,
lambda_set_variables: solved_lambda_sets,
actual,
actual_var,
} => {
name.hash(state);
for (name, x) in arguments {
HostExposedAlias {
name,
arguments,
lambda_set_variables: solved_lambda_sets,
actual,
actual_var,
} => {
name.hash(state);
hash_solved_type_help(x, flex_vars, state);
}
for (name, x) in arguments {
name.hash(state);
stack.push(x);
}
for set in solved_lambda_sets {
hash_solved_type_help(&set.0, flex_vars, state);
}
for set in solved_lambda_sets {
stack.push(&set.0);
}
hash_solved_type_help(actual, flex_vars, state);
var_id_hash_help(*actual_var, flex_vars, state);
stack.push(actual);
var_id_hash_help(*actual_var, flex_vars, state);
}
}
}
}

View File

@ -17,10 +17,10 @@
"homepage": "",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "1441fa74d213d7cc120d9d7d49e540c1fc59bc58",
"sha256": "152qb7ch0r4bidik33zd0a9wl0929zr0dqs5l5ksm7vh3assc7sc",
"rev": "51acb65b302551ac7993b437cc6863fe9fa8ae50",
"sha256": "0si8s2ji4prp614q3050x4sp282wxgp0mm5q50slcf5f75jw5yhh",
"type": "tarball",
"url": "https://github.com/NixOS/nixpkgs/archive/1441fa74d213d7cc120d9d7d49e540c1fc59bc58.tar.gz",
"url": "https://github.com/NixOS/nixpkgs/archive/51acb65b302551ac7993b437cc6863fe9fa8ae50.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
}
}

View File

@ -1,41 +0,0 @@
{ pkgs }:
let
version = "0.8.0";
osName = if pkgs.stdenv.isDarwin then "macos" else "linux";
splitSystem = builtins.split "-" builtins.currentSystem;
arch = builtins.elemAt splitSystem 0;
isAarch64 = arch == "aarch64";
archiveName = "zig-${osName}-${arch}-${version}";
# If your system is not aarch64, we assume it's x86_64
sha256 = if pkgs.stdenv.isDarwin then
if isAarch64 then
"b32d13f66d0e1ff740b3326d66a469ee6baddbd7211fa111c066d3bd57683111"
else
"279f9360b5cb23103f0395dc4d3d0d30626e699b1b4be55e98fd985b62bc6fbe"
else if isAarch64 then
"ee204ca2c2037952cf3f8b10c609373a08a291efa4af7b3c73be0f2b27720470"
else
"502625d3da3ae595c5f44a809a87714320b7a40e6dff4a895b5fa7df3391d01e";
in pkgs.stdenv.mkDerivation {
pname = "zig";
version = version;
src = pkgs.fetchurl {
inherit sha256;
name = "${archiveName}.tar.xz";
url = "https://ziglang.org/download/${version}/${archiveName}.tar.xz";
};
phases = [ "unpackPhase" ];
unpackPhase = ''
mkdir -p $out/bin
tar -xf $src
cp ${archiveName}/zig $out/zig
cp -r ${archiveName}/lib $out/lib
ln -s "$out/zig" "$out/bin/zig"
chmod +x $out/bin/zig
'';
}

View File

@ -17,7 +17,6 @@ let
linuxInputs = with pkgs;
lib.optionals stdenv.isLinux [
glibc_multi
valgrind
vulkan-headers
vulkan-loader
@ -33,7 +32,6 @@ let
llvmPkgs = pkgs.llvmPackages_12;
zig = import ./nix/zig.nix { inherit pkgs; };
debugir = import ./nix/debugir.nix { inherit pkgs; };
inputs = with pkgs; [
@ -72,7 +70,7 @@ in pkgs.mkShell {
# Additional Env vars
LLVM_SYS_120_PREFIX = "${llvmPkgs.llvm.dev}";
NIXOS_GLIBC_PATH =
NIX_GLIBC_PATH =
if pkgs.stdenv.isLinux then "${pkgs.glibc_multi.out}/lib" else "";
LD_LIBRARY_PATH = with pkgs;
lib.makeLibraryPath
@ -106,4 +104,5 @@ in pkgs.mkShell {
# above regardless of system! That'd set us up for cross-compilation as well.
"";
}