Merge pull request #1324 from rtfeldman/roc_alloc

Richard Feldman 2021-05-25 00:10:00 -04:00 committed by GitHub
commit f7517db80d
35 changed files with 1084 additions and 637 deletions

View File

@ -1,6 +1,7 @@
use crate::repl::eval;
use bumpalo::Bump;
use inkwell::context::Context;
use inkwell::module::Linkage;
use roc_build::link::module_to_dylib;
use roc_build::program::FunctionIterator;
use roc_can::builtins::builtin_defs_map;
@ -8,6 +9,7 @@ use roc_collections::all::{MutMap, MutSet};
use roc_fmt::annotation::Formattable;
use roc_fmt::annotation::{Newlines, Parens};
use roc_gen::llvm::build::{build_proc, build_proc_header, OptLevel};
use roc_gen::llvm::externs::add_default_roc_externs;
use roc_load::file::LoadingProblem;
use roc_parse::parser::SyntaxError;
use roc_types::pretty_print::{content_to_string, name_all_type_vars};
@ -126,14 +128,15 @@ pub fn gen_and_eval<'a>(
Ok(ReplOutput::Problems(lines))
} else {
let context = Context::create();
let ptr_bytes = target.pointer_width().unwrap().bytes() as u32;
let module = arena.alloc(roc_gen::llvm::build::module_from_builtins(&context, ""));
let builder = context.create_builder();
let ptr_bytes = target.pointer_width().unwrap().bytes() as u32;
let module = arena.alloc(roc_gen::llvm::build::module_from_builtins(&context, ""));
// Add roc_alloc, roc_realloc, and roc_dealloc, since the repl has no
// platform to provide them.
add_default_roc_externs(&context, module, &builder, ptr_bytes);
// mark our zig-defined builtins as internal
use inkwell::module::Linkage;
for function in FunctionIterator::from_module(module) {
let name = function.get_name().to_str().unwrap();
if name.starts_with("roc_builtins") {
@ -256,14 +259,17 @@ pub fn gen_and_eval<'a>(
module_pass.run_on(env.module);
// Verify the module
if let Err(errors) = env.module.verify() {
panic!("Errors defining module: {:?}", errors);
}
// Uncomment this to see the module's optimized LLVM instruction output:
// env.module.print_to_stderr();
// Verify the module
if let Err(errors) = env.module.verify() {
panic!(
"Errors defining module: {}\n\nUncomment things nearby to see more details.",
errors
);
}
let lib = module_to_dylib(&env.module, &target, opt_level)
.expect("Error loading compiled dylib for test");
let res_answer = unsafe {
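For context on the hunks above: add_default_roc_externs supplies the repl with stand-in definitions of the allocation functions a platform would normally export. The C ABI it satisfies is the same trio the host.zig files later in this diff define — shown here as a sketch, forwarding straight to libc:

extern fn malloc(size: usize) callconv(.C) ?*c_void;
extern fn realloc(c_ptr: [*]align(@alignOf(u128)) u8, size: usize) callconv(.C) ?*c_void;
extern fn free(c_ptr: [*]align(@alignOf(u128)) u8) callconv(.C) void;

// compiled Roc code calls these three symbols for all heap management
export fn roc_alloc(size: usize, alignment: u32) callconv(.C) ?*c_void {
    return malloc(size);
}

export fn roc_realloc(c_ptr: *c_void, new_size: usize, old_size: usize, alignment: u32) callconv(.C) ?*c_void {
    return realloc(@alignCast(16, @ptrCast([*]u8, c_ptr)), new_size);
}

export fn roc_dealloc(c_ptr: *c_void, alignment: u32) callconv(.C) void {
    free(@alignCast(16, @ptrCast([*]u8, c_ptr)));
}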

View File

@ -24,6 +24,22 @@ const Allocator = mem.Allocator;
extern fn roc__mainForHost_1_exposed(*RocCallResult) void;
extern fn malloc(size: usize) callconv(.C) ?*c_void;
extern fn realloc(c_ptr: [*]align(@alignOf(u128)) u8, size: usize) callconv(.C) ?*c_void;
extern fn free(c_ptr: [*]align(@alignOf(u128)) u8) callconv(.C) void;
export fn roc_alloc(size: usize, alignment: u32) callconv(.C) ?*c_void {
return malloc(size);
}
export fn roc_realloc(c_ptr: *c_void, new_size: usize, old_size: usize, alignment: u32) callconv(.C) ?*c_void {
return realloc(@alignCast(16, @ptrCast([*]u8, c_ptr)), new_size);
}
export fn roc_dealloc(c_ptr: *c_void, alignment: u32) callconv(.C) void {
free(@alignCast(16, @ptrCast([*]u8, c_ptr)));
}
const RocCallResult = extern struct { flag: usize, content: RocStr };
const Unit = extern struct {};
@ -45,7 +61,7 @@ pub export fn main() i32 {
// stdout the result
stdout.print("{}\n", .{callresult.content.asSlice()}) catch unreachable;
callresult.content.deinit(std.heap.c_allocator);
callresult.content.deinit();
// end time
var ts2: std.os.timespec = undefined;
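A note on the trio above: the alignment argument is ignored because malloc already returns memory aligned for any fundamental type (16 bytes on common 64-bit targets), which is what the @alignCast(16, ...) relies on. A host that wanted to honor the argument explicitly could use C11's aligned_alloc instead — a hypothetical variant, not part of this PR:

// hypothetical alternative: honor the requested alignment via C11's
// aligned_alloc (which requires size to be a multiple of the alignment)
extern fn aligned_alloc(alignment: usize, size: usize) callconv(.C) ?*c_void;

export fn roc_alloc(size: usize, alignment: u32) callconv(.C) ?*c_void {
    const a = @as(usize, alignment);
    const rounded_size = (size + a - 1) / a * a;
    return aligned_alloc(a, rounded_size);
}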

View File

@ -24,6 +24,22 @@ const Allocator = mem.Allocator;
extern fn roc__mainForHost_1_exposed(*RocCallResult) void;
extern fn malloc(size: usize) callconv(.C) ?*c_void;
extern fn realloc(c_ptr: [*]align(@alignOf(u128)) u8, size: usize) callconv(.C) ?*c_void;
extern fn free(c_ptr: [*]align(@alignOf(u128)) u8) callconv(.C) void;
export fn roc_alloc(size: usize, alignment: u32) callconv(.C) ?*c_void {
return malloc(size);
}
export fn roc_realloc(c_ptr: *c_void, new_size: usize, old_size: usize, alignment: u32) callconv(.C) ?*c_void {
return realloc(@alignCast(16, @ptrCast([*]u8, c_ptr)), new_size);
}
export fn roc_dealloc(c_ptr: *c_void, alignment: u32) callconv(.C) void {
free(@alignCast(16, @ptrCast([*]u8, c_ptr)));
}
const RocCallResult = extern struct { flag: usize, content: RocStr };
const Unit = extern struct {};
@ -45,7 +61,7 @@ pub export fn main() i32 {
// stdout the result
stdout.print("{}\n", .{callresult.content.asSlice()}) catch unreachable;
callresult.content.deinit(std.heap.c_allocator);
callresult.content.deinit();
// end time
var ts2: std.os.timespec = undefined;

View File

@ -2,7 +2,6 @@ const std = @import("std");
const testing = std.testing;
const expectEqual = testing.expectEqual;
const mem = std.mem;
const Allocator = mem.Allocator;
const assert = std.debug.assert;
const utils = @import("utils.zig");
@ -74,7 +73,7 @@ const Alignment = packed enum(u8) {
Align8KeyFirst,
Align8ValueFirst,
fn toUsize(self: Alignment) usize {
fn toU32(self: Alignment) u32 {
switch (self) {
.Align16KeyFirst => return 16,
.Align16ValueFirst => return 16,
@ -94,20 +93,18 @@ const Alignment = packed enum(u8) {
};
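An aside on the Alignment enum above (illustration, not part of the diff): one byte now travels across the C ABI carrying two facts — the numeric slot alignment, extracted by toU32, and whether keys precede values in each slot, queried via keyFirst by dictKeys and dictValues further down.

test "Alignment carries both the alignment and the slot order" {
    const a = Alignment.Align16KeyFirst;
    expectEqual(@as(u32, 16), a.toU32());
    expectEqual(true, a.keyFirst());
}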
pub fn decref(
allocator: *Allocator,
alignment: Alignment,
bytes_or_null: ?[*]u8,
data_bytes: usize,
alignment: Alignment,
) void {
return utils.decref(allocator, alignment.toUsize(), bytes_or_null, data_bytes);
return utils.decref(bytes_or_null, data_bytes, alignment.toU32());
}
pub fn allocateWithRefcount(
allocator: *Allocator,
alignment: Alignment,
data_bytes: usize,
alignment: Alignment,
) [*]u8 {
return utils.allocateWithRefcount(allocator, alignment.toUsize(), data_bytes);
return utils.allocateWithRefcount(data_bytes, alignment.toU32());
}
pub const RocDict = extern struct {
@ -124,7 +121,6 @@ pub const RocDict = extern struct {
}
pub fn allocate(
allocator: *Allocator,
number_of_levels: usize,
number_of_entries: usize,
alignment: Alignment,
@ -136,7 +132,7 @@ pub const RocDict = extern struct {
const data_bytes = number_of_slots * slot_size;
return RocDict{
.dict_bytes = allocateWithRefcount(allocator, alignment, data_bytes),
.dict_bytes = allocateWithRefcount(data_bytes, alignment),
.number_of_levels = number_of_levels,
.dict_entries_len = number_of_entries,
};
@ -144,7 +140,6 @@ pub const RocDict = extern struct {
pub fn reallocate(
self: RocDict,
allocator: *Allocator,
alignment: Alignment,
key_width: usize,
value_width: usize,
@ -157,7 +152,7 @@ pub const RocDict = extern struct {
const delta_capacity = new_capacity - old_capacity;
const data_bytes = new_capacity * slot_size;
const first_slot = allocateWithRefcount(allocator, alignment, data_bytes);
const first_slot = allocateWithRefcount(data_bytes, alignment);
// transfer the memory
@ -204,7 +199,7 @@ pub const RocDict = extern struct {
};
// NOTE we fuse an increment of all keys/values with a decrement of the input dict
decref(allocator, alignment, self.dict_bytes, self.capacity() * slotSize(key_width, value_width));
decref(self.dict_bytes, self.capacity() * slotSize(key_width, value_width), alignment);
return result;
}
@ -236,7 +231,7 @@ pub const RocDict = extern struct {
return totalCapacityAtLevel(self.number_of_levels);
}
pub fn makeUnique(self: RocDict, allocator: *Allocator, alignment: Alignment, key_width: usize, value_width: usize) RocDict {
pub fn makeUnique(self: RocDict, alignment: Alignment, key_width: usize, value_width: usize) RocDict {
if (self.isEmpty()) {
return self;
}
@ -246,7 +241,7 @@ pub const RocDict = extern struct {
}
// unfortunately, we have to clone
var new_dict = RocDict.allocate(allocator, self.number_of_levels, self.dict_entries_len, alignment, key_width, value_width);
var new_dict = RocDict.allocate(self.number_of_levels, self.dict_entries_len, alignment, key_width, value_width);
var old_bytes: [*]u8 = @ptrCast([*]u8, self.dict_bytes);
var new_bytes: [*]u8 = @ptrCast([*]u8, new_dict.dict_bytes);
@ -256,7 +251,7 @@ pub const RocDict = extern struct {
// NOTE we fuse an increment of all keys/values with a decrement of the input dict
const data_bytes = self.capacity() * slotSize(key_width, value_width);
decref(allocator, alignment, self.dict_bytes, data_bytes);
decref(self.dict_bytes, data_bytes, alignment);
return new_dict;
}
@ -423,7 +418,7 @@ const Caller3 = fn (?[*]u8, ?[*]u8, ?[*]u8, ?[*]u8, ?[*]u8) callconv(.C) void;
pub fn dictInsert(input: RocDict, alignment: Alignment, key: Opaque, key_width: usize, value: Opaque, value_width: usize, hash_fn: HashFn, is_eq: EqFn, dec_key: Dec, dec_value: Dec, output: *RocDict) callconv(.C) void {
var seed: u64 = INITIAL_SEED;
var result = input.makeUnique(std.heap.c_allocator, alignment, key_width, value_width);
var result = input.makeUnique(alignment, key_width, value_width);
var current_level: usize = 1;
var current_level_size: usize = 8;
@ -431,7 +426,7 @@ pub fn dictInsert(input: RocDict, alignment: Alignment, key: Opaque, key_width:
while (true) {
if (current_level > result.number_of_levels) {
result = result.reallocate(std.heap.c_allocator, alignment, key_width, value_width);
result = result.reallocate(alignment, key_width, value_width);
}
const hash = hash_fn(seed, key);
@ -487,7 +482,7 @@ pub fn dictRemove(input: RocDict, alignment: Alignment, key: Opaque, key_width:
return;
},
MaybeIndex.index => |index| {
var dict = input.makeUnique(std.heap.c_allocator, alignment, key_width, value_width);
var dict = input.makeUnique(alignment, key_width, value_width);
assert(index < dict.capacity());
@ -502,7 +497,7 @@ pub fn dictRemove(input: RocDict, alignment: Alignment, key: Opaque, key_width:
// if the dict is now completely empty, free its allocation
if (dict.dict_entries_len == 0) {
const data_bytes = dict.capacity() * slotSize(key_width, value_width);
decref(std.heap.c_allocator, alignment, dict.dict_bytes, data_bytes);
decref(dict.dict_bytes, data_bytes, alignment);
output.* = RocDict.empty();
return;
}
@ -575,7 +570,7 @@ pub fn dictKeys(dict: RocDict, alignment: Alignment, key_width: usize, value_wid
}
const data_bytes = length * key_width;
var ptr = allocateWithRefcount(std.heap.c_allocator, alignment, data_bytes);
var ptr = allocateWithRefcount(data_bytes, alignment);
var offset = blk: {
if (alignment.keyFirst()) {
@ -624,7 +619,7 @@ pub fn dictValues(dict: RocDict, alignment: Alignment, key_width: usize, value_w
}
const data_bytes = length * value_width;
var ptr = allocateWithRefcount(std.heap.c_allocator, alignment, data_bytes);
var ptr = allocateWithRefcount(data_bytes, alignment);
var offset = blk: {
if (alignment.keyFirst()) {
@ -658,7 +653,7 @@ fn doNothing(ptr: Opaque) callconv(.C) void {
}
pub fn dictUnion(dict1: RocDict, dict2: RocDict, alignment: Alignment, key_width: usize, value_width: usize, hash_fn: HashFn, is_eq: EqFn, inc_key: Inc, inc_value: Inc, output: *RocDict) callconv(.C) void {
output.* = dict1.makeUnique(std.heap.c_allocator, alignment, key_width, value_width);
output.* = dict1.makeUnique(alignment, key_width, value_width);
var i: usize = 0;
while (i < dict2.capacity()) : (i += 1) {
@ -693,7 +688,7 @@ pub fn dictUnion(dict1: RocDict, dict2: RocDict, alignment: Alignment, key_width
}
pub fn dictIntersection(dict1: RocDict, dict2: RocDict, alignment: Alignment, key_width: usize, value_width: usize, hash_fn: HashFn, is_eq: EqFn, dec_key: Inc, dec_value: Inc, output: *RocDict) callconv(.C) void {
output.* = dict1.makeUnique(std.heap.c_allocator, alignment, key_width, value_width);
output.* = dict1.makeUnique(alignment, key_width, value_width);
var i: usize = 0;
const size = dict1.capacity();
@ -718,7 +713,7 @@ pub fn dictIntersection(dict1: RocDict, dict2: RocDict, alignment: Alignment, ke
}
pub fn dictDifference(dict1: RocDict, dict2: RocDict, alignment: Alignment, key_width: usize, value_width: usize, hash_fn: HashFn, is_eq: EqFn, dec_key: Dec, dec_value: Dec, output: *RocDict) callconv(.C) void {
output.* = dict1.makeUnique(std.heap.c_allocator, alignment, key_width, value_width);
output.* = dict1.makeUnique(alignment, key_width, value_width);
var i: usize = 0;
const size = dict1.capacity();
@ -759,7 +754,7 @@ pub fn setFromList(list: RocList, alignment: Alignment, key_width: usize, value_
// NOTE: decref checks for the empty case
const data_bytes = size * key_width;
decref(std.heap.c_allocator, alignment, list.bytes, data_bytes);
decref(list.bytes, data_bytes, alignment);
}
pub fn dictWalk(
@ -777,11 +772,12 @@ pub fn dictWalk(
inc_value: Inc,
output: Opaque,
) callconv(.C) void {
const alignment_u32 = alignment.toU32();
// allocate space to write the result of the stepper into
// experimentally aliasing the accum and output pointers is not a good idea
const alloc: [*]u8 = @ptrCast([*]u8, std.heap.c_allocator.alloc(u8, accum_width) catch unreachable);
const bytes_ptr: [*]u8 = utils.alloc(accum_width, alignment_u32);
var b1 = output orelse unreachable;
var b2 = alloc;
var b2 = bytes_ptr;
if (data_is_owned) {
inc_n_data(data, dict.len());
@ -808,5 +804,5 @@ pub fn dictWalk(
}
@memcpy(output orelse unreachable, b2, accum_width);
std.heap.c_allocator.free(alloc[0..accum_width]);
utils.dealloc(bytes_ptr, alignment_u32);
}
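dictWalk's scratch buffer now comes from utils.alloc and utils.dealloc rather than std.heap.c_allocator. utils.zig itself is not part of this excerpt; a plausible sketch of the helpers' shape, assuming they forward directly to the platform externs:

// assumed shape of the new utils.zig helpers (file not shown in this diff)
extern fn roc_alloc(size: usize, alignment: u32) callconv(.C) ?*c_void;
extern fn roc_dealloc(c_ptr: *c_void, alignment: u32) callconv(.C) void;

pub fn alloc(size: usize, alignment: u32) [*]u8 {
    return @ptrCast([*]u8, roc_alloc(size, alignment) orelse unreachable);
}

pub fn dealloc(c_ptr: [*]u8, alignment: u32) void {
    roc_dealloc(@ptrCast(*c_void, c_ptr), alignment);
}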

View File

@ -2,7 +2,6 @@ const std = @import("std");
const utils = @import("utils.zig");
const RocResult = utils.RocResult;
const mem = std.mem;
const Allocator = mem.Allocator;
const TAG_WIDTH = 8;
@ -42,20 +41,19 @@ pub const RocList = extern struct {
}
pub fn allocate(
allocator: *Allocator,
alignment: usize,
alignment: u32,
length: usize,
element_size: usize,
) RocList {
const data_bytes = length * element_size;
return RocList{
.bytes = utils.allocateWithRefcount(allocator, alignment, data_bytes),
.bytes = utils.allocateWithRefcount(data_bytes, alignment),
.length = length,
};
}
pub fn makeUnique(self: RocList, allocator: *Allocator, alignment: usize, element_width: usize) RocList {
pub fn makeUnique(self: RocList, alignment: u32, element_width: usize) RocList {
if (self.isEmpty()) {
return self;
}
@ -65,7 +63,7 @@ pub const RocList = extern struct {
}
// unfortunately, we have to clone
var new_list = RocList.allocate(allocator, alignment, self.length, element_width);
var new_list = RocList.allocate(alignment, self.length, element_width);
var old_bytes: [*]u8 = @ptrCast([*]u8, self.bytes);
var new_bytes: [*]u8 = @ptrCast([*]u8, new_list.bytes);
@ -75,34 +73,32 @@ pub const RocList = extern struct {
// NOTE we fuse an increment of all keys/values with a decrement of the input dict
const data_bytes = self.len() * element_width;
utils.decref(allocator, alignment, self.bytes, data_bytes);
utils.decref(self.bytes, data_bytes, alignment);
return new_list;
}
pub fn reallocate(
self: RocList,
allocator: *Allocator,
alignment: usize,
alignment: u32,
new_length: usize,
element_width: usize,
) RocList {
if (self.bytes) |source_ptr| {
if (self.isUnique()) {
const new_source = utils.unsafeReallocate(source_ptr, allocator, alignment, self.len(), new_length, element_width);
const new_source = utils.unsafeReallocate(source_ptr, alignment, self.len(), new_length, element_width);
return RocList{ .bytes = new_source, .length = new_length };
}
}
return self.reallocateFresh(allocator, alignment, new_length, element_width);
return self.reallocateFresh(alignment, new_length, element_width);
}
/// reallocate by explicitly making a new allocation and copying elements over
fn reallocateFresh(
self: RocList,
allocator: *Allocator,
alignment: usize,
alignment: u32,
new_length: usize,
element_width: usize,
) RocList {
@ -110,7 +106,7 @@ pub const RocList = extern struct {
const delta_length = new_length - old_length;
const data_bytes = new_length * element_width;
const first_slot = utils.allocateWithRefcount(allocator, alignment, data_bytes);
const first_slot = utils.allocateWithRefcount(data_bytes, alignment);
// transfer the memory
@ -126,7 +122,7 @@ pub const RocList = extern struct {
.length = new_length,
};
utils.decref(allocator, alignment, self.bytes, old_length * element_width);
utils.decref(self.bytes, old_length * element_width, alignment);
return result;
}
@ -137,7 +133,7 @@ const Caller1 = fn (?[*]u8, ?[*]u8, ?[*]u8) callconv(.C) void;
const Caller2 = fn (?[*]u8, ?[*]u8, ?[*]u8, ?[*]u8) callconv(.C) void;
const Caller3 = fn (?[*]u8, ?[*]u8, ?[*]u8, ?[*]u8, ?[*]u8) callconv(.C) void;
pub fn listReverse(list: RocList, alignment: usize, element_width: usize) callconv(.C) RocList {
pub fn listReverse(list: RocList, alignment: u32, element_width: usize) callconv(.C) RocList {
if (list.bytes) |source_ptr| {
const size = list.len();
@ -156,7 +152,7 @@ pub fn listReverse(list: RocList, alignment: usize, element_width: usize) callco
return list;
} else {
const output = RocList.allocate(std.heap.c_allocator, alignment, size, element_width);
const output = RocList.allocate(alignment, size, element_width);
const target_ptr = output.bytes orelse unreachable;
@ -166,7 +162,7 @@ pub fn listReverse(list: RocList, alignment: usize, element_width: usize) callco
@memcpy(target_ptr + (i * element_width), source_ptr + (last_position * element_width), element_width);
}
utils.decref(std.heap.c_allocator, alignment, list.bytes, size * element_width);
utils.decref(list.bytes, size * element_width, alignment);
return output;
}
@ -181,14 +177,14 @@ pub fn listMap(
data: Opaque,
inc_n_data: IncN,
data_is_owned: bool,
alignment: usize,
alignment: u32,
old_element_width: usize,
new_element_width: usize,
) callconv(.C) RocList {
if (list.bytes) |source_ptr| {
const size = list.len();
var i: usize = 0;
const output = RocList.allocate(std.heap.c_allocator, alignment, size, new_element_width);
const output = RocList.allocate(alignment, size, new_element_width);
const target_ptr = output.bytes orelse unreachable;
if (data_is_owned) {
@ -211,14 +207,14 @@ pub fn listMapWithIndex(
data: Opaque,
inc_n_data: IncN,
data_is_owned: bool,
alignment: usize,
alignment: u32,
old_element_width: usize,
new_element_width: usize,
) callconv(.C) RocList {
if (list.bytes) |source_ptr| {
const size = list.len();
var i: usize = 0;
const output = RocList.allocate(std.heap.c_allocator, alignment, size, new_element_width);
const output = RocList.allocate(alignment, size, new_element_width);
const target_ptr = output.bytes orelse unreachable;
if (data_is_owned) {
@ -252,7 +248,7 @@ pub fn listMap2(
data: Opaque,
inc_n_data: IncN,
data_is_owned: bool,
alignment: usize,
alignment: u32,
a_width: usize,
b_width: usize,
c_width: usize,
@ -272,7 +268,7 @@ pub fn listMap2(
if (list1.bytes) |source_a| {
if (list2.bytes) |source_b| {
const output = RocList.allocate(std.heap.c_allocator, alignment, output_length, c_width);
const output = RocList.allocate(alignment, output_length, c_width);
const target_ptr = output.bytes orelse unreachable;
var i: usize = 0;
@ -300,7 +296,7 @@ pub fn listMap3(
data: Opaque,
inc_n_data: IncN,
data_is_owned: bool,
alignment: usize,
alignment: u32,
a_width: usize,
b_width: usize,
c_width: usize,
@ -323,7 +319,7 @@ pub fn listMap3(
if (list1.bytes) |source_a| {
if (list2.bytes) |source_b| {
if (list3.bytes) |source_c| {
const output = RocList.allocate(std.heap.c_allocator, alignment, output_length, d_width);
const output = RocList.allocate(alignment, output_length, d_width);
const target_ptr = output.bytes orelse unreachable;
var i: usize = 0;
@ -354,7 +350,7 @@ pub fn listKeepIf(
data: Opaque,
inc_n_data: IncN,
data_is_owned: bool,
alignment: usize,
alignment: u32,
element_width: usize,
inc: Inc,
dec: Dec,
@ -362,7 +358,7 @@ pub fn listKeepIf(
if (list.bytes) |source_ptr| {
const size = list.len();
var i: usize = 0;
var output = RocList.allocate(std.heap.c_allocator, alignment, list.len(), list.len() * element_width);
var output = RocList.allocate(alignment, list.len(), list.len() * element_width);
const target_ptr = output.bytes orelse unreachable;
if (data_is_owned) {
@ -387,7 +383,7 @@ pub fn listKeepIf(
if (kept == 0) {
// if the output is empty, deallocate the space we made for the result
utils.decref(std.heap.c_allocator, alignment, output.bytes, size * element_width);
utils.decref(output.bytes, size * element_width, alignment);
return RocList.empty();
} else {
output.length = kept;
@ -405,7 +401,7 @@ pub fn listKeepOks(
data: Opaque,
inc_n_data: IncN,
data_is_owned: bool,
alignment: usize,
alignment: u32,
before_width: usize,
result_width: usize,
after_width: usize,
@ -432,7 +428,7 @@ pub fn listKeepErrs(
data: Opaque,
inc_n_data: IncN,
data_is_owned: bool,
alignment: usize,
alignment: u32,
before_width: usize,
result_width: usize,
after_width: usize,
@ -460,7 +456,7 @@ pub fn listKeepResult(
data: Opaque,
inc_n_data: IncN,
data_is_owned: bool,
alignment: usize,
alignment: u32,
before_width: usize,
result_width: usize,
after_width: usize,
@ -469,10 +465,10 @@ pub fn listKeepResult(
if (list.bytes) |source_ptr| {
const size = list.len();
var i: usize = 0;
var output = RocList.allocate(std.heap.c_allocator, alignment, list.len(), list.len() * after_width);
var output = RocList.allocate(alignment, list.len(), list.len() * after_width);
const target_ptr = output.bytes orelse unreachable;
var temporary = @ptrCast([*]u8, std.heap.c_allocator.alloc(u8, result_width) catch unreachable);
var temporary = @ptrCast([*]u8, utils.alloc(result_width, alignment));
if (data_is_owned) {
inc_n_data(data, size);
@ -494,10 +490,10 @@ pub fn listKeepResult(
}
}
std.heap.c_allocator.free(temporary[0..result_width]);
utils.dealloc(temporary, alignment);
if (kept == 0) {
utils.decref(std.heap.c_allocator, alignment, output.bytes, size * after_width);
utils.decref(output.bytes, size * after_width, alignment);
return RocList.empty();
} else {
output.length = kept;
@ -515,7 +511,7 @@ pub fn listWalk(
inc_n_data: IncN,
data_is_owned: bool,
accum: Opaque,
alignment: usize,
alignment: u32,
element_width: usize,
accum_width: usize,
output: Opaque,
@ -533,9 +529,9 @@ pub fn listWalk(
inc_n_data(data, list.len());
}
const alloc: [*]u8 = @ptrCast([*]u8, std.heap.c_allocator.alloc(u8, accum_width) catch unreachable);
const bytes_ptr: [*]u8 = utils.alloc(accum_width, alignment);
var b1 = output orelse unreachable;
var b2 = alloc;
var b2 = bytes_ptr;
@memcpy(b2, accum orelse unreachable, accum_width);
@ -553,7 +549,7 @@ pub fn listWalk(
}
@memcpy(output orelse unreachable, b2, accum_width);
std.heap.c_allocator.free(alloc[0..accum_width]);
utils.dealloc(bytes_ptr, alignment);
}
pub fn listWalkBackwards(
@ -563,7 +559,7 @@ pub fn listWalkBackwards(
inc_n_data: IncN,
data_is_owned: bool,
accum: Opaque,
alignment: usize,
alignment: u32,
element_width: usize,
accum_width: usize,
output: Opaque,
@ -581,9 +577,9 @@ pub fn listWalkBackwards(
inc_n_data(data, list.len());
}
const alloc: [*]u8 = @ptrCast([*]u8, std.heap.c_allocator.alloc(u8, accum_width) catch unreachable);
const bytes_ptr: [*]u8 = utils.alloc(accum_width, alignment);
var b1 = output orelse unreachable;
var b2 = alloc;
var b2 = bytes_ptr;
@memcpy(b2, accum orelse unreachable, accum_width);
@ -602,7 +598,7 @@ pub fn listWalkBackwards(
}
@memcpy(output orelse unreachable, b2, accum_width);
std.heap.c_allocator.free(alloc[0..accum_width]);
utils.dealloc(bytes_ptr, alignment);
}
pub fn listWalkUntil(
@ -612,7 +608,7 @@ pub fn listWalkUntil(
inc_n_data: IncN,
data_is_owned: bool,
accum: Opaque,
alignment: usize,
alignment: u32,
element_width: usize,
accum_width: usize,
dec: Dec,
@ -630,9 +626,9 @@ pub fn listWalkUntil(
return;
}
const alloc: [*]u8 = @ptrCast([*]u8, std.heap.c_allocator.alloc(u8, TAG_WIDTH + accum_width) catch unreachable);
const bytes_ptr: [*]u8 = utils.alloc(TAG_WIDTH + accum_width, alignment);
@memcpy(alloc + TAG_WIDTH, accum orelse unreachable, accum_width);
@memcpy(bytes_ptr + TAG_WIDTH, accum orelse unreachable, accum_width);
if (list.bytes) |source_ptr| {
var i: usize = 0;
@ -644,9 +640,9 @@ pub fn listWalkUntil(
inc_n_data(data, 1);
}
caller(data, element, alloc + TAG_WIDTH, alloc);
caller(data, element, bytes_ptr + TAG_WIDTH, bytes_ptr);
const usizes: [*]usize = @ptrCast([*]usize, @alignCast(8, alloc));
const usizes: [*]usize = @ptrCast([*]usize, @alignCast(8, bytes_ptr));
if (usizes[0] != 0) {
// decrement refcount of the remaining items
i += 1;
@ -658,8 +654,8 @@ pub fn listWalkUntil(
}
}
@memcpy(output orelse unreachable, alloc + TAG_WIDTH, accum_width);
std.heap.c_allocator.free(alloc[0 .. TAG_WIDTH + accum_width]);
@memcpy(output orelse unreachable, bytes_ptr + TAG_WIDTH, accum_width);
utils.dealloc(bytes_ptr, alignment);
}
// List.contains : List k, k -> Bool
@ -678,13 +674,12 @@ pub fn listContains(list: RocList, key: Opaque, key_width: usize, is_eq: EqFn) c
return false;
}
pub fn listRepeat(count: usize, alignment: usize, element: Opaque, element_width: usize, inc_n_element: IncN) callconv(.C) RocList {
pub fn listRepeat(count: usize, alignment: u32, element: Opaque, element_width: usize, inc_n_element: IncN) callconv(.C) RocList {
if (count == 0) {
return RocList.empty();
}
const allocator = std.heap.c_allocator;
var output = RocList.allocate(allocator, alignment, count, element_width);
var output = RocList.allocate(alignment, count, element_width);
if (output.bytes) |target_ptr| {
// increment the element's RC N times
@ -702,8 +697,8 @@ pub fn listRepeat(count: usize, alignment: usize, element: Opaque, element_width
}
}
pub fn listSingle(alignment: usize, element: Opaque, element_width: usize) callconv(.C) RocList {
var output = RocList.allocate(std.heap.c_allocator, alignment, 1, element_width);
pub fn listSingle(alignment: u32, element: Opaque, element_width: usize) callconv(.C) RocList {
var output = RocList.allocate(alignment, 1, element_width);
if (output.bytes) |target| {
if (element) |source| {
@ -714,9 +709,9 @@ pub fn listSingle(alignment: usize, element: Opaque, element_width: usize) callc
return output;
}
pub fn listAppend(list: RocList, alignment: usize, element: Opaque, element_width: usize) callconv(.C) RocList {
pub fn listAppend(list: RocList, alignment: u32, element: Opaque, element_width: usize) callconv(.C) RocList {
const old_length = list.len();
var output = list.reallocate(std.heap.c_allocator, alignment, old_length + 1, element_width);
var output = list.reallocate(alignment, old_length + 1, element_width);
if (output.bytes) |target| {
if (element) |source| {
@ -729,7 +724,7 @@ pub fn listAppend(list: RocList, alignment: usize, element: Opaque, element_widt
pub fn listDrop(
list: RocList,
alignment: usize,
alignment: u32,
element_width: usize,
drop_count: usize,
dec: Dec,
@ -750,12 +745,12 @@ pub fn listDrop(
return RocList.empty();
}
const output = RocList.allocate(std.heap.c_allocator, alignment, keep_count, element_width);
const output = RocList.allocate(alignment, keep_count, element_width);
const target_ptr = output.bytes orelse unreachable;
@memcpy(target_ptr, source_ptr + drop_count * element_width, keep_count * element_width);
utils.decref(std.heap.c_allocator, alignment, list.bytes, size * element_width);
utils.decref(list.bytes, size * element_width, alignment);
return output;
} else {
@ -764,54 +759,53 @@ pub fn listDrop(
}
pub fn listRange(width: utils.IntWidth, low: Opaque, high: Opaque) callconv(.C) RocList {
const allocator = std.heap.c_allocator;
const IntWidth = utils.IntWidth;
switch (width) {
IntWidth.U8 => {
return helper1(allocator, u8, low, high);
return helper1(u8, low, high);
},
IntWidth.U16 => {
return helper1(allocator, u16, low, high);
return helper1(u16, low, high);
},
IntWidth.U32 => {
return helper1(allocator, u32, low, high);
return helper1(u32, low, high);
},
IntWidth.U64 => {
return helper1(allocator, u64, low, high);
return helper1(u64, low, high);
},
IntWidth.U128 => {
return helper1(allocator, u128, low, high);
return helper1(u128, low, high);
},
IntWidth.I8 => {
return helper1(allocator, i8, low, high);
return helper1(i8, low, high);
},
IntWidth.I16 => {
return helper1(allocator, i16, low, high);
return helper1(i16, low, high);
},
IntWidth.I32 => {
return helper1(allocator, i32, low, high);
return helper1(i32, low, high);
},
IntWidth.I64 => {
return helper1(allocator, i64, low, high);
return helper1(i64, low, high);
},
IntWidth.I128 => {
return helper1(allocator, i128, low, high);
return helper1(i128, low, high);
},
IntWidth.Usize => {
return helper1(allocator, usize, low, high);
return helper1(usize, low, high);
},
}
}
fn helper1(allocator: *Allocator, comptime T: type, low: Opaque, high: Opaque) RocList {
fn helper1(comptime T: type, low: Opaque, high: Opaque) RocList {
const ptr1 = @ptrCast(*T, @alignCast(@alignOf(T), low));
const ptr2 = @ptrCast(*T, @alignCast(@alignOf(T), high));
return listRangeHelp(allocator, T, ptr1.*, ptr2.*);
return listRangeHelp(T, ptr1.*, ptr2.*);
}
fn listRangeHelp(allocator: *Allocator, comptime T: type, low: T, high: T) RocList {
fn listRangeHelp(comptime T: type, low: T, high: T) RocList {
const Order = std.math.Order;
switch (std.math.order(low, high)) {
@ -820,7 +814,7 @@ fn listRangeHelp(allocator: *Allocator, comptime T: type, low: T, high: T) RocLi
},
Order.eq => {
const list = RocList.allocate(allocator, @alignOf(usize), 1, @sizeOf(T));
const list = RocList.allocate(@alignOf(usize), 1, @sizeOf(T));
const buffer = @ptrCast([*]T, @alignCast(@alignOf(T), list.bytes orelse unreachable));
buffer[0] = low;
@ -830,7 +824,7 @@ fn listRangeHelp(allocator: *Allocator, comptime T: type, low: T, high: T) RocLi
Order.lt => {
const length: usize = @intCast(usize, high - low);
const list = RocList.allocate(allocator, @alignOf(usize), length, @sizeOf(T));
const list = RocList.allocate(@alignOf(usize), length, @sizeOf(T));
const buffer = @ptrCast([*]T, @alignCast(@alignOf(T), list.bytes orelse unreachable));
@ -889,10 +883,10 @@ pub fn listSortWith(
data: Opaque,
inc_n_data: IncN,
data_is_owned: bool,
alignment: usize,
alignment: u32,
element_width: usize,
) callconv(.C) RocList {
var list = input.makeUnique(std.heap.c_allocator, alignment, element_width);
var list = input.makeUnique(alignment, element_width);
if (data_is_owned) {
inc_n_data(data, list.len());
@ -953,7 +947,7 @@ fn swapElements(source_ptr: [*]u8, element_width: usize, index_1: usize, index_2
return swap(element_width, element_at_i, element_at_j);
}
pub fn listJoin(list_of_lists: RocList, alignment: usize, element_width: usize) callconv(.C) RocList {
pub fn listJoin(list_of_lists: RocList, alignment: u32, element_width: usize) callconv(.C) RocList {
var total_length: usize = 0;
const slice_of_lists = @ptrCast([*]RocList, @alignCast(@alignOf(RocList), list_of_lists.bytes));
@ -963,7 +957,7 @@ pub fn listJoin(list_of_lists: RocList, alignment: usize, element_width: usize)
total_length += slice_of_lists[i].len();
}
const output = RocList.allocate(std.heap.c_allocator, alignment, total_length, element_width);
const output = RocList.allocate(alignment, total_length, element_width);
if (output.bytes) |target| {
var elements_copied: usize = 0;
@ -981,7 +975,7 @@ pub fn listJoin(list_of_lists: RocList, alignment: usize, element_width: usize)
return output;
}
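Taken together, the list builtins above now allocate and release memory without threading an allocator. A minimal usage sketch under the new signatures (illustration only; assumes this file's declarations, element type u64):

fn exampleListLifecycle() void {
    // allocate(alignment, length, element_size) — the allocator parameter is gone
    const list = RocList.allocate(@alignOf(u64), 4, @sizeOf(u64));
    const buffer = @ptrCast([*]u64, @alignCast(@alignOf(u64), list.bytes orelse unreachable));
    buffer[0] = 42;
    // decref(bytes, data_bytes, alignment) frees once the refcount hits zero
    utils.decref(list.bytes, 4 * @sizeOf(u64), @alignOf(u64));
}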
pub fn listConcat(list_a: RocList, list_b: RocList, alignment: usize, element_width: usize) callconv(.C) RocList {
pub fn listConcat(list_a: RocList, list_b: RocList, alignment: u32, element_width: usize) callconv(.C) RocList {
if (list_a.isEmpty()) {
return list_b;
} else if (list_b.isEmpty()) {
@ -992,7 +986,6 @@ pub fn listConcat(list_a: RocList, list_b: RocList, alignment: usize, element_wi
if (list_a.bytes) |source| {
const new_source = utils.unsafeReallocate(
source,
std.heap.c_allocator,
alignment,
list_a.len(),
total_length,
@ -1008,7 +1001,7 @@ pub fn listConcat(list_a: RocList, list_b: RocList, alignment: usize, element_wi
}
const total_length: usize = list_a.len() + list_b.len();
const output = RocList.allocate(std.heap.c_allocator, alignment, total_length, element_width);
const output = RocList.allocate(alignment, total_length, element_width);
if (output.bytes) |target| {
if (list_a.bytes) |source| {

View File

@ -1,9 +1,9 @@
const utils = @import("utils.zig");
const roc_mem = @import("mem.zig");
const RocList = @import("list.zig").RocList;
const std = @import("std");
const mem = std.mem;
const always_inline = std.builtin.CallOptions.Modifier.always_inline;
const Allocator = mem.Allocator;
const unicode = std.unicode;
const testing = std.testing;
const expectEqual = testing.expectEqual;
@ -34,6 +34,8 @@ pub const RocStr = extern struct {
str_bytes: ?[*]u8,
str_len: usize,
pub const alignment = @alignOf(usize);
pub inline fn empty() RocStr {
return RocStr{
.str_len = 0,
@ -43,15 +45,15 @@ pub const RocStr = extern struct {
// This clones the pointed-to bytes if they won't fit in a
// small string, and returns a (pointer, len) tuple which points to them.
pub fn init(allocator: *Allocator, bytes_ptr: [*]const u8, length: usize) RocStr {
var result = RocStr.allocate(allocator, InPlace.Clone, length);
pub fn init(bytes_ptr: [*]const u8, length: usize) RocStr {
var result = RocStr.allocate(InPlace.Clone, length);
@memcpy(result.asU8ptr(), bytes_ptr, length);
return result;
}
pub fn initBig(allocator: *Allocator, in_place: InPlace, number_of_chars: u64) RocStr {
const first_element = utils.allocateWithRefcount(allocator, @sizeOf(usize), number_of_chars);
pub fn initBig(in_place: InPlace, number_of_chars: u64) RocStr {
const first_element = utils.allocateWithRefcount(number_of_chars, @sizeOf(usize));
return RocStr{
.str_bytes = first_element,
@ -60,11 +62,11 @@ pub const RocStr = extern struct {
}
// allocate space for a (big or small) RocStr, but put nothing in it yet
pub fn allocate(allocator: *Allocator, result_in_place: InPlace, number_of_chars: usize) RocStr {
pub fn allocate(result_in_place: InPlace, number_of_chars: usize) RocStr {
const result_is_big = number_of_chars >= small_string_size;
if (result_is_big) {
return RocStr.initBig(allocator, result_in_place, number_of_chars);
return RocStr.initBig(result_in_place, number_of_chars);
} else {
var t = blank_small_string;
@ -77,10 +79,9 @@ pub const RocStr = extern struct {
}
}
pub fn deinit(self: RocStr, allocator: *Allocator) void {
pub fn deinit(self: RocStr) void {
if (!self.isSmallStr() and !self.isEmpty()) {
const alignment = @alignOf(usize);
utils.decref(allocator, alignment, self.str_bytes, self.str_len);
utils.decref(self.str_bytes, self.str_len, RocStr.alignment);
}
}
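deinit now reads the shared RocStr.alignment constant declared near the top of this file; the same constant also replaces the ad-hoc "const alignment = 1" that reallocate and strConcat used before this change. Illustration only:

test "RocStr.alignment is the word alignment" {
    expectEqual(@as(usize, @alignOf(usize)), RocStr.alignment);
}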
@ -98,7 +99,7 @@ pub const RocStr = extern struct {
if (length < roc_str_size) {
return RocStr.empty();
} else {
var new_bytes: []T = allocator.alloc(u8, length) catch unreachable;
var new_bytes: []T = utils.alloc(length, RocStr.alignment) catch unreachable;
var new_bytes_ptr: [*]u8 = @ptrCast([*]u8, &new_bytes);
@ -146,12 +147,12 @@ pub const RocStr = extern struct {
return true;
}
pub fn clone(allocator: *Allocator, in_place: InPlace, str: RocStr) RocStr {
pub fn clone(in_place: InPlace, str: RocStr) RocStr {
if (str.isSmallStr() or str.isEmpty()) {
// just return the bytes
return str;
} else {
var new_str = RocStr.initBig(allocator, in_place, str.str_len);
var new_str = RocStr.initBig(in_place, str.str_len);
var old_bytes: [*]u8 = @ptrCast([*]u8, str.str_bytes);
var new_bytes: [*]u8 = @ptrCast([*]u8, new_str.str_bytes);
@ -164,33 +165,30 @@ pub const RocStr = extern struct {
pub fn reallocate(
self: RocStr,
allocator: *Allocator,
new_length: usize,
) RocStr {
const alignment = 1;
const element_width = 1;
if (self.bytes) |source_ptr| {
if (self.isUnique()) {
const new_source = utils.unsafeReallocate(source_ptr, allocator, alignment, self.len(), new_length, element_width);
const new_source = utils.unsafeReallocate(source_ptr, RocStr.alignment, self.len(), new_length, element_width);
return RocStr{ .str_bytes = new_source, .str_len = new_length };
}
}
return self.reallocateFresh(allocator, alignment, new_length, element_width);
return self.reallocateFresh(RocStr.alignment, new_length, element_width);
}
/// reallocate by explicitly making a new allocation and copying elements over
pub fn reallocateFresh(
self: RocStr,
allocator: *Allocator,
new_length: usize,
) RocStr {
const old_length = self.len();
const delta_length = new_length - old_length;
const result = RocStr.allocate(allocator, InPlace.Clone, new_length);
const result = RocStr.allocate(InPlace.Clone, new_length);
// transfer the memory
@ -200,7 +198,7 @@ pub const RocStr = extern struct {
@memcpy(dest_ptr, source_ptr, old_length);
@memset(dest_ptr + old_length, 0, delta_length);
self.deinit(allocator);
self.deinit();
return result;
}
@ -268,33 +266,33 @@ pub const RocStr = extern struct {
const str1_len = 3;
var str1: [str1_len]u8 = "abc".*;
const str1_ptr: [*]u8 = &str1;
var roc_str1 = RocStr.init(testing.allocator, str1_ptr, str1_len);
var roc_str1 = RocStr.init(str1_ptr, str1_len);
const str2_len = 3;
var str2: [str2_len]u8 = "abc".*;
const str2_ptr: [*]u8 = &str2;
var roc_str2 = RocStr.init(testing.allocator, str2_ptr, str2_len);
var roc_str2 = RocStr.init(str2_ptr, str2_len);
expect(roc_str1.eq(roc_str2));
roc_str1.deinit(testing.allocator);
roc_str2.deinit(testing.allocator);
roc_str1.deinit();
roc_str2.deinit();
}
test "RocStr.eq: not equal different length" {
const str1_len = 4;
var str1: [str1_len]u8 = "abcd".*;
const str1_ptr: [*]u8 = &str1;
var roc_str1 = RocStr.init(testing.allocator, str1_ptr, str1_len);
var roc_str1 = RocStr.init(str1_ptr, str1_len);
const str2_len = 3;
var str2: [str2_len]u8 = "abc".*;
const str2_ptr: [*]u8 = &str2;
var roc_str2 = RocStr.init(testing.allocator, str2_ptr, str2_len);
var roc_str2 = RocStr.init(str2_ptr, str2_len);
defer {
roc_str1.deinit(testing.allocator);
roc_str2.deinit(testing.allocator);
roc_str1.deinit();
roc_str2.deinit();
}
expect(!roc_str1.eq(roc_str2));
@ -304,16 +302,16 @@ pub const RocStr = extern struct {
const str1_len = 3;
var str1: [str1_len]u8 = "acb".*;
const str1_ptr: [*]u8 = &str1;
var roc_str1 = RocStr.init(testing.allocator, str1_ptr, str1_len);
var roc_str1 = RocStr.init(str1_ptr, str1_len);
const str2_len = 3;
var str2: [str2_len]u8 = "abc".*;
const str2_ptr: [*]u8 = &str2;
var roc_str2 = RocStr.init(testing.allocator, str2_ptr, str2_len);
var roc_str2 = RocStr.init(str2_ptr, str2_len);
defer {
roc_str1.deinit(testing.allocator);
roc_str2.deinit(testing.allocator);
roc_str1.deinit();
roc_str2.deinit();
}
expect(!roc_str1.eq(roc_str2));
@ -321,7 +319,7 @@ pub const RocStr = extern struct {
};
pub fn init(bytes_ptr: [*]const u8, length: usize) callconv(.C) RocStr {
return @call(.{ .modifier = always_inline }, RocStr.init, .{ std.heap.c_allocator, bytes_ptr, length });
return @call(.{ .modifier = always_inline }, RocStr.init, .{ bytes_ptr, length });
}
// Str.equal
@ -335,17 +333,12 @@ pub fn strNumberOfBytes(string: RocStr) callconv(.C) usize {
}
// Str.fromInt
// When we actually use this in Roc, libc will be linked so we have access to std.heap.c_allocator
pub fn strFromIntC(int: i64) callconv(.C) RocStr {
return strFromInt(std.heap.c_allocator, int);
}
fn strFromInt(allocator: *Allocator, int: i64) RocStr {
// prepare for having multiple integer types in the future
return @call(.{ .modifier = always_inline }, strFromIntHelp, .{ allocator, i64, int });
return @call(.{ .modifier = always_inline }, strFromIntHelp, .{ i64, int });
}
fn strFromIntHelp(allocator: *Allocator, comptime T: type, int: T) RocStr {
fn strFromIntHelp(comptime T: type, int: T) RocStr {
// determine maximum size for this T
comptime const size = comptime blk: {
// the string representation of the minimum i128 value uses at most 40 characters
@ -357,11 +350,10 @@ fn strFromIntHelp(allocator: *Allocator, comptime T: type, int: T) RocStr {
var buf: [size]u8 = undefined;
const result = std.fmt.bufPrint(&buf, "{}", .{int}) catch unreachable;
return RocStr.init(allocator, &buf, result.len);
return RocStr.init(&buf, result.len);
}
// Str.fromFloat
// When we actually use this in Roc, libc will be linked so we have access to std.heap.c_allocator
pub fn strFromFloatC(float: f64) callconv(.C) RocStr {
// NOTE the compiled zig for float formatting seems to use LLVM11-specific features
// hopefully we can use zig instead of snprintf in the future when we upgrade
@ -374,16 +366,15 @@ pub fn strFromFloatC(float: f64) callconv(.C) RocStr {
const result = c.snprintf(&buf, 100, "%f", float);
return RocStr.init(std.heap.c_allocator, &buf, @intCast(usize, result));
return RocStr.init(&buf, @intCast(usize, result));
}
// Str.split
// When we actually use this in Roc, libc will be linked so we have access to std.heap.c_allocator
pub fn strSplitInPlaceC(array: [*]RocStr, string: RocStr, delimiter: RocStr) callconv(.C) void {
return @call(.{ .modifier = always_inline }, strSplitInPlace, .{ std.heap.c_allocator, array, string, delimiter });
return @call(.{ .modifier = always_inline }, strSplitInPlace, .{ array, string, delimiter });
}
fn strSplitInPlace(allocator: *Allocator, array: [*]RocStr, string: RocStr, delimiter: RocStr) void {
fn strSplitInPlace(array: [*]RocStr, string: RocStr, delimiter: RocStr) void {
var ret_array_index: usize = 0;
var slice_start_index: usize = 0;
var str_index: usize = 0;
@ -415,7 +406,7 @@ fn strSplitInPlace(allocator: *Allocator, array: [*]RocStr, string: RocStr, deli
if (matches_delimiter) {
const segment_len: usize = str_index - slice_start_index;
array[ret_array_index] = RocStr.init(allocator, str_bytes + slice_start_index, segment_len);
array[ret_array_index] = RocStr.init(str_bytes + slice_start_index, segment_len);
slice_start_index = str_index + delimiter_len;
ret_array_index += 1;
str_index += delimiter_len;
@ -425,21 +416,21 @@ fn strSplitInPlace(allocator: *Allocator, array: [*]RocStr, string: RocStr, deli
}
}
array[ret_array_index] = RocStr.init(allocator, str_bytes + slice_start_index, str_len - slice_start_index);
array[ret_array_index] = RocStr.init(str_bytes + slice_start_index, str_len - slice_start_index);
}
test "strSplitInPlace: no delimiter" {
// Str.split "abc" "!" == [ "abc" ]
const str_arr = "abc";
const str = RocStr.init(testing.allocator, str_arr, str_arr.len);
const str = RocStr.init(str_arr, str_arr.len);
const delimiter_arr = "!";
const delimiter = RocStr.init(testing.allocator, delimiter_arr, delimiter_arr.len);
const delimiter = RocStr.init(delimiter_arr, delimiter_arr.len);
var array: [1]RocStr = undefined;
const array_ptr: [*]RocStr = &array;
strSplitInPlace(testing.allocator, array_ptr, str, delimiter);
strSplitInPlace(array_ptr, str, delimiter);
var expected = [1]RocStr{
str,
@ -447,15 +438,15 @@ test "strSplitInPlace: no delimiter" {
defer {
for (array) |roc_str| {
roc_str.deinit(testing.allocator);
roc_str.deinit();
}
for (expected) |roc_str| {
roc_str.deinit(testing.allocator);
roc_str.deinit();
}
str.deinit(testing.allocator);
delimiter.deinit(testing.allocator);
str.deinit();
delimiter.deinit();
}
expectEqual(array.len, expected.len);
@ -464,10 +455,10 @@ test "strSplitInPlace: no delimiter" {
test "strSplitInPlace: empty end" {
const str_arr = "1---- ---- ---- ---- ----2---- ---- ---- ---- ----";
const str = RocStr.init(testing.allocator, str_arr, str_arr.len);
const str = RocStr.init(str_arr, str_arr.len);
const delimiter_arr = "---- ---- ---- ---- ----";
const delimiter = RocStr.init(testing.allocator, delimiter_arr, delimiter_arr.len);
const delimiter = RocStr.init(delimiter_arr, delimiter_arr.len);
const array_len: usize = 3;
var array: [array_len]RocStr = [_]RocStr{
@ -477,10 +468,10 @@ test "strSplitInPlace: empty end" {
};
const array_ptr: [*]RocStr = &array;
strSplitInPlace(testing.allocator, array_ptr, str, delimiter);
strSplitInPlace(array_ptr, str, delimiter);
const one = RocStr.init(testing.allocator, "1", 1);
const two = RocStr.init(testing.allocator, "2", 1);
const one = RocStr.init("1", 1);
const two = RocStr.init("2", 1);
var expected = [3]RocStr{
one, two, RocStr.empty(),
@ -488,15 +479,15 @@ test "strSplitInPlace: empty end" {
defer {
for (array) |rocStr| {
rocStr.deinit(testing.allocator);
rocStr.deinit();
}
for (expected) |rocStr| {
rocStr.deinit(testing.allocator);
rocStr.deinit();
}
str.deinit(testing.allocator);
delimiter.deinit(testing.allocator);
str.deinit();
delimiter.deinit();
}
expectEqual(array.len, expected.len);
@ -507,10 +498,10 @@ test "strSplitInPlace: empty end" {
test "strSplitInPlace: delimiter on sides" {
const str_arr = "tttghittt";
const str = RocStr.init(testing.allocator, str_arr, str_arr.len);
const str = RocStr.init(str_arr, str_arr.len);
const delimiter_arr = "ttt";
const delimiter = RocStr.init(testing.allocator, delimiter_arr, delimiter_arr.len);
const delimiter = RocStr.init(delimiter_arr, delimiter_arr.len);
const array_len: usize = 3;
var array: [array_len]RocStr = [_]RocStr{
@ -519,10 +510,10 @@ test "strSplitInPlace: delimiter on sides" {
undefined,
};
const array_ptr: [*]RocStr = &array;
strSplitInPlace(testing.allocator, array_ptr, str, delimiter);
strSplitInPlace(array_ptr, str, delimiter);
const ghi_arr = "ghi";
const ghi = RocStr.init(testing.allocator, ghi_arr, ghi_arr.len);
const ghi = RocStr.init(ghi_arr, ghi_arr.len);
var expected = [3]RocStr{
RocStr.empty(), ghi, RocStr.empty(),
@ -530,15 +521,15 @@ test "strSplitInPlace: delimiter on sides" {
defer {
for (array) |rocStr| {
rocStr.deinit(testing.allocator);
rocStr.deinit();
}
for (expected) |rocStr| {
rocStr.deinit(testing.allocator);
rocStr.deinit();
}
str.deinit(testing.allocator);
delimiter.deinit(testing.allocator);
str.deinit();
delimiter.deinit();
}
expectEqual(array.len, expected.len);
@ -550,20 +541,20 @@ test "strSplitInPlace: delimiter on sides" {
test "strSplitInPlace: three pieces" {
// Str.split "a!b!c" "!" == [ "a", "b", "c" ]
const str_arr = "a!b!c";
const str = RocStr.init(testing.allocator, str_arr, str_arr.len);
const str = RocStr.init(str_arr, str_arr.len);
const delimiter_arr = "!";
const delimiter = RocStr.init(testing.allocator, delimiter_arr, delimiter_arr.len);
const delimiter = RocStr.init(delimiter_arr, delimiter_arr.len);
const array_len: usize = 3;
var array: [array_len]RocStr = undefined;
const array_ptr: [*]RocStr = &array;
strSplitInPlace(testing.allocator, array_ptr, str, delimiter);
strSplitInPlace(array_ptr, str, delimiter);
const a = RocStr.init(testing.allocator, "a", 1);
const b = RocStr.init(testing.allocator, "b", 1);
const c = RocStr.init(testing.allocator, "c", 1);
const a = RocStr.init("a", 1);
const b = RocStr.init("b", 1);
const c = RocStr.init("c", 1);
var expected_array = [array_len]RocStr{
a, b, c,
@ -571,15 +562,15 @@ test "strSplitInPlace: three pieces" {
defer {
for (array) |roc_str| {
roc_str.deinit(testing.allocator);
roc_str.deinit();
}
for (expected_array) |roc_str| {
roc_str.deinit(testing.allocator);
roc_str.deinit();
}
str.deinit(testing.allocator);
delimiter.deinit(testing.allocator);
str.deinit();
delimiter.deinit();
}
expectEqual(expected_array.len, array.len);
@ -637,14 +628,14 @@ test "countSegments: long delimiter" {
// Str.split "str" "delimiter" == [ "str" ]
// 1 segment
const str_arr = "str";
const str = RocStr.init(testing.allocator, str_arr, str_arr.len);
const str = RocStr.init(str_arr, str_arr.len);
const delimiter_arr = "delimiter";
const delimiter = RocStr.init(testing.allocator, delimiter_arr, delimiter_arr.len);
const delimiter = RocStr.init(delimiter_arr, delimiter_arr.len);
defer {
str.deinit(testing.allocator);
delimiter.deinit(testing.allocator);
str.deinit();
delimiter.deinit();
}
const segments_count = countSegments(str, delimiter);
@ -655,14 +646,14 @@ test "countSegments: delimiter at start" {
// Str.split "hello there" "hello" == [ "", " there" ]
// 2 segments
const str_arr = "hello there";
const str = RocStr.init(testing.allocator, str_arr, str_arr.len);
const str = RocStr.init(str_arr, str_arr.len);
const delimiter_arr = "hello";
const delimiter = RocStr.init(testing.allocator, delimiter_arr, delimiter_arr.len);
const delimiter = RocStr.init(delimiter_arr, delimiter_arr.len);
defer {
str.deinit(testing.allocator);
delimiter.deinit(testing.allocator);
str.deinit();
delimiter.deinit();
}
const segments_count = countSegments(str, delimiter);
@ -674,14 +665,14 @@ test "countSegments: delimiter interspered" {
// Str.split "a!b!c" "!" == [ "a", "b", "c" ]
// 3 segments
const str_arr = "a!b!c";
const str = RocStr.init(testing.allocator, str_arr, str_arr.len);
const str = RocStr.init(str_arr, str_arr.len);
const delimiter_arr = "!";
const delimiter = RocStr.init(testing.allocator, delimiter_arr, delimiter_arr.len);
const delimiter = RocStr.init(delimiter_arr, delimiter_arr.len);
defer {
str.deinit(testing.allocator);
delimiter.deinit(testing.allocator);
str.deinit();
delimiter.deinit();
}
const segments_count = countSegments(str, delimiter);
@ -736,8 +727,8 @@ test "countGraphemeClusters: empty string" {
test "countGraphemeClusters: ascii characters" {
const bytes_arr = "abcd";
const bytes_len = bytes_arr.len;
const str = RocStr.init(testing.allocator, bytes_arr, bytes_len);
defer str.deinit(testing.allocator);
const str = RocStr.init(bytes_arr, bytes_len);
defer str.deinit();
const count = countGraphemeClusters(str);
expectEqual(count, 4);
@ -746,8 +737,8 @@ test "countGraphemeClusters: ascii characters" {
test "countGraphemeClusters: utf8 characters" {
const bytes_arr = "ãxā";
const bytes_len = bytes_arr.len;
const str = RocStr.init(testing.allocator, bytes_arr, bytes_len);
defer str.deinit(testing.allocator);
const str = RocStr.init(bytes_arr, bytes_len);
defer str.deinit();
const count = countGraphemeClusters(str);
expectEqual(count, 3);
@ -756,8 +747,8 @@ test "countGraphemeClusters: utf8 characters" {
test "countGraphemeClusters: emojis" {
const bytes_arr = "🤔🤔🤔";
const bytes_len = bytes_arr.len;
const str = RocStr.init(testing.allocator, bytes_arr, bytes_len);
defer str.deinit(testing.allocator);
const str = RocStr.init(bytes_arr, bytes_len);
defer str.deinit();
const count = countGraphemeClusters(str);
expectEqual(count, 3);
@ -766,8 +757,8 @@ test "countGraphemeClusters: emojis" {
test "countGraphemeClusters: emojis and ut8 characters" {
const bytes_arr = "🤔å🤔¥🤔ç";
const bytes_len = bytes_arr.len;
const str = RocStr.init(testing.allocator, bytes_arr, bytes_len);
defer str.deinit(testing.allocator);
const str = RocStr.init(bytes_arr, bytes_len);
defer str.deinit();
const count = countGraphemeClusters(str);
expectEqual(count, 6);
@ -776,8 +767,8 @@ test "countGraphemeClusters: emojis and ut8 characters" {
test "countGraphemeClusters: emojis, ut8, and ascii characters" {
const bytes_arr = "6🤔å🤔e¥🤔çpp";
const bytes_len = bytes_arr.len;
const str = RocStr.init(testing.allocator, bytes_arr, bytes_len);
defer str.deinit(testing.allocator);
const str = RocStr.init(bytes_arr, bytes_len);
defer str.deinit();
const count = countGraphemeClusters(str);
expectEqual(count, 10);
@ -828,36 +819,36 @@ pub fn startsWithCodePoint(string: RocStr, prefix: u32) callconv(.C) bool {
}
test "startsWithCodePoint: ascii char" {
const whole = RocStr.init(testing.allocator, "foobar", 6);
const whole = RocStr.init("foobar", 6);
const prefix = 'f';
expect(startsWithCodePoint(whole, prefix));
}
test "startsWithCodePoint: emoji" {
const yes = RocStr.init(testing.allocator, "💖foobar", 10);
const no = RocStr.init(testing.allocator, "foobar", 6);
const yes = RocStr.init("💖foobar", 10);
const no = RocStr.init("foobar", 6);
const prefix = '💖';
expect(startsWithCodePoint(yes, prefix));
expect(!startsWithCodePoint(no, prefix));
}
test "startsWith: foo starts with fo" {
const foo = RocStr.init(testing.allocator, "foo", 3);
const fo = RocStr.init(testing.allocator, "fo", 2);
const foo = RocStr.init("foo", 3);
const fo = RocStr.init("fo", 2);
expect(startsWith(foo, fo));
}
test "startsWith: 123456789123456789 starts with 123456789123456789" {
const str = RocStr.init(testing.allocator, "123456789123456789", 18);
defer str.deinit(testing.allocator);
const str = RocStr.init("123456789123456789", 18);
defer str.deinit();
expect(startsWith(str, str));
}
test "startsWith: 12345678912345678910 starts with 123456789123456789" {
const str = RocStr.init(testing.allocator, "12345678912345678910", 20);
defer str.deinit(testing.allocator);
const prefix = RocStr.init(testing.allocator, "123456789123456789", 18);
defer prefix.deinit(testing.allocator);
const str = RocStr.init("12345678912345678910", 20);
defer str.deinit();
const prefix = RocStr.init("123456789123456789", 18);
defer prefix.deinit();
expect(startsWith(str, prefix));
}
@ -886,63 +877,60 @@ pub fn endsWith(string: RocStr, suffix: RocStr) callconv(.C) bool {
}
test "endsWith: foo ends with oo" {
const foo = RocStr.init(testing.allocator, "foo", 3);
const oo = RocStr.init(testing.allocator, "oo", 2);
defer foo.deinit(testing.allocator);
defer oo.deinit(testing.allocator);
const foo = RocStr.init("foo", 3);
const oo = RocStr.init("oo", 2);
defer foo.deinit();
defer oo.deinit();
expect(endsWith(foo, oo));
}
test "endsWith: 123456789123456789 ends with 123456789123456789" {
const str = RocStr.init(testing.allocator, "123456789123456789", 18);
defer str.deinit(testing.allocator);
const str = RocStr.init("123456789123456789", 18);
defer str.deinit();
expect(endsWith(str, str));
}
test "endsWith: 12345678912345678910 ends with 345678912345678910" {
const str = RocStr.init(testing.allocator, "12345678912345678910", 20);
const suffix = RocStr.init(testing.allocator, "345678912345678910", 18);
defer str.deinit(testing.allocator);
defer suffix.deinit(testing.allocator);
const str = RocStr.init("12345678912345678910", 20);
const suffix = RocStr.init("345678912345678910", 18);
defer str.deinit();
defer suffix.deinit();
expect(endsWith(str, suffix));
}
test "endsWith: hello world ends with world" {
const str = RocStr.init(testing.allocator, "hello world", 11);
const suffix = RocStr.init(testing.allocator, "world", 5);
defer str.deinit(testing.allocator);
defer suffix.deinit(testing.allocator);
const str = RocStr.init("hello world", 11);
const suffix = RocStr.init("world", 5);
defer str.deinit();
defer suffix.deinit();
expect(endsWith(str, suffix));
}
// Str.concat
// When we actually use this in Roc, libc will be linked so we have access to std.heap.c_allocator
pub fn strConcatC(result_in_place: InPlace, arg1: RocStr, arg2: RocStr) callconv(.C) RocStr {
return @call(.{ .modifier = always_inline }, strConcat, .{ std.heap.c_allocator, result_in_place, arg1, arg2 });
return @call(.{ .modifier = always_inline }, strConcat, .{ result_in_place, arg1, arg2 });
}
fn strConcat(allocator: *Allocator, result_in_place: InPlace, arg1: RocStr, arg2: RocStr) RocStr {
fn strConcat(result_in_place: InPlace, arg1: RocStr, arg2: RocStr) RocStr {
if (arg1.isEmpty()) {
// the second argument is borrowed, so we must increment its refcount before returning
return RocStr.clone(allocator, result_in_place, arg2);
return RocStr.clone(result_in_place, arg2);
} else if (arg2.isEmpty()) {
// the first argument is owned, so we can return it without cloning
return arg1;
} else {
const combined_length = arg1.len() + arg2.len();
const alignment = 1;
const element_width = 1;
if (!arg1.isSmallStr() and arg1.isUnique()) {
if (arg1.str_bytes) |source_ptr| {
const new_source = utils.unsafeReallocate(
source_ptr,
allocator,
alignment,
RocStr.alignment,
arg1.len(),
combined_length,
element_width,
@ -954,10 +942,7 @@ fn strConcat(allocator: *Allocator, result_in_place: InPlace, arg1: RocStr, arg2
}
}
var result = arg1.reallocateFresh(
allocator,
combined_length,
);
var result = arg1.reallocateFresh(combined_length);
var result_ptr = result.asU8ptr();
arg1.memcpy(result_ptr);
@ -971,27 +956,27 @@ test "RocStr.concat: small concat small" {
const str1_len = 3;
var str1: [str1_len]u8 = "foo".*;
const str1_ptr: [*]u8 = &str1;
var roc_str1 = RocStr.init(testing.allocator, str1_ptr, str1_len);
var roc_str1 = RocStr.init(str1_ptr, str1_len);
const str2_len = 3;
var str2: [str2_len]u8 = "abc".*;
const str2_ptr: [*]u8 = &str2;
var roc_str2 = RocStr.init(testing.allocator, str2_ptr, str2_len);
var roc_str2 = RocStr.init(str2_ptr, str2_len);
const str3_len = 6;
var str3: [str3_len]u8 = "fooabc".*;
const str3_ptr: [*]u8 = &str3;
var roc_str3 = RocStr.init(testing.allocator, str3_ptr, str3_len);
var roc_str3 = RocStr.init(str3_ptr, str3_len);
defer {
roc_str1.deinit(testing.allocator);
roc_str2.deinit(testing.allocator);
roc_str3.deinit(testing.allocator);
roc_str1.deinit();
roc_str2.deinit();
roc_str3.deinit();
}
const result = strConcat(testing.allocator, InPlace.Clone, roc_str1, roc_str2);
const result = strConcat(InPlace.Clone, roc_str1, roc_str2);
defer result.deinit(testing.allocator);
defer result.deinit();
expect(roc_str3.eq(result));
}
@ -1002,12 +987,11 @@ pub const RocListStr = extern struct {
};
// Str.joinWith
// When we actually use this in Roc, libc will be linked so we have access to std.heap.c_allocator
pub fn strJoinWithC(list: RocListStr, separator: RocStr) callconv(.C) RocStr {
return @call(.{ .modifier = always_inline }, strJoinWith, .{ std.heap.c_allocator, list, separator });
return @call(.{ .modifier = always_inline }, strJoinWith, .{ list, separator });
}
fn strJoinWith(allocator: *Allocator, list: RocListStr, separator: RocStr) RocStr {
fn strJoinWith(list: RocListStr, separator: RocStr) RocStr {
const len = list.list_length;
if (len == 0) {
@ -1025,7 +1009,7 @@ fn strJoinWith(allocator: *Allocator, list: RocListStr, separator: RocStr) RocSt
// include size of the separator
total_size += separator.len() * (len - 1);
var result = RocStr.allocate(allocator, InPlace.Clone, total_size);
var result = RocStr.allocate(InPlace.Clone, total_size);
var result_ptr = result.asU8ptr();
var offset: usize = 0;
@ -1048,45 +1032,45 @@ test "RocStr.joinWith: result is big" {
const sep_len = 2;
var sep: [sep_len]u8 = ", ".*;
const sep_ptr: [*]u8 = &sep;
var roc_sep = RocStr.init(testing.allocator, sep_ptr, sep_len);
var roc_sep = RocStr.init(sep_ptr, sep_len);
const elem_len = 13;
var elem: [elem_len]u8 = "foobarbazspam".*;
const elem_ptr: [*]u8 = &elem;
var roc_elem = RocStr.init(testing.allocator, elem_ptr, elem_len);
var roc_elem = RocStr.init(elem_ptr, elem_len);
const result_len = 43;
var xresult: [result_len]u8 = "foobarbazspam, foobarbazspam, foobarbazspam".*;
const result_ptr: [*]u8 = &xresult;
var roc_result = RocStr.init(testing.allocator, result_ptr, result_len);
var roc_result = RocStr.init(result_ptr, result_len);
var elements: [3]RocStr = .{ roc_elem, roc_elem, roc_elem };
const list = RocListStr{ .list_length = 3, .list_elements = @ptrCast([*]RocStr, &elements) };
defer {
roc_sep.deinit(testing.allocator);
roc_elem.deinit(testing.allocator);
roc_result.deinit(testing.allocator);
roc_sep.deinit();
roc_elem.deinit();
roc_result.deinit();
}
const result = strJoinWith(testing.allocator, list, roc_sep);
const result = strJoinWith(list, roc_sep);
defer result.deinit(testing.allocator);
defer result.deinit();
expect(roc_result.eq(result));
}
// Str.toBytes
pub fn strToBytesC(arg: RocStr) callconv(.C) RocList {
return @call(.{ .modifier = always_inline }, strToBytes, .{ std.heap.c_allocator, arg });
return @call(.{ .modifier = always_inline }, strToBytes, .{arg});
}
fn strToBytes(allocator: *Allocator, arg: RocStr) RocList {
fn strToBytes(arg: RocStr) RocList {
if (arg.isEmpty()) {
return RocList.empty();
} else if (arg.isSmallStr()) {
const length = arg.len();
const ptr = utils.allocateWithRefcount(allocator, @alignOf(usize), length);
const ptr = utils.allocateWithRefcount(length, RocStr.alignment);
@memcpy(ptr, arg.asU8ptr(), length);
@ -1104,25 +1088,25 @@ const FromUtf8Result = extern struct {
};
pub fn fromUtf8C(arg: RocList, output: *FromUtf8Result) callconv(.C) void {
output.* = @call(.{ .modifier = always_inline }, fromUtf8, .{ std.heap.c_allocator, arg });
output.* = @call(.{ .modifier = always_inline }, fromUtf8, .{arg});
}
fn fromUtf8(allocator: *Allocator, arg: RocList) FromUtf8Result {
fn fromUtf8(arg: RocList) FromUtf8Result {
const bytes = @ptrCast([*]const u8, arg.bytes)[0..arg.length];
if (unicode.utf8ValidateSlice(bytes)) {
// the output will be correct. Now we need to take ownership of the input
if (arg.len() <= SMALL_STR_MAX_LENGTH) {
// turn the bytes into a small string
const string = RocStr.init(allocator, @ptrCast([*]u8, arg.bytes), arg.len());
const string = RocStr.init(@ptrCast([*]u8, arg.bytes), arg.len());
// then decrement the input list
const data_bytes = arg.len();
utils.decref(allocator, @alignOf(usize), arg.bytes, data_bytes);
utils.decref(arg.bytes, data_bytes, RocStr.alignment);
return FromUtf8Result{ .is_ok = true, .string = string, .byte_index = 0, .problem_code = Utf8ByteProblem.InvalidStartByte };
} else {
const byte_list = arg.makeUnique(allocator, @alignOf(usize), @sizeOf(u8));
const byte_list = arg.makeUnique(RocStr.alignment, @sizeOf(u8));
const string = RocStr{ .str_bytes = byte_list.bytes, .str_len = byte_list.length };
@ -1133,7 +1117,7 @@ fn fromUtf8(allocator: *Allocator, arg: RocList) FromUtf8Result {
// consume the input list
const data_bytes = arg.len();
utils.decref(allocator, @alignOf(usize), arg.bytes, data_bytes);
utils.decref(arg.bytes, data_bytes, RocStr.alignment);
return FromUtf8Result{ .is_ok = false, .string = RocStr.empty(), .byte_index = temp.index, .problem_code = temp.problem };
}
@ -1200,11 +1184,11 @@ pub const Utf8ByteProblem = packed enum(u8) {
};
fn validateUtf8Bytes(bytes: [*]u8, length: usize) FromUtf8Result {
return fromUtf8(std.testing.allocator, RocList{ .bytes = bytes, .length = length });
return fromUtf8(RocList{ .bytes = bytes, .length = length });
}
fn validateUtf8BytesX(str: RocList) FromUtf8Result {
return fromUtf8(std.testing.allocator, str);
return fromUtf8(str);
}
fn expectOk(result: FromUtf8Result) void {
@ -1212,7 +1196,7 @@ fn expectOk(result: FromUtf8Result) void {
}
fn sliceHelp(bytes: [*]const u8, length: usize) RocList {
var list = RocList.allocate(testing.allocator, @alignOf(usize), length, @sizeOf(u8));
var list = RocList.allocate(RocStr.alignment, length, @sizeOf(u8));
@memcpy(list.bytes orelse unreachable, bytes, length);
list.length = length;


@ -1,5 +1,53 @@
const std = @import("std");
const Allocator = std.mem.Allocator;
const always_inline = std.builtin.CallOptions.Modifier.always_inline;
// If allocation fails, this must cxa_throw - it must not return a null pointer!
extern fn roc_alloc(size: usize, alignment: u32) callconv(.C) ?*c_void;
// This should never be passed a null pointer.
// If allocation fails, this must cxa_throw - it must not return a null pointer!
extern fn roc_realloc(c_ptr: *c_void, new_size: usize, old_size: usize, alignment: u32) callconv(.C) ?*c_void;
// This should never be passed a null pointer.
extern fn roc_dealloc(c_ptr: *c_void, alignment: u32) callconv(.C) void;
comptime {
// During tests, use the testing allocators to satisfy these functions.
if (std.builtin.is_test) {
@export(testing_roc_alloc, .{ .name = "roc_alloc", .linkage = .Strong });
@export(testing_roc_realloc, .{ .name = "roc_realloc", .linkage = .Strong });
@export(testing_roc_dealloc, .{ .name = "roc_dealloc", .linkage = .Strong });
}
}
fn testing_roc_alloc(size: usize, alignment: u32) callconv(.C) ?*c_void {
return @ptrCast(?*c_void, std.testing.allocator.alloc(u8, size) catch unreachable);
}
fn testing_roc_realloc(c_ptr: *c_void, new_size: usize, old_size: usize, alignment: u32) callconv(.C) ?*c_void {
const ptr = @ptrCast([*]u8, @alignCast(16, c_ptr));
const slice = ptr[0..old_size];
return @ptrCast(?*c_void, std.testing.allocator.realloc(slice, new_size) catch unreachable);
}
fn testing_roc_dealloc(c_ptr: *c_void, alignment: u32) callconv(.C) void {
const ptr = @ptrCast([*]u8, @alignCast(16, c_ptr));
std.testing.allocator.destroy(ptr);
}
pub fn alloc(size: usize, alignment: u32) [*]u8 {
return @ptrCast([*]u8, @call(.{ .modifier = always_inline }, roc_alloc, .{ size, alignment }));
}
pub fn realloc(c_ptr: [*]u8, new_size: usize, old_size: usize, alignment: u32) [*]u8 {
return @ptrCast([*]u8, @call(.{ .modifier = always_inline }, roc_realloc, .{ c_ptr, new_size, old_size, alignment }));
}
pub fn dealloc(c_ptr: [*]u8, alignment: u32) void {
return @call(.{ .modifier = always_inline }, roc_dealloc, .{ c_ptr, alignment });
}
pub const Inc = fn (?[*]u8) callconv(.C) void;
pub const IncN = fn (?[*]u8, u64) callconv(.C) void;
@ -62,10 +110,9 @@ pub fn intWidth(width: IntWidth) anytype {
}
pub fn decref(
allocator: *Allocator,
alignment: usize,
bytes_or_null: ?[*]u8,
data_bytes: usize,
alignment: u32,
) void {
if (data_bytes == 0) {
return;
@ -81,7 +128,7 @@ pub fn decref(
switch (alignment) {
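        // The refcount word lives immediately before the data. How far back the
        // underlying allocation starts depends on alignment: 16 bytes for
        // 16-byte-aligned data, otherwise one usize-sized word.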
16 => {
if (refcount == REFCOUNT_ONE_ISIZE) {
allocator.free((bytes - 16)[0 .. 16 + data_bytes]);
dealloc(bytes - 16, alignment);
} else if (refcount_isize < 0) {
(isizes - 1)[0] = refcount - 1;
}
@ -89,7 +136,7 @@ pub fn decref(
else => {
// NOTE enums can currently have an alignment of < 8
if (refcount == REFCOUNT_ONE_ISIZE) {
allocator.free((bytes - 8)[0 .. 8 + data_bytes]);
dealloc(bytes - 8, alignment);
} else if (refcount_isize < 0) {
(isizes - 1)[0] = refcount - 1;
}
@ -98,9 +145,8 @@ pub fn decref(
}
pub fn allocateWithRefcount(
allocator: *Allocator,
alignment: usize,
data_bytes: usize,
alignment: u32,
) [*]u8 {
comptime const result_in_place = false;
@ -108,7 +154,7 @@ pub fn allocateWithRefcount(
16 => {
const length = 2 * @sizeOf(usize) + data_bytes;
var new_bytes: []align(16) u8 = allocator.alignedAlloc(u8, 16, length) catch unreachable;
var new_bytes: [*]align(16) u8 = @alignCast(16, alloc(length, alignment));
var as_usize_array = @ptrCast([*]usize, new_bytes);
if (result_in_place) {
@ -127,13 +173,13 @@ pub fn allocateWithRefcount(
else => {
const length = @sizeOf(usize) + data_bytes;
var new_bytes: []align(8) u8 = allocator.alignedAlloc(u8, 8, length) catch unreachable;
var new_bytes: [*]align(8) u8 = @alignCast(8, alloc(length, alignment));
var as_usize_array = @ptrCast([*]isize, new_bytes);
var as_isize_array = @ptrCast([*]isize, new_bytes);
if (result_in_place) {
as_usize_array[0] = @intCast(isize, number_of_slots);
as_isize_array[0] = @intCast(isize, number_of_slots);
} else {
as_usize_array[0] = REFCOUNT_ONE_ISIZE;
as_isize_array[0] = REFCOUNT_ONE_ISIZE;
}
var as_u8_array = @ptrCast([*]u8, new_bytes);
@ -146,8 +192,7 @@ pub fn allocateWithRefcount(
pub fn unsafeReallocate(
source_ptr: [*]u8,
allocator: *Allocator,
alignment: usize,
alignment: u32,
old_length: usize,
new_length: usize,
element_width: usize,
@ -165,8 +210,8 @@ pub fn unsafeReallocate(
// TODO handle out of memory
// NOTE realloc will dealloc the original allocation
const old_allocation = (source_ptr - align_width)[0..old_width];
const new_allocation = allocator.realloc(old_allocation, new_width) catch unreachable;
const old_allocation = source_ptr - align_width;
const new_allocation = realloc(old_allocation, new_width, old_width, alignment);
const new_source = @ptrCast([*]u8, new_allocation) + align_width;
return new_source;


@ -39,8 +39,8 @@ use inkwell::passes::{PassManager, PassManagerBuilder};
use inkwell::types::{BasicTypeEnum, FunctionType, IntType, StructType};
use inkwell::values::BasicValueEnum::{self, *};
use inkwell::values::{
BasicValue, CallSiteValue, FloatValue, FunctionValue, InstructionOpcode, IntValue,
PointerValue, StructValue,
BasicValue, CallSiteValue, FloatValue, FunctionValue, InstructionOpcode, InstructionValue,
IntValue, PointerValue, StructValue,
};
use inkwell::OptimizationLevel;
use inkwell::{AddressSpace, IntPredicate};
@ -223,6 +223,55 @@ impl<'a, 'ctx, 'env> Env<'a, 'ctx, 'env> {
})
}
pub fn alignment_type(&self) -> IntType<'ctx> {
self.context.i32_type()
}
pub fn alignment_const(&self, alignment: u32) -> IntValue<'ctx> {
self.alignment_type().const_int(alignment as u64, false)
}
pub fn alignment_intvalue(&self, element_layout: &Layout<'a>) -> BasicValueEnum<'ctx> {
let alignment = element_layout.alignment_bytes(self.ptr_bytes);
let alignment_iv = self.alignment_const(alignment);
alignment_iv.into()
}
pub fn call_alloc(
&self,
number_of_bytes: IntValue<'ctx>,
alignment: u32,
) -> PointerValue<'ctx> {
let function = self.module.get_function("roc_alloc").unwrap();
let alignment = self.alignment_const(alignment);
let call = self.builder.build_call(
function,
&[number_of_bytes.into(), alignment.into()],
"roc_alloc",
);
call.set_call_convention(C_CALL_CONV);
call.try_as_basic_value()
.left()
.unwrap()
.into_pointer_value()
// TODO check if alloc returned null; if so, runtime error for OOM!
}
pub fn call_dealloc(&self, ptr: PointerValue<'ctx>, alignment: u32) -> InstructionValue<'ctx> {
let function = self.module.get_function("roc_dealloc").unwrap();
let alignment = self.alignment_const(alignment);
let call =
self.builder
.build_call(function, &[ptr.into(), alignment.into()], "roc_dealloc");
call.set_call_convention(C_CALL_CONV);
call.try_as_basic_value().right().unwrap()
}
pub fn call_memset(
&self,
bytes_ptr: PointerValue<'ctx>,
@ -322,7 +371,6 @@ fn add_intrinsics<'ctx>(ctx: &'ctx Context, module: &Module<'ctx>) {
// List of all supported LLVM intrinsics:
//
// https://releases.llvm.org/10.0.0/docs/LangRef.html#standard-c-library-intrinsics
let void_type = ctx.void_type();
let i1_type = ctx.bool_type();
let f64_type = ctx.f64_type();
let i128_type = ctx.i128_type();
@ -330,41 +378,6 @@ fn add_intrinsics<'ctx>(ctx: &'ctx Context, module: &Module<'ctx>) {
let i32_type = ctx.i32_type();
let i16_type = ctx.i16_type();
let i8_type = ctx.i8_type();
let i8_ptr_type = i8_type.ptr_type(AddressSpace::Generic);
add_intrinsic(
module,
LLVM_MEMSET_I64,
void_type.fn_type(
&[
i8_ptr_type.into(),
i8_type.into(),
i64_type.into(),
i1_type.into(),
],
false,
),
);
add_intrinsic(
module,
LLVM_MEMSET_I32,
void_type.fn_type(
&[
i8_ptr_type.into(),
i8_type.into(),
i32_type.into(),
i1_type.into(),
],
false,
),
);
add_intrinsic(
module,
LLVM_SQRT_F64,
f64_type.fn_type(&[f64_type.into()], false),
);
add_intrinsic(
module,
@ -477,14 +490,6 @@ fn add_intrinsics<'ctx>(ctx: &'ctx Context, module: &Module<'ctx>) {
ctx.struct_type(&fields, false)
.fn_type(&[i128_type.into(), i128_type.into()], false)
});
// mul with overflow
add_intrinsic(module, LLVM_SMUL_WITH_OVERFLOW_I64, {
let fields = [i64_type.into(), i1_type.into()];
ctx.struct_type(&fields, false)
.fn_type(&[i64_type.into(), i64_type.into()], false)
});
}
static LLVM_MEMSET_I64: &str = "llvm.memset.p0i8.i64";
@ -518,13 +523,15 @@ fn add_intrinsic<'ctx>(
intrinsic_name: &'static str,
fn_type: FunctionType<'ctx>,
) -> FunctionValue<'ctx> {
let fn_val = module.add_function(intrinsic_name, fn_type, None);
// LLVM intrinsics always use the C calling convention, because
// they are implemented in C libraries
fn_val.set_call_conventions(C_CALL_CONV);
fn_val
add_func(
module,
intrinsic_name,
fn_type,
Linkage::External,
// LLVM intrinsics always use the C calling convention, because
// they are implemented in C libraries
C_CALL_CONV,
)
}
pub fn construct_optimization_passes<'a>(
@ -1706,11 +1713,7 @@ pub fn allocate_with_refcount_help<'a, 'ctx, 'env>(
"add_extra_bytes",
);
env.builder
.build_array_malloc(ctx.i8_type(), number_of_bytes, "create_ptr")
.unwrap()
// TODO check if malloc returned null; if so, runtime error for OOM!
env.call_alloc(number_of_bytes, layout.alignment_bytes(env.ptr_bytes))
};
// We must return a pointer to the first element:
@ -1743,7 +1746,7 @@ pub fn allocate_with_refcount_help<'a, 'ctx, 'env>(
"get_data_ptr",
),
ptr_type,
"malloc_cast_to_desired",
"alloc_cast_to_desired",
)
.into_pointer_value()
}
@ -1751,12 +1754,12 @@ pub fn allocate_with_refcount_help<'a, 'ctx, 'env>(
let refcount_ptr = match extra_bytes {
n if n == env.ptr_bytes => {
// the malloced pointer is the same as the refcounted pointer
// the allocated pointer is the same as the refcounted pointer
unsafe { PointerToRefcount::from_ptr(env, ptr) }
}
n if n == 2 * env.ptr_bytes => {
// the refcount is stored just before the start of the actual data
// but in this case (because of alignment) not at the start of the malloced buffer
// but in this case (because of alignment) not at the start of the allocated buffer
PointerToRefcount::from_ptr_to_data(env, data_ptr)
}
n => unreachable!("invalid extra_bytes {}", n),
@ -1785,8 +1788,6 @@ fn list_literal<'a, 'ctx, 'env>(
let len = len_type.const_int(len_u64, false);
allocate_list(env, inplace, elem_layout, len)
// TODO check if malloc returned null; if so, runtime error for OOM!
};
// Copy the elements from the list literal into the array
@ -2715,9 +2716,13 @@ fn expose_function_to_host_help<'a, 'ctx, 'env>(
let c_function_type = env.context.void_type().fn_type(&argument_types, false);
let c_function =
env.module
.add_function(c_function_name, c_function_type, Some(Linkage::External));
let c_function = add_func(
env.module,
c_function_name,
c_function_type,
Linkage::External,
C_CALL_CONV,
);
let subprogram = env.new_subprogram(c_function_name);
c_function.set_subprogram(subprogram);
@ -2759,10 +2764,12 @@ fn expose_function_to_host_help<'a, 'ctx, 'env>(
let size_function_name: String =
format!("roc_{}_size", roc_function.get_name().to_str().unwrap());
let size_function = env.module.add_function(
let size_function = add_func(
env.module,
size_function_name.as_str(),
size_function_type,
Some(Linkage::External),
Linkage::External,
C_CALL_CONV,
);
let subprogram = env.new_subprogram(&size_function_name);
@ -2968,9 +2975,13 @@ fn make_exception_catching_wrapper<'a, 'ctx, 'env>(
let wrapper_function_type = wrapper_return_type.fn_type(&argument_types, false);
// Add main to the module.
let wrapper_function =
env.module
.add_function(&wrapper_function_name, wrapper_function_type, None);
let wrapper_function = add_func(
env.module,
&wrapper_function_name,
wrapper_function_type,
Linkage::External,
C_CALL_CONV,
);
let subprogram = env.new_subprogram(wrapper_function_name);
wrapper_function.set_subprogram(subprogram);
@ -3031,11 +3042,13 @@ pub fn build_proc_header<'a, 'ctx, 'env>(
let fn_type = get_fn_type(&ret_type, &arg_basic_types);
let fn_val = env
.module
.add_function(fn_name.as_str(), fn_type, Some(Linkage::Private));
fn_val.set_call_conventions(FAST_CALL_CONV);
let fn_val = add_func(
env.module,
fn_name.as_str(),
fn_type,
Linkage::Private,
FAST_CALL_CONV,
);
let subprogram = env.new_subprogram(&fn_name);
fn_val.set_subprogram(subprogram);
@ -3096,14 +3109,14 @@ pub fn build_closure_caller<'a, 'ctx, 'env>(
let function_type = context.void_type().fn_type(&argument_types, false);
let function_value = env.module.add_function(
let function_value = add_func(
env.module,
function_name.as_str(),
function_type,
Some(Linkage::External),
Linkage::External,
C_CALL_CONV,
);
function_value.set_call_conventions(C_CALL_CONV);
// STEP 2: build function body
let entry = context.append_basic_block(function_value, "entry");
@ -3205,14 +3218,14 @@ fn build_function_caller<'a, 'ctx, 'env>(
let function_type = context.void_type().fn_type(&argument_types, false);
let function_value = env.module.add_function(
let function_value = add_func(
env.module,
function_name.as_str(),
function_type,
Some(Linkage::External),
Linkage::External,
C_CALL_CONV,
);
function_value.set_call_conventions(C_CALL_CONV);
// STEP 2: build function body
let entry = context.append_basic_block(function_value, "entry");
@ -3301,10 +3314,12 @@ fn build_host_exposed_alias_size_help<'a, 'ctx, 'env>(
)
};
let size_function = env.module.add_function(
let size_function = add_func(
env.module,
size_function_name.as_str(),
size_function_type,
Some(Linkage::External),
Linkage::External,
C_CALL_CONV,
);
let entry = context.append_basic_block(size_function, "entry");
@ -5729,12 +5744,13 @@ fn cxa_allocate_exception<'a, 'ctx, 'env>(
Some(gvalue) => gvalue,
None => {
// void *__cxa_allocate_exception(size_t thrown_size);
let cxa_allocate_exception = module.add_function(
let cxa_allocate_exception = add_func(
module,
name,
u8_ptr.fn_type(&[context.i64_type().into()], false),
Some(Linkage::External),
Linkage::External,
C_CALL_CONV,
);
cxa_allocate_exception.set_call_conventions(C_CALL_CONV);
cxa_allocate_exception
}
@ -5762,14 +5778,15 @@ fn cxa_throw_exception<'a, 'ctx, 'env>(env: &Env<'a, 'ctx, 'env>, info: BasicVal
Some(value) => value,
None => {
// void __cxa_throw (void *thrown_exception, std::type_info *tinfo, void (*dest) (void *) );
let cxa_throw = module.add_function(
let cxa_throw = add_func(
module,
name,
context
.void_type()
.fn_type(&[u8_ptr.into(), u8_ptr.into(), u8_ptr.into()], false),
Some(Linkage::External),
Linkage::External,
C_CALL_CONV,
);
cxa_throw.set_call_conventions(C_CALL_CONV);
cxa_throw
}
@ -5803,12 +5820,13 @@ fn cxa_rethrow_exception(env: &Env<'_, '_, '_>) {
let function = match module.get_function(&name) {
Some(gvalue) => gvalue,
None => {
let cxa_rethrow = module.add_function(
let cxa_rethrow = add_func(
module,
name,
context.void_type().fn_type(&[], false),
Some(Linkage::External),
Linkage::External,
C_CALL_CONV,
);
cxa_rethrow.set_call_conventions(C_CALL_CONV);
cxa_rethrow
}
@ -5829,12 +5847,13 @@ fn get_foreign_symbol<'a, 'ctx, 'env>(
match module.get_function(foreign_symbol.as_str()) {
Some(gvalue) => gvalue,
None => {
let foreign_function = module.add_function(
let foreign_function = add_func(
module,
foreign_symbol.as_str(),
function_type,
Some(Linkage::External),
Linkage::External,
C_CALL_CONV,
);
foreign_function.set_call_conventions(C_CALL_CONV);
foreign_function
}
@ -5850,12 +5869,13 @@ fn get_gxx_personality_v0<'a, 'ctx, 'env>(env: &Env<'a, 'ctx, 'env>) -> Function
match module.get_function(&name) {
Some(gvalue) => gvalue,
None => {
let personality_func = module.add_function(
let personality_func = add_func(
module,
"__gxx_personality_v0",
context.i64_type().fn_type(&[], false),
Some(Linkage::External),
Linkage::External,
C_CALL_CONV,
);
personality_func.set_call_conventions(C_CALL_CONV);
personality_func
}
@ -5871,12 +5891,13 @@ fn cxa_end_catch(env: &Env<'_, '_, '_>) {
let function = match module.get_function(&name) {
Some(gvalue) => gvalue,
None => {
let cxa_end_catch = module.add_function(
let cxa_end_catch = add_func(
module,
name,
context.void_type().fn_type(&[], false),
Some(Linkage::External),
Linkage::External,
C_CALL_CONV,
);
cxa_end_catch.set_call_conventions(C_CALL_CONV);
cxa_end_catch
}
@ -5900,12 +5921,13 @@ fn cxa_begin_catch<'a, 'ctx, 'env>(
None => {
let u8_ptr = context.i8_type().ptr_type(AddressSpace::Generic);
let cxa_begin_catch = module.add_function(
let cxa_begin_catch = add_func(
module,
"__cxa_begin_catch",
u8_ptr.fn_type(&[u8_ptr.into()], false),
Some(Linkage::External),
Linkage::External,
C_CALL_CONV,
);
cxa_begin_catch.set_call_conventions(C_CALL_CONV);
cxa_begin_catch
}
@ -5917,3 +5939,26 @@ fn cxa_begin_catch<'a, 'ctx, 'env>(
call.set_call_convention(C_CALL_CONV);
call.try_as_basic_value().left().unwrap()
}
/// Add a function to a module, after asserting that the function is unique.
/// We never want to define the same function twice in the same module!
/// The result can be bugs that are difficult to track down.
pub fn add_func<'ctx>(
module: &Module<'ctx>,
name: &str,
typ: FunctionType<'ctx>,
linkage: Linkage,
call_conv: u32,
) -> FunctionValue<'ctx> {
if cfg!(debug_assertions) {
if let Some(func) = module.get_function(name) {
panic!("Attempting to redefine LLVM function {}, which was already defined in this module as:\n\n{:?}", name, func);
}
}
let fn_val = module.add_function(name, typ, Some(linkage));
fn_val.set_call_conventions(call_conv);
fn_val
}
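For reference, a minimal sketch of the call pattern this helper enables (hypothetical module and function names, not from this diff):

    use inkwell::context::Context;
    use inkwell::module::Linkage;

    let context = Context::create();
    let module = context.create_module("example");
    // void example_fn(void): private to this module, fast calling convention.
    let fn_type = context.void_type().fn_type(&[], false);
    let fn_val = add_func(&module, "example_fn", fn_type, Linkage::Private, FAST_CALL_CONV);
    // No separate set_call_conventions call is needed afterwards:
    debug_assert_eq!(fn_val.get_call_conventions(), FAST_CALL_CONV);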


@ -18,16 +18,6 @@ use inkwell::{AddressSpace, IntPredicate};
use roc_builtins::bitcode;
use roc_mono::layout::{Builtin, Layout, LayoutIds, MemoryMode};
fn alignment_intvalue<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
element_layout: &Layout<'a>,
) -> BasicValueEnum<'ctx> {
let alignment = element_layout.alignment_bytes(env.ptr_bytes);
let alignment_iv = env.ptr_int().const_int(alignment as u64, false);
alignment_iv.into()
}
fn list_returned_from_zig<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
output: BasicValueEnum<'ctx>,
@ -102,7 +92,7 @@ pub fn list_single<'a, 'ctx, 'env>(
call_bitcode_fn_returns_list(
env,
&[
alignment_intvalue(env, element_layout),
env.alignment_intvalue(element_layout),
pass_element_as_opaque(env, element),
layout_width(env, element_layout),
],
@ -124,7 +114,7 @@ pub fn list_repeat<'a, 'ctx, 'env>(
env,
&[
list_len.into(),
alignment_intvalue(env, element_layout),
env.alignment_intvalue(element_layout),
pass_element_as_opaque(env, element),
layout_width(env, element_layout),
inc_element_fn.as_global_value().as_pointer_value().into(),
@ -185,7 +175,7 @@ pub fn list_prepend<'a, 'ctx, 'env>(
if elem_layout.safe_to_memcpy() {
// Copy the bytes from the original array into the new
// one we just malloc'd.
// one we just allocated
//
// TODO how do we decide when to do the small memcpy vs the normal one?
builder
@ -218,7 +208,7 @@ pub fn list_join<'a, 'ctx, 'env>(
env,
&[
pass_list_as_i128(env, outer_list),
alignment_intvalue(env, element_layout),
env.alignment_intvalue(element_layout),
layout_width(env, element_layout),
],
&bitcode::LIST_JOIN,
@ -258,7 +248,7 @@ pub fn list_reverse<'a, 'ctx, 'env>(
env,
&[
pass_list_as_i128(env, list),
alignment_intvalue(env, &element_layout),
env.alignment_intvalue(&element_layout),
layout_width(env, &element_layout),
],
&bitcode::LIST_REVERSE,
@ -314,7 +304,7 @@ pub fn list_append<'a, 'ctx, 'env>(
env,
&[
pass_list_as_i128(env, original_wrapper.into()),
alignment_intvalue(env, &element_layout),
env.alignment_intvalue(&element_layout),
pass_element_as_opaque(env, element),
layout_width(env, element_layout),
],
@ -335,7 +325,7 @@ pub fn list_drop<'a, 'ctx, 'env>(
env,
&[
pass_list_as_i128(env, original_wrapper.into()),
alignment_intvalue(env, &element_layout),
env.alignment_intvalue(&element_layout),
layout_width(env, &element_layout),
count.into(),
dec_element_fn.as_global_value().as_pointer_value().into(),
@ -344,7 +334,7 @@ pub fn list_drop<'a, 'ctx, 'env>(
)
}
/// List.set : List elem, Int, elem -> List elem
/// List.set : List elem, Nat, elem -> List elem
pub fn list_set<'a, 'ctx, 'env>(
parent: FunctionValue<'ctx>,
args: &[(BasicValueEnum<'ctx>, &'a Layout<'a>)],
@ -501,7 +491,7 @@ pub fn list_walk_generic<'a, 'ctx, 'env>(
roc_function_call.inc_n_data.into(),
roc_function_call.data_is_owned.into(),
pass_as_opaque(env, default_ptr),
alignment_intvalue(env, &element_layout),
env.alignment_intvalue(&element_layout),
layout_width(env, element_layout),
layout_width(env, default_layout),
pass_as_opaque(env, result_ptr),
@ -520,7 +510,7 @@ pub fn list_walk_generic<'a, 'ctx, 'env>(
roc_function_call.inc_n_data.into(),
roc_function_call.data_is_owned.into(),
pass_as_opaque(env, default_ptr),
alignment_intvalue(env, &element_layout),
env.alignment_intvalue(&element_layout),
layout_width(env, element_layout),
layout_width(env, default_layout),
dec_element_fn.as_global_value().as_pointer_value().into(),
@ -642,7 +632,7 @@ pub fn list_keep_if<'a, 'ctx, 'env>(
pass_as_opaque(env, roc_function_call.data),
roc_function_call.inc_n_data.into(),
roc_function_call.data_is_owned.into(),
alignment_intvalue(env, &element_layout),
env.alignment_intvalue(&element_layout),
layout_width(env, element_layout),
inc_element_fn.as_global_value().as_pointer_value().into(),
dec_element_fn.as_global_value().as_pointer_value().into(),
@ -678,7 +668,7 @@ pub fn list_keep_oks<'a, 'ctx, 'env>(
pass_as_opaque(env, roc_function_call.data),
roc_function_call.inc_n_data.into(),
roc_function_call.data_is_owned.into(),
alignment_intvalue(env, &before_layout),
env.alignment_intvalue(&before_layout),
layout_width(env, before_layout),
layout_width(env, result_layout),
layout_width(env, after_layout),
@ -715,7 +705,7 @@ pub fn list_keep_errs<'a, 'ctx, 'env>(
pass_as_opaque(env, roc_function_call.data),
roc_function_call.inc_n_data.into(),
roc_function_call.data_is_owned.into(),
alignment_intvalue(env, &before_layout),
env.alignment_intvalue(&before_layout),
layout_width(env, before_layout),
layout_width(env, result_layout),
layout_width(env, after_layout),
@ -762,7 +752,7 @@ pub fn list_keep_result<'a, 'ctx, 'env>(
pass_list_as_i128(env, list),
pass_as_opaque(env, closure_data_ptr),
stepper_caller.into(),
alignment_intvalue(env, &before_layout),
env.alignment_intvalue(&before_layout),
layout_width(env, before_layout),
layout_width(env, after_layout),
layout_width(env, result_layout),
@ -789,7 +779,7 @@ pub fn list_sort_with<'a, 'ctx, 'env>(
pass_as_opaque(env, roc_function_call.data),
roc_function_call.inc_n_data.into(),
roc_function_call.data_is_owned.into(),
alignment_intvalue(env, &element_layout),
env.alignment_intvalue(&element_layout),
layout_width(env, element_layout),
],
bitcode::LIST_SORT_WITH,
@ -812,7 +802,7 @@ pub fn list_map_with_index<'a, 'ctx, 'env>(
pass_as_opaque(env, roc_function_call.data),
roc_function_call.inc_n_data.into(),
roc_function_call.data_is_owned.into(),
alignment_intvalue(env, &element_layout),
env.alignment_intvalue(&element_layout),
layout_width(env, element_layout),
layout_width(env, return_layout),
],
@ -836,7 +826,7 @@ pub fn list_map<'a, 'ctx, 'env>(
pass_as_opaque(env, roc_function_call.data),
roc_function_call.inc_n_data.into(),
roc_function_call.data_is_owned.into(),
alignment_intvalue(env, &element_layout),
env.alignment_intvalue(&element_layout),
layout_width(env, element_layout),
layout_width(env, return_layout),
],
@ -866,7 +856,7 @@ pub fn list_map2<'a, 'ctx, 'env>(
pass_as_opaque(env, roc_function_call.data),
roc_function_call.inc_n_data.into(),
roc_function_call.data_is_owned.into(),
alignment_intvalue(env, return_layout),
env.alignment_intvalue(return_layout),
layout_width(env, element1_layout),
layout_width(env, element2_layout),
layout_width(env, return_layout),
@ -903,7 +893,7 @@ pub fn list_map3<'a, 'ctx, 'env>(
pass_as_opaque(env, roc_function_call.data),
roc_function_call.inc_n_data.into(),
roc_function_call.data_is_owned.into(),
alignment_intvalue(env, result_layout),
env.alignment_intvalue(result_layout),
layout_width(env, element1_layout),
layout_width(env, element2_layout),
layout_width(env, element3_layout),
@ -936,7 +926,7 @@ pub fn list_concat<'a, 'ctx, 'env>(
&[
pass_list_as_i128(env, first_list),
pass_list_as_i128(env, second_list),
alignment_intvalue(env, elem_layout),
env.alignment_intvalue(elem_layout),
layout_width(env, elem_layout),
],
&bitcode::LIST_CONCAT,
@ -1204,12 +1194,10 @@ fn clone_nonempty_list<'a, 'ctx, 'env>(
// Allocate space for the new array that we'll copy into.
let clone_ptr = allocate_list(env, inplace, elem_layout, list_len);
// TODO check if malloc returned null; if so, runtime error for OOM!
// Either memcpy or deep clone the array elements
if elem_layout.safe_to_memcpy() {
// Copy the bytes from the original array into the new
// one we just malloc'd.
// one we just allocated
//
// TODO how do we decide when to do the small memcpy vs the normal one?
builder


@ -0,0 +1,141 @@
use crate::llvm::build::{add_func, set_name, C_CALL_CONV};
use crate::llvm::convert::ptr_int;
use inkwell::builder::Builder;
use inkwell::context::Context;
use inkwell::module::{Linkage, Module};
use inkwell::AddressSpace;
/// Define functions for roc_alloc, roc_realloc, and roc_dealloc
/// which use libc implementations (malloc, realloc, and free)
pub fn add_default_roc_externs<'ctx>(
ctx: &'ctx Context,
module: &Module<'ctx>,
builder: &Builder<'ctx>,
ptr_bytes: u32,
) {
let usize_type = ptr_int(ctx, ptr_bytes);
let i8_ptr_type = ctx.i8_type().ptr_type(AddressSpace::Generic);
// roc_alloc
{
// The type of this function (but not the implementation) should have
// already been defined by the builtins, which rely on it.
let fn_val = module.get_function("roc_alloc").unwrap();
let mut params = fn_val.get_param_iter();
let size_arg = params.next().unwrap();
let _alignment_arg = params.next().unwrap();
debug_assert!(params.next().is_none());
// Add a basic block for the entry point
let entry = ctx.append_basic_block(fn_val, "entry");
builder.position_at_end(entry);
// Call libc malloc()
let retval = builder
.build_array_malloc(ctx.i8_type(), size_arg.into_int_value(), "call_malloc")
.unwrap();
builder.build_return(Some(&retval));
if cfg!(debug_assertions) {
crate::llvm::build::verify_fn(fn_val);
}
}
// roc_realloc
{
let libc_realloc_val = {
let fn_val = add_func(
module,
"realloc",
i8_ptr_type.fn_type(
&[
// ptr: *void
i8_ptr_type.into(),
// size: usize
usize_type.into(),
],
false,
),
Linkage::External,
C_CALL_CONV,
);
let mut params = fn_val.get_param_iter();
let ptr_arg = params.next().unwrap();
let size_arg = params.next().unwrap();
debug_assert!(params.next().is_none());
set_name(ptr_arg, "ptr");
set_name(size_arg, "size");
if cfg!(debug_assertions) {
crate::llvm::build::verify_fn(fn_val);
}
fn_val
};
// The type of this function (but not the implementation) should have
// already been defined by the builtins, which rely on it.
let fn_val = module.get_function("roc_realloc").unwrap();
let mut params = fn_val.get_param_iter();
let ptr_arg = params.next().unwrap();
let new_size_arg = params.next().unwrap();
let _old_size_arg = params.next().unwrap();
let _alignment_arg = params.next().unwrap();
debug_assert!(params.next().is_none());
// Add a basic block for the entry point
let entry = ctx.append_basic_block(fn_val, "entry");
builder.position_at_end(entry);
// Call libc realloc()
let call = builder.build_call(
libc_realloc_val,
&[ptr_arg, new_size_arg],
"call_libc_realloc",
);
call.set_call_convention(C_CALL_CONV);
let retval = call.try_as_basic_value().left().unwrap();
builder.build_return(Some(&retval));
if cfg!(debug_assertions) {
crate::llvm::build::verify_fn(fn_val);
}
}
// roc_dealloc
{
// The type of this function (but not the implementation) should have
// already been defined by the builtins, which rely on it.
let fn_val = module.get_function("roc_dealloc").unwrap();
let mut params = fn_val.get_param_iter();
let ptr_arg = params.next().unwrap();
let _alignment_arg = params.next().unwrap();
debug_assert!(params.next().is_none());
// Add a basic block for the entry point
let entry = ctx.append_basic_block(fn_val, "entry");
builder.position_at_end(entry);
// Call libc free()
builder.build_free(ptr_arg.into_pointer_value());
builder.build_return(None);
if cfg!(debug_assertions) {
crate::llvm::build::verify_fn(fn_val);
}
}
}
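Anything that produces a module without a platform to link against (the repl, the test helpers later in this diff) is expected to call this right after creating the module and builder. A sketch of that setup, assuming a ptr_bytes value for the target:

    let context = Context::create();
    let module = roc_gen::llvm::build::module_from_builtins(&context, "app");
    let builder = context.create_builder();
    add_default_roc_externs(&context, &module, &builder, ptr_bytes);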


@ -6,4 +6,5 @@ pub mod build_list;
pub mod build_str;
pub mod compare;
pub mod convert;
pub mod externs;
pub mod refcounting;


@ -1,6 +1,6 @@
use crate::debug_info_init;
use crate::llvm::build::{
cast_basic_basic, cast_block_of_memory_to_tag, set_name, Env, FAST_CALL_CONV,
add_func, cast_basic_basic, cast_block_of_memory_to_tag, set_name, Env, FAST_CALL_CONV,
LLVM_SADD_WITH_OVERFLOW_I64,
};
use crate::llvm::build_list::{incrementing_elem_loop, list_len, load_list};
@ -40,8 +40,8 @@ impl<'ctx> PointerToRefcount<'ctx> {
/// # Safety
///
/// the invariant is that the given pointer really points to the refcount,
/// not the data, and only is the start of the malloced buffer if the alignment
/// works out that way.
/// not the data, and only is the start of the allocated buffer if the
/// alignment works out that way.
pub unsafe fn from_ptr<'a, 'env>(env: &Env<'a, 'ctx, 'env>, ptr: PointerValue<'ctx>) -> Self {
// must make sure it's a pointer to usize
let refcount_type = ptr_int(env.context, env.ptr_bytes);
@ -165,17 +165,18 @@ impl<'ctx> PointerToRefcount<'ctx> {
false,
);
let function_value =
env.module
.add_function(fn_name, fn_type, Some(Linkage::Private));
// Because it's an internal-only function, it should use the fast calling convention.
function_value.set_call_conventions(FAST_CALL_CONV);
let function_value = add_func(
env.module,
fn_name,
fn_type,
Linkage::Private,
FAST_CALL_CONV, // Because it's an internal-only function, it should use the fast calling convention.
);
let subprogram = env.new_subprogram(fn_name);
function_value.set_subprogram(subprogram);
Self::_build_decrement_function_body(env, function_value, alignment);
Self::build_decrement_function_body(env, function_value, alignment);
function_value
}
@ -194,10 +195,10 @@ impl<'ctx> PointerToRefcount<'ctx> {
call.set_call_convention(FAST_CALL_CONV);
}
fn _build_decrement_function_body<'a, 'env>(
fn build_decrement_function_body<'a, 'env>(
env: &Env<'a, 'ctx, 'env>,
parent: FunctionValue<'ctx>,
extra_bytes: u32,
alignment: u32,
) {
let builder = env.builder;
let ctx = env.context;
@ -269,15 +270,21 @@ impl<'ctx> PointerToRefcount<'ctx> {
{
builder.position_at_end(then_block);
if !env.leak {
match extra_bytes {
let ptr = builder.build_pointer_cast(
refcount_ptr.value,
ctx.i8_type().ptr_type(AddressSpace::Generic),
"cast_to_i8_ptr",
);
match alignment {
n if env.ptr_bytes == n => {
// the refcount ptr is also the ptr to the malloced region
builder.build_free(refcount_ptr.value);
// the refcount ptr is also the ptr to the allocated region
env.call_dealloc(ptr, alignment);
}
n if 2 * env.ptr_bytes == n => {
// we need to step back another ptr_bytes to get the malloced ptr
let malloced = Self::from_ptr_to_data(env, refcount_ptr.value);
builder.build_free(malloced.value);
// we need to step back another ptr_bytes to get the allocated ptr
let allocated = Self::from_ptr_to_data(env, ptr);
env.call_dealloc(allocated.value, alignment);
}
n => unreachable!("invalid extra_bytes {:?}", n),
}
@ -1149,12 +1156,13 @@ pub fn build_header_help<'a, 'ctx, 'env>(
VoidType(t) => t.fn_type(arguments, false),
};
let fn_val = env
.module
.add_function(fn_name, fn_type, Some(Linkage::Private));
// Because it's an internal-only function, it should use the fast calling convention.
fn_val.set_call_conventions(FAST_CALL_CONV);
let fn_val = add_func(
env.module,
fn_name,
fn_type,
Linkage::Private,
FAST_CALL_CONV, // Because it's an internal-only function, it should use the fast calling convention.
);
let subprogram = env.new_subprogram(&fn_name);
fn_val.set_subprogram(subprogram);


@ -315,6 +315,13 @@ impl Assembler<AArch64GeneralReg, AArch64FloatReg> for AArch64Assembler {
fn jmp_imm32(_buf: &mut Vec<'_, u8>, _offset: i32) -> usize {
unimplemented!("jump instructions not yet implemented for AArch64");
}
#[inline(always)]
fn tail_call(buf: &mut Vec<'_, u8>) -> u64 {
Self::jmp_imm32(buf, 0);
buf.len() as u64 - 4 // TODO is 4 the correct offset in ARM?
}
#[inline(always)]
fn jne_reg64_imm64_imm32(
_buf: &mut Vec<'_, u8>,


@ -99,6 +99,8 @@ pub trait Assembler<GeneralReg: RegTrait, FloatReg: RegTrait> {
// It returns the base offset to calculate the jump from (generally the instruction after the jump).
fn jmp_imm32(buf: &mut Vec<'_, u8>, offset: i32) -> usize;
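// Emits an unconditional jump whose 32-bit target is left as a placeholder,
// and returns the offset of that placeholder so a relocation can patch it.
// Used to build the roc_alloc/roc_realloc/roc_dealloc wrappers around libc.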
fn tail_call(buf: &mut Vec<'_, u8>) -> u64;
// Jumps by an offset of offset bytes if reg is not equal to imm.
// It should always generate the same number of bytes to enable replacement if offset changes.
// It returns the base offset to calculate the jump from (generally the instruction after the jump).
@ -341,6 +343,14 @@ impl<
Ok(())
}
/// Used for generating wrappers for malloc/realloc/free
fn build_wrapped_jmp(&mut self) -> Result<(&'a [u8], u64), String> {
let mut out = bumpalo::vec![in self.env.arena];
let offset = ASM::tail_call(&mut out);
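// `offset` points at the jump's 32-bit placeholder inside `out`;
// generate_wrapper attaches a relocation there so the linker redirects
// the jump to the wrapped libc function.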
Ok((out.into_bump_slice(), offset))
}
fn build_fn_call(
&mut self,
dst: &Symbol,


@ -834,6 +834,13 @@ impl Assembler<X86_64GeneralReg, X86_64FloatReg> for X86_64Assembler {
jmp_imm32(buf, offset);
buf.len()
}
#[inline(always)]
fn tail_call(buf: &mut Vec<'_, u8>) -> u64 {
Self::jmp_imm32(buf, 0);
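// jmp_imm32 emits a 5-byte `jmp rel32`, so the 32-bit placeholder to be
// relocated starts 4 bytes before the end of the buffer.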
buf.len() as u64 - 4
}
#[inline(always)]
fn jne_reg64_imm64_imm32(
buf: &mut Vec<'_, u8>,


@ -22,6 +22,7 @@ pub struct Env<'a> {
pub interns: Interns,
pub exposed_to_host: MutSet<Symbol>,
pub lazy_literals: bool,
pub generate_allocators: bool,
}
// These relocations likely will need a length.
@ -67,6 +68,9 @@ where
// The backend should track these args so it can use them as needed.
fn load_args(&mut self, args: &'a [(Layout<'a>, Symbol)]) -> Result<(), String>;
/// Used for generating wrappers for malloc/realloc/free
fn build_wrapped_jmp(&mut self) -> Result<(&'a [u8], u64), String>;
/// build_proc creates a procedure and outputs it to the wrapped object writer.
fn build_proc(&mut self, proc: Proc<'a>) -> Result<(&'a [u8], &[Relocation]), String> {
self.reset();


@ -89,6 +89,60 @@ pub fn build_module<'a>(
}
}
fn generate_wrapper<'a, B: Backend<'a>>(
backend: &mut B,
output: &mut Object,
wrapper_name: String,
wraps: String,
) -> Result<(), String> {
let text_section = output.section_id(StandardSection::Text);
let proc_symbol = Symbol {
name: wrapper_name.as_bytes().to_vec(),
value: 0,
size: 0,
kind: SymbolKind::Text,
scope: SymbolScope::Dynamic,
weak: false,
section: SymbolSection::Section(text_section),
flags: SymbolFlags::None,
};
let proc_id = output.add_symbol(proc_symbol);
let (proc_data, offset) = backend.build_wrapped_jmp()?;
let proc_offset = output.add_symbol_data(proc_id, text_section, proc_data, 16);
let name = wraps.as_str().as_bytes();
// If the symbol is an undefined zig builtin, we need to add it here.
let symbol = Symbol {
name: name.to_vec(),
value: 0,
size: 0,
kind: SymbolKind::Text,
scope: SymbolScope::Dynamic,
weak: true,
section: SymbolSection::Undefined,
flags: SymbolFlags::None,
};
output.add_symbol(symbol);
if let Some(sym_id) = output.symbol_id(name) {
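// Patch the placeholder emitted by build_wrapped_jmp with a PLT-relative
// branch to the wrapped function; the -4 addend accounts for x86 branch
// displacements being measured from the end of the instruction.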
let reloc = write::Relocation {
offset: offset + proc_offset,
size: 32,
kind: RelocationKind::PltRelative,
encoding: RelocationEncoding::X86Branch,
symbol: sym_id,
addend: -4,
};
output
.add_relocation(text_section, reloc)
.map_err(|e| format!("{:?}", e))?;
Ok(())
} else {
Err(format!("failed to find fn symbol for {:?}", wraps))
}
}
fn build_object<'a, B: Backend<'a>>(
env: &'a Env,
procedures: MutMap<(symbol::Symbol, Layout<'a>), Proc<'a>>,
@ -107,6 +161,27 @@ fn build_object<'a, B: Backend<'a>>(
);
*/
if env.generate_allocators {
generate_wrapper(
&mut backend,
&mut output,
"roc_alloc".into(),
"malloc".into(),
)?;
generate_wrapper(
&mut backend,
&mut output,
"roc_realloc".into(),
"realloc".into(),
)?;
generate_wrapper(
&mut backend,
&mut output,
"roc_dealloc".into(),
"free".into(),
)?;
}
// Setup layout_ids for procedure calls.
let mut layout_ids = roc_mono::layout::LayoutIds::default();
let mut procs = Vec::with_capacity_in(procedures.len(), env.arena);


@ -11,7 +11,7 @@ extern crate libc;
mod helpers;
#[cfg(all(test, any(target_os = "linux", target_os = "macos"), any(target_arch = "x86_64"/*, target_arch = "aarch64"*/)))]
mod gen_num {
mod dev_num {
#[test]
fn i64_values() {
assert_evals_to!("0", 0, i64);


@ -173,6 +173,7 @@ pub fn helper<'a>(
interns,
exposed_to_host: exposed_to_host.keys().copied().collect(),
lazy_literals,
generate_allocators: true, // Needed for testing, since we don't have a platform
};
let target = target_lexicon::Triple::host();


@ -1,18 +1,8 @@
#[macro_use]
extern crate pretty_assertions;
#[macro_use]
extern crate indoc;
extern crate bumpalo;
extern crate inkwell;
extern crate libc;
extern crate roc_gen;
#[macro_use]
mod helpers;
#[cfg(test)]
mod gen_num {
mod gen_compare {
use crate::assert_evals_to;
use crate::assert_llvm_evals_to;
use indoc::indoc;
#[test]
fn eq_i64() {


@ -3,9 +3,30 @@
use crate::assert_evals_to;
use crate::assert_llvm_evals_to;
use crate::helpers::with_larger_debug_stack;
use core::ffi::c_void;
use indoc::indoc;
use roc_std::{RocList, RocStr};
#[no_mangle]
pub unsafe fn roc_alloc(size: usize, _alignment: u32) -> *mut c_void {
libc::malloc(size)
}
#[no_mangle]
pub unsafe fn roc_realloc(
c_ptr: *mut c_void,
new_size: usize,
_old_size: usize,
_alignment: u32,
) -> *mut c_void {
libc::realloc(c_ptr, new_size)
}
#[no_mangle]
pub unsafe fn roc_dealloc(c_ptr: *mut c_void, _alignment: u32) {
libc::free(c_ptr)
}
#[test]
fn roc_list_construction() {
let list = RocList::from_slice(&[1i64; 23]);


@ -4,6 +4,7 @@ use roc_build::program::FunctionIterator;
use roc_can::builtins::builtin_defs_map;
use roc_can::def::Def;
use roc_collections::all::{MutMap, MutSet};
use roc_gen::llvm::externs::add_default_roc_externs;
use roc_module::symbol::Symbol;
use roc_types::subs::VarStore;
@ -179,12 +180,16 @@ pub fn helper<'a>(
),
};
let builder = context.create_builder();
let module = roc_gen::llvm::build::module_from_builtins(context, "app");
// Add roc_alloc, roc_realloc, and roc_dealloc, since the repl has no
// platform to provide them.
add_default_roc_externs(context, &module, &builder, ptr_bytes);
// strip Zig debug stuff
module.strip_debug_info();
let builder = context.create_builder();
let opt_level = if cfg!(debug_assertions) {
roc_gen::llvm::build::OptLevel::Normal
} else {


@ -4,6 +4,7 @@
// we actually want to compare against the literal float bits
#![allow(clippy::clippy::float_cmp)]
pub mod gen_compare;
pub mod gen_dict;
pub mod gen_hash;
pub mod gen_list;


@ -4,6 +4,7 @@ use roc_std::alloca;
use roc_std::RocCallResult;
use roc_std::RocStr;
use std::alloc::Layout;
use std::ffi::c_void;
use std::time::SystemTime;
extern "C" {
@ -18,6 +19,30 @@ extern "C" {
#[link_name = "roc__rocMain_1_Fx_size"]
fn size_Fx() -> i64;
fn malloc(size: usize) -> *mut c_void;
fn realloc(c_ptr: *mut c_void, size: usize) -> *mut c_void;
fn free(c_ptr: *mut c_void);
}
#[no_mangle]
pub unsafe fn roc_alloc(size: usize, _alignment: u32) -> *mut c_void {
return malloc(size);
}
#[no_mangle]
pub unsafe fn roc_realloc(
c_ptr: *mut c_void,
new_size: usize,
_old_size: usize,
_alignment: u32,
) -> *mut c_void {
return realloc(c_ptr, new_size);
}
#[no_mangle]
pub unsafe fn roc_dealloc(c_ptr: *mut c_void, _alignment: u32) {
return free(c_ptr);
}
#[no_mangle]


@ -29,6 +29,22 @@ extern fn roc__mainForHost_1_Fx_caller(*const u8, [*]u8, [*]u8) void;
extern fn roc__mainForHost_1_Fx_size() i64;
extern fn roc__mainForHost_1_Fx_result_size() i64;
extern fn malloc(size: usize) callconv(.C) ?*c_void;
extern fn realloc(c_ptr: [*]align(@alignOf(u128)) u8, size: usize) callconv(.C) ?*c_void;
extern fn free(c_ptr: [*]align(@alignOf(u128)) u8) callconv(.C) void;
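// These hosts assume libc malloc returns memory aligned to at least 16 bytes
// (max_align_t on 64-bit targets), so the alignment argument can be ignored
// and the @alignCast(16, ...) below is safe.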
export fn roc_alloc(size: usize, alignment: u32) callconv(.C) ?*c_void {
return malloc(size);
}
export fn roc_realloc(c_ptr: *c_void, new_size: usize, old_size: usize, alignment: u32) callconv(.C) ?*c_void {
return realloc(@alignCast(16, @ptrCast([*]u8, c_ptr)), new_size);
}
export fn roc_dealloc(c_ptr: *c_void, alignment: u32) callconv(.C) void {
free(@alignCast(16, @ptrCast([*]u8, c_ptr)));
}
const Unit = extern struct {};
pub export fn main() u8 {


@ -1,5 +1,6 @@
#![allow(non_snake_case)]
use core::ffi::c_void;
use roc_std::{alloca, RocCallResult, RocResult, RocStr};
use std::alloc::Layout;
@ -24,6 +25,30 @@ extern "C" {
#[link_name = "roc__mainForHost_1_Fx_result_size"]
fn size_Fx_result() -> i64;
fn malloc(size: usize) -> *mut c_void;
fn realloc(c_ptr: *mut c_void, size: usize) -> *mut c_void;
fn free(c_ptr: *mut c_void);
}
#[no_mangle]
pub unsafe fn roc_alloc(size: usize, _alignment: u32) -> *mut c_void {
return malloc(size);
}
#[no_mangle]
pub unsafe fn roc_realloc(
c_ptr: *mut c_void,
new_size: usize,
_old_size: usize,
_alignment: u32,
) -> *mut c_void {
return realloc(c_ptr, new_size);
}
#[no_mangle]
pub unsafe fn roc_dealloc(c_ptr: *mut c_void, _alignment: u32) {
return free(c_ptr);
}
#[no_mangle]


@ -1,19 +1,18 @@
app "custom-malloc-example"
packages { base: "platform" }
imports [ base.Task.{ Task }, base.File, base.Path ]
imports [ base.Task.{ Task } ]
provides [ main ] to base
main : Task.Task {} []
main =
when Path.fromStr "thing.txt" is
Ok path ->
{} <- Task.await (Task.putLine "Writing to file")
_ <- Task.await (Task.putLine "About to allocate a list!")
result <- Task.attempt (File.writeUtf8 path "zig is awesome")
# This is the only allocation in this Roc code!
# (The strings all get stored in the application
# binary, and are never allocated on the heap.)
list = [ 1, 2, 3, 4 ]
when result is
Ok _ -> Task.putLine "successfully wrote to file"
Err BadThing -> Task.putLine "error writing to file"
Err _ -> Task.putLine "something worse"
_ -> Task.putLine "invalid path"
if List.len list > 100 then
Task.putLine "The list was big!"
else
Task.putLine "The list was small!"


@ -28,6 +28,10 @@ extern fn roc__mainForHost_1_Fx_caller(*const u8, *const u8, [*]u8, [*]u8) void;
extern fn roc__mainForHost_1_Fx_size() i64;
extern fn roc__mainForHost_1_Fx_result_size() i64;
extern fn malloc(size: usize) callconv(.C) ?*c_void;
extern fn realloc(c_ptr: [*]align(@alignOf(u128)) u8, size: usize) callconv(.C) ?*c_void;
extern fn free(c_ptr: [*]align(@alignOf(u128)) u8) callconv(.C) void;
const Unit = extern struct {};
pub export fn main() u8 {
@ -60,28 +64,31 @@ pub export fn main() u8 {
return 0;
}
pub export fn malloc(size: usize) callconv(.C) ?*c_void {
export fn roc_alloc(size: usize, alignment: u32) callconv(.C) ?*c_void {
const stdout = std.io.getStdOut().writer();
const allocator = testing.allocator;
// Perfo,m the actual malloc
// Perform the actual malloc
const startNs = std.time.nanoTimestamp();
const ptr = allocator.alignedAlloc(u8, 16, size) catch unreachable;
const ptr = malloc(size);
const endNs = std.time.nanoTimestamp();
const totalMs = @divTrunc(endNs - startNs, 1000);
stdout.print("\x1B[36m{} | \x1B[39m Custom malloc allocated {} bytes in {} ms!\n", .{startNs, size, totalMs}) catch unreachable;
return @ptrCast(?*c_void, ptr);
return ptr;
}
pub export fn free(c_ptr: *u128) callconv(.C) void {
export fn roc_realloc(c_ptr: *c_void, new_size: usize, old_size: usize, alignment: u32) callconv(.C) ?*c_void {
return realloc(@alignCast(16, @ptrCast([*]u8, c_ptr)), new_size);
}
export fn roc_dealloc(c_ptr: *c_void, alignment: u32) callconv(.C) void {
const stdout = std.io.getStdOut().writer();
const allocator = testing.allocator;
// Perform the actual free
const startNs = std.time.nanoTimestamp();
allocator.destroy(c_ptr);
free(@alignCast(16, @ptrCast([*]u8, c_ptr)));
const endNs = std.time.nanoTimestamp();
const totalMs = @divTrunc(endNs - startNs, 1000);
@ -149,7 +156,7 @@ pub export fn roc_fx_readAllUtf8(rocPath: RocStr) callconv(.C) ReadResult {
};
var str_ptr = @ptrCast([*]u8, content);
var roc_str3 = RocStr.init(testing.allocator, str_ptr, content.len);
var roc_str3 = RocStr.init(str_ptr, content.len);
return .{ .bytes = roc_str3, .errno = 0 };
}
@ -163,119 +170,3 @@ pub export fn roc_fx_writeAllUtf8(filePath: RocStr, content: RocStr) callconv(.C
return .{ .errno = 0 };
}
pub fn roc_fx_readAllUtf8_that_does_not_work(rocPath: *RocStr) ReadResult {
const allocator = std.heap.c_allocator;
// fopen wants a C string, so stack-allocate one using rocPath's contents
const len = rocPath.len() + 1;
var raw = allocator.alloc(u8, len) catch unreachable;
var path: [*:0]u8 = @ptrCast([*:0]u8, raw);
rocPath.memcpy(path, len);
path[len] = 0; // nul-terminate the path, since it's a C string
// Open the file
const file = fopen(path, "r") orelse {
return ReadResult{ .bytes = RocStr.empty(), .errno = errno };
};
// Now that the file has been opened, make sure we always (try to) close it
// before returning, even if something went wrong while reading it.
defer {
if (fclose(file) != 0) {
return ReadResult{ .bytes = RocStr.empty(), .errno = errno };
}
}
// Next we'll count the total number of bytes in the file, which we need
// to know so we can allocate a correctly-sized buffer to read into.
// First, seek to the end of the file
if (fseek(file, 0, SEEK_END) != 0) {
return ReadResult{ .bytes = RocStr.empty(), .errno = errno };
}
// Now the current file position (which ftell returns) will be the end of
// the file - which will be equal to the total number of bytes in the file.
const totalBytes: c_long = ftell(file);
// In the highly unusual case that there are no bytes to read, return early.
if (totalBytes <= 0) {
// If the file was empty, return an empty list.
if (totalBytes == 0) {
return ReadResult{ .bytes = RocStr.empty(), .errno = 0 };
}
// ftell returns -1 on error, so return an error here
return ReadResult{ .bytes = RocStr.empty(), .errno = errno };
}
// Rewind to the beginning of the file, so we can start actually reading.
if (fseek(file, 0, SEEK_SET) != 0) {
return ReadResult{ .bytes = RocStr.empty(), .errno = errno };
}
// Allocate enough bytes for the contents of the file, plus the refcount.
const refcountBytes = @sizeOf(usize);
var buffer: [*]u8 = malloc(totalBytes + refcountBytes) orelse {
// If allocation failed, throw a runtime exception for Roc to catch.
// fclose the file explicitly before throwing, because libunwind
// will disregard our defer block. (TODO verify this!)
//
// Silently ignore fclose errors here, because we're about to throw an
// allocation failure exception; fclose failures won't affect that.
fclose(file);
// TODO use libunwind to throw an exception here
// TODO set an "allocation failed" exception object for `catch` to receive
// TODO write a test for this which simulates allocation failure
};
// Initialize the refcount to a positive number - meaning it's actually
// a capacity value, which is appropriate since we return a Unique value.
@ptrCast(buffer, [*]usize)[0] = totalBytes;
// The buffer pointer should point to the first byte *after* the refcount
buffer += refcountBytes;
// Read the bytes into the buffer.
const bytesRead = fread(buffer, 1, totalBytes, file);
// fread indicates an error by returning a number that's different from
// the number of elements we requested to read
if (bytesRead != totalBytes) {
return ReadResult{ .bytes = RocStr.empty(), .errno = errno };
}
// Explicitly return errno = 0 to indicate there was no error.
//
// (We don't want to read from the errno global here because it might have
// a nonzero value leftover from previous unrelated operations.)
return ReadResult{ .bytes = RocStr.init(buffer, totalBytes), .errno = 0 };
}
// const c = @cImport({
// @cInclude("stdio.h");
// @cInclude("stdlib.h");
// });
//
// extern var errno: c_int;
//
// const FILE = extern struct {
// unused: u8,
// };
// extern "c" fn fopen(filename: [*:0]const u8, modes: [*:0]const u8) ?*FILE;
//extern "c" fn fopen(filename: [*:0]const u8, modes: [*:0]const u8) ?*FILE;
//extern "c" fn fclose(stream: *FILE) c_int;
//extern "c" fn fseek(stream: *FILE, offset: c_long, origin: c_int) c_int;
// extern fn fopen([*:0]const u8, [*:0]const u8) ?*FILE;
// extern fn fseek(*FILE, c_long, c_int) c_int;
//extern fn fopen([*c]const u8, [*c]const u8) [*c]FILE;
// extern fn ftell([*c]FILE) c_long;
// extern fn fread([*c]u8, size_t, size_t, [*c]FILE) size_t;
// extern fn fclose([*c]FILE) c_int;


@ -4,6 +4,7 @@ use roc_std::alloca;
use roc_std::RocCallResult;
use roc_std::RocStr;
use std::alloc::Layout;
use std::ffi::c_void;
use std::time::SystemTime;
extern "C" {
@ -26,6 +27,30 @@ extern "C" {
#[link_name = "roc__mainForHost_1_Fx_result_size"]
fn size_Fx_result() -> i64;
fn malloc(size: usize) -> *mut c_void;
fn realloc(c_ptr: *mut c_void, size: usize) -> *mut c_void;
fn free(c_ptr: *mut c_void);
}
#[no_mangle]
pub unsafe fn roc_alloc(size: usize, _alignment: u32) -> *mut c_void {
return malloc(size);
}
#[no_mangle]
pub unsafe fn roc_realloc(
c_ptr: *mut c_void,
new_size: usize,
_old_size: usize,
_alignment: u32,
) -> *mut c_void {
return realloc(c_ptr, new_size);
}
#[no_mangle]
pub unsafe fn roc_dealloc(c_ptr: *mut c_void, _alignment: u32) {
return free(c_ptr);
}
#[no_mangle]


@ -19,6 +19,22 @@ comptime {
}
}
extern fn malloc(size: usize) callconv(.C) ?*c_void;
extern fn realloc(c_ptr: [*]align(@alignOf(u128)) u8, size: usize) callconv(.C) ?*c_void;
extern fn free(c_ptr: [*]align(@alignOf(u128)) u8) callconv(.C) void;
export fn roc_alloc(size: usize, alignment: u32) callconv(.C) ?*c_void {
return malloc(size);
}
export fn roc_realloc(c_ptr: *c_void, new_size: usize, old_size: usize, alignment: u32) callconv(.C) ?*c_void {
return realloc(@alignCast(16, @ptrCast([*]u8, c_ptr)), new_size);
}
export fn roc_dealloc(c_ptr: *c_void, alignment: u32) callconv(.C) void {
free(@alignCast(16, @ptrCast([*]u8, c_ptr)));
}
const mem = std.mem;
const Allocator = mem.Allocator;
@ -45,7 +61,7 @@ pub export fn main() i32 {
// stdout the result
stdout.print("{}\n", .{callresult.content.asSlice()}) catch unreachable;
callresult.content.deinit(std.heap.c_allocator);
callresult.content.deinit();
// end time
var ts2: std.os.timespec = undefined;


@ -22,6 +22,22 @@ const Allocator = mem.Allocator;
extern fn roc__mainForHost_1_exposed(RocList, *RocCallResult) void;
extern fn malloc(size: usize) callconv(.C) ?*c_void;
extern fn realloc(c_ptr: [*]align(@alignOf(u128)) u8, size: usize) callconv(.C) ?*c_void;
extern fn free(c_ptr: [*]align(@alignOf(u128)) u8) callconv(.C) void;
export fn roc_alloc(size: usize, alignment: u32) callconv(.C) ?*c_void {
return malloc(size);
}
export fn roc_realloc(c_ptr: *c_void, new_size: usize, old_size: usize, alignment: u32) callconv(.C) ?*c_void {
return realloc(@alignCast(16, @ptrCast([*]u8, c_ptr)), new_size);
}
export fn roc_dealloc(c_ptr: *c_void, alignment: u32) callconv(.C) void {
free(@alignCast(16, @ptrCast([*]u8, c_ptr)));
}
// warning! the array is currently stack-allocated so don't make this too big
const NUM_NUMS = 100;


@ -2,11 +2,36 @@
use roc_std::RocCallResult;
use roc_std::RocList;
use std::ffi::c_void;
use std::time::SystemTime;
extern "C" {
#[link_name = "roc__mainForHost_1_exposed"]
fn quicksort(list: RocList<i64>, output: &mut RocCallResult<RocList<i64>>) -> ();
fn malloc(size: usize) -> *mut c_void;
fn realloc(c_ptr: *mut c_void, size: usize) -> *mut c_void;
fn free(c_ptr: *mut c_void);
}
#[no_mangle]
pub unsafe fn roc_alloc(size: usize, _alignment: u32) -> *mut c_void {
return malloc(size);
}
#[no_mangle]
pub unsafe fn roc_realloc(
c_ptr: *mut c_void,
new_size: usize,
_old_size: usize,
_alignment: u32,
) -> *mut c_void {
return realloc(c_ptr, new_size);
}
#[no_mangle]
pub unsafe fn roc_dealloc(c_ptr: *mut c_void, _alignment: u32) {
return free(c_ptr);
}
const NUM_NUMS: usize = 100;


@ -28,6 +28,22 @@ extern fn roc__mainForHost_1_Fx_caller(*const u8, *const u8, [*]u8, [*]u8) void;
extern fn roc__mainForHost_1_Fx_size() i64;
extern fn roc__mainForHost_1_Fx_result_size() i64;
extern fn malloc(size: usize) callconv(.C) ?*c_void;
extern fn realloc(c_ptr: [*]align(@alignOf(u128)) u8, size: usize) callconv(.C) ?*c_void;
extern fn free(c_ptr: [*]align(@alignOf(u128)) u8) callconv(.C) void;
export fn roc_alloc(size: usize, alignment: u32) callconv(.C) ?*c_void {
return malloc(size);
}
export fn roc_realloc(c_ptr: *c_void, new_size: usize, old_size: usize, alignment: u32) callconv(.C) ?*c_void {
return realloc(@alignCast(16, @ptrCast([*]u8, c_ptr)), new_size);
}
export fn roc_dealloc(c_ptr: *c_void, alignment: u32) callconv(.C) void {
free(@alignCast(16, @ptrCast([*]u8, c_ptr)));
}
const Unit = extern struct {};
pub export fn main() u8 {
@ -120,7 +136,7 @@ pub export fn roc_fx_readAllUtf8(rocPath: RocStr) callconv(.C) ReadResult {
};
var str_ptr = @ptrCast([*]u8, content);
var roc_str3 = RocStr.init(testing.allocator, str_ptr, content.len);
var roc_str3 = RocStr.init(str_ptr, content.len);
return .{ .bytes = roc_str3, .errno = 0 };
}


@ -1,9 +1,9 @@
#![allow(non_snake_case)]
use roc_std::alloca;
use roc_std::RocCallResult;
use roc_std::RocStr;
use std::alloc::Layout;
use std::ffi::c_void;
use std::time::SystemTime;
type Model = *const u8;
@ -60,6 +60,30 @@ extern "C" {
#[link_name = "roc__mainForHost_1_Model_size"]
fn size_Model() -> i64;
fn malloc(size: usize) -> *mut c_void;
fn realloc(c_ptr: *mut c_void, size: usize) -> *mut c_void;
fn free(c_ptr: *mut c_void);
}
#[no_mangle]
pub unsafe fn roc_alloc(size: usize, _alignment: u32) -> *mut c_void {
return malloc(size);
}
#[no_mangle]
pub unsafe fn roc_realloc(
c_ptr: *mut c_void,
new_size: usize,
_old_size: usize,
_alignment: u32,
) -> *mut c_void {
return realloc(c_ptr, new_size);
}
#[no_mangle]
pub unsafe fn roc_dealloc(c_ptr: *mut c_void, _alignment: u32) {
return free(c_ptr);
}
unsafe fn call_Fx(function_pointer: *const u8, closure_data: *const u8, output: *mut u8) -> () {


@ -1,5 +1,6 @@
#![crate_type = "lib"]
#![no_std]
use core::ffi::c_void;
use core::fmt;
pub mod alloca;
@ -7,6 +8,15 @@ pub mod alloca;
// A list of C functions that are being imported
extern "C" {
pub fn printf(format: *const u8, ...) -> i32;
pub fn roc_alloc(size: usize, alignment: u32) -> *mut c_void;
pub fn roc_realloc(
ptr: *mut c_void,
new_size: usize,
old_size: usize,
alignment: u32,
) -> *mut c_void;
pub fn roc_dealloc(ptr: *mut c_void, alignment: u32);
}
const REFCOUNT_1: usize = isize::MIN as usize;
@ -138,7 +148,7 @@ impl<T> RocList<T> {
let num_bytes = core::mem::size_of::<usize>() + padding + element_bytes;
let elements = unsafe {
let raw_ptr = libc::malloc(num_bytes) as *mut u8;
let raw_ptr = roc_alloc(num_bytes, core::mem::size_of::<usize>() as u32) as *mut u8;
// pointer to the first element
let raw_ptr = Self::get_element_ptr(raw_ptr as *mut T) as *mut T;
@ -369,8 +379,7 @@ impl RocStr {
let num_bytes = core::mem::size_of::<usize>() + element_bytes;
let elements = unsafe {
let raw_ptr = libc::malloc(num_bytes);
let raw_ptr = roc_alloc(num_bytes, core::mem::size_of::<usize>() as u32) as *mut u8;
// write the capacity
let capacity_ptr = raw_ptr as *mut usize;
*capacity_ptr = capacity;
@ -452,7 +461,7 @@ impl Clone for RocStr {
let capacity_size = core::mem::size_of::<usize>();
let copy_length = self.length + capacity_size;
let elements = unsafe {
let raw = libc::malloc(copy_length);
let raw = roc_alloc(copy_length, core::mem::size_of::<usize>() as u32);
libc::memcpy(
raw,