Add a RocRefcounted trait in roc_std

This is required to properly handle refcounting of RocList.
Without it, we can't tell whether we need to read the length from the heap.
That said, it isn't a pretty solution.

I think generating bespoke types in glue would feel nicer than this, but it would be much more work.
It would also address the issue of implementations in the bitcode not matching external libraries.
Even so, it would require exposing many more symbols from Roc for each monomorphized list variant.
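
(Editorial sketch, to make the layout question concrete; the contents of the extra header slot are my reading of the ptr_to_allocation change below, not something the diff states.)

// The host finds the start of a RocList allocation by stepping back
// from the element pointer, and the step size depends on whether the
// elements are refcounted, hence a trait the host can query:
fn allocation_header_size(alignment: usize, elems_refcounted: bool) -> usize {
    if elems_refcounted {
        // refcount plus an extra slot (apparently the allocation's
        // element count, needed to dec each element when freeing)
        alignment * 2
    } else {
        alignment // just the refcount
    }
}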
Brendan Hansknecht 2024-04-05 17:48:30 -07:00
parent 3c842196fa
commit 93fab26c01
9 changed files with 237 additions and 310 deletions

View File

@ -8,7 +8,7 @@ use bumpalo::{collections::Vec, Bump};
use roc_builtins::bitcode::{FloatWidth, IntWidth};
use roc_mono::layout::{Builtin, InLayout, LayoutInterner, LayoutRepr, UnionLayout};
use roc_std::{RocBox, RocDec, RocList, RocOrder, RocResult, RocStr, I128, U128};
use roc_std::{RocBox, RocDec, RocList, RocOrder, RocRefcounted, RocResult, RocStr, I128, U128};
use roc_wasm_module::{
linking::SymInfo, linking::WasmObjectSymbol, Align, Export, ExportType, LocalId, Signature,
ValueType, WasmModule,
@ -197,7 +197,10 @@ impl Wasm32Result for RocStr {
}
}
impl<T: Wasm32Result> Wasm32Result for RocList<T> {
impl<T: Wasm32Result> Wasm32Result for RocList<T>
where
T: RocRefcounted,
{
fn build_wrapper_body(code_builder: &mut CodeBuilder, main_function_index: u32) {
build_wrapper_body_stack_memory(code_builder, main_function_index, 12)
}

View File

@ -1,4 +1,4 @@
use roc_std::{RocBox, RocDec, RocList, RocOrder, RocResult, RocStr, I128, U128};
use roc_std::{RocBox, RocDec, RocList, RocOrder, RocRefcounted, RocResult, RocStr, I128, U128};
pub trait Wasm32Sized: Sized {
const SIZE_OF_WASM: usize;
@ -53,7 +53,10 @@ impl Wasm32Sized for RocStr {
const ALIGN_OF_WASM: usize = 4;
}
impl<T: Wasm32Sized> Wasm32Sized for RocList<T> {
impl<T: Wasm32Sized> Wasm32Sized for RocList<T>
where
T: RocRefcounted,
{
const SIZE_OF_WASM: usize = 12;
const ALIGN_OF_WASM: usize = 4;
}
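
For context on these constants (an editorial illustration, not part of the commit): a RocList<T> header is three words regardless of T, which on wasm32's 4-byte pointers gives 12 bytes at alignment 4.

// Editorial model of RocList's header on wasm32, using the field
// names from roc_std/src/roc_list.rs:
#[repr(C)]
struct RocListWasm32 {
    elements: u32,            // pointer to the first element
    length: u32,              // number of elements
    capacity_or_ref_ptr: u32, // capacity, or a ref ptr for seamless slices
}
// 3 * 4 = 12 bytes (SIZE_OF_WASM), alignment 4 (ALIGN_OF_WASM).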

View File

@ -1,7 +1,7 @@
use roc_error_macros::internal_error;
use roc_gen_wasm::wasm32_sized::Wasm32Sized;
use roc_mono::layout::Builtin;
use roc_std::{RocBox, RocDec, RocList, RocOrder, RocResult, RocStr, I128, U128};
use roc_std::{RocBox, RocDec, RocList, RocOrder, RocRefcounted, RocResult, RocStr, I128, U128};
use roc_wasm_module::round_up_to_alignment;
use std::convert::TryInto;
@ -84,7 +84,10 @@ impl FromWasm32Memory for RocStr {
}
}
impl<T: FromWasm32Memory + Clone> FromWasm32Memory for RocList<T> {
impl<T: FromWasm32Memory + Clone> FromWasm32Memory for RocList<T>
where
T: RocRefcounted,
{
fn decode(memory: &[u8], offset: u32) -> Self {
let elements = <u32 as FromWasm32Memory>::decode(memory, offset + 4 * Builtin::WRAPPER_PTR);
let length = <u32 as FromWasm32Memory>::decode(memory, offset + 4 * Builtin::WRAPPER_LEN);

View File

@ -9,8 +9,6 @@ import "../../roc_std/Cargo.toml" as rocStdCargoToml : Str
import "../../roc_std/src/lib.rs" as rocStdLib : Str
import "../../roc_std/src/roc_box.rs" as rocStdBox : Str
import "../../roc_std/src/roc_list.rs" as rocStdList : Str
import "../../roc_std/src/roc_dict.rs" as rocStdDict : Str
import "../../roc_std/src/roc_set.rs" as rocStdSet : Str
import "../../roc_std/src/roc_str.rs" as rocStdStr : Str
import "../../roc_std/src/storage.rs" as rocStdStorage : Str
@ -45,8 +43,6 @@ staticFiles = [
{ name: "roc_std/src/lib.rs", content: rocStdLib },
{ name: "roc_std/src/roc_box.rs", content: rocStdBox },
{ name: "roc_std/src/roc_list.rs", content: rocStdList },
{ name: "roc_std/src/roc_dict.rs", content: rocStdDict },
{ name: "roc_std/src/roc_set.rs", content: rocStdSet },
{ name: "roc_std/src/roc_str.rs", content: rocStdStr },
{ name: "roc_std/src/storage.rs", content: rocStdStorage },
]
@ -2006,16 +2002,16 @@ typeName = \types, id ->
Num F32 -> "f32"
Num F64 -> "f64"
Num Dec -> "roc_std:RocDec"
RocDict key value ->
keyName = typeName types key
valueName = typeName types value
"roc_std::RocDict<$(keyName), $(valueName)>"
RocDict _key _value ->
# keyName = typeName types key
# valueName = typeName types value
# "roc_std::RocDict<$(keyName), $(valueName)>"
crash "RocDict is not yet supported in rust"
RocSet elem ->
elemName = typeName types elem
"roc_std::RocSet<$(elemName)>"
RocSet _elem ->
# elemName = typeName types elem
# "roc_std::RocSet<$(elemName)>"
crash "RocSet is not yet supported in rust"
RocList elem ->
elemName = typeName types elem

View File

@ -10,18 +10,15 @@ use core::hash::{Hash, Hasher};
use core::mem::{ManuallyDrop, MaybeUninit};
use core::ops::Drop;
use core::str;
use std::convert::Infallible;
mod roc_box;
mod roc_dict;
mod roc_list;
mod roc_set;
mod roc_str;
mod storage;
pub use roc_box::RocBox;
pub use roc_dict::RocDict;
pub use roc_list::{RocList, SendSafeRocList};
pub use roc_set::RocSet;
pub use roc_str::{InteriorNulError, RocStr, SendSafeRocStr};
pub use storage::Storage;
@ -557,3 +554,88 @@ impl Hash for U128 {
u128::from(*self).hash(state);
}
}
/// All Roc types that are refcounted must implement this trait.
pub trait RocRefcounted {
/// Increments the refcount n times.
fn inc(&mut self, n: usize);
/// Decrements the refcount, potentially freeing the underlying allocation.
fn dec(&mut self);
/// Returns true if the type is actually refcounted by Roc.
fn is_refcounted() -> bool;
}
macro_rules! roc_refcounted_noop_impl {
( $( $T:tt),+ ) => {
$(
impl RocRefcounted for $T {
fn inc(&mut self, _: usize) {}
fn dec(&mut self) {}
fn is_refcounted() -> bool {
false
}
}
)+
};
}
roc_refcounted_noop_impl!(bool);
roc_refcounted_noop_impl!(u8, u16, u32, u64, u128, U128);
roc_refcounted_noop_impl!(i8, i16, i32, i64, i128, I128);
roc_refcounted_noop_impl!(f32, f64);
roc_refcounted_noop_impl!(RocDec);
roc_refcounted_noop_impl!(Infallible, ());
macro_rules! roc_refcounted_arr_impl {
( $n:tt ) => {
impl<T> RocRefcounted for [T; $n]
where
T: RocRefcounted,
{
fn inc(&mut self, n: usize) {
self.iter_mut().for_each(|x| x.inc(n));
}
fn dec(&mut self) {
self.iter_mut().for_each(|x| x.dec());
}
fn is_refcounted() -> bool {
T::is_refcounted()
}
}
};
}
roc_refcounted_arr_impl!(0);
roc_refcounted_arr_impl!(1);
roc_refcounted_arr_impl!(2);
roc_refcounted_arr_impl!(3);
roc_refcounted_arr_impl!(4);
roc_refcounted_arr_impl!(8);
macro_rules! roc_refcounted_tuple_impl {
( $( $idx:tt $T:ident),* ) => {
impl<$($T, )+> RocRefcounted for ($($T, )*)
where
$($T : RocRefcounted, )*
{
fn inc(&mut self, n: usize) {
$(
self.$idx.inc(n);
)*
}
fn dec(&mut self) {
$(
self.$idx.dec();
)*
}
fn is_refcounted() -> bool {
$($T::is_refcounted() || )* false
}
}
};
}
roc_refcounted_tuple_impl!(0 A, 1 B);
roc_refcounted_tuple_impl!(0 A, 1 B, 2 C);
roc_refcounted_tuple_impl!(0 A, 1 B, 2 C, 3 D);
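
A quick illustration of how the no-op and composite impls combine (editorial sketch; assumes the impls above plus the RocStr impl added later in this commit):

use roc_std::{RocRefcounted, RocStr};

fn main() {
    // Primitives are never refcounted, so inc/dec are no-ops:
    assert!(!<u64 as RocRefcounted>::is_refcounted());
    // Arrays and tuples are refcounted iff an element type is:
    assert!(!<[u64; 4] as RocRefcounted>::is_refcounted());
    assert!(<(u64, RocStr) as RocRefcounted>::is_refcounted());
}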

View File

@ -1,208 +0,0 @@
use crate::roc_list::RocList;
use core::{
fmt::{self, Debug},
hash::Hash,
mem::{align_of, ManuallyDrop},
};
/// At the moment, Roc's Dict is just an association list. Its lookups are O(n) but
/// we haven't grown such big programs that it's a problem yet!
///
/// We do some things in this data structure that only make sense because the
/// memory is managed in Roc:
///
/// 1. We don't implement an [`IntoIterator`] that iterates over owned values,
/// since Roc owns the memory, not rust.
/// 2. We use a union for [`RocDictItem`] instead of just a struct. See the
/// comment on that data structure for why.
#[derive(Default, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(transparent)]
pub struct RocDict<K, V>(RocList<RocDictItem<K, V>>);
impl<K, V> RocDict<K, V> {
pub fn len(&self) -> usize {
self.0.len()
}
pub fn is_empty(&self) -> bool {
self.0.is_empty()
}
pub fn with_capacity(capacity: usize) -> Self {
Self(RocList::with_capacity(capacity))
}
pub fn iter(&self) -> impl Iterator<Item = (&K, &V)> {
self.0.iter().map(|item| (item.key(), item.value()))
}
pub fn iter_keys(&self) -> impl Iterator<Item = &K> {
self.0.iter().map(|item| item.key())
}
pub fn iter_values(&self) -> impl Iterator<Item = &V> {
self.0.iter().map(|item| item.value())
}
}
impl<K: Hash, V> RocDict<K, V> {
unsafe fn insert_unchecked(&mut self, _key: K, _val: V) {
todo!();
}
}
impl<K: Hash, V> FromIterator<(K, V)> for RocDict<K, V> {
fn from_iter<T: IntoIterator<Item = (K, V)>>(into_iter: T) -> Self {
let src = into_iter.into_iter();
let mut ret = Self::with_capacity(src.size_hint().0);
for (key, val) in src {
unsafe {
ret.insert_unchecked(key, val);
}
}
ret
}
}
impl<'a, K, V> IntoIterator for &'a RocDict<K, V> {
type Item = (&'a K, &'a V);
type IntoIter = IntoIter<'a, K, V>;
fn into_iter(self) -> Self::IntoIter {
IntoIter {
index: 0,
items: self.0.as_slice(),
}
}
}
pub struct IntoIter<'a, K, V> {
index: usize,
items: &'a [RocDictItem<K, V>],
}
impl<'a, K, V> Iterator for IntoIter<'a, K, V> {
type Item = (&'a K, &'a V);
fn next(&mut self) -> Option<Self::Item> {
let item = self
.items
.get(self.index)
.map(|item| (item.key(), item.value()));
self.index += 1;
item
}
fn size_hint(&self) -> (usize, Option<usize>) {
let remaining = self.items.len() - self.index;
(remaining, Some(remaining))
}
}
impl<K: Debug, V: Debug> Debug for RocDict<K, V> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str("RocDict ")?;
f.debug_map().entries(self.iter()).finish()
}
}
/// Roc is constructing these values according to its memory layout rules.
/// Specifically:
///
/// 1. fields with the highest alignment go first
/// 2. then fields are sorted alphabetically
///
/// Taken together, these mean that if we have a value with higher alignment
/// than the key, it'll be first in memory. Otherwise, the key will be first.
/// Fortunately, the total amount of memory doesn't change, so we can use a
/// union and disambiguate by examining the alignment of the key and value.
///
/// However, note that this only makes sense while we're storing KV pairs
/// contiguously in memory. If we separate them at some point, we'll need to
/// change this implementation drastically!
#[derive(Eq)]
#[repr(C)]
union RocDictItem<K, V> {
key_first: ManuallyDrop<KeyFirst<K, V>>,
value_first: ManuallyDrop<ValueFirst<K, V>>,
}
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(C)]
struct KeyFirst<K, V> {
key: K,
value: V,
}
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(C)]
struct ValueFirst<K, V> {
value: V,
key: K,
}
impl<K, V> RocDictItem<K, V> {
fn key(&self) -> &K {
if align_of::<K>() >= align_of::<V>() {
unsafe { &self.key_first.key }
} else {
unsafe { &self.value_first.key }
}
}
fn value(&self) -> &V {
if align_of::<K>() >= align_of::<V>() {
unsafe { &self.key_first.value }
} else {
unsafe { &self.value_first.value }
}
}
}
impl<K, V> Drop for RocDictItem<K, V> {
fn drop(&mut self) {
if align_of::<K>() >= align_of::<V>() {
unsafe { ManuallyDrop::drop(&mut self.key_first) }
} else {
unsafe { ManuallyDrop::drop(&mut self.value_first) }
}
}
}
impl<K: PartialEq, V: PartialEq> PartialEq for RocDictItem<K, V> {
fn eq(&self, other: &Self) -> bool {
self.key() == other.key() && self.value() == other.value()
}
}
impl<K: PartialOrd, V: PartialOrd> PartialOrd for RocDictItem<K, V> {
fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
self.key().partial_cmp(other.key()).map(|key_cmp| {
match self.value().partial_cmp(other.value()) {
Some(value_cmp) => key_cmp.then(value_cmp),
None => key_cmp,
}
})
}
}
impl<K: Ord, V: Ord> Ord for RocDictItem<K, V> {
fn cmp(&self, other: &Self) -> core::cmp::Ordering {
self.key()
.cmp(other.key())
.then(self.value().cmp(other.value()))
}
}
impl<K: Hash, V: Hash> Hash for RocDictItem<K, V> {
fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
self.key().hash(state);
self.value().hash(state);
}
}

View File

@ -14,7 +14,7 @@ use core::{
};
use std::ops::Range;
use crate::{roc_alloc, roc_dealloc, roc_realloc, storage::Storage};
use crate::{roc_alloc, roc_dealloc, roc_realloc, storage::Storage, RocRefcounted};
#[cfg(feature = "serde")]
use core::marker::PhantomData;
@ -26,7 +26,10 @@ use serde::{
};
#[repr(C)]
pub struct RocList<T> {
pub struct RocList<T>
where
T: RocRefcounted,
{
elements: Option<NonNull<ManuallyDrop<T>>>,
length: usize,
// This technically points to directly after the refcount.
@ -34,7 +37,10 @@ pub struct RocList<T> {
capacity_or_ref_ptr: usize,
}
impl<T> RocList<T> {
impl<T> RocList<T>
where
T: RocRefcounted,
{
#[inline(always)]
fn alloc_alignment() -> u32 {
mem::align_of::<T>().max(mem::align_of::<Storage>()) as u32
@ -214,12 +220,17 @@ impl<T> RocList<T> {
}
/// Useful for doing memcpy on the underlying allocation. Returns NULL if list is empty.
pub(crate) fn ptr_to_allocation(&self) -> *mut c_void {
let alignment = Self::alloc_alignment() as usize;
fn ptr_to_allocation(&self) -> *mut c_void {
let alignment = RocList::<T>::alloc_alignment() as usize;
if self.is_seamless_slice() {
((self.capacity_or_ref_ptr << 1) - alignment) as *mut _
} else {
unsafe { self.ptr_to_first_elem().cast::<u8>().sub(alignment) as *mut _ }
let offset = if T::is_refcounted() {
alignment * 2
} else {
alignment
};
unsafe { self.ptr_to_first_elem().cast::<u8>().sub(offset) as *mut _ }
}
}
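
Worked through with concrete element types (editorial example; assumes a 64-bit host where Storage is one machine word):

// RocList<u8>:     alloc_alignment = max(1, 8) = 8; u8 is not
//                  refcounted, so the allocation starts 8 bytes
//                  before the first element.
// RocList<RocStr>: alloc_alignment = max(8, 8) = 8; RocStr is
//                  refcounted, so the allocation starts 2 * 8 = 16
//                  bytes before the first element.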
@ -282,7 +293,7 @@ impl<T> RocList<T> {
impl<T> RocList<T>
where
T: Clone,
T: Clone + RocRefcounted,
{
pub fn from_slice(slice: &[T]) -> Self {
let mut list = Self::empty();
@ -370,7 +381,10 @@ where
}
}
impl<T> RocList<T> {
impl<T> RocList<T>
where
T: RocRefcounted,
{
#[track_caller]
pub fn slice_range(&self, range: Range<usize>) -> Self {
match self.try_slice_range(range) {
@ -499,7 +513,10 @@ impl<T> RocList<T> {
}
}
impl<T> Deref for RocList<T> {
impl<T> Deref for RocList<T>
where
T: RocRefcounted,
{
type Target = [T];
fn deref(&self) -> &Self::Target {
@ -513,7 +530,10 @@ impl<T> Deref for RocList<T> {
}
}
impl<T> DerefMut for RocList<T> {
impl<T> DerefMut for RocList<T>
where
T: RocRefcounted,
{
fn deref_mut(&mut self) -> &mut Self::Target {
if let Some(elements) = self.elements {
let ptr = elements.as_ptr().cast::<T>();
@ -526,7 +546,10 @@ impl<T> DerefMut for RocList<T> {
}
}
impl<T> Default for RocList<T> {
impl<T> Default for RocList<T>
where
T: RocRefcounted,
{
fn default() -> Self {
Self::empty()
}
@ -534,18 +557,20 @@ impl<T> Default for RocList<T> {
impl<T, U> PartialEq<RocList<U>> for RocList<T>
where
T: PartialEq<U>,
U: RocRefcounted,
T: PartialEq<U> + RocRefcounted,
{
fn eq(&self, other: &RocList<U>) -> bool {
self.as_slice() == other.as_slice()
}
}
impl<T> Eq for RocList<T> where T: Eq {}
impl<T> Eq for RocList<T> where T: Eq + RocRefcounted {}
impl<T, U> PartialOrd<RocList<U>> for RocList<T>
where
T: PartialOrd<U>,
U: RocRefcounted,
T: PartialOrd<U> + RocRefcounted,
{
fn partial_cmp(&self, other: &RocList<U>) -> Option<cmp::Ordering> {
// If one is longer than the other, use that as the ordering.
@ -569,7 +594,7 @@ where
impl<T> Ord for RocList<T>
where
T: Ord,
T: Ord + RocRefcounted,
{
fn cmp(&self, other: &Self) -> Ordering {
// If one is longer than the other, use that as the ordering.
@ -593,14 +618,17 @@ where
impl<T> Debug for RocList<T>
where
T: Debug,
T: Debug + RocRefcounted,
{
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
self.deref().fmt(f)
}
}
impl<T> Clone for RocList<T> {
impl<T> Clone for RocList<T>
where
T: RocRefcounted,
{
fn clone(&self) -> Self {
// Increment the reference count
if let Some((_, storage)) = self.elements_and_storage() {
@ -620,7 +648,27 @@ impl<T> Clone for RocList<T> {
}
}
impl<T> Drop for RocList<T> {
impl<T> RocRefcounted for RocList<T>
where
T: RocRefcounted,
{
fn inc(&mut self, _: usize) {
todo!()
}
fn dec(&mut self) {
todo!()
}
fn is_refcounted() -> bool {
true
}
}
impl<T> Drop for RocList<T>
where
T: RocRefcounted,
{
fn drop(&mut self) {
if let Some((elements, storage)) = self.elements_and_storage() {
// Decrease the list's reference count.
@ -637,7 +685,7 @@ impl<T> Drop for RocList<T> {
}
// Release the memory.
roc_dealloc(self.ptr_to_allocation(), Self::alloc_alignment());
roc_dealloc((&*self).ptr_to_allocation(), Self::alloc_alignment());
}
} else {
// Write the storage back.
@ -650,20 +698,26 @@ impl<T> Drop for RocList<T> {
impl<T> From<&[T]> for RocList<T>
where
T: Clone,
T: Clone + RocRefcounted,
{
fn from(slice: &[T]) -> Self {
Self::from_slice(slice)
}
}
impl<T, const SIZE: usize> From<[T; SIZE]> for RocList<T> {
impl<T, const SIZE: usize> From<[T; SIZE]> for RocList<T>
where
T: RocRefcounted,
{
fn from(array: [T; SIZE]) -> Self {
Self::from_iter(array)
}
}
impl<'a, T> IntoIterator for &'a RocList<T> {
impl<'a, T> IntoIterator for &'a RocList<T>
where
T: RocRefcounted,
{
type Item = &'a T;
type IntoIter = core::slice::Iter<'a, T>;
@ -672,7 +726,10 @@ impl<'a, T> IntoIterator for &'a RocList<T> {
}
}
impl<T: Hash> Hash for RocList<T> {
impl<T: Hash> Hash for RocList<T>
where
T: RocRefcounted,
{
fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
// This is the same as Rust's Vec implementation, which
// just delegates to the slice implementation. It's a bit surprising
@ -688,7 +745,10 @@ impl<T: Hash> Hash for RocList<T> {
}
}
impl<T> FromIterator<T> for RocList<T> {
impl<T> FromIterator<T> for RocList<T>
where
T: RocRefcounted,
{
fn from_iter<I>(into: I) -> Self
where
I: IntoIterator<Item = T>,
@ -732,7 +792,10 @@ impl<T> FromIterator<T> for RocList<T> {
}
#[cfg(feature = "serde")]
impl<T: Serialize> Serialize for RocList<T> {
impl<T: Serialize> Serialize for RocList<T>
where
T: RocRefcounted,
{
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
@ -750,7 +813,7 @@ impl<'de, T> Deserialize<'de> for RocList<T>
where
// TODO: I'm not sure about requiring clone here. Is that fine? Is that
// gonna mean lots of extra allocations?
T: Deserialize<'de> + core::clone::Clone,
T: Deserialize<'de> + core::clone::Clone + RocRefcounted,
{
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
@ -762,13 +825,15 @@ where
// This is a RocList that is checked to ensure it is unique or readonly such that it can be sent between threads safely.
#[repr(transparent)]
pub struct SendSafeRocList<T>(RocList<T>);
pub struct SendSafeRocList<T>(RocList<T>)
where
T: RocRefcounted;
unsafe impl<T> Send for SendSafeRocList<T> where T: Send {}
unsafe impl<T> Send for SendSafeRocList<T> where T: Send + RocRefcounted {}
impl<T> Clone for SendSafeRocList<T>
where
T: Clone,
T: Clone + RocRefcounted,
{
fn clone(&self) -> Self {
if self.0.is_readonly() {
@ -782,7 +847,7 @@ where
impl<T> From<RocList<T>> for SendSafeRocList<T>
where
T: Clone,
T: Clone + RocRefcounted,
{
fn from(l: RocList<T>) -> Self {
if l.is_unique() || l.is_readonly() {
@ -796,19 +861,28 @@ where
}
}
impl<T> From<SendSafeRocList<T>> for RocList<T> {
impl<T> From<SendSafeRocList<T>> for RocList<T>
where
T: RocRefcounted,
{
fn from(l: SendSafeRocList<T>) -> Self {
l.0
}
}
#[cfg(feature = "serde")]
struct RocListVisitor<T> {
struct RocListVisitor<T>
where
T: RocRefcounted,
{
marker: PhantomData<T>,
}
#[cfg(feature = "serde")]
impl<T> RocListVisitor<T> {
impl<T> RocListVisitor<T>
where
T: RocRefcounted,
{
fn new() -> Self {
RocListVisitor {
marker: PhantomData,
@ -819,7 +893,7 @@ impl<T> RocListVisitor<T> {
#[cfg(feature = "serde")]
impl<'de, T> Visitor<'de> for RocListVisitor<T>
where
T: Deserialize<'de> + core::clone::Clone,
T: Deserialize<'de> + core::clone::Clone + RocRefcounted,
{
type Value = RocList<T>;
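
The RocRefcounted impl for RocList above leaves its bodies as todo!(). As a standalone model of the bookkeeping they will need (editorial sketch; all names hypothetical, not the roc_std API):

use std::cell::Cell;

// A stand-in for the refcount word that sits just before the elements.
struct Header {
    refcount: Cell<isize>,
}

impl Header {
    fn inc(&self, n: usize) {
        self.refcount.set(self.refcount.get() + n as isize);
    }
    // Returns true when the caller should free the allocation; a list
    // of refcounted elements must dec each element first.
    fn dec(&self) -> bool {
        self.refcount.set(self.refcount.get() - 1);
        self.refcount.get() == 0
    }
}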

View File

@ -1,44 +0,0 @@
use crate::roc_dict::RocDict;
use core::{
fmt::{self, Debug},
hash::Hash,
};
#[derive(Default, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RocSet<T>(RocDict<T, ()>);
impl<T> RocSet<T> {
pub fn len(&self) -> usize {
self.0.len()
}
pub fn is_empty(&self) -> bool {
self.0.is_empty()
}
#[allow(unused)]
pub fn with_capacity(capacity: usize) -> Self {
Self(RocDict::with_capacity(capacity))
}
#[allow(unused)]
pub fn iter(&self) -> impl Iterator<Item = &T> {
self.0.iter_keys()
}
}
impl<T: Hash> FromIterator<T> for RocSet<T> {
fn from_iter<I: IntoIterator<Item = T>>(into_iter: I) -> Self {
Self(RocDict::from_iter(
into_iter.into_iter().map(|elem| (elem, ())),
))
}
}
impl<T: Debug> Debug for RocSet<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str("RocSet ")?;
f.debug_set().entries(self.iter()).finish()
}
}

View File

@ -21,7 +21,7 @@ use core::{
use std::ffi::{CStr, CString};
use std::{ops::Range, ptr::NonNull};
use crate::{roc_realloc, RocList};
use crate::{roc_realloc, RocList, RocRefcounted};
#[repr(transparent)]
pub struct RocStr(RocStrInner);
@ -792,6 +792,24 @@ impl From<SendSafeRocStr> for RocStr {
}
}
impl RocRefcounted for RocStr {
fn inc(&mut self, n: usize) {
if !self.is_small_str() {
unsafe { self.0.heap_allocated.deref_mut().inc(n) }
}
}
fn dec(&mut self) {
if !self.is_small_str() {
unsafe { self.0.heap_allocated.deref_mut().dec() }
}
}
fn is_refcounted() -> bool {
true
}
}
#[repr(C)]
struct BigString {
elements: NonNull<u8>,
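
Since small strings are stored inline, the impl above only touches the heap refcount for big strings. A hedged usage sketch (assumes roc_std's From<&str> impl for RocStr):

use roc_std::{RocRefcounted, RocStr};

fn main() {
    let mut small = RocStr::from("hi"); // inline: inc and dec are no-ops
    small.inc(1);
    small.dec();

    let mut big = RocStr::from("a string too long to be stored inline");
    big.inc(1); // bumps the heap refcount to 2
    big.dec();  // back to unique
}   // dropping `big` frees the allocation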