Merge pull request #3138 from rtfeldman/remove-refcount-trait

Replace the `ReferenceCount` trait with `Clone` and `Drop`
Folkert de Vries 2022-05-27 13:08:46 +02:00 committed by GitHub
commit e281aa32e0
10 changed files with 222 additions and 425 deletions
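
In short: types that previously implemented the `roc_std::ReferenceCount` trait (manual `increment`/`decrement` calls) now express the same refcounting through ordinary `Clone` and `Drop` impls, so plain Rust ownership drives the counts. A minimal sketch of the pattern, using a hypothetical `Handle` type (the name, field, and `Box`-based allocation are illustrative only; the real roc_std types keep their refcount in a `Storage` word placed just before the heap data):

```rust
use core::cell::Cell;

/// Hypothetical refcounted handle, for illustration only.
struct Handle {
    refcount: *mut Cell<usize>,
}

impl Handle {
    fn new() -> Self {
        Handle {
            refcount: Box::into_raw(Box::new(Cell::new(1))),
        }
    }
}

impl Clone for Handle {
    // Replaces the trait's `fn increment(&self)`.
    fn clone(&self) -> Self {
        unsafe { (*self.refcount).set((*self.refcount).get() + 1) };
        Handle { refcount: self.refcount }
    }
}

impl Drop for Handle {
    // Replaces the trait's `unsafe fn decrement(ptr: *const Self)`.
    fn drop(&mut self) {
        unsafe {
            let n = (*self.refcount).get();
            if n == 1 {
                // Last owner: free the allocation.
                drop(Box::from_raw(self.refcount));
            } else {
                (*self.refcount).set(n - 1);
            }
        }
    }
}

fn main() {
    let a = Handle::new();
    let b = a.clone(); // refcount 1 -> 2
    drop(b); // refcount 2 -> 1
    drop(a); // refcount 1 -> 0: deallocates
}
```

With this shape, `clone()` bumps the count and the compiler inserts the matching decrement whenever a copy goes out of scope, which is why the diffs below can delete every explicit `increment`/`decrement` call site.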

View File

@@ -1093,7 +1093,7 @@ pub struct {name} {{
let payload = {assign_payload};
core::mem::drop(self);
core::mem::drop::<Self>(self);
payload
}}"#,
@@ -1161,72 +1161,6 @@ pub struct {name} {{
);
}
// The roc_std::ReferenceCount impl for the tag union
{
let opt_impl = Some(format!("unsafe impl roc_std::ReferenceCount for {name}"));
add_decl(
impls,
opt_impl.clone(),
architecture,
r#"fn increment(&self) {
if let Some(storage) = self.storage() {
let mut copy = storage.get();
if !copy.is_readonly() {
copy.increment_reference_count();
storage.set(copy);
}
}
}"#
.to_string(),
);
add_decl(
impls,
opt_impl,
architecture,
r#"unsafe fn decrement(wrapper_ptr: *const Self) {
let wrapper = &*wrapper_ptr;
if let Some(storage) = Self::storage(wrapper) {
// Decrement the refcount and return early if no dealloc is needed
{
let mut new_storage = storage.get();
if new_storage.is_readonly() {
return;
}
let needs_dealloc = new_storage.decrease();
if !needs_dealloc {
// Write the storage back.
storage.set(new_storage);
return;
}
}
if !wrapper.pointer.is_null() {
// If there is a payload, recursively drop it first.
let mut payload = core::mem::ManuallyDrop::take(&mut *wrapper.pointer);
core::mem::drop(payload);
}
// Dealloc the pointer
let alignment = core::mem::align_of::<Self>().max(core::mem::align_of::<roc_std::Storage>());
let alloc_ptr = wrapper.pointer.cast::<u8>().sub(alignment);
crate::roc_dealloc(
alloc_ptr as *mut core::ffi::c_void,
alignment as u32,
);
}
}"#.to_string()
);
}
// The Clone impl for the tag union
{
// Note that these never have Copy because they always contain a pointer.
@@ -1234,7 +1168,13 @@ pub struct {name} {{
// Recursive tag unions need a custom Clone which bumps refcount.
let body = r#"fn clone(&self) -> Self {
roc_std::ReferenceCount::increment(self);
if let Some(storage) = self.storage() {
let mut new_storage = storage.get();
if !new_storage.is_readonly() {
new_storage.increment_reference_count();
storage.set(new_storage);
}
}
Self {
pointer: self.pointer
@@ -1254,12 +1194,47 @@ pub struct {name} {{
impls,
opt_impl,
architecture,
r#"fn drop(&mut self) {
unsafe {
roc_std::ReferenceCount::decrement(self as *const Self);
}
}"#
.to_string(),
format!(
r#"fn drop(&mut self) {{
if let Some(storage) = self.storage() {{
// Decrement the refcount and return early if no dealloc is needed
{{
let mut new_storage = storage.get();
if new_storage.is_readonly() {{
return;
}}
let needs_dealloc = new_storage.decrease();
if !needs_dealloc {{
// Write the storage back.
storage.set(new_storage);
return;
}}
}}
if !self.pointer.is_null() {{
// If there is a payload, drop it first.
let payload = unsafe {{ core::mem::ManuallyDrop::take(&mut *self.pointer) }};
core::mem::drop::<{payload_type_name}>(payload);
}}
// Dealloc the pointer
unsafe {{
let alignment = core::mem::align_of::<Self>().max(core::mem::align_of::<roc_std::Storage>());
let alloc_ptr = self.pointer.cast::<u8>().sub(alignment);
crate::roc_dealloc(
alloc_ptr as *mut core::ffi::c_void,
alignment as u32,
);
}}
}}
}}"#
),
);
}

View File

@@ -7,7 +7,6 @@ The user needs to analyse the Wasm module's memory to decode the result.
use bumpalo::{collections::Vec, Bump};
use roc_builtins::bitcode::{FloatWidth, IntWidth};
use roc_mono::layout::{Builtin, Layout, UnionLayout};
use roc_std::ReferenceCount;
use roc_target::TargetInfo;
use crate::wasm32_sized::Wasm32Sized;
@@ -197,7 +196,7 @@ impl Wasm32Result for RocStr {
}
}
impl<T: Wasm32Result + ReferenceCount> Wasm32Result for RocList<T> {
impl<T: Wasm32Result> Wasm32Result for RocList<T> {
fn build_wrapper_body(code_builder: &mut CodeBuilder, main_function_index: u32) {
build_wrapper_body_stack_memory(code_builder, main_function_index, 12)
}

View File

@@ -1,4 +1,4 @@
use roc_std::{ReferenceCount, RocDec, RocList, RocOrder, RocStr};
use roc_std::{RocDec, RocList, RocOrder, RocStr};
pub trait Wasm32Sized: Sized {
const SIZE_OF_WASM: usize;
@@ -35,7 +35,7 @@ impl Wasm32Sized for RocStr {
const ALIGN_OF_WASM: usize = 4;
}
impl<T: Wasm32Sized + ReferenceCount> Wasm32Sized for RocList<T> {
impl<T: Wasm32Sized> Wasm32Sized for RocList<T> {
const SIZE_OF_WASM: usize = 12;
const ALIGN_OF_WASM: usize = 4;
}

View File

@@ -1,5 +1,5 @@
use roc_gen_wasm::wasm32_sized::Wasm32Sized;
use roc_std::{ReferenceCount, RocDec, RocList, RocOrder, RocStr};
use roc_std::{RocDec, RocList, RocOrder, RocStr};
use std::convert::TryInto;
pub trait FromWasmerMemory: Wasm32Sized {
@@ -74,7 +74,7 @@ impl FromWasmerMemory for RocStr {
}
}
impl<T: FromWasmerMemory + Clone + ReferenceCount> FromWasmerMemory for RocList<T> {
impl<T: FromWasmerMemory + Clone> FromWasmerMemory for RocList<T> {
fn decode(memory: &wasmer::Memory, offset: u32) -> Self {
let bytes = <u64 as FromWasmerMemory>::decode(memory, offset);

View File

@@ -1,8 +1,8 @@
use crate::graphics::colors::Rgba;
use core::alloc::Layout;
use core::ffi::c_void;
use core::mem::ManuallyDrop;
use roc_std::{ReferenceCount, RocList, RocStr};
use core::mem::{self, ManuallyDrop};
use roc_std::{RocList, RocStr};
use std::ffi::CStr;
use std::fmt::Debug;
use std::mem::MaybeUninit;
@@ -273,7 +273,7 @@ impl RocElem {
}
#[repr(C)]
#[derive(Debug)]
#[derive(Debug, Clone, Copy)]
pub struct RocRect {
pub color: Rgba,
@@ -284,31 +284,34 @@ pub struct RocRect {
pub width: f32,
}
unsafe impl ReferenceCount for RocElem {
/// Increment the reference count.
fn increment(&self) {
use RocElemTag::*;
match self.tag() {
Rect => { /* nothing to increment! */ }
Text => unsafe { &*self.entry().text }.increment(),
impl Clone for RocElem {
fn clone(&self) -> Self {
unsafe {
match self.tag() {
RocElemTag::Rect => Self {
tag: RocElemTag::Rect,
entry: RocElemEntry {
rect: self.entry.rect.clone(),
},
},
RocElemTag::Text => Self {
tag: RocElemTag::Text,
entry: RocElemEntry {
text: self.entry.text.clone(),
},
},
}
}
}
}
/// Decrement the reference count.
///
/// # Safety
///
/// The caller must ensure that `ptr` points to a value with a non-zero
/// reference count.
unsafe fn decrement(ptr: *const Self) {
use RocElemTag::*;
let elem = &*ptr;
match elem.tag() {
Rect => { /* nothing to decrement! */ }
Text => ReferenceCount::decrement(&*elem.entry().text),
impl Drop for RocElem {
fn drop(&mut self) {
unsafe {
match self.tag() {
RocElemTag::Rect => mem::drop(ManuallyDrop::take(&mut self.entry.rect)),
RocElemTag::Text => mem::drop(ManuallyDrop::take(&mut self.entry.text)),
}
}
}
}

View File

@@ -1,7 +1,7 @@
use crate::graphics::colors::Rgba;
use core::ffi::c_void;
use core::mem::{self, ManuallyDrop};
use roc_std::{ReferenceCount, RocList, RocStr};
use roc_std::{RocList, RocStr};
use std::ffi::CStr;
use std::os::raw::c_char;
@@ -51,7 +51,7 @@ pub unsafe extern "C" fn roc_memset(dst: *mut c_void, c: i32, n: usize) -> *mut
#[repr(transparent)]
#[cfg(target_pointer_width = "64")] // on a 64-bit system, the tag fits in this pointer's spare 3 bits
pub struct RocElem {
entry: *const RocElemEntry,
entry: *mut RocElemEntry,
}
impl RocElem {
@@ -80,53 +80,50 @@ pub enum RocElemTag {
}
#[repr(C)]
#[derive(Clone)]
pub struct RocButton {
pub child: ManuallyDrop<RocElem>,
pub styles: ButtonStyles,
}
#[repr(C)]
#[derive(Clone)]
pub struct RocRowOrCol {
pub children: RocList<RocElem>,
}
unsafe impl ReferenceCount for RocElem {
/// Increment the reference count.
fn increment(&self) {
use RocElemTag::*;
match self.tag() {
Button => unsafe { &*self.entry().button.child }.increment(),
Text => unsafe { &*self.entry().text }.increment(),
Row | Col => {
let children = unsafe { &self.entry().row_or_col.children };
for child in children.as_slice().iter() {
child.increment();
}
impl Clone for RocElem {
fn clone(&self) -> Self {
unsafe {
match self.tag() {
RocElemTag::Button => Self {
entry: &mut RocElemEntry {
button: (*self.entry).button.clone(),
},
},
RocElemTag::Text => Self {
entry: &mut RocElemEntry {
text: (*self.entry).text.clone(),
},
},
RocElemTag::Col | RocElemTag::Row => Self {
entry: &mut RocElemEntry {
row_or_col: (*self.entry).row_or_col.clone(),
},
},
}
}
}
}
/// Decrement the reference count.
///
/// # Safety
///
/// The caller must ensure that `ptr` points to a value with a non-zero
/// reference count.
unsafe fn decrement(ptr: *const Self) {
use RocElemTag::*;
let elem = &*ptr;
match elem.tag() {
Button => ReferenceCount::decrement(&*elem.entry().button.child),
Text => ReferenceCount::decrement(&*elem.entry().text),
Row | Col => {
let children = &elem.entry().row_or_col.children;
for child in children.as_slice().iter() {
ReferenceCount::decrement(child);
impl Drop for RocElem {
fn drop(&mut self) {
unsafe {
match self.tag() {
RocElemTag::Button => mem::drop(ManuallyDrop::take(&mut (*self.entry).button)),
RocElemTag::Text => mem::drop(ManuallyDrop::take(&mut (*self.entry).text)),
RocElemTag::Col | RocElemTag::Row => {
mem::drop(ManuallyDrop::take(&mut (*self.entry).row_or_col))
}
}
}

View File

@@ -8,12 +8,10 @@ use core::str;
use std::hash::{Hash, Hasher};
use std::io::Write;
mod rc;
mod roc_list;
mod roc_str;
mod storage;
pub use rc::ReferenceCount;
pub use roc_list::RocList;
pub use roc_str::RocStr;
pub use storage::Storage;

View File

@@ -1,103 +0,0 @@
/// A type which uses reference counting for its heap-allocated memory.
///
/// Note that if a type doesn't allocate any heap memory (e.g. `i32`), the
/// `increment` and `decrement` methods don't need to do anything.
///
/// # Safety
///
/// It must be safe to memcpy this type to a new location after the reference count has been increased.
pub unsafe trait ReferenceCount {
/// Increment the reference count.
fn increment(&self);
/// Decrement the reference count.
///
/// This takes a pointer rather than a reference because it can receive a null pointer
/// (e.g. in the case of a cons list), and null references in Rust are undefined behavior.
///
/// # Safety
///
/// The caller must ensure that `ptr` points to a value with a non-zero
/// reference count.
unsafe fn decrement(ptr: *const Self);
}
macro_rules! impl_reference_count_for_primitive {
($ty:ty) => {
unsafe impl ReferenceCount for $ty {
fn increment(&self) {
// Do nothing.
}
unsafe fn decrement(_ptr: *const Self) {
// Do nothing.
}
}
};
}
impl_reference_count_for_primitive!(bool);
impl_reference_count_for_primitive!(char);
impl_reference_count_for_primitive!(u8);
impl_reference_count_for_primitive!(i8);
impl_reference_count_for_primitive!(u16);
impl_reference_count_for_primitive!(i16);
impl_reference_count_for_primitive!(u32);
impl_reference_count_for_primitive!(i32);
impl_reference_count_for_primitive!(u64);
impl_reference_count_for_primitive!(i64);
impl_reference_count_for_primitive!(u128);
impl_reference_count_for_primitive!(i128);
impl_reference_count_for_primitive!(f32);
impl_reference_count_for_primitive!(f64);
macro_rules! impl_reference_count_for_tuple {
($($ty:ident: $field:tt,)*) => {
unsafe impl<$($ty),*> ReferenceCount for ($($ty,)*)
where
$($ty: ReferenceCount,)*
{
fn increment(&self) {
$(self.$field.increment();)*
}
#[allow(unused_variables, clippy::unused_unit)]
unsafe fn decrement(ptr: *const Self) {
let ptrs = {
let this = &*ptr;
($(core::ptr::addr_of!(this.$field),)*)
};
$($ty::decrement(ptrs.$field);)*
}
}
};
}
impl_reference_count_for_tuple!();
impl_reference_count_for_tuple!(A: 0,);
impl_reference_count_for_tuple!(A: 0, B: 1,);
impl_reference_count_for_tuple!(A: 0, B: 1, C: 2,);
impl_reference_count_for_tuple!(A: 0, B: 1, C: 2, D: 3,);
impl_reference_count_for_tuple!(A: 0, B: 1, C: 2, D: 3, E: 4,);
impl_reference_count_for_tuple!(A: 0, B: 1, C: 2, D: 3, E: 4, F: 5,);
impl_reference_count_for_tuple!(A: 0, B: 1, C: 2, D: 3, E: 4, F: 5, G: 6,);
impl_reference_count_for_tuple!(A: 0, B: 1, C: 2, D: 3, E: 4, F: 5, G: 6, H: 7,);
impl_reference_count_for_tuple!(A: 0, B: 1, C: 2, D: 3, E: 4, F: 5, G: 6, H: 7, I: 8,);
impl_reference_count_for_tuple!(A: 0, B: 1, C: 2, D: 3, E: 4, F: 5, G: 6, H: 7, I: 8, J: 9,);
impl_reference_count_for_tuple!(A: 0, B: 1, C: 2, D: 3, E: 4, F: 5, G: 6, H: 7, I: 8, J: 9, K: 10,);
impl_reference_count_for_tuple!(A: 0, B: 1, C: 2, D: 3, E: 4, F: 5, G: 6, H: 7, I: 8, J: 9, K: 10, L: 11,);
unsafe impl<T, const N: usize> ReferenceCount for [T; N]
where
T: ReferenceCount,
{
fn increment(&self) {
self.iter().for_each(T::increment)
}
unsafe fn decrement(ptr: *const Self) {
for i in 0..N {
T::decrement(ptr.cast::<T>().add(i));
}
}
}

View File

@@ -5,26 +5,22 @@ use core::{
cmp::{self, Ordering},
fmt::Debug,
intrinsics::copy_nonoverlapping,
mem::{self, ManuallyDrop},
ops::Deref,
ptr,
ptr::NonNull,
};
use crate::{rc::ReferenceCount, roc_alloc, roc_dealloc, roc_realloc, storage::Storage};
use crate::{roc_alloc, roc_dealloc, roc_realloc, storage::Storage};
#[repr(C)]
pub struct RocList<T>
where
T: ReferenceCount,
{
elements: Option<NonNull<T>>,
pub struct RocList<T> {
elements: Option<NonNull<ManuallyDrop<T>>>,
length: usize,
capacity: usize,
}
impl<T> RocList<T>
where
T: ReferenceCount,
{
impl<T> RocList<T> {
pub fn empty() -> Self {
RocList {
elements: None,
@@ -33,12 +29,6 @@ where
}
}
pub fn from_slice(slice: &[T]) -> Self {
let mut list = Self::empty();
list.extend_from_slice(slice);
list
}
pub fn len(&self) -> usize {
self.length
}
@@ -55,6 +45,23 @@ where
&*self
}
fn elements_and_storage(&self) -> Option<(NonNull<ManuallyDrop<T>>, &Cell<Storage>)> {
let elements = self.elements?;
let storage = unsafe { &*elements.as_ptr().cast::<Cell<Storage>>().sub(1) };
Some((elements, storage))
}
}
impl<T> RocList<T>
where
T: Clone,
{
pub fn from_slice(slice: &[T]) -> Self {
let mut list = Self::empty();
list.extend_from_slice(slice);
list
}
pub fn extend_from_slice(&mut self, slice: &[T]) {
// TODO: Can we do better for ZSTs? Alignment might be a problem.
@@ -62,10 +69,10 @@ where
return;
}
let alignment = cmp::max(core::mem::align_of::<T>(), core::mem::align_of::<Storage>());
let alignment = cmp::max(mem::align_of::<T>(), mem::align_of::<Storage>());
let elements_offset = alignment;
let new_size = elements_offset + core::mem::size_of::<T>() * (self.len() + slice.len());
let new_size = elements_offset + mem::size_of::<T>() * (self.len() + slice.len());
let new_ptr = if let Some((elements, storage)) = self.elements_and_storage() {
// Decrement the list's reference count.
@@ -74,7 +81,7 @@ where
if is_unique {
// If the memory is not shared, we can reuse the memory.
let old_size = elements_offset + core::mem::size_of::<T>() * self.len();
let old_size = elements_offset + mem::size_of::<T>() * self.len();
unsafe {
let ptr = elements.as_ptr().cast::<u8>().sub(alignment).cast();
roc_realloc(ptr, new_size, old_size, alignment as u32).cast()
@@ -87,7 +94,12 @@ where
// Allocate new memory.
let new_ptr = unsafe { roc_alloc(new_size, alignment as u32) };
let new_elements = unsafe { new_ptr.cast::<u8>().add(alignment).cast::<T>() };
let new_elements = unsafe {
new_ptr
.cast::<u8>()
.add(alignment)
.cast::<ManuallyDrop<T>>()
};
// Initialize the reference count.
unsafe {
@@ -116,7 +128,12 @@ where
new_ptr
};
let elements = unsafe { new_ptr.cast::<u8>().add(elements_offset).cast::<T>() };
let elements = unsafe {
new_ptr
.cast::<u8>()
.add(elements_offset)
.cast::<ManuallyDrop<T>>()
};
let non_null_elements = NonNull::new(elements).unwrap();
self.elements = Some(non_null_elements);
@@ -124,14 +141,14 @@ where
let elements = self.elements.unwrap().as_ptr();
let append_ptr = unsafe { elements.add(self.len()) };
for (i, element) in slice.iter().enumerate() {
// Increment the element's reference count.
element.increment();
// Write the element into the slot.
// Use .cloned() to increment the elements' reference counts, if needed.
for (i, new_elem) in slice.iter().cloned().enumerate() {
unsafe {
let element = core::ptr::read(element);
append_ptr.add(i).write(element);
// Write the element into the slot, without dropping it.
append_ptr
.add(i)
.write(ptr::read(&ManuallyDrop::new(new_elem)));
}
// It's important that the length is increased one by one, to
@@ -142,23 +159,15 @@ where
self.capacity = self.length
}
fn elements_and_storage(&self) -> Option<(NonNull<T>, &Cell<Storage>)> {
let elements = self.elements?;
let storage = unsafe { &*elements.as_ptr().cast::<Cell<Storage>>().sub(1) };
Some((elements, storage))
}
}
impl<T> Deref for RocList<T>
where
T: ReferenceCount,
{
impl<T> Deref for RocList<T> {
type Target = [T];
fn deref(&self) -> &Self::Target {
if let Some(elements) = self.elements {
let elements = core::ptr::slice_from_raw_parts(elements.as_ptr(), self.length);
let elements = ptr::slice_from_raw_parts(elements.as_ptr().cast::<T>(), self.length);
unsafe { &*elements }
} else {
&[]
@@ -166,10 +175,7 @@ where
}
}
impl<T> Default for RocList<T>
where
T: ReferenceCount,
{
impl<T> Default for RocList<T> {
fn default() -> Self {
Self::empty()
}
@@ -177,20 +183,18 @@ where
impl<T, U> PartialEq<RocList<U>> for RocList<T>
where
T: PartialEq<U> + ReferenceCount,
U: ReferenceCount,
T: PartialEq<U>,
{
fn eq(&self, other: &RocList<U>) -> bool {
self.deref() == other.deref()
}
}
impl<T> Eq for RocList<T> where T: Eq + ReferenceCount {}
impl<T> Eq for RocList<T> where T: Eq {}
impl<T, U> PartialOrd<RocList<U>> for RocList<T>
where
T: PartialOrd<U> + ReferenceCount,
U: ReferenceCount,
T: PartialOrd<U>,
{
fn partial_cmp(&self, other: &RocList<U>) -> Option<cmp::Ordering> {
// If one is longer than the other, use that as the ordering.
@@ -214,7 +218,7 @@ where
impl<T> Ord for RocList<T>
where
T: Ord + ReferenceCount,
T: Ord,
{
fn cmp(&self, other: &Self) -> Ordering {
// If one is longer than the other, use that as the ordering.
@@ -238,78 +242,25 @@ where
impl<T> Debug for RocList<T>
where
T: Debug + ReferenceCount,
T: Debug,
{
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
self.deref().fmt(f)
}
}
unsafe impl<T> ReferenceCount for RocList<T>
where
T: ReferenceCount,
{
fn increment(&self) {
// Increment the list's reference count.
if let Some((_, storage)) = self.elements_and_storage() {
let mut copy = storage.get();
if !copy.is_readonly() {
copy.increment_reference_count();
storage.set(copy);
}
// Increment the children's reference counts.
self.iter().for_each(T::increment);
}
}
unsafe fn decrement(ptr: *const Self) {
let this = unsafe { &*ptr };
let (elements, storage) = if let Some((elements, storage)) = this.elements_and_storage() {
(elements, storage)
} else {
return;
};
// Decrement the reference counts of the contained values.
for i in 0..this.len() {
unsafe {
T::decrement(elements.as_ptr().add(i));
}
}
// Decrease the list's reference count.
let mut copy = storage.get();
let can_be_released = copy.decrease();
if !can_be_released {
if !copy.is_readonly() {
// Write the storage back.
storage.set(copy);
}
return;
}
// Release the memory.
let alignment = cmp::max(core::mem::align_of::<T>(), core::mem::align_of::<Storage>());
unsafe {
roc_dealloc(
elements.as_ptr().cast::<u8>().sub(alignment).cast(),
alignment as u32,
);
}
}
}
impl<T> Clone for RocList<T>
where
T: ReferenceCount,
{
impl<T> Clone for RocList<T> {
fn clone(&self) -> Self {
// Increment the reference counts.
self.increment();
// Increment the reference count
if let Some((_, storage)) = self.elements_and_storage() {
let mut new_storage = storage.get();
if !new_storage.is_readonly() {
new_storage.increment_reference_count();
storage.set(new_storage);
}
}
// Create a copy.
Self {
elements: self.elements,
length: self.length,
@@ -318,30 +269,50 @@ where
}
}
impl<T> Drop for RocList<T>
where
T: ReferenceCount,
{
impl<T> Drop for RocList<T> {
fn drop(&mut self) {
unsafe {
Self::decrement(self);
if let Some((elements, storage)) = self.elements_and_storage() {
// Decrease the list's reference count.
let mut new_storage = storage.get();
let needs_dealloc = new_storage.decrease();
if needs_dealloc {
unsafe {
// Drop the stored elements.
for index in 0..self.len() {
let elem_ptr = elements.as_ptr().add(index);
mem::drop::<T>(ManuallyDrop::take(&mut *elem_ptr));
}
let alignment = cmp::max(mem::align_of::<T>(), mem::align_of::<Storage>());
// Release the memory.
roc_dealloc(
elements.as_ptr().cast::<u8>().sub(alignment).cast(),
alignment as u32,
);
}
} else {
if !new_storage.is_readonly() {
// Write the storage back.
storage.set(new_storage);
}
}
}
}
}
impl<T> From<&[T]> for RocList<T>
where
T: ReferenceCount,
T: Clone,
{
fn from(slice: &[T]) -> Self {
Self::from_slice(slice)
}
}
impl<T> IntoIterator for RocList<T>
where
T: ReferenceCount,
{
impl<T> IntoIterator for RocList<T> {
type Item = T;
type IntoIter = IntoIter<T>;
@@ -350,18 +321,12 @@ where
}
}
pub struct IntoIter<T>
where
T: ReferenceCount,
{
pub struct IntoIter<T> {
list: RocList<T>,
idx: usize,
}
impl<T> Iterator for IntoIter<T>
where
T: ReferenceCount,
{
impl<T> Iterator for IntoIter<T> {
type Item = T;
fn next(&mut self) -> Option<Self::Item> {
@@ -374,34 +339,21 @@ where
self.idx += 1;
// Return the element.
let element = unsafe { element_ptr.read() };
Some(element)
Some(unsafe { ManuallyDrop::into_inner(element_ptr.read()) })
}
}
impl<T> Drop for IntoIter<T>
where
T: ReferenceCount,
{
impl<T> Drop for IntoIter<T> {
fn drop(&mut self) {
// Check if there are any elements left whose reference
// counts we need to decrement.
let elements = if let Some(elements) = self.list.elements {
elements
} else {
return;
};
// If there are any elements left that need to be dropped, drop them.
if let Some(elements) = self.list.elements {
// Set the list's length to zero to prevent double-frees.
// Note that this leaks if dropping any of the elements panics.
let len = mem::take(&mut self.list.length);
// Set the list's length to zero to prevent double-frees.
// Note that this leaks if decrementing any of the elements' reference
// counts panics.
let len = core::mem::take(&mut self.list.length);
// Decrement the reference counts of the elements that haven't been
// returned from the iterator.
for i in self.idx..len {
unsafe {
T::decrement(elements.as_ptr().add(i));
// Drop the elements that haven't been returned from the iterator.
for i in self.idx..len {
mem::drop::<T>(unsafe { ManuallyDrop::take(&mut *elements.as_ptr().add(i)) })
}
}
}

View File

@@ -8,7 +8,7 @@ use core::{
};
use std::hash::Hash;
use crate::{rc::ReferenceCount, RocList};
use crate::RocList;
#[repr(transparent)]
pub struct RocStr(RocStrInner);
@@ -116,28 +116,6 @@ impl Debug for RocStr {
}
}
unsafe impl ReferenceCount for RocStr {
fn increment(&self) {
match self.as_enum_ref() {
RocStrInnerRef::HeapAllocated(h) => h.increment(),
RocStrInnerRef::SmallString(_) => {
// Do nothing.
}
}
}
unsafe fn decrement(ptr: *const Self) {
let this = unsafe { &*ptr };
if this.is_small_str() {
// Do nothing.
} else {
unsafe {
RocList::<u8>::decrement(ptr.cast());
}
}
}
}
impl Clone for RocStr {
fn clone(&self) -> Self {
match self.as_enum_ref() {
@@ -151,9 +129,7 @@ impl Clone for RocStr {
impl Drop for RocStr {
fn drop(&mut self) {
if self.is_small_str() {
// Do nothing.
} else {
if !self.is_small_str() {
unsafe {
ManuallyDrop::drop(&mut self.0.heap_allocated);
}