Merge pull request #40 from urbit/peter/remove-dyn

Replace dynamic trait objects with static type parameters
Peter McEvoy 2023-03-15 17:00:52 -04:00, committed via GitHub
commit 2a553a4aa8
7 changed files with 50 additions and 47 deletions
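Every hunk in this diff applies the same mechanical change: an argument of type `&mut dyn NounAllocator` or `&mut dyn Stack` becomes a generic parameter bounded by the trait. A minimal sketch of the before/after shapes, using a made-up trait and function names rather than anything from this repository:

```rust
trait Alloc {
    fn alloc_words(&mut self, words: usize) -> *mut u64;
}

// Before: dynamic dispatch through a vtable. `dyn Alloc` is unsized, so it
// can only be passed behind a pointer such as `&mut dyn Alloc`.
fn before(allocator: &mut dyn Alloc, words: usize) -> *mut u64 {
    allocator.alloc_words(words)
}

// After: static dispatch. The compiler monomorphizes one copy per concrete
// allocator type, so calls can be resolved and inlined at compile time.
fn after<A: Alloc>(allocator: &mut A, words: usize) -> *mut u64 {
    allocator.alloc_words(words)
}
```

The `NounAllocator` and `Stack` traits themselves also gain a `Sized` supertrait below, which prevents them from being used as trait objects at all.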


@ -167,7 +167,7 @@ impl DirectAtom {
Atom { direct: self }
}
- pub fn as_ubig(self, _stack: &mut dyn Stack) -> UBig {
+ pub fn as_ubig<S: Stack>(self, _stack: &mut S) -> UBig {
UBig::from(self.0)
}
@ -219,7 +219,7 @@ pub const fn D(n: u64) -> Noun {
}
#[allow(non_snake_case)]
- pub fn T(allocator: &mut dyn NounAllocator, tup: &[Noun]) -> Noun {
+ pub fn T<A: NounAllocator>(allocator: &mut A, tup: &[Noun]) -> Noun {
Cell::new_tuple(allocator, tup).as_noun()
}
@ -275,8 +275,8 @@ impl IndirectAtom {
*
* Note: size is in 64-bit words, not bytes.
*/
- pub unsafe fn new_raw(
-     allocator: &mut dyn NounAllocator,
+ pub unsafe fn new_raw<A: NounAllocator>(
+     allocator: &mut A,
size: usize,
data: *const u64,
) -> Self {
@ -289,8 +289,8 @@ impl IndirectAtom {
*
* Note: size is bytes, not words
*/
- pub unsafe fn new_raw_bytes(
-     allocator: &mut dyn NounAllocator,
+ pub unsafe fn new_raw_bytes<A: NounAllocator>(
+     allocator: &mut A,
size: usize,
data: *const u8,
) -> Self {
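The doc comments above stress that `new_raw` is sized in 64-bit words while `new_raw_bytes` is sized in bytes. The byte-to-word rounding used later in this file (`(size + 7) >> 3`) can be illustrated with a standalone helper (hypothetical name, not part of the crate):

```rust
/// Number of 64-bit words needed to hold `bytes` bytes, rounding up.
fn words_for_bytes(bytes: usize) -> usize {
    (bytes + 7) >> 3
}

fn main() {
    assert_eq!(words_for_bytes(1), 1); // a single byte still occupies one word
    assert_eq!(words_for_bytes(8), 1); // exactly one word
    assert_eq!(words_for_bytes(9), 2); // spills into a second word
}
```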
@ -303,7 +303,10 @@ impl IndirectAtom {
* until it is written and normalized) and a mutable pointer which is the data buffer for the
* indirect atom, to be written into.
*/
- pub unsafe fn new_raw_mut(allocator: &mut dyn NounAllocator, size: usize) -> (Self, *mut u64) {
+ pub unsafe fn new_raw_mut<A: NounAllocator>(
+     allocator: &mut A,
+     size: usize,
+ ) -> (Self, *mut u64) {
debug_assert!(size > 0);
let buffer = allocator.alloc_indirect(size);
*buffer = 0;
@ -315,8 +318,8 @@ impl IndirectAtom {
* Return the atom (which should not be used until it is written and normalized) and a mutable
* pointer which is the data buffer for the indirect atom, to be written into.
*/
- pub unsafe fn new_raw_mut_zeroed(
-     allocator: &mut dyn NounAllocator,
+ pub unsafe fn new_raw_mut_zeroed<A: NounAllocator>(
+     allocator: &mut A,
size: usize,
) -> (Self, *mut u64) {
let allocation = Self::new_raw_mut(allocator, size);
@ -327,8 +330,8 @@ impl IndirectAtom {
/** Make an indirect atom that can be written into as a bitslice. The constraints of
* [new_raw_mut_zeroed] also apply here
*/
- pub unsafe fn new_raw_mut_bitslice<'a>(
-     allocator: &mut dyn NounAllocator,
+ pub unsafe fn new_raw_mut_bitslice<'a, A: NounAllocator>(
+     allocator: &mut A,
size: usize,
) -> (Self, &'a mut BitSlice<u64, Lsb0>) {
let (noun, ptr) = Self::new_raw_mut_zeroed(allocator, size);
@ -343,8 +346,8 @@ impl IndirectAtom {
*
* Note: size is bytes, not words
*/
- pub unsafe fn new_raw_mut_bytes<'a>(
-     allocator: &mut dyn NounAllocator,
+ pub unsafe fn new_raw_mut_bytes<'a, A: NounAllocator>(
+     allocator: &mut A,
size: usize,
) -> (Self, &'a mut [u8]) {
let word_size = (size + 7) >> 3;
@ -382,7 +385,7 @@ impl IndirectAtom {
BitSlice::from_slice(self.as_slice())
}
- pub fn as_ubig(&self, stack: &mut dyn Stack) -> UBig {
+ pub fn as_ubig<S: Stack>(&self, stack: &mut S) -> UBig {
UBig::from_le_bytes_stack(stack, self.as_bytes())
}
@ -490,7 +493,7 @@ impl Cell {
}
}
- pub fn new(allocator: &mut dyn NounAllocator, head: Noun, tail: Noun) -> Cell {
+ pub fn new<T: NounAllocator>(allocator: &mut T, head: Noun, tail: Noun) -> Cell {
unsafe {
let (cell, memory) = Self::new_raw_mut(allocator);
(*memory).head = head;
@ -499,7 +502,7 @@ impl Cell {
}
}
- pub fn new_tuple(allocator: &mut dyn NounAllocator, tup: &[Noun]) -> Cell {
+ pub fn new_tuple<A: NounAllocator>(allocator: &mut A, tup: &[Noun]) -> Cell {
if tup.len() < 2 {
panic!("Cannot create tuple with fewer than 2 elements");
}
@ -512,7 +515,7 @@ impl Cell {
cell
}
- pub unsafe fn new_raw_mut(allocator: &mut dyn NounAllocator) -> (Cell, *mut CellMemory) {
+ pub unsafe fn new_raw_mut<A: NounAllocator>(allocator: &mut A) -> (Cell, *mut CellMemory) {
let memory = allocator.alloc_cell();
(*memory).metadata = 0;
(Self::from_raw_pointer(memory), memory)
@ -578,7 +581,7 @@ pub union Atom {
}
impl Atom {
- pub fn new(allocator: &mut dyn NounAllocator, value: u64) -> Atom {
+ pub fn new<A: NounAllocator>(allocator: &mut A, value: u64) -> Atom {
if value <= DIRECT_MAX {
unsafe { DirectAtom::new_unchecked(value).as_atom() }
} else {
@ -588,7 +591,7 @@ impl Atom {
// to_le_bytes and new_raw are copies. We should be able to do this completely without copies
// if we integrate with ibig properly.
- pub fn from_ubig(allocator: &mut dyn NounAllocator, big: &UBig) -> Atom {
+ pub fn from_ubig<A: NounAllocator>(allocator: &mut A, big: &UBig) -> Atom {
let bit_size = big.bit_len();
let buffer = big.to_le_bytes_stack();
if bit_size < 64 {
@ -649,7 +652,7 @@ impl Atom {
}
}
- pub fn as_ubig(self, stack: &mut dyn Stack) -> UBig {
+ pub fn as_ubig<S: Stack>(self, stack: &mut S) -> UBig {
if self.is_indirect() {
unsafe { self.indirect.as_ubig(stack) }
} else {
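The `Atom` constructors and accessors above branch on whether a value fits in a direct atom or needs heap-allocated indirect storage. A self-contained sketch of that dispatch; the `DIRECT_MAX` value here is an assumption (a 63-bit payload so the top bit is free for tagging), not the crate's actual constant:

```rust
// Hypothetical stand-in for the crate's DIRECT_MAX.
const DIRECT_MAX: u64 = u64::MAX >> 1;

enum ToyAtom {
    Direct(u64),        // small value stored inline
    Indirect(Vec<u64>), // large value stored out of line as little-endian words
}

fn toy_atom_new(value: u64) -> ToyAtom {
    if value <= DIRECT_MAX {
        ToyAtom::Direct(value)
    } else {
        // The real crate allocates an IndirectAtom through the NounAllocator here.
        ToyAtom::Indirect(vec![value])
    }
}

fn main() {
    assert!(matches!(toy_atom_new(7), ToyAtom::Direct(7)));
    assert!(matches!(toy_atom_new(u64::MAX), ToyAtom::Indirect(_)));
}
```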
@ -979,7 +982,7 @@ impl fmt::Display for Noun {
* An allocation object (probably a mem::NockStack) which can allocate a memory buffer sized to
* a certain number of nouns
*/
- pub trait NounAllocator {
+ pub trait NounAllocator: Sized {
/** Allocate memory for some multiple of the size of a noun
*
* This should allocate *two more* `u64`s than `words` to make space for the size and metadata
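`NounAllocator` gains a `Sized` supertrait in this hunk. Because `dyn NounAllocator` is an unsized type, the bound makes the trait unusable as a trait object, so the generic style is enforced by the compiler. A toy illustration of the effect (the trait here is made up so the sketch stands alone):

```rust
trait Toy: Sized {
    fn id(&self) -> u64;
}

struct ToyImpl;
impl Toy for ToyImpl {
    fn id(&self) -> u64 {
        42
    }
}

// Fine: monomorphized for each concrete implementor.
fn generic_ok<T: Toy>(t: &mut T) -> u64 {
    t.id()
}

// Does not compile: `Toy` requires `Self: Sized`, so `dyn Toy` is rejected.
// fn dyn_err(t: &mut dyn Toy) -> u64 { t.id() }

fn main() {
    let mut a = ToyImpl;
    assert_eq!(generic_ok(&mut a), 42);
}
```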


@ -531,7 +531,7 @@ impl UBig {
// given at least 2 words of extra capacity. However, this supports UBigs which have already
// been expanded through other operations.
#[inline]
- pub fn add_stack(stack: &mut dyn Stack, lhs: UBig, rhs: UBig) -> UBig {
+ pub fn add_stack<S: Stack>(stack: &mut S, lhs: UBig, rhs: UBig) -> UBig {
match (lhs.into_repr(), rhs.into_repr()) {
(Small(word0), Small(word1)) => UBig::add_word_stack(stack, word0, word1),
(Small(word0), Large(buffer1)) => UBig::add_large_word_stack(stack, buffer1, word0),
@ -548,7 +548,7 @@ impl UBig {
/// Add two `Word`s.
#[inline]
- fn add_word_stack(stack: &mut dyn Stack, a: Word, b: Word) -> UBig {
+ fn add_word_stack<S: Stack>(stack: &mut S, a: Word, b: Word) -> UBig {
let (res, overflow) = a.overflowing_add(b);
if overflow {
let mut buffer = Buffer::allocate_stack(stack, 2);
@ -561,7 +561,7 @@ impl UBig {
}
/// Add a large number to a `Word`.
- fn add_large_word_stack(stack: &mut dyn Stack, mut buffer: Buffer, rhs: Word) -> UBig {
+ fn add_large_word_stack<S: Stack>(stack: &mut S, mut buffer: Buffer, rhs: Word) -> UBig {
debug_assert!(buffer.len() >= 2);
if add::add_word_in_place(&mut buffer, rhs) {
buffer.push_may_reallocate_stack(stack, 1);
@ -570,7 +570,7 @@ impl UBig {
}
/// Add two large numbers.
- fn add_large_stack(stack: &mut dyn Stack, mut buffer: Buffer, rhs: &[Word]) -> UBig {
+ fn add_large_stack<S: Stack>(stack: &mut S, mut buffer: Buffer, rhs: &[Word]) -> UBig {
let n = buffer.len().min(rhs.len());
let overflow = add::add_same_len_in_place(&mut buffer[..n], &rhs[..n]);
if rhs.len() > n {
@ -622,7 +622,7 @@ impl UBig {
// Subtraction is always in-place
#[inline]
- pub fn sub_stack(_stack: &mut dyn Stack, lhs: UBig, rhs: UBig) -> UBig {
+ pub fn sub_stack<S: Stack>(_stack: &mut S, lhs: UBig, rhs: UBig) -> UBig {
lhs - rhs
}
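`add_word_stack` above falls back to a two-word buffer when the single-word addition overflows; the truncated hunk presumably pushes the wrapped result and a carry word of 1. A standalone worked example of that carry (plain arrays, not the crate's `Buffer`):

```rust
fn main() {
    let a: u64 = u64::MAX;
    let b: u64 = 1;
    let (low, overflow) = a.overflowing_add(b);
    assert!(overflow);
    assert_eq!(low, 0);

    // u64::MAX + 1 = 2^64, which as little-endian 64-bit words is [0, 1].
    let words: [u64; 2] = [low, 1];
    assert_eq!(words, [0, 1]);
}
```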


@ -21,7 +21,7 @@ use core::{
pub(crate) struct Buffer(ManuallyDrop<Vec<Word>>);
impl Buffer {
- pub(crate) fn allocate_stack(stack: &mut dyn Stack, num_words: usize) -> Buffer {
+ pub(crate) fn allocate_stack<S: Stack>(stack: &mut S, num_words: usize) -> Buffer {
if num_words > Buffer::MAX_CAPACITY {
UBig::panic_number_too_large();
}
@ -44,7 +44,7 @@ impl Buffer {
)))
}
- pub(crate) fn ensure_capacity_stack(&mut self, stack: &mut dyn Stack, num_words: usize) {
+ pub(crate) fn ensure_capacity_stack<S: Stack>(&mut self, stack: &mut S, num_words: usize) {
if num_words > self.capacity() {
self.reallocate_stack(stack, num_words);
}
@ -69,7 +69,7 @@ impl Buffer {
// }
}
- fn reallocate_stack(&mut self, stack: &mut dyn Stack, num_words: usize) {
+ fn reallocate_stack<S: Stack>(&mut self, stack: &mut S, num_words: usize) {
assert!(num_words >= self.len());
let mut new_buffer = Buffer::allocate_stack(stack, num_words);
new_buffer.clone_from(self);
@ -106,7 +106,7 @@ impl Buffer {
}
#[inline]
- pub(crate) fn push_may_reallocate_stack(&mut self, stack: &mut dyn Stack, word: Word) {
+ pub(crate) fn push_may_reallocate_stack<S: Stack>(&mut self, stack: &mut S, word: Word) {
self.ensure_capacity_stack(stack, self.len() + 1);
self.push(word);
}
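`push_may_reallocate_stack` first ensures room for one more word and then pushes, with `ensure_capacity_stack` reallocating only when the current capacity is too small. The same grow-then-push control flow with a plain `Vec`, purely as an illustration (the real `Buffer` allocates on the caller's `Stack`, not the heap):

```rust
fn push_may_reallocate(buf: &mut Vec<u64>, word: u64) {
    // ensure_capacity: grow only if one more word would not fit.
    if buf.len() + 1 > buf.capacity() {
        // reallocate: the crate copies into a fresh, larger stack allocation.
        buf.reserve(1);
    }
    buf.push(word);
}

fn main() {
    let mut buf = Vec::with_capacity(1);
    push_may_reallocate(&mut buf, 1);
    push_may_reallocate(&mut buf, 2); // takes the grow path
    assert_eq!(buf, vec![1, 2]);
}
```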


@ -31,7 +31,7 @@ impl Default for IBig {
impl UBig {
#[inline]
- pub fn from_le_bytes_stack(stack: &mut dyn Stack, bytes: &[u8]) -> UBig {
+ pub fn from_le_bytes_stack<S: Stack>(stack: &mut S, bytes: &[u8]) -> UBig {
if bytes.len() <= WORD_BYTES {
// fast path
UBig::from_word(primitive::word_from_le_bytes_partial(bytes))
@ -40,7 +40,7 @@ impl UBig {
}
}
- fn from_le_bytes_large_stack(stack: &mut dyn Stack, bytes: &[u8]) -> UBig {
+ fn from_le_bytes_large_stack<S: Stack>(stack: &mut S, bytes: &[u8]) -> UBig {
debug_assert!(bytes.len() > WORD_BYTES);
let mut buffer = Buffer::allocate_stack(stack, (bytes.len() - 1) / WORD_BYTES + 1);
let mut chunks = bytes.chunks_exact(WORD_BYTES);
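`from_le_bytes_stack` takes a fast path when the input fits in one word and otherwise sizes a buffer as `(bytes.len() - 1) / WORD_BYTES + 1` words (ceiling division). A sketch of the single-word path, assuming 8-byte words and that `primitive::word_from_le_bytes_partial` zero-pads short inputs:

```rust
// Interpret up to 8 little-endian bytes as a u64, zero-padding the rest.
// (Illustrative stand-in, not the crate's implementation.)
fn le_bytes_to_word(bytes: &[u8]) -> u64 {
    let mut padded = [0u8; 8];
    padded[..bytes.len()].copy_from_slice(bytes);
    u64::from_le_bytes(padded)
}

fn main() {
    assert_eq!(le_bytes_to_word(&[0x01]), 1);
    assert_eq!(le_bytes_to_word(&[0x00, 0x01]), 256);
}
```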
@ -552,7 +552,7 @@ impl TryFrom<&IBig> for UBig {
impl UBig {
#[inline]
- pub(crate) fn from_unsigned_stack<T>(stack: &mut dyn Stack, x: T) -> UBig
+ pub(crate) fn from_unsigned_stack<S: Stack, T>(stack: &mut S, x: T) -> UBig
where
T: PrimitiveUnsigned,
{


@ -1279,7 +1279,7 @@ impl_div_ibig_signed!(isize);
impl UBig {
#[inline]
- pub fn div_stack(stack: &mut dyn Stack, lhs: UBig, rhs: UBig) -> UBig {
+ pub fn div_stack<S: Stack>(stack: &mut S, lhs: UBig, rhs: UBig) -> UBig {
match (lhs.into_repr(), rhs.into_repr()) {
(Small(word0), Small(word1)) => UBig::div_word(word0, word1),
(Small(_), Large(_)) => UBig::from_word(0),
@ -1295,7 +1295,7 @@ impl UBig {
}
#[inline]
- pub fn rem_stack(stack: &mut dyn Stack, lhs: UBig, rhs: UBig) -> UBig {
+ pub fn rem_stack<S: Stack>(stack: &mut S, lhs: UBig, rhs: UBig) -> UBig {
match (lhs.into_repr(), rhs.into_repr()) {
(Small(word0), Small(word1)) => UBig::rem_word(word0, word1),
(Small(word0), Large(_)) => UBig::from_word(word0),
@ -1311,7 +1311,7 @@ impl UBig {
}
#[inline]
- pub fn div_rem_stack(stack: &mut dyn Stack, lhs: UBig, rhs: UBig) -> (UBig, UBig) {
+ pub fn div_rem_stack<S: Stack>(stack: &mut S, lhs: UBig, rhs: UBig) -> (UBig, UBig) {
match (lhs.into_repr(), rhs.into_repr()) {
(Small(word0), Small(word1)) => UBig::div_rem_word(word0, word1),
(Small(word0), Large(_)) => (UBig::from_word(0), UBig::from_word(word0)),
@ -1327,14 +1327,14 @@ impl UBig {
}
/// `lhs / rhs`
- fn div_large_stack(stack: &mut dyn Stack, mut lhs: Buffer, mut rhs: Buffer) -> UBig {
+ fn div_large_stack<S: Stack>(stack: &mut S, mut lhs: Buffer, mut rhs: Buffer) -> UBig {
let _shift = UBig::div_rem_in_lhs_stack(stack, &mut lhs, &mut rhs);
lhs.erase_front(rhs.len());
lhs.into()
}
/// `lhs % rhs`
- fn rem_large_stack(stack: &mut dyn Stack, mut lhs: Buffer, mut rhs: Buffer) -> UBig {
+ fn rem_large_stack<S: Stack>(stack: &mut S, mut lhs: Buffer, mut rhs: Buffer) -> UBig {
let shift = UBig::div_rem_in_lhs_stack(stack, &mut lhs, &mut rhs);
let n = rhs.len();
rhs.copy_from_slice(&lhs[..n]);
@ -1344,8 +1344,8 @@ impl UBig {
}
/// `(lhs / rhs, lhs % rhs)`
- fn div_rem_large_stack(
-     stack: &mut dyn Stack,
+ fn div_rem_large_stack<S: Stack>(
+     stack: &mut S,
mut lhs: Buffer,
mut rhs: Buffer,
) -> (UBig, UBig) {
@ -1361,7 +1361,7 @@ impl UBig {
/// lhs = (lhs / rhs, lhs % rhs)
///
/// Returns shift.
- fn div_rem_in_lhs_stack(stack: &mut dyn Stack, lhs: &mut Buffer, rhs: &mut Buffer) -> u32 {
+ fn div_rem_in_lhs_stack<S: Stack>(stack: &mut S, lhs: &mut Buffer, rhs: &mut Buffer) -> u32 {
let (shift, fast_div_rhs_top) = div::normalize_large(rhs);
let lhs_carry = shift::shl_in_place(lhs, shift);
if lhs_carry != 0 {
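`div_rem_in_lhs_stack` performs the usual normalization step of multiword long division: shift the divisor so its top word has the high bit set, shift the dividend by the same amount, and return the shift so the remainder can be shifted back afterwards. A small sketch of computing that shift from the divisor's top word (illustrative, not the crate's `div::normalize_large`):

```rust
// Shift that moves the divisor's most significant set bit into the top bit
// position of its highest word, as required by schoolbook (Knuth) division.
fn normalization_shift(divisor_top_word: u64) -> u32 {
    debug_assert!(divisor_top_word != 0);
    divisor_top_word.leading_zeros()
}

fn main() {
    assert_eq!(normalization_shift(1), 63); // 1 << 63 sets the top bit
    assert_eq!(normalization_shift(u64::MAX), 0); // already normalized
    assert_eq!(normalization_shift(1 << 62), 1);
}
```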


@ -9,7 +9,7 @@ pub(crate) struct MemoryAllocation {
start: *mut u8,
}
- pub trait Stack {
+ pub trait Stack: Sized {
unsafe fn alloc_layout(&mut self, layout: Layout) -> *mut u64;
}
@ -24,7 +24,7 @@ pub(crate) struct Memory<'a> {
}
impl MemoryAllocation {
- pub(crate) fn new_stack(stack: &mut dyn Stack, layout: Layout) -> MemoryAllocation {
+ pub(crate) fn new_stack<S: Stack>(stack: &mut S, layout: Layout) -> MemoryAllocation {
let start = if layout.size() == 0 {
// We should use layout.dangling(), but that is unstable.
layout.align() as *mut u8
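`Stack` is the allocation trait threaded through all of the `_stack` functions in this fork of ibig; after this commit it is `Sized` and consumed generically. A minimal toy implementation of its one method, with the trait re-declared locally so the sketch stands alone (this is not the interpreter's actual `NockStack`, and it simply leaks each allocation):

```rust
use core::alloc::Layout;

trait Stack: Sized {
    unsafe fn alloc_layout(&mut self, layout: Layout) -> *mut u64;
}

// Toy Stack: grab memory from the global allocator and never free it.
// A real implementation would bump a pointer inside a preallocated arena.
struct LeakyStack;

impl Stack for LeakyStack {
    unsafe fn alloc_layout(&mut self, layout: Layout) -> *mut u64 {
        unsafe { std::alloc::alloc(layout) as *mut u64 }
    }
}

fn alloc_words<S: Stack>(stack: &mut S, words: usize) -> *mut u64 {
    assert!(words > 0, "zero-sized allocations are not supported here");
    let layout = Layout::array::<u64>(words).expect("layout overflow");
    unsafe { stack.alloc_layout(layout) }
}

fn main() {
    let mut stack = LeakyStack;
    let ptr = alloc_words(&mut stack, 4);
    assert!(!ptr.is_null());
}
```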


@ -300,7 +300,7 @@ impl_mul_ibig_primitive!(isize);
impl UBig {
#[inline]
- pub fn mul_stack(stack: &mut dyn Stack, lhs: UBig, rhs: UBig) -> UBig {
+ pub fn mul_stack<S: Stack>(stack: &mut S, lhs: UBig, rhs: UBig) -> UBig {
match (lhs.into_repr(), rhs.into_repr()) {
(Small(word0), Small(word1)) => UBig::mul_word_stack(stack, word0, word1),
(Small(word0), Large(buffer1)) => UBig::mul_large_word_stack(stack, buffer1, word0),
@ -310,11 +310,11 @@ impl UBig {
}
#[inline]
- fn mul_word_stack(stack: &mut dyn Stack, a: Word, b: Word) -> UBig {
+ fn mul_word_stack<S: Stack>(stack: &mut S, a: Word, b: Word) -> UBig {
UBig::from_unsigned_stack(stack, extend_word(a) * extend_word(b))
}
- fn mul_large_word_stack(stack: &mut dyn Stack, mut buffer: Buffer, a: Word) -> UBig {
+ fn mul_large_word_stack<S: Stack>(stack: &mut S, mut buffer: Buffer, a: Word) -> UBig {
match a {
0 => UBig::from_word(0),
1 => buffer.into(),
@ -328,7 +328,7 @@ impl UBig {
}
}
- pub fn mul_large_stack(stack: &mut dyn Stack, lhs: &[Word], rhs: &[Word]) -> UBig {
+ pub fn mul_large_stack<S: Stack>(stack: &mut S, lhs: &[Word], rhs: &[Word]) -> UBig {
debug_assert!(lhs.len() >= 2 && rhs.len() >= 2);
// This may be 1 too large.
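`mul_word_stack` widens both words (`extend_word`) before multiplying so the full double-width product is kept, then converts it into a `UBig`. The same widening multiply with plain `u128`, split back into little-endian 64-bit halves (toy code, not the crate's word types):

```rust
// Full 128-bit product of two 64-bit words, as (low, high) halves.
fn mul_words(a: u64, b: u64) -> (u64, u64) {
    let wide = (a as u128) * (b as u128);
    (wide as u64, (wide >> 64) as u64)
}

fn main() {
    // (2^64 - 1)^2 = 2^128 - 2^65 + 1, i.e. low word 1 and high word 2^64 - 2.
    let (low, high) = mul_words(u64::MAX, u64::MAX);
    assert_eq!(low, 1);
    assert_eq!(high, u64::MAX - 1);
}
```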