[ares] [wip] hamt and trait-based object preservation on stack pop

Edward Amsden 2023-02-16 12:46:07 -06:00
parent 5edfc1a801
commit 646576dd56
7 changed files with 567 additions and 221 deletions

rust/ares/src/hamt.rs (new file, 305 lines)

@ -0,0 +1,305 @@
use std::marker::PhantomData;
use either::Either::{self, *};
use crate::noun::Noun;
use crate::mem::{NockStack, unifying_equality, Preserve};
use crate::mug::mug_u32;
use std::ptr::{write_bytes, copy_nonoverlapping};
/// A HamtNode is a pointer to a buffer, structured as follows:
///
/// word 0: occupancy bitmap for mug chunk values
/// word 1: type bitmap for occupied mug chunk values (clear - node, set - leaf)
/// following words: an entry for each set bit in the occupancy bitmap, pointer to HamtNode or HamtLeaf as
/// given by type bitmap
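///
/// For example (illustrative values), a node holding entries for mug chunks 3 and 9,
/// where chunk 3 is a leaf and chunk 9 is a child node, would be laid out as:
///
/// word 0: 0b1000001000 (bits 3 and 9 set)
/// word 1: 0b0000001000 (bit 3 set: leaf; bit 9 clear: node)
/// word 2: pointer to the HamtLeaf for chunk 3
/// word 3: pointer to the HamtNode for chunk 9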
#[derive(Copy,Clone)]
struct HamtNode<T> {
ptr: *mut u64,
phantom: PhantomData<T>
}
#[inline]
fn chunk_to_bit(chunk: u32) -> u64 {
1u64 << chunk
}
#[inline]
fn chunk_to_mask(chunk: u32) -> u64 {
chunk_to_bit(chunk) - 1
}
#[inline]
fn ptr_as_node<T>(ptr: *mut u64) -> HamtNode<T> {
HamtNode {
ptr: ptr,
phantom: PhantomData::<T>,
}
}
impl<T: Copy> HamtNode<T> {
unsafe fn new_raw(stack: &mut NockStack, entries: usize) -> Self {
let buf = stack.struct_alloc(entries + 2);
write_bytes(buf, 0, entries + 2);
ptr_as_node(buf)
}
fn size(self) -> usize {
unsafe {
(*self.ptr).count_ones() as usize
}
}
fn bitmap(self) -> u64 {
unsafe { *self.ptr }
}
fn typemap(self) -> u64 {
unsafe { *self.ptr.add(1) }
}
#[inline]
fn index(self, chunk: u32) -> Option<usize> {
if self.bitmap() & chunk_to_bit(chunk) != 0 {
Some((self.bitmap() & chunk_to_mask(chunk)).count_ones() as usize)
} else {
None
}
}
fn entry(self, chunk: u32) -> Option<(Either<HamtNode<T>, *const HamtLeaf<T>>, usize)> {
self.index(chunk).map(|idx| {
(unsafe {
if (*self.ptr.add(1)) & chunk_to_bit(chunk) == 0 {
Left(ptr_as_node(*(self.ptr.add(2 + idx)) as *mut u64))
} else {
Right((*self.ptr.add(2 + idx)) as *const HamtLeaf<T>)
}
}, idx)
})
}
}
/// A HamtLeaf is an entry count (`claimants`) and a pointer to a memory buffer of map entries
struct HamtLeaf<T> {
claimants: usize,
entries: *mut (Noun, T),
}
pub struct Hamt<T>(HamtNode<T>);
impl<T: Copy> Hamt<T> {
pub fn new(stack: &mut NockStack) -> Self {
unsafe {
Hamt(HamtNode::new_raw(stack, 0))
}
}
/// Look up a noun in an immutable HAMT and return the associated value
pub fn lookup(self, stack: &mut NockStack, n: &mut Noun) -> Option<T> {
let mut node = self.0;
let mut mug = mug_u32(stack, *n);
'lookup: loop {
unsafe {
let mug_chunk = mug & 0x3f;
mug = mug >> 6;
match node.entry(mug_chunk) {
None => { break None; },
Some((Left(next_node), _idx)) => {
node = next_node;
continue;
},
Some((Right(leaf), _idx)) => {
for i in 0..(*leaf).claimants {
if unifying_equality(stack, &mut (*(*leaf).entries.add(i)).0, n) {
break 'lookup Some((*(*leaf).entries.add(i)).1);
}
};
break None;
},
}
}
}
}
/// Insert a pair into an immutable HAMT, creating a new HAMT
///
/// The noun key must be mutable to support unifying equality
pub fn insert(self, stack: &mut NockStack, n: &mut Noun, t: T) -> Self {
let mut node = self.0;
let mut new_node = unsafe { HamtNode::<T>::new_raw(stack, node.size() + 1) };
let ret = Hamt(new_node);
let mut depth = 0u8;
let mut mug = mug_u32(stack, *n);
'insert: loop {
unsafe {
depth += 1;
let mug_chunk = mug & 0x3f; // least-significant 6 bits
mug = mug >> 6;
match node.entry(mug_chunk) {
None => { // no entry in the bitmap, write a leaf
let new_bitmap = node.bitmap() | chunk_to_bit(mug_chunk);
let new_typemap = node.typemap() | chunk_to_bit(mug_chunk);
*new_node.ptr = new_bitmap;
*new_node.ptr.add(1) = new_typemap;
let new_leaf_buf = stack.struct_alloc(1);
*new_leaf_buf = (*n, t);
let new_leaf = stack.struct_alloc(1);
*new_leaf = HamtLeaf {
claimants: 1,
entries: new_leaf_buf
};
let split = (node.bitmap() & chunk_to_mask(mug_chunk)).count_ones() as usize;
copy_nonoverlapping(node.ptr.add(2), new_node.ptr.add(2), split);
*new_node.ptr.add(2+split) = new_leaf as u64;
copy_nonoverlapping(node.ptr.add(2+split), new_node.ptr.add(3+split), node.size() - split);
break;
},
// there's already a node at this entry, insert into it
Some((Left(next_node), idx)) => {
let next_new_node = HamtNode::new_raw(stack, next_node.size() + 1);
copy_nonoverlapping(node.ptr, new_node.ptr, node.size() + 2);
*new_node.ptr.add(2 + idx) = next_new_node.ptr as u64;
node = next_node;
new_node = next_new_node;
continue;
},
Some((Right(leaf), idx)) => {
// check whether we should overwrite a key
for i in 0..(*leaf).claimants {
if unifying_equality(stack, &mut (*(*leaf).entries.add(i)).0, n) {
let new_leaf_buf = stack.struct_alloc((*leaf).claimants);
copy_nonoverlapping((*leaf).entries, new_leaf_buf, (*leaf).claimants);
(*new_leaf_buf.add(i)).1 = t;
let new_leaf = stack.struct_alloc(1);
*new_leaf = HamtLeaf {
claimants: (*leaf).claimants,
entries: new_leaf_buf,
};
copy_nonoverlapping(node.ptr, new_node.ptr, node.size() + 2);
*new_node.ptr.add(2+idx) = new_leaf as u64;
break 'insert;
}
}
// We have gone as far as we can by distinguishing mugs, chain by nouns now
if depth >= 6 {
// append to this leaf
let new_leaf_buf = stack.struct_alloc((*leaf).claimants + 1);
copy_nonoverlapping((*leaf).entries, new_leaf_buf, (*leaf).claimants);
*new_leaf_buf.add((*leaf).claimants) = (*n, t);
let new_leaf = stack.struct_alloc(1);
*new_leaf = HamtLeaf {
claimants: (*leaf).claimants + 1,
entries: new_leaf_buf,
};
copy_nonoverlapping(node.ptr, new_node.ptr, node.size() + 2);
*new_node.ptr.add(2+idx) = new_leaf as u64;
break;
// We encountered a leaf which we should push down as a node
} else {
// We make a node which won't go in our new tree, but contains the existing
// leaf in the proper spot in the bitmap for the next level. We use this as
// the value of `node` in the next iteration.
// We then allocate our next new node as usual, set up the references in the
// current new_node, update the iterators, and go around again
//
// assertion: we haven't gone deep enough to chain at leaves, so there is
// only one key-value pair at this leaf
assert!((*leaf).claimants == 1);
let rival = (*(*leaf).entries).0;
let rival_mug = mug_u32(stack, rival);
let rival_mug_chunk = rival_mug >> (depth * 6) & 0x3f;
let rival_mug_bit = chunk_to_bit(rival_mug_chunk);
let fake_next_leaf_buf = stack.struct_alloc(1);
copy_nonoverlapping((*leaf).entries, fake_next_leaf_buf, 1);
let fake_next_leaf = stack.struct_alloc(1);
*fake_next_leaf = HamtLeaf {
claimants: 1,
entries: fake_next_leaf_buf,
};
let fake_next_node = HamtNode::new_raw(stack, 1);
*fake_next_node.ptr = rival_mug_bit;
*fake_next_node.ptr.add(1) = rival_mug_bit;
*fake_next_node.ptr.add(2) = fake_next_leaf as u64;
copy_nonoverlapping(node.ptr, new_node.ptr, node.size() + 2);
let next_new_node = HamtNode::new_raw(stack, 2);
*new_node.ptr.add(2 + idx) = next_new_node.ptr as u64;
// this entry now points to a node, so clear its leaf bit in the copied typemap
*new_node.ptr.add(1) &= !chunk_to_bit(mug_chunk);
node = fake_next_node;
new_node = next_new_node;
continue;
}
},
}
}
};
return ret;
}
}
impl <T: Copy + Preserve> Preserve for Hamt<T> {
unsafe fn preserve(&mut self, stack: &mut NockStack) {
// we special case the outer copy because its destination is just a pointer and not a
// pointer + index
if stack.in_frame((*self).0.ptr) {
let dest_buf = stack.struct_alloc_in_previous_frame::<u64>((*self).0.size() + 2);
copy_nonoverlapping((*self).0.ptr, dest_buf, (*self).0.size() + 2);
(*self).0.ptr = dest_buf;
let traversal_stack = stack.struct_alloc::<(*mut u64, u32)>(6);
let mut traversal_depth = 1;
*traversal_stack = (dest_buf, 0);
'preserve: loop {
if traversal_depth == 0 { break; }
let (buffer, mut position) = *traversal_stack.add(traversal_depth - 1);
let node: HamtNode<T> = ptr_as_node(buffer);
'preserve_node: loop {
if position >= 64 {
traversal_depth -= 1;
continue 'preserve;
}
match node.entry(position) {
None => {
position += 1;
continue 'preserve_node;
},
Some((Left(next_node), idx)) => {
// Another node
let size = next_node.size();
if stack.in_frame(next_node.ptr) {
let dest_buf = stack.struct_alloc_in_previous_frame::<u64>(size + 2);
copy_nonoverlapping(next_node.ptr, dest_buf, size + 2);
assert!(traversal_depth <= 5); // We're gonna be incrementing so it
// has to stay below 6
*node.ptr.add(idx+2) = dest_buf as u64;
(*traversal_stack.add(traversal_depth - 1)).1 = position + 1;
*traversal_stack.add(traversal_depth) = (dest_buf, 0);
traversal_depth = traversal_depth + 1;
continue 'preserve;
} else {
position += 1;
continue 'preserve_node;
}
},
Some((Right(leaf), idx)) => {
// Leaf structs and entry buffers get allocated together so no need to
// check the entry buffer separately
if stack.in_frame(leaf as *const u64) {
let new_entries = stack.struct_alloc_in_previous_frame::<(Noun, T)>((*leaf).claimants);
copy_nonoverlapping((*leaf).entries, new_entries, (*leaf).claimants);
for i in 0 .. (*leaf).claimants {
(*new_entries.add(i)).0.preserve(stack);
(*new_entries.add(i)).1.preserve(stack);
};
let new_leaf = stack.alloc_in_previous_frame::<HamtLeaf<T>>();
(*new_leaf) = HamtLeaf {
claimants: (*leaf).claimants,
entries: new_entries,
};
*node.ptr.add(idx + 2) = new_leaf as u64;
}
position +=1;
continue 'preserve_node;
}
}
}
}
}
}
}
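
The sparse indexing in HamtNode::index above is the standard HAMT popcount trick: an occupied chunk's entry sits at the count of set bitmap bits below its own bit. A minimal, self-contained sketch of just that arithmetic, independent of the crate:

fn index(bitmap: u64, chunk: u32) -> Option<usize> {
    let bit = 1u64 << chunk;
    if bitmap & bit != 0 {
        // Entries are stored in ascending chunk order, so an entry's slot is
        // the number of occupied chunks below it.
        Some((bitmap & (bit - 1)).count_ones() as usize)
    } else {
        None
    }
}

fn main() {
    let bitmap = (1u64 << 1) | (1 << 5) | (1 << 9); // chunks 1, 5, 9 occupied
    assert_eq!(index(bitmap, 1), Some(0));
    assert_eq!(index(bitmap, 5), Some(1));
    assert_eq!(index(bitmap, 9), Some(2));
    assert_eq!(index(bitmap, 2), None); // absent chunk
}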

rust/ares/src/interpreter.rs

@ -78,140 +78,122 @@ pub fn interpret(
*(stack.local_noun_pointer(0)) = work_to_noun(Done);
}
push_formula(stack, formula);
loop {
match unsafe { noun_to_work(*(stack.local_noun_pointer(0))) } {
Done => {
stack.pop(&mut res);
break;
}
NockCellComputeHead => {
unsafe {
unsafe {
loop {
match noun_to_work(*(stack.local_noun_pointer(0))) {
Done => {
stack.preserve(&mut res);
stack.pop();
break;
}
NockCellComputeHead => {
*stack.local_noun_pointer(0) = work_to_noun(NockCellComputeTail);
let formula = *stack.local_noun_pointer(1);
push_formula(stack, formula);
};
}
NockCellComputeTail => {
unsafe {
}
NockCellComputeTail => {
*(stack.local_noun_pointer(0)) = work_to_noun(NockCellCons);
*(stack.local_noun_pointer(1)) = res;
let formula = *stack.local_noun_pointer(2);
push_formula(stack, formula);
};
}
NockCellCons => {
unsafe {
}
NockCellCons => {
let head = *stack.local_noun_pointer(1);
res = Cell::new(stack, head, res).as_noun();
};
stack.pop(&mut res);
}
Nock0Axis => {
if let Ok(atom) = unsafe { (*(stack.local_noun_pointer(1))).as_atom() } {
res = slot(subject, atom.as_bitslice());
stack.pop(&mut res);
} else {
panic!("Axis must be atom");
};
}
Nock1Constant => {
unsafe {
res = *(stack.local_noun_pointer(1));
stack.preserve(&mut res);
stack.pop();
}
stack.pop(&mut res);
}
Nock2ComputeSubject => {
unsafe {
*(stack.local_noun_pointer(0)) = work_to_noun(Nock2ComputeFormula);
let formula = *stack.local_noun_pointer(1);
push_formula(stack, formula);
};
}
Nock2ComputeFormula => {
unsafe {
Nock0Axis => {
if let Ok(atom) = (*(stack.local_noun_pointer(1))).as_atom() {
res = slot(subject, atom.as_bitslice());
stack.preserve(&mut res);
stack.pop();
} else {
panic!("Axis must be atom");
};
}
Nock1Constant => {
res = *(stack.local_noun_pointer(1));
stack.preserve(&mut res);
stack.pop();
}
Nock2ComputeSubject => {
*(stack.local_noun_pointer(0)) = work_to_noun(Nock2ComputeFormula);
let formula = *stack.local_noun_pointer(1);
push_formula(stack, formula);
}
Nock2ComputeFormula => {
*(stack.local_noun_pointer(0)) = work_to_noun(Nock2ComputeResult);
*(stack.local_noun_pointer(1)) = res;
let formula = *stack.local_noun_pointer(2);
push_formula(stack, formula);
};
}
Nock2ComputeResult => {
unsafe {
}
Nock2ComputeResult => {
*(stack.local_noun_pointer(0)) = work_to_noun(Nock2RestoreSubject);
*(stack.local_noun_pointer(2)) = subject;
subject = *(stack.local_noun_pointer(1));
push_formula(stack, res);
};
}
Nock2RestoreSubject => unsafe {
subject = *(stack.local_noun_pointer(2));
stack.pop(&mut res);
},
Nock3ComputeChild => unsafe {
*(stack.local_noun_pointer(0)) = work_to_noun(Nock3ComputeType);
let formula = *stack.local_noun_pointer(1);
push_formula(stack, formula);
},
Nock3ComputeType => {
res = unsafe {
if res.is_cell() {
}
Nock2RestoreSubject => {
subject = *(stack.local_noun_pointer(2));
stack.preserve(&mut res);
stack.pop();
},
Nock3ComputeChild => {
*(stack.local_noun_pointer(0)) = work_to_noun(Nock3ComputeType);
let formula = *stack.local_noun_pointer(1);
push_formula(stack, formula);
},
Nock3ComputeType => {
res = if res.is_cell() {
DirectAtom::new_unchecked(0).as_atom().as_noun()
} else {
DirectAtom::new_unchecked(1).as_atom().as_noun()
}
};
stack.pop(&mut res);
}
Nock4ComputeChild => {
unsafe {
};
stack.preserve(&mut res);
stack.pop();
}
Nock4ComputeChild => {
*(stack.local_noun_pointer(0)) = work_to_noun(Nock4Increment);
let formula = *stack.local_noun_pointer(1);
push_formula(stack, formula);
};
}
Nock4Increment => {
if let Ok(atom) = res.as_atom() {
res = inc(stack, atom).as_noun();
stack.pop(&mut res);
} else {
panic!("Cannot increment (Nock 4) a cell");
};
}
Nock5ComputeLeftChild => {
unsafe {
}
Nock4Increment => {
if let Ok(atom) = res.as_atom() {
res = inc(stack, atom).as_noun();
stack.preserve(&mut res);
stack.pop();
} else {
panic!("Cannot increment (Nock 4) a cell");
};
}
Nock5ComputeLeftChild => {
*(stack.local_noun_pointer(0)) = work_to_noun(Nock5ComputeRightChild);
let formula = *stack.local_noun_pointer(1);
push_formula(stack, formula);
};
}
Nock5ComputeRightChild => {
unsafe {
}
Nock5ComputeRightChild => {
*(stack.local_noun_pointer(0)) = work_to_noun(Nock5TestEquals);
*(stack.local_noun_pointer(1)) = res;
let formula = *stack.local_noun_pointer(2);
push_formula(stack, formula);
};
}
Nock5TestEquals => {
unsafe {
}
Nock5TestEquals => {
let saved_value_ptr = stack.local_noun_pointer(1);
res = if unifying_equality(stack, &mut res, saved_value_ptr) {
DirectAtom::new_unchecked(0).as_atom().as_noun()
} else {
DirectAtom::new_unchecked(1).as_atom().as_noun()
};
stack.pop(&mut res);
};
}
Nock6ComputeTest => {
unsafe {
stack.preserve(&mut res);
stack.pop();
}
Nock6ComputeTest => {
*(stack.local_noun_pointer(0)) = work_to_noun(Nock6ComputeBranch);
let formula = *stack.local_noun_pointer(1);
push_formula(stack, formula);
};
}
Nock6ComputeBranch => {
unsafe {
}
Nock6ComputeBranch => {
*(stack.local_noun_pointer(0)) = work_to_noun(Nock6Done);
if let Left(direct) = res.as_either_direct_allocated() {
if direct.data() == 0 {
@ -226,64 +208,51 @@ pub fn interpret(
} else {
panic!("Test branch of Nock 6 must return a direct atom");
}
};
}
Nock6Done => {
stack.pop(&mut res);
}
Nock7ComputeSubject => {
unsafe {
}
Nock6Done => {
stack.preserve(&mut res);
stack.pop();
}
Nock7ComputeSubject => {
*(stack.local_noun_pointer(0)) = work_to_noun(Nock7ComputeResult);
let formula = *stack.local_noun_pointer(1);
push_formula(stack, formula);
};
}
Nock7ComputeResult => {
unsafe {
}
Nock7ComputeResult => {
*(stack.local_noun_pointer(0)) = work_to_noun(Nock7RestoreSubject);
*(stack.local_noun_pointer(1)) = subject;
subject = res;
let formula = *stack.local_noun_pointer(2);
push_formula(stack, formula);
};
}
Nock7RestoreSubject => {
unsafe {
}
Nock7RestoreSubject => {
subject = *(stack.local_noun_pointer(1));
stack.pop(&mut res);
};
}
Nock8ComputeSubject => {
unsafe {
stack.preserve(&mut res);
stack.pop();
}
Nock8ComputeSubject => {
*(stack.local_noun_pointer(0)) = work_to_noun(Nock8ComputeResult);
let formula = *stack.local_noun_pointer(1);
push_formula(stack, formula);
};
}
Nock8ComputeResult => {
unsafe {
}
Nock8ComputeResult => {
*(stack.local_noun_pointer(0)) = work_to_noun(Nock8RestoreSubject);
*(stack.local_noun_pointer(1)) = subject;
subject = Cell::new(stack, res, subject).as_noun();
let formula = *stack.local_noun_pointer(2);
push_formula(stack, formula);
};
}
Nock8RestoreSubject => {
unsafe {
}
Nock8RestoreSubject => {
subject = *(stack.local_noun_pointer(1));
stack.pop(&mut res);
};
}
Nock9ComputeCore => {
unsafe {
stack.preserve(&mut res);
stack.pop();
}
Nock9ComputeCore => {
*(stack.local_noun_pointer(0)) = work_to_noun(Nock9ComputeResult);
let formula = *stack.local_noun_pointer(2);
push_formula(stack, formula);
};
}
Nock9ComputeResult => {
unsafe {
}
Nock9ComputeResult => {
if let Ok(formula_axis) = (*(stack.local_noun_pointer(1))).as_atom() {
*(stack.local_noun_pointer(0)) = work_to_noun(Nock9RestoreSubject);
*(stack.local_noun_pointer(2)) = subject;
@ -292,62 +261,67 @@ pub fn interpret(
} else {
panic!("Axis into core must be atom");
}
};
}
Nock9RestoreSubject => unsafe {
subject = *(stack.local_noun_pointer(2));
stack.pop(&mut res);
},
Nock10ComputeTree => unsafe {
*(stack.local_noun_pointer(0)) = work_to_noun(Nock10ComputePatch);
let formula = *stack.local_noun_pointer(3);
push_formula(stack, formula);
},
Nock10ComputePatch => unsafe {
*(stack.local_noun_pointer(0)) = work_to_noun(Nock10Edit);
*(stack.local_noun_pointer(3)) = res;
let formula = *stack.local_noun_pointer(2);
push_formula(stack, formula);
},
Nock10Edit => unsafe {
if let Ok(edit_axis) = (*stack.local_noun_pointer(1)).as_atom() {
let tree = *stack.local_noun_pointer(3);
res = edit(stack, edit_axis.as_bitslice(), res, tree);
stack.pop(&mut res);
} else {
panic!("Axis into tree must be atom");
}
},
Nock11ComputeHint => unsafe {
let hint = *stack.local_noun_pointer(1);
if let Ok(hint_cell) = hint.as_cell() {
if let Ok(found) = match_pre_hint(stack, subject, hint_cell) {
res = found;
stack.pop(&mut res);
} else {
*(stack.local_noun_pointer(0)) = work_to_noun(Nock11ComputeResult);
push_formula(stack, hint_cell.tail());
}
} else {
panic!("IMPOSSIBLE: tried to compute a dynamic hint but hint is an atom");
}
},
Nock11ComputeResult => unsafe {
let hint = *stack.local_noun_pointer(1);
if let Ok(found) = match_post_hint(stack, newt, subject, hint, res) {
res = found;
stack.pop(&mut res);
} else {
*(stack.local_noun_pointer(0)) = work_to_noun(Nock11Done);
Nock9RestoreSubject => {
subject = *(stack.local_noun_pointer(2));
stack.preserve(&mut res);
stack.pop();
},
Nock10ComputeTree => {
*(stack.local_noun_pointer(0)) = work_to_noun(Nock10ComputePatch);
let formula = *stack.local_noun_pointer(3);
push_formula(stack, formula);
},
Nock10ComputePatch => {
*(stack.local_noun_pointer(0)) = work_to_noun(Nock10Edit);
*(stack.local_noun_pointer(3)) = res;
let formula = *stack.local_noun_pointer(2);
push_formula(stack, formula);
},
Nock10Edit => {
if let Ok(edit_axis) = (*stack.local_noun_pointer(1)).as_atom() {
let tree = *stack.local_noun_pointer(3);
res = edit(stack, edit_axis.as_bitslice(), res, tree);
stack.preserve(&mut res);
stack.pop();
} else {
panic!("Axis into tree must be atom");
}
},
Nock11ComputeHint => {
let hint = *stack.local_noun_pointer(1);
if let Ok(hint_cell) = hint.as_cell() {
if let Ok(found) = match_pre_hint(stack, subject, hint_cell) {
res = found;
stack.preserve(&mut res);
stack.pop();
} else {
*(stack.local_noun_pointer(0)) = work_to_noun(Nock11ComputeResult);
push_formula(stack, hint_cell.tail());
}
} else {
panic!("IMPOSSIBLE: tried to compute a dynamic hint but hint is an atom");
}
},
Nock11ComputeResult => {
let hint = *stack.local_noun_pointer(1);
if let Ok(found) = match_post_hint(stack, newt, subject, hint, res) {
res = found;
stack.preserve(&mut res);
stack.pop();
} else {
*(stack.local_noun_pointer(0)) = work_to_noun(Nock11Done);
let formula = *stack.local_noun_pointer(2);
push_formula(stack, formula);
}
},
Nock11Done => {
stack.preserve(&mut res);
stack.pop();
}
},
Nock11Done => {
stack.pop(&mut res);
}
};
};
}
};
res
}
@ -598,7 +572,7 @@ fn edit(
res
}
fn inc(stack: &mut NockStack, atom: Atom) -> Atom {
pub fn inc(stack: &mut NockStack, atom: Atom) -> Atom {
match atom.as_either() {
Left(direct) => Atom::new(stack, direct.data() + 1),
Right(indirect) => {

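The interpreter changes above are mechanical: each old stack.pop(&mut res) becomes a preserve of whatever must survive the frame, followed by a plain pop. A minimal sketch of the new convention, assuming a live NockStack frame and a Noun res as in interpret (both methods are unsafe in this commit):

unsafe {
    // res may point into allocations of the frame about to be popped
    stack.preserve(&mut res); // copy res (and anything it references) to the previous frame
    stack.pop();              // frame memory may now be reused; res remains valid
}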
rust/ares/src/lib.rs

@ -7,8 +7,10 @@ pub mod mug;
pub mod newt;
pub mod noun;
pub mod serf;
//pub mod bytecode;
pub mod serialization;
pub mod snapshot;
pub mod hamt;
#[cfg(test)]
mod tests {

rust/ares/src/mem.rs

@ -1,5 +1,5 @@
use crate::assert_acyclic;
use crate::noun::{Cell, CellMemory, IndirectAtom, Noun, NounAllocator};
use crate::noun::{Atom, Cell, CellMemory, IndirectAtom, Noun, NounAllocator};
use either::Either::{self, Left, Right};
use libc::{c_void, memcmp};
use memmap::MmapMut;
@ -108,6 +108,18 @@ impl NockStack {
self.size
}
#[inline]
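/** Whether a pointer points into the allocation region of the current stack frame */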
pub fn in_frame(&self, ptr: *const u64) -> bool {
match &self.polarity {
Polarity::East => {
ptr >= self.frame_pointer && ptr < self.stack_pointer
},
Polarity::West => {
ptr >= self.stack_pointer && ptr < self.frame_pointer
},
}
}
/** Mutable pointer to a slot in a stack frame: east stack */
unsafe fn slot_pointer_east(&mut self, slot: usize) -> *mut u64 {
self.frame_pointer.sub(slot + 1)
@ -229,6 +241,28 @@ impl NockStack {
}
}
unsafe fn struct_alloc_in_previous_frame_east<T>(&mut self, count: usize) -> *mut T {
let prev_stack_pointer_pointer = self.previous_stack_pointer_pointer_east();
// note that the allocation is on the west frame, and thus resembles raw_alloc_west
let alloc = *(prev_stack_pointer_pointer);
*prev_stack_pointer_pointer = (*prev_stack_pointer_pointer).add(word_size_of::<T>() * count);
alloc as *mut T
}
unsafe fn struct_alloc_in_previous_frame_west<T>(&mut self, count: usize) -> *mut T {
let prev_stack_pointer_pointer = self.previous_stack_pointer_pointer_west();
// note that the allocation is on the east frame, and thus resembles raw_alloc_east
*prev_stack_pointer_pointer = (*prev_stack_pointer_pointer).sub(word_size_of::<T>() * count);
*prev_stack_pointer_pointer as *mut T
}
pub unsafe fn struct_alloc_in_previous_frame<T>(&mut self, count: usize) -> *mut T {
match &self.polarity {
Polarity::East => self.struct_alloc_in_previous_frame_east(count),
Polarity::West => self.struct_alloc_in_previous_frame_west(count),
}
}
unsafe fn top_in_previous_frame_east<T>(&mut self) -> *mut T {
let prev_stack_pointer_pointer = self.previous_stack_pointer_pointer_east();
(*prev_stack_pointer_pointer).sub(word_size_of::<T>()) as *mut T
@ -309,7 +343,7 @@ impl NockStack {
self.raw_alloc_west(word_size_of::<T>() * count) as *mut T
}
unsafe fn struct_alloc<T>(&mut self, count: usize) -> *mut T {
pub unsafe fn struct_alloc<T>(&mut self, count: usize) -> *mut T {
match &self.polarity {
Polarity::East => self.struct_alloc_east::<T>(count),
Polarity::West => self.struct_alloc_west::<T>(count),
@ -539,48 +573,31 @@ impl NockStack {
assert_acyclic!(*noun);
}
/** Pop a frame from the (east) stack, providing a result, which will be copied to the return target
* (west) frame. */
unsafe fn pop_east(&mut self, result: &mut Noun) {
self.copy_east(result);
self.pop_no_copy_east();
}
unsafe fn pop_no_copy_east(&mut self) {
unsafe fn pop_east(&mut self) {
self.stack_pointer = *self.previous_stack_pointer_pointer_east();
self.frame_pointer = *self.previous_frame_pointer_pointer_east();
self.polarity = Polarity::West;
}
/** Pop a frame from the (west) stack, providing a result, which will be copied to the return target
* (east) frame. */
unsafe fn pop_west(&mut self, result: &mut Noun) {
self.copy_west(result);
self.pop_no_copy_west();
}
unsafe fn pop_no_copy_west(&mut self) {
unsafe fn pop_west(&mut self) {
self.stack_pointer = *self.previous_stack_pointer_pointer_west();
self.frame_pointer = *self.previous_frame_pointer_pointer_west();
self.polarity = Polarity::East;
}
pub unsafe fn pop_no_copy(&mut self) {
/** Pop a stack frame. If references to stack-allocated objects are maintained past a pop,
* then call `preserve()` to ensure those objects are copied out.
*/
pub unsafe fn pop(&mut self) {
match &self.polarity {
Polarity::East => self.pop_no_copy_east(),
Polarity::West => self.pop_no_copy_west(),
Polarity::East => self.pop_east(),
Polarity::West => self.pop_west(),
}
}
/** Pop a frame from the stack, providing a result, which will be copied to the return target
* frame. */
pub fn pop(&mut self, result: &mut Noun) {
unsafe {
match &self.polarity {
Polarity::East => self.pop_east(result),
Polarity::West => self.pop_west(result),
};
}
pub unsafe fn preserve<T: Preserve>(&mut self, x: &mut T) {
x.preserve(self);
}
/** Push a frame onto the west stack with 0 or more local variable slots.
@ -753,7 +770,7 @@ pub unsafe fn unifying_equality(stack: &mut NockStack, a: *mut Noun, b: *mut Nou
}
}
stack.restore_prev_stack_pointer_from_local(0);
stack.pop_no_copy();
stack.pop();
assert_acyclic!(*a);
assert_acyclic!(*b);
(*a).raw_equals(*b)
@ -828,3 +845,43 @@ impl NounAllocator for NockStack {
self.struct_alloc::<CellMemory>(1)
}
}
/// Immutable, acyclic objects which may be copied up the stack
pub trait Preserve {
/// Ensure an object will not be invalidated by popping the NockStack
unsafe fn preserve(&mut self, stack: &mut NockStack);
}
impl Preserve for IndirectAtom {
unsafe fn preserve(&mut self, stack: &mut NockStack) {
let size = indirect_raw_size(*self);
let buf = stack.struct_alloc_in_previous_frame::<u64>(size);
copy_nonoverlapping(self.to_raw_pointer(), buf, size);
*self = IndirectAtom::from_raw_pointer(buf);
}
}
impl Preserve for Atom {
unsafe fn preserve(&mut self, stack: &mut NockStack) {
match self.as_either() {
Left(_direct) => {},
Right(mut indirect) => {
indirect.preserve(stack);
*self = indirect.as_atom();
},
}
}
}
impl Preserve for Noun {
unsafe fn preserve(&mut self, stack: &mut NockStack) {
match stack.polarity {
Polarity::East => {
stack.copy_east(self);
},
Polarity::West => {
stack.copy_west(self);
},
}
}
}
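
Composite structures implement Preserve by delegating to their components, as the Hamt<T> impl in hamt.rs does for its leaf entries. A hypothetical example for a user-defined pair of nouns (not part of this commit):

struct NounPair {
    head: Noun,
    tail: Noun,
}

impl Preserve for NounPair {
    unsafe fn preserve(&mut self, stack: &mut NockStack) {
        // Copy each component up the stack before the current frame is popped.
        self.head.preserve(stack);
        self.tail.preserve(stack);
    }
}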

rust/ares/src/mug.rs

@ -165,7 +165,7 @@ pub fn mug_u32(stack: &mut NockStack, noun: Noun) -> u32 {
}
}
unsafe {
stack.pop_no_copy();
stack.pop();
get_mug(noun).expect("Noun should have a mug once it is mugged.")
}
}

rust/ares/src/noun.rs

@ -649,7 +649,7 @@ impl fmt::Debug for Allocated {
#[repr(C)]
#[repr(packed(8))]
pub union Noun {
raw: u64,
pub(crate) raw: u64,
direct: DirectAtom,
indirect: IndirectAtom,
atom: Atom,
@ -738,6 +738,10 @@ impl Noun {
pub unsafe fn raw_equals(self, other: Noun) -> bool {
self.raw == other.raw
}
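/// Construct a Noun from a raw word; the caller must guarantee it encodes a valid noun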
pub unsafe fn from_raw(raw: u64) -> Noun {
Noun { raw: raw }
}
}
impl fmt::Debug for Noun {

rust/ares/src/serialization.rs

@ -36,7 +36,10 @@ pub fn cue(stack: &mut NockStack, buffer: Atom) -> Noun {
if unsafe { stack.prev_stack_pointer_equals_local(0) } {
let mut result = unsafe { *(stack.local_noun_pointer(1)) };
assert_acyclic!(result);
stack.pop(&mut result);
unsafe {
stack.preserve(&mut result);
stack.pop();
}
break result;
} else {
let dest_ptr: *mut Noun = unsafe { *(stack.top_in_previous_frame()) };
@ -241,11 +244,12 @@ pub fn jam(stack: &mut NockStack, noun: Noun) -> Atom {
}
}
}
let mut result = unsafe { state.atom.normalize_as_atom().as_noun() };
stack.pop(&mut result);
result.as_atom().expect(
"IMPOSSIBLE: result was coerced from an atom so should not fail coercion to an atom",
)
unsafe {
let mut result = state.atom.normalize_as_atom();
stack.preserve(&mut result);
stack.pop();
result
}
}
fn jam_atom(traversal: &mut NockStack, state: &mut JamState, atom: Atom) {