stack push_east/push_west

This commit is contained in:
Chris Allen 2024-11-06 16:49:32 -06:00
parent 795ef7a0d1
commit 797777a7ac
17 changed files with 318 additions and 250 deletions

View File

@ -70,27 +70,27 @@ impl<T: Copy> MutHamt<T> {
}
}
pub fn lookup(self, stack: &mut NockStack, n: &mut Noun) -> Option<T> {
pub fn lookup(self, stack: &mut NockStack, n: &mut Noun) -> AllocResult<Option<T>> {
let mut stem = self.0;
let mut mug = mug_u32(stack, *n);
let mut mug = mug_u32(stack, *n)?;
unsafe {
'lookup: loop {
let chunk = mug & 0x1f;
mug >>= 5;
match (*stem).entry(chunk) {
None => {
break None;
break Ok(None);
}
Some(Left(next_stem)) => {
stem = next_stem;
}
Some(Right(leaf)) => {
for pair in leaf.to_mut_slice().iter_mut() {
if unifying_equality(stack, n, &mut pair.0) {
break 'lookup Some(pair.1);
if unifying_equality(stack, n, &mut pair.0)? {
break 'lookup Ok(Some(pair.1));
}
}
break None;
break Ok(None);
}
}
}
@ -99,7 +99,7 @@ impl<T: Copy> MutHamt<T> {
pub fn insert(self, stack: &mut NockStack, n: &mut Noun, t: T) -> AllocResult<()> {
let mut stem = self.0;
let mut mug = mug_u32(stack, *n);
let mut mug = mug_u32(stack, *n)?;
let mut depth = 0u8;
unsafe {
'insert: loop {
@ -126,7 +126,7 @@ impl<T: Copy> MutHamt<T> {
}
Some(Right(leaf)) => {
for pair in leaf.to_mut_slice().iter_mut() {
if unifying_equality(stack, n, &mut pair.0) {
if unifying_equality(stack, n, &mut pair.0)? {
pair.1 = t;
break 'insert;
}
@ -145,7 +145,7 @@ impl<T: Copy> MutHamt<T> {
} else {
assert!(leaf.len == 1);
let new_stem = stack.struct_alloc::<MutStem<T>>(1)?;
let leaf_mug = mug_u32(stack, (*leaf.buffer).0);
let leaf_mug = mug_u32(stack, (*leaf.buffer).0)?;
let leaf_chunk = (leaf_mug >> ((depth + 1) * 5)) & 0x1f;
(*new_stem).bitmap = chunk_to_bit(leaf_chunk);
(*new_stem).typemap = 0;
@ -308,15 +308,15 @@ impl<T: Copy + Preserve> Hamt<T> {
* A mutable reference is required so that unifying equality can unify the key with a key entry
* in the HAMT
*/
pub fn lookup(&self, stack: &mut NockStack, n: &mut Noun) -> Option<T> {
pub fn lookup(&self, stack: &mut NockStack, n: &mut Noun) -> AllocResult<Option<T>> {
let mut stem = unsafe { *self.0 };
let mut mug = mug_u32(stack, *n);
let mut mug = mug_u32(stack, *n)?;
'lookup: loop {
let chunk = mug & 0x1F; // 5 bits
mug >>= 5;
match stem.entry(chunk) {
None => {
break None;
break Ok(None);
}
Some((Left(next_stem), _idx)) => {
stem = next_stem;
@ -324,11 +324,11 @@ impl<T: Copy + Preserve> Hamt<T> {
}
Some((Right(leaf), _idx)) => {
for pair in unsafe { leaf.to_mut_slice().iter_mut() } {
if unsafe { unifying_equality(stack, n, &mut pair.0) } {
break 'lookup Some(pair.1);
if unsafe { unifying_equality(stack, n, &mut pair.0)? } {
break 'lookup Ok(Some(pair.1));
}
}
break None;
break Ok(None);
}
}
}
@ -338,7 +338,7 @@ impl<T: Copy + Preserve> Hamt<T> {
/// Make a new HAMT with the value inserted or replaced at the key.
pub fn insert(&self, stack: &mut NockStack, n: &mut Noun, t: T) -> AllocResult<Hamt<T>> {
let mut mug = mug_u32(stack, *n);
let mut mug = mug_u32(stack, *n)?;
let mut depth = 0u8;
let mut stem = unsafe { *self.0 };
let stem_ret = unsafe { stack.struct_alloc::<Stem<T>>(1) }?;
@ -395,7 +395,7 @@ impl<T: Copy + Preserve> Hamt<T> {
Some((Right(leaf), idx)) => {
// Override existing value for key, if one exists
for (ldx, pair) in leaf.to_mut_slice().iter_mut().enumerate() {
if unifying_equality(stack, n, &mut pair.0) {
if unifying_equality(stack, n, &mut pair.0)? {
let new_leaf_buffer = stack.struct_alloc(leaf.len)?;
copy_nonoverlapping(leaf.buffer, new_leaf_buffer, leaf.len);
(*new_leaf_buffer.add(ldx)).1 = t;
@ -448,7 +448,7 @@ impl<T: Copy + Preserve> Hamt<T> {
*fake_buffer = Entry { leaf };
// Get the mug chunk for the Noun at the *next* level so that we can
// build a fake stem for it
let fake_mug = mug_u32(stack, (*leaf.buffer).0);
let fake_mug = mug_u32(stack, (*leaf.buffer).0)?;
let fake_chunk = (fake_mug >> ((depth + 1) * 5)) & 0x1F;
let next_stem = Stem {
bitmap: chunk_to_bit(fake_chunk),
@ -609,13 +609,13 @@ impl<T: Copy + Preserve> Preserve for Hamt<T> {
}
impl<T: Copy + Persist> Persist for Hamt<T> {
unsafe fn space_needed(&mut self, stack: &mut NockStack) -> usize {
unsafe fn space_needed(&mut self, stack: &mut NockStack) -> AllocResult<usize> {
if pma_contains(self.0, 1) {
return 0;
return Ok(0);
}
let mut bytes: usize = size_of::<Stem<T>>();
if pma_contains((*self.0).buffer, (*self.0).size()) {
return bytes;
return Ok(bytes);
};
bytes += (*self.0).size() * size_of::<Entry<T>>();
@ -632,7 +632,7 @@ impl<T: Copy + Persist> Persist for Hamt<T> {
assert!(depth < 6);
if traversal[depth].bitmap == 0 {
if depth == 0 {
break bytes;
break Ok(bytes);
}
depth -= 1;
continue;
@ -680,8 +680,8 @@ impl<T: Copy + Persist> Persist for Hamt<T> {
bytes += size_of::<(Noun, T)>() * leaf.len;
while leaf.len > 0 {
bytes += (*leaf.buffer).0.space_needed(stack);
bytes += (*leaf.buffer).1.space_needed(stack);
bytes += (*leaf.buffer).0.space_needed(stack)?;
bytes += (*leaf.buffer).1.space_needed(stack)?;
leaf.buffer = leaf.buffer.add(1);
leaf.len -= 1;
}
@ -689,9 +689,9 @@ impl<T: Copy + Persist> Persist for Hamt<T> {
}
}
unsafe fn copy_to_buffer(&mut self, stack: &mut NockStack, buffer: &mut *mut u8) {
unsafe fn copy_to_buffer(&mut self, stack: &mut NockStack, buffer: &mut *mut u8) -> AllocResult<()> {
if pma_contains(self.0, 1) {
return;
return Ok(());
}
let stem_ptr = *buffer as *mut Stem<T>;
copy_nonoverlapping(self.0, stem_ptr, 1);
@ -700,7 +700,7 @@ impl<T: Copy + Persist> Persist for Hamt<T> {
let stem_buffer_size = (*stem_ptr).size();
if stem_buffer_size == 0 || pma_contains((*stem_ptr).buffer, stem_buffer_size) {
return;
return Ok(());
}
let stem_buffer_ptr = *buffer as *mut Entry<T>;
copy_nonoverlapping((*stem_ptr).buffer, stem_buffer_ptr, stem_buffer_size);
@ -785,15 +785,16 @@ impl<T: Copy + Persist> Persist for Hamt<T> {
while leaf_idx < (*leaf_ptr).len {
(*(*leaf_ptr).buffer.add(leaf_idx))
.0
.copy_to_buffer(stack, buffer);
.copy_to_buffer(stack, buffer)?;
(*(*leaf_ptr).buffer.add(leaf_idx))
.1
.copy_to_buffer(stack, buffer);
.copy_to_buffer(stack, buffer)?;
leaf_idx += 1;
}
}
}
Ok(())
}
unsafe fn handle_to_u64(&self) -> u64 {
@ -950,13 +951,13 @@ mod test {
let mut n = D(0);
let t = D(1);
hamt = hamt.insert(&mut stack, &mut n, t).unwrap();
let lu = hamt.lookup(&mut stack, &mut n);
let lu = hamt.lookup(&mut stack, &mut n).expect("lookup failed due to OOM");
let lu_value = unsafe { lu.expect("lookup failed").as_raw() };
assert_eq!(lu_value, 1);
let mut n = D(2);
let t = D(3);
hamt = hamt.insert(&mut stack, &mut n, t).unwrap();
let lu = hamt.lookup(&mut stack, &mut D(2));
let lu = hamt.lookup(&mut stack, &mut D(2)).expect("lookup failed due to OOM");
let lu_value = unsafe { lu.expect("lookup failed").as_raw() };
assert_eq!(lu_value, 3);
}
@ -983,15 +984,15 @@ mod test {
let t = D(317365951);
hamt = hamt.insert(&mut stack, &mut n, t).unwrap();
let lu = hamt.lookup(&mut stack, &mut D(0));
let lu = hamt.lookup(&mut stack, &mut D(0)).expect("lookup failed due to OOM");
let lu_value = unsafe { lu.expect("0 lookup failed").as_raw() };
assert_eq!(lu_value, 0);
let lu = hamt.lookup(&mut stack, &mut D(87699370));
let lu = hamt.lookup(&mut stack, &mut D(87699370)).expect("lookup failed due to OOM");
let lu_value = unsafe { lu.expect("87699370 lookup failed").as_raw() };
assert_eq!(lu_value, 87699370);
let lu = hamt.lookup(&mut stack, &mut D(317365951));
let lu = hamt.lookup(&mut stack, &mut D(317365951)).expect("lookup failed due to OOM");
let lu_value = unsafe { lu.expect("317365951 lookup failed").as_raw() };
assert_eq!(lu_value, 317365951);
}

View File

@ -435,7 +435,7 @@ pub fn interpret(context: &mut Context, subject: Noun, formula: Noun) -> Result<
// Bottom of trace stack
*(context.stack.local_noun_pointer(1) as *mut *const TraceStack) = std::ptr::null();
*(context.stack.push()) = NockWork::Done;
*(context.stack.push()?) = NockWork::Done;
};
// DO NOT REMOVE THIS COMMENT
@ -568,7 +568,7 @@ unsafe fn work(terminator: Arc<AtomicBool>, context: &mut Context, formula: Noun
debug_assertions(stack, res);
mean_frame_push(stack, 0);
*stack.push() = NockWork::Ret;
*stack.push()? = NockWork::Ret;
push_formula(stack, res, true)?;
}
}
@ -626,7 +626,7 @@ unsafe fn work(terminator: Arc<AtomicBool>, context: &mut Context, formula: Noun
Todo5::TestEquals => {
let stack = &mut context.stack;
let saved_value_ptr = &mut five.left;
res = if unifying_equality(stack, &mut res, saved_value_ptr) {
res = if unifying_equality(stack, &mut res, saved_value_ptr)? {
D(0)
} else {
D(1)
@ -724,7 +724,7 @@ unsafe fn work(terminator: Arc<AtomicBool>, context: &mut Context, formula: Noun
if !cfg!(feature = "sham_hints") {
if let Some((jet, _path)) = context.warm.find_jet(
&mut context.stack, &mut res, &mut formula,
) {
)? {
match jet(context, res) {
Ok(jet_res) => {
res = jet_res;
@ -748,7 +748,7 @@ unsafe fn work(terminator: Arc<AtomicBool>, context: &mut Context, formula: Noun
// jetted code.
if context.trace_info.is_some() {
if let Some(path) =
context.cold.matches(stack, &mut res)
context.cold.matches(stack, &mut res)?
{
append_trace(stack, path);
};
@ -767,7 +767,7 @@ unsafe fn work(terminator: Arc<AtomicBool>, context: &mut Context, formula: Noun
subject = res;
mean_frame_push(stack, 0);
*stack.push() = NockWork::Ret;
*stack.push()? = NockWork::Ret;
push_formula(stack, formula, true)?;
// We could trace on 2 as well, but 2 only comes from Hoon via
@ -775,7 +775,7 @@ unsafe fn work(terminator: Arc<AtomicBool>, context: &mut Context, formula: Noun
// jetted code.
if context.trace_info.is_some() {
if let Some(path) =
context.cold.matches(stack, &mut res)
context.cold.matches(stack, &mut res)?
{
append_trace(stack, path);
};
@ -1016,7 +1016,7 @@ fn push_formula(stack: &mut NockStack, formula: Noun, tail: bool) -> Result<Noun
// Formula
match formula_cell.head().as_either_atom_cell() {
Right(_cell) => {
*stack.push() = NockWork::WorkCons(NockCons {
*stack.push()? = NockWork::WorkCons(NockCons {
todo: TodoCons::ComputeHead,
head: formula_cell.head(),
tail: formula_cell.tail(),
@ -1027,20 +1027,20 @@ fn push_formula(stack: &mut NockStack, formula: Noun, tail: bool) -> Result<Noun
match direct.data() {
0 => {
if let Ok(axis_atom) = formula_cell.tail().as_atom() {
*stack.push() = NockWork::Work0(Nock0 { axis: axis_atom });
*stack.push()? = NockWork::Work0(Nock0 { axis: axis_atom });
} else {
// Axis for Nock 0 must be an atom
return BAIL_EXIT;
}
}
1 => {
*stack.push() = NockWork::Work1(Nock1 {
*stack.push()? = NockWork::Work1(Nock1 {
noun: formula_cell.tail(),
});
}
2 => {
if let Ok(arg_cell) = formula_cell.tail().as_cell() {
*stack.push() = NockWork::Work2(Nock2 {
*stack.push()? = NockWork::Work2(Nock2 {
todo: Todo2::ComputeSubject,
subject: arg_cell.head(),
formula: arg_cell.tail(),
@ -1052,20 +1052,20 @@ fn push_formula(stack: &mut NockStack, formula: Noun, tail: bool) -> Result<Noun
};
}
3 => {
*stack.push() = NockWork::Work3(Nock3 {
*stack.push()? = NockWork::Work3(Nock3 {
todo: Todo3::ComputeChild,
child: formula_cell.tail(),
});
}
4 => {
*stack.push() = NockWork::Work4(Nock4 {
*stack.push()? = NockWork::Work4(Nock4 {
todo: Todo4::ComputeChild,
child: formula_cell.tail(),
});
}
5 => {
if let Ok(arg_cell) = formula_cell.tail().as_cell() {
*stack.push() = NockWork::Work5(Nock5 {
*stack.push()? = NockWork::Work5(Nock5 {
todo: Todo5::ComputeLeftChild,
left: arg_cell.head(),
right: arg_cell.tail(),
@ -1078,7 +1078,7 @@ fn push_formula(stack: &mut NockStack, formula: Noun, tail: bool) -> Result<Noun
6 => {
if let Ok(arg_cell) = formula_cell.tail().as_cell() {
if let Ok(branch_cell) = arg_cell.tail().as_cell() {
*stack.push() = NockWork::Work6(Nock6 {
*stack.push()? = NockWork::Work6(Nock6 {
todo: Todo6::ComputeTest,
test: arg_cell.head(),
zero: branch_cell.head(),
@ -1096,7 +1096,7 @@ fn push_formula(stack: &mut NockStack, formula: Noun, tail: bool) -> Result<Noun
}
7 => {
if let Ok(arg_cell) = formula_cell.tail().as_cell() {
*stack.push() = NockWork::Work7(Nock7 {
*stack.push()? = NockWork::Work7(Nock7 {
todo: Todo7::ComputeSubject,
subject: arg_cell.head(),
formula: arg_cell.tail(),
@ -1109,7 +1109,7 @@ fn push_formula(stack: &mut NockStack, formula: Noun, tail: bool) -> Result<Noun
}
8 => {
if let Ok(arg_cell) = formula_cell.tail().as_cell() {
*stack.push() = NockWork::Work8(Nock8 {
*stack.push()? = NockWork::Work8(Nock8 {
todo: Todo8::ComputeSubject,
pin: arg_cell.head(),
formula: arg_cell.tail(),
@ -1123,7 +1123,7 @@ fn push_formula(stack: &mut NockStack, formula: Noun, tail: bool) -> Result<Noun
9 => {
if let Ok(arg_cell) = formula_cell.tail().as_cell() {
if let Ok(axis_atom) = arg_cell.head().as_atom() {
*stack.push() = NockWork::Work9(Nock9 {
*stack.push()? = NockWork::Work9(Nock9 {
todo: Todo9::ComputeCore,
axis: axis_atom,
core: arg_cell.tail(),
@ -1142,7 +1142,7 @@ fn push_formula(stack: &mut NockStack, formula: Noun, tail: bool) -> Result<Noun
if let Ok(arg_cell) = formula_cell.tail().as_cell() {
if let Ok(patch_cell) = arg_cell.head().as_cell() {
if let Ok(axis_atom) = patch_cell.head().as_atom() {
*stack.push() = NockWork::Work10(Nock10 {
*stack.push()? = NockWork::Work10(Nock10 {
todo: Todo10::ComputeTree,
axis: axis_atom,
tree: arg_cell.tail(),
@ -1165,7 +1165,7 @@ fn push_formula(stack: &mut NockStack, formula: Noun, tail: bool) -> Result<Noun
if let Ok(arg_cell) = formula_cell.tail().as_cell() {
match arg_cell.head().as_either_atom_cell() {
Left(tag_atom) => {
*stack.push() = NockWork::Work11S(Nock11S {
*stack.push()? = NockWork::Work11S(Nock11S {
todo: Todo11S::ComputeResult,
tag: tag_atom,
body: arg_cell.tail(),
@ -1174,7 +1174,7 @@ fn push_formula(stack: &mut NockStack, formula: Noun, tail: bool) -> Result<Noun
}
Right(hint_cell) => {
if let Ok(tag_atom) = hint_cell.head().as_atom() {
*stack.push() = NockWork::Work11D(Nock11D {
*stack.push()? = NockWork::Work11D(Nock11D {
todo: Todo11D::ComputeHint,
tag: tag_atom,
hint: hint_cell.tail(),
@ -1194,7 +1194,7 @@ fn push_formula(stack: &mut NockStack, formula: Noun, tail: bool) -> Result<Noun
}
12 => {
if let Ok(arg_cell) = formula_cell.tail().as_cell() {
*stack.push() = NockWork::Work12(Nock12 {
*stack.push()? = NockWork::Work12(Nock12 {
todo: Todo12::ComputeReff,
reff: arg_cell.head(),
path: arg_cell.tail(),
@ -1446,10 +1446,16 @@ mod hint {
match interpret(context, subject, body) {
Ok(mut nock_res) => {
let stack = &mut context.stack;
// This is an awkward expression but
// the entire unsafe { ... } block is a bool expression
if unsafe {
!unifying_equality(
let eq_res = unifying_equality(
stack, &mut nock_res, &mut jet_res,
)
);
match eq_res {
Ok(eq) => !eq,
Err(err) => return Some(Err(From::from(err))),
}
} {
// XX: need NockStack allocated string interpolation
// let tape = tape(stack, "jet mismatch in {}, raw: {}, jetted: {}", jet_name, nock_res, jet_res);
@ -1503,7 +1509,7 @@ mod hint {
Err(_) => return Some(BAIL_EXIT),
};
let mut key = key.as_noun();
context.cache.lookup(stack, &mut key).map(Ok)
context.cache.lookup(stack, &mut key).transpose().map(|r| r.map_err(From::from))
}
_ => None,
}

View File

@ -306,13 +306,14 @@ pub mod util {
kick(context, core, D(2))
}
#[cfg(test)]
pub mod test {
use super::*;
use crate::hamt::Hamt;
use crate::interpreter::Slogger;
use crate::mem::{AllocResult, NockStack};
use crate::noun::{Atom, Noun, D, T};
use crate::unifying_equality::unifying_equality;
use crate::unifying_equality::test::unifying_equality;
use assert_no_alloc::assert_no_alloc;
use ibig::UBig;

View File

@ -28,19 +28,19 @@ pub type Result = std::result::Result<bool, Error>;
// Batteries is a core hierarchy (e.g. a path of parent batteries to a root)
#[derive(Copy, Clone)]
pub struct Batteries(*mut BatteriesMem);
pub struct Batteries(pub *mut BatteriesMem);
const NO_BATTERIES: Batteries = Batteries(null_mut());
#[derive(Copy, Clone)]
struct BatteriesMem {
pub(crate) struct BatteriesMem {
battery: Noun,
parent_axis: Atom,
pub(crate) parent_axis: Atom,
parent_batteries: Batteries,
}
impl Persist for Batteries {
unsafe fn space_needed(&mut self, stack: &mut NockStack) -> usize {
unsafe fn space_needed(&mut self, stack: &mut NockStack) -> AllocResult<usize> {
let mut bytes = 0;
let mut batteries = *self;
@ -52,14 +52,14 @@ impl Persist for Batteries {
break;
}
bytes += size_of::<BatteriesMem>();
bytes += (*batteries.0).battery.space_needed(stack);
bytes += (*batteries.0).parent_axis.space_needed(stack);
bytes += (*batteries.0).battery.space_needed(stack)?;
bytes += (*batteries.0).parent_axis.space_needed(stack)?;
batteries = (*batteries.0).parent_batteries;
}
bytes
Ok(bytes)
}
unsafe fn copy_to_buffer(&mut self, stack: &mut NockStack, buffer: &mut *mut u8) {
unsafe fn copy_to_buffer(&mut self, stack: &mut NockStack, buffer: &mut *mut u8) -> AllocResult<()> {
let mut dest = self;
loop {
if dest.0.is_null() {
@ -81,6 +81,7 @@ impl Persist for Batteries {
dest.0 = batteries_mem_ptr;
dest = &mut (*dest.0).parent_batteries;
}
Ok(())
}
unsafe fn handle_to_u64(&self) -> u64 {
@ -150,7 +151,7 @@ impl Iterator for Batteries {
}
impl Batteries {
pub fn matches(self, stack: &mut NockStack, mut core: Noun) -> bool {
pub fn matches(self, stack: &mut NockStack, mut core: Noun) -> AllocResult<bool> {
let mut root_found: bool = false;
for (battery, parent_axis) in self {
@ -160,26 +161,26 @@ impl Batteries {
if let Ok(d) = parent_axis.as_direct() {
if d.data() == 0 {
if unsafe { unifying_equality(stack, &mut core, battery) } {
if unsafe { unifying_equality(stack, &mut core, battery)? } {
root_found = true;
continue;
} else {
return false;
return Ok(false);
};
};
};
if let Ok(mut core_battery) = core.slot(2) {
if unsafe { !unifying_equality(stack, &mut core_battery, battery) } {
return false;
if unsafe { !unifying_equality(stack, &mut core_battery, battery)? } {
return Ok(false);
};
if let Ok(core_parent) = core.slot_atom(parent_axis) {
core = core_parent;
continue;
} else {
return false;
return Ok(false);
}
} else {
return false;
return Ok(false);
}
}
@ -187,7 +188,7 @@ impl Batteries {
panic!("cold: core matched exactly, but never matched root");
}
true
Ok(true)
}
}
@ -205,7 +206,7 @@ struct BatteriesListMem {
}
impl Persist for BatteriesList {
unsafe fn space_needed(&mut self, stack: &mut NockStack) -> usize {
unsafe fn space_needed(&mut self, stack: &mut NockStack) -> AllocResult<usize> {
let mut bytes = 0;
let mut list = *self;
loop {
@ -216,14 +217,14 @@ impl Persist for BatteriesList {
break;
}
bytes += size_of::<BatteriesListMem>();
bytes += (*list.0).batteries.space_needed(stack);
bytes += (*list.0).batteries.space_needed(stack)?;
list = (*list.0).next;
}
bytes
Ok(bytes)
}
unsafe fn copy_to_buffer(&mut self, stack: &mut NockStack, buffer: &mut *mut u8) {
unsafe fn copy_to_buffer(&mut self, stack: &mut NockStack, buffer: &mut *mut u8) -> AllocResult<()> {
let mut dest = self;
loop {
@ -242,6 +243,7 @@ impl Persist for BatteriesList {
(*dest.0).batteries.copy_to_buffer(stack, buffer);
dest = &mut (*dest.0).next;
}
Ok(())
}
unsafe fn handle_to_u64(&self) -> u64 {
@ -307,8 +309,20 @@ impl Iterator for BatteriesList {
}
impl BatteriesList {
fn matches(mut self, stack: &mut NockStack, core: Noun) -> Option<Batteries> {
self.find(|&batteries| batteries.matches(stack, core))
fn matches(self, stack: &mut NockStack, core: Noun) -> AllocResult<Option<Batteries>> {
for batteries in self {
match batteries.matches(stack, core) {
Ok(matched) => {
if matched {
return Ok(Some(batteries));
} else {
continue;
}
},
Err(err) => return Err(err),
}
}
Ok(None)
}
}
@ -326,7 +340,7 @@ pub(crate) struct NounListMem {
}
impl Persist for NounList {
unsafe fn space_needed(&mut self, stack: &mut NockStack) -> usize {
unsafe fn space_needed(&mut self, stack: &mut NockStack) -> AllocResult<usize> {
let mut bytes: usize = 0;
let mut list = *self;
@ -339,14 +353,14 @@ impl Persist for NounList {
}
bytes += size_of::<NounListMem>();
bytes += (*list.0).element.space_needed(stack);
bytes += (*list.0).element.space_needed(stack)?;
list = (*list.0).next;
}
bytes
Ok(bytes)
}
unsafe fn copy_to_buffer(&mut self, stack: &mut NockStack, buffer: &mut *mut u8) {
unsafe fn copy_to_buffer(&mut self, stack: &mut NockStack, buffer: &mut *mut u8) -> AllocResult<()> {
let mut dest = self;
loop {
@ -366,6 +380,7 @@ impl Persist for NounList {
dest = &mut (*dest.0).next;
}
Ok(())
}
unsafe fn handle_to_u64(&self) -> u64 {
@ -455,21 +470,21 @@ struct ColdMem {
}
impl Persist for Cold {
unsafe fn space_needed(&mut self, stack: &mut NockStack) -> usize {
unsafe fn space_needed(&mut self, stack: &mut NockStack) -> AllocResult<usize> {
if pma_contains(self.0, 1) {
return 0;
return Ok(0);
}
let mut bytes = size_of::<ColdMem>();
bytes += (*self.0).battery_to_paths.space_needed(stack);
bytes += (*self.0).root_to_paths.space_needed(stack);
bytes += (*self.0).path_to_batteries.space_needed(stack);
bytes
bytes += (*self.0).battery_to_paths.space_needed(stack)?;
bytes += (*self.0).root_to_paths.space_needed(stack)?;
bytes += (*self.0).path_to_batteries.space_needed(stack)?;
Ok(bytes)
}
unsafe fn copy_to_buffer(&mut self, stack: &mut NockStack, buffer: &mut *mut u8) {
unsafe fn copy_to_buffer(&mut self, stack: &mut NockStack, buffer: &mut *mut u8) -> AllocResult<()> {
if pma_contains(self.0, 1) {
return;
return Ok(());
}
let cold_mem_ptr = *buffer as *mut ColdMem;
@ -478,9 +493,10 @@ impl Persist for Cold {
self.0 = cold_mem_ptr;
(*self.0).battery_to_paths.copy_to_buffer(stack, buffer);
(*self.0).root_to_paths.copy_to_buffer(stack, buffer);
(*self.0).path_to_batteries.copy_to_buffer(stack, buffer);
(*self.0).battery_to_paths.copy_to_buffer(stack, buffer)?;
(*self.0).root_to_paths.copy_to_buffer(stack, buffer)?;
(*self.0).path_to_batteries.copy_to_buffer(stack, buffer)?;
Ok(())
}
unsafe fn handle_to_u64(&self) -> u64 {
@ -553,32 +569,39 @@ impl Cold {
}
}
pub fn find(&mut self, stack: &mut NockStack, path: &mut Noun) -> BatteriesList {
unsafe {
pub fn find(&mut self, stack: &mut NockStack, path: &mut Noun) -> AllocResult<BatteriesList> {
Ok(unsafe {
(*(self.0))
.path_to_batteries
.lookup(stack, path)
.lookup(stack, path)?
.unwrap_or(BATTERIES_LIST_NIL)
}
})
}
/** Try to match a core directly to the cold state, print the resulting path if found
*/
pub fn matches(&mut self, stack: &mut NockStack, core: &mut Noun) -> Option<Noun> {
let mut battery = (*core).slot(2).ok()?;
pub fn matches(&mut self, stack: &mut NockStack, core: &mut Noun) -> crate::noun::Result<Option<Noun>> {
let mut battery = (*core).slot(2)?;
unsafe {
let paths = (*(self.0)).battery_to_paths.lookup(stack, &mut battery)?;
for path in paths {
if let Some(batteries_list) =
(*(self.0)).path_to_batteries.lookup(stack, &mut (*path))
{
if let Some(_batt) = batteries_list.matches(stack, *core) {
return Some(*path);
if let Some(paths) = paths {
for path in paths {
if let Some(batteries_list) = (*(self.0)).path_to_batteries.lookup(stack, &mut (*path))?
{
if let Some(_batt) = batteries_list.matches(stack, *core)? {
return Ok(Some(*path));
} else {
return Ok(None);
}
} else {
return Ok(None);
}
}
Ok(None)
} else {
Ok(None)
}
};
None
}
}
/// register a core, return a boolean of whether we actually needed to register (false ->
@ -598,9 +621,9 @@ impl Cold {
if let Ok(parent_axis_direct) = parent_axis.as_direct() {
if parent_axis_direct.data() == 0 {
let mut root_path = T(stack, &[chum, D(0)])?;
if let Some(paths) = (*(self.0)).root_to_paths.lookup(stack, &mut core) {
if let Some(paths) = (*(self.0)).root_to_paths.lookup(stack, &mut core)? {
for a_path in paths {
if unifying_equality(stack, &mut root_path, a_path) {
if unifying_equality(stack, &mut root_path, a_path)? {
return Ok(false); // it's already in here
}
}
@ -614,7 +637,7 @@ impl Cold {
let current_batteries_list: BatteriesList = (*(self.0))
.path_to_batteries
.lookup(stack, &mut root_path)
.lookup(stack, &mut root_path)?
.unwrap_or(BATTERIES_LIST_NIL);
let batteries_list_mem_ptr: *mut BatteriesListMem = stack.struct_alloc(1)?;
@ -625,7 +648,7 @@ impl Cold {
let current_paths_list: NounList = (*(self.0))
.root_to_paths
.lookup(stack, &mut core)
.lookup(stack, &mut core)?
.unwrap_or(NOUN_LIST_NIL);
let paths_list_mem_ptr: *mut NounListMem = stack.struct_alloc(1)?;
@ -657,14 +680,14 @@ impl Cold {
let mut battery = core.slot(2)?;
let mut parent = core.slot_atom(parent_axis)?;
// Check if we already registered this core
if let Some(paths) = (*(self.0)).battery_to_paths.lookup(stack, &mut battery) {
if let Some(paths) = (*(self.0)).battery_to_paths.lookup(stack, &mut battery)? {
for path in paths {
if let Ok(path_cell) = (*path).as_cell() {
if unifying_equality(stack, &mut path_cell.head(), &mut chum) {
if unifying_equality(stack, &mut path_cell.head(), &mut chum)? {
if let Some(batteries_list) =
(*(self.0)).path_to_batteries.lookup(stack, &mut *path)
(*(self.0)).path_to_batteries.lookup(stack, &mut *path)?
{
if let Some(_batteries) = batteries_list.matches(stack, core) {
if let Some(_batteries) = batteries_list.matches(stack, core)? {
return Ok(false);
}
}
@ -682,13 +705,13 @@ impl Cold {
let mut battery_to_paths = (*(self.0)).battery_to_paths;
let root_to_paths = (*(self.0)).root_to_paths;
if let Some(paths) = battery_to_paths.lookup(stack, &mut parent_battery) {
if let Some(paths) = battery_to_paths.lookup(stack, &mut parent_battery)? {
for a_path in paths {
// path is a reserved word lol
let battery_list = path_to_batteries
.lookup(stack, &mut *a_path)
.lookup(stack, &mut *a_path)?
.unwrap_or(BATTERIES_LIST_NIL);
if let Some(parent_batteries) = battery_list.matches(stack, parent) {
if let Some(parent_batteries) = battery_list.matches(stack, parent)? {
let mut my_path = T(stack, &[chum, *a_path])?;
let batteries_mem_ptr: *mut BatteriesMem = stack.struct_alloc(1)?;
@ -699,7 +722,7 @@ impl Cold {
};
let current_batteries_list = path_to_batteries
.lookup(stack, &mut my_path)
.lookup(stack, &mut my_path)?
.unwrap_or(BATTERIES_LIST_NIL);
let batteries_list_mem_ptr: *mut BatteriesListMem = stack.struct_alloc(1)?;
*batteries_list_mem_ptr = BatteriesListMem {
@ -708,7 +731,7 @@ impl Cold {
};
let current_paths_list = battery_to_paths
.lookup(stack, &mut battery)
.lookup(stack, &mut battery)?
.unwrap_or(NOUN_LIST_NIL);
let paths_list_mem_ptr: *mut NounListMem = stack.struct_alloc(1)?;
*paths_list_mem_ptr = NounListMem {
@ -731,13 +754,13 @@ impl Cold {
}
};
if let Some(paths) = root_to_paths.lookup(stack, &mut parent) {
if let Some(paths) = root_to_paths.lookup(stack, &mut parent)? {
for a_path in paths {
// path is a reserved word lol
let battery_list = path_to_batteries
.lookup(stack, &mut *a_path)
.lookup(stack, &mut *a_path)?
.unwrap_or(BATTERIES_LIST_NIL);
if let Some(parent_batteries) = battery_list.matches(stack, parent) {
if let Some(parent_batteries) = battery_list.matches(stack, parent)? {
let mut my_path = T(stack, &[chum, *a_path])?;
let batteries_mem_ptr: *mut BatteriesMem = stack.struct_alloc(1)?;
@ -748,7 +771,7 @@ impl Cold {
};
let current_batteries_list = path_to_batteries
.lookup(stack, &mut my_path)
.lookup(stack, &mut my_path)?
.unwrap_or(BATTERIES_LIST_NIL);
let batteries_list_mem_ptr: *mut BatteriesListMem = stack.struct_alloc(1)?;
*batteries_list_mem_ptr = BatteriesListMem {
@ -757,7 +780,7 @@ impl Cold {
};
let current_paths_list = battery_to_paths
.lookup(stack, &mut battery)
.lookup(stack, &mut battery)?
.unwrap_or(NOUN_LIST_NIL);
let paths_list_mem_ptr: *mut NounListMem = stack.struct_alloc(1)?;
*paths_list_mem_ptr = NounListMem {
@ -1187,6 +1210,7 @@ pub(crate) mod test {
use std::iter::FromIterator;
use super::*;
use crate::unifying_equality::test::unifying_equality;
use crate::{
hamt::Hamt,
mem::NockStack,

View File

@ -10,7 +10,7 @@ crate::gdb!();
pub fn jet_mug(context: &mut Context, subject: Noun) -> Result<Noun> {
let arg = slot(subject, 6)?;
Ok(mug(&mut context.stack, arg).as_noun())
Ok(mug(&mut context.stack, arg)?.as_noun())
}
#[cfg(test)]

View File

@ -24,7 +24,7 @@ pub fn jet_roll(context: &mut Context, subject: Noun) -> Result<Noun> {
let mut gate = slot(sample, 3)?;
let mut prod = slot(gate, 13)?;
let site = Site::new(context, &mut gate);
let site = Site::new(context, &mut gate)?;
loop {
if let Ok(list_cell) = list.as_cell() {
list = list_cell.tail();
@ -61,7 +61,7 @@ pub fn jet_turn(context: &mut Context, subject: Noun) -> Result<Noun> {
// Since the gate doesn't change, we can do a single jet check and use that through the whole
// loop
let site = Site::new(context, &mut gate);
let site = Site::new(context, &mut gate)?;
loop {
if let Ok(list_cell) = list.as_cell() {
list = list_cell.tail();

View File

@ -27,7 +27,7 @@ pub fn jet_ut_crop(context: &mut Context, subject: Noun) -> Result<Noun> {
let fun = 141 + tas!(b"crop") + (flag << 8);
let mut key = T(&mut context.stack, &[D(fun), sut, rff, bat])?;
match context.cache.lookup(&mut context.stack, &mut key) {
match context.cache.lookup(&mut context.stack, &mut key)? {
Some(pro) => Ok(pro),
None => {
let pro = interpret(context, subject, slot(subject, 2)?)?;
@ -57,7 +57,7 @@ pub fn jet_ut_fish(context: &mut Context, subject: Noun) -> Result<Noun> {
let fun = 141 + tas!(b"fish") + (flag << 8);
let mut key = T(&mut context.stack, &[D(fun), sut, axe.as_noun(), bat])?;
match context.cache.lookup(&mut context.stack, &mut key) {
match context.cache.lookup(&mut context.stack, &mut key)? {
Some(pro) => Ok(pro),
None => {
let pro = interpret(context, subject, slot(subject, 2)?)?;
@ -86,7 +86,7 @@ pub fn jet_ut_fuse(context: &mut Context, subject: Noun) -> Result<Noun> {
let fun = 141 + tas!(b"fuse") + (flag << 8);
let mut key = T(&mut context.stack, &[D(fun), sut, rff, bat])?;
match context.cache.lookup(&mut context.stack, &mut key) {
match context.cache.lookup(&mut context.stack, &mut key)? {
Some(pro) => Ok(pro),
None => {
let pro = interpret(context, subject, slot(subject, 2)?)?;
@ -108,7 +108,7 @@ pub fn jet_ut_mint(context: &mut Context, subject: Noun) -> Result<Noun> {
let vet = slot(van, 59).map_or(NONE, |x| x);
let mut key = T(&mut context.stack, &[D(fun), vet, sut, gol, gen, bat])?;
match context.cache.lookup(&mut context.stack, &mut key) {
match context.cache.lookup(&mut context.stack, &mut key)? {
Some(pro) => Ok(pro),
None => {
let pro = interpret(context, subject, slot(subject, 2)?)?;
@ -139,7 +139,7 @@ pub fn jet_ut_mull(context: &mut Context, subject: Noun) -> Result<Noun> {
let fun = 141 + tas!(b"mull") + (flag << 8);
let mut key = T(&mut context.stack, &[D(fun), sut, gol, dox, gen, bat])?;
match context.cache.lookup(&mut context.stack, &mut key) {
match context.cache.lookup(&mut context.stack, &mut key)? {
Some(pro) => Ok(pro),
None => {
let pro = interpret(context, subject, slot(subject, 2)?)?;
@ -174,7 +174,7 @@ pub fn jet_ut_nest_dext(context: &mut Context, subject: Noun) -> Result<Noun> {
let fun = (141 + tas!(b"dext")) + (flag << 8);
let mut key = T(&mut context.stack, &[D(fun), sut, rff, bat])?;
match context.cache.lookup(&mut context.stack, &mut key) {
match context.cache.lookup(&mut context.stack, &mut key)? {
Some(pro) => Ok(pro),
None => {
let pro = interpret(context, subject, slot(subject, 2)?)?;
@ -207,7 +207,7 @@ pub fn jet_ut_rest(context: &mut Context, subject: Noun) -> Result<Noun> {
let fun = 141 + tas!(b"rest") + (flag << 8);
let mut key = T(&mut context.stack, &[D(fun), sut, leg, bat])?;
match context.cache.lookup(&mut context.stack, &mut key) {
match context.cache.lookup(&mut context.stack, &mut key)? {
Some(pro) => Ok(pro),
None => {
let pro = interpret(context, subject, slot(subject, 2)?)?;

View File

@ -565,7 +565,7 @@ pub fn jet_stir(context: &mut Context, subject: Noun) -> Result<Noun> {
let puq_vex = slot(q_vex, 6)?;
let quq_vex = slot(q_vex, 7)?;
*(context.stack.push::<StirPair>()) = StirPair {
*(context.stack.push::<StirPair>()?) = StirPair {
har: p_vex,
res: puq_vex,
};

View File

@ -24,8 +24,8 @@ pub fn jet_gor(context: &mut Context, subject: Noun) -> jets::Result<Noun> {
let a = slot(sam, 2)?;
let b = slot(sam, 3)?;
let c = mug(stack, a);
let d = mug(stack, b);
let c = mug(stack, a)?;
let d = mug(stack, b)?;
match c.data().cmp(&d.data()) {
Ordering::Greater => Ok(NO),
@ -41,11 +41,11 @@ pub fn jet_mor(context: &mut Context, subject: Noun) -> jets::Result<Noun> {
let a = slot(sam, 2)?;
let b = slot(sam, 3)?;
let c = mug(stack, a);
let d = mug(stack, b);
let c = mug(stack, a)?;
let d = mug(stack, b)?;
let e = mug(stack, c.as_noun());
let f = mug(stack, d.as_noun());
let e = mug(stack, c.as_noun())?;
let f = mug(stack, d.as_noun())?;
match e.data().cmp(&f.data()) {
Ordering::Greater => Ok(NO),

View File

@ -98,7 +98,7 @@ impl Warm {
batteries: Batteries,
jet: Jet,
) -> AllocResult<()> {
let current_warm_entry = self.0.lookup(stack, formula).unwrap_or(WARM_ENTRY_NIL);
let current_warm_entry = self.0.lookup(stack, formula)?.unwrap_or(WARM_ENTRY_NIL);
unsafe {
let warm_entry_mem_ptr: *mut WarmEntryMem = stack.struct_alloc(1)?;
*warm_entry_mem_ptr = WarmEntryMem {
@ -115,7 +115,7 @@ impl Warm {
pub fn init(stack: &mut NockStack, cold: &mut Cold, hot: &Hot) -> AllocResult<Self> {
let mut warm = Self::new(stack)?;
for (mut path, axis, jet) in *hot {
let batteries_list = cold.find(stack, &mut path);
let batteries_list = cold.find(stack, &mut path)?;
for batteries in batteries_list {
let mut batteries_tmp = batteries;
let (battery, _parent_axis) = batteries_tmp
@ -141,13 +141,17 @@ impl Warm {
stack: &mut NockStack,
s: &mut Noun,
f: &mut Noun,
) -> Option<(Jet, Noun)> {
) -> AllocResult<Option<(Jet, Noun)>> {
let warm_it = self.0.lookup(stack, f)?;
for (path, batteries, jet) in warm_it {
if batteries.matches(stack, *s) {
return Some((jet, path));
for warm_entry in warm_it {
unsafe {
let jet = (*warm_entry.0).jet;
let path = (*warm_entry.0).path;
if (*warm_entry.0).batteries.matches(stack, *s)? {
return Ok(Some((jet, path)));
}
}
}
None
Ok(None)
}
}

View File

@ -90,7 +90,7 @@ pub enum AllocationType {
/// Non-size parameters for validating an allocation
#[derive(Debug, Clone)]
pub struct Allocation {
pub direction: ArenaOrientation,
pub orientation: ArenaOrientation,
pub alloc_type: AllocationType,
pub pc: bool,
}
@ -180,7 +180,7 @@ impl NockStack {
pub(crate) fn get_alloc_config(&self, alloc_type: AllocationType) -> Allocation {
Allocation {
direction: if self.is_west() {
orientation: if self.is_west() {
ArenaOrientation::West
} else {
ArenaOrientation::East
@ -235,7 +235,7 @@ impl NockStack {
// east:
// noun_ptr <= *prev_alloc_ptr - size
// West: the stack pointer must not overlap the alloc pointer
let (target_point, limit_point, direction) = match (alloc.alloc_type, alloc.direction) {
let (target_point, limit_point, direction) = match (alloc.alloc_type, alloc.orientation) {
// West + Alloc, alloc is decreasing
(AllocationType::Alloc, ArenaOrientation::West) => {
let start_point = self.alloc_pointer as usize;
@ -664,7 +664,7 @@ impl NockStack {
}
}
unsafe fn copy(&mut self, noun: &mut Noun) {
unsafe fn copy(&mut self, noun: &mut Noun) -> AllocResult<()> {
assert_acyclic!(*noun);
assert_no_forwarding_pointers!(*noun);
assert_no_junior_pointers!(self, *noun);
@ -674,9 +674,9 @@ impl NockStack {
let noun_ptr = noun as *mut Noun;
// Add two slots to the lightweight stack
// Set the first new slot to the noun to be copied
*(self.push::<Noun>()) = *noun;
*(self.push::<Noun>()?) = *noun;
// Set the second new slot to a pointer to the noun being copied. this is the destination pointer, which will change
*(self.push::<*mut Noun>()) = noun_ptr;
*(self.push::<*mut Noun>()?) = noun_ptr;
loop {
if self.stack_is_empty() {
break;
@ -735,10 +735,10 @@ impl NockStack {
(*alloc).metadata = (*cell.to_raw_pointer()).metadata;
// Push the tail and the head to the work stack
*(self.push::<Noun>()) = cell.tail();
*(self.push::<*mut Noun>()) = &mut (*alloc).tail;
*(self.push::<Noun>()) = cell.head();
*(self.push::<*mut Noun>()) = &mut (*alloc).head;
*(self.push::<Noun>()?) = cell.tail();
*(self.push::<*mut Noun>()?) = &mut (*alloc).tail;
*(self.push::<Noun>()?) = cell.head();
*(self.push::<*mut Noun>()?) = &mut (*alloc).head;
// Set the forwarding pointer
cell.set_forwarding_pointer(alloc);
@ -760,6 +760,7 @@ impl NockStack {
assert_acyclic!(*noun);
assert_no_forwarding_pointers!(*noun);
assert_no_junior_pointers!(self, *noun);
Ok(())
}
// TODO: #684: Add OOM checks here? Unsure.
@ -923,7 +924,7 @@ impl NockStack {
* this violates the _east/_west naming convention somewhat, since e.g.
* a west frame when pc == false has a west-oriented lightweight stack,
* but when pc == true it becomes east-oriented.*/
pub unsafe fn push<T>(&mut self) -> *mut T {
pub unsafe fn push<T>(&mut self) -> AllocResult<*mut T> {
if self.is_west() && !self.pc || !self.is_west() && self.pc {
self.push_west::<T>()
} else {
@ -934,37 +935,49 @@ impl NockStack {
/** Push onto a west-oriented lightweight stack, moving the stack_pointer. */
// TODO: #684: Add OOM checks here
// TODO: Basic alloc function
unsafe fn push_west<T>(&mut self) -> *mut T {
unsafe fn push_west<T>(&mut self) -> AllocResult<*mut T> {
let words = word_size_of::<T>();
let () = self.alloc_would_oom_(Allocation { orientation: ArenaOrientation::West, alloc_type: AllocationType::Push, pc: self.pc }, words)?;
let ap = if self.pc {
*(self.prev_alloc_pointer_pointer())
} else {
self.alloc_pointer
};
let alloc = self.stack_pointer;
let new_sp = self.stack_pointer.add(word_size_of::<T>());
let new_sp = self.stack_pointer.add(words);
if new_sp > ap {
ptr::null_mut()
// Previously we would return a null pointer in this error case.
// Now that we have the alloc_would_oom methods, this shouldn't ever happen.
// ptr::null_mut()
// If it does, somehow, happen, we should panic.
panic!("Out of memory, alloc_would_oom didn't catch it. memory_state: {:#?}", self.memory_state(Some(words)));
} else {
self.stack_pointer = new_sp;
alloc as *mut T
Ok(alloc as *mut T)
}
}
/** Push onto an east-oriented ligthweight stack, moving the stack_pointer */
// TODO: #684: Add OOM checks here
// TODO: Basic alloc function
unsafe fn push_east<T>(&mut self) -> *mut T {
unsafe fn push_east<T>(&mut self) -> AllocResult<*mut T> {
let words = word_size_of::<T>();
let () = self.alloc_would_oom_(Allocation { orientation: ArenaOrientation::West, alloc_type: AllocationType::Push, pc: self.pc }, words)?;
let ap = if self.pc {
*(self.prev_alloc_pointer_pointer())
} else {
self.alloc_pointer
};
let alloc = self.stack_pointer.sub(word_size_of::<T>());
let alloc = self.stack_pointer.sub(words);
if alloc < ap {
ptr::null_mut()
// Previously we would return a null pointer in this error case.
// Now that we have the alloc_would_oom methods, this shouldn't ever happen.
// ptr::null_mut()
// If it does, somehow, happen, we should panic.
panic!("Out of memory, alloc_would_oom didn't catch it. memory_state: {:#?}", self.memory_state(Some(words)));
} else {
self.stack_pointer = alloc;
alloc as *mut T
Ok(alloc as *mut T)
}
}
@ -1283,7 +1296,7 @@ mod test {
use super::*;
use crate::{
jets::cold::{test::{make_noun_list, make_test_stack}, NounList, Nounable}, mem::NockStack, noun::D, unifying_equality::unifying_equality,
jets::cold::{test::{make_noun_list, make_test_stack}, NounList, Nounable}, mem::NockStack, noun::D, unifying_equality::test::unifying_equality,
};
// cargo test -- test_noun_list_alloc --nocapture

View File

@ -128,9 +128,9 @@ pub fn mug_u32_one(noun: Noun) -> Option<u32> {
}
}
pub fn mug_u32(stack: &mut NockStack, noun: Noun) -> u32 {
pub fn mug_u32(stack: &mut NockStack, noun: Noun) -> AllocResult<u32> {
if let Some(mug) = get_mug(noun) {
return mug;
return Ok(mug);
}
assert_acyclic!(noun);
@ -139,7 +139,7 @@ pub fn mug_u32(stack: &mut NockStack, noun: Noun) -> u32 {
stack.frame_push(0);
unsafe {
*(stack.push()) = noun;
*(stack.push()?) = noun;
}
loop {
if stack.stack_is_empty() {
@ -174,8 +174,8 @@ pub fn mug_u32(stack: &mut NockStack, noun: Noun) -> u32 {
continue;
}
_ => {
*(stack.push()) = cell.tail();
*(stack.push()) = cell.head();
*(stack.push()?) = cell.tail();
*(stack.push()?) = cell.head();
continue;
}
}
@ -193,9 +193,10 @@ pub fn mug_u32(stack: &mut NockStack, noun: Noun) -> u32 {
assert_no_forwarding_pointers!(noun);
assert_no_junior_pointers!(stack, noun);
get_mug(noun).expect("Noun should have a mug once it is mugged.")
// TODO: Purge this expect.
Ok(get_mug(noun).expect("Noun should have a mug once it is mugged."))
}
pub fn mug(stack: &mut NockStack, noun: Noun) -> DirectAtom {
unsafe { DirectAtom::new_unchecked(mug_u32(stack, noun) as u64) }
pub fn mug(stack: &mut NockStack, noun: Noun) -> AllocResult<DirectAtom> {
Ok(unsafe { DirectAtom::new_unchecked(mug_u32(stack, noun)? as u64) })
}

View File

@ -1,4 +1,4 @@
use crate::mem::NockStack;
use crate::mem::{AllocResult, NockStack};
use crate::noun::{Allocated, Atom, Cell, CellMemory, IndirectAtom, Noun};
use either::Either::{Left, Right};
use std::convert::TryInto;
@ -116,20 +116,20 @@ pub unsafe fn pma_dirty<T>(ptr: *mut T, count: usize) {
pub trait Persist {
/// Count how much space is needed, in bytes. May set marks so long as marks are cleaned up by
/// [copy_into_buffer]
unsafe fn space_needed(&mut self, stack: &mut NockStack) -> usize;
unsafe fn space_needed(&mut self, stack: &mut NockStack) -> AllocResult<usize>;
/// Copy into the provided buffer, which may be assumed to be at least as large as the size
/// returned by [space_needed] on the same structure.
unsafe fn copy_to_buffer(&mut self, stack: &mut NockStack, buffer: &mut *mut u8);
unsafe fn copy_to_buffer(&mut self, stack: &mut NockStack, buffer: &mut *mut u8) -> AllocResult<()>;
/// Persist an object into the PMA using [space_needed] and [copy_to_buffer], returning
/// a [u64] (probably a pointer or tagged pointer) that can be saved into metadata.
unsafe fn save_to_pma(&mut self, stack: &mut NockStack) -> u64 {
unsafe fn save_to_pma(&mut self, stack: &mut NockStack) -> AllocResult<u64> {
unsafe {
let space = self.space_needed(stack);
let space = self.space_needed(stack)?;
if space == 0 {
return self.handle_to_u64();
return Ok(self.handle_to_u64());
}
let space_as_pages = (space + (BT_PAGESIZE as usize - 1)) >> BT_PAGEBITS;
@ -139,7 +139,7 @@ pub trait Persist {
self.copy_to_buffer(stack, &mut buffer)?;
let space_isize: isize = space.try_into().unwrap();
assert!(buffer.offset_from(orig_buffer) == space_isize);
self.handle_to_u64()
Ok(self.handle_to_u64())
}
}
@ -161,17 +161,17 @@ unsafe fn unmark(a: Allocated) {
}
impl Persist for Atom {
unsafe fn space_needed(&mut self, _stack: &mut NockStack) -> usize {
unsafe fn space_needed(&mut self, _stack: &mut NockStack) -> AllocResult<usize> {
if let Ok(indirect) = self.as_indirect() {
let count = indirect.raw_size();
if !pma_contains(indirect.to_raw_pointer(), count) && !mark(indirect.as_allocated()) {
return count * size_of::<u64>();
return Ok(count * size_of::<u64>());
}
}
0
Ok(0)
}
unsafe fn copy_to_buffer(&mut self, _stack: &mut NockStack, buffer: &mut *mut u8) {
unsafe fn copy_to_buffer(&mut self, _stack: &mut NockStack, buffer: &mut *mut u8) -> AllocResult<()> {
if let Ok(mut indirect) = self.as_indirect() {
let count = indirect.raw_size();
if !pma_contains(indirect.to_raw_pointer(), count) {
@ -188,6 +188,7 @@ impl Persist for Atom {
}
}
}
Ok(())
}
unsafe fn handle_to_u64(&self) -> u64 {
@ -200,10 +201,10 @@ impl Persist for Atom {
}
impl Persist for Noun {
unsafe fn space_needed(&mut self, stack: &mut NockStack) -> usize {
unsafe fn space_needed(&mut self, stack: &mut NockStack) -> AllocResult<usize> {
let mut space = 0usize;
stack.frame_push(0);
*(stack.push::<Noun>()) = *self;
*(stack.push::<Noun>()?) = *self;
loop {
if stack.stack_is_empty() {
break;
@ -213,25 +214,25 @@ impl Persist for Noun {
match noun.as_either_atom_cell() {
Left(mut atom) => {
space += atom.space_needed(stack);
space += atom.space_needed(stack)?;
}
Right(cell) => {
if !pma_contains(cell.to_raw_pointer(), 1) && !mark(cell.as_allocated()) {
space += size_of::<CellMemory>();
(*stack.push::<Noun>()) = cell.tail();
(*stack.push::<Noun>()) = cell.head();
(*stack.push::<Noun>()?) = cell.tail();
(*stack.push::<Noun>()?) = cell.head();
}
}
}
}
stack.frame_pop();
space
Ok(space)
}
unsafe fn copy_to_buffer(&mut self, stack: &mut NockStack, buffer: &mut *mut u8) {
unsafe fn copy_to_buffer(&mut self, stack: &mut NockStack, buffer: &mut *mut u8) -> AllocResult<()> {
let mut buffer_u64 = (*buffer) as *mut u64;
stack.frame_push(0);
*(stack.push::<*mut Noun>()) = self as *mut Noun;
*(stack.push::<*mut Noun>()?) = self as *mut Noun;
loop {
if stack.stack_is_empty() {
@ -275,8 +276,8 @@ impl Persist for Noun {
*dest = Cell::from_raw_pointer(new_cell_mem).as_noun();
*(stack.push::<*mut Noun>()) = &mut (*new_cell_mem).tail;
*(stack.push::<*mut Noun>()) = &mut (*new_cell_mem).head;
*(stack.push::<*mut Noun>()?) = &mut (*new_cell_mem).tail;
*(stack.push::<*mut Noun>()?) = &mut (*new_cell_mem).head;
buffer_u64 = new_cell_mem.add(1) as *mut u64;
}
@ -286,6 +287,7 @@ impl Persist for Noun {
}
*buffer = buffer_u64 as *mut u8;
stack.frame_pop();
Ok(())
}
unsafe fn handle_to_u64(&self) -> u64 {

View File

@ -37,15 +37,15 @@ enum BTMetaField {
struct Snapshot(pub *mut SnapshotMem);
impl Persist for Snapshot {
unsafe fn space_needed(&mut self, stack: &mut NockStack) -> usize {
unsafe fn space_needed(&mut self, stack: &mut NockStack) -> AllocResult<usize> {
let mut arvo = (*(self.0)).arvo;
let mut cold = (*(self.0)).cold;
let arvo_space_needed = arvo.space_needed(stack);
let cold_space_needed = cold.space_needed(stack);
(((size_of::<SnapshotMem>() + 7) >> 3) << 3) + arvo_space_needed + cold_space_needed
let arvo_space_needed = arvo.space_needed(stack)?;
let cold_space_needed = cold.space_needed(stack)?;
Ok((((size_of::<SnapshotMem>() + 7) >> 3) << 3) + arvo_space_needed + cold_space_needed)
}
unsafe fn copy_to_buffer(&mut self, stack: &mut NockStack, buffer: &mut *mut u8) {
unsafe fn copy_to_buffer(&mut self, stack: &mut NockStack, buffer: &mut *mut u8) -> AllocResult<()> {
let snapshot_buffer = *buffer as *mut SnapshotMem;
std::ptr::copy_nonoverlapping(self.0, snapshot_buffer, 1);
*self = Snapshot(snapshot_buffer);
@ -58,6 +58,7 @@ impl Persist for Snapshot {
let mut cold = (*snapshot_buffer).cold;
cold.copy_to_buffer(stack, buffer)?;
(*snapshot_buffer).cold = cold;
Ok(())
}
unsafe fn handle_to_u64(&self) -> u64 {
@ -131,7 +132,7 @@ impl Context {
self.nock_context.cold = (*snapshot.0).cold;
handle
};
}?;
pma_meta_set(
BTMetaField::SnapshotVersion as usize,
PMA_CURRENT_SNAPSHOT_VERSION,
@ -163,7 +164,7 @@ impl Context {
let hot = Hot::init(&mut stack, constant_hot_state)?;
let warm = Warm::init(&mut stack, &mut cold, &hot)?;
let mug = mug_u32(&mut stack, arvo);
let mug = mug_u32(&mut stack, arvo)?;
let slogger = newt.slogger().expect("Newt should make slogger");
let nock_context = interpreter::Context {
@ -206,7 +207,7 @@ impl Context {
self.nock_context.scry_stack = D(0);
// XX save to PMA
self.mug = mug_u32(&mut self.nock_context.stack, self.arvo);
self.mug = mug_u32(&mut self.nock_context.stack, self.arvo)?;
Ok(())
}

View File

@ -109,7 +109,7 @@ pub fn cue_bitslice(stack: &mut NockStack, buffer: &BitSlice<u64, Lsb0>) -> Resu
unsafe {
stack.with_frame(0, |stack: &mut NockStack| {
*(stack.push::<CueStackEntry>()) =
*(stack.push::<CueStackEntry>()?) =
CueStackEntry::DestinationPointer(&mut result as *mut Noun);
loop {
if stack.stack_is_empty() {
@ -127,7 +127,7 @@ pub fn cue_bitslice(stack: &mut NockStack, buffer: &BitSlice<u64, Lsb0>) -> Resu
let mut backref_noun =
Atom::new(stack, rub_backref(&mut cursor, buffer)?)?.as_noun();
*dest_ptr = backref_map
.lookup(stack, &mut backref_noun)
.lookup(stack, &mut backref_noun)?
.ok_or(Deterministic(Exit, D(0)))?;
} else {
// 10 tag: cell
@ -136,13 +136,13 @@ pub fn cue_bitslice(stack: &mut NockStack, buffer: &BitSlice<u64, Lsb0>) -> Resu
let mut backref_atom =
Atom::new(stack, (cursor - 2) as u64)?.as_noun();
backref_map.insert(stack, &mut backref_atom, *dest_ptr)?;
*(stack.push()) = CueStackEntry::BackRef(
*(stack.push()?) = CueStackEntry::BackRef(
cursor as u64 - 2,
dest_ptr as *const Noun,
);
*(stack.push()) =
*(stack.push()?) =
CueStackEntry::DestinationPointer(&mut (*cell_mem_ptr).tail);
*(stack.push()) =
*(stack.push()?) =
CueStackEntry::DestinationPointer(&mut (*cell_mem_ptr).head);
}
} else {
@ -284,14 +284,14 @@ pub fn jam(stack: &mut NockStack, noun: Noun) -> AllocResult<Atom> {
};
stack.frame_push(0);
unsafe {
*(stack.push::<Noun>()) = noun;
*(stack.push::<Noun>()?) = noun;
};
'jam: loop {
if stack.stack_is_empty() {
break;
} else {
let mut noun = unsafe { *(stack.top::<Noun>()) };
if let Some(backref) = backref_map.lookup(stack, &mut noun) {
if let Some(backref) = backref_map.lookup(stack, &mut noun)? {
match noun.as_either_atom_cell() {
Left(atom) => {
let atom_size = met0_usize(atom);
@ -324,8 +324,8 @@ pub fn jam(stack: &mut NockStack, noun: Noun) -> AllocResult<Atom> {
jam_cell(stack, &mut state);
unsafe {
stack.pop::<Noun>();
*(stack.push::<Noun>()) = cell.tail();
*(stack.push::<Noun>()) = cell.head();
*(stack.push::<Noun>()?) = cell.tail();
*(stack.push::<Noun>()?) = cell.head();
};
continue;
}
@ -545,7 +545,7 @@ mod tests {
let mut stack = setup_stack();
let mut rng_clone = rng.clone();
let (original, total_size) = generate_deeply_nested_noun(&mut stack, depth, &mut rng_clone);
let (original, total_size) = generate_deeply_nested_noun(&mut stack, depth, &mut rng_clone).unwrap();
println!(
"Total size of all generated nouns: {:.2} KB",
@ -563,7 +563,7 @@ mod tests {
assert_noun_eq(&mut stack, cued, original);
}
fn generate_random_noun(stack: &mut NockStack, bits: usize, rng: &mut StdRng) -> (Noun, usize) {
fn generate_random_noun(stack: &mut NockStack, bits: usize, rng: &mut StdRng) -> AllocResult<(Noun, usize)> {
const MAX_DEPTH: usize = 100; // Adjust this value as needed
fn inner(
stack: &mut NockStack,
@ -571,7 +571,7 @@ mod tests {
rng: &mut StdRng,
depth: usize,
accumulated_size: usize,
) -> (Noun, usize) {
) -> AllocResult<(Noun, usize)> {
let mut done = false;
if depth >= MAX_DEPTH || stack.size() < 1024 || accumulated_size > stack.size() - 1024 {
// println!("Done at depth and size: {} {:.2} KB", depth, accumulated_size as f64 / 1024.0);
@ -584,25 +584,25 @@ mod tests {
let noun = atom.as_noun();
(noun, accumulated_size + noun.mass())
} else {
let (left, left_size) = inner(stack, bits / 2, rng, depth + 1, accumulated_size);
let (right, _) = inner(stack, bits / 2, rng, depth + 1, left_size);
let (left, left_size) = inner(stack, bits / 2, rng, depth + 1, accumulated_size)?;
let (right, _) = inner(stack, bits / 2, rng, depth + 1, left_size)?;
let cell = Cell::new(stack, left, right).unwrap();
let noun = cell.as_noun();
(noun, noun.mass())
};
if unsafe { result.0.space_needed(stack) } > stack.size() {
if unsafe { result.0.space_needed(stack)? } > stack.size() {
eprintln!(
"Stack size exceeded with noun size {:.2} KB",
result.0.mass() as f64 / 1024.0
);
unsafe {
let top_noun = *stack.top::<Noun>();
(top_noun, result.1)
Ok((top_noun, result.1))
}
} else {
result
Ok(result)
}
}
@ -613,18 +613,18 @@ mod tests {
stack: &mut NockStack,
depth: usize,
rng: &mut StdRng,
) -> (Noun, usize) {
) -> AllocResult<(Noun, usize)> {
if depth == 0 {
let (noun, size) = generate_random_noun(stack, 100, rng);
(noun, size)
let (noun, size) = generate_random_noun(stack, 100, rng)?;
Ok((noun, size))
} else {
let (left, left_size) = generate_deeply_nested_noun(stack, depth - 1, rng);
let (right, right_size) = generate_deeply_nested_noun(stack, depth - 1, rng);
let (left, left_size) = generate_deeply_nested_noun(stack, depth - 1, rng)?;
let (right, right_size) = generate_deeply_nested_noun(stack, depth - 1, rng)?;
let cell = Cell::new(stack, left, right).unwrap();
let mut noun = cell.as_noun();
let total_size = left_size + right_size + noun.mass();
if unsafe { noun.space_needed(stack) } > stack.size() {
if unsafe { noun.space_needed(stack)? } > stack.size() {
eprintln!(
"Stack size exceeded at depth {} with noun size {:.2} KB",
depth,
@ -632,11 +632,11 @@ mod tests {
);
unsafe {
let top_noun = *stack.top::<Noun>();
(top_noun, total_size)
Ok((top_noun, total_size))
}
} else {
// println!("Size: {:.2} KB, depth: {}", noun.mass() as f64 / 1024.0, depth);
(noun, total_size)
Ok((noun, total_size))
}
}
}
@ -662,7 +662,7 @@ mod tests {
let mut rng = StdRng::seed_from_u64(1);
// Create an atom with a very large value to potentially cause overflow
let (large_atom, _) = generate_deeply_nested_noun(&mut big_stack, 5, &mut rng);
let (large_atom, _) = generate_deeply_nested_noun(&mut big_stack, 5, &mut rng).unwrap();
// Attempt to jam and then cue the large atom in the big stack
let jammed = jam(&mut big_stack, large_atom).unwrap();

View File

@ -5,6 +5,7 @@ use bitvec::slice::BitSlice;
use crate::interpreter::{interpret, Context};
use crate::jets::util::slot;
use crate::jets::{Jet, JetErr};
use crate::mem::AllocResult;
use crate::noun::{Noun, D, T};
/// Return Err if the computation crashed or should punt to Nock
@ -19,13 +20,13 @@ pub struct Site {
impl Site {
/// Prepare a locally cached gate to call repeatedly.
pub fn new(ctx: &mut Context, core: &mut Noun) -> Site {
pub fn new(ctx: &mut Context, core: &mut Noun) -> AllocResult<Site> {
let mut battery = slot(*core, 2).unwrap();
let context = slot(*core, 7).unwrap();
let warm_result = ctx
.warm
.find_jet(&mut ctx.stack, core, &mut battery)
.find_jet(&mut ctx.stack, core, &mut battery)?
.filter(|(_jet, mut path)| {
// check that 7 is a prefix of the parent battery axis,
// to ensure that the sample (axis 6) is not part of the jet match.
@ -34,10 +35,12 @@ impl Site {
// jet and we only actually match one of them, but we check all of them and run
// unjetted if any have an axis outside 7.
let axis_7_bits: &BitSlice<u64, Lsb0> = BitSlice::from_element(&7u64);
let batteries_list = ctx.cold.find(&mut ctx.stack, &mut path);
// TODO: This panic is ugly but the code is awkward.
let batteries_list = ctx.cold.find(&mut ctx.stack, &mut path).expect("OOM'd on ctx.cold.find in Site::new");
let mut ret = true;
for mut batteries in batteries_list {
if let Some((_battery, parent_axis)) = batteries.next() {
if let Some((_, parent_axis)) = batteries.next() {
// let parent_axis = unsafe { (*battery.0).parent_axis };
let parent_axis_prefix_bits = &parent_axis.as_bitslice()[0..3];
if parent_axis_prefix_bits == axis_7_bits {
continue;
@ -51,13 +54,13 @@ impl Site {
}
}
ret
});
Site {
});
Ok(Site {
battery,
context,
jet: warm_result.map(|(jet, _)| jet),
path: warm_result.map(|(_, path)| path).unwrap_or(D(0)),
}
})
}
}

View File

@ -1,6 +1,7 @@
use crate::assert_acyclic;
use crate::assert_no_forwarding_pointers;
use crate::assert_no_junior_pointers;
use crate::mem::AllocResult;
use crate::mem::{NockStack, ALLOC, FRAME, STACK};
use crate::noun::Noun;
use crate::persist::{pma_contains, pma_dirty};
@ -23,7 +24,18 @@ macro_rules! assert_no_junior_pointers {
( $x:expr, $y:expr ) => {};
}
pub unsafe fn unifying_equality(stack: &mut NockStack, a: *mut Noun, b: *mut Noun) -> bool {
#[cfg(test)]
pub(crate) mod test {
use crate::mem::NockStack;
use crate::noun::Noun;
/// Tests only, not part of the actual implementation. Use this outside of #[cfg(test)] and I will sic the linter on you.
pub unsafe fn unifying_equality(stack: &mut NockStack, a: *mut Noun, b: *mut Noun) -> bool {
super::unifying_equality(stack, a, b).expect("OOM error in test::unifying_equality")
}
}
pub unsafe fn unifying_equality(stack: &mut NockStack, a: *mut Noun, b: *mut Noun) -> AllocResult<bool> {
/* This version of unifying equality is not like that of vere.
* Vere does a tree comparison (accelerated by pointer equality and short-circuited by mug
* equality) and then unifies the nouns at the top level if they are equal.
@ -56,18 +68,18 @@ pub unsafe fn unifying_equality(stack: &mut NockStack, a: *mut Noun, b: *mut Nou
// If the nouns are already word-equal we have nothing to do
if (*a).raw_equals(*b) {
return true;
return Ok(true);
};
// If the nouns have cached mugs which are disequal we have nothing to do
if let (Ok(a_alloc), Ok(b_alloc)) = ((*a).as_allocated(), (*b).as_allocated()) {
if let (Some(a_mug), Some(b_mug)) = (a_alloc.get_cached_mug(), b_alloc.get_cached_mug()) {
if a_mug != b_mug {
return false;
return Ok(false);
};
};
};
stack.frame_push(0);
*(stack.push::<(*mut Noun, *mut Noun)>()) = (a, b);
*(stack.push::<(*mut Noun, *mut Noun)>()?) = (a, b);
loop {
if stack.stack_is_empty() {
break;
@ -145,9 +157,9 @@ pub unsafe fn unifying_equality(stack: &mut NockStack, a: *mut Noun, b: *mut Nou
* If both sides are equal, then we will discover pointer
* equality when we return and unify the cell.
*/
*(stack.push::<(*mut Noun, *mut Noun)>()) =
*(stack.push::<(*mut Noun, *mut Noun)>()?) =
(x_cell.tail_as_mut(), y_cell.tail_as_mut());
*(stack.push::<(*mut Noun, *mut Noun)>()) =
*(stack.push::<(*mut Noun, *mut Noun)>()?) =
(x_cell.head_as_mut(), y_cell.head_as_mut());
continue;
}
@ -169,7 +181,7 @@ pub unsafe fn unifying_equality(stack: &mut NockStack, a: *mut Noun, b: *mut Nou
assert_no_junior_pointers!(stack, *a);
assert_no_junior_pointers!(stack, *b);
(*a).raw_equals(*b)
Ok((*a).raw_equals(*b))
}
unsafe fn senior_pointer_first(