Mirror of https://github.com/urbit/ares.git, synced 2024-11-26 09:57:56 +03:00

Commit b7fac596c3: [ares] get rid of warnings
Parent: 140dcf4e28
@@ -30,7 +30,7 @@ fn main() -> io::Result<()> {
     let mut i = 0;
     let mut input = unsafe { DirectAtom::new_unchecked(0).as_atom().as_noun() };
     loop {
-        if (i >= 1) {
+        if i >= 1 {
            break;
        };
        i += 1;
@@ -68,7 +68,7 @@ fn main() -> io::Result<()> {
            out_map.as_mut_ptr(),
            jammed_output.size() << 3,
        );
-        out_map.flush();
+        out_map.flush()?;
    };

    Ok(())
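Note: the second hunk is the usual fix for the unused_must_use warning. flush() returns io::Result<()>, and dropping that Result silently discards any I/O error; since main already returns io::Result<()>, the ? operator propagates it instead. A minimal sketch of the same pattern, assuming a hypothetical stdout writer rather than the Ares out_map (which is a memory map):

use std::io::{self, Write};

fn main() -> io::Result<()> {
    let mut out = io::BufWriter::new(io::stdout());
    out.write_all(b"jammed output\n")?;
    // flush() returns io::Result<()>, which is #[must_use]; ignoring it
    // triggers a warning, while `?` forwards any io::Error to the caller.
    out.flush()?;
    Ok(())
}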
@@ -64,6 +64,7 @@ pub enum Polarity {
 /** A stack for Nock computation, which supports stack allocation and delimited copying collection
  * for returned nouns
  */
+#[allow(dead_code)] // We need the memory field to keep our memory from being unmapped
 pub struct NockStack {
     /** The base pointer */
     start: *const u64,
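Note: the added #[allow(dead_code)] silences the "field is never read" lint. As the comment says, the field is held purely so the backing memory stays mapped while the stack's raw pointers point into it. A minimal sketch of the same idea, using a hypothetical RawArena with a Vec standing in for the real memory map:

// The backing field is never read, only owned, so rustc's dead_code lint
// would warn without the allow attribute.
#[allow(dead_code)]
struct RawArena {
    base: *mut u64,    // raw pointer handed out for allocation
    backing: Vec<u64>, // owns the allocation `base` points into; never read
}

impl RawArena {
    fn new(words: usize) -> Self {
        let mut backing = vec![0u64; words];
        let base = backing.as_mut_ptr();
        RawArena { base, backing }
    }
}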
@@ -3,73 +3,6 @@ use crate::mem::*;
 use crate::noun::{Allocated, Atom, DirectAtom, Noun};
 use either::Either::*;
 use murmur3::murmur3_32_nocopy;
-use std::cmp::min;
-use std::io::{Read, Result};
-use std::ptr::{copy_nonoverlapping, write_bytes};
-
-/** A reader for an atom which pads the atom out to a given length */
-struct PaddedReadAtom {
-    atom_bytes: usize,    // actual size of the stored atom
-    atom_base: *const u8, // pointer to the atom data
-    atom_cursor: usize,   // How many bytes we have read
-    atom_len: usize,      // The total padded length
-}
-
-impl PaddedReadAtom {
-    fn new(atom: Atom, len: usize) -> Self {
-        match atom.as_either() {
-            Left(direct) => PaddedReadAtom {
-                atom_bytes: 8,
-                atom_base: (&direct as *const DirectAtom) as *const u8,
-                atom_cursor: 0,
-                atom_len: len,
-            },
-            Right(indirect) => PaddedReadAtom {
-                atom_bytes: indirect.size() << 3, // size is in 64 bit words, multiply by 8 to get bytes
-                atom_base: indirect.data_pointer() as *const u8, // data pointer, but for bytes
-                atom_cursor: 0,
-                atom_len: len,
-            },
-        }
-    }
-}
-
-impl Read for PaddedReadAtom {
-    fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
-        if self.atom_cursor >= self.atom_len {
-            Ok(0) // we are done
-        } else {
-            let req = buf.len(); // How many bytes does the reading caller want?
-            if self.atom_cursor < self.atom_bytes {
-                // are we still reading bytes from the atom?
-                let len = min(
-                    self.atom_len - self.atom_cursor,
-                    min(self.atom_bytes - self.atom_cursor, req),
-                );
-                // copy out bytes into the buffer, not running over the atom length itself, the
-                // padded length, or the buffer length
-                unsafe {
-                    copy_nonoverlapping(
-                        self.atom_base.add(self.atom_cursor),
-                        buf.as_mut_ptr(),
-                        len,
-                    );
-                }
-                self.atom_cursor += len;
-                Ok(len)
-            } else {
-                // We are past the atom and into padding
-                let len = min(self.atom_len - self.atom_cursor, req);
-                // write 0s until we hit the buffer length or the padded length
-                unsafe {
-                    write_bytes(buf.as_mut_ptr(), 0, len);
-                }
-                self.atom_cursor += len;
-                Ok(len)
-            }
-        }
-    }
-}
-
 // Murmur3 hash an atom with a given padded length
 fn muk_u32(syd: u32, len: usize, key: Atom) -> u32 {
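Note: the deleted PaddedReadAtom was a Read adapter that served an atom's bytes followed by zero padding up to a fixed length, presumably left unused once muk_u32 hashed atom bytes directly via murmur3_32_nocopy, hence the dead-code warning. A minimal safe sketch of the same zero-padding reader pattern over a plain byte slice, with a hypothetical PaddedReader (not the deleted type):

use std::cmp::min;
use std::io::{Read, Result};

// Serve the real bytes first, then zeros, until padded_len bytes total have
// been produced; reads never run past the data, the padding, or the buffer.
struct PaddedReader<'a> {
    data: &'a [u8],
    cursor: usize,
    padded_len: usize,
}

impl<'a> Read for PaddedReader<'a> {
    fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
        if self.cursor >= self.padded_len {
            return Ok(0); // both data and padding are exhausted
        }
        let len = min(buf.len(), self.padded_len - self.cursor);
        if self.cursor < self.data.len() {
            // still inside the real data: copy as much of it as fits
            let n = min(len, self.data.len() - self.cursor);
            buf[..n].copy_from_slice(&self.data[self.cursor..self.cursor + n]);
            self.cursor += n;
            Ok(n)
        } else {
            // past the real data: emit zero padding
            buf[..len].fill(0);
            self.cursor += len;
            Ok(len)
        }
    }
}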
@@ -3,7 +3,7 @@ use either::Either;
 use intmap::IntMap;
 use std::fmt::Debug;
 use std::ptr;
-use std::slice::{from_raw_parts, from_raw_parts_mut, from_ref};
+use std::slice::{from_raw_parts, from_raw_parts_mut};

 /** Tag for a direct atom. */
 const DIRECT_TAG: u64 = 0x0;
@@ -463,7 +463,7 @@ impl Atom {

    pub fn data_pointer(&self) -> *const u64 {
        match self.as_either() {
-            Either::Left(direct) => (self as *const Atom) as *const u64,
+            Either::Left(_direct) => (self as *const Atom) as *const u64,
            Either::Right(indirect) => indirect.data_pointer(),
        }
    }
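Note: these two hunks are also pure lint fixes: from_ref was an unused import, and the direct binding in data_pointer is never read in the Left arm, so it gets an underscore prefix. A minimal sketch of the unused-binding convention, using a hypothetical describe function over the same either crate:

use either::Either;

fn describe(atom_repr: Either<u64, Vec<u64>>) -> &'static str {
    match atom_repr {
        // The payload is not needed here; the underscore prefix (or a bare `_`)
        // keeps the pattern exhaustive without an unused_variables warning.
        Either::Left(_direct) => "direct",
        Either::Right(_indirect) => "indirect",
    }
}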
@@ -36,9 +36,7 @@ pub fn cue(stack: &mut NockStack, buffer: Atom) -> Noun {
        if unsafe { stack.prev_stack_pointer_equals_local(0) } {
            let mut result = unsafe { *(stack.local_noun_pointer(1)) };
            assert_acyclic!(result);
-            unsafe {
-                stack.pop(&mut result);
-            };
+            stack.pop(&mut result);
            break result;
        } else {
            let dest_ptr: *mut Noun = unsafe { *(stack.top_in_previous_frame()) };
@@ -97,7 +95,6 @@ pub fn cue(stack: &mut NockStack, buffer: Atom) -> Noun {

 // TODO: use first_zero() on a slice of the buffer
 fn get_size(cursor: &mut usize, buffer: &BitSlice<u64, Lsb0>) -> usize {
-    let mut bitsize: usize = 0;
    let buff_at_cursor = &buffer[*cursor..];
    let bitsize = buff_at_cursor
        .first_one()
@@ -179,7 +176,6 @@ pub fn jam(stack: &mut NockStack, noun: Noun) -> Atom {
        } else {
            let mut noun = unsafe { *(stack.top_in_previous_frame::<Noun>()) };
            let mug = mug_u32(stack, noun);
-            let backref_map_len = backref_map.len();
            match backref_map.get_mut(mug as u64) {
                None => {}
                Some(backref_chain) => {
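Note: the serialization hunks clear the remaining warnings: the pop call in cue no longer needs an unsafe block (an unsafe block with nothing unsafe inside trips unused_unsafe), and get_size and jam drop let bindings that are never read. A hypothetical before/after sketch of both fixes, not the Ares functions themselves:

fn total_len(chunks: &[Vec<u8>]) -> usize {
    // Before:
    //     let mut total: usize = 0;                       // never read: unused_variables
    //     unsafe { chunks.iter().map(|c| c.len()).sum() } // nothing unsafe: unused_unsafe
    // After: compute the value directly, with no dead binding and no unsafe block.
    chunks.iter().map(|c| c.len()).sum()
}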