pma: lint and c warnings cleanup
commit 3ae877f655 (parent 58c49bc5e1)
@@ -5,7 +5,7 @@ use crate::persist::{pma_contains, Persist};
 use crate::unifying_equality::unifying_equality;
 use either::Either::{self, *};
 use std::mem::size_of;
-use std::ptr::{copy_nonoverlapping, null, null_mut};
+use std::ptr::{copy_nonoverlapping, null_mut};
 use std::slice;
 
 type MutStemEntry<T> = Either<*mut MutStem<T>, Leaf<T>>;
@@ -557,7 +557,7 @@ impl<T: Copy + Preserve> Preserve for Hamt<T> {
                         typemap: next_stem.typemap,
                         buffer: dest_buffer,
                     };
-                    *(stem.buffer.add(idx) as *mut Entry<T>) =
+                    *stem.buffer.add(idx) =
                         Entry { stem: new_stem };
                     assert!(traversal_depth <= 5); // will increment
                     traversal_stack[traversal_depth - 1] =
@@ -583,7 +583,7 @@ impl<T: Copy + Preserve> Preserve for Hamt<T> {
                         pair.0.preserve(stack);
                         pair.1.preserve(stack);
                     }
-                    *(stem.buffer.add(idx) as *mut Entry<T>) =
+                    *stem.buffer.add(idx) =
                         Entry { leaf: new_leaf };
                 }
                 position += 1;
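The two Hamt hunks above drop a cast of `stem.buffer.add(idx)` to the type it already has before writing through it. A minimal standalone sketch of the pattern, with a plain generic `T` standing in for Ares's `Entry<T>`:

    // Casting a raw pointer to its own type is a no-op that clippy reports
    // as `unnecessary_cast`; the write behaves identically without it.
    unsafe fn write_slot<T>(buffer: *mut T, idx: usize, val: T) {
        // before: *(buffer.add(idx) as *mut T) = val;
        *buffer.add(idx) = val;
    }

    fn main() {
        let mut slots = [0u64; 4];
        unsafe { write_slot(slots.as_mut_ptr(), 2, 42) };
        assert_eq!(slots[2], 42);
    }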
@@ -615,7 +615,7 @@ impl<T: Copy + Persist> Persist for Hamt<T> {
            typemap: 0,
            buffer: null_mut(),
        }; 6];
-        traversal[0] = (*self.0);
+        traversal[0] = *self.0;
 
        loop {
            assert!(depth < 6);
@@ -630,8 +630,8 @@ impl<T: Copy + Persist> Persist for Hamt<T> {
            let next_chunk = traversal[depth].bitmap.trailing_zeros();
            let next_type = traversal[depth].typemap & (1 << next_chunk) != 0;
            let next_entry = *traversal[depth].buffer;
-            traversal[depth].bitmap = traversal[depth].bitmap >> (next_chunk + 1);
-            traversal[depth].typemap = traversal[depth].typemap >> (next_chunk + 1);
+            traversal[depth].bitmap >>= next_chunk + 1;
+            traversal[depth].typemap >>= next_chunk + 1;
            traversal[depth].buffer = traversal[depth].buffer.add(1);
 
            if next_type {
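The shift rewrites above are clippy's `assign_op_pattern`: `x = x >> n` becomes `x >>= n` with identical semantics, just without repeating the left-hand side. A small self-contained sketch (field names are stand-ins for the real Stem fields):

    struct Cursor {
        bitmap: u32,
        typemap: u32,
    }

    fn consume(c: &mut Cursor, next_chunk: u32) {
        // before: c.bitmap = c.bitmap >> (next_chunk + 1);
        c.bitmap >>= next_chunk + 1;
        c.typemap >>= next_chunk + 1;
    }

    fn main() {
        let mut c = Cursor { bitmap: 0b1100, typemap: 0b1000 };
        consume(&mut c, 1); // shift both maps right by 2
        assert_eq!(c.bitmap, 0b11);
        assert_eq!(c.typemap, 0b10);
    }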
@@ -676,7 +676,7 @@ impl<T: Copy + Persist> Persist for Hamt<T> {
        let stem_ptr = *buffer as *mut Stem<T>;
        copy_nonoverlapping(self.0, stem_ptr, 1);
        *buffer = stem_ptr.add(1) as *mut u8;
-        (*self).0 = stem_ptr;
+        self.0 = stem_ptr;
 
        let stem_buffer_size = (*stem_ptr).size();
        if pma_contains((*stem_ptr).buffer, stem_buffer_size) {
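This hunk starts a change repeated through the rest of the Rust diff: `(*self).0` and `(*dest).0` become `self.0` and `dest.0`. Field access through a reference auto-dereferences, so the explicit `(*…)` adds nothing. A sketch with an invented tuple struct:

    struct Handle(*mut u64);

    impl Handle {
        fn retarget(&mut self, new: *mut u64) {
            // before: (*self).0 = new;
            self.0 = new; // auto-deref reaches the field the same way
        }
    }

    fn main() {
        let mut word = 5u64;
        let mut h = Handle(std::ptr::null_mut());
        h.retarget(&mut word as *mut u64);
        assert_eq!(unsafe { *h.0 }, 5);
    }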
@@ -709,8 +709,8 @@ impl<T: Copy + Persist> Persist for Hamt<T> {
            let next_type = traversal[depth].typemap & (1 << next_chunk) != 0;
            let next_entry_ptr = traversal[depth].buffer;
 
-            traversal[depth].bitmap = traversal[depth].bitmap >> (next_chunk + 1);
-            traversal[depth].typemap = traversal[depth].typemap >> (next_chunk + 1);
+            traversal[depth].bitmap >>= next_chunk + 1;
+            traversal[depth].typemap >>= next_chunk + 1;
            traversal[depth].buffer = traversal[depth].buffer.add(1);
 
            if next_type {
@@ -57,15 +57,15 @@ impl Persist for Batteries {
    unsafe fn copy_to_buffer(&mut self, stack: &mut NockStack, buffer: &mut *mut u8) {
        let mut dest = self;
        loop {
-            if (*dest).0.is_null() {
+            if dest.0.is_null() {
                break;
            }
-            if pma_contains((*dest).0, 1) {
+            if pma_contains(dest.0, 1) {
                break;
            }
 
            let batteries_mem_ptr = *buffer as *mut BatteriesMem;
-            copy_nonoverlapping((*dest).0, batteries_mem_ptr, 1);
+            copy_nonoverlapping(dest.0, batteries_mem_ptr, 1);
            *buffer = batteries_mem_ptr.add(1) as *mut u8;
 
            (*batteries_mem_ptr).battery.copy_to_buffer(stack, buffer);
@@ -73,8 +73,8 @@ impl Persist for Batteries {
                .parent_axis
                .copy_to_buffer(stack, buffer);
 
-            (*dest).0 = batteries_mem_ptr;
-            dest = &mut (*(*dest).0).parent_batteries;
+            dest.0 = batteries_mem_ptr;
+            dest = &mut (*dest.0).parent_batteries;
        }
    }
 
@@ -222,20 +222,20 @@ impl Persist for BatteriesList {
        let mut dest = self;
 
        loop {
-            if (*dest).0.is_null() {
+            if dest.0.is_null() {
                break;
            }
-            if pma_contains((*dest).0, 1) {
+            if pma_contains(dest.0, 1) {
                break;
            }
 
            let list_mem_ptr = *buffer as *mut BatteriesListMem;
-            copy_nonoverlapping((*dest).0, list_mem_ptr, 1);
+            copy_nonoverlapping(dest.0, list_mem_ptr, 1);
            *buffer = list_mem_ptr.add(1) as *mut u8;
-            (*dest).0 = list_mem_ptr;
+            dest.0 = list_mem_ptr;
 
-            (*(*dest).0).batteries.copy_to_buffer(stack, buffer);
-            dest = &mut (*(*dest).0).next;
+            (*dest.0).batteries.copy_to_buffer(stack, buffer);
+            dest = &mut (*dest.0).next;
        }
    }
 
@@ -345,21 +345,21 @@ impl Persist for NounList {
        let mut dest = self;
 
        loop {
-            if (*dest).0.is_null() {
+            if dest.0.is_null() {
                break;
            }
-            if pma_contains((*dest).0, 1) {
+            if pma_contains(dest.0, 1) {
                break;
            }
 
            let noun_list_mem_ptr = *buffer as *mut NounListMem;
-            copy_nonoverlapping((*dest).0, noun_list_mem_ptr, 1);
+            copy_nonoverlapping(dest.0, noun_list_mem_ptr, 1);
            *buffer = noun_list_mem_ptr.add(1) as *mut u8;
 
-            (*dest).0 = noun_list_mem_ptr;
-            (*(*dest).0).element.copy_to_buffer(stack, buffer);
+            dest.0 = noun_list_mem_ptr;
+            (*dest.0).element.copy_to_buffer(stack, buffer);
 
-            dest = &mut (*(*dest).0).next;
+            dest = &mut (*dest.0).next;
        }
    }
 
@@ -456,9 +456,9 @@ impl Persist for Cold {
        }
 
        let mut bytes = size_of::<ColdMem>();
-        bytes += (*(*self).0).battery_to_paths.space_needed(stack);
-        bytes += (*(*self).0).root_to_paths.space_needed(stack);
-        bytes += (*(*self).0).path_to_batteries.space_needed(stack);
+        bytes += (*self.0).battery_to_paths.space_needed(stack);
+        bytes += (*self.0).root_to_paths.space_needed(stack);
+        bytes += (*self.0).path_to_batteries.space_needed(stack);
        bytes
    }
 
@@ -471,11 +471,11 @@ impl Persist for Cold {
        copy_nonoverlapping(self.0, cold_mem_ptr, 1);
        *buffer = cold_mem_ptr.add(1) as *mut u8;
 
-        (*self).0 = cold_mem_ptr;
+        self.0 = cold_mem_ptr;
 
-        (*(*self).0).battery_to_paths.copy_to_buffer(stack, buffer);
-        (*(*self).0).root_to_paths.copy_to_buffer(stack, buffer);
-        (*(*self).0).path_to_batteries.copy_to_buffer(stack, buffer);
+        (*self.0).battery_to_paths.copy_to_buffer(stack, buffer);
+        (*self.0).root_to_paths.copy_to_buffer(stack, buffer);
+        (*self.0).path_to_batteries.copy_to_buffer(stack, buffer);
    }
 
    unsafe fn handle_to_u64(&self) -> u64 {
@@ -5,7 +5,6 @@ use crate::noun::{Atom, Cell, CellMemory, IndirectAtom, Noun, NounAllocator};
 use assert_no_alloc::permit_alloc;
 use either::Either::{self, Left, Right};
 use ibig::Stack;
-use libc::{c_void, memcmp};
 use memmap::MmapMut;
 use std::alloc::Layout;
 use std::mem;
@@ -1,4 +1,3 @@
-use crate::jets::cold::Cold;
 use crate::mem::NockStack;
 use crate::noun::{Allocated, Atom, Cell, CellMemory, IndirectAtom, Noun};
 use ares_pma::*;
@@ -39,8 +38,7 @@ pub fn pma_open(path: PathBuf) -> Result<(), std::io::Error> {
    bt_state_new(&mut state);
    let err = bt_state_open(state, path_cstring.as_ptr(), PMA_FLAGS, PMA_MODE);
    if err == 0 {
-        PMA.set(PMAState(state as u64))
-            .or_else(|state| Err(state.0 as *mut BT_state))
+        PMA.set(PMAState(state as u64)).map_err(|state| state.0 as *mut BT_state)
            .expect("PMA state already initialized to:");
        assert!(get_pma_state().is_some());
        Ok(())
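The hunk above is clippy's `bind_instead_of_map`: calling `.or_else(|e| Err(...))` only to wrap the error again is just a mapping over the error value, which `.map_err(...)` states directly. A sketch of the same rewrite on an ordinary `Result`:

    fn parse_port(s: &str) -> Result<u16, String> {
        // before: s.parse::<u16>().or_else(|e| Err(format!("bad port: {e}")))
        s.parse::<u16>().map_err(|e| format!("bad port: {e}"))
    }

    fn main() {
        assert_eq!(parse_port("8080"), Ok(8080));
        assert!(parse_port("nope").is_err());
    }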
@@ -162,19 +160,17 @@ unsafe fn unmark(a: Allocated) {
 }
 
 impl Persist for Atom {
-    unsafe fn space_needed(&mut self, stack: &mut NockStack) -> usize {
+    unsafe fn space_needed(&mut self, _stack: &mut NockStack) -> usize {
         if let Ok(indirect) = self.as_indirect() {
             let count = indirect.raw_size();
-            if !pma_contains(indirect.to_raw_pointer(), count) {
-                if !mark(indirect.as_allocated()) {
-                    return count * size_of::<u64>();
-                }
+            if !pma_contains(indirect.to_raw_pointer(), count) && !mark(indirect.as_allocated()) {
+                return count * size_of::<u64>();
             }
         }
         0
     }
 
-    unsafe fn copy_to_buffer(&mut self, stack: &mut NockStack, buffer: &mut *mut u8) {
+    unsafe fn copy_to_buffer(&mut self, _stack: &mut NockStack, buffer: &mut *mut u8) {
        if let Ok(mut indirect) = self.as_indirect() {
            let count = indirect.raw_size();
            if !pma_contains(indirect.to_raw_pointer(), count) {
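Two cleanups meet in this hunk: the unused `stack` parameters gain a `_` prefix, and the nested `if`s collapse into one condition, clippy's `collapsible_if`. Because `&&` short-circuits, the folded form runs the second test only when the first passes, exactly like the nested original. A sketch with stand-in predicates:

    fn already_persisted(x: u64) -> bool {
        x % 2 == 0
    }

    fn already_marked(x: u64) -> bool {
        x > 100
    }

    fn space_for(x: u64) -> usize {
        // before:
        //   if !already_persisted(x) {
        //       if !already_marked(x) {
        //           return 8;
        //       }
        //   }
        if !already_persisted(x) && !already_marked(x) {
            return 8; // same control flow, one less nesting level
        }
        0
    }

    fn main() {
        assert_eq!(space_for(3), 8);   // not persisted, not marked
        assert_eq!(space_for(4), 0);   // persisted
        assert_eq!(space_for(101), 0); // marked
    }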
@@ -219,8 +215,7 @@ impl Persist for Noun {
                    space += atom.space_needed(stack);
                }
                Right(cell) => {
-                    if !pma_contains(cell.to_raw_pointer(), 1) {
-                        if !mark(cell.as_allocated()) {
+                    if !pma_contains(cell.to_raw_pointer(), 1) && !mark(cell.as_allocated()) {
                        space += size_of::<CellMemory>();
                        (*stack.push::<Noun>()) = cell.tail();
                        (*stack.push::<Noun>()) = cell.head();
@@ -228,7 +223,6 @@ impl Persist for Noun {
                    }
                }
            }
-        }
        stack.frame_pop();
        space
    }
@@ -236,7 +230,7 @@ impl Persist for Noun {
    unsafe fn copy_to_buffer(&mut self, stack: &mut NockStack, buffer: &mut *mut u8) {
        let mut buffer_u64 = (*buffer) as *mut u64;
        stack.frame_push(0);
-        *(stack.push::<*mut Noun>()) = (self as *mut Noun);
+        *(stack.push::<*mut Noun>()) = self as *mut Noun;
 
        loop {
            if stack.stack_is_empty() {
@@ -247,7 +241,7 @@ impl Persist for Noun {
            stack.pop::<*mut Noun>();
 
            match (*dest).as_either_direct_allocated() {
-                Left(direct) => {}
+                Left(_direct) => {}
                Right(allocated) => {
                    if let Some(a) = allocated.forwarding_pointer() {
                        *dest = a.as_noun();
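`Left(direct) => {}` becomes `Left(_direct) => {}` for the same reason the `stack` parameters became `_stack`: rustc's `unused_variables` lint is silenced by prefixing an intentionally unused name with `_`, while keeping the name as documentation. A sketch with invented types:

    enum Shape {
        Direct(u64),
        Allocated(*const u64),
    }

    // `_ctx` is accepted but deliberately ignored; the underscore says so.
    fn describe(shape: &Shape, _ctx: &str) -> &'static str {
        match shape {
            Shape::Direct(_value) => "direct", // binding kept for clarity, marked unused
            Shape::Allocated(_) => "allocated",
        }
    }

    fn main() {
        assert_eq!(describe(&Shape::Direct(1), "unused"), "direct");
    }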
@@ -7,7 +7,6 @@ use crate::jets::list::util::{lent, zing};
 use crate::jets::nock::util::mook;
 use crate::jets::warm::Warm;
 use crate::mem::NockStack;
-use crate::mem::Preserve;
 use crate::mug::*;
 use crate::newt::Newt;
 use crate::noun::{Atom, Cell, DirectAtom, Noun, Slots, D, T};
@@ -160,8 +159,8 @@ impl Context {
            }
        };
 
-        let mut hot = Hot::init(&mut stack, constant_hot_state);
-        let warm = Warm::init(&mut stack, &mut cold, &mut hot);
+        let hot = Hot::init(&mut stack, constant_hot_state);
+        let warm = Warm::init(&mut stack, &mut cold, &hot);
        let mug = mug_u32(&mut stack, arvo);
 
        let nock_context = interpreter::Context {
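Here rustc's `unused_mut` lint fires because `hot` is never mutated, and the downstream call only needs a shared borrow, so `&mut hot` weakens to `&hot`. A sketch with stand-in types:

    struct Hot {
        entries: usize,
    }

    fn warm_init(hot: &Hot) -> usize {
        hot.entries * 2 // reads only, so a shared reference suffices
    }

    fn main() {
        // before: let mut hot = ...;  // warning: variable does not need to be mutable
        let hot = Hot { entries: 3 };
        assert_eq!(warm_init(&hot), 6); // before: warm_init(&mut hot)
    }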
@@ -3,10 +3,7 @@ extern crate bindgen;
 use std::env;
 use std::path::PathBuf;
 
-use bindgen::CargoCallbacks;
-
 fn main() {
-    let profile = env::var("PROFILE").unwrap();
     let opt_level = env::var("OPT_LEVEL").unwrap();
     let define_debug = if env::var("CARGO_FEATURE_DEBUG_PRINTS").is_ok() {
         "-DDEBUG"
@@ -72,7 +69,7 @@ fn main() {
        .header(headers_path_str)
        // Tell cargo to invalidate the built crate whenever any of the
        // included header files changed.
-        .parse_callbacks(Box::new(CargoCallbacks))
+        .parse_callbacks(Box::new(bindgen::CargoCallbacks::new()))
        // Finish the builder and generate the bindings.
        .generate()
        // Unwrap the Result and panic on failure.
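The build-script change tracks a bindgen deprecation: newer bindgen releases replace the `CargoCallbacks` unit-struct value with the `bindgen::CargoCallbacks::new()` constructor, which also emits rerun-if-changed lines for included headers. A minimal build.rs sketch; the header path is illustrative, not the crate's real one:

    fn main() {
        let bindings = bindgen::Builder::default()
            .header("c/btree.h") // hypothetical header path
            .parse_callbacks(Box::new(bindgen::CargoCallbacks::new()))
            .generate()
            .expect("unable to generate bindings");

        let out = std::path::PathBuf::from(std::env::var("OUT_DIR").unwrap());
        bindings
            .write_to_file(out.join("bindings.rs"))
            .expect("couldn't write bindings");
    }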
@@ -158,7 +158,7 @@ off2addr(vaof_t off)
 
 /* NMEMB: number of members in array, a */
 #define NMEMB(a) \
-  (sizeof(a[0]) / sizeof(a))
+  (sizeof(a) / sizeof(a[0]))
 
 #define offsetof(st, m) \
   __builtin_offsetof(st, m)
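This is a genuine bug fix, not just lint noise: the element-count idiom is sizeof(array) / sizeof(array[0]), and the old macro had the operands inverted, so integer division returned 0 for any array longer than one element. A sketch of the difference:

    #include <assert.h>
    #include <stdio.h>

    #define NMEMB_OLD(a) (sizeof(a[0]) / sizeof(a)) /* buggy: 0 for len > 1 */
    #define NMEMB(a)     (sizeof(a) / sizeof(a[0])) /* fixed: element count */

    int main(void) {
      int xs[8] = {0};
      assert(NMEMB_OLD(xs) == 0); /* e.g. 4 / 32 truncates to 0 */
      assert(NMEMB(xs) == 8);     /* e.g. 32 / 4 */
      printf("NMEMB(xs) = %zu\n", NMEMB(xs));
      return 0;
    }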
@@ -236,8 +236,8 @@ struct BT_page {
   BT_pageheader head;               /* header */
   union {                           /* data section */
     BT_dat datd[BT_DAT_MAXENTRIES]; /* union view */
-    BT_kv datk[0];                  /* struct view */
-    BYTE datc[0];                   /* byte-level view */
+    BT_kv datk[BT_DAT_MAXKEYS];     /* struct view */
+    BYTE datc[BT_DAT_MAXBYTES];     /* byte-level view */
   };
 };
 static_assert(sizeof(BT_page) == BT_PAGESIZE);
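Zero-length arrays are a GNU extension that compilers warn about outside their flexible-array-member niche. Since the union's size is already pinned by its largest member, the other views can carry explicit matching sizes instead, and the static_assert still holds. A sketch with illustrative constants (not the real BT_* values):

    #include <assert.h>
    #include <stdint.h>

    #define PAGE_BYTES 64
    typedef struct { uint32_t key, val; } kv_t;

    struct page {
      union {
        uint64_t words[PAGE_BYTES / 8]; /* largest view fixes the size */
        kv_t     pairs[PAGE_BYTES / 8]; /* was: pairs[0]; 8 bytes per kv_t */
        uint8_t  bytes[PAGE_BYTES];     /* was: bytes[0] */
      };
    };

    int main(void) {
      /* every view spans the same storage, so the union stays one page */
      _Static_assert(sizeof(struct page) == PAGE_BYTES, "page size pinned");
      struct page p = {0};
      p.pairs[0].key = 7;
      assert(p.bytes[0] == 7 || p.bytes[3] == 7); /* little- or big-endian */
      return 0;
    }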
@@ -567,6 +567,9 @@ _node_cow(BT_state *state, BT_page *node, pgno_t *pgno)
   return BT_SUCC;
 }
 
+static void *
+_bt_bsearch(BT_page *page, vaof_t va) __attribute((unused));
+
 /* binary search a page's data section for a va. Returns a pointer to the found BT_dat */
 static void *
 _bt_bsearch(BT_page *page, vaof_t va)
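This hunk and the five below it all apply the same trick for -Wunused-function: a static function kept around for debugging gets a forward declaration carrying `__attribute((unused))` (the same spelling btree.c already uses), so GCC/Clang stop warning when no call site exists while the definition itself stays clean. A standalone sketch:

    #include <stdio.h>

    static int
    _debug_dump(int level) __attribute((unused));

    static int
    _debug_dump(int level)
    {
      fprintf(stderr, "debug level %d\n", level);
      return 0;
    }

    int main(void) {
      /* _debug_dump is never called; without the attribute,
       * -Wall -Wunused-function would warn here. */
      return 0;
    }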
@@ -655,6 +658,9 @@ _bt_find(BT_state *state, BT_findpath *path, vaof_t lo, vaof_t hi)
   return _bt_find2(state, root, path, maxdepth, lo, hi);
 }
 
+static int
+_bt_findpath_is_root(BT_findpath *path) __attribute((unused));
+
 static int
 _bt_findpath_is_root(BT_findpath *path)
 {
@@ -770,6 +776,9 @@ _bt_split_child(BT_state *state, BT_page *parent, size_t i, pgno_t *newchild)
   return BT_SUCC;
 }
 
+static int
+_bt_rebalance(BT_state *state, BT_page *node) __attribute((unused));
+
 static int
 _bt_rebalance(BT_state *state, BT_page *node)
 {
@@ -1596,6 +1605,9 @@ struct BT_ppage {
   BT_page *parent;
 };
 
+static int
+_bt_delete(BT_state *state, vaof_t lo, vaof_t hi) __attribute((unused));
+
 static int
 _bt_delete(BT_state *state, vaof_t lo, vaof_t hi)
 {
@@ -2511,6 +2523,9 @@ _bt_falloc(BT_state *state, size_t pages)
   return ret;
 }
 
+static int
+_bt_sync_hasdirtypage(BT_state *state, BT_page *node) __attribute((unused));
+
 static int
 _bt_sync_hasdirtypage(BT_state *state, BT_page *node)
 /* ;;: could be more efficiently replaced by a gcc vectorized builtin */
@@ -3188,6 +3203,9 @@ _sham_sync2(BT_state *state, BT_page *node, uint8_t depth, uint8_t maxdepth)
   }
 }
 
+static void
+_sham_sync(BT_state *state) __attribute((unused));
+
 static void
 _sham_sync(BT_state *state)
 {