mirror of https://github.com/ProvableHQ/leo.git (synced 2024-12-29 13:14:05 +03:00)

delete all unsafe instead of commenting out

parent 6cc70015d8
commit 6dbc21484f
@@ -1,175 +0,0 @@
// Copyright (C) 2019-2022 Aleo Systems Inc.
// This file is part of the Leo library.

// The Leo library is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// The Leo library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.

// Copyright Rust project developers under MIT or APACHE-2.0.

use core::alloc::Layout;
use core::cell::{Cell, RefCell};
use core::mem::{self, MaybeUninit};
use core::{cmp, ptr, slice};
use std::iter;

// The arenas start with PAGE-sized chunks, and then each new chunk is twice as
// big as its predecessor, up until we reach HUGE_PAGE-sized chunks, whereupon
// we stop growing. This scales well, from arenas that are barely used up to
// arenas that are used for 100s of MiBs. Note also that the chosen sizes match
// the usual sizes of pages and huge pages on Linux.
const PAGE: usize = 4096;
const HUGE_PAGE: usize = 2 * 1024 * 1024;

pub struct DroplessArena {
    /// A pointer to the start of the free space.
    start: Cell<*mut u8>,

    /// A pointer to the end of free space.
    ///
    /// The allocation proceeds from the end of the chunk towards the start.
    /// When this pointer crosses the start pointer, a new chunk is allocated.
    end: Cell<*mut u8>,

    /// A vector of arena chunks.
    chunks: RefCell<Vec<TypedArenaChunk<u8>>>,
}

unsafe impl Send for DroplessArena {}

impl Default for DroplessArena {
    #[inline]
    fn default() -> DroplessArena {
        DroplessArena {
            start: Cell::new(ptr::null_mut()),
            end: Cell::new(ptr::null_mut()),
            chunks: Default::default(),
        }
    }
}

impl DroplessArena {
    #[inline(never)]
    #[cold]
    fn grow(&self, additional: usize) {
        unsafe {
            let mut chunks = self.chunks.borrow_mut();

            let new_cap = if let Some(last_chunk) = chunks.last_mut() {
                // If the previous chunk's len is less than HUGE_PAGE bytes,
                // then this chunk will be at least double the previous chunk's size.
                last_chunk.storage.len().min(HUGE_PAGE / 2) * 2
            } else {
                PAGE
            };
            // Also ensure that this chunk can fit `additional`.
            let new_cap = cmp::max(additional, new_cap);

            let mut chunk = TypedArenaChunk::<u8>::new(new_cap);
            self.start.set(chunk.start());
            self.end.set(chunk.end());
            chunks.push(chunk);
        }
    }

    /// Allocates a byte slice with specified layout from the current memory chunk.
    /// Returns `None` if there is no free space left to satisfy the request.
    #[inline]
    fn alloc_raw_without_grow(&self, layout: Layout) -> Option<*mut u8> {
        let start = self.start.get() as usize;
        let end = self.end.get() as usize;

        let align = layout.align();
        let bytes = layout.size();

        let new_end = end.checked_sub(bytes)? & !(align - 1);
        if start <= new_end {
            let new_end = new_end as *mut u8;
            self.end.set(new_end);
            Some(new_end)
        } else {
            // There's no more space since we're growing towards the start.
            None
        }
    }

    #[inline]
    pub fn alloc_raw(&self, layout: Layout) -> *mut u8 {
        assert!(layout.size() != 0);
        loop {
            if let Some(a) = self.alloc_raw_without_grow(layout) {
                break a;
            }
            // No free space left. Allocate a new chunk to satisfy the request.
            // On failure the grow will panic or abort.
            self.grow(layout.size());
        }
    }

    /// Allocates a slice of objects that are copied into the `DroplessArena`,
    /// returning a mutable reference to it.
    /// Will panic if passed a zero-sized type.
    ///
    /// Panics:
    ///
    /// - Zero-sized types
    /// - Zero-length slices
    #[inline]
    #[allow(clippy::mut_from_ref)]
    pub fn alloc_slice<T: Copy>(&self, slice: &[T]) -> &mut [T] {
        assert!(!mem::needs_drop::<T>());
        assert!(mem::size_of::<T>() != 0);
        assert!(!slice.is_empty());

        let mem = self.alloc_raw(Layout::for_value::<[T]>(slice)) as *mut T;

        unsafe {
            mem.copy_from_nonoverlapping(slice.as_ptr(), slice.len());
            slice::from_raw_parts_mut(mem, slice.len())
        }
    }
}

struct TypedArenaChunk<T> {
    /// The raw storage for the arena chunk.
    storage: Box<[MaybeUninit<T>]>,
}

impl<T> TypedArenaChunk<T> {
    #[inline]
    unsafe fn new(capacity: usize) -> TypedArenaChunk<T> {
        TypedArenaChunk {
            // HACK(Centril) around `Box::new_uninit_slice` not being stable.
            storage: iter::repeat_with(MaybeUninit::<T>::uninit).take(capacity).collect(),
        }
    }

    // Returns a pointer to the first allocated object.
    #[inline]
    fn start(&mut self) -> *mut T {
        // HACK(Centril) around `MaybeUninit::slice_as_mut_ptr` not being stable.
        self.storage.as_mut_ptr() as *mut T
    }

    // Returns a pointer to the end of the allocated space.
    #[inline]
    fn end(&mut self) -> *mut T {
        unsafe {
            if mem::size_of::<T>() == 0 {
                // A pointer as large as possible for zero-sized elements.
                !0 as *mut T
            } else {
                self.start().add(self.storage.len())
            }
        }
    }
}

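The comments in the deleted arena describe two pieces of arithmetic: chunk sizes start at PAGE and double until they reach HUGE_PAGE, and each allocation bumps the end pointer downward, rounded to the layout's alignment. Below is a minimal, self-contained sketch of that arithmetic; the helper names `next_chunk_size` and `bump_down` and the example addresses are illustrative only, not part of the Leo code.

const PAGE: usize = 4096;
const HUGE_PAGE: usize = 2 * 1024 * 1024;

// Next chunk size: at least double the previous chunk (capped so doubling
// never exceeds HUGE_PAGE), and always large enough for `additional` bytes.
fn next_chunk_size(prev_len: Option<usize>, additional: usize) -> usize {
    let doubled = match prev_len {
        Some(len) => len.min(HUGE_PAGE / 2) * 2,
        None => PAGE,
    };
    doubled.max(additional)
}

// Downward bump allocation within [start, end): subtract the requested size,
// then align down with `& !(align - 1)` (alignments are powers of two).
fn bump_down(start: usize, end: usize, size: usize, align: usize) -> Option<usize> {
    let new_end = end.checked_sub(size)? & !(align - 1);
    (start <= new_end).then_some(new_end)
}

fn main() {
    assert_eq!(next_chunk_size(None, 16), PAGE);
    assert_eq!(next_chunk_size(Some(PAGE), 16), 2 * PAGE);
    assert_eq!(next_chunk_size(Some(HUGE_PAGE), 16), HUGE_PAGE);
    assert_eq!(bump_down(0x1000, 0x2000, 24, 8), Some(0x1FE8));
    assert_eq!(bump_down(0x1000, 0x2000, 0x2000, 8), None);
}
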
@@ -14,8 +14,6 @@
// You should have received a copy of the GNU General Public License
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.

// mod dropless;

pub mod symbol;
pub use symbol::{sym, Symbol};

@@ -369,15 +369,9 @@ fn normalize_newlines(src: &mut String) {

    // Account for removed `\r`.
    // After `buf.truncate(..)`, `buf` is guaranteed to contain utf-8 again.
    // N.B., this is the safe version of the below.
    let new_len = buf.len() - gap_len;
    buf.truncate(new_len);
    *src = String::from_utf8(buf).unwrap();
    // // After `set_len`, `buf` is guaranteed to contain utf-8 again.
    // unsafe {
    // buf.set_len(new_len);
    // *src = String::from_utf8_unchecked(buf);
    // }

fn find_crlf(src: &[u8]) -> Option<usize> {
    let mut search_idx = 0;

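The hunk above keeps the safe tail of `normalize_newlines`: `Vec::truncate` plus the checked `String::from_utf8` replace the commented-out `set_len` / `from_utf8_unchecked` pair that this commit deletes. A minimal standalone sketch of that pattern, with a made-up buffer standing in for the real gap-copying loop:

fn main() {
    // Pretend the copy loop already shifted "a\nb\n" to the front of `buf`,
    // leaving `gap_len` stale bytes (the two removed `\r`s) at the end.
    let mut buf = b"a\nb\n\r\r".to_vec();
    let gap_len = 2;

    let new_len = buf.len() - gap_len;
    buf.truncate(new_len); // safe replacement for `set_len`
    let src = String::from_utf8(buf).unwrap(); // checked replacement for `from_utf8_unchecked`
    assert_eq!(src, "a\nb\n");
}
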
@@ -420,8 +414,6 @@ fn analyze_source_file(src: &str, source_file_start_pos: BytePos) -> (Vec<BytePo
    while i < src.len() {
        let i_usize = i as usize;
        let byte = src_bytes[i_usize];
        // SAFETY: We verified that i < src.len().
        // let byte = unsafe { *src_bytes.get_unchecked(i_usize) };

        // How much to advance to get to the next UTF-8 char in the string.
        let mut char_len = 1;

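In this hunk the bounds-checked indexing `src_bytes[i_usize]` stays and the `get_unchecked` variant goes; since the loop condition already guarantees `i < src.len()`, the only cost of the safe form is the bounds check. A tiny sketch of the checked loop, using an illustrative input string:

fn main() {
    let src = "hé!";
    let src_bytes = src.as_bytes();
    let mut i = 0;
    while i < src.len() {
        let byte = src_bytes[i]; // checked indexing instead of `get_unchecked`
        print!("{byte:02x} ");
        i += 1;
    }
    println!();
}
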
@@ -242,8 +242,6 @@ impl Symbol {
            None => unreachable!(),
            Some(x) => x,
        })
        // // SAFETY: per above addition, we know `index > 0` always applies.
        // Self(unsafe { NonZeroU32::new_unchecked(index) })
    }

    /// Maps a string to its interned representation.

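The `Symbol` hunk keeps the safe `NonZeroU32::new` construction, making the impossible zero case an explicit `unreachable!()` instead of calling `new_unchecked`. A minimal sketch, assuming (as the removed comment notes) that the caller has already ensured the index is non-zero:

use std::num::NonZeroU32;

fn symbol_index(index: u32) -> NonZeroU32 {
    match NonZeroU32::new(index) {
        None => unreachable!(),
        Some(x) => x,
    }
}

fn main() {
    assert_eq!(symbol_index(5).get(), 5);
}
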
@@ -398,18 +396,6 @@ impl Interner {
            return Symbol::new(sym as u32);
        }

        // // SAFETY: `from_utf8_unchecked` is safe since we just allocated a `&str`,
        // // which is known to be UTF-8.
        // let bytes = arena.alloc_slice(string.as_bytes());
        // let string: &str = unsafe { str::from_utf8_unchecked(bytes) };
        //
        // unsafe fn transmute_lt<'a, 'b, T: ?Sized>(x: &'a T) -> &'b T {
        //     transmute(x)
        // }
        //
        // // SAFETY: Extending to `'static` is fine. Accesses only happen while the arena is alive.
        // let string: &'static _ = unsafe { transmute_lt(string) };

        Symbol::new(set.insert_full(InternedStr::Owned(string.into())).0 as u32)
    }

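The `Interner` hunk drops the arena-allocate-and-transmute path entirely and keeps the owned-string one: the string is stored in the set as `InternedStr::Owned` and the insertion index becomes the `Symbol`. A minimal sketch of that shape, assuming a set with `insert_full`/`get_index_of` semantics such as `indexmap::IndexSet` (the concrete set type and the `InternedStr` wrapper are not shown in this hunk, so plain `String` keys stand in for them here):

use indexmap::IndexSet;

#[derive(Default)]
struct Interner {
    set: IndexSet<String>,
}

impl Interner {
    fn intern(&mut self, string: &str) -> u32 {
        // Fast path: the string is already interned.
        if let Some(sym) = self.set.get_index_of(string) {
            return sym as u32;
        }
        // Slow path: store an owned copy; its index is the symbol.
        self.set.insert_full(string.to_owned()).0 as u32
    }
}

fn main() {
    let mut interner = Interner::default();
    assert_eq!(interner.intern("hello"), interner.intern("hello"));
    assert_ne!(interner.intern("hello"), interner.intern("world"));
}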