//! Arena allocation, string interning, object pooling, and stack buffers.
//!
//! Vendoring notes (from the original commit message):
//! - Replaced the `rust/crabrl-fork` gitlink with tracked source files.
//! - Workspace notes document why the fork is vendored.
//! - Ignore rules updated for vendored fork build artifacts.
use bumpalo::Bump;
use compact_str::CompactString;
use parking_lot::Mutex;
use std::cell::RefCell;
use std::collections::HashMap;
use std::mem::MaybeUninit;
use std::sync::Arc;
/// Capacity, in bytes, of each bump arena (64 MiB).
const ARENA_SIZE: usize = 64 * 1024 * 1024; // 64MB arenas

/// Maximum number of recycled objects an `ObjectPool` retains in `release`;
/// objects released beyond this cap are dropped.
const POOL_SIZE: usize = 1024;
/// Cache-line-aligned (64-byte) bump-arena allocator with an attached
/// string-interning table.
///
/// Not `Sync`: the `RefCell` fields restrict the allocator itself to a single
/// thread, even though the intern maps are wrapped in `Arc<Mutex<..>>`.
/// See the notes on `alloc` / `reset` for the lifetime contract callers must
/// uphold.
#[repr(align(64))]
pub struct ArenaAllocator {
    // Arena that currently receives new allocations.
    current: RefCell<Bump>,
    // Arenas retired by `new_arena`; kept alive so their allocations remain valid.
    arenas: RefCell<Vec<Bump>>,
    // Interned string -> dense id.
    string_to_id: Arc<Mutex<HashMap<CompactString, u32>>>,
    // Dense id -> interned string (the id is the index).
    id_to_string: Arc<Mutex<Vec<CompactString>>>,
}
impl ArenaAllocator {
    /// Creates an allocator with one pre-sized arena (`ARENA_SIZE` bytes) and
    /// empty string-interning tables.
    pub fn new() -> Self {
        Self {
            current: RefCell::new(Bump::with_capacity(ARENA_SIZE)),
            arenas: RefCell::new(Vec::with_capacity(16)),
            string_to_id: Arc::new(Mutex::new(HashMap::new())),
            id_to_string: Arc::new(Mutex::new(Vec::new())),
        }
    }

    /// Bump-allocates `val` in the current arena and returns a reference to it.
    /// `Drop` for `T` is never run (bump allocation does not track destructors).
    ///
    /// SAFETY: the raw-pointer round-trip detaches the reference from the
    /// short-lived `RefCell` guard; the allocation itself lives in heap chunks
    /// owned by the `Bump`, which stays alive inside `self`.
    /// NOTE(review): this is only sound while `reset()` is never called with
    /// such a reference still live — `reset` also takes `&self`, so the
    /// compiler cannot enforce that; callers must uphold it.
    #[inline(always)]
    pub fn alloc<T>(&self, val: T) -> &T {
        unsafe {
            let ptr = self.current.borrow().alloc(val) as *const T;
            &*ptr
        }
    }

    /// Copies `slice` into the current arena and returns the arena-owned copy.
    ///
    /// SAFETY: same reasoning (and the same `reset` caveat) as [`Self::alloc`].
    #[inline(always)]
    pub fn alloc_slice<T: Copy>(&self, slice: &[T]) -> &[T] {
        unsafe {
            let ptr = self.current.borrow().alloc_slice_copy(slice) as *const [T];
            &*ptr
        }
    }

    /// Copies `s` into the current arena and returns the arena-owned copy.
    ///
    /// SAFETY: same reasoning (and the same `reset` caveat) as [`Self::alloc`].
    #[inline(always)]
    pub fn alloc_str(&self, s: &str) -> &str {
        unsafe {
            let ptr = self.current.borrow().alloc_str(s) as *const str;
            &*ptr
        }
    }

    /// Interns `s`, returning a dense `u32` id; the same string always maps to
    /// the same id (until `reset` clears the tables).
    #[inline(always)]
    pub fn intern_string(&self, s: &str) -> u32 {
        let key = CompactString::from(s);

        // Check if already interned
        // (fast path: single lock, dropped at the end of this statement).
        if let Some(&id) = self.string_to_id.lock().get(&key) {
            return id;
        }

        // Add new interned string
        // Lock order is id_to_string then string_to_id; the fast-path guard
        // above is already released, so this consistent order cannot deadlock.
        let mut id_to_string = self.id_to_string.lock();
        let mut string_to_id = self.string_to_id.lock();

        // Double-check after acquiring both locks
        // (another holder of the shared Arc'd maps may have interned `s`
        // between the fast path and here).
        if let Some(&id) = string_to_id.get(&key) {
            return id;
        }

        // Ids are dense indices into `id_to_string`.
        let id = id_to_string.len() as u32;
        id_to_string.push(key.clone());
        string_to_id.insert(key, id);

        id
    }

    /// Returns a clone of the string interned under `id`, or `None` if `id`
    /// was never issued (or the tables were cleared by `reset`).
    #[inline(always)]
    pub fn get_interned(&self, id: u32) -> Option<CompactString> {
        self.id_to_string.lock().get(id as usize).cloned()
    }

    /// Returns a snapshot of all interned strings, indexed by their ids.
    pub fn get_all_strings(&self) -> Vec<CompactString> {
        self.id_to_string.lock().clone()
    }

    /// Number of distinct strings currently interned.
    pub fn string_count(&self) -> usize {
        self.id_to_string.lock().len()
    }

    /// Resets every arena and clears the interning tables, invalidating all
    /// previously issued intern ids.
    ///
    /// NOTE(review): any reference previously returned by `alloc*` dangles
    /// after this call — see the soundness note on [`Self::alloc`].
    pub fn reset(&self) {
        let mut current = self.current.borrow_mut();
        current.reset();

        let mut arenas = self.arenas.borrow_mut();
        for arena in arenas.iter_mut() {
            arena.reset();
        }

        // Clear string interning
        self.string_to_id.lock().clear();
        self.id_to_string.lock().clear();
    }

    /// Retires the current arena into `arenas` (moving a `Bump` does not move
    /// its heap chunks, so existing allocations stay valid) and installs a
    /// fresh `ARENA_SIZE` arena as the allocation target.
    pub fn new_arena(&self) {
        let mut arenas = self.arenas.borrow_mut();
        let old = std::mem::replace(
            &mut *self.current.borrow_mut(),
            Bump::with_capacity(ARENA_SIZE),
        );
        arenas.push(old);
    }
}
impl Default for ArenaAllocator {
|
|
fn default() -> Self {
|
|
Self::new()
|
|
}
|
|
}
|
|
|
|
pub struct ObjectPool<T> {
|
|
pool: Vec<Box<T>>,
|
|
factory: fn() -> T,
|
|
}
|
|
|
|
impl<T> ObjectPool<T> {
|
|
pub fn new(capacity: usize, factory: fn() -> T) -> Self {
|
|
let mut pool = Vec::with_capacity(capacity);
|
|
for _ in 0..capacity {
|
|
pool.push(Box::new(factory()));
|
|
}
|
|
Self { pool, factory }
|
|
}
|
|
|
|
#[inline(always)]
|
|
pub fn acquire(&mut self) -> Box<T> {
|
|
self.pool
|
|
.pop()
|
|
.unwrap_or_else(|| Box::new((self.factory)()))
|
|
}
|
|
|
|
#[inline(always)]
|
|
pub fn release(&mut self, obj: Box<T>) {
|
|
if self.pool.len() < POOL_SIZE {
|
|
self.pool.push(obj);
|
|
}
|
|
}
|
|
}
|
|
|
|
/// A fixed-capacity, cache-line-aligned (64-byte) byte buffer stored inline
/// (no heap allocation).
///
/// Invariant: only bytes `0..len` of `data` are initialized.
#[repr(C, align(64))]
pub struct StackBuffer<const N: usize> {
    /// Backing storage; elements `0..len` are initialized, the rest are not.
    data: [MaybeUninit<u8>; N],
    /// Number of initialized bytes (always `<= N`).
    len: usize,
}

impl<const N: usize> Default for StackBuffer<N> {
    /// Equivalent to [`StackBuffer::new`]: an empty buffer.
    fn default() -> Self {
        Self::new()
    }
}

impl<const N: usize> StackBuffer<N> {
    /// Creates an empty buffer.
    ///
    /// An array of `MaybeUninit` is always valid regardless of its contents,
    /// so no `unsafe` is required here (this replaces the previous
    /// `unsafe { MaybeUninit::uninit().assume_init() }` formulation with the
    /// safe, equivalent repeat expression — `MaybeUninit<u8>` is `Copy`).
    #[inline(always)]
    pub const fn new() -> Self {
        Self {
            data: [MaybeUninit::uninit(); N],
            len: 0,
        }
    }

    /// Appends `byte`, returning `false` (and leaving the buffer unchanged)
    /// when the buffer is already full.
    #[inline(always)]
    pub fn push(&mut self, byte: u8) -> bool {
        if self.len < N {
            self.data[self.len] = MaybeUninit::new(byte);
            self.len += 1;
            true
        } else {
            false
        }
    }

    /// Views the initialized prefix of the buffer as a byte slice.
    #[inline(always)]
    pub fn as_slice(&self) -> &[u8] {
        // SAFETY: `push` is the only writer and initializes `data[self.len]`
        // before incrementing `len`, so bytes `0..self.len` are initialized;
        // `MaybeUninit<u8>` has the same layout as `u8`, making the pointer
        // cast valid for reads of `self.len` bytes.
        unsafe { std::slice::from_raw_parts(self.data.as_ptr() as *const u8, self.len) }
    }

    /// Logically empties the buffer. Previously written bytes are merely
    /// treated as uninitialized again; nothing is overwritten.
    #[inline(always)]
    pub fn clear(&mut self) {
        self.len = 0;
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // `alloc_str` copies each input into the arena independently.
    #[test]
    fn test_arena_allocator() {
        let arena = ArenaAllocator::new();
        let s1 = arena.alloc_str("hello");
        let s2 = arena.alloc_str("world");
        assert_eq!(s1, "hello");
        assert_eq!(s2, "world");
    }

    // Interning the same string twice yields the same id, and the id
    // round-trips back to the original string.
    #[test]
    fn test_string_interning() {
        let arena = ArenaAllocator::new();
        let id1 = arena.intern_string("test");
        let id2 = arena.intern_string("test");
        assert_eq!(id1, id2);

        let s = arena.get_interned(id1).unwrap();
        assert_eq!(s, "test");
    }

    // Distinct strings receive distinct ids, each resolving correctly.
    #[test]
    fn test_string_interning_different() {
        let arena = ArenaAllocator::new();
        let id1 = arena.intern_string("foo");
        let id2 = arena.intern_string("bar");
        assert_ne!(id1, id2);

        assert_eq!(arena.get_interned(id1).unwrap(), "foo");
        assert_eq!(arena.get_interned(id2).unwrap(), "bar");
    }

    // `get_all_strings` snapshots every interned string.
    #[test]
    fn test_get_all_strings() {
        let arena = ArenaAllocator::new();
        arena.intern_string("a");
        arena.intern_string("b");
        arena.intern_string("c");

        let all = arena.get_all_strings();
        assert_eq!(all.len(), 3);
        assert!(all.contains(&CompactString::from("a")));
        assert!(all.contains(&CompactString::from("b")));
        assert!(all.contains(&CompactString::from("c")));
    }
}