54 lines
1.6 KiB
Rust
54 lines
1.6 KiB
Rust
|
use std::mem::size_of;
|
||
|
use std::sync::atomic::{AtomicUsize, Ordering};
|
||
|
|
||
|
/// A trivial bump allocator handing out temporary objects from a single
/// pre-allocated buffer that is shared with the wasm side.
pub(super) struct TempWasmAllocator {
    // Base of the host-side view of the buffer we allocate out of.
    data: *mut u8,
    // Address of the same buffer inside wasm linear memory; offsets into
    // `data` are added to this to produce wasm-visible pointers.
    wasm_pointer: u32,
    // Total bytes currently handed out; when this drops back to zero the
    // allocator rewinds to the start of the buffer (see `drop`).
    currently_allocated: AtomicUsize,
    // Bump offset: index of the next free byte within the buffer.
    offset_high: AtomicUsize,
}
|
||
|
|
||
|
impl TempWasmAllocator {
|
||
|
pub(super) fn new(data: *mut u8, wasm_pointer: u32) -> Self {
|
||
|
Self {
|
||
|
data,
|
||
|
wasm_pointer,
|
||
|
currently_allocated: AtomicUsize::new(0),
|
||
|
offset_high: AtomicUsize::new(0),
|
||
|
}
|
||
|
}
|
||
|
|
||
|
pub fn alloc<T>(&self) -> AllocatedObject<T> {
|
||
|
self.currently_allocated.fetch_add(size_of::<T>(), Ordering::SeqCst);
|
||
|
let ptr_offset = self.offset_high.fetch_add(size_of::<T>(), Ordering::SeqCst);
|
||
|
let ptr = unsafe { self.data.add(ptr_offset) } as *mut T;
|
||
|
AllocatedObject::<T> {
|
||
|
ptr,
|
||
|
wasm_pointer: self.wasm_pointer + ptr_offset as u32,
|
||
|
allocator: self as *const TempWasmAllocator,
|
||
|
}
|
||
|
}
|
||
|
|
||
|
pub fn drop<T>(&self) {
|
||
|
self.currently_allocated.fetch_sub(size_of::<T>(), Ordering::SeqCst);
|
||
|
// As soon as we've no longer allocated anything, we reset our allocating back to the start.
|
||
|
if self.currently_allocated.load(Ordering::SeqCst) == 0 {
|
||
|
self.offset_high.store(0, Ordering::SeqCst);
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
|
||
|
/// An object handed out by `TempWasmAllocator`; returns its space to the
/// allocator's accounting when dropped.
pub struct AllocatedObject<T> {
    /// Host-side pointer to the allocated `T`.
    pub ptr: *mut T,
    /// Pointer to the same object as seen from wasm linear memory.
    pub wasm_pointer: u32,
    // Back-pointer to the owning allocator, used on drop. NOTE(review): a
    // raw pointer, so the allocator must outlive this object — confirm the
    // owning code guarantees that.
    allocator: *const TempWasmAllocator,
}
|
||
|
|
||
|
impl<T> Drop for AllocatedObject<T> {
|
||
|
fn drop(&mut self) {
|
||
|
unsafe {
|
||
|
self.allocator.as_ref().unwrap().drop::<T>();
|
||
|
}
|
||
|
}
|
||
|
}
|