Add SlabAllocator::reset
marc0246 committed Oct 20, 2024
1 parent 90f3d8e commit 257a3de
Showing 1 changed file with 38 additions and 7 deletions.
45 changes: 38 additions & 7 deletions src/lib.rs
@@ -135,11 +135,13 @@ use core::{
 pub struct SlabAllocator<T> {
     free_list_head: Cell<Option<NonNull<Slot<T>>>>,
     slab_list_head: Cell<Option<NonNull<Slab<T>>>>,
-    /// Points to the slot of the slab of the head of the slab list where the free slots start.
-    /// If the slab list is empty then this is dangling.
+    /// The next free slab that should be used, if any.
+    slab_list_next: Cell<Option<NonNull<Slab<T>>>>,
+    /// Points to where the free slots start in a fresh slab. If the slab list is empty then this
+    /// is dangling.
     free_start: Cell<NonNull<Slot<T>>>,
-    /// Points to the end of the slab of the head of the slab list. If the slab list is empty then
-    /// this is dangling.
+    /// Points to where the free slots end in a fresh slab. If the slab list is empty then this is
+    /// dangling.
     free_end: Cell<NonNull<Slot<T>>>,
     slab_capacity: NonZeroUsize,
 }
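For orientation, the `Slab<T>` header type itself is not part of this diff. Inferred from the uses of `(*slab.as_ptr()).next` and `addr_of_mut!((*slab.as_ptr()).slots)` in the hunks below, it presumably looks roughly like the following sketch; the actual definition in the crate may differ.

// Hypothetical reconstruction, not taken from the commit.
struct Slab<T> {
    /// Intrusive link to the next slab in the slab list; this is what the new
    /// `slab_list_next` cursor walks when previously allocated slabs are reused.
    next: Option<NonNull<Slab<T>>>,
    /// The slots of this slab, assumed to be laid out inline after the header.
    slots: [Slot<T>; 0],
}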
@@ -175,6 +177,7 @@ impl<T> SlabAllocator<T> {
         SlabAllocator {
             free_list_head: Cell::new(None),
             slab_list_head: Cell::new(None),
+            slab_list_next: Cell::new(None),
             free_start: Cell::new(dangling),
             free_end: Cell::new(dangling),
             slab_capacity,
@@ -292,10 +295,20 @@ impl<T> SlabAllocator<T> {
 
     #[cold]
     fn allocate_slow(&self) -> Result<NonNull<Slot<T>>, AllocError> {
-        let slab = self.add_slab()?;
+        let slab = if let Some(slab) = self.slab_list_next.get() {
+            // SAFETY: `slab` being in the slab list means it refers to a currently allocated slab
+            // and that its header is properly initialized.
+            let next = unsafe { (*slab.as_ptr()).next };
 
-        // SAFETY: The allocation succeeded, which means we've been given at least the slab header,
-        // so the offset must be in range.
+            self.slab_list_next.set(next);
+
+            slab
+        } else {
+            self.add_slab()?
+        };
+
+        // SAFETY: We either got an existing slab or successfully allocated a new one above, and a
+        // slab always includes at least the slab header, so the offset must be in range.
         let slots = unsafe { NonNull::new_unchecked(ptr::addr_of_mut!((*slab.as_ptr()).slots)) };
 
         // SAFETY:
@@ -408,6 +421,24 @@ impl<T> SlabAllocator<T> {
         self.free_list_head.set(Some(ptr));
     }
 
+    /// Resets the allocator, deallocating all currently allocated slots at once.
+    ///
+    /// This operation is *O*(1).
+    ///
+    /// # Safety
+    ///
+    /// This function semantically behaves as if [`deallocate`] was called for every currently
+    /// allocated slot.
+    ///
+    /// [`deallocate`]: Self::deallocate
+    pub unsafe fn reset(&self) {
+        self.free_list_head.set(None);
+        self.slab_list_next.set(self.slab_list_head.get());
+        let dangling = NonNull::dangling();
+        self.free_start.set(dangling);
+        self.free_end.set(dangling);
+    }
+
     fn slab_count(&self) -> usize {
         let mut head = self.slab_list_head.get();
         let mut count = 0;
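A minimal usage sketch of the new method (not part of the commit): the `new` and `allocate` calls below are assumed from the crate's existing public API and their exact signatures may differ. `reset` invalidates every live slot in one O(1) step while keeping the already-allocated slabs around for reuse via `slab_list_next`.

use slabbin::SlabAllocator;

fn main() {
    // Assumed constructor: per-slab capacity of 4 slots.
    let allocator = SlabAllocator::<i32>::new(4);

    // Assumed existing API: hand out two slots.
    let _a = allocator.allocate();
    let _b = allocator.allocate();

    // SAFETY: the slots above are never accessed again, so this is semantically
    // the same as calling `deallocate` for each of them.
    unsafe { allocator.reset() };

    // The slabs allocated so far are retained; `slab_list_next` now points at the
    // head of the slab list, so this allocation reuses existing memory instead of
    // requesting a new slab.
    let _c = allocator.allocate();
}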
