fix: inconsistencies with buffer size
@@ -11,7 +11,7 @@ const INITIAL_HEAP_SIZE: usize = MEMFD_INITIAL_SIZE;
 const METADATA_SIZE: usize = size_of::<Metadata>();
 
 pub(crate) static ALLOCATOR: LazyLock<Mutex<Allocator>> =
-    LazyLock::new(|| unsafe { Mutex::new(Allocator::new()) });
+    LazyLock::new(|| Mutex::new(Allocator::new()));
 
 struct Metadata {
     chunk: NonNull<Chunk>,
@@ -34,14 +34,14 @@ unsafe impl Send for Chunk {}
 unsafe impl Send for Allocator {}
 
 impl Allocator {
-    unsafe fn new() -> Self {
+    fn new() -> Self {
         let mut allocator = BUMP_ALLOCATOR.lock().unwrap();
 
-        let mem = allocator.alloc(INITIAL_HEAP_SIZE).unwrap();
+        let mem = unsafe { allocator.alloc(INITIAL_HEAP_SIZE).unwrap() };
 
         let head = Box::new(Chunk {
-            buffer: mem.byte_add(METADATA_SIZE),
-            size: INITIAL_HEAP_SIZE,
+            buffer: unsafe { mem.byte_add(METADATA_SIZE) },
+            size: INITIAL_HEAP_SIZE - METADATA_SIZE,
             in_use: false,
             next_chunk: None,
             prev_chunk: None,
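Side note on the fix above: the Metadata header sits at the start of the initial mapping, in front of the first chunk's buffer, so the chunk can only advertise INITIAL_HEAP_SIZE - METADATA_SIZE usable bytes; the old code reported the full mapping size and overstated the buffer by METADATA_SIZE bytes. A standalone sketch of the arithmetic, with assumed constant values standing in for MEMFD_INITIAL_SIZE and size_of::<Metadata>():

// Not part of the diff: illustrative constants only.
const INITIAL_HEAP_SIZE: usize = 4096; // assumed stand-in for MEMFD_INITIAL_SIZE
const METADATA_SIZE: usize = 16;       // assumed stand-in for size_of::<Metadata>()

fn main() {
    // The mapping starts with a Metadata header, followed by the usable buffer.
    let buffer_offset = METADATA_SIZE;
    let usable = INITIAL_HEAP_SIZE - METADATA_SIZE;
    // With the old `size: INITIAL_HEAP_SIZE`, buffer_offset + size overran the
    // mapping by METADATA_SIZE bytes; with the fix it ends exactly at the mapping end.
    assert_eq!(buffer_offset + usable, INITIAL_HEAP_SIZE);
}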
@@ -49,27 +49,32 @@ impl Allocator {
         let head = NonNull::new(Box::leak(head)).unwrap();
 
         let mem = mem as *mut Metadata;
-        *mem = Metadata { chunk: head };
+        unsafe {
+            *mem = Metadata { chunk: head };
+        }
 
         Allocator { head, tail: head }
     }
 
-    pub(crate) unsafe fn allocate(&mut self, size: usize) -> Option<*mut u8> {
+    pub(crate) fn allocate(&mut self, size: usize) -> Option<*mut u8> {
         let size = (size + ALIGNMENT - 1) / ALIGNMENT * ALIGNMENT;
 
         let mut head = Some(self.head);
 
         while head.is_some() {
-            let current_head = head.unwrap().as_mut();
+            // The heap uses a global Mutex. Only one thread can operate on it at a time.
+            let current_head = unsafe { head.unwrap().as_mut() };
 
             if !current_head.in_use && current_head.size >= size {
-                if current_head.size == size {
+                if current_head.size < (size + METADATA_SIZE + ALIGNMENT) {
                     current_head.in_use = true;
                     return Some(current_head.buffer);
                 }
 
                 let unused_space = Box::new(Chunk {
-                    buffer: current_head.buffer.byte_add(size + METADATA_SIZE),
+                    // We know that size of buffer is larger than size + METADATA_SIZE + ALIGNMENT.
+                    // Therefore size + METADATA_SIZE is still inside of the buffer.
+                    buffer: unsafe { current_head.buffer.byte_add(size + METADATA_SIZE) },
                     size: current_head.size - size - METADATA_SIZE,
                     in_use: false,
                     next_chunk: current_head.next_chunk,
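Side note on the changed condition: the exact-fit check (`current_head.size == size`) became a threshold, so a free chunk is handed out whole whenever splitting it would not leave room for the remainder's Metadata header plus at least one ALIGNMENT-sized slot; only larger chunks get split. A standalone sketch of that decision, with assumed METADATA_SIZE and ALIGNMENT values:

// Not part of the diff: illustrative values only.
const METADATA_SIZE: usize = 16; // assumed
const ALIGNMENT: usize = 8;      // assumed

// True when a free chunk of `chunk_size` bytes should be handed out whole
// for a request of `request` bytes (already rounded up to ALIGNMENT).
fn take_whole(chunk_size: usize, request: usize) -> bool {
    // Splitting needs room for the remainder's Metadata header plus at least
    // one aligned slot; otherwise the leftover chunk would be unusable.
    chunk_size < request + METADATA_SIZE + ALIGNMENT
}

fn main() {
    let request = (10 + ALIGNMENT - 1) / ALIGNMENT * ALIGNMENT; // 10 rounds up to 16
    assert!(take_whole(16, request));  // exact fit: no split
    assert!(take_whole(32, request));  // remainder too small for metadata + one slot
    assert!(!take_whole(64, request)); // enough left over: split off a free chunk
}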
@@ -77,11 +82,20 @@ impl Allocator {
                 });
                 let ptr = NonNull::new(Box::leak(unused_space)).unwrap();
 
-                *(ptr.as_ref().buffer.byte_sub(METADATA_SIZE) as *mut Metadata) =
-                    Metadata { chunk: ptr };
+                // buffer points to current_head + size + METADATA_SIZE.
+                // Therefore buffer - METADATA_SIZE points right after the buffer of current_head
+                // and right before the buffer of unused_space.
+                // This is where the pointer to the metadata chunk is expected.
+                unsafe {
+                    *(ptr.as_ref().buffer.byte_sub(METADATA_SIZE) as *mut Metadata) =
+                        Metadata { chunk: ptr };
+                }
 
-                if ptr.as_ref().next_chunk.is_none() {
-                    self.tail = ptr;
+                // We know that accessing ptr is safe since we just allocated it.
+                unsafe {
+                    if ptr.as_ref().next_chunk.is_none() {
+                        self.tail = ptr;
+                    }
                 }
 
                 current_head.in_use = true;
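The comments added above describe the back-pointer layout: every chunk's buffer is immediately preceded by a Metadata record holding a pointer to the owning Chunk, which is what lets deallocate step back METADATA_SIZE bytes from a raw buffer pointer and find the chunk again. A self-contained sketch of that round trip using simplified stand-in types (the real allocator hands out suitably aligned memory, so it writes the header directly rather than via unaligned accesses):

// Not part of the diff: Chunk and Metadata here are simplified stand-ins.
use std::mem::size_of;
use std::ptr::NonNull;

struct Chunk { size: usize }
struct Metadata { chunk: NonNull<Chunk> }

fn main() {
    let chunk = NonNull::from(Box::leak(Box::new(Chunk { size: 64 })));

    // One contiguous region: [ Metadata | buffer ... ]
    let metadata_size = size_of::<Metadata>();
    let mut region = vec![0u8; metadata_size + 64];
    let base = region.as_mut_ptr();

    unsafe {
        // Write the back-pointer at the start of the region (unaligned only
        // because Vec<u8> makes no alignment promise in this sketch).
        (base as *mut Metadata).write_unaligned(Metadata { chunk });
        // buffer - METADATA_SIZE recovers the owning chunk, as deallocate does.
        let buffer = base.add(metadata_size);
        let meta = (buffer.sub(metadata_size) as *const Metadata).read_unaligned();
        assert_eq!(meta.chunk.as_ref().size, 64);
    }
}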
@@ -99,9 +113,9 @@ impl Allocator {
         None
     }
 
-    pub(crate) unsafe fn deallocate(&mut self, ptr: *mut u8) {
-        let metadata = ptr.byte_sub(METADATA_SIZE) as *mut Metadata;
-        let metadata = (*metadata).chunk.as_mut();
+    pub(crate) fn deallocate(&mut self, ptr: *mut u8) {
+        let metadata = unsafe { ptr.byte_sub(METADATA_SIZE) as *mut Metadata };
+        let metadata = unsafe { (*metadata).chunk.as_mut() };
 
         debug_assert_eq!(metadata.in_use, true);
         debug_assert_eq!(metadata.buffer, ptr);
@@ -13,7 +13,7 @@ impl<const N: usize> SharedPtr<N> {
     pub fn new() -> Option<Self> {
         let mut allocator = ALLOCATOR.lock().unwrap();
 
-        let buf = unsafe { allocator.allocate(N)? };
+        let buf = allocator.allocate(N)?;
 
         Some(SharedPtr(buf))
     }
@@ -51,9 +51,7 @@ impl<const N: usize> Drop for SharedPtr<N> {
     fn drop(&mut self) {
         let mut allocator = ALLOCATOR.lock().unwrap();
 
-        unsafe {
-            allocator.deallocate(self.0);
-        }
+        allocator.deallocate(self.0);
     }
 }
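Taken together, the SharedPtr hunks leave a plain RAII pattern with no unsafe at the call sites: new() takes the global ALLOCATOR mutex and allocates, and Drop takes it again and deallocates the same pointer. A self-contained sketch of the same shape, using a stand-in Pool and Handle rather than the crate's real types:

// Not part of the diff: Pool and Handle are stand-ins for Allocator and SharedPtr.
use std::sync::{LazyLock, Mutex};

struct Pool { outstanding: usize }

impl Pool {
    fn allocate(&mut self, n: usize) -> Option<Vec<u8>> {
        self.outstanding += 1;
        Some(vec![0u8; n])
    }
    fn deallocate(&mut self) {
        self.outstanding -= 1;
    }
}

static POOL: LazyLock<Mutex<Pool>> =
    LazyLock::new(|| Mutex::new(Pool { outstanding: 0 }));

struct Handle(Vec<u8>);

impl Handle {
    fn new(n: usize) -> Option<Self> {
        // Like SharedPtr::new: lock the global pool, then allocate.
        let mut pool = POOL.lock().unwrap();
        Some(Handle(pool.allocate(n)?))
    }
}

impl Drop for Handle {
    fn drop(&mut self) {
        // Like SharedPtr::drop: lock again and hand the allocation back.
        POOL.lock().unwrap().deallocate();
    }
}

fn main() {
    {
        let _h = Handle::new(64).unwrap();
        assert_eq!(POOL.lock().unwrap().outstanding, 1);
    }
    // Dropping the handle returned the allocation to the pool.
    assert_eq!(POOL.lock().unwrap().outstanding, 0);
}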