Compare commits

13 commits: 536fb4cac3...main
| SHA1 |
|---|
| ef9bcc94fb |
| 7f46573218 |
| 9b466a72bb |
| 9ec0b728c2 |
| 0200b3c913 |
| 18f1a1b9a6 |
| 7c336c8769 |
| 72b2e58244 |
| f8d497f3e1 |
| 5cd37dfc93 |
| e649e2b595 |
| 6edbb046fa |
| d409a38eca |
**allocator.rs**

```diff
@@ -11,7 +11,7 @@ const INITIAL_HEAP_SIZE: usize = MEMFD_INITIAL_SIZE;
 const METADATA_SIZE: usize = size_of::<Metadata>();
 
 pub(crate) static ALLOCATOR: LazyLock<Mutex<Allocator>> =
-    LazyLock::new(|| unsafe { Mutex::new(Allocator::new()) });
+    LazyLock::new(|| Mutex::new(Allocator::new()));
 
 struct Metadata {
     chunk: NonNull<Chunk>,
```
```diff
@@ -34,14 +34,14 @@ unsafe impl Send for Chunk {}
 unsafe impl Send for Allocator {}
 
 impl Allocator {
-    unsafe fn new() -> Self {
+    fn new() -> Self {
         let mut allocator = BUMP_ALLOCATOR.lock().unwrap();
 
-        let mem = allocator.alloc(INITIAL_HEAP_SIZE).unwrap();
+        let mem = unsafe { allocator.alloc(INITIAL_HEAP_SIZE).unwrap() };
 
         let head = Box::new(Chunk {
-            buffer: mem.byte_add(METADATA_SIZE),
-            size: INITIAL_HEAP_SIZE,
+            buffer: unsafe { mem.byte_add(METADATA_SIZE) },
+            size: INITIAL_HEAP_SIZE - METADATA_SIZE,
             in_use: false,
             next_chunk: None,
             prev_chunk: None,
```
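A note on the `size: INITIAL_HEAP_SIZE - METADATA_SIZE` fix: every chunk's `Metadata` header sits immediately in front of the buffer it describes, so the head chunk cannot hand out the full mapping. A minimal layout sketch (standalone; the `Metadata` definition is reduced to a raw pointer for illustration):

```rust
// Layout of one allocation inside the shared region (a sketch, not the
// crate's literal code):
//
//   [ Metadata ][ buffer ........................... ]
//   ^            ^
//   mem          mem + METADATA_SIZE  ==  Chunk::buffer
//
// The header stores a back-pointer to its Chunk, so a caller's buffer
// pointer can be mapped back to the bookkeeping node on free.
use std::mem::size_of;

struct Metadata {
    _chunk: *mut (), // NonNull<Chunk> in the real code
}

const METADATA_SIZE: usize = size_of::<Metadata>();
const INITIAL_HEAP_SIZE: usize = 1024 * 1024; // assumed, mirrors MEMFD_INITIAL_SIZE

fn main() {
    // The head chunk can only hand out what is left after its own header:
    let usable = INITIAL_HEAP_SIZE - METADATA_SIZE;
    assert!(usable < INITIAL_HEAP_SIZE);
    println!("header: {METADATA_SIZE} bytes, usable: {usable} bytes");
}
```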
```diff
@@ -49,27 +49,32 @@ impl Allocator {
         let head = NonNull::new(Box::leak(head)).unwrap();
 
         let mem = mem as *mut Metadata;
-        *mem = Metadata { chunk: head };
+        unsafe {
+            *mem = Metadata { chunk: head };
+        }
 
         Allocator { head, tail: head }
     }
 
-    pub(crate) unsafe fn allocate(&mut self, size: usize) -> Option<*mut u8> {
+    pub(crate) fn allocate(&mut self, size: usize) -> Option<*mut u8> {
         let size = (size + ALIGNMENT - 1) / ALIGNMENT * ALIGNMENT;
 
         let mut head = Some(self.head);
 
         while head.is_some() {
-            let current_head = head.unwrap().as_mut();
+            // The heap uses a global Mutex. Only one thread can operate on it at a time.
+            let current_head = unsafe { head.unwrap().as_mut() };
 
             if !current_head.in_use && current_head.size >= size {
-                if current_head.size == size {
+                if current_head.size < (size + METADATA_SIZE + ALIGNMENT) {
                     current_head.in_use = true;
                     return Some(current_head.buffer);
                 }
 
                 let unused_space = Box::new(Chunk {
-                    buffer: current_head.buffer.byte_add(size + METADATA_SIZE),
+                    // We know that size of buffer is larger than size + METADATA_SIZE + ALIGNMENT.
+                    // Therefore size + METADATA_SIZE is still inside of the buffer.
+                    buffer: unsafe { current_head.buffer.byte_add(size + METADATA_SIZE) },
                     size: current_head.size - size - METADATA_SIZE,
                     in_use: false,
                     next_chunk: current_head.next_chunk,
```
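Two details in this hunk benefit from a worked example: `(size + ALIGNMENT - 1) / ALIGNMENT * ALIGNMENT` rounds the request up to the alignment boundary, and the new `size < size + METADATA_SIZE + ALIGNMENT` guard hands out the whole chunk whenever the leftover could not hold another header plus one aligned allocation (the old `==` test also left the later `current_head.size - size - METADATA_SIZE` subtraction prone to underflow). A standalone sketch with assumed constants:

```rust
// Assumed for illustration; the crate defines its own constants.
const ALIGNMENT: usize = 16;
const METADATA_SIZE: usize = 8;

// Round `size` up to the next multiple of ALIGNMENT (same expression as the diff).
fn round_up(size: usize) -> usize {
    (size + ALIGNMENT - 1) / ALIGNMENT * ALIGNMENT
}

fn main() {
    assert_eq!(round_up(1), 16);
    assert_eq!(round_up(16), 16);
    assert_eq!(round_up(17), 32);

    // Splitting a free chunk only pays off if the remainder can hold a new
    // Metadata header plus at least one aligned allocation; otherwise the
    // whole chunk is handed out as-is, which also rules out an underflowing
    // subtraction when computing the remainder's size.
    let chunk_size = 64;
    let request = round_up(40); // rounds to 48
    let splittable = chunk_size >= request + METADATA_SIZE + ALIGNMENT;
    assert!(!splittable); // 64 < 48 + 8 + 16, so the caller gets all 64 bytes
}
```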
```diff
@@ -77,12 +82,21 @@ impl Allocator {
                 });
                 let ptr = NonNull::new(Box::leak(unused_space)).unwrap();
 
-                *(ptr.as_ref().buffer.byte_sub(METADATA_SIZE) as *mut Metadata) =
-                    Metadata { chunk: ptr };
+                // buffer points to current_head + size + METADATA_SIZE.
+                // Therefore buffer - METADATA_SIZE points right after the buffer of current_head
+                // and right before the buffer of unused_space.
+                // This is where the pointer to the metadata chunk is expected.
+                unsafe {
+                    *(ptr.as_ref().buffer.byte_sub(METADATA_SIZE) as *mut Metadata) =
+                        Metadata { chunk: ptr };
+                }
 
-                if ptr.as_ref().next_chunk.is_none() {
-                    self.tail = ptr;
-                }
+                // We know that accessing ptr is safe since we just allocated it.
+                unsafe {
+                    if ptr.as_ref().next_chunk.is_none() {
+                        self.tail = ptr;
+                    }
+                }
 
                 current_head.in_use = true;
                 current_head.size = size;
```
```diff
@@ -99,9 +113,9 @@ impl Allocator {
         None
     }
 
-    pub(crate) unsafe fn deallocate(&mut self, ptr: *mut u8) {
-        let metadata = ptr.byte_sub(METADATA_SIZE) as *mut Metadata;
-        let metadata = (*metadata).chunk.as_mut();
+    pub(crate) fn deallocate(&mut self, ptr: *mut u8) {
+        let metadata = unsafe { ptr.byte_sub(METADATA_SIZE) as *mut Metadata };
+        let metadata = unsafe { (*metadata).chunk.as_mut() };
 
         debug_assert_eq!(metadata.in_use, true);
         debug_assert_eq!(metadata.buffer, ptr);
```
```diff
@@ -110,6 +124,11 @@ impl Allocator {
 
         // TODO: Consolidate chunks
     }
+
+    pub(crate) unsafe fn get_offset(&self, ptr: *const u8) -> usize {
+        let allocator = BUMP_ALLOCATOR.lock().unwrap();
+        allocator.get_offset(ptr)
+    }
 }
 
 #[cfg(test)]
```
**bump_allocator.rs**
```diff
@@ -3,10 +3,10 @@ use std::{
     usize,
 };
 
-use libc::{c_char, c_void, ftruncate, memfd_create, mmap, MAP_SHARED, PROT_READ, PROT_WRITE};
+use libc::{c_char, c_void, ftruncate, mmap, perror, syscall, SYS_memfd_secret, MAP_FAILED, MAP_SHARED, PROT_READ, PROT_WRITE};
 
-pub(crate) const MEMFD_INITIAL_SIZE: usize = 1024 * 1024 * 4;
-const MMAP_SIZE: usize = 1024 * 1024 * 1024;
+pub(crate) const MEMFD_INITIAL_SIZE: usize = 1024 * 1024;
+const MMAP_SIZE: usize = 1024 * 1024;
 
 pub(crate) static BUMP_ALLOCATOR: LazyLock<Mutex<BumpAllocator>> =
     LazyLock::new(|| unsafe { Mutex::new(BumpAllocator::new()) });
```
```diff
@@ -26,7 +26,10 @@ impl BumpAllocator {
     unsafe fn new() -> Self {
         assert!(MMAP_SIZE >= MEMFD_INITIAL_SIZE);
 
-        let data_fd = memfd_create("data\x00".as_ptr() as *const c_char, 0);
+        let data_fd = syscall(SYS_memfd_secret, 0) as i32;
+        if data_fd <= 0 {
+            perror("memfd secret\x00".as_ptr() as *const c_char);
+        }
 
         assert!(data_fd > 0);
 
```
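Context for the switch to `memfd_secret(2)`: the libc crate ships the `SYS_memfd_secret` number but no wrapper function, hence the raw `syscall`. A minimal availability probe (a sketch; assumes a Linux kernel with secretmem support, which on some kernels must be enabled at boot):

```rust
// Probe for memfd_secret availability (sketch; Linux-only).
// memfd_secret(2) creates memory that is removed from the kernel's direct
// map, which is why the allocator uses it for secret-holding shared memory.
use libc::{syscall, SYS_memfd_secret};

fn try_memfd_secret() -> Option<i32> {
    // No libc wrapper exists, so invoke the raw syscall; flags argument is 0.
    let fd = unsafe { syscall(SYS_memfd_secret, 0) } as i32;
    (fd >= 0).then_some(fd)
}

fn main() {
    match try_memfd_secret() {
        Some(fd) => println!("memfd_secret ok, fd = {fd}"),
        // ENOSYS on older kernels; secretmem may need enabling at boot.
        None => eprintln!("memfd_secret unavailable: {}", std::io::Error::last_os_error()),
    }
}
```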
```diff
@@ -41,7 +44,7 @@ impl BumpAllocator {
             0,
         ) as *mut u8;
 
-        assert_ne!(start_of_mem, 0 as *mut u8);
+        assert_ne!(start_of_mem, MAP_FAILED as *mut u8);
 
         let end_of_mem = start_of_mem.byte_add(MEMFD_INITIAL_SIZE);
 
```
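The `MAP_FAILED` change fixes a real bug: `mmap` reports failure by returning `(void *)-1`, never a null pointer, so the old `assert_ne!(start_of_mem, 0 as *mut u8)` could not catch a failed mapping. A standalone illustration of the correct check:

```rust
// mmap signals failure with MAP_FAILED ((void*)-1), not with null.
use libc::{mmap, MAP_ANONYMOUS, MAP_FAILED, MAP_PRIVATE, PROT_READ, PROT_WRITE};
use std::ptr;

fn main() {
    let mem = unsafe {
        mmap(
            ptr::null_mut(),
            4096,
            PROT_READ | PROT_WRITE,
            MAP_PRIVATE | MAP_ANONYMOUS,
            -1, // no backing fd for an anonymous mapping
            0,
        )
    };
    // Comparing against null here would silently accept a failed mapping.
    assert_ne!(mem, MAP_FAILED, "mmap failed: {}", std::io::Error::last_os_error());
}
```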
```diff
@@ -55,7 +58,7 @@ impl BumpAllocator {
         }
     }
 
-    pub unsafe fn alloc(&mut self, size: usize) -> Option<*mut u8> {
+    pub(crate) unsafe fn alloc(&mut self, size: usize) -> Option<*mut u8> {
         let new_head = self.head.byte_add(size);
 
         if new_head > self.end_of_mem {
```
```diff
@@ -75,13 +78,26 @@ impl BumpAllocator {
         ret
     }
 
-    pub fn dealloc(&mut self) {
+    pub(crate) fn dealloc(&mut self) {
         self.number_of_allocated_chunks -= 1;
 
         if self.number_of_allocated_chunks == 0 {
            self.head = self.start_of_mem;
         }
     }
+
+    pub(crate) unsafe fn get_offset(&self, ptr: *const u8) -> usize {
+        let offset = ptr.byte_offset_from(self.start_of_mem);
+
+        debug_assert!(offset >= 0);
+        debug_assert!(offset < self.end_of_mem.byte_offset_from(self.start_of_mem));
+
+        offset as usize
+    }
+
+    pub(crate) fn get_fd(&self) -> i32 {
+        self.backing_fd
+    }
 }
 
 #[cfg(test)]
```
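The new `get_offset`/`get_fd` pair is what makes allocations shareable: raw pointers are only valid inside one process's mapping, while an offset from `start_of_mem` plus the backing fd lets another process rebuild the pointer in its own mapping. A sketch of that round trip (names are illustrative, not part of the crate):

```rust
// Sketch: why offsets, not pointers, cross process boundaries. Each process
// mmaps the same fd at a different virtual address, so only the offset into
// the mapping is stable.

fn to_offset(base: *const u8, ptr: *const u8) -> usize {
    // Mirrors BumpAllocator::get_offset: distance from the mapping base.
    unsafe { ptr.byte_offset_from(base) as usize }
}

fn from_offset(base: *mut u8, offset: usize) -> *mut u8 {
    // The receiving side rebuilds a pointer inside *its own* mapping.
    unsafe { base.byte_add(offset) }
}

fn main() {
    let mut buf = [0u8; 64]; // stand-in for the shared mapping
    let base = buf.as_mut_ptr();

    let p = unsafe { base.byte_add(16) };
    let off = to_offset(base, p);
    assert_eq!(off, 16);

    // In another process `base` would differ, but `off` would not:
    let q = from_offset(base, off);
    assert_eq!(q, p);
}
```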
**lib.rs**
```diff
@@ -1,6 +1,11 @@
-#![feature(test)]
-#![feature(lazy_cell)]
+use bump_allocator::BUMP_ALLOCATOR;
 
 mod allocator;
 mod bump_allocator;
 pub mod sharedptr;
+
+pub fn get_shared_mem_fd() -> i32 {
+    let allocator = BUMP_ALLOCATOR.lock().unwrap();
+
+    allocator.get_fd()
+}
```
**sharedptr.rs**
```diff
@@ -1,47 +1,64 @@
 use core::slice;
-use std::{
-    ops::{Deref, DerefMut},
-    usize,
-};
+use std::ops::{Deref, DerefMut};
 
 use crate::allocator::ALLOCATOR;
 
-pub struct SharedPtr<'a>(&'a mut [u8]);
+#[derive(Debug)]
+pub struct SharedPtr {
+    ptr: *mut u8,
+    size: usize
+}
 
-impl SharedPtr<'_> {
+unsafe impl Send for SharedPtr {}
+unsafe impl Sync for SharedPtr {}
+
+impl SharedPtr {
     pub fn new(size: usize) -> Option<Self> {
         let mut allocator = ALLOCATOR.lock().unwrap();
 
-        let buf = unsafe {
-            let buf = allocator.allocate(size)?;
-            slice::from_raw_parts_mut(buf, size)
-        };
+        let buf = allocator.allocate(size)?;
 
-        Some(SharedPtr(buf))
+        Some(SharedPtr{ptr: buf, size})
+    }
+
+    pub fn get_offset(&self) -> usize {
+        let allocator = ALLOCATOR.lock().unwrap();
+
+        unsafe { allocator.get_offset(self.ptr) }
+    }
+
+    pub fn get_size(&self) -> usize {
+        self.size
     }
 }
 
-impl<'a> Deref for SharedPtr<'a> {
-    type Target = &'a mut [u8];
+impl Deref for SharedPtr {
+    type Target = [u8];
 
     fn deref(&self) -> &Self::Target {
-        &self.0
+        unsafe {
+            slice::from_raw_parts(self.ptr, self.size)
+                .try_into()
+                .expect("This should never fail")
+        }
     }
 }
 
-impl<'a> DerefMut for SharedPtr<'a> {
+impl DerefMut for SharedPtr {
     fn deref_mut(&mut self) -> &mut Self::Target {
-        &mut self.0
+        unsafe {
+            slice::from_raw_parts_mut(self.ptr, self.size)
+                .try_into()
+                .expect("This should never fail")
+        }
     }
 }
 
-impl Drop for SharedPtr<'_> {
+impl Drop for SharedPtr {
     fn drop(&mut self) {
         let mut allocator = ALLOCATOR.lock().unwrap();
 
-        unsafe {
-            allocator.deallocate(self.0.as_mut_ptr());
-        }
+        allocator.deallocate(self.ptr);
     }
 }
```
```diff
@@ -56,4 +73,12 @@ mod tests {
         assert_eq!(x[0], 1);
         drop(x);
     }
+
+    #[test]
+    fn slice() {
+        let mut x = SharedPtr::new(10).unwrap();
+        x[0] = 1;
+        x[1] = 2;
+        assert_eq!(x[0..=1], [1, 2]);
+    }
 }
```
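Putting the sharedptr changes together, the producer-side API now reads roughly as follows. A hypothetical usage sketch: it assumes the crate is imported as `bump_allocator`, and the consumer process that would receive the fd and offset is omitted:

```rust
// Sketch of the producer-side API surface after this change.
use bump_allocator::{get_shared_mem_fd, sharedptr::SharedPtr};

fn main() {
    // Allocate 10 shared bytes; SharedPtr now derefs to [u8] directly.
    let mut x = SharedPtr::new(10).expect("shared heap exhausted");
    x[0] = 1;
    x[1] = 2;

    // Everything another process needs to locate this buffer:
    let fd = get_shared_mem_fd();  // mmap this fd on the other side
    let offset = x.get_offset();   // position of the buffer inside the mapping
    let size = x.get_size();

    println!("fd = {fd}, offset = {offset}, size = {size}");
} // Drop returns the chunk to the free list.
```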