Add alloc support
Removed nightly features; Simplified allocator

Fixed formatting issues
duanyu-yu committed Jan 10, 2024
1 parent 644dad2 commit a31d514
Showing 6 changed files with 309 additions and 4 deletions.
54 changes: 50 additions & 4 deletions Cargo.lock

Some generated files are not rendered by default.

3 changes: 3 additions & 0 deletions Cargo.toml
@@ -8,9 +8,12 @@ edition = "2021"

[dependencies]
align-address = "0.1"
allocator-api2 = { version = "0.2", default-features = false }
exclusive_cell = "0.1"
hermit-entry = { version = "0.9", features = ["loader"] }
log = "0.4"
sptr = "0.3"
spinning_top = "0.3"

[features]
default = []
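The new allocator-api2 dependency mirrors the unstable `core::alloc::Allocator` API on stable Rust, which is what makes dropping the nightly features possible. A minimal sketch of the import swap (illustrative, not part of this diff):

// On nightly, the allocator trait sits behind a feature gate:
//     #![feature(allocator_api)]
//     use core::alloc::{AllocError, Allocator, Layout};
// With allocator-api2 0.2 the same items are available on stable Rust:
use allocator_api2::alloc::{AllocError, Allocator, Layout};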
99 changes: 99 additions & 0 deletions src/allocator/bootstrap.rs
@@ -0,0 +1,99 @@
//! A bootstrap allocator based on a statically allocated buffer.

/// A pointer range that can only be compared against.
mod ptr_range {
    use core::ops::Range;
    use core::ptr::NonNull;

    /// A pointer range that can only be compared against.
    pub struct PtrRange<T> {
        inner: Range<NonNull<T>>,
    }

    // SAFETY: We never dereference, but only compare, pointers.
    unsafe impl<T> Send for PtrRange<T> {}
    unsafe impl<T> Sync for PtrRange<T> {}

    impl<T> PtrRange<T> {
        /// Returns `true` if the pointer range contains `ptr`.
        pub fn contains(&self, ptr: NonNull<T>) -> bool {
            self.inner.contains(&ptr)
        }
    }

    impl<T> From<Range<NonNull<T>>> for PtrRange<T> {
        fn from(value: Range<NonNull<T>>) -> Self {
            Self { inner: value }
        }
    }
}

use core::mem::MaybeUninit;
use core::ops::Range;
use core::ptr::NonNull;

use allocator_api2::alloc::{AllocError, Allocator, Layout};
use exclusive_cell::ExclusiveCell;

use self::ptr_range::PtrRange;

/// A bootstrap allocator.
///
/// This allocator is generic over the internal allocator and can only be created once.
/// The bootstrap allocator provides the internal allocator with static memory.
///
/// This allocator tracks which static memory it was using initially.
/// It can be queried whether a given pointer belongs to that memory.
pub struct BootstrapAllocator<A> {
    ptr_range: PtrRange<u8>,
    allocator: A,
}

impl<A> Default for BootstrapAllocator<A>
where
    A: From<&'static mut [MaybeUninit<u8>]>,
{
    fn default() -> Self {
        let mem = {
            const SIZE: usize = 4 * 1024;
            const BYTE: MaybeUninit<u8> = MaybeUninit::uninit();
            /// The actual memory of the bootstrap allocator.
            static MEM: ExclusiveCell<[MaybeUninit<u8>; SIZE]> = ExclusiveCell::new([BYTE; SIZE]);
            MEM.take().unwrap()
        };

        let ptr_range = {
            let Range { start, end } = mem.as_mut_ptr_range();
            let start = NonNull::new(start).unwrap().cast::<u8>();
            let end = NonNull::new(end).unwrap().cast::<u8>();
            PtrRange::from(start..end)
        };
        let allocator = A::from(mem);

        Self {
            ptr_range,
            allocator,
        }
    }
}

impl<A> BootstrapAllocator<A> {
    /// Returns `true` if the pointer belongs to the static memory of this allocator.
    pub fn manages(&self, ptr: NonNull<u8>) -> bool {
        self.ptr_range.contains(ptr)
    }
}

unsafe impl<A> Allocator for BootstrapAllocator<A>
where
    A: Allocator,
{
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.allocator.allocate(layout)
    }

    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        debug_assert!(self.manages(ptr));
        unsafe { self.allocator.deallocate(ptr, layout) }
    }
}
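A sketch of how the bootstrap allocator composes with the bump allocator from the next file. This is hypothetical usage code, written as if inside src/allocator/mod.rs where both submodules are visible; it is not part of this commit:

fn bootstrap_demo() {
    use core::ptr::NonNull;

    use allocator_api2::alloc::{Allocator, Layout};

    // `default` takes the 4 KiB static buffer from the `ExclusiveCell` exactly once;
    // a second call would panic on `unwrap`.
    let bootstrap: BootstrapAllocator<BumpAllocator> = BootstrapAllocator::default();

    let layout = Layout::from_size_align(64, 8).unwrap();
    let allocation = bootstrap.allocate(layout).unwrap();

    // Every pointer handed out comes from that static buffer, so `manages` returns `true`.
    let ptr = NonNull::new(allocation.as_ptr() as *mut u8).unwrap();
    assert!(bootstrap.manages(ptr));

    unsafe { bootstrap.deallocate(ptr, layout) };
}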
49 changes: 49 additions & 0 deletions src/allocator/bump.rs
@@ -0,0 +1,49 @@
//! A bump allocator.
//!
//! This is a simple allocator design which can only allocate and not deallocate.

use core::cell::Cell;
use core::mem::MaybeUninit;
use core::ptr::NonNull;

use allocator_api2::alloc::{AllocError, Allocator, Layout};

/// A simple, `!Sync` implementation of a bump allocator.
///
/// This allocator manages the provided memory.
pub struct BumpAllocator {
    mem: Cell<&'static mut [MaybeUninit<u8>]>,
}

unsafe impl Allocator for BumpAllocator {
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        let ptr: *mut [MaybeUninit<u8>] = self.allocate_slice(layout)?;
        Ok(NonNull::new(ptr as *mut [u8]).unwrap())
    }

    unsafe fn deallocate(&self, _ptr: NonNull<u8>, _layout: Layout) {}
}

impl BumpAllocator {
    fn allocate_slice(&self, layout: Layout) -> Result<&'static mut [MaybeUninit<u8>], AllocError> {
        let mem = self.mem.take();
        let align_offset = mem.as_ptr().align_offset(layout.align());
        let mid = layout.size() + align_offset;
        if mid > mem.len() {
            self.mem.set(mem);
            Err(AllocError)
        } else {
            let (alloc, remaining) = mem.split_at_mut(mid);
            self.mem.set(remaining);
            Ok(&mut alloc[align_offset..])
        }
    }
}

impl From<&'static mut [MaybeUninit<u8>]> for BumpAllocator {
    fn from(mem: &'static mut [MaybeUninit<u8>]) -> Self {
        Self {
            mem: Cell::new(mem),
        }
    }
}
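The bump logic in `allocate_slice`: the remaining slice is split at `align_offset + size`, the aligned tail of the first part is handed out, and the second part becomes the new remaining memory; `deallocate` is a no-op. A hypothetical sketch of exercising it, written as if in a host-side test where `BumpAllocator` is in scope and std's `Box` is available (in the loader the `'static` buffer comes from the bootstrap allocator instead):

fn bump_demo() {
    use core::mem::MaybeUninit;

    use allocator_api2::alloc::{Allocator, Layout};

    // Hypothetical 'static buffer for the demo, leaked so it lives forever.
    let mem: &'static mut [MaybeUninit<u8>] = Box::leak(Box::new([MaybeUninit::<u8>::uninit(); 64]));
    let bump = BumpAllocator::from(mem);

    // Worked example of the math in `allocate_slice`: if the remaining slice
    // starts at address 0x1003 and the request is Layout::from_size_align(8, 4),
    // then align_offset = 1 and mid = 9; the caller gets the 8 bytes at the
    // aligned address 0x1004 and everything past byte 9 stays available.
    let layout = Layout::from_size_align(8, 4).unwrap();
    assert!(bump.allocate(layout).is_ok());

    // A request larger than what is left fails with `AllocError` instead of panicking.
    let too_big = Layout::from_size_align(1024, 8).unwrap();
    assert!(bump.allocate(too_big).is_err());
}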
102 changes: 102 additions & 0 deletions src/allocator/mod.rs
@@ -0,0 +1,102 @@
//! Implementation of the Hermit Allocator in the loader

mod bootstrap;
mod bump;

use core::ptr;
use core::ptr::NonNull;

use allocator_api2::alloc::{AllocError, Allocator, GlobalAlloc, Layout};
use spinning_top::Spinlock;

use self::bootstrap::BootstrapAllocator;
use self::bump::BumpAllocator;

/// The global system allocator for Hermit.
struct GlobalAllocator {
    /// The bootstrap allocator, which is available immediately.
    ///
    /// It allows allocations before the heap has been initialized.
    bootstrap_allocator: Option<BootstrapAllocator<BumpAllocator>>,
}

impl GlobalAllocator {
    const fn empty() -> Self {
        Self {
            bootstrap_allocator: None,
        }
    }

    fn align_layout(layout: Layout) -> Layout {
        let size = layout.size();
        let align = layout.align();
        Layout::from_size_align(size, align).unwrap()
    }

    fn allocate(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocError> {
        let layout = Self::align_layout(layout);
        self.bootstrap_allocator
            .get_or_insert_with(Default::default)
            .allocate(layout)
            // FIXME: Use NonNull::as_mut_ptr once `slice_ptr_get` is stabilized
            // https://github.com/rust-lang/rust/issues/74265
            .map(|ptr| NonNull::new(ptr.as_ptr() as *mut u8).unwrap())
    }

    unsafe fn deallocate(&mut self, ptr: NonNull<u8>, layout: Layout) {
        let layout = Self::align_layout(layout);
        let bootstrap_allocator = self.bootstrap_allocator.as_ref().unwrap();
        assert!(bootstrap_allocator.manages(ptr));
        unsafe {
            bootstrap_allocator.deallocate(ptr, layout);
        }
    }
}

pub struct LockedAllocator(Spinlock<GlobalAllocator>);

impl LockedAllocator {
    /// Creates an empty allocator. The bootstrap allocator is set up lazily on the first allocation.
    pub const fn empty() -> LockedAllocator {
        LockedAllocator(Spinlock::new(GlobalAllocator::empty()))
    }
}

/// To avoid false sharing, the global memory allocator aligns
/// all requests to a cache line.
unsafe impl GlobalAlloc for LockedAllocator {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        self.0
            .lock()
            .allocate(layout)
            .ok()
            .map_or(ptr::null_mut(), |allocation| allocation.as_ptr())
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        unsafe {
            self.0
                .lock()
                .deallocate(NonNull::new_unchecked(ptr), layout)
        }
    }
}

#[cfg(all(test, not(target_os = "none")))]
mod tests {
    use core::mem;

    use super::*;

    #[test]
    fn empty() {
        let mut allocator = GlobalAllocator::empty();
        let layout = Layout::from_size_align(1, 1).unwrap();
        // We have 4 KiB of static memory.
        assert!(allocator.allocate(layout.clone()).is_ok());

        let layout = Layout::from_size_align(0x1000, mem::align_of::<usize>());
        let addr = allocator.allocate(layout.unwrap());
        assert!(addr.is_err());
    }
}
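`LockedAllocator` implements `GlobalAlloc`, so the loader can register it as the global allocator and use `alloc` types before a real heap exists. A sketch of such a registration site (hypothetical; the actual location in the loader is not shown in this excerpt):

// Hypothetical registration, e.g. in src/main.rs:
extern crate alloc;

mod allocator;

use crate::allocator::LockedAllocator;

#[global_allocator]
static ALLOCATOR: LockedAllocator = LockedAllocator::empty();

fn alloc_demo() {
    // Once the global allocator is registered, `alloc` collections work during boot,
    // backed by the 4 KiB bootstrap buffer.
    let mut args = alloc::vec::Vec::<u8>::with_capacity(16);
    args.extend_from_slice(b"loader");
}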
