
Commit a31d514

Add alloc support

Removed nightly features; simplified the allocator; fixed formatting issues.

1 parent 644dad2

File tree

6 files changed (+309 −4 lines)

Cargo.lock

+50 −4 (generated file; diff not rendered)

Cargo.toml

+3
@@ -8,9 +8,12 @@ edition = "2021"
 
 [dependencies]
 align-address = "0.1"
+allocator-api2 = { version = "0.2", default-features = false }
+exclusive_cell = "0.1"
 hermit-entry = { version = "0.9", features = ["loader"] }
 log = "0.4"
 sptr = "0.3"
+spinning_top = "0.3"
 
 [features]
 default = []
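
All three new dependencies work without std: allocator-api2 provides the unstable Allocator trait on stable Rust (default-features = false drops its std support), exclusive_cell provides statics that can be taken exactly once, and spinning_top provides the spinlock that guards the global allocator. As a minimal sketch of why exclusive_cell fits here (the buffer type and size are made up for illustration), a second take fails, so static memory can never be handed out twice:

use exclusive_cell::ExclusiveCell;

static BUFFER: ExclusiveCell<[u8; 16]> = ExclusiveCell::new([0; 16]);

fn main() {
    // The first take succeeds and yields a unique mutable reference.
    let first: &'static mut [u8; 16] = BUFFER.take().unwrap();
    first[0] = 42;
    // Every later take returns None, so the buffer cannot be aliased.
    assert!(BUFFER.take().is_none());
}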

src/allocator/bootstrap.rs

+99
//! A bootstrap allocator based on a statically allocated buffer.

/// A pointer range that can only be compared against.
mod ptr_range {
    use core::ops::Range;
    use core::ptr::NonNull;

    /// A pointer range that can only be compared against.
    pub struct PtrRange<T> {
        inner: Range<NonNull<T>>,
    }

    // SAFETY: We never dereference, but only compare, pointers.
    unsafe impl<T> Send for PtrRange<T> {}
    unsafe impl<T> Sync for PtrRange<T> {}

    impl<T> PtrRange<T> {
        /// Returns `true` if the pointer range contains `ptr`.
        pub fn contains(&self, ptr: NonNull<T>) -> bool {
            self.inner.contains(&ptr)
        }
    }

    impl<T> From<Range<NonNull<T>>> for PtrRange<T> {
        fn from(value: Range<NonNull<T>>) -> Self {
            Self { inner: value }
        }
    }
}

use core::mem::MaybeUninit;
use core::ops::Range;
use core::ptr::NonNull;

use allocator_api2::alloc::{AllocError, Allocator, Layout};
use exclusive_cell::ExclusiveCell;

use self::ptr_range::PtrRange;

/// A bootstrap allocator.
///
/// This allocator is generic over the internal allocator and can only be created once.
/// The bootstrap allocator provides the internal allocator with static memory.
///
/// This allocator tracks which static memory it was initially given.
/// It can be queried whether a pointer belongs to that memory.
pub struct BootstrapAllocator<A> {
    ptr_range: PtrRange<u8>,
    allocator: A,
}

impl<A> Default for BootstrapAllocator<A>
where
    A: From<&'static mut [MaybeUninit<u8>]>,
{
    fn default() -> Self {
        let mem = {
            const SIZE: usize = 4 * 1024;
            const BYTE: MaybeUninit<u8> = MaybeUninit::uninit();
            /// The actual memory of the bootstrap allocator.
            static MEM: ExclusiveCell<[MaybeUninit<u8>; SIZE]> = ExclusiveCell::new([BYTE; SIZE]);
            MEM.take().unwrap()
        };

        let ptr_range = {
            let Range { start, end } = mem.as_mut_ptr_range();
            let start = NonNull::new(start).unwrap().cast::<u8>();
            let end = NonNull::new(end).unwrap().cast::<u8>();
            PtrRange::from(start..end)
        };
        let allocator = A::from(mem);

        Self {
            ptr_range,
            allocator,
        }
    }
}

impl<A> BootstrapAllocator<A> {
    /// Returns `true` if the pointer belongs to the static memory of this allocator.
    pub fn manages(&self, ptr: NonNull<u8>) -> bool {
        self.ptr_range.contains(ptr)
    }
}

unsafe impl<A> Allocator for BootstrapAllocator<A>
where
    A: Allocator,
{
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.allocator.allocate(layout)
    }

    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        debug_assert!(self.manages(ptr));
        unsafe { self.allocator.deallocate(ptr, layout) }
    }
}
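
A usage sketch, not part of this commit: Default::default claims the 4 KiB static buffer exactly once (a second call would panic on the unwrap above), and manages later tells bootstrap pointers apart from pointers obtained elsewhere. The demo function and its layout values are assumptions for illustration; it would live next to this module, where BumpAllocator is in scope:

use allocator_api2::alloc::{Allocator, Layout};

fn demo() {
    // Claims the 4 KiB static buffer; calling default() again would panic.
    let allocator = BootstrapAllocator::<BumpAllocator>::default();

    let layout = Layout::from_size_align(64, 8).unwrap();
    let ptr = allocator.allocate(layout).unwrap().cast::<u8>();
    // Everything handed out by the bootstrap allocator lies inside its
    // static buffer, so `manages` recognizes the pointer.
    assert!(allocator.manages(ptr));
    unsafe { allocator.deallocate(ptr, layout) };
}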

src/allocator/bump.rs

+49
//! A bump allocator.
//!
//! This is a simple allocator design which can only allocate and not deallocate.

use core::cell::Cell;
use core::mem::MaybeUninit;
use core::ptr::NonNull;

use allocator_api2::alloc::{AllocError, Allocator, Layout};

/// A simple, `!Sync` implementation of a bump allocator.
///
/// This allocator manages the provided memory.
pub struct BumpAllocator {
    mem: Cell<&'static mut [MaybeUninit<u8>]>,
}

unsafe impl Allocator for BumpAllocator {
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        let ptr: *mut [MaybeUninit<u8>] = self.allocate_slice(layout)?;
        Ok(NonNull::new(ptr as *mut [u8]).unwrap())
    }

    unsafe fn deallocate(&self, _ptr: NonNull<u8>, _layout: Layout) {}
}

impl BumpAllocator {
    fn allocate_slice(&self, layout: Layout) -> Result<&'static mut [MaybeUninit<u8>], AllocError> {
        let mem = self.mem.take();
        // Pad the allocation so that it starts at the next aligned address.
        let align_offset = mem.as_ptr().align_offset(layout.align());
        let mid = layout.size() + align_offset;
        if mid > mem.len() {
            // Not enough memory left; put the slice back and fail.
            self.mem.set(mem);
            Err(AllocError)
        } else {
            // Split off the padding and the allocation, keep the rest.
            let (alloc, remaining) = mem.split_at_mut(mid);
            self.mem.set(remaining);
            Ok(&mut alloc[align_offset..])
        }
    }
}

impl From<&'static mut [MaybeUninit<u8>]> for BumpAllocator {
    fn from(mem: &'static mut [MaybeUninit<u8>]) -> Self {
        Self {
            mem: Cell::new(mem),
        }
    }
}
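
The arithmetic in allocate_slice is easiest to check with concrete numbers: if the buffer start needs 7 padding bytes to reach the requested alignment, a request of size 8 splits off mid = 15 bytes and hands the caller the last 8 of them. A sketch of a unit test one might place next to this module (buffer size and layout values are made up; it mirrors the ExclusiveCell pattern from bootstrap.rs to obtain a &'static mut buffer):

#[cfg(all(test, not(target_os = "none")))]
mod tests {
    use core::mem::MaybeUninit;

    use allocator_api2::alloc::{Allocator, Layout};
    use exclusive_cell::ExclusiveCell;

    use super::BumpAllocator;

    #[test]
    fn exhaustion() {
        const BYTE: MaybeUninit<u8> = MaybeUninit::uninit();
        static MEM: ExclusiveCell<[MaybeUninit<u8>; 32]> = ExclusiveCell::new([BYTE; 32]);
        let mem: &'static mut [MaybeUninit<u8>] = MEM.take().unwrap();
        let bump = BumpAllocator::from(mem);

        let layout = Layout::from_size_align(16, 8).unwrap();
        // The first 16-byte allocation always fits into the 32-byte buffer.
        assert!(bump.allocate(layout).is_ok());
        // The second may or may not fit, depending on alignment padding.
        let _ = bump.allocate(layout);
        // Fewer than 16 bytes can be left by now, so a third request must fail.
        assert!(bump.allocate(layout).is_err());
    }
}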

src/allocator/mod.rs

+102
//! Implementation of the Hermit allocator in the loader.

mod bootstrap;
mod bump;

use core::ptr;
use core::ptr::NonNull;

use allocator_api2::alloc::{AllocError, Allocator, GlobalAlloc, Layout};
use spinning_top::Spinlock;

use self::bootstrap::BootstrapAllocator;
use self::bump::BumpAllocator;

/// The global system allocator for Hermit.
struct GlobalAllocator {
    /// The bootstrap allocator, which is available immediately.
    ///
    /// It allows allocations before the heap has been initialized.
    bootstrap_allocator: Option<BootstrapAllocator<BumpAllocator>>,
}

impl GlobalAllocator {
    const fn empty() -> Self {
        Self {
            bootstrap_allocator: None,
        }
    }

    fn align_layout(layout: Layout) -> Layout {
        let size = layout.size();
        let align = layout.align();
        Layout::from_size_align(size, align).unwrap()
    }

    fn allocate(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocError> {
        let layout = Self::align_layout(layout);
        self.bootstrap_allocator
            .get_or_insert_with(Default::default)
            .allocate(layout)
            // FIXME: Use NonNull::as_mut_ptr once `slice_ptr_get` is stabilized
            // https://github.com/rust-lang/rust/issues/74265
            .map(|ptr| NonNull::new(ptr.as_ptr() as *mut u8).unwrap())
    }

    unsafe fn deallocate(&mut self, ptr: NonNull<u8>, layout: Layout) {
        let layout = Self::align_layout(layout);
        let bootstrap_allocator = self.bootstrap_allocator.as_ref().unwrap();
        assert!(bootstrap_allocator.manages(ptr));
        unsafe {
            bootstrap_allocator.deallocate(ptr, layout);
        }
    }
}

pub struct LockedAllocator(Spinlock<GlobalAllocator>);

impl LockedAllocator {
    /// Creates an empty allocator. The bootstrap allocator is created
    /// lazily on the first allocation.
    pub const fn empty() -> LockedAllocator {
        LockedAllocator(Spinlock::new(GlobalAllocator::empty()))
    }
}

/// To avoid false sharing, the global memory allocator aligns
/// all requests to a cache line.
unsafe impl GlobalAlloc for LockedAllocator {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        self.0
            .lock()
            .allocate(layout)
            .ok()
            .map_or(ptr::null_mut(), |allocation| allocation.as_ptr())
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        unsafe {
            self.0
                .lock()
                .deallocate(NonNull::new_unchecked(ptr), layout)
        }
    }
}

#[cfg(all(test, not(target_os = "none")))]
mod tests {
    use core::mem;

    use super::*;

    #[test]
    fn empty() {
        let mut allocator = GlobalAllocator::empty();
        let layout = Layout::from_size_align(1, 1).unwrap();
        // We have 4 KiB of static memory.
        assert!(allocator.allocate(layout).is_ok());

        let layout = Layout::from_size_align(0x1000, mem::align_of::<usize>());
        let addr = allocator.allocate(layout.unwrap());
        assert!(addr.is_err());
    }
}
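
The diff shown here does not include the registration site for the new allocator. A hedged sketch of how a loader entry point would typically wire it up, assuming allocator-api2's GlobalAlloc is a re-export of core::alloc::GlobalAlloc (which the #[global_allocator] attribute requires); the static name ALLOCATOR is an assumption, not taken from this commit:

// With a registered global allocator, `extern crate alloc` types such as
// Box and Vec become usable in the no_std loader.
use crate::allocator::LockedAllocator;

#[global_allocator]
static ALLOCATOR: LockedAllocator = LockedAllocator::empty();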
