Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Make Trace object-safe, allowing dynamic types and slices to be garbage collected #20

Closed
wants to merge 3 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
34 changes: 24 additions & 10 deletions libs/simple/src/alloc.rs
Original file line number Diff line number Diff line change
Expand Up @@ -28,17 +28,31 @@ pub const MAXIMUM_SMALL_WORDS: usize = 32;
/// The alignment of elements in the arena
pub const ARENA_ELEMENT_ALIGN: usize = mem::align_of::<GcHeader>();

#[inline]
pub const fn small_object_size<T>() -> usize {
let header_layout = Layout::new::<GcHeader>();
header_layout.size() + header_layout
.padding_needed_for(std::mem::align_of::<T>())
+ mem::size_of::<T>()
/// Size and alignment of a value, captured so that small-object-arena
/// decisions ([`is_small`]) and allocation-size math ([`object_size`])
/// can be made without a `T: Sized` type parameter (needed for
/// dynamically-sized values like trait objects and slices).
#[derive(Copy, Clone)]
pub struct SmallObjectSize {
/// Size of the value in bytes (as from `mem::size_of`)
pub val_size: usize,
/// Required alignment of the value in bytes (as from `mem::align_of`)
pub val_align: usize
}
#[inline]
pub const fn is_small_object<T>() -> bool {
small_object_size::<T>() <= MAXIMUM_SMALL_WORDS * 8
&& mem::align_of::<T>() <= ARENA_ELEMENT_ALIGN
impl SmallObjectSize {
/// Capture the statically-known size/alignment of a `Sized` type `T`.
#[inline]
pub const fn of<T>() -> SmallObjectSize {
SmallObjectSize {
val_size: std::mem::size_of::<T>(),
val_align: std::mem::align_of::<T>()
}
}
/// Whether an object of this size belongs in the small-object arena:
/// the total object size (header + padding + value) must fit within
/// `MAXIMUM_SMALL_WORDS` words and the value's alignment must not
/// exceed `ARENA_ELEMENT_ALIGN`.
/// NOTE(review): the `* 8` assumes 8-byte words — confirm intent on 32-bit targets.
#[inline]
pub const fn is_small(&self) -> bool {
self.object_size() <= MAXIMUM_SMALL_WORDS * 8
&& self.val_align <= ARENA_ELEMENT_ALIGN
}
/// Total allocation size for the object: the `GcHeader`, then any
/// padding needed to reach the value's alignment, then the value itself.
/// NOTE(review): `Layout::padding_needed_for` is not a stable `const fn` —
/// confirm this builds on the crate's pinned toolchain.
#[inline]
pub const fn object_size(&self) -> usize {
let header_layout = Layout::new::<GcHeader>();
header_layout.size() + header_layout
.padding_needed_for(self.val_align) + self.val_size
}

}

pub(crate) struct Chunk {
Expand Down
71 changes: 60 additions & 11 deletions libs/simple/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ use std::sync::atomic::{AtomicUsize, AtomicBool, Ordering};
use std::sync::atomic::AtomicPtr;
#[cfg(not(feature = "multiple-collectors"))]
use std::marker::PhantomData;
use std::any::TypeId;
use std::any::{TypeId, Any};

use slog::{Logger, FnValue, debug};

Expand All @@ -49,11 +49,27 @@ use zerogc_context::{
mod alloc;
#[cfg(not(feature = "small-object-arenas"))]
mod alloc {
pub const fn is_small_object<T>() -> bool {
false
}
pub const fn small_object_size<T>() -> usize {
unimplemented!()
/// Stub mirror of `alloc::SmallObjectSize` for builds without the
/// `small-object-arenas` feature; keeps the same API shape so callers
/// compile unchanged.
#[derive(Copy, Clone)]
pub struct SmallObjectSize {
/// Size of the value in bytes (as from `mem::size_of`)
pub val_size: usize,
/// Required alignment of the value in bytes (as from `mem::align_of`)
pub val_align: usize
}
impl SmallObjectSize {
/// Capture the statically-known size/alignment of a `Sized` type `T`.
#[inline]
pub const fn of<T>() -> SmallObjectSize {
SmallObjectSize {
val_size: std::mem::size_of::<T>(),
val_align: std::mem::align_of::<T>()
}
}
/// With arenas disabled, no object is ever "small", so everything
/// falls back to the large-object allocation path.
#[inline]
pub const fn is_small(&self) -> bool {
false
}
/// Never reachable in practice because `is_small` is always `false`;
/// panics if called (presumably callers gate on `is_small` first —
/// confirm at call sites).
#[inline]
pub const fn object_size(&self) -> usize {
unimplemented!()
}
}
pub struct FakeArena;
impl FakeArena {
Expand Down Expand Up @@ -125,7 +141,7 @@ unsafe impl DynTrace for GcHandleListWrapper {
fn trace(&mut self, visitor: &mut MarkVisitor) {
unsafe {
let Ok(()) = self.0.trace::<_, !>(|raw_ptr, type_info| {
let header = &mut *GcHeader::from_value_ptr(raw_ptr, type_info);
let header = &mut *GcHeader::from_value_ptr(raw_ptr as *mut u8, type_info);
// Mark grey
header.update_raw_state(MarkState::Grey.
to_raw(visitor.inverted_mark));
Expand Down Expand Up @@ -701,6 +717,32 @@ unsafe impl GcVisitor for MarkVisitor<'_> {
Ok(())
}
}

/// Visit a dynamically-dispatched GC pointer (trait object), whose
/// concrete type is only known at runtime.
///
/// Ids belonging to other collectors are silently ignored; a pointer
/// owned by this visitor's collector is supposed to be marked here.
#[inline]
unsafe fn visit_dyn_gc(&mut self, gc_ptr: NonNull<dyn GcSafe>, collector_id: &dyn Any) -> Result<(), Self::Err> where Self: Sized {
if let Some(&id) = collector_id.downcast_ref::<crate::CollectorId>() {
// NOTE(review): `assert!` takes a boolean condition (plus message
// args); `id` is a CollectorId, so this does not compile. This was
// presumably meant to be `assert_eq!(id, self.expected_collector)`.
assert!(id, self.expected_collector);
// This MUST match self._visit_own_gc
// NOTE(review): bare `assert!()` is invalid — leftover WIP placeholder.
assert!()
// NOTE(review): this `GcType` literal is incomplete (it has no
// `value_offset`, which `from_value_ptr` reads), and this branch
// never yields `Ok(())` — the arm is unfinished as committed.
let header = GcHeader::from_value_ptr(
gc_ptr.as_ptr() as *mut u8,
&GcType {
/*
* TODO: Is this possibly correct?
* How do we handle dropping dynamically-dispatched types
*/
drop_func: Some(std::mem::transmute::<
unsafe fn(*mut dyn GcSafe),
unsafe fn(*mut c_void)
>(std::ptr::drop_in_place::<dyn GcSafe>)),

}
)
} else {
// Just ignore ids from other collectors
Ok(())
}
}
}
impl MarkVisitor<'_> {
/// Visit a GC type whose [::zerogc::CollectorId] matches our own
Expand All @@ -710,7 +752,7 @@ impl MarkVisitor<'_> {
// Verify this again (should be checked by caller)
debug_assert_eq!(gc.collector_id(), self.expected_collector);
let header = GcHeader::from_value_ptr(
gc.as_raw_ptr(),
gc.as_raw_ptr() as *mut u8,
T::STATIC_TYPE
);
self.visit_raw_gc(&mut *header, |obj, visitor| {
Expand Down Expand Up @@ -804,7 +846,14 @@ impl<T: GcSafe> StaticGcType for T {
/// and fallback alloc vis `BigGcObject`
#[repr(C)]
struct GcHeader {
type_info: &'static GcType,
/// If this type is a `Sized` type whose info was
/// statically known at allocation, this type-info will be present
///
/// Otherwise, if the object is a trait-object, a slice, or a `str`
/// this will be missing. The other field of the fat-pointer
/// will be one past the end. The data-pointer part of the fat-pointer
/// can be reconstructed from from [Gc::as_raw_ptr]
type_info: Option<&'static GcType>,
/*
* NOTE: State byte should come last
* If the value is small `(u32)`, we could reduce
Expand All @@ -829,8 +878,8 @@ impl GcHeader {
}
}
#[inline]
pub unsafe fn from_value_ptr<T>(ptr: *mut T, static_type: &GcType) -> *mut GcHeader {
(ptr as *mut u8).sub(static_type.value_offset).cast()
/// Recover the pointer to a value's `GcHeader`, which lives
/// `static_type.value_offset` bytes before the value itself.
///
/// # Safety
/// `ptr` must point at a value that was allocated with a `GcHeader`
/// exactly `static_type.value_offset` bytes before it.
pub unsafe fn from_value_ptr(ptr: *mut u8, static_type: &GcType) -> *mut GcHeader {
ptr.sub(static_type.value_offset).cast()
}
#[inline]
fn raw_state(&self) -> RawMarkState {
Expand Down
32 changes: 21 additions & 11 deletions src/cell.rs
Original file line number Diff line number Diff line change
Expand Up @@ -15,13 +15,16 @@
//! and it'll generate a safe wrapper.
use core::cell::Cell;

use crate::{GcSafe, Trace, GcVisitor, NullTrace, TraceImmutable, GcDirectBarrier,};
use crate::prelude::*;

/// A `Cell` pointing to a garbage collected object.
///
/// This only supports mutating `NullTrace` types,
/// because garbage collected pointers need write barriers.
#[derive(Default, Clone, Debug)]
/// This only supports mutating `NullTrace` types directly,
/// because garbage collected pointers need write barriers.
///
/// However, other types can be mutated through auto-generated
/// accessors on the type (using the [GcDirectBarrier] trait).
#[derive(Default, Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)]
#[repr(transparent)]
pub struct GcCell<T: Trace + Copy>(Cell<T>);
impl<T: Trace + Copy> GcCell<T> {
Expand Down Expand Up @@ -61,14 +64,16 @@ impl<T: NullTrace + Copy> GcCell<T> {
self.0.set(value)
}
}
unsafe impl<'gc, OwningRef, Value> GcDirectBarrier<'gc, OwningRef> for GcCell<Value>
where Value: GcDirectBarrier<'gc, OwningRef> + Copy {
/// Trigger a write barrier on the inner value
///
/// We are a 'direct' write barrier because `Value` is stored inline
unsafe impl<'gc, OwningRef, Value> crate::GcDirectBarrier<'gc, OwningRef> for GcCell<Value>
where Value: crate::GcDirectBarrier<'gc, OwningRef> + Copy {
#[inline]
unsafe fn write_barrier(
&self, owner: &OwningRef,
field_offset: usize
) {
// NOTE: We are direct write because `Value` is stored inline
// (presumably `field_offset` is this cell's offset within `owner`
// — confirm against the GcDirectBarrier trait docs)
self.get().write_barrier(owner, field_offset)
}
}
Expand All @@ -80,8 +85,6 @@ unsafe impl<'gc, OwningRef, Value> GcDirectBarrier<'gc, OwningRef> for GcCell<Va
/// In other words is possible to safely trace a `GcCell`
/// with a garbage collected type, as long as it is never mutated.
unsafe impl<T: Trace + Copy> Trace for GcCell<T> {
const NEEDS_TRACE: bool = T::NEEDS_TRACE;

#[inline]
fn visit<V: GcVisitor>(&mut self, visitor: &mut V) -> Result<(), V::Err> {
visitor.visit(self.get_mut())
Expand All @@ -98,9 +101,16 @@ unsafe impl<T: GcSafe + NullTrace + Copy> TraceImmutable for GcCell<T> {
self.set(value);
Ok(())
}

// Object-safe (dynamically dispatched) counterpart of `visit_immutable`;
// simply forwards to the generic implementation.
#[inline]
fn visit_dyn_immutable(&self, visitor: &mut GcDynVisitor) -> Result<(), GcDynVisitError> {
self.visit_immutable(visitor)
}
}
unsafe impl<T: GcSafe + Copy + NullTrace> NullTrace for GcCell<T> {}
unsafe impl<T: GcSafe + Copy> GcSafe for GcCell<T> {
unsafe impl<T: GcSafe + Copy> GcSafe for GcCell<T> {}
/// A `GcCell<T>` needs tracing exactly when its inner value does,
/// and never needs dropping: `T: Copy` guarantees `T` has no `Drop`
/// impl (the language forbids `Copy + Drop`).
unsafe impl<T: GcType + Copy> GcType for GcCell<T> {
const NEEDS_TRACE: bool = T::NEEDS_TRACE;
/// Since T is Copy, we shouldn't need to be dropped
const NEEDS_DROP: bool = false;
}
}
16 changes: 10 additions & 6 deletions src/dummy_impl.rs
Original file line number Diff line number Diff line change
@@ -1,9 +1,6 @@
//! Dummy collector implementation for testing

use crate::{
Trace, TraceImmutable, GcVisitor, NullTrace, CollectorId,
GcSafe, GcSystem, GcContext,
};
use crate::{Trace, TraceImmutable, GcVisitor, NullTrace, CollectorId, GcSafe, GcSystem, GcContext, GcType, GcDynVisitError, GcDynVisitor};
use std::ptr::NonNull;

/// Fake a [Gc] that points to the specified value
Expand Down Expand Up @@ -103,8 +100,6 @@ pub struct DummyCollectorId {
_priv: ()
}
unsafe impl Trace for DummyCollectorId {
const NEEDS_TRACE: bool = false;

fn visit<V: GcVisitor>(&mut self, _visitor: &mut V) -> Result<(), <V as GcVisitor>::Err> {
Ok(())
}
Expand All @@ -113,6 +108,15 @@ unsafe impl TraceImmutable for DummyCollectorId {
fn visit_immutable<V: GcVisitor>(&self, _visitor: &mut V) -> Result<(), V::Err> {
Ok(())
}

// Object-safe visit for the dummy id: a DummyCollectorId holds no GC
// pointers, so there is nothing to trace.
#[inline]
fn visit_dyn_immutable(&self, _visitor: &mut GcDynVisitor) -> Result<(), GcDynVisitError> {
Ok(())
}
}
/// The dummy id is a zero-field marker type: it contains no GC
/// pointers to trace and no state requiring a destructor.
unsafe impl GcType for DummyCollectorId {
const NEEDS_TRACE: bool = false;
const NEEDS_DROP: bool = false;
}

unsafe impl NullTrace for DummyCollectorId {}
Expand Down
Loading