Enum layout optimizations #19536

Closed
wants to merge 11 commits into from
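Every hunk below makes the same change: the raw pointer field inside `Arc`, `Weak`, `Rc`, and `Vec` is wrapped in `NonZero`, a lang-item type that promises the compiler the value is never the all-zero bit pattern. The compiler can then use that forbidden pattern as the discriminant of enums such as `Option`, so `Option<Arc<T>>` and friends become pointer-sized. A minimal demonstration of the layout optimization in today's Rust, where `NonNull` carries the same guarantee (illustrative, not part of this PR):

```rust
use std::mem::size_of;
use std::ptr::NonNull;

fn main() {
    // Zero is a forbidden bit pattern for these types, so `None` can be
    // encoded as all-zero and no separate discriminant is needed.
    assert_eq!(size_of::<Option<Box<u32>>>(), size_of::<Box<u32>>());
    assert_eq!(size_of::<Option<NonNull<u32>>>(), size_of::<NonNull<u32>>());

    // A plain raw pointer may legitimately be null, so `Option` around
    // it must pay for an extra discriminant word.
    assert!(size_of::<Option<*mut u32>>() > size_of::<*mut u32>());
}
```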
26 changes: 14 additions & 12 deletions src/liballoc/arc.rs
@@ -24,8 +24,7 @@ use core::mem;
use core::ops::{Drop, Deref};
use core::option::Option;
use core::option::Option::{Some, None};
- use core::ptr::RawPtr;
- use core::ptr;
+ use core::ptr::{mod, NonZero, RawPtr};
use heap::deallocate;
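(Aside: the `mod` in the new import is the pre-1.0 spelling of a self-import; the keyword was later renamed, so today the same import would be spelled with `self`. `RawPtr` itself is long gone; this is purely about the syntax.)

```rust
use core::ptr::{self, NonZero, RawPtr}; // modern spelling of the import above
```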

/// An atomically reference counted wrapper for shared state.
@@ -59,7 +58,7 @@ use heap::deallocate;
pub struct Arc<T> {
// FIXME #12808: strange name to try to avoid interfering with
// field accesses of the contained type via Deref
- _ptr: *mut ArcInner<T>,
+ _ptr: NonZero<*mut ArcInner<T>>,
}

/// A weak pointer to an `Arc`.
@@ -71,7 +70,7 @@ pub struct Arc<T> {
pub struct Weak<T> {
// FIXME #12808: strange name to try to avoid interfering with
// field accesses of the contained type via Deref
- _ptr: *mut ArcInner<T>,
+ _ptr: NonZero<*mut ArcInner<T>>,
}
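For reference, the `NonZero` wrapper used here was essentially a transparent newtype plus a lang item. The sketch below is an approximation of the era's libcore, not the verbatim source; the exact marker bound (`Zeroable`) and whether the constructor was `unsafe` at this revision are assumptions:

```rust
use core::ops::Deref;

/// A wrapper for values known to never be zero/null, unlocking the
/// enum layout optimization. (`Zeroable` was a marker trait of the era.)
#[lang = "non_zero"]
pub struct NonZero<T: Zeroable>(T);

impl<T: Zeroable> NonZero<T> {
    /// Caller must guarantee `inner` is not zero; storing zero here
    /// would be undefined behavior for any enclosing optimized enum.
    pub unsafe fn new(inner: T) -> NonZero<T> {
        NonZero(inner)
    }
}

impl<T: Zeroable> Deref for NonZero<T> {
    type Target = T;
    fn deref(&self) -> &T {
        let NonZero(ref inner) = *self;
        inner
    }
}
```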

struct ArcInner<T> {
@@ -92,7 +91,7 @@ impl<T: Sync + Send> Arc<T> {
weak: atomic::AtomicUint::new(1),
data: data,
};
- Arc { _ptr: unsafe { mem::transmute(x) } }
+ Arc { _ptr: unsafe { NonZero::new(mem::transmute(x)) } }
}

/// Downgrades a strong pointer to a weak pointer.
@@ -116,7 +115,7 @@ impl<T> Arc<T> {
// `ArcInner` structure itself is `Sync` because the inner data is
// `Sync` as well, so we're ok loaning out an immutable pointer to
// these contents.
- unsafe { &*self._ptr }
+ unsafe { &**self._ptr }
}
}
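Because `NonZero<T>` derefs to its inner `T` (see the sketch above), every `&*self._ptr` becomes `&**self._ptr`: the first `*` unwraps the `NonZero`, the second dereferences the raw pointer. Spelled out step by step:

```rust
// self._ptr    has type NonZero<*mut ArcInner<T>>
// *self._ptr   has type *mut ArcInner<T>   (Deref out of the wrapper)
// **self._ptr  has type ArcInner<T>        (dereference the raw pointer; unsafe)
// &**self._ptr has type &ArcInner<T>       (reborrow as a shared reference)
```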

@@ -184,7 +183,7 @@ impl<T: Send + Sync + Clone> Arc<T> {
// reference count is guaranteed to be 1 at this point, and we required
// the Arc itself to be `mut`, so we're returning the only possible
// reference to the inner data.
- let inner = unsafe { &mut *self._ptr };
+ let inner = unsafe { &mut **self._ptr };
&mut inner.data
}
}
@@ -193,10 +192,11 @@ impl<T: Send + Sync + Clone> Arc<T> {
#[experimental = "waiting on stability of Drop"]
impl<T: Sync + Send> Drop for Arc<T> {
fn drop(&mut self) {
+ let ptr = *self._ptr;
// This structure has #[unsafe_no_drop_flag], so this drop glue may run
// more than once (but it is guaranteed to be zeroed after the first if
// it's run more than once)
- if self._ptr.is_null() { return }
+ if ptr.is_null() { return }

// Because `fetch_sub` is already atomic, we do not need to synchronize
// with other threads unless we are going to delete the object. This
@@ -228,7 +228,7 @@ impl<T: Sync + Send> Drop for Arc<T> {

if self.inner().weak.fetch_sub(1, atomic::Release) == 1 {
atomic::fence(atomic::Acquire);
- unsafe { deallocate(self._ptr as *mut u8, size_of::<ArcInner<T>>(),
+ unsafe { deallocate(ptr as *mut u8, size_of::<ArcInner<T>>(),
min_align_of::<ArcInner<T>>()) }
}
}
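The ordering comments above are the classic refcount protocol: every decrement uses `Release` so each thread's writes to the shared data are published, and the thread that sees the count hit zero issues an `Acquire` fence before destroying the data, so it observes all of those writes. A self-contained sketch in today's Rust (the `Inner`/`release` names are mine, not this PR's):

```rust
use std::sync::atomic::{fence, AtomicUsize, Ordering};

struct Inner {
    strong: AtomicUsize,
}

/// Returns true when the caller dropped the last reference and is
/// therefore the one responsible for destroying the shared data.
fn release(inner: &Inner) -> bool {
    // Release publishes this thread's prior writes to the data...
    if inner.strong.fetch_sub(1, Ordering::Release) != 1 {
        return false;
    }
    // ...and this Acquire fence pairs with every such Release, so the
    // destroying thread sees all other threads' writes before cleanup.
    fence(Ordering::Acquire);
    true
}

fn main() {
    let inner = Inner { strong: AtomicUsize::new(2) };
    assert!(!release(&inner)); // another reference is still alive
    assert!(release(&inner));  // last one out does the cleanup
}
```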
@@ -256,7 +256,7 @@ impl<T: Sync + Send> Weak<T> {
#[inline]
fn inner(&self) -> &ArcInner<T> {
// See comments above for why this is "safe"
- unsafe { &*self._ptr }
+ unsafe { &**self._ptr }
}
}

@@ -274,15 +274,17 @@ impl<T: Sync + Send> Clone for Weak<T> {
#[experimental = "Weak pointers may not belong in this module."]
impl<T: Sync + Send> Drop for Weak<T> {
fn drop(&mut self) {
+ let ptr = *self._ptr;

// see comments above for why this check is here
- if self._ptr.is_null() { return }
+ if ptr.is_null() { return }

// If we find out that we were the last weak pointer, then it's time to
// deallocate the data entirely. See the discussion in Arc::drop() about
// the memory orderings
if self.inner().weak.fetch_sub(1, atomic::Release) == 1 {
atomic::fence(atomic::Acquire);
- unsafe { deallocate(self._ptr as *mut u8, size_of::<ArcInner<T>>(),
+ unsafe { deallocate(ptr as *mut u8, size_of::<ArcInner<T>>(),
min_align_of::<ArcInner<T>>()) }
}
}
31 changes: 16 additions & 15 deletions src/liballoc/rc.rs
@@ -151,8 +151,7 @@ use core::mem::{transmute, min_align_of, size_of, forget};
use core::ops::{Deref, Drop};
use core::option::Option;
use core::option::Option::{Some, None};
- use core::ptr;
- use core::ptr::RawPtr;
+ use core::ptr::{mod, NonZero, RawPtr};
use core::result::Result;
use core::result::Result::{Ok, Err};

@@ -172,7 +171,7 @@ struct RcBox<T> {
pub struct Rc<T> {
// FIXME #12808: strange names to try to avoid interfering with
// field accesses of the contained type via Deref
- _ptr: *mut RcBox<T>,
+ _ptr: NonZero<*mut RcBox<T>>,
_nosend: marker::NoSend,
_noshare: marker::NoSync
}
@@ -196,11 +195,11 @@ impl<T> Rc<T> {
// destructor never frees the allocation while the
// strong destructor is running, even if the weak
// pointer is stored inside the strong one.
- _ptr: transmute(box RcBox {
+ _ptr: NonZero::new(transmute(box RcBox {
value: value,
strong: Cell::new(1),
weak: Cell::new(1)
- }),
+ })),
_nosend: marker::NoSend,
_noshare: marker::NoSync
}
@@ -281,7 +280,7 @@ pub fn try_unwrap<T>(rc: Rc<T>) -> Result<T, Rc<T>> {
let val = ptr::read(&*rc); // copy the contained object
// destruct the box and skip our Drop
// we can ignore the refcounts because we know we're unique
- deallocate(rc._ptr as *mut u8, size_of::<RcBox<T>>(),
+ deallocate(*rc._ptr as *mut u8, size_of::<RcBox<T>>(),
min_align_of::<RcBox<T>>());
forget(rc);
Ok(val)
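`try_unwrap` moves the value out of an `Rc` when the caller holds the only strong reference: `ptr::read` copies the payload out, the box is deallocated by hand, and `forget` skips the normal destructor so the refcounts are never touched twice. Its descendant survives as an associated function in today's std; usage:

```rust
use std::rc::Rc;

fn main() {
    let unique = Rc::new(3);
    assert_eq!(Rc::try_unwrap(unique), Ok(3)); // sole owner: value moves out

    let shared = Rc::new(4);
    let other = shared.clone();
    assert!(Rc::try_unwrap(shared).is_err()); // still shared: Rc handed back
    assert_eq!(*other, 4);
}
```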
@@ -311,7 +310,7 @@ pub fn try_unwrap<T>(rc: Rc<T>) -> Result<T, Rc<T>> {
#[experimental]
pub fn get_mut<'a, T>(rc: &'a mut Rc<T>) -> Option<&'a mut T> {
if is_unique(rc) {
- let inner = unsafe { &mut *rc._ptr };
+ let inner = unsafe { &mut **rc._ptr };
Some(&mut inner.value)
} else {
None
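`get_mut` hands out a mutable borrow only when the reference counts prove the `Rc` is unshared, which is what makes the `&mut **rc._ptr` above sound. The modern equivalent behaves identically:

```rust
use std::rc::Rc;

fn main() {
    let mut rc = Rc::new(5);
    *Rc::get_mut(&mut rc).unwrap() += 1; // unique, so mutation is allowed
    assert_eq!(*rc, 6);

    let other = rc.clone();
    assert!(Rc::get_mut(&mut rc).is_none()); // shared, so no mutable access

    drop(other);
    assert!(Rc::get_mut(&mut rc).is_some()); // unique again
}
```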
@@ -344,7 +343,7 @@ impl<T: Clone> Rc<T> {
// reference count is guaranteed to be 1 at this point, and we required
// the `Rc<T>` itself to be `mut`, so we're returning the only possible
// reference to the inner value.
- let inner = unsafe { &mut *self._ptr };
+ let inner = unsafe { &mut **self._ptr };
&mut inner.value
}
}
@@ -386,7 +385,8 @@ impl<T> Drop for Rc<T> {
/// ```
fn drop(&mut self) {
unsafe {
- if !self._ptr.is_null() {
+ let ptr = *self._ptr;
+ if !ptr.is_null() {
self.dec_strong();
if self.strong() == 0 {
ptr::read(&**self); // destroy the contained object
Expand All @@ -396,7 +396,7 @@ impl<T> Drop for Rc<T> {
self.dec_weak();

if self.weak() == 0 {
- deallocate(self._ptr as *mut u8, size_of::<RcBox<T>>(),
+ deallocate(ptr as *mut u8, size_of::<RcBox<T>>(),
min_align_of::<RcBox<T>>())
}
}
@@ -604,7 +604,7 @@ impl<T: fmt::Show> fmt::Show for Rc<T> {
pub struct Weak<T> {
// FIXME #12808: strange names to try to avoid interfering with
// field accesses of the contained type via Deref
- _ptr: *mut RcBox<T>,
+ _ptr: NonZero<*mut RcBox<T>>,
_nosend: marker::NoSend,
_noshare: marker::NoSync
}
@@ -668,12 +668,13 @@ impl<T> Drop for Weak<T> {
/// ```
fn drop(&mut self) {
unsafe {
- if !self._ptr.is_null() {
+ let ptr = *self._ptr;
+ if !ptr.is_null() {
self.dec_weak();
// the weak count starts at 1, and will only go to
// zero if all the strong pointers have disappeared.
if self.weak() == 0 {
- deallocate(self._ptr as *mut u8, size_of::<RcBox<T>>(),
+ deallocate(ptr as *mut u8, size_of::<RcBox<T>>(),
min_align_of::<RcBox<T>>())
}
}
@@ -728,12 +729,12 @@ trait RcBoxPtr<T> {

impl<T> RcBoxPtr<T> for Rc<T> {
#[inline(always)]
- fn inner(&self) -> &RcBox<T> { unsafe { &(*self._ptr) } }
+ fn inner(&self) -> &RcBox<T> { unsafe { &(**self._ptr) } }
}

impl<T> RcBoxPtr<T> for Weak<T> {
#[inline(always)]
- fn inner(&self) -> &RcBox<T> { unsafe { &(*self._ptr) } }
+ fn inner(&self) -> &RcBox<T> { unsafe { &(**self._ptr) } }
}

#[cfg(test)]
48 changes: 26 additions & 22 deletions src/libcollections/vec.rs
@@ -26,6 +26,7 @@ use core::mem;
use core::num::{Int, UnsignedInt};
use core::ops;
use core::ptr;
+ use core::ptr::NonZero;
use core::raw::Slice as RawSlice;
use core::uint;

@@ -103,7 +104,7 @@ use slice::{CloneSliceAllocPrelude};
#[unsafe_no_drop_flag]
#[stable]
pub struct Vec<T> {
- ptr: *mut T,
+ ptr: NonZero<*mut T>,
len: uint,
cap: uint,
}
@@ -146,7 +147,7 @@ impl<T> Vec<T> {
// non-null value which is fine since we never call deallocate on the ptr
// if cap is 0. The reason for this is because the pointer of a slice
// being NULL would break the null pointer optimization for enums.
- Vec { ptr: EMPTY as *mut T, len: 0, cap: 0 }
+ Vec { ptr: unsafe { NonZero::new(EMPTY as *mut T) }, len: 0, cap: 0 }
}
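The comment above is the linchpin of the `Vec` part of this PR: an empty vector never touches the allocator, yet its data pointer is the non-null sentinel `EMPTY` rather than NULL, so the `NonZero` invariant holds in every state. Today's `Vec` keeps the same invariant via a well-aligned dangling pointer; a quick check of the behavior (modern std, not this PR's code):

```rust
use std::mem::size_of;
use std::ptr::NonNull;

fn main() {
    // An empty Vec allocates nothing, but its data pointer is still
    // non-null: a well-aligned dangling sentinel, never freed.
    let v: Vec<u64> = Vec::new();
    assert_eq!(v.capacity(), 0);
    assert!(!v.as_ptr().is_null());

    // NonNull::dangling() is the modern counterpart of the EMPTY sentinel:
    // its address equals the type's alignment.
    let sentinel: NonNull<u64> = NonNull::dangling();
    assert_eq!(sentinel.as_ptr() as usize, std::mem::align_of::<u64>());

    // The payoff of keeping the field non-null in every state:
    assert_eq!(size_of::<Option<Vec<u64>>>(), size_of::<Vec<u64>>());
}
```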

/// Constructs a new, empty `Vec` with the specified capacity.
@@ -180,15 +181,15 @@ impl<T> Vec<T> {
#[stable]
pub fn with_capacity(capacity: uint) -> Vec<T> {
if mem::size_of::<T>() == 0 {
- Vec { ptr: EMPTY as *mut T, len: 0, cap: uint::MAX }
+ Vec { ptr: unsafe { NonZero::new(EMPTY as *mut T) }, len: 0, cap: uint::MAX }
} else if capacity == 0 {
Vec::new()
} else {
let size = capacity.checked_mul(mem::size_of::<T>())
.expect("capacity overflow");
let ptr = unsafe { allocate(size, mem::min_align_of::<T>()) };
if ptr.is_null() { ::alloc::oom() }
- Vec { ptr: ptr as *mut T, len: 0, cap: capacity }
+ Vec { ptr: unsafe { NonZero::new(ptr as *mut T) }, len: 0, cap: capacity }
}
}

@@ -261,7 +262,7 @@ impl<T> Vec<T> {
#[unstable = "needs finalization"]
pub unsafe fn from_raw_parts(ptr: *mut T, length: uint,
capacity: uint) -> Vec<T> {
- Vec { ptr: ptr, len: length, cap: capacity }
+ Vec { ptr: NonZero::new(ptr), len: length, cap: capacity }
}
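`from_raw_parts` now feeds the caller's pointer straight into `NonZero::new`, so it silently gains a precondition: the pointer must be non-null, on top of describing a live allocation with the right length and capacity. A round trip in today's Rust that respects those invariants:

```rust
use std::mem::ManuallyDrop;

fn main() {
    // Disassemble a Vec without running its destructor...
    let mut v = ManuallyDrop::new(vec![1, 2, 3]);
    let (ptr, len, cap) = (v.as_mut_ptr(), v.len(), v.capacity());

    // ...and reassemble it. Sound only because ptr/len/cap describe a
    // live allocation owned by Vec, and ptr is necessarily non-null.
    let rebuilt = unsafe { Vec::from_raw_parts(ptr, len, cap) };
    assert_eq!(rebuilt, [1, 2, 3]);
}
```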

/// Creates a vector by copying the elements from a raw pointer.
@@ -746,19 +747,20 @@ impl<T> Vec<T> {
if self.len == 0 {
if self.cap != 0 {
unsafe {
- dealloc(self.ptr, self.cap)
+ dealloc(*self.ptr, self.cap)
}
self.cap = 0;
}
} else {
unsafe {
// Overflow check is unnecessary as the vector is already at
// least this large.
- self.ptr = reallocate(self.ptr as *mut u8,
- self.cap * mem::size_of::<T>(),
- self.len * mem::size_of::<T>(),
- mem::min_align_of::<T>()) as *mut T;
- if self.ptr.is_null() { ::alloc::oom() }
+ let ptr = reallocate(*self.ptr as *mut u8,
+ self.cap * mem::size_of::<T>(),
+ self.len * mem::size_of::<T>(),
+ mem::min_align_of::<T>()) as *mut T;
+ if ptr.is_null() { ::alloc::oom() }
+ self.ptr = NonZero::new(ptr);
}
self.cap = self.len;
}
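Note the shape this hunk shares with the two growth paths further down: the code can no longer assign a raw reallocation result straight into the field, so it unwraps the `NonZero`, reallocates, null-checks, and only then restores the wrapper, keeping the not-null invariant unbroken at every step. Schematically (condensed from the diff itself):

```rust
// let ptr = reallocate(*self.ptr as *mut u8, old_size, new_size, align) as *mut T;
// if ptr.is_null() { ::alloc::oom() }   // never store a null...
// self.ptr = NonZero::new(ptr);         // ...so the invariant is upheld
```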
@@ -818,7 +820,7 @@ impl<T> Vec<T> {
pub fn as_mut_slice<'a>(&'a mut self) -> &'a mut [T] {
unsafe {
mem::transmute(RawSlice {
- data: self.ptr as *const T,
+ data: *self.ptr as *const T,
len: self.len,
})
}
@@ -841,9 +843,9 @@ impl<T> Vec<T> {
#[unstable = "matches collection reform specification, waiting for dust to settle"]
pub fn into_iter(self) -> MoveItems<T> {
unsafe {
- let ptr = self.ptr;
+ let ptr = *self.ptr;
let cap = self.cap;
- let begin = self.ptr as *const T;
+ let begin = ptr as *const T;
let end = if mem::size_of::<T>() == 0 {
(ptr as uint + self.len()) as *const T
} else {
@@ -1060,14 +1062,15 @@ impl<T> Vec<T> {
let size = max(old_size, 2 * mem::size_of::<T>()) * 2;
if old_size > size { panic!("capacity overflow") }
unsafe {
- self.ptr = alloc_or_realloc(self.ptr, old_size, size);
- if self.ptr.is_null() { ::alloc::oom() }
+ let ptr = alloc_or_realloc(*self.ptr, old_size, size);
+ if ptr.is_null() { ::alloc::oom() }
+ self.ptr = NonZero::new(ptr);
}
self.cap = max(self.cap, 2) * 2;
}

unsafe {
- let end = (self.ptr as *const T).offset(self.len as int) as *mut T;
+ let end = (*self.ptr).offset(self.len as int);
ptr::write(&mut *end, value);
self.len += 1;
}
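The growth policy visible above (`self.cap = max(self.cap, 2) * 2`) doubles the buffer each time it fills, which is what makes `push` amortized O(1): a million pushes trigger only about twenty reallocations. A toy model of just the policy:

```rust
use std::cmp::max;

fn main() {
    let (mut cap, mut reallocs) = (0usize, 0usize);
    for len in 0..1_000_000usize {
        if len == cap {
            cap = max(cap, 2) * 2; // mirrors the diff's growth rule
            reallocs += 1;
        }
    }
    assert!(reallocs <= 20); // ~19 reallocations for 1,000,000 pushes
    println!("{} pushes -> {} reallocations", 1_000_000, reallocs);
}
```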
@@ -1147,8 +1150,9 @@ impl<T> Vec<T> {
let size = capacity.checked_mul(mem::size_of::<T>())
.expect("capacity overflow");
unsafe {
- self.ptr = alloc_or_realloc(self.ptr, self.cap * mem::size_of::<T>(), size);
- if self.ptr.is_null() { ::alloc::oom() }
+ let ptr = alloc_or_realloc(*self.ptr, self.cap * mem::size_of::<T>(), size);
+ if ptr.is_null() { ::alloc::oom() }
+ self.ptr = NonZero::new(ptr);
}
self.cap = capacity;
}
@@ -1269,7 +1273,7 @@ impl<T> AsSlice<T> for Vec<T> {
fn as_slice<'a>(&'a self) -> &'a [T] {
unsafe {
mem::transmute(RawSlice {
- data: self.ptr as *const T,
+ data: *self.ptr as *const T,
len: self.len
})
}
Expand All @@ -1296,7 +1300,7 @@ impl<T> Drop for Vec<T> {
for x in self.iter() {
ptr::read(x);
}
- dealloc(self.ptr, self.cap)
+ dealloc(*self.ptr, self.cap)
}
}
}
@@ -1332,7 +1336,7 @@ impl<T> MoveItems<T> {
for _x in self { }
let MoveItems { allocation, cap, ptr: _ptr, end: _end } = self;
mem::forget(self);
- Vec { ptr: allocation, cap: cap, len: 0 }
+ Vec { ptr: NonZero::new(allocation), cap: cap, len: 0 }
}
}
