AtomicPtr without losing pointer provenance #70765

Closed · wants to merge 3 commits
src/libcore/sync/atomic.rs (16 changes: 10 additions & 6 deletions)
@@ -121,6 +121,7 @@ use self::Ordering::*;
 use crate::cell::UnsafeCell;
 use crate::fmt;
 use crate::intrinsics;
+use crate::mem;

 use crate::hint::spin_loop;

@@ -969,8 +970,11 @@ impl<T> AtomicPtr<T> {
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn store(&self, ptr: *mut T, order: Ordering) {
         // SAFETY: data races are prevented by atomic intrinsics.
+        // We transmute to an integer instead of casting to avoid losing pointer
+        // provenance -- in particular this helps Miri not lose track of the pointer
+        // when checking for memory leaks.
         unsafe {
-            atomic_store(self.p.get() as *mut usize, ptr as usize, order);
+            atomic_store(self.p.get() as *mut usize, mem::transmute(ptr), order);
         }
     }
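
For context (not part of the diff), a minimal usage sketch of the leak-checking scenario the new comment refers to: the only live handle to a heap allocation ends up inside an `AtomicPtr`, and the point of the transmute-based store is that the pointer keeps its provenance, so a tool like Miri can still connect the stored value to the allocation. The names and the `main` wrapper are made up for the example.

```rust
use std::ptr;
use std::sync::atomic::{AtomicPtr, Ordering};

fn main() {
    // The only remaining handle to the Box lives inside the AtomicPtr.
    let slot: AtomicPtr<i32> = AtomicPtr::new(ptr::null_mut());
    slot.store(Box::into_raw(Box::new(42)), Ordering::SeqCst);

    // Take the pointer back out and free it. If the atomic had stored only
    // the pointer's integer value, a provenance-tracking tool could no
    // longer tie that integer to the allocation while it sits in the slot.
    let p = slot.swap(ptr::null_mut(), Ordering::SeqCst);
    if !p.is_null() {
        // SAFETY: `p` came from Box::into_raw above and is freed exactly once.
        unsafe { drop(Box::from_raw(p)) };
    }
}
```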

@@ -1003,7 +1007,7 @@ impl<T> AtomicPtr<T> {
     #[cfg(target_has_atomic = "ptr")]
     pub fn swap(&self, ptr: *mut T, order: Ordering) -> *mut T {
         // SAFETY: data races are prevented by atomic intrinsics.
-        unsafe { atomic_swap(self.p.get() as *mut usize, ptr as usize, order) as *mut T }
+        unsafe { atomic_swap(self.p.get() as *mut usize, mem::transmute(ptr), order) as *mut T }
     }

     /// Stores a value into the pointer if the current value is the same as the `current` value.
@@ -1091,8 +1095,8 @@ impl<T> AtomicPtr<T> {
         unsafe {
             let res = atomic_compare_exchange(
                 self.p.get() as *mut usize,
-                current as usize,
-                new as usize,
+                mem::transmute(current),
+                mem::transmute(new),
                 success,
                 failure,
             );
@@ -1155,8 +1159,8 @@ impl<T> AtomicPtr<T> {
         unsafe {
             let res = atomic_compare_exchange_weak(
                 self.p.get() as *mut usize,
-                current as usize,
-                new as usize,
+                mem::transmute(current),
+                mem::transmute(new),
                 success,
                 failure,
             );
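
Aside (not part of the diff): `compare_exchange_weak` is normally driven from a retry loop on the caller side, since it may fail spuriously. A small illustrative sketch with a made-up helper name:

```rust
use std::ptr;
use std::sync::atomic::{AtomicPtr, Ordering};

/// Atomically takes the current pointer out of `slot` (replacing it with
/// null), retrying if `compare_exchange_weak` fails spuriously or the value
/// changes underneath us. Returns None if the slot is already empty.
fn take<T>(slot: &AtomicPtr<T>) -> Option<*mut T> {
    let mut current = slot.load(Ordering::Acquire);
    loop {
        if current.is_null() {
            return None;
        }
        match slot.compare_exchange_weak(
            current,
            ptr::null_mut(),
            Ordering::AcqRel,
            Ordering::Acquire,
        ) {
            Ok(taken) => return Some(taken),
            Err(observed) => current = observed,
        }
    }
}
```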
src/libstd/sys/windows/mutex.rs (19 changes: 12 additions & 7 deletions)
@@ -21,7 +21,8 @@

 use crate::cell::UnsafeCell;
 use crate::mem::{self, MaybeUninit};
-use crate::sync::atomic::{AtomicUsize, Ordering};
+use crate::ptr;
+use crate::sync::atomic::{AtomicPtr, AtomicUsize, Ordering};
 use crate::sys::c;
 use crate::sys::compat;

@@ -112,12 +113,16 @@ impl Mutex {
         let re = box ReentrantMutex::uninitialized();
         re.init();
         let re = Box::into_raw(re);
-        match self.lock.compare_and_swap(0, re as usize, Ordering::SeqCst) {
-            0 => re,
-            n => {
-                Box::from_raw(re).destroy();
-                n as *mut _
-            }
+        // Get an `AtomicPtr` view of `self.lock`.
+        let lock_ptr = &*(&self.lock as *const AtomicUsize as *const AtomicPtr<ReentrantMutex>);
+        let old = lock_ptr.compare_and_swap(ptr::null_mut(), re, Ordering::SeqCst);
+        if old == ptr::null_mut() {
+            // Update successful.
+            re
+        } else {
+            // Someone else won the race, clean up our attempt and return their value.
+            Box::from_raw(re).destroy();
+            old
         }
     }
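
Aside (not part of the diff): the code above keeps the existing `AtomicUsize` field and reinterprets it as an `AtomicPtr<ReentrantMutex>` through a pointer cast, relying on the two atomics sharing a layout. As a standalone sketch of the same race-to-initialize pattern, written against a plain `AtomicPtr` field, using `compare_exchange` instead of `compare_and_swap`, and with a made-up `Inner` type standing in for the boxed `ReentrantMutex`:

```rust
use std::ptr;
use std::sync::atomic::{AtomicPtr, Ordering};

struct Inner; // stand-in for the boxed ReentrantMutex in the real code

/// Returns the pointer stored in `slot`, initializing it on first use.
/// Whoever installs their freshly allocated pointer first wins the race;
/// a loser frees its own allocation and returns the winner's pointer.
fn get_or_init(slot: &AtomicPtr<Inner>) -> *mut Inner {
    let existing = slot.load(Ordering::SeqCst);
    if !existing.is_null() {
        return existing;
    }
    let fresh = Box::into_raw(Box::new(Inner));
    match slot.compare_exchange(ptr::null_mut(), fresh, Ordering::SeqCst, Ordering::SeqCst) {
        Ok(_) => fresh,
        Err(winner) => {
            // Lost the race: reclaim our allocation and use the winner's.
            // SAFETY: `fresh` was just produced by Box::into_raw and never shared.
            unsafe { drop(Box::from_raw(fresh)) };
            winner
        }
    }
}
```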
