Make get_mut const fn on Rust 1.83+
taiki-e committed Oct 3, 2024
1 parent 83911da commit 0dea68c
Showing 12 changed files with 186 additions and 143 deletions.
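The change itself: the public get_mut methods become const fn when the compiler supports stable const_mut_refs (Rust 1.83+), gated by a new portable_atomic_no_const_mut_refs cfg, and the per-backend get_mut helpers are dropped in favor of going through as_ptr. A minimal usage sketch of what this enables — assumed downstream code on Rust 1.83+, not part of the diff:

use portable_atomic::AtomicU32;

// `get_mut` is `const fn` on 1.83+, so it can be called from other const fns.
const fn reset(counter: &mut AtomicU32) {
    *counter.get_mut() = 0;
}

fn main() {
    let mut counter = AtomicU32::new(42);
    reset(&mut counter);
    assert_eq!(*counter.get_mut(), 0);
}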
7 changes: 0 additions & 7 deletions bench/benches/imp/spinlock_fallback.rs
@@ -99,13 +99,6 @@ macro_rules! atomic_int {
}
pub(crate) const IS_ALWAYS_LOCK_FREE: bool = false;

#[inline]
pub(crate) fn get_mut(&mut self) -> &mut $int_type {
// SAFETY: the mutable reference guarantees unique ownership.
// (UnsafeCell::get_mut requires Rust 1.50)
unsafe { &mut *self.v.get() }
}

#[inline]
#[cfg_attr(all(debug_assertions, not(portable_atomic_no_track_caller)), track_caller)]
pub(crate) fn load(&self, order: Ordering) -> $int_type {
6 changes: 5 additions & 1 deletion build.rs
@@ -53,7 +53,7 @@ fn main() {
// Custom cfgs set by build script. Not public API.
// grep -F 'cargo:rustc-cfg=' build.rs | grep -Ev '^ *//' | sed -E 's/^.*cargo:rustc-cfg=//; s/(=\\)?".*$//' | LC_ALL=C sort -u | tr '\n' ',' | sed -E 's/,$/\n/'
println!(
"cargo:rustc-check-cfg=cfg(portable_atomic_disable_fiq,portable_atomic_force_amo,portable_atomic_ll_sc_rmw,portable_atomic_pre_llvm_15,portable_atomic_pre_llvm_16,portable_atomic_pre_llvm_18,portable_atomic_pre_llvm_19,portable_atomic_new_atomic_intrinsics,portable_atomic_no_asm,portable_atomic_no_asm_maybe_uninit,portable_atomic_no_atomic_64,portable_atomic_no_atomic_cas,portable_atomic_no_atomic_load_store,portable_atomic_no_atomic_min_max,portable_atomic_no_cfg_target_has_atomic,portable_atomic_no_cmpxchg16b_intrinsic,portable_atomic_no_cmpxchg16b_target_feature,portable_atomic_no_const_raw_ptr_deref,portable_atomic_no_const_transmute,portable_atomic_no_core_unwind_safe,portable_atomic_no_diagnostic_namespace,portable_atomic_no_stronger_failure_ordering,portable_atomic_no_track_caller,portable_atomic_no_unsafe_op_in_unsafe_fn,portable_atomic_s_mode,portable_atomic_sanitize_thread,portable_atomic_target_feature,portable_atomic_unsafe_assume_single_core,portable_atomic_unstable_asm,portable_atomic_unstable_asm_experimental_arch,portable_atomic_unstable_cfg_target_has_atomic,portable_atomic_unstable_isa_attribute)"
"cargo:rustc-check-cfg=cfg(portable_atomic_disable_fiq,portable_atomic_force_amo,portable_atomic_ll_sc_rmw,portable_atomic_new_atomic_intrinsics,portable_atomic_no_asm,portable_atomic_no_asm_maybe_uninit,portable_atomic_no_atomic_64,portable_atomic_no_atomic_cas,portable_atomic_no_atomic_load_store,portable_atomic_no_atomic_min_max,portable_atomic_no_cfg_target_has_atomic,portable_atomic_no_cmpxchg16b_intrinsic,portable_atomic_no_cmpxchg16b_target_feature,portable_atomic_no_const_mut_refs,portable_atomic_no_const_raw_ptr_deref,portable_atomic_no_const_transmute,portable_atomic_no_core_unwind_safe,portable_atomic_no_diagnostic_namespace,portable_atomic_no_stronger_failure_ordering,portable_atomic_no_track_caller,portable_atomic_no_unsafe_op_in_unsafe_fn,portable_atomic_pre_llvm_15,portable_atomic_pre_llvm_16,portable_atomic_pre_llvm_18,portable_atomic_pre_llvm_19,portable_atomic_s_mode,portable_atomic_sanitize_thread,portable_atomic_target_feature,portable_atomic_unsafe_assume_single_core,portable_atomic_unstable_asm,portable_atomic_unstable_asm_experimental_arch,portable_atomic_unstable_cfg_target_has_atomic,portable_atomic_unstable_isa_attribute)"
);
// TODO: handle multi-line target_feature_fallback
// grep -F 'target_feature_fallback("' build.rs | grep -Ev '^ *//' | sed -E 's/^.*target_feature_fallback\(//; s/",.*$/"/' | LC_ALL=C sort -u | tr '\n' ',' | sed -E 's/,$/\n/'
@@ -118,6 +118,10 @@ fn main() {
if !version.probe(78, 2024, 3, 8) {
println!("cargo:rustc-cfg=portable_atomic_no_diagnostic_namespace");
}
// const_mut_refs/const_refs_to_cell stabilized in Rust 1.83 (nightly-2024-09-16): https://github.com/rust-lang/rust/pull/129195
if !version.probe(83, 2024, 9, 15) {
println!("cargo:rustc-cfg=portable_atomic_no_const_mut_refs");
}

// asm stabilized in Rust 1.59 (nightly-2021-12-16): https://github.com/rust-lang/rust/pull/91728
let no_asm = !version.probe(59, 2021, 12, 15);
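The new probe follows the same pattern as the surrounding cfgs: when the toolchain predates the stabilization of const_mut_refs/const_refs_to_cell (Rust 1.83, nightly-2024-09-16), a portable_atomic_no_const_mut_refs cfg is emitted and the library falls back to the non-const get_mut. A rough external-crate equivalent of the gate, sketched with the rustc_version crate rather than this build script's own version.probe helper (an assumption for illustration; it also ignores the nightly-date handling that probe does):

// build.rs sketch — assumes the `rustc_version` crate as a dependency.
fn main() {
    // Declare the custom cfg so check-cfg does not warn about it.
    println!("cargo:rustc-check-cfg=cfg(no_const_mut_refs)");
    if let Ok(version) = rustc_version::version() {
        // const_mut_refs / const_refs_to_cell are stable since Rust 1.83.
        if version < rustc_version::Version::new(1, 83, 0) {
            println!("cargo:rustc-cfg=no_const_mut_refs");
        }
    }
}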
7 changes: 0 additions & 7 deletions src/imp/atomic128/macros.rs
@@ -25,13 +25,6 @@ macro_rules! atomic128 {
}
pub(crate) const IS_ALWAYS_LOCK_FREE: bool = IS_ALWAYS_LOCK_FREE;

#[inline]
pub(crate) fn get_mut(&mut self) -> &mut $int_type {
// SAFETY: the mutable reference guarantees unique ownership.
// (UnsafeCell::get_mut requires Rust 1.50)
unsafe { &mut *self.v.get() }
}

#[inline]
#[cfg_attr(
any(all(debug_assertions, not(portable_atomic_no_track_caller)), miri),
7 changes: 0 additions & 7 deletions src/imp/atomic64/macros.rs
@@ -25,13 +25,6 @@ macro_rules! atomic64 {
}
pub(crate) const IS_ALWAYS_LOCK_FREE: bool = IS_ALWAYS_LOCK_FREE;

#[inline]
pub(crate) fn get_mut(&mut self) -> &mut $int_type {
// SAFETY: the mutable reference guarantees unique ownership.
// (UnsafeCell::get_mut requires Rust 1.50)
unsafe { &mut *self.v.get() }
}

#[inline]
#[cfg_attr(all(debug_assertions, not(portable_atomic_no_track_caller)), track_caller)]
pub(crate) fn load(&self, order: Ordering) -> $int_type {
8 changes: 0 additions & 8 deletions src/imp/core_atomic.rs
@@ -40,10 +40,6 @@ impl<T> AtomicPtr<T> {
}
pub(crate) const IS_ALWAYS_LOCK_FREE: bool = true;
#[inline]
pub(crate) fn get_mut(&mut self) -> &mut *mut T {
self.inner.get_mut()
}
#[inline]
#[cfg_attr(
any(all(debug_assertions, not(portable_atomic_no_track_caller)), miri),
track_caller
@@ -164,10 +160,6 @@ macro_rules! atomic_int {
))) | (core::mem::size_of::<$int_type>()
< 8);
#[inline]
pub(crate) fn get_mut(&mut self) -> &mut $int_type {
self.inner.get_mut()
}
#[inline]
#[cfg_attr(
any(all(debug_assertions, not(portable_atomic_no_track_caller)), miri),
track_caller
7 changes: 0 additions & 7 deletions src/imp/fallback/mod.rs
@@ -228,13 +228,6 @@ macro_rules! atomic {
}
pub(crate) const IS_ALWAYS_LOCK_FREE: bool = false;

#[inline]
pub(crate) fn get_mut(&mut self) -> &mut $int_type {
// SAFETY: the mutable reference guarantees unique ownership.
// (UnsafeCell::get_mut requires Rust 1.50)
unsafe { &mut *self.v.get() }
}

#[inline]
#[cfg_attr(all(debug_assertions, not(portable_atomic_no_track_caller)), track_caller)]
pub(crate) fn load(&self, order: Ordering) -> $int_type {
7 changes: 0 additions & 7 deletions src/imp/float.rs
@@ -49,13 +49,6 @@ macro_rules! atomic_float {
pub(crate) const IS_ALWAYS_LOCK_FREE: bool =
crate::$atomic_int_type::is_always_lock_free();

#[inline]
pub(crate) fn get_mut(&mut self) -> &mut $float_type {
// SAFETY: the mutable reference guarantees unique ownership.
// (UnsafeCell::get_mut requires Rust 1.50)
unsafe { &mut *self.v.get() }
}

#[inline]
#[cfg_attr(
any(all(debug_assertions, not(portable_atomic_no_track_caller)), miri),
14 changes: 0 additions & 14 deletions src/imp/interrupt/mod.rs
@@ -129,13 +129,6 @@ impl<T> AtomicPtr<T> {
}
pub(crate) const IS_ALWAYS_LOCK_FREE: bool = IS_ALWAYS_LOCK_FREE;

#[inline]
pub(crate) fn get_mut(&mut self) -> &mut *mut T {
// SAFETY: the mutable reference guarantees unique ownership.
// (UnsafeCell::get_mut requires Rust 1.50)
unsafe { &mut *self.p.get() }
}

#[inline]
#[cfg_attr(all(debug_assertions, not(portable_atomic_no_track_caller)), track_caller)]
pub(crate) fn load(&self, order: Ordering) -> *mut T {
@@ -274,13 +267,6 @@ macro_rules! atomic_int {
}
pub(crate) const IS_ALWAYS_LOCK_FREE: bool = IS_ALWAYS_LOCK_FREE;

#[inline]
pub(crate) fn get_mut(&mut self) -> &mut $int_type {
// SAFETY: the mutable reference guarantees unique ownership.
// (UnsafeCell::get_mut requires Rust 1.50)
unsafe { &mut *self.v.get() }
}

#[inline]
pub(crate) const fn as_ptr(&self) -> *mut $int_type {
self.v.get()
8 changes: 0 additions & 8 deletions src/imp/msp430.rs
@@ -88,14 +88,6 @@ macro_rules! atomic {
#[cfg(test)]
pub(crate) const IS_ALWAYS_LOCK_FREE: bool = true;

#[cfg(test)]
#[inline]
pub(crate) fn get_mut(&mut self) -> &mut $value_type {
// SAFETY: the mutable reference guarantees unique ownership.
// (UnsafeCell::get_mut requires Rust 1.50)
unsafe { &mut *self.v.get() }
}

#[inline]
#[cfg_attr(all(debug_assertions, not(portable_atomic_no_track_caller)), track_caller)]
pub(crate) fn load(&self, order: Ordering) -> $value_type {
7 changes: 0 additions & 7 deletions src/imp/riscv.rs
@@ -198,13 +198,6 @@ macro_rules! atomic_load_store {
}
pub(crate) const IS_ALWAYS_LOCK_FREE: bool = true;

#[inline]
pub(crate) fn get_mut(&mut self) -> &mut $value_type {
// SAFETY: the mutable reference guarantees unique ownership.
// (UnsafeCell::get_mut requires Rust 1.50)
unsafe { &mut *self.v.get() }
}

#[inline]
pub(crate) const fn as_ptr(&self) -> *mut $value_type {
self.v.get()
139 changes: 93 additions & 46 deletions src/lib.rs
@@ -662,25 +662,30 @@ impl AtomicBool {
#[cfg(test)]
const IS_ALWAYS_LOCK_FREE: bool = Self::is_always_lock_free();

/// Returns a mutable reference to the underlying [`bool`].
///
/// This is safe because the mutable reference guarantees that no other threads are
/// concurrently accessing the atomic data.
///
/// # Examples
///
/// ```
/// use portable_atomic::{AtomicBool, Ordering};
///
/// let mut some_bool = AtomicBool::new(true);
/// assert_eq!(*some_bool.get_mut(), true);
/// *some_bool.get_mut() = false;
/// assert_eq!(some_bool.load(Ordering::SeqCst), false);
/// ```
#[inline]
pub fn get_mut(&mut self) -> &mut bool {
// SAFETY: the mutable reference guarantees unique ownership.
unsafe { &mut *(self.v.get() as *mut bool) }
const_fn! {
const_if: #[cfg(not(portable_atomic_no_const_mut_refs))];
/// Returns a mutable reference to the underlying [`bool`].
///
/// This is safe because the mutable reference guarantees that no other threads are
/// concurrently accessing the atomic data.
///
/// This is `const fn` on Rust 1.83+.
///
/// # Examples
///
/// ```
/// use portable_atomic::{AtomicBool, Ordering};
///
/// let mut some_bool = AtomicBool::new(true);
/// assert_eq!(*some_bool.get_mut(), true);
/// *some_bool.get_mut() = false;
/// assert_eq!(some_bool.load(Ordering::SeqCst), false);
/// ```
#[inline]
pub const fn get_mut(&mut self) -> &mut bool {
// SAFETY: the mutable reference guarantees unique ownership.
unsafe { &mut *self.as_ptr() }
}
}

// TODO: Add from_mut/get_mut_slice/from_mut_slice once it is stable on std atomic types.
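The const_fn! wrapper used above is not defined in this diff. A sketch of the general shape such a const_if helper could take (an assumption about its interface, not the crate's actual macro): it emits the item as const fn when the cfg predicate holds and as a plain fn otherwise, so MSRV builds keep compiling.

macro_rules! const_fn {
    (
        const_if: #[cfg($($cfg:tt)+)];
        $(#[$attr:meta])*
        $vis:vis const fn $name:ident($($args:tt)*) -> $ret:ty $body:block
    ) => {
        // With the cfg satisfied (e.g. not(portable_atomic_no_const_mut_refs)),
        // emit the function as `const fn`.
        #[cfg($($cfg)+)]
        $(#[$attr])*
        $vis const fn $name($($args)*) -> $ret $body

        // Otherwise emit the identical body as a non-const `fn`.
        #[cfg(not($($cfg)+))]
        $(#[$attr])*
        $vis fn $name($($args)*) -> $ret $body
    };
}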
@@ -1669,25 +1674,32 @@ impl<T> AtomicPtr<T> {
#[cfg(test)]
const IS_ALWAYS_LOCK_FREE: bool = Self::is_always_lock_free();

/// Returns a mutable reference to the underlying pointer.
///
/// This is safe because the mutable reference guarantees that no other threads are
/// concurrently accessing the atomic data.
///
/// # Examples
///
/// ```
/// use portable_atomic::{AtomicPtr, Ordering};
///
/// let mut data = 10;
/// let mut atomic_ptr = AtomicPtr::new(&mut data);
/// let mut other_data = 5;
/// *atomic_ptr.get_mut() = &mut other_data;
/// assert_eq!(unsafe { *atomic_ptr.load(Ordering::SeqCst) }, 5);
/// ```
#[inline]
pub fn get_mut(&mut self) -> &mut *mut T {
self.inner.get_mut()
const_fn! {
const_if: #[cfg(not(portable_atomic_no_const_mut_refs))];
/// Returns a mutable reference to the underlying pointer.
///
/// This is safe because the mutable reference guarantees that no other threads are
/// concurrently accessing the atomic data.
///
/// This is `const fn` on Rust 1.83+.
///
/// # Examples
///
/// ```
/// use portable_atomic::{AtomicPtr, Ordering};
///
/// let mut data = 10;
/// let mut atomic_ptr = AtomicPtr::new(&mut data);
/// let mut other_data = 5;
/// *atomic_ptr.get_mut() = &mut other_data;
/// assert_eq!(unsafe { *atomic_ptr.load(Ordering::SeqCst) }, 5);
/// ```
#[inline]
pub const fn get_mut(&mut self) -> &mut *mut T {
// SAFETY: the mutable reference guarantees unique ownership.
// (core::sync::atomic::Atomic*::get_mut is not const yet)
unsafe { &mut *self.as_ptr() }
}
}

// TODO: Add from_mut/get_mut_slice/from_mut_slice once it is stable on std atomic types.
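As with bool and the integers, the payoff for AtomicPtr is that the slot can be manipulated through &mut inside const fns. A small usage sketch — assumed user code on Rust 1.83+, not taken from this commit:

use portable_atomic::AtomicPtr;

// `get_mut` being `const fn` lets the slot be cleared from a const fn,
// without going through atomic loads/stores.
const fn clear<T>(slot: &mut AtomicPtr<T>) {
    *slot.get_mut() = core::ptr::null_mut();
}

fn main() {
    let mut value = 1u8;
    let mut slot = AtomicPtr::new(&mut value as *mut u8);
    clear(&mut slot);
    assert!(slot.get_mut().is_null());
}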
@@ -2792,11 +2804,39 @@ const IS_ALWAYS_LOCK_FREE: bool = ", stringify!($atomic_type), "::is_always_lock
#[cfg(test)]
const IS_ALWAYS_LOCK_FREE: bool = Self::is_always_lock_free();

#[cfg(not(portable_atomic_no_const_mut_refs))]
doc_comment! {
concat!("Returns a mutable reference to the underlying integer.\n
This is safe because the mutable reference guarantees that no other threads are
concurrently accessing the atomic data.
This is `const fn` on Rust 1.83+.
# Examples
```
use portable_atomic::{", stringify!($atomic_type), ", Ordering};
let mut some_var = ", stringify!($atomic_type), "::new(10);
assert_eq!(*some_var.get_mut(), 10);
*some_var.get_mut() = 5;
assert_eq!(some_var.load(Ordering::SeqCst), 5);
```"),
#[inline]
pub const fn get_mut(&mut self) -> &mut $int_type {
// SAFETY: the mutable reference guarantees unique ownership.
// (core::sync::atomic::Atomic*::get_mut is not const yet)
unsafe { &mut *self.as_ptr() }
}
}
#[cfg(portable_atomic_no_const_mut_refs)]
doc_comment! {
concat!("Returns a mutable reference to the underlying integer.\n
This is safe because the mutable reference guarantees that no other threads are
concurrently accessing the atomic data.
This is `const fn` on Rust 1.83+.
# Examples
```
@@ -2809,7 +2849,8 @@ assert_eq!(some_var.load(Ordering::SeqCst), 5);
```"),
#[inline]
pub fn get_mut(&mut self) -> &mut $int_type {
self.inner.get_mut()
// SAFETY: the mutable reference guarantees unique ownership.
unsafe { &mut *self.as_ptr() }
}
}

@@ -4129,13 +4170,19 @@ This type has the same in-memory representation as the underlying floating point
#[cfg(test)]
const IS_ALWAYS_LOCK_FREE: bool = Self::is_always_lock_free();

/// Returns a mutable reference to the underlying float.
///
/// This is safe because the mutable reference guarantees that no other threads are
/// concurrently accessing the atomic data.
#[inline]
pub fn get_mut(&mut self) -> &mut $float_type {
self.inner.get_mut()
const_fn! {
const_if: #[cfg(not(portable_atomic_no_const_mut_refs))];
/// Returns a mutable reference to the underlying float.
///
/// This is safe because the mutable reference guarantees that no other threads are
/// concurrently accessing the atomic data.
///
/// This is `const fn` on Rust 1.83+.
#[inline]
pub const fn get_mut(&mut self) -> &mut $float_type {
// SAFETY: the mutable reference guarantees unique ownership.
unsafe { &mut *self.as_ptr() }
}
}

// TODO: Add from_mut/get_mut_slice/from_mut_slice once it is stable on std atomic types.