diff --git a/src/timer.rs b/src/timer.rs index d0cec62..a1f4e81 100644 --- a/src/timer.rs +++ b/src/timer.rs @@ -231,9 +231,8 @@ use crate::Instant; use std::cmp::Ordering; -use std::mem; use std::pin::Pin; -use std::sync::atomic::AtomicUsize; +use std::sync::atomic::{AtomicPtr, AtomicUsize}; use std::sync::atomic::Ordering::SeqCst; use std::sync::{Arc, Mutex, Weak}; use std::task::{Context, Poll}; @@ -484,7 +483,8 @@ impl Ord for HeapTimer { } } -static HANDLE_FALLBACK: AtomicUsize = AtomicUsize::new(0); +static HANDLE_FALLBACK: AtomicPtr<Inner> = AtomicPtr::new(EMPTY_HANDLE); +const EMPTY_HANDLE: *mut Inner = std::ptr::null_mut(); /// Error returned from `TimerHandle::set_fallback`. #[derive(Clone, Debug)] @@ -515,23 +515,23 @@ impl TimerHandle { /// successful then no future calls may succeed. pub fn set_as_global_fallback(self) -> Result<(), SetDefaultError> { unsafe { - let val = self.into_usize(); - match HANDLE_FALLBACK.compare_exchange(0, val, SeqCst, SeqCst) { + let val = self.into_raw(); + match HANDLE_FALLBACK.compare_exchange(EMPTY_HANDLE, val, SeqCst, SeqCst) { Ok(_) => Ok(()), Err(_) => { - drop(TimerHandle::from_usize(val)); + drop(TimerHandle::from_raw(val)); Err(SetDefaultError(())) } } } } - fn into_usize(self) -> usize { - unsafe { mem::transmute::<Weak<Inner>, usize>(self.inner) } + fn into_raw(self) -> *mut Inner { + self.inner.into_raw() as *mut Inner } - unsafe fn from_usize(val: usize) -> TimerHandle { - let inner = mem::transmute::<usize, Weak<Inner>>(val); + unsafe fn from_raw(raw: *mut Inner) -> TimerHandle { + let inner = Weak::from_raw(raw); TimerHandle { inner } } } @@ -546,7 +546,7 @@ impl Default for TimerHandle { // actually create a helper thread then we'll just return a "defunkt" // handle which will return errors when timer objects are attempted to // be associated. 
- if fallback == 0 { + if fallback == EMPTY_HANDLE { let helper = match global::HelperThread::new() { Ok(helper) => helper, Err(_) => return TimerHandle { inner: Weak::new() }, @@ -570,11 +570,11 @@ impl Default for TimerHandle { // At this point our fallback handle global was configured so we use // its value to reify a handle, clone it, and then forget our reified // handle as we don't actually have an owning reference to it. - assert!(fallback != 0); + assert!(fallback != EMPTY_HANDLE); unsafe { - let handle = TimerHandle::from_usize(fallback); + let handle = TimerHandle::from_raw(fallback); let ret = handle.clone(); - drop(handle.into_usize()); + drop(handle.into_raw()); return ret; } } diff --git a/src/timer/arc_list.rs b/src/timer/arc_list.rs index 4388052..38b23ce 100644 --- a/src/timer/arc_list.rs +++ b/src/timer/arc_list.rs @@ -3,18 +3,18 @@ use std::marker; use std::ops::Deref; use std::sync::atomic::Ordering::SeqCst; -use std::sync::atomic::{AtomicBool, AtomicUsize}; +use std::sync::atomic::{AtomicBool, AtomicPtr}; use std::sync::Arc; pub struct ArcList<T> { - list: AtomicUsize, + list: AtomicPtr<Node<T>>, _marker: marker::PhantomData<T>, } impl<T> ArcList<T> { pub fn new() -> ArcList<T> { ArcList { - list: AtomicUsize::new(0), + list: AtomicPtr::new(Node::EMPTY), _marker: marker::PhantomData, } } @@ -31,12 +31,12 @@ impl<T> ArcList<T> { return Ok(()); } let mut head = self.list.load(SeqCst); - let node = Arc::into_raw(data.clone()) as usize; + let node = Arc::into_raw(data.clone()) as *mut Node<T>; loop { // If we've been sealed off, abort and return an error - if head == 1 { + if head == Node::sealed() { unsafe { - drop(Arc::from_raw(node as *mut Node<T>)); + drop(Arc::from_raw(node)); } return Err(()); } @@ -55,16 +55,16 @@ pub fn take(&self) -> ArcList<T> { let mut list = self.list.load(SeqCst); loop { - if list == 1 { + if list == Node::sealed() { break; } - match self.list.compare_exchange(list, 0, SeqCst, SeqCst) { + match self.list.compare_exchange(list, Node::EMPTY, 
SeqCst, SeqCst) { Ok(_) => break, Err(l) => list = l, } } ArcList { - list: AtomicUsize::new(list), + list: AtomicPtr::new(list), _marker: marker::PhantomData, } } @@ -73,7 +73,7 @@ impl<T> ArcList<T> { /// `push`. pub fn take_and_seal(&self) -> ArcList<T> { ArcList { - list: AtomicUsize::new(self.list.swap(1, SeqCst)), + list: AtomicPtr::new(self.list.swap(Node::sealed(), SeqCst)), _marker: marker::PhantomData, } } @@ -82,10 +82,10 @@ /// empty list. pub fn pop(&mut self) -> Option<Arc<Node<T>>> { let head = *self.list.get_mut(); - if head == 0 || head == 1 { + if head == Node::EMPTY || head == Node::sealed() { return None; } - let head = unsafe { Arc::from_raw(head as *const Node<T>) }; + let head = unsafe { Arc::from_raw(head) }; *self.list.get_mut() = head.next.load(SeqCst); // At this point, the node is out of the list, so store `false` so we // can enqueue it again and see further changes. @@ -103,15 +103,21 @@ } pub struct Node<T> { - next: AtomicUsize, + next: AtomicPtr<Node<T>>, enqueued: AtomicBool, data: T, } impl<T> Node<T> { + const EMPTY: *mut Node<T> = std::ptr::null_mut(); + + const fn sealed() -> *mut Node<T> { + std::ptr::null_mut::<Node<T>>().wrapping_add(1) + } + pub fn new(data: T) -> Node<T> { Node { - next: AtomicUsize::new(0), + next: AtomicPtr::new(Node::EMPTY), enqueued: AtomicBool::new(false), data: data, }