@@ -3,9 +3,6 @@
 // * no poisoning
 // * init function can fail
 
-// Our polyfills collide with the strict_provenance feature
-#![allow(unstable_name_collisions)]
-
 use std::{
     cell::{Cell, UnsafeCell},
     hint::unreachable_unchecked,
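
The two deleted lines above are the point of the whole change: the old polyfill was a trait whose `addr`/`with_addr`/`map_addr` methods share names with the unstable inherent pointer methods behind std's `strict_provenance` feature, and `#![allow(unstable_name_collisions)]` existed only to silence the resulting lint. The free functions introduced at the bottom of this diff cannot collide with inherent methods, so both the comment and the attribute can be dropped.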
@@ -174,11 +171,11 @@ impl Drop for Guard<'_> {
     fn drop(&mut self) {
         let queue = self.queue.swap(self.new_queue, Ordering::AcqRel);
 
-        let state = queue.addr() & STATE_MASK;
+        let state = strict::addr(queue) & STATE_MASK;
         assert_eq!(state, RUNNING);
 
         unsafe {
-            let mut waiter = queue.map_addr(|q| q & !STATE_MASK);
+            let mut waiter = strict::map_addr(queue, |q| q & !STATE_MASK);
             while !waiter.is_null() {
                 let next = (*waiter).next;
                 let thread = (*waiter).thread.take().unwrap();
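
Both replaced call sites here read the state out of the queue pointer's low bits. A minimal sketch of the tagging scheme the masking relies on; the constant values are assumptions based on the rest of the file, which this diff does not show:

    // Assumed constants (not shown in the diff): the two low bits of the
    // AtomicPtr<Waiter> value hold the cell's state, the rest is the pointer.
    const INCOMPLETE: usize = 0x0; // `init` has not run yet
    const RUNNING: usize = 0x1;    // some thread is currently running `init`
    const COMPLETE: usize = 0x2;   // the value is initialized
    const STATE_MASK: usize = 0b11;

`strict::addr(queue) & STATE_MASK` extracts the state; `strict::map_addr(queue, |q| q & !STATE_MASK)` recovers a dereferenceable head-of-queue pointer without losing provenance.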
@@ -201,13 +198,13 @@ fn initialize_or_wait(queue: &AtomicPtr<Waiter>, mut init: Option<&mut dyn FnMut
     let mut curr_queue = queue.load(Ordering::Acquire);
 
     loop {
-        let curr_state = curr_queue.addr() & STATE_MASK;
+        let curr_state = strict::addr(curr_queue) & STATE_MASK;
         match (curr_state, &mut init) {
             (COMPLETE, _) => return,
             (INCOMPLETE, Some(init)) => {
                 let exchange = queue.compare_exchange(
                     curr_queue,
-                    curr_queue.map_addr(|q| (q & !STATE_MASK) | RUNNING),
+                    strict::map_addr(curr_queue, |q| (q & !STATE_MASK) | RUNNING),
                     Ordering::Acquire,
                     Ordering::Acquire,
                 );
@@ -231,23 +228,23 @@ fn initialize_or_wait(queue: &AtomicPtr<Waiter>, mut init: Option<&mut dyn FnMut
     }
 }
 
 fn wait(queue: &AtomicPtr<Waiter>, mut curr_queue: *mut Waiter) {
-    let curr_state = curr_queue.addr() & STATE_MASK;
+    let curr_state = strict::addr(curr_queue) & STATE_MASK;
     loop {
         let node = Waiter {
             thread: Cell::new(Some(thread::current())),
             signaled: AtomicBool::new(false),
-            next: curr_queue.map_addr(|q| q & !STATE_MASK),
+            next: strict::map_addr(curr_queue, |q| q & !STATE_MASK),
         };
         let me = &node as *const Waiter as *mut Waiter;
 
         let exchange = queue.compare_exchange(
             curr_queue,
-            me.map_addr(|q| q | curr_state),
+            strict::map_addr(me, |q| q | curr_state),
             Ordering::Release,
             Ordering::Relaxed,
         );
         if let Err(new_queue) = exchange {
-            if new_queue.addr() & STATE_MASK != curr_state {
+            if strict::addr(new_queue) & STATE_MASK != curr_state {
                 return;
             }
             curr_queue = new_queue;
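
`wait` pushes a node for the current thread onto the intrusive list held in the same tagged pointer. The `Waiter` struct itself is defined outside the hunks shown; this reconstruction follows the field accesses visible above:

    use std::{cell::Cell, sync::atomic::AtomicBool, thread::Thread};

    // Reconstructed from usage in the diff (`thread.take()`, `signaled`,
    // `(*waiter).next`); the actual definition is not part of this diff.
    struct Waiter {
        thread: Cell<Option<Thread>>,
        signaled: AtomicBool,
        next: *mut Waiter,
    }

Because the node lives on the waiting thread's stack, the `compare_exchange` publishes it with `Ordering::Release`, and the waiter may only return once `signaled` is set; that is the handshake `Guard::drop` completes in the first hunk when it takes each waiter's `Thread` handle.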
@@ -261,32 +258,26 @@ fn wait(queue: &AtomicPtr<Waiter>, mut curr_queue: *mut Waiter) {
     }
 }
 
-// This trait is copied directly from the implementation of https://crates.io/crates/sptr
-trait Strict {
-    type Pointee;
-    fn addr(self) -> usize;
-    fn with_addr(self, addr: usize) -> Self;
-    fn map_addr(self, f: impl FnOnce(usize) -> usize) -> Self;
-}
-
-impl<T> Strict for *mut T {
-    type Pointee = T;
-
+// Polyfill of strict provenance from https://crates.io/crates/sptr.
+//
+// Use free-standing functions rather than a trait to keep things simple and
+// avoid any potential conflicts with a future stable std API.
+mod strict {
     #[must_use]
     #[inline]
-    fn addr(self) -> usize
+    pub(crate) fn addr<T>(ptr: *mut T) -> usize
     where
         T: Sized,
     {
         // FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic.
         // SAFETY: Pointer-to-integer transmutes are valid (if you are okay with losing the
         // provenance).
-        unsafe { core::mem::transmute(self) }
+        unsafe { core::mem::transmute(ptr) }
     }
 
     #[must_use]
     #[inline]
-    fn with_addr(self, addr: usize) -> Self
+    pub(crate) fn with_addr<T>(ptr: *mut T, addr: usize) -> *mut T
     where
         T: Sized,
     {
@@ -295,23 +286,23 @@ impl<T> Strict for *mut T {
         // In the mean-time, this operation is defined to be "as if" it was
         // a wrapping_offset, so we can emulate it as such. This should properly
         // restore pointer provenance even under today's compiler.
-        let self_addr = self.addr() as isize;
+        let self_addr = self::addr(ptr) as isize;
         let dest_addr = addr as isize;
         let offset = dest_addr.wrapping_sub(self_addr);
 
         // This is the canonical desugaring of this operation,
         // but `pointer::cast` was only stabilized in 1.38.
         // self.cast::<u8>().wrapping_offset(offset).cast::<T>()
-        (self as *mut u8).wrapping_offset(offset) as *mut T
+        (ptr as *mut u8).wrapping_offset(offset) as *mut T
     }
 
     #[must_use]
     #[inline]
-    fn map_addr(self, f: impl FnOnce(usize) -> usize) -> Self
+    pub(crate) fn map_addr<T>(ptr: *mut T, f: impl FnOnce(usize) -> usize) -> *mut T
     where
         T: Sized,
     {
-        self.with_addr(f(self.addr()))
+        self::with_addr(ptr, f(addr(ptr)))
     }
 }
 
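The end result is the same three strict-provenance operations, now as `pub(crate)` free functions. A hypothetical round-trip, assuming the `strict` module above is in scope, shows how tag bits can ride in a pointer's low bits without destroying its provenance:

    fn main() {
        // u32 is 4-byte aligned, so the two low address bits are zero and can
        // carry a tag, just like the STATE_MASK tagging of the queue pointer.
        let p: *mut u32 = Box::into_raw(Box::new(7));
        let tagged = strict::map_addr(p, |a| a | 0b01);
        assert_eq!(strict::addr(tagged) & 0b11, 0b01);

        // Strip the tag before use. with_addr is a wrapping_offset rather
        // than an int-to-pointer cast, so `clean` keeps the allocation's
        // provenance and is safe to dereference.
        let clean = strict::map_addr(tagged, |a| a & !0b11);
        unsafe {
            assert_eq!(*clean, 7);
            drop(Box::from_raw(clean));
        }
    }

Plain function calls like `strict::addr(ptr)` can never be mistaken for the unstable inherent `pointer::addr`, which is what lets the crate drop `#![allow(unstable_name_collisions)]` at the top of this diff.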