diff --git a/library/alloc/src/lib.rs b/library/alloc/src/lib.rs
index 967ad3a0e6901..d14b1e491969e 100644
--- a/library/alloc/src/lib.rs
+++ b/library/alloc/src/lib.rs
@@ -178,6 +178,7 @@
 #![feature(const_ptr_write)]
 #![feature(const_trait_impl)]
 #![feature(const_try)]
+#![feature(decl_macro)]
 #![feature(dropck_eyepatch)]
 #![feature(exclusive_range_pattern)]
 #![feature(fundamental)]
diff --git a/library/alloc/src/vec/in_place_collect.rs b/library/alloc/src/vec/in_place_collect.rs
index 5ecd0479971ea..7e09c78f71cbb 100644
--- a/library/alloc/src/vec/in_place_collect.rs
+++ b/library/alloc/src/vec/in_place_collect.rs
@@ -190,7 +190,7 @@ where
     // then the source pointer will stay in its initial position and we can't use it as reference
     if src.ptr != src_ptr {
         debug_assert!(
-            unsafe { dst_buf.add(len) as *const _ } <= src.ptr,
+            unsafe { dst_buf.add(len) as *const _ } <= src.ptr.as_ptr(),
            "InPlaceIterable contract violation, write pointer advanced beyond read pointer"
         );
     }
diff --git a/library/alloc/src/vec/into_iter.rs b/library/alloc/src/vec/into_iter.rs
index 2fe5077bfdaef..4db2fe4ff0ff2 100644
--- a/library/alloc/src/vec/into_iter.rs
+++ b/library/alloc/src/vec/into_iter.rs
@@ -17,6 +17,17 @@ use core::ops::Deref;
 use core::ptr::{self, NonNull};
 use core::slice::{self};
 
+macro non_null {
+    (mut $place:expr, $t:ident) => {{
+        #![allow(unused_unsafe)] // we're sometimes used within an unsafe block
+        unsafe { &mut *ptr::addr_of_mut!($place).cast::<NonNull<$t>>() }
+    }},
+    ($place:expr, $t:ident) => {{
+        #![allow(unused_unsafe)] // we're sometimes used within an unsafe block
+        unsafe { *ptr::addr_of!($place).cast::<NonNull<$t>>() }
+    }},
+}
+
 /// An iterator that moves out of a vector.
 ///
 /// This `struct` is created by the `into_iter` method on [`Vec`](super::Vec)
@@ -40,10 +51,12 @@ pub struct IntoIter<
     // the drop impl reconstructs a RawVec from buf, cap and alloc
     // to avoid dropping the allocator twice we need to wrap it into ManuallyDrop
     pub(super) alloc: ManuallyDrop<A>,
-    pub(super) ptr: *const T,
-    pub(super) end: *const T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that
-                              // ptr == end is a quick test for the Iterator being empty, that works
-                              // for both ZST and non-ZST.
+    pub(super) ptr: NonNull<T>,
+    /// If T is a ZST, this is actually ptr+len. This encoding is picked so that
+    /// ptr == end is a quick test for the Iterator being empty, that works
+    /// for both ZST and non-ZST.
+    /// For non-ZSTs the pointer is treated as `NonNull<T>`
+    pub(super) end: *const T,
 }
 
 #[stable(feature = "vec_intoiter_debug", since = "1.13.0")]
@@ -67,7 +80,7 @@ impl<T, A: Allocator> IntoIter<T, A> {
     /// ```
     #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
     pub fn as_slice(&self) -> &[T] {
-        unsafe { slice::from_raw_parts(self.ptr, self.len()) }
+        unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.len()) }
     }
 
     /// Returns the remaining items of this iterator as a mutable slice.
@@ -96,7 +109,7 @@ impl<T, A: Allocator> IntoIter<T, A> {
     }
 
     fn as_raw_mut_slice(&mut self) -> *mut [T] {
-        ptr::slice_from_raw_parts_mut(self.ptr as *mut T, self.len())
+        ptr::slice_from_raw_parts_mut(self.ptr.as_ptr(), self.len())
     }
 
     /// Drops remaining elements and relinquishes the backing allocation.
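Aside (editorial, not part of the patch): the `non_null!` macro above relies on `NonNull<T>` being `#[repr(transparent)]` over a raw pointer, so a place that holds a pointer known to be non-null can be reread as a `NonNull<T>`. A minimal standalone sketch of the same trick; `read_as_non_null` is a hypothetical name, not something the patch defines:

use std::ptr::{self, NonNull};

// Safety: the pointer stored at `place` must be non-null, which `IntoIter`
// guarantees for its `end` field whenever `T` is not a ZST.
unsafe fn read_as_non_null<T>(place: *const *const T) -> NonNull<T> {
    // `NonNull<T>` is layout-compatible with `*const T`, so this read
    // reinterprets the stored pointer without copying it out first.
    unsafe { *place.cast::<NonNull<T>>() }
}

fn main() {
    let x = 42i32;
    let p: *const i32 = &x;
    let nn = unsafe { read_as_non_null(ptr::addr_of!(p)) };
    assert_eq!(unsafe { *nn.as_ptr() }, 42);
}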
@@ -123,7 +136,7 @@ impl<T, A: Allocator> IntoIter<T, A> {
         // this creates less assembly
         self.cap = 0;
         self.buf = unsafe { NonNull::new_unchecked(RawVec::NEW.ptr()) };
-        self.ptr = self.buf.as_ptr();
+        self.ptr = self.buf;
         self.end = self.buf.as_ptr();
 
         // Dropping the remaining elements can panic, so this needs to be
@@ -137,7 +150,7 @@ impl<T, A: Allocator> IntoIter<T, A> {
     pub(crate) fn forget_remaining_elements(&mut self) {
         // For the ZST case, it is crucial that we mutate `end` here, not `ptr`.
         // `ptr` must stay aligned, while `end` may be unaligned.
-        self.end = self.ptr;
+        self.end = self.ptr.as_ptr();
     }
 
     #[cfg(not(no_global_oom_handling))]
@@ -159,7 +172,7 @@ impl<T, A: Allocator> IntoIter<T, A> {
             // say that they're all at the beginning of the "allocation".
             0..this.len()
         } else {
-            this.ptr.sub_ptr(buf)..this.end.sub_ptr(buf)
+            this.ptr.sub_ptr(this.buf)..this.end.sub_ptr(buf)
         };
         let cap = this.cap;
         let alloc = ManuallyDrop::take(&mut this.alloc);
@@ -186,29 +199,35 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
 
     #[inline]
     fn next(&mut self) -> Option<T> {
-        if self.ptr == self.end {
-            None
-        } else if T::IS_ZST {
-            // `ptr` has to stay where it is to remain aligned, so we reduce the length by 1 by
-            // reducing the `end`.
-            self.end = self.end.wrapping_byte_sub(1);
-
-            // Make up a value of this ZST.
-            Some(unsafe { mem::zeroed() })
+        if T::IS_ZST {
+            if self.ptr.as_ptr().cast_const() == self.end {
+                None
+            } else {
+                // `ptr` has to stay where it is to remain aligned, so we reduce the length by 1 by
+                // reducing the `end`.
+                self.end = self.end.wrapping_byte_sub(1);
+
+                // Make up a value of this ZST.
+                Some(unsafe { mem::zeroed() })
+            }
         } else {
-            let old = self.ptr;
-            self.ptr = unsafe { self.ptr.add(1) };
+            if self.ptr == non_null!(self.end, T) {
+                None
+            } else {
+                let old = self.ptr;
+                self.ptr = unsafe { old.add(1) };
 
-            Some(unsafe { ptr::read(old) })
+                Some(unsafe { ptr::read(old.as_ptr()) })
+            }
         }
     }
 
     #[inline]
     fn size_hint(&self) -> (usize, Option<usize>) {
         let exact = if T::IS_ZST {
-            self.end.addr().wrapping_sub(self.ptr.addr())
+            self.end.addr().wrapping_sub(self.ptr.as_ptr().addr())
         } else {
-            unsafe { self.end.sub_ptr(self.ptr) }
+            unsafe { non_null!(self.end, T).sub_ptr(self.ptr) }
         };
         (exact, Some(exact))
     }
@@ -216,7 +235,7 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
     #[inline]
     fn advance_by(&mut self, n: usize) -> Result<(), NonZeroUsize> {
         let step_size = self.len().min(n);
-        let to_drop = ptr::slice_from_raw_parts_mut(self.ptr as *mut T, step_size);
+        let to_drop = ptr::slice_from_raw_parts_mut(self.ptr.as_ptr(), step_size);
         if T::IS_ZST {
             // See `next` for why we sub `end` here.
             self.end = self.end.wrapping_byte_sub(step_size);
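Aside (editorial, not part of the patch): the two arms of `size_hint` above are exactly the length encoding documented on the `end` field. A freestanding sketch of the same computation, with stable `offset_from` standing in for the unstable `sub_ptr`; `remaining` is a hypothetical helper:

use std::mem;
use std::ptr::NonNull;

// Safety: for sized `T`, `ptr` and `end` must point into the same allocation
// with `ptr <= end`, as `IntoIter` maintains.
unsafe fn remaining<T>(ptr: NonNull<T>, end: *const T) -> usize {
    if mem::size_of::<T>() == 0 {
        // For ZSTs, `end` encodes `ptr + len` in raw-address terms, so the
        // length is an address difference; `wrapping_sub` because `end` may
        // not be a valid (aligned) pointer value.
        end.addr().wrapping_sub(ptr.as_ptr().addr())
    } else {
        // Exact element count between the two pointers; cannot be negative
        // given the invariant above.
        unsafe { end.offset_from(ptr.as_ptr()) as usize }
    }
}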
@@ -258,7 +277,7 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
             // Safety: `len` indicates that this many elements are available and we just checked that
             // it fits into the array.
             unsafe {
-                ptr::copy_nonoverlapping(self.ptr, raw_ary.as_mut_ptr() as *mut T, len);
+                ptr::copy_nonoverlapping(self.ptr.as_ptr(), raw_ary.as_mut_ptr() as *mut T, len);
                 self.forget_remaining_elements();
                 return Err(array::IntoIter::new_unchecked(raw_ary, 0..len));
             }
@@ -267,7 +286,7 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
         // Safety: `len` is larger than the array size. Copy a fixed amount here to fully initialize
         // the array.
         return unsafe {
-            ptr::copy_nonoverlapping(self.ptr, raw_ary.as_mut_ptr() as *mut T, N);
+            ptr::copy_nonoverlapping(self.ptr.as_ptr(), raw_ary.as_mut_ptr() as *mut T, N);
             self.ptr = self.ptr.add(N);
             Ok(raw_ary.transpose().assume_init())
         };
@@ -286,7 +305,7 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
         // that `T: Copy` so reading elements from the buffer doesn't invalidate
         // them for `Drop`.
         unsafe {
-            if T::IS_ZST { mem::zeroed() } else { ptr::read(self.ptr.add(i)) }
+            if T::IS_ZST { mem::zeroed() } else { ptr::read(self.ptr.add(i).as_ptr()) }
         }
     }
 }
@@ -295,18 +314,25 @@ impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
     #[inline]
     fn next_back(&mut self) -> Option<T> {
-        if self.end == self.ptr {
-            None
-        } else if T::IS_ZST {
-            // See above for why 'ptr.offset' isn't used
-            self.end = self.end.wrapping_byte_sub(1);
-
-            // Make up a value of this ZST.
-            Some(unsafe { mem::zeroed() })
+        if T::IS_ZST {
+            if self.end == self.ptr.as_ptr().cast_const() {
+                None
+            } else {
+                // See above for why 'ptr.offset' isn't used
+                self.end = self.end.wrapping_byte_sub(1);
+
+                // Make up a value of this ZST.
+                Some(unsafe { mem::zeroed() })
+            }
         } else {
-            self.end = unsafe { self.end.sub(1) };
+            if non_null!(self.end, T) == self.ptr {
+                None
+            } else {
+                let new_end = unsafe { non_null!(self.end, T).sub(1) };
+                *non_null!(mut self.end, T) = new_end;
 
-            Some(unsafe { ptr::read(self.end) })
+                Some(unsafe { ptr::read(new_end.as_ptr()) })
+            }
         }
     }
 
     #[inline]
@@ -332,7 +358,11 @@ impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {
     fn is_empty(&self) -> bool {
-        self.ptr == self.end
+        if T::IS_ZST {
+            self.ptr.as_ptr().cast_const() == self.end
+        } else {
+            self.ptr == non_null!(self.end, T)
+        }
     }
 }
diff --git a/library/alloc/src/vec/mod.rs b/library/alloc/src/vec/mod.rs
index 1b24c63755051..b38251d4d84d0 100644
--- a/library/alloc/src/vec/mod.rs
+++ b/library/alloc/src/vec/mod.rs
@@ -2730,14 +2730,8 @@ impl<T, A: Allocator> IntoIterator for Vec<T, A> {
                 begin.add(me.len()) as *const T
             };
             let cap = me.buf.capacity();
-            IntoIter {
-                buf: NonNull::new_unchecked(begin),
-                phantom: PhantomData,
-                cap,
-                alloc,
-                ptr: begin,
-                end,
-            }
+            let buf = NonNull::new_unchecked(begin);
+            IntoIter { buf, phantom: PhantomData, cap, alloc, ptr: buf, end }
         }
     }
 }
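Aside (editorial, not part of the patch): none of the changes above alter `IntoIter`'s observable behavior; a quick stable-Rust sanity check of the invariants the code relies on (front consumption moves `ptr`, back consumption and the ZST path only move `end`):

fn main() {
    let mut it = vec![1, 2, 3].into_iter();
    assert_eq!(it.as_slice(), &[1, 2, 3]);
    assert_eq!(it.next(), Some(1));      // front: `ptr` advances
    assert_eq!(it.next_back(), Some(3)); // back: `end` moves down
    assert_eq!(it.len(), 1);

    // ZST path: `ptr` never moves; the remaining length lives in `end`.
    let mut zst = vec![(), (), ()].into_iter();
    assert_eq!(zst.len(), 3);
    assert_eq!(zst.next(), Some(()));
    assert_eq!(zst.len(), 2);
}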
diff --git a/library/alloc/src/vec/spec_from_iter.rs b/library/alloc/src/vec/spec_from_iter.rs
index efa6868473e49..e976552cf2b92 100644
--- a/library/alloc/src/vec/spec_from_iter.rs
+++ b/library/alloc/src/vec/spec_from_iter.rs
@@ -44,12 +44,12 @@ impl<T> SpecFromIter<T, IntoIter<T>> for Vec<T> {
         // than creating it through the generic FromIterator implementation would. That limitation
         // is not strictly necessary as Vec's allocation behavior is intentionally unspecified.
         // But it is a conservative choice.
-        let has_advanced = iterator.buf.as_ptr() as *const _ != iterator.ptr;
+        let has_advanced = iterator.buf != iterator.ptr;
         if !has_advanced || iterator.len() >= iterator.cap / 2 {
             unsafe {
                 let it = ManuallyDrop::new(iterator);
                 if has_advanced {
-                    ptr::copy(it.ptr, it.buf.as_ptr(), it.len());
+                    ptr::copy(it.ptr.as_ptr(), it.buf.as_ptr(), it.len());
                 }
                 return Vec::from_raw_parts(it.buf.as_ptr(), it.len(), it.cap);
             }
diff --git a/tests/codegen/vec-iter.rs b/tests/codegen/vec-iter.rs
new file mode 100644
index 0000000000000..567a999e4e2e2
--- /dev/null
+++ b/tests/codegen/vec-iter.rs
@@ -0,0 +1,46 @@
+// ignore-debug: the debug assertions get in the way
+// compile-flags: -O
+#![crate_type = "lib"]
+#![feature(exact_size_is_empty)]
+
+use std::vec;
+
+// CHECK-LABEL: @vec_iter_len_nonnull
+#[no_mangle]
+pub fn vec_iter_len_nonnull(it: &vec::IntoIter<u8>) -> usize {
+    // CHECK: %subtrahend.i.i = load ptr
+    // CHECK-SAME: !nonnull
+    // CHECK-SAME: !noundef
+    // CHECK: %self2.i.i = load ptr
+    // CHECK-SAME: !nonnull
+    // CHECK-SAME: !noundef
+    // CHECK: sub nuw
+    // CHECK: ret
+    it.len()
+}
+
+// CHECK-LABEL: @vec_iter_is_empty_nonnull
+#[no_mangle]
+pub fn vec_iter_is_empty_nonnull(it: &vec::IntoIter<u8>) -> bool {
+    // CHECK: %other.i = load ptr
+    // CHECK-SAME: !nonnull
+    // CHECK-SAME: !noundef
+    // CHECK: %self2.i = load ptr
+    // CHECK-SAME: !nonnull
+    // CHECK-SAME: !noundef
+    // CHECK: ret
+    it.is_empty()
+}
+
+// CHECK-LABEL: @vec_iter_next
+#[no_mangle]
+pub fn vec_iter_next(it: &mut vec::IntoIter<u8>) -> Option<u8> {
+    // CHECK: %other.i = load ptr
+    // CHECK-SAME: !nonnull
+    // CHECK-SAME: !noundef
+    // CHECK: %self2.i = load ptr
+    // CHECK-SAME: !nonnull
+    // CHECK-SAME: !noundef
+    // CHECK: ret
+    it.next()
+}
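Aside (editorial, not part of the patch): the `!nonnull`/`!noundef` metadata checked above is the payoff of the change: once both pointer fields are loaded as known-non-null, known-defined values, their difference in `len` carries `sub nuw`. An illustrative caller; the function name is hypothetical:

// With `sub nuw`, LLVM can fold `len() > 0` into a plain pointer inequality
// instead of a subtract-then-compare.
pub fn has_remaining(it: &std::vec::IntoIter<u8>) -> bool {
    it.len() > 0
}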