diff --git a/library/alloc/src/vec/into_iter.rs b/library/alloc/src/vec/into_iter.rs
index 6bcde6d899c..cd4ea829e37 100644
--- a/library/alloc/src/vec/into_iter.rs
+++ b/library/alloc/src/vec/into_iter.rs
@@ -40,7 +40,9 @@ pub struct IntoIter<
     // to avoid dropping the allocator twice we need to wrap it into ManuallyDrop
     pub(super) alloc: ManuallyDrop<A>,
     pub(super) ptr: *const T,
-    pub(super) end: *const T,
+    pub(super) end: *const T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that
+                              // ptr == end is a quick test for the Iterator being empty, that works
+                              // for both ZST and non-ZST.
 }
 
 #[stable(feature = "vec_intoiter_debug", since = "1.13.0")]
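The new field comment is the heart of the change: for a ZST there is no allocation for `end` to point one-past-the-end of, so `end` instead encodes `ptr + len` directly, and `ptr == end` remains a valid emptiness test for every element type. Below is a minimal sketch of that encoding; `ToyIntoIter` and `new_zst` are hypothetical names for illustration, not the real std internals:

```rust
use std::mem::size_of;
use std::ptr::NonNull;

/// Hypothetical stand-in for the real `vec::IntoIter` internals (ZST case only).
struct ToyIntoIter<T> {
    ptr: *const T, // stays fixed (and therefore aligned) for ZSTs
    end: *const T, // for ZSTs: `ptr + len`, possibly unaligned
}

impl<T> ToyIntoIter<T> {
    fn new_zst(len: usize) -> Self {
        assert_eq!(size_of::<T>(), 0, "this sketch only models the ZST case");
        let ptr = NonNull::<T>::dangling().as_ptr() as *const T;
        // Encode the length into `end`; `wrapping_byte_add` because this
        // "pointer" need not stay inside any allocation.
        Self { ptr, end: ptr.wrapping_byte_add(len) }
    }

    fn is_empty(&self) -> bool {
        // The quick emptiness test the comment describes; it works
        // identically for non-ZSTs, where `end` is one-past-the-end.
        self.ptr == self.end
    }

    fn len(&self) -> usize {
        // For a ZST, the byte distance *is* the element count.
        self.end as usize - self.ptr as usize
    }
}

fn main() {
    let it = ToyIntoIter::<()>::new_zst(4);
    assert_eq!(it.len(), 4);
    assert!(!it.is_empty());
}
```

The next hunk preserves this invariant in `forget_remaining_elements`, which now empties the iterator by pulling `end` down to `ptr` rather than pushing `ptr` up to `end`: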
@@ -132,7 +134,9 @@ pub(super) fn forget_allocation_drop_remaining(&mut self) {
 
     /// Forgets to Drop the remaining elements while still allowing the backing allocation to be freed.
    pub(crate) fn forget_remaining_elements(&mut self) {
-        self.ptr = self.end;
+        // For the ZST case, it is crucial that we mutate `end` here, not `ptr`.
+        // `ptr` must stay aligned, while `end` may be unaligned.
+        self.end = self.ptr;
     }
 
     #[cfg(not(no_global_oom_handling))]
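The alignment remark matters because a ZST can still carry an alignment requirement (for example via `repr(align)`), and the iterator builds slices from `ptr`, as the `to_drop` line in the `advance_by` hunk below shows; `slice::from_raw_parts` and `drop_in_place` demand an aligned data pointer even when the element is zero-sized. A small demonstration, using a hypothetical over-aligned ZST `BigZst`:

```rust
use std::mem::{align_of, size_of};
use std::ptr::NonNull;
use std::slice;

// Hypothetical ZST with a large alignment requirement.
#[repr(align(64))]
struct BigZst;

fn main() {
    assert_eq!(size_of::<BigZst>(), 0);
    assert_eq!(align_of::<BigZst>(), 64);

    // A dangling-but-aligned pointer is a valid basis for a ZST slice.
    let ptr = NonNull::<BigZst>::dangling().as_ptr() as *const BigZst;
    let ok: &[BigZst] = unsafe { slice::from_raw_parts(ptr, 5) };
    assert_eq!(ok.len(), 5);

    // Advancing `ptr` by one *byte* per element, as the old code did,
    // leaves it misaligned for `BigZst`; building a slice from such a
    // pointer would violate `from_raw_parts`'s safety contract.
    let bumped = ptr.wrapping_byte_add(1);
    assert_ne!(bumped as usize % align_of::<BigZst>(), 0);
}
```

With that constraint in mind, `next` now pops a ZST element by lowering `end` rather than raising `ptr`: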
@@ -184,10 +188,9 @@ fn next(&mut self) -> Option<T> {
         if self.ptr == self.end {
             None
         } else if T::IS_ZST {
-            // purposefully don't use 'ptr.offset' because for
-            // vectors with 0-size elements this would return the
-            // same pointer.
-            self.ptr = self.ptr.wrapping_byte_add(1);
+            // `ptr` has to stay where it is to remain aligned, so we reduce the length by 1 by
+            // reducing the `end`.
+            self.end = self.end.wrapping_byte_sub(1);
 
             // Make up a value of this ZST.
             Some(unsafe { mem::zeroed() })
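Continuing the toy sketch from the first hunk, the patched ZST branch of `next` boils down to the following (again a hypothetical method, assuming the `ToyIntoIter` type defined above):

```rust
impl<T> ToyIntoIter<T> {
    /// ZST-only `next`, mirroring the patched branch.
    fn next_zst(&mut self) -> Option<T> {
        if self.ptr == self.end {
            None
        } else {
            // Shrink the encoded length by lowering `end`; `ptr` never
            // moves, so it stays aligned for slice construction.
            self.end = self.end.wrapping_byte_sub(1);
            // Conjure the zero-sized value; it has no bytes, so an
            // all-zero bit pattern is trivially valid.
            Some(unsafe { std::mem::zeroed() })
        }
    }
}
```

`advance_by` then gets the same treatment for bulk skips: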
@@ -214,10 +217,8 @@ fn advance_by(&mut self, n: usize) -> Result<(), usize> {
         let step_size = self.len().min(n);
         let to_drop = ptr::slice_from_raw_parts_mut(self.ptr as *mut T, step_size);
         if T::IS_ZST {
-            // SAFETY: due to unchecked casts of unsigned amounts to signed offsets the wraparound
-            // effectively results in unsigned pointers representing positions 0..usize::MAX,
-            // which is valid for ZSTs.
-            self.ptr = self.ptr.wrapping_byte_add(step_size);
+            // See `next` for why we sub `end` here.
+            self.end = self.end.wrapping_byte_sub(step_size);
         } else {
             // SAFETY: the min() above ensures that step_size is in bounds
             self.ptr = unsafe { self.ptr.add(step_size) };
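The net effect is observable with plain stable APIs: a `Vec` of ZSTs still reports and yields exactly `len` elements, and skipping consumes from the encoded length without ever moving `ptr` (on current std the default `Iterator::nth` forwards to the `advance_by` path patched above):

```rust
fn main() {
    let v = vec![(); 10];
    let mut it = v.into_iter();
    assert_eq!(it.len(), 10);

    // `nth(2)` consumes three elements; the skip is accounted for by
    // shrinking the encoded length, never by moving `ptr`.
    assert_eq!(it.nth(2), Some(()));
    assert_eq!(it.len(), 7);

    // Draining the rest: `ptr == end` eventually signals emptiness.
    assert_eq!(it.count(), 7);
}
```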