@@ -140,7 +140,7 @@ impl<T> SliceExt for [T] {
             assume(!p.is_null());
             if mem::size_of::<T>() == 0 {
                 Iter {ptr: p,
-                      end: ((p as usize).wrapping_add(self.len())) as *const T,
+                      end: (p as usize + self.len()) as *const T,
                       _marker: marker::PhantomData}
             } else {
                 Iter {ptr: p,
@@ -277,7 +277,7 @@ impl<T> SliceExt for [T] {
             assume(!p.is_null());
             if mem::size_of::<T>() == 0 {
                 IterMut {ptr: p,
-                         end: ((p as usize).wrapping_add(self.len())) as *mut T,
+                         end: (p as usize + self.len()) as *mut T,
                          _marker: marker::PhantomData}
             } else {
                 IterMut {ptr: p,
@@ -632,17 +632,35 @@ fn size_from_ptr<T>(_: *const T) -> usize {


 // Use macros to be generic over const/mut
-macro_rules! slice_offset {
+//
+// They require non-negative `$by` because otherwise the expression
+// `(ptr as usize + $by)` would interpret `-1` as `usize::MAX` (and
+// thus trigger a panic when overflow checks are on).
+
+// Use this to do `$ptr + $by`, where `$by` is non-negative.
+macro_rules! slice_add_offset {
     ($ptr: expr, $by: expr) => {{
         let ptr = $ptr;
         if size_from_ptr(ptr) == 0 {
-            transmute((ptr as isize).wrapping_add($by))
+            transmute(ptr as usize + $by)
         } else {
             ptr.offset($by)
         }
     }};
 }

+// Use this to do `$ptr - $by`, where `$by` is non-negative.
+macro_rules! slice_sub_offset {
+    ($ptr: expr, $by: expr) => {{
+        let ptr = $ptr;
+        if size_from_ptr(ptr) == 0 {
+            transmute(ptr as usize - $by)
+        } else {
+            ptr.offset(-$by)
+        }
+    }};
+}
+
 macro_rules! slice_ref {
     ($ptr: expr) => {{
         let ptr = $ptr;
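The new comments above capture the idea behind splitting `slice_offset!` into add/sub variants: for a zero-sized `T` the iterator's pointers do not address real storage, so offsets become plain `usize` arithmetic on the address, and a negative step cast through `usize` would wrap. A standalone sketch of that counter trick follows; the helper name `zst_end` is invented here for illustration and is not part of the patch.

    // Sketch only: mirrors the `p as usize + self.len()` expression used for
    // zero-sized T, where the "end pointer" is really a counter.
    fn zst_end(start: *const (), len: usize) -> *const () {
        (start as usize + len) as *const ()
    }

    fn main() {
        let data = [(), (), ()];
        let p = data.as_ptr();
        let end = zst_end(p, data.len());
        // The remaining length falls out of plain address subtraction.
        assert_eq!(end as usize - p as usize, 3);

        // Why `$by` must be non-negative: a signed -1 pushed through usize
        // wraps to usize::MAX, which overflow checks would flag.
        assert_eq!((-1isize) as usize, usize::MAX);
    }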
@@ -665,24 +683,22 @@ macro_rules! iterator {
             #[inline]
             fn next(&mut self) -> Option<$elem> {
                 // could be implemented with slices, but this avoids bounds checks
-                if self.ptr == self.end {
-                    None
-                } else {
-                    unsafe {
-                        if mem::size_of::<T>() != 0 {
-                            ::intrinsics::assume(!self.ptr.is_null());
-                            ::intrinsics::assume(!self.end.is_null());
-                        }
+                unsafe {
+                    ::intrinsics::assume(!self.ptr.is_null());
+                    ::intrinsics::assume(!self.end.is_null());
+                    if self.ptr == self.end {
+                        None
+                    } else {
                         let old = self.ptr;
-                        self.ptr = slice_offset!(self.ptr, 1);
+                        self.ptr = slice_add_offset!(self.ptr, 1);
                         Some(slice_ref!(old))
                     }
                 }
             }

             #[inline]
             fn size_hint(&self) -> (usize, Option<usize>) {
-                let diff = (self.end as usize).wrapping_sub(self.ptr as usize);
+                let diff = (self.end as usize) - (self.ptr as usize);
                 let size = mem::size_of::<T>();
                 let exact = diff / (if size == 0 {1} else {size});
                 (exact, Some(exact))
@@ -710,15 +726,13 @@ macro_rules! iterator {
             #[inline]
             fn next_back(&mut self) -> Option<$elem> {
                 // could be implemented with slices, but this avoids bounds checks
-                if self.end == self.ptr {
-                    None
-                } else {
-                    unsafe {
-                        self.end = slice_offset!(self.end, -1);
-                        if mem::size_of::<T>() != 0 {
-                            ::intrinsics::assume(!self.ptr.is_null());
-                            ::intrinsics::assume(!self.end.is_null());
-                        }
+                unsafe {
+                    ::intrinsics::assume(!self.ptr.is_null());
+                    ::intrinsics::assume(!self.end.is_null());
+                    if self.end == self.ptr {
+                        None
+                    } else {
+                        self.end = slice_sub_offset!(self.end, 1);
                         Some(slice_ref!(self.end))
                     }
                 }
@@ -728,29 +742,29 @@ macro_rules! iterator {
 }

 macro_rules! make_slice {
-    ($start: expr, $end: expr) => {{
-        let start = $start;
-        let diff = ($end as usize).wrapping_sub(start as usize);
-        if size_from_ptr(start) == 0 {
-            // use a non-null pointer value
-            unsafe { from_raw_parts(1 as *const _, diff) }
+    ($t: ty => $result: ty: $start: expr, $end: expr) => {{
+        let diff = $end as usize - $start as usize;
+        let len = if mem::size_of::<T>() == 0 {
+            diff
         } else {
-            let len = diff / size_from_ptr(start);
-            unsafe { from_raw_parts(start, len) }
+            diff / mem::size_of::<$t>()
+        };
+        unsafe {
+            from_raw_parts($start, len)
         }
     }}
 }

 macro_rules! make_mut_slice {
-    ($start: expr, $end: expr) => {{
-        let start = $start;
-        let diff = ($end as usize).wrapping_sub(start as usize);
-        if size_from_ptr(start) == 0 {
-            // use a non-null pointer value
-            unsafe { from_raw_parts_mut(1 as *mut _, diff) }
+    ($t: ty => $result: ty: $start: expr, $end: expr) => {{
+        let diff = $end as usize - $start as usize;
+        let len = if mem::size_of::<T>() == 0 {
+            diff
         } else {
-            let len = diff / size_from_ptr(start);
-            unsafe { from_raw_parts_mut(start, len) }
+            diff / mem::size_of::<$t>()
+        };
+        unsafe {
+            from_raw_parts_mut($start, len)
         }
     }}
 }
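In their new `$t => $result:` form, both macros recover the slice length the same way: take the raw address difference, then divide by the element size unless the element is zero-sized, in which case the difference already is the count. A hedged sketch of that length calculation follows, using a hypothetical `len_between` helper that mirrors the macro bodies:

    use std::mem;

    // Hypothetical helper mirroring make_slice!/make_mut_slice!: byte distance
    // divided by the element size for sized T, the raw distance for ZSTs.
    fn len_between<T>(start: *const T, end: *const T) -> usize {
        let diff = end as usize - start as usize;
        if mem::size_of::<T>() == 0 { diff } else { diff / mem::size_of::<T>() }
    }

    fn main() {
        let xs = [1u32, 2, 3, 4];
        let start = xs.as_ptr();
        let end = unsafe { start.offset(xs.len() as isize) };
        assert_eq!(len_between(start, end), 4); // 16 bytes / 4 bytes per u32

        let zs = [(), ()];
        let zstart = zs.as_ptr();
        let zend = (zstart as usize + zs.len()) as *const ();
        assert_eq!(len_between(zstart, zend), 2); // distance is already the count
    }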
@@ -773,14 +787,14 @@ impl<'a, T> Iter<'a, T> {
     /// iterator can continue to be used while this exists.
     #[unstable(feature = "core")]
     pub fn as_slice(&self) -> &'a [T] {
-        make_slice!(self.ptr, self.end)
+        make_slice!(T => &'a [T]: self.ptr, self.end)
     }

     // Helper function for Iter::nth
     fn iter_nth(&mut self, n: usize) -> Option<&'a T> {
         match self.as_slice().get(n) {
             Some(elem_ref) => unsafe {
-                self.ptr = slice_offset!(self.ptr, (n as isize).wrapping_add(1));
+                self.ptr = slice_add_offset!(elem_ref as *const _, 1);
                 Some(slice_ref!(elem_ref))
             },
             None => {
@@ -813,7 +827,12 @@ impl<'a, T> RandomAccessIterator for Iter<'a, T> {
     fn idx(&mut self, index: usize) -> Option<&'a T> {
         unsafe {
             if index < self.indexable() {
-                Some(slice_ref!(self.ptr.offset(index as isize)))
+                if mem::size_of::<T>() == 0 {
+                    // Use a non-null pointer value
+                    Some(&mut *(1 as *mut _))
+                } else {
+                    Some(transmute(self.ptr.offset(index as isize)))
+                }
             } else {
                 None
             }
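The zero-sized branch added to `idx` never touches real storage; for a ZST any non-null, suitably aligned address can back a reference, and the patch uses `1`, echoing the old `make_slice!` comment about using a non-null pointer value. A minimal sketch of that trick under the same assumption (not the patch's code; later Rust would typically reach for `ptr::NonNull::dangling`):

    // Sketch: a reference to a zero-sized value backed by the address 1,
    // which is non-null and trivially aligned for ().
    fn zst_ref<'a>() -> &'a () {
        unsafe { &*(1 as *const ()) }
    }

    fn main() {
        let unit: &() = zst_ref();
        assert_eq!(*unit, ());
    }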
@@ -841,14 +860,14 @@ impl<'a, T> IterMut<'a, T> {
     /// restricted lifetimes that do not consume the iterator.
     #[unstable(feature = "core")]
     pub fn into_slice(self) -> &'a mut [T] {
-        make_mut_slice!(self.ptr, self.end)
+        make_mut_slice!(T => &'a mut [T]: self.ptr, self.end)
     }

     // Helper function for IterMut::nth
     fn iter_nth(&mut self, n: usize) -> Option<&'a mut T> {
-        match make_mut_slice!(self.ptr, self.end).get_mut(n) {
+        match make_mut_slice!(T => &'a mut [T]: self.ptr, self.end).get_mut(n) {
             Some(elem_ref) => unsafe {
-                self.ptr = slice_offset!(self.ptr, (n as isize).wrapping_add(1));
+                self.ptr = slice_add_offset!(elem_ref as *mut _, 1);
                 Some(slice_ref!(elem_ref))
             },
             None => {