@@ -666,6 +666,60 @@ macro_rules! iterator {
                 let exact = diff / (if size == 0 {1} else {size});
                 (exact, Some(exact))
             }
+
+            #[inline]
+            fn count(self) -> usize {
+                self.size_hint().0
+            }
+
+            #[inline]
+            fn nth(&mut self, n: usize) -> Option<$elem> {
+                // could be implemented with slices, but this avoids bounds checks
+                unsafe {
+                    ::intrinsics::assume(!self.ptr.is_null());
+                    ::intrinsics::assume(!self.end.is_null());
+                    // There should be some way to use offset and optimize this to LEA but I don't
+                    // know how to do that AND detect overflow...
+                    let size = mem::size_of::<T>();
+                    if size == 0 {
+                        if let Some(new_ptr) = (self.ptr as usize).checked_add(n) {
+                            if new_ptr < (self.end as usize) {
+                                self.ptr = transmute(new_ptr + 1);
+                                return Some(&mut *(1 as *mut _))
+                            }
+                        }
+                    } else {
+                        if let Some(new_ptr) = n.checked_mul(size).and_then(|offset| {
+                            (self.ptr as usize).checked_add(offset)
+                        }) {
+                            if new_ptr < (self.end as usize) {
+                                self.ptr = transmute(new_ptr + size);
+                                return Some(transmute(new_ptr))
+                            }
+                        }
+                    }
+                    None
+                }
+            }
+
+            #[inline]
+            fn last(self) -> Option<$elem> {
+                // We could just call next_back but this avoids the memory write.
+                unsafe {
+                    ::intrinsics::assume(!self.ptr.is_null());
+                    ::intrinsics::assume(!self.end.is_null());
+                    if self.end == self.ptr {
+                        None
+                    } else {
+                        if mem::size_of::<T>() == 0 {
+                            // Use a non-null pointer value
+                            Some(&mut *(1 as *mut _))
+                        } else {
+                            Some(transmute(self.end.offset(-1)))
+                        }
+                    }
+                }
+            }
         }
 
         #[stable(feature = "rust1", since = "1.0.0")]
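
For context, and not part of the patch itself: below is a minimal, standalone sketch of the observable behaviour these specializations must preserve. `count`, `nth`, and `last` on slice iterators have to match what the default `Iterator` implementations would produce, including for zero-sized element types; the sample values are illustrative only.

    // Standalone check of the slice-iterator contract (illustrative values).
    fn main() {
        let xs = [10, 20, 30, 40];

        // `count` comes straight from `size_hint().0`, which is exact for slices.
        assert_eq!(xs.iter().count(), 4);

        // `nth(n)` skips n elements and yields the next one, leaving the
        // iterator positioned after it; the patch does this with a single
        // pointer bump instead of n + 1 bounds-checked `next` calls.
        let mut it = xs.iter();
        assert_eq!(it.nth(2), Some(&30));
        assert_eq!(it.next(), Some(&40));

        // An out-of-range `nth` returns None; the `checked_mul`/`checked_add`
        // guards above are what keep the offset arithmetic from overflowing.
        assert_eq!(xs.iter().nth(10), None);

        // `last` reads the element at `end.offset(-1)` directly rather than
        // walking forward with repeated writes to `self.ptr`.
        assert_eq!(xs.iter().last(), Some(&40));

        // Zero-sized types exercise the `size == 0` branches: the pointer is
        // advanced by 1 per element as a plain counter, and element references
        // come from a non-null dangling pointer (`1 as *mut _`).
        let zs = [(), (), ()];
        assert_eq!(zs.iter().nth(1), Some(&()));
        assert_eq!(zs.iter().last(), Some(&()));
    }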