@@ -11,7 +11,7 @@ use core::iter::{
     TrustedRandomAccessNoCoerce,
 };
 use core::marker::PhantomData;
-use core::mem::{self, ManuallyDrop, MaybeUninit, SizedTypeProperties};
+use core::mem::{ManuallyDrop, MaybeUninit, SizedTypeProperties};
 use core::num::NonZeroUsize;
 #[cfg(not(no_global_oom_handling))]
 use core::ops::Deref;
@@ -201,25 +201,20 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
     #[inline]
     fn next(&mut self) -> Option<T> {
         if T::IS_ZST {
-            if self.ptr.as_ptr() == self.end as *mut _ {
-                None
-            } else {
-                // `ptr` has to stay where it is to remain aligned, so we reduce the length by 1 by
-                // reducing the `end`.
-                self.end = self.end.wrapping_byte_sub(1);
-
-                // Make up a value of this ZST.
-                Some(unsafe { mem::zeroed() })
+            if self.ptr.as_ptr() == self.end as *mut T {
+                return None;
             }
+            // `ptr` has to stay where it is to remain aligned, so we reduce the length by 1 by
+            // reducing the `end`.
+            self.end = self.end.wrapping_byte_sub(1);
+            Some(unsafe { self.ptr.read() })
         } else {
-            if self.ptr == non_null!(self.end, T) {
-                None
-            } else {
-                let old = self.ptr;
-                self.ptr = unsafe { old.add(1) };
-
-                Some(unsafe { ptr::read(old.as_ptr()) })
+            if self.ptr == unsafe { NonNull::new_unchecked(self.end as *mut T) } {
+                return None;
             }
+            let old = self.ptr;
+            self.ptr = unsafe { old.add(1) };
+            Some(unsafe { old.read() })
         }
     }
 
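For context (not part of the patch): a minimal, self-contained sketch of the observable behavior the rewritten `next` preserves. For a zero-sized `T`, the remaining length is tracked by stepping `end` back one byte at a time while `ptr` stays in place, so the iterator still yields exactly `len` items; the value itself is now produced by `self.ptr.read()` instead of `mem::zeroed()`.

```rust
fn main() {
    // Zero-sized elements: every element occupies zero bytes and `ptr` never
    // moves, yet the iterator still yields exactly `len` items.
    let mut zst_iter = vec![(); 3].into_iter();
    assert_eq!(zst_iter.len(), 3);
    assert_eq!(zst_iter.next(), Some(()));
    assert_eq!(zst_iter.len(), 2);
    assert_eq!(zst_iter.by_ref().count(), 2);
    assert_eq!(zst_iter.next(), None);

    // Sized elements: `ptr` advances one element at a time as usual.
    let mut iter = vec![1, 2, 3].into_iter();
    assert_eq!(iter.next(), Some(1));
    assert_eq!(iter.next(), Some(2));
    assert_eq!(iter.next(), Some(3));
    assert_eq!(iter.next(), None);
}
```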
@@ -305,32 +300,25 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
         // Also note the implementation of `Self: TrustedRandomAccess` requires
         // that `T: Copy` so reading elements from the buffer doesn't invalidate
         // them for `Drop`.
-        unsafe { if T::IS_ZST { mem::zeroed() } else { self.ptr.add(i).read() } }
+        unsafe { self.ptr.add(i).read() }
     }
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
     #[inline]
     fn next_back(&mut self) -> Option<T> {
+        if self.ptr.as_ptr() == self.end as *mut _ {
+            return None;
+        }
         if T::IS_ZST {
-            if self.end as *mut _ == self.ptr.as_ptr() {
-                None
-            } else {
-                // See above for why 'ptr.offset' isn't used
-                self.end = self.end.wrapping_byte_sub(1);
-
-                // Make up a value of this ZST.
-                Some(unsafe { mem::zeroed() })
-            }
+            // See above for why 'ptr.offset' isn't used
+            self.end = self.end.wrapping_byte_sub(1);
+            Some(unsafe { ptr::read(self.ptr.as_ptr()) })
         } else {
-            if non_null!(self.end, T) == self.ptr {
-                None
-            } else {
-                let new_end = unsafe { non_null!(self.end, T).sub(1) };
-                *non_null!(mut self.end, T) = new_end;
-
-                Some(unsafe { ptr::read(new_end.as_ptr()) })
+            unsafe {
+                self.end = self.end.sub(1);
+                Some(ptr::read(self.end))
             }
         }
     }
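Likewise, a small sanity-check sketch (not part of the patch) for the reworked `next_back`: the back end now decrements `end` directly, by one byte for ZSTs and by one element otherwise, and reads from the matching pointer, so mixing `next` and `next_back` still drains the vector exactly once.

```rust
fn main() {
    // Sized elements: `next_back` reads from the decremented `end`.
    let mut iter = vec![1, 2, 3].into_iter();
    assert_eq!(iter.next_back(), Some(3));
    assert_eq!(iter.next(), Some(1));
    assert_eq!(iter.next_back(), Some(2));
    assert_eq!(iter.next_back(), None);

    // Zero-sized elements: front and back iteration share the same byte-offset
    // length tracking, so together they still yield exactly `len` items.
    let mut zst_iter = vec![(); 2].into_iter();
    assert_eq!(zst_iter.next_back(), Some(()));
    assert_eq!(zst_iter.next(), Some(()));
    assert_eq!(zst_iter.next_back(), None);
    assert_eq!(zst_iter.next(), None);
}
```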