// Copyright 2013-2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use convert::TryFrom;
use mem;
use ops::{self, Add, Sub, Try};
use usize;

use super::{FusedIterator, TrustedLen};
18 /// Objects that can be stepped over in both directions.
20 /// The `steps_between` function provides a way to efficiently compare
21 /// two `Step` objects.
22 #[unstable(feature = "step_trait",
23 reason
= "likely to be replaced by finer-grained traits",
25 pub trait Step
: Clone
+ PartialOrd
+ Sized
{
26 /// Returns the number of steps between two step objects. The count is
27 /// inclusive of `start` and exclusive of `end`.
29 /// Returns `None` if it is not possible to calculate `steps_between`
31 fn steps_between(start
: &Self, end
: &Self) -> Option
<usize>;
33 /// Replaces this step with `1`, returning itself
34 fn replace_one(&mut self) -> Self;
36 /// Replaces this step with `0`, returning itself
37 fn replace_zero(&mut self) -> Self;
39 /// Adds one to this step, returning the result
40 fn add_one(&self) -> Self;
42 /// Subtracts one to this step, returning the result
43 fn sub_one(&self) -> Self;
45 /// Add an usize, returning None on overflow
46 fn add_usize(&self, n
: usize) -> Option
<Self>;
// These are still macro-generated because the integer literals resolve to different types.
//
// Expands (inside an `impl Step for $int` block) to the four `Step` methods
// whose bodies are textually identical across all integer types.
macro_rules! step_identical_methods {
    () => {
        #[inline]
        fn replace_one(&mut self) -> Self {
            // Swap in the literal `1`, handing back the previous value.
            mem::replace(self, 1)
        }

        #[inline]
        fn replace_zero(&mut self) -> Self {
            mem::replace(self, 0)
        }

        #[inline]
        fn add_one(&self) -> Self {
            Add::add(*self, 1)
        }

        #[inline]
        fn sub_one(&self) -> Self {
            Sub::sub(*self, 1)
        }
    }
}
// Implements `Step` for one or more unsigned integer types that are no wider
// than `usize`, so the start→end distance always fits in the return type.
macro_rules! step_impl_unsigned {
    ($($t:ty)*) => ($(
        #[unstable(feature = "step_trait",
                   reason = "likely to be replaced by finer-grained traits",
                   issue = "42168")]
        impl Step for $t {
            #[inline]
            #[allow(trivial_numeric_casts)]
            fn steps_between(start: &$t, end: &$t) -> Option<usize> {
                if *start < *end {
                    // Note: We assume $t <= usize here
                    Some((*end - *start) as usize)
                } else {
                    // Empty (or reversed) range: zero steps.
                    Some(0)
                }
            }

            #[inline]
            #[allow(unreachable_patterns)]
            fn add_usize(&self, n: usize) -> Option<Self> {
                match <$t>::private_try_from(n) {
                    Ok(n_as_t) => self.checked_add(n_as_t),
                    Err(_) => None,
                }
            }

            step_identical_methods!();
        }
    )*)
}
// Implements `Step` for signed integer types, each paired with the unsigned
// type of the same width (`[$t : $unsigned]`) so additions can be performed
// in unsigned space where wrapping is well-defined.
macro_rules! step_impl_signed {
    ($( [$t:ty : $unsigned:ty] )*) => ($(
        #[unstable(feature = "step_trait",
                   reason = "likely to be replaced by finer-grained traits",
                   issue = "42168")]
        impl Step for $t {
            #[inline]
            #[allow(trivial_numeric_casts)]
            fn steps_between(start: &$t, end: &$t) -> Option<usize> {
                if *start < *end {
                    // Note: We assume $t <= isize here
                    // Use .wrapping_sub and cast to usize to compute the
                    // difference that may not fit inside the range of isize.
                    Some((*end as isize).wrapping_sub(*start as isize) as usize)
                } else {
                    Some(0)
                }
            }

            #[inline]
            #[allow(unreachable_patterns)]
            fn add_usize(&self, n: usize) -> Option<Self> {
                match <$unsigned>::private_try_from(n) {
                    Ok(n_as_unsigned) => {
                        // Wrapping in unsigned space handles cases like
                        // `-120_i8.add_usize(200) == Some(80_i8)`,
                        // even though 200_usize is out of range for i8.
                        let wrapped = (*self as $unsigned)
                            .wrapping_add(n_as_unsigned) as $t;
                        if wrapped >= *self {
                            Some(wrapped)
                        } else {
                            None  // Addition overflowed
                        }
                    }
                    Err(_) => None,
                }
            }

            step_identical_methods!();
        }
    )*)
}
// Implements `Step` for integer types wider than the pointer width: the
// distance between endpoints may not fit in `usize`, so `steps_between`
// conservatively reports `None`.
macro_rules! step_impl_no_between {
    ($($t:ty)*) => ($(
        #[unstable(feature = "step_trait",
                   reason = "likely to be replaced by finer-grained traits",
                   issue = "42168")]
        impl Step for $t {
            #[inline]
            fn steps_between(_start: &Self, _end: &Self) -> Option<usize> {
                None
            }

            #[inline]
            fn add_usize(&self, n: usize) -> Option<Self> {
                // `$t` is wider than `usize`, so the cast is lossless.
                self.checked_add(n as $t)
            }

            step_identical_methods!();
        }
    )*)
}
168 step_impl_unsigned
!(usize u8 u16 u32);
169 step_impl_signed
!([isize: usize] [i8: u8] [i16: u16] [i32: u32]);
170 #[cfg(target_pointer_width = "64")]
171 step_impl_unsigned
!(u64);
172 #[cfg(target_pointer_width = "64")]
173 step_impl_signed
!([i64: u64]);
174 // If the target pointer width is not 64-bits, we
175 // assume here that it is less than 64-bits.
176 #[cfg(not(target_pointer_width = "64"))]
177 step_impl_no_between
!(u64 i64);
178 step_impl_no_between
!(u128 i128
);
// Generates an `ExactSizeIterator` impl for `ops::Range<$t>` per listed type.
macro_rules! range_exact_iter_impl {
    ($($t:ty)*) => ($(
        #[stable(feature = "rust1", since = "1.0.0")]
        impl ExactSizeIterator for ops::Range<$t> { }
    )*)
}
// Generates an `ExactSizeIterator` impl for `ops::RangeInclusive<$t>` per listed type.
macro_rules! range_incl_exact_iter_impl {
    ($($t:ty)*) => ($(
        #[stable(feature = "inclusive_range", since = "1.26.0")]
        impl ExactSizeIterator for ops::RangeInclusive<$t> { }
    )*)
}
// Generates a `TrustedLen` impl for `ops::Range<$t>` per listed type.
// SAFETY: sound only for types whose `size_hint` is exact or reports an
// unbounded upper limit — callers pick the type list accordingly.
macro_rules! range_trusted_len_impl {
    ($($t:ty)*) => ($(
        #[unstable(feature = "trusted_len", issue = "37572")]
        unsafe impl TrustedLen for ops::Range<$t> { }
    )*)
}
// Generates a `TrustedLen` impl for `ops::RangeInclusive<$t>` per listed type.
macro_rules! range_incl_trusted_len_impl {
    ($($t:ty)*) => ($(
        #[stable(feature = "inclusive_range", since = "1.26.0")]
        unsafe impl TrustedLen for ops::RangeInclusive<$t> { }
    )*)
}
208 #[stable(feature = "rust1", since = "1.0.0")]
209 impl<A
: Step
> Iterator
for ops
::Range
<A
> {
213 fn next(&mut self) -> Option
<A
> {
214 if self.start
< self.end
{
215 // We check for overflow here, even though it can't actually
216 // happen. Adding this check does however help llvm vectorize loops
217 // for some ranges that don't get vectorized otherwise,
218 // and this won't actually result in an extra check in an optimized build.
219 if let Some(mut n
) = self.start
.add_usize(1) {
220 mem
::swap(&mut n
, &mut self.start
);
231 fn size_hint(&self) -> (usize, Option
<usize>) {
232 match Step
::steps_between(&self.start
, &self.end
) {
233 Some(hint
) => (hint
, Some(hint
)),
239 fn nth(&mut self, n
: usize) -> Option
<A
> {
240 if let Some(plus_n
) = self.start
.add_usize(n
) {
241 if plus_n
< self.end
{
242 self.start
= plus_n
.add_one();
247 self.start
= self.end
.clone();
252 fn last(mut self) -> Option
<A
> {
257 fn min(mut self) -> Option
<A
> {
262 fn max(mut self) -> Option
<A
> {
267 // These macros generate `ExactSizeIterator` impls for various range types.
268 // Range<{u,i}64> and RangeInclusive<{u,i}{32,64,size}> are excluded
269 // because they cannot guarantee having a length <= usize::MAX, which is
270 // required by ExactSizeIterator.
271 range_exact_iter_impl
!(usize u8 u16 u32 isize i8 i16 i32);
272 range_incl_exact_iter_impl
!(u8 u16 i8 i16);
274 // These macros generate `TrustedLen` impls.
276 // They need to guarantee that .size_hint() is either exact, or that
277 // the upper bound is None when it does not fit the type limits.
278 range_trusted_len_impl
!(usize isize u8 i8 u16 i16 u32 i32 i64 u64);
279 range_incl_trusted_len_impl
!(usize isize u8 i8 u16 i16 u32 i32 i64 u64);
281 #[stable(feature = "rust1", since = "1.0.0")]
282 impl<A
: Step
> DoubleEndedIterator
for ops
::Range
<A
> {
284 fn next_back(&mut self) -> Option
<A
> {
285 if self.start
< self.end
{
286 self.end
= self.end
.sub_one();
287 Some(self.end
.clone())
294 #[stable(feature = "fused", since = "1.26.0")]
295 impl<A
: Step
> FusedIterator
for ops
::Range
<A
> {}
297 #[stable(feature = "rust1", since = "1.0.0")]
298 impl<A
: Step
> Iterator
for ops
::RangeFrom
<A
> {
302 fn next(&mut self) -> Option
<A
> {
303 let mut n
= self.start
.add_one();
304 mem
::swap(&mut n
, &mut self.start
);
309 fn size_hint(&self) -> (usize, Option
<usize>) {
314 fn nth(&mut self, n
: usize) -> Option
<A
> {
315 let plus_n
= self.start
.add_usize(n
).expect("overflow in RangeFrom::nth");
316 self.start
= plus_n
.add_one();
321 #[stable(feature = "fused", since = "1.26.0")]
322 impl<A
: Step
> FusedIterator
for ops
::RangeFrom
<A
> {}
324 #[unstable(feature = "trusted_len", issue = "37572")]
325 unsafe impl<A
: Step
> TrustedLen
for ops
::RangeFrom
<A
> {}
327 #[stable(feature = "inclusive_range", since = "1.26.0")]
328 impl<A
: Step
> Iterator
for ops
::RangeInclusive
<A
> {
332 fn next(&mut self) -> Option
<A
> {
333 if self.start
<= self.end
{
334 if self.start
< self.end
{
335 let n
= self.start
.add_one();
336 Some(mem
::replace(&mut self.start
, n
))
338 let last
= self.start
.replace_one();
339 self.end
.replace_zero();
348 fn size_hint(&self) -> (usize, Option
<usize>) {
349 if !(self.start
<= self.end
) {
353 match Step
::steps_between(&self.start
, &self.end
) {
354 Some(hint
) => (hint
.saturating_add(1), hint
.checked_add(1)),
360 fn nth(&mut self, n
: usize) -> Option
<A
> {
361 if let Some(plus_n
) = self.start
.add_usize(n
) {
362 use cmp
::Ordering
::*;
364 match plus_n
.partial_cmp(&self.end
) {
366 self.start
= plus_n
.add_one();
370 self.start
.replace_one();
371 self.end
.replace_zero();
378 self.start
.replace_one();
379 self.end
.replace_zero();
384 fn last(mut self) -> Option
<A
> {
389 fn min(mut self) -> Option
<A
> {
394 fn max(mut self) -> Option
<A
> {
399 fn try_fold
<B
, F
, R
>(&mut self, init
: B
, mut f
: F
) -> R
where
400 Self: Sized
, F
: FnMut(B
, Self::Item
) -> R
, R
: Try
<Ok
=B
>
402 let mut accum
= init
;
403 if self.start
<= self.end
{
406 if self.start
< self.end
{
407 let n
= self.start
.add_one();
408 (mem
::replace(&mut self.start
, n
), false)
410 self.end
.replace_zero();
411 (self.start
.replace_one(), true)
413 accum
= f(accum
, x
)?
;
421 #[stable(feature = "inclusive_range", since = "1.26.0")]
422 impl<A
: Step
> DoubleEndedIterator
for ops
::RangeInclusive
<A
> {
424 fn next_back(&mut self) -> Option
<A
> {
425 if self.start
<= self.end
{
426 if self.start
< self.end
{
427 let n
= self.end
.sub_one();
428 Some(mem
::replace(&mut self.end
, n
))
430 let last
= self.end
.replace_zero();
431 self.start
.replace_one();
440 fn try_rfold
<B
, F
, R
>(&mut self, init
: B
, mut f
: F
) -> R
where
441 Self: Sized
, F
: FnMut(B
, Self::Item
) -> R
, R
: Try
<Ok
=B
>
443 let mut accum
= init
;
444 if self.start
<= self.end
{
447 if self.start
< self.end
{
448 let n
= self.end
.sub_one();
449 (mem
::replace(&mut self.end
, n
), false)
451 self.start
.replace_one();
452 (self.end
.replace_zero(), true)
454 accum
= f(accum
, x
)?
;
462 #[stable(feature = "fused", since = "1.26.0")]
463 impl<A
: Step
> FusedIterator
for ops
::RangeInclusive
<A
> {}
/// Compensate removal of some impls per
/// https://github.com/rust-lang/rust/pull/49305#issuecomment-376293243
///
/// Crate-private stand-in for `TryFrom<usize>` with a unit error, so the
/// `step_impl_*` macros can convert step counts uniformly.
trait PrivateTryFromUsize: Sized {
    fn private_try_from(n: usize) -> Result<Self, ()>;
}
471 impl<T
> PrivateTryFromUsize
for T
where T
: TryFrom
<usize> {
473 fn private_try_from(n
: usize) -> Result
<Self, ()> {
474 T
::try_from(n
).map_err(|_
| ())
// no possible bounds violation
//
// For targets at least as wide as `usize`, the cast is lossless, so the
// conversion is infallible.
macro_rules! try_from_unbounded {
    ($($target:ty),*) => {$(
        impl PrivateTryFromUsize for $target {
            #[inline]
            fn private_try_from(value: usize) -> Result<Self, ()> {
                Ok(value as $target)
            }
        }
    )*}
}
// unsigned to signed (only positive bound)
//
// `usize` is non-negative, so only the target's upper bound can be violated.
macro_rules! try_from_upper_bounded {
    ($($target:ty),*) => {$(
        impl PrivateTryFromUsize for $target {
            #[inline]
            fn private_try_from(u: usize) -> Result<$target, ()> {
                if u > (<$target>::max_value() as usize) {
                    Err(())
                } else {
                    Ok(u as $target)
                }
            }
        }
    )*}
}
// Pointer-width-specific `PrivateTryFromUsize` impls: on 16-bit targets,
// `usize` fits losslessly into anything 16 bits or wider.
#[cfg(target_pointer_width = "16")]
mod ptr_try_from_impls {
    use super::PrivateTryFromUsize;

    try_from_unbounded!(u16, u32, u64, u128);
    try_from_unbounded!(i32, i64, i128);
}
// On 32-bit targets, same-width conversions (u16 is narrower; i32 can't hold
// all of u32) must be range-checked; wider targets are unbounded.
#[cfg(target_pointer_width = "32")]
mod ptr_try_from_impls {
    use super::PrivateTryFromUsize;

    try_from_upper_bounded!(u16);
    try_from_unbounded!(u32, u64, u128);
    try_from_upper_bounded!(i32);
    try_from_unbounded!(i64, i128);
}
525 #[cfg(target_pointer_width = "64")]
526 mod ptr_try_from_impls
{
527 use super::PrivateTryFromUsize
;
529 try_from_upper_bounded
!(u16, u32);
530 try_from_unbounded
!(u64, u128
);
531 try_from_upper_bounded
!(i32, i64);
532 try_from_unbounded
!(i128
);