1 // Copyright 2013-2016 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
13 use ops
::{self, Add, Sub}
;
16 use super::{FusedIterator, TrustedLen}
;
18 /// Objects that can be stepped over in both directions.
///
20 /// The `steps_between` function provides a way to efficiently compare
21 /// two `Step` objects.
// NOTE(review): this attribute is missing its trailing `issue = "..."` and
// closing `)]` in this copy -- looks like extraction loss; confirm against
// the upstream source.
22 #[unstable(feature = "step_trait",
23 reason
= "likely to be replaced by finer-grained traits",
25 pub trait Step
: Clone
+ PartialOrd
+ Sized
{
26 /// Returns the number of steps between two step objects. The count is
27 /// inclusive of `start` and exclusive of `end`.
///
29 /// Returns `None` if it is not possible to calculate `steps_between`.
31 fn steps_between(start
: &Self, end
: &Self) -> Option
<usize>;
33 /// Replaces this step with `1`, returning itself.
34 fn replace_one(&mut self) -> Self;
36 /// Replaces this step with `0`, returning itself.
37 fn replace_zero(&mut self) -> Self;
39 /// Adds one to this step, returning the result.
40 fn add_one(&self) -> Self;
42 /// Subtracts one from this step, returning the result.
43 fn sub_one(&self) -> Self;
45 /// Adds a `usize` to this step, returning `None` on overflow.
46 fn add_usize(&self, n
: usize) -> Option
<Self>;
// NOTE(review): the trait's closing `}` is not visible in this copy.
49 // These are still macro-generated because the integer literals resolve to different types.
//
// Expands to the `Step` methods whose bodies are identical for every
// primitive integer type; it is invoked from inside the per-type impl
// macros below (`step_impl_unsigned`, `step_impl_signed`,
// `step_impl_no_between`).
//
// NOTE(review): the macro's matcher, the method bodies, and the closing
// delimiters are elided in this copy of the file.
50 macro_rules
! step_identical_methods
{
53 fn replace_one(&mut self) -> Self {
58 fn replace_zero(&mut self) -> Self {
63 fn add_one(&self) -> Self {
68 fn sub_one(&self) -> Self {
// Generates `Step` impls for unsigned integer types no wider than `usize`
// (invoked below for usize u8 u16 u32, plus u64 on 64-bit targets only).
//
// NOTE(review): the macro matcher, the impl header, some guard branches,
// and closing delimiters are elided in this copy.
74 macro_rules
! step_impl_unsigned
{
76 #[unstable(feature = "step_trait",
77 reason
= "likely to be replaced by finer-grained traits",
81 #[allow(trivial_numeric_casts)]
82 fn steps_between(start
: &$t
, end
: &$t
) -> Option
<usize> {
84 // Note: We assume $t <= usize here
// (so the `as usize` cast below cannot truncate).
85 Some((*end
- *start
) as usize)
92 #[allow(unreachable_patterns)]
93 fn add_usize(&self, n
: usize) -> Option
<Self> {
// Convert `n` to $t first; if the conversion or the addition
// overflows, the result is `None` (the `Err` arm is elided in
// this copy).
94 match <$t
>::try_from(n
) {
95 Ok(n_as_t
) => self.checked_add(n_as_t
),
// Shared replace_one/replace_zero/add_one/sub_one bodies.
100 step_identical_methods
!();
// Generates `Step` impls for signed integer types. Each invocation pairs a
// signed type with its same-width unsigned type (`[$t:ty : $unsigned:ty]`)
// so `add_usize` can perform the addition in unsigned space.
//
// NOTE(review): the impl header, the `Greater`/`Err` arms, and several
// closing delimiters are elided in this copy.
104 macro_rules
! step_impl_signed
{
105 ($
( [$t
:ty
: $unsigned
:ty
] )*) => ($
(
106 #[unstable(feature = "step_trait",
107 reason
= "likely to be replaced by finer-grained traits",
111 #[allow(trivial_numeric_casts)]
112 fn steps_between(start
: &$t
, end
: &$t
) -> Option
<usize> {
114 // Note: We assume $t <= isize here
115 // Use .wrapping_sub and cast to usize to compute the
116 // difference that may not fit inside the range of isize.
117 Some((*end
as isize).wrapping_sub(*start
as isize) as usize)
124 #[allow(unreachable_patterns)]
125 fn add_usize(&self, n
: usize) -> Option
<Self> {
126 match <$unsigned
>::try_from(n
) {
127 Ok(n_as_unsigned
) => {
128 // Wrapping in unsigned space handles cases like
129 // `-120_i8.add_usize(200) == Some(80_i8)`,
130 // even though 200_usize is out of range for i8.
131 let wrapped
= (*self as $unsigned
).wrapping_add(n_as_unsigned
) as $t
;
132 if wrapped
>= *self {
// `wrapped >= *self` means the signed result did not
// overflow (the `Some(wrapped)` arm appears elided in
// this copy); otherwise:
135 None
// Addition overflowed
// Shared replace_one/replace_zero/add_one/sub_one bodies.
142 step_identical_methods
!();
// Generates `Step` impls for integer types that may be wider than `usize`
// (u64/i64 on non-64-bit targets, u128/i128 everywhere -- see the
// invocations below). `steps_between` takes `_start`/`_end` unused, so it
// presumably returns `None`; its body is elided in this copy.
//
// NOTE(review): the macro matcher, attribute tail, and closing delimiters
// are also elided in this copy.
147 macro_rules
! step_impl_no_between
{
149 #[unstable(feature = "step_trait",
150 reason
= "likely to be replaced by finer-grained traits",
154 fn steps_between(_start
: &Self, _end
: &Self) -> Option
<usize> {
159 fn add_usize(&self, n
: usize) -> Option
<Self> {
// `n as $t` -- presumably lossless because $t is at least as wide
// as usize for the instantiating types; TODO confirm upstream.
160 self.checked_add(n
as $t
)
// Shared replace_one/replace_zero/add_one/sub_one bodies.
163 step_identical_methods
!();
// Instantiate `Step` for every primitive integer type. The 64-bit types
// only get the `steps_between`-capable impls when the target pointer width
// is 64; the 128-bit types never do.
168 step_impl_unsigned
!(usize u8 u16 u32);
169 step_impl_signed
!([isize: usize] [i8: u8] [i16: u16] [i32: u32]);
170 #[cfg(target_pointer_width = "64")]
171 step_impl_unsigned
!(u64);
172 #[cfg(target_pointer_width = "64")]
173 step_impl_signed
!([i64: u64]);
174 // If the target pointer width is not 64-bits, we
175 // assume here that it is less than 64-bits.
176 #[cfg(not(target_pointer_width = "64"))]
177 step_impl_no_between
!(u64 i64);
178 step_impl_no_between
!(u128 i128
);
// Expands to a stable `ExactSizeIterator` marker impl for `ops::Range<$t>`
// for each listed type.
// NOTE(review): the macro matcher and closing delimiters are elided in
// this copy.
180 macro_rules
! range_exact_iter_impl
{
182 #[stable(feature = "rust1", since = "1.0.0")]
183 impl ExactSizeIterator
for ops
::Range
<$t
> { }
// Same idea as `range_exact_iter_impl`, but for `ops::RangeInclusive<$t>`,
// gated behind the unstable `inclusive_range` feature.
// NOTE(review): the macro matcher, the attribute tail (`issue = ...`), and
// closing delimiters are elided in this copy.
187 macro_rules
! range_incl_exact_iter_impl
{
189 #[unstable(feature = "inclusive_range",
190 reason
= "recently added, follows RFC",
192 impl ExactSizeIterator
for ops
::RangeInclusive
<$t
> { }
// Expands to an `unsafe impl TrustedLen` marker for `ops::Range<$t>` for
// each listed type.
// NOTE(review): the macro matcher and closing delimiters are elided in
// this copy.
196 macro_rules
! range_trusted_len_impl
{
198 #[unstable(feature = "trusted_len", issue = "37572")]
199 unsafe impl TrustedLen
for ops
::Range
<$t
> { }
// Same idea as `range_trusted_len_impl`, but for `ops::RangeInclusive<$t>`,
// gated behind the unstable `inclusive_range` feature.
// NOTE(review): the macro matcher, the attribute tail (`issue = ...`), and
// closing delimiters are elided in this copy.
203 macro_rules
! range_incl_trusted_len_impl
{
205 #[unstable(feature = "inclusive_range",
206 reason
= "recently added, follows RFC",
208 unsafe impl TrustedLen
for ops
::RangeInclusive
<$t
> { }
// Iterator for half-open ranges `start..end`: yields `start`, then each
// successor, stopping before `end`.
// NOTE(review): several returns/arms (e.g. the `None` branches of `next`,
// `size_hint`, `nth`) and closing braces are elided in this copy.
212 #[stable(feature = "rust1", since = "1.0.0")]
213 impl<A
: Step
> Iterator
for ops
::Range
<A
> {
217 fn next(&mut self) -> Option
<A
> {
218 if self.start
< self.end
{
219 // We check for overflow here, even though it can't actually
220 // happen. Adding this check does however help llvm vectorize loops
221 // for some ranges that don't get vectorized otherwise,
222 // and this won't actually result in an extra check in an optimized build.
223 if let Some(mut n
) = self.start
.add_usize(1) {
// After the swap, `n` holds the old `start`, which is
// presumably the value returned (return elided in this copy).
224 mem
::swap(&mut n
, &mut self.start
);
// Exact size hint whenever `Step::steps_between` can count the gap.
235 fn size_hint(&self) -> (usize, Option
<usize>) {
236 match Step
::steps_between(&self.start
, &self.end
) {
237 Some(hint
) => (hint
, Some(hint
)),
// Skip `n` elements at once via `add_usize`, avoiding `n` repeated
// `next` calls.
243 fn nth(&mut self, n
: usize) -> Option
<A
> {
244 if let Some(plus_n
) = self.start
.add_usize(n
) {
245 if plus_n
< self.end
{
// Still inside the range: resume after the skipped element.
246 self.start
= plus_n
.add_one();
// Overshot (or overflowed): snap `start` to `end` so the range
// reads as exhausted from now on.
251 self.start
= self.end
.clone();
256 // These macros generate `ExactSizeIterator` impls for various range types.
257 // Range<{u,i}64> and RangeInclusive<{u,i}{32,64,size}> are excluded
258 // because they cannot guarantee having a length <= usize::MAX, which is
259 // required by ExactSizeIterator.
260 range_exact_iter_impl
!(usize u8 u16 u32 isize i8 i16 i32);
261 range_incl_exact_iter_impl
!(u8 u16 i8 i16);
263 // These macros generate `TrustedLen` impls.
265 // They need to guarantee that .size_hint() is either exact, or that
266 // the upper bound is None when it does not fit the type limits.
// (TrustedLen is an unsafe marker trait, hence the `unsafe impl`s the
// macros above expand to.)
267 range_trusted_len_impl
!(usize isize u8 i8 u16 i16 u32 i32 i64 u64);
268 range_incl_trusted_len_impl
!(usize isize u8 i8 u16 i16 u32 i32 i64 u64);
// DoubleEndedIterator for half-open ranges: steps `end` downward toward
// `start` and yields each decremented `end`.
// NOTE(review): the exhausted (`else`) branch and closing braces are
// elided in this copy.
270 #[stable(feature = "rust1", since = "1.0.0")]
271 impl<A
: Step
> DoubleEndedIterator
for ops
::Range
<A
> {
273 fn next_back(&mut self) -> Option
<A
> {
274 if self.start
< self.end
{
// Decrement `end` first, then yield the new `end`: the range is
// exclusive of the old `end`, so the element just below it is the
// correct last element.
275 self.end
= self.end
.sub_one();
276 Some(self.end
.clone())
283 #[unstable(feature = "fused", issue = "35602")]
284 impl<A
: Step
> FusedIterator
for ops
::Range
<A
> {}
// Iterator for `start..` (unbounded above): always advances. Overflow
// while skipping in `nth` is a hard error (`expect`).
// NOTE(review): the return expressions (`Some(n)`, the size-hint tuple,
// `Some(plus_n)`) and closing braces are elided in this copy.
286 #[stable(feature = "rust1", since = "1.0.0")]
287 impl<A
: Step
> Iterator
for ops
::RangeFrom
<A
> {
291 fn next(&mut self) -> Option
<A
> {
// Compute the successor, then swap it into `self.start`; after the
// swap `n` holds the old start, presumably the returned value
// (return elided in this copy).
292 let mut n
= self.start
.add_one();
293 mem
::swap(&mut n
, &mut self.start
);
// Unbounded range: body elided here; presumably reports an unknown
// upper bound.
298 fn size_hint(&self) -> (usize, Option
<usize>) {
303 fn nth(&mut self, n
: usize) -> Option
<A
> {
// Unlike `Range::nth`, overflow panics: there is no `end` to clamp to.
304 let plus_n
= self.start
.add_usize(n
).expect("overflow in RangeFrom::nth");
305 self.start
= plus_n
.add_one();
310 #[unstable(feature = "fused", issue = "35602")]
311 impl<A
: Step
> FusedIterator
for ops
::RangeFrom
<A
> {}
// Iterator for inclusive ranges `start...end`. The final element is the
// `Equal` case; after yielding it the range is made to report empty by
// setting `start` to one and `end` to zero (`replace_one`/`replace_zero`),
// so `start <= end` fails afterwards.
// NOTE(review): several arms (`Greater`, trailing returns such as
// `Some(last)`/`None`) and closing braces are elided in this copy.
313 #[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")]
314 impl<A
: Step
> Iterator
for ops
::RangeInclusive
<A
> {
318 fn next(&mut self) -> Option
<A
> {
319 use cmp
::Ordering
::*;
321 match self.start
.partial_cmp(&self.end
) {
// start < end: step forward and yield the old `start`.
323 let n
= self.start
.add_one();
324 Some(mem
::replace(&mut self.start
, n
))
// start == end: yield the final element, then force the range
// into the empty state described above.
327 let last
= self.start
.replace_one();
328 self.end
.replace_zero();
336 fn size_hint(&self) -> (usize, Option
<usize>) {
// Empty (or already-finished) range; the branch body is elided in
// this copy, presumably reporting a zero hint.
337 if !(self.start
<= self.end
) {
// +1 because the range includes `end`; saturate the lower bound and
// check the upper so overflow yields `None` rather than wrapping.
341 match Step
::steps_between(&self.start
, &self.end
) {
342 Some(hint
) => (hint
.saturating_add(1), hint
.checked_add(1)),
348 fn nth(&mut self, n
: usize) -> Option
<A
> {
349 if let Some(plus_n
) = self.start
.add_usize(n
) {
350 use cmp
::Ordering
::*;
352 match plus_n
.partial_cmp(&self.end
) {
// Landed strictly inside the range: resume after `plus_n`.
354 self.start
= plus_n
.add_one();
// Landed exactly on `end`: yield it, then empty the range.
358 self.start
.replace_one();
359 self.end
.replace_zero();
// Skipped past `end` (or `add_usize` overflowed): empty the range;
// a `None` return presumably follows (elided in this copy).
366 self.start
.replace_one();
367 self.end
.replace_zero();
// DoubleEndedIterator for inclusive ranges: mirror image of `next`,
// stepping `end` downward. The `Equal` arm yields the last element and
// then empties the range (`start` := one, `end` := zero).
// NOTE(review): the `Greater` arm, trailing returns, and closing braces
// are elided in this copy.
372 #[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")]
373 impl<A
: Step
> DoubleEndedIterator
for ops
::RangeInclusive
<A
> {
375 fn next_back(&mut self) -> Option
<A
> {
376 use cmp
::Ordering
::*;
378 match self.start
.partial_cmp(&self.end
) {
// start < end: step `end` down and yield the old `end`.
380 let n
= self.end
.sub_one();
381 Some(mem
::replace(&mut self.end
, n
))
// start == end: yield the final element, then empty the range.
384 let last
= self.end
.replace_zero();
385 self.start
.replace_one();
393 #[unstable(feature = "fused", issue = "35602")]
394 impl<A
: Step
> FusedIterator
for ops
::RangeInclusive
<A
> {}