]> git.proxmox.com Git - rustc.git/blob - src/libcore/ptr/mut_ptr.rs
New upstream version 1.46.0~beta.2+dfsg1
[rustc.git] / src / libcore / ptr / mut_ptr.rs
1 use super::*;
2 use crate::cmp::Ordering::{self, Equal, Greater, Less};
3 use crate::intrinsics;
4
5 #[lang = "mut_ptr"]
6 impl<T: ?Sized> *mut T {
7 /// Returns `true` if the pointer is null.
8 ///
9 /// Note that unsized types have many possible null pointers, as only the
10 /// raw data pointer is considered, not their length, vtable, etc.
11 /// Therefore, two pointers that are null may still not compare equal to
12 /// each other.
13 ///
14 /// # Examples
15 ///
16 /// Basic usage:
17 ///
18 /// ```
19 /// let mut s = [1, 2, 3];
20 /// let ptr: *mut u32 = s.as_mut_ptr();
21 /// assert!(!ptr.is_null());
22 /// ```
23 #[stable(feature = "rust1", since = "1.0.0")]
24 #[inline]
25 pub fn is_null(self) -> bool {
26 // Compare via a cast to a thin pointer, so fat pointers are only
27 // considering their "data" part for null-ness.
28 (self as *mut u8) == null_mut()
29 }
30
31 /// Casts to a pointer of another type.
32 #[stable(feature = "ptr_cast", since = "1.38.0")]
33 #[rustc_const_stable(feature = "const_ptr_cast", since = "1.38.0")]
34 #[inline]
35 pub const fn cast<U>(self) -> *mut U {
36 self as _
37 }
38
    /// Returns `None` if the pointer is null, or else returns a reference to
    /// the value wrapped in `Some`.
    ///
    /// # Safety
    ///
    /// While this method and its mutable counterpart are useful for
    /// null-safety, it is important to note that this is still an unsafe
    /// operation because the returned value could be pointing to invalid
    /// memory.
    ///
    /// When calling this method, you have to ensure that if the pointer is
    /// non-NULL, then it is properly aligned, dereferenceable (for the whole
    /// size of `T`) and points to an initialized instance of `T`. This applies
    /// even if the result of this method is unused!
    /// (The part about being initialized is not yet fully decided, but until
    /// it is, the only safe approach is to ensure that they are indeed initialized.)
    ///
    /// Additionally, the lifetime `'a` returned is arbitrarily chosen and does
    /// not necessarily reflect the actual lifetime of the data. It is up to the
    /// caller to ensure that for the duration of this lifetime, the memory this
    /// pointer points to does not get written to outside of `UnsafeCell<U>`.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
    ///
    /// unsafe {
    ///     if let Some(val_back) = ptr.as_ref() {
    ///         println!("We got back the value: {}!", val_back);
    ///     }
    /// }
    /// ```
    ///
    /// # Null-unchecked version
    ///
    /// If you are sure the pointer can never be null and are looking for some kind of
    /// `as_ref_unchecked` that returns the `&T` instead of `Option<&T>`, know that you can
    /// dereference the pointer directly.
    ///
    /// ```
    /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
    ///
    /// unsafe {
    ///     let val_back = &*ptr;
    ///     println!("We got back the value: {}!", val_back);
    /// }
    /// ```
    #[stable(feature = "ptr_as_ref", since = "1.9.0")]
    #[inline]
    pub unsafe fn as_ref<'a>(self) -> Option<&'a T> {
        // SAFETY: the caller must guarantee that `self` is valid for a
        // (shared) reference for the whole arbitrarily chosen lifetime `'a`
        // if it isn't null.
        if self.is_null() { None } else { unsafe { Some(&*self) } }
    }
96
    /// Calculates the offset from a pointer.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined
    /// Behavior:
    ///
    /// * Both the starting and resulting pointer must be either in bounds or one
    ///   byte past the end of the same allocated object. Note that in Rust,
    ///   every (stack-allocated) variable is considered a separate allocated object.
    ///
    /// * The computed offset, **in bytes**, cannot overflow an `isize`.
    ///
    /// * The offset being in bounds cannot rely on "wrapping around" the address
    ///   space. That is, the infinite-precision sum, **in bytes** must fit in a `usize`.
    ///
    /// The compiler and standard library generally tries to ensure allocations
    /// never reach a size where an offset is a concern. For instance, `Vec`
    /// and `Box` ensure they never allocate more than `isize::MAX` bytes, so
    /// `vec.as_ptr().add(vec.len())` is always safe.
    ///
    /// Most platforms fundamentally can't even construct such an allocation.
    /// For instance, no known 64-bit platform can ever serve a request
    /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
    /// However, some 32-bit and 16-bit platforms may successfully serve a request for
    /// more than `isize::MAX` bytes with things like Physical Address
    /// Extension. As such, memory acquired directly from allocators or memory
    /// mapped files *may* be too large to handle with this function.
    ///
    /// Consider using [`wrapping_offset`] instead if these constraints are
    /// difficult to satisfy. The only advantage of this method is that it
    /// enables more aggressive compiler optimizations.
    ///
    /// [`wrapping_offset`]: #method.wrapping_offset
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let mut s = [1, 2, 3];
    /// let ptr: *mut u32 = s.as_mut_ptr();
    ///
    /// unsafe {
    ///     println!("{}", *ptr.offset(1));
    ///     println!("{}", *ptr.offset(2));
    /// }
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_unstable(feature = "const_ptr_offset", issue = "71499")]
    #[inline]
    pub const unsafe fn offset(self, count: isize) -> *mut T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `offset`.
        // The obtained pointer is valid for writes since the caller must
        // guarantee that it points to the same allocated object as `self`.
        unsafe { intrinsics::offset(self, count) as *mut T }
    }
161
    /// Calculates the offset from a pointer using wrapping arithmetic.
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// The resulting pointer does not need to be in bounds, but it is
    /// potentially hazardous to dereference (which requires `unsafe`).
    ///
    /// In particular, the resulting pointer remains attached to the same allocated
    /// object that `self` points to. It may *not* be used to access a
    /// different allocated object. Note that in Rust,
    /// every (stack-allocated) variable is considered a separate allocated object.
    ///
    /// In other words, `x.wrapping_offset(y.wrapping_offset_from(x))` is
    /// *not* the same as `y`, and dereferencing it is undefined behavior
    /// unless `x` and `y` point into the same allocated object.
    ///
    /// Compared to [`offset`], this method basically delays the requirement of staying
    /// within the same allocated object: [`offset`] is immediate Undefined Behavior when
    /// crossing object boundaries; `wrapping_offset` produces a pointer but still leads
    /// to Undefined Behavior if that pointer is dereferenced. [`offset`] can be optimized
    /// better and is thus preferable in performance-sensitive code.
    ///
    /// If you need to cross object boundaries, cast the pointer to an integer and
    /// do the arithmetic there.
    ///
    /// [`offset`]: #method.offset
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// // Iterate using a raw pointer in increments of two elements
    /// let mut data = [1u8, 2, 3, 4, 5];
    /// let mut ptr: *mut u8 = data.as_mut_ptr();
    /// let step = 2;
    /// let end_rounded_up = ptr.wrapping_offset(6);
    ///
    /// while ptr != end_rounded_up {
    ///     unsafe {
    ///         *ptr = 0;
    ///     }
    ///     ptr = ptr.wrapping_offset(step);
    /// }
    /// assert_eq!(&data, &[0, 2, 0, 4, 0]);
    /// ```
    #[stable(feature = "ptr_wrapping_offset", since = "1.16.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_unstable(feature = "const_ptr_offset", issue = "71499")]
    #[inline]
    pub const fn wrapping_offset(self, count: isize) -> *mut T
    where
        T: Sized,
    {
        // SAFETY: the `arith_offset` intrinsic has no prerequisites to be called;
        // computing an out-of-bounds pointer is fine here, only *dereferencing*
        // the result can be UB (hence this fn is safe while `offset` is not).
        unsafe { intrinsics::arith_offset(self, count) as *mut T }
    }
221
    /// Returns `None` if the pointer is null, or else returns a mutable
    /// reference to the value wrapped in `Some`.
    ///
    /// # Safety
    ///
    /// As with [`as_ref`], this is unsafe because it cannot verify the validity
    /// of the returned pointer, nor can it ensure that the lifetime `'a`
    /// returned is indeed a valid lifetime for the contained data.
    ///
    /// When calling this method, you have to ensure that *either* the pointer is NULL *or*
    /// all of the following is true:
    /// - it is properly aligned
    /// - it must point to an initialized instance of T; in particular, the pointer must be
    ///   "dereferenceable" in the sense defined [here].
    ///
    /// This applies even if the result of this method is unused!
    /// (The part about being initialized is not yet fully decided, but until
    /// it is the only safe approach is to ensure that they are indeed initialized.)
    ///
    /// Additionally, the lifetime `'a` returned is arbitrarily chosen and does
    /// not necessarily reflect the actual lifetime of the data. *You* must enforce
    /// Rust's aliasing rules. In particular, for the duration of this lifetime,
    /// the memory this pointer points to must not get accessed (read or written)
    /// through any other pointer.
    ///
    /// [here]: crate::ptr#safety
    /// [`as_ref`]: #method.as_ref
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let mut s = [1, 2, 3];
    /// let ptr: *mut u32 = s.as_mut_ptr();
    /// let first_value = unsafe { ptr.as_mut().unwrap() };
    /// *first_value = 4;
    /// println!("{:?}", s); // It'll print: "[4, 2, 3]".
    /// ```
    ///
    /// # Null-unchecked version
    ///
    /// If you are sure the pointer can never be null and are looking for some kind of
    /// `as_mut_unchecked` that returns the `&mut T` instead of `Option<&mut T>`, know that
    /// you can dereference the pointer directly.
    ///
    /// ```
    /// let mut s = [1, 2, 3];
    /// let ptr: *mut u32 = s.as_mut_ptr();
    /// let first_value = unsafe { &mut *ptr };
    /// *first_value = 4;
    /// println!("{:?}", s); // It'll print: "[4, 2, 3]".
    /// ```
    #[stable(feature = "ptr_as_ref", since = "1.9.0")]
    #[inline]
    pub unsafe fn as_mut<'a>(self) -> Option<&'a mut T> {
        // SAFETY: the caller must guarantee that `self` is valid for
        // a mutable reference if it isn't null.
        if self.is_null() { None } else { unsafe { Some(&mut *self) } }
    }
282
    /// Returns whether two pointers are guaranteed to be equal.
    ///
    /// At runtime this function behaves like `self == other`.
    /// However, in some contexts (e.g., compile-time evaluation),
    /// it is not always possible to determine equality of two pointers, so this function may
    /// spuriously return `false` for pointers that later actually turn out to be equal.
    /// But when it returns `true`, the pointers are guaranteed to be equal.
    ///
    /// This function is the mirror of [`guaranteed_ne`], but not its inverse. There are pointer
    /// comparisons for which both functions return `false`.
    ///
    /// [`guaranteed_ne`]: #method.guaranteed_ne
    ///
    /// The return value may change depending on the compiler version and unsafe code may not
    /// rely on the result of this function for soundness. It is suggested to only use this function
    /// for performance optimizations where spurious `false` return values by this function do not
    /// affect the outcome, but just the performance.
    /// The consequences of using this method to make runtime and compile-time code behave
    /// differently have not been explored. This method should not be used to introduce such
    /// differences, and it should also not be stabilized before we have a better understanding
    /// of this issue.
    #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
    #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
    #[inline]
    #[cfg(not(bootstrap))]
    pub const fn guaranteed_eq(self, other: *mut T) -> bool
    where
        T: Sized,
    {
        // The intrinsic compares the `*const` forms; it has no safety
        // preconditions, so this fn is safe despite operating on raw pointers.
        intrinsics::ptr_guaranteed_eq(self as *const _, other as *const _)
    }
314
315 /// Returns whether two pointers are guaranteed to be inequal.
316 ///
317 /// At runtime this function behaves like `self != other`.
318 /// However, in some contexts (e.g., compile-time evaluation),
319 /// it is not always possible to determine the inequality of two pointers, so this function may
320 /// spuriously return `false` for pointers that later actually turn out to be inequal.
321 /// But when it returns `true`, the pointers are guaranteed to be inequal.
322 ///
323 /// This function is the mirror of [`guaranteed_eq`], but not its inverse. There are pointer
324 /// comparisons for which both functions return `false`.
325 ///
326 /// [`guaranteed_eq`]: #method.guaranteed_eq
327 ///
328 /// The return value may change depending on the compiler version and unsafe code may not
329 /// rely on the result of this function for soundness. It is suggested to only use this function
330 /// for performance optimizations where spurious `false` return values by this function do not
331 /// affect the outcome, but just the performance.
332 /// The consequences of using this method to make runtime and compile-time code behave
333 /// differently have not been explored. This method should not be used to introduce such
334 /// differences, and it should also not be stabilized before we have a better understanding
335 /// of this issue.
336 #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
337 #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
338 #[inline]
339 #[cfg(not(bootstrap))]
340 pub const unsafe fn guaranteed_ne(self, other: *mut T) -> bool
341 where
342 T: Sized,
343 {
344 intrinsics::ptr_guaranteed_ne(self as *const _, other as *const _)
345 }
346
    /// Calculates the distance between two pointers. The returned value is in
    /// units of T: the distance in bytes is divided by `mem::size_of::<T>()`.
    ///
    /// This function is the inverse of [`offset`].
    ///
    /// [`offset`]: #method.offset-1
    /// [`wrapping_offset_from`]: #method.wrapping_offset_from-1
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined
    /// Behavior:
    ///
    /// * Both the starting and other pointer must be either in bounds or one
    ///   byte past the end of the same allocated object. Note that in Rust,
    ///   every (stack-allocated) variable is considered a separate allocated object.
    ///
    /// * The distance between the pointers, **in bytes**, cannot overflow an `isize`.
    ///
    /// * The distance between the pointers, in bytes, must be an exact multiple
    ///   of the size of `T`.
    ///
    /// * The distance being in bounds cannot rely on "wrapping around" the address space.
    ///
    /// The compiler and standard library generally try to ensure allocations
    /// never reach a size where an offset is a concern. For instance, `Vec`
    /// and `Box` ensure they never allocate more than `isize::MAX` bytes, so
    /// `ptr_into_vec.offset_from(vec.as_ptr())` is always safe.
    ///
    /// Most platforms fundamentally can't even construct such an allocation.
    /// For instance, no known 64-bit platform can ever serve a request
    /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
    /// However, some 32-bit and 16-bit platforms may successfully serve a request for
    /// more than `isize::MAX` bytes with things like Physical Address
    /// Extension. As such, memory acquired directly from allocators or memory
    /// mapped files *may* be too large to handle with this function.
    ///
    /// Consider using [`wrapping_offset_from`] instead if these constraints are
    /// difficult to satisfy. The only advantage of this method is that it
    /// enables more aggressive compiler optimizations.
    ///
    /// # Panics
    ///
    /// This function panics if `T` is a Zero-Sized Type ("ZST").
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// #![feature(ptr_offset_from)]
    ///
    /// let mut a = [0; 5];
    /// let ptr1: *mut i32 = &mut a[1];
    /// let ptr2: *mut i32 = &mut a[3];
    /// unsafe {
    ///     assert_eq!(ptr2.offset_from(ptr1), 2);
    ///     assert_eq!(ptr1.offset_from(ptr2), -2);
    ///     assert_eq!(ptr1.offset(2), ptr2);
    ///     assert_eq!(ptr2.offset(-2), ptr1);
    /// }
    /// ```
    #[unstable(feature = "ptr_offset_from", issue = "41079")]
    #[rustc_const_unstable(feature = "const_ptr_offset_from", issue = "41079")]
    #[inline]
    pub const unsafe fn offset_from(self, origin: *const T) -> isize
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `offset_from`;
        // this simply delegates to the `*const T` implementation.
        unsafe { (self as *const T).offset_from(origin) }
    }
419
    /// Calculates the distance between two pointers. The returned value is in
    /// units of T: the distance in bytes is divided by `mem::size_of::<T>()`.
    ///
    /// If the address difference between the two pointers is not a multiple of
    /// `mem::size_of::<T>()` then the result of the division is rounded towards
    /// zero.
    ///
    /// Though this method is safe for any two pointers, note that its result
    /// will be mostly useless if the two pointers aren't into the same allocated
    /// object, for example if they point to two different local variables.
    ///
    /// # Panics
    ///
    /// This function panics if `T` is a zero-sized type.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// #![feature(ptr_wrapping_offset_from)]
    ///
    /// let mut a = [0; 5];
    /// let ptr1: *mut i32 = &mut a[1];
    /// let ptr2: *mut i32 = &mut a[3];
    /// assert_eq!(ptr2.wrapping_offset_from(ptr1), 2);
    /// assert_eq!(ptr1.wrapping_offset_from(ptr2), -2);
    /// assert_eq!(ptr1.wrapping_offset(2), ptr2);
    /// assert_eq!(ptr2.wrapping_offset(-2), ptr1);
    ///
    /// let ptr1: *mut i32 = 3 as _;
    /// let ptr2: *mut i32 = 13 as _;
    /// assert_eq!(ptr2.wrapping_offset_from(ptr1), 2);
    /// ```
    #[unstable(feature = "ptr_wrapping_offset_from", issue = "41079")]
    #[rustc_deprecated(
        since = "1.46.0",
        reason = "Pointer distances across allocation \
                  boundaries are not typically meaningful. \
                  Use integer subtraction if you really need this."
    )]
    #[inline]
    pub fn wrapping_offset_from(self, origin: *const T) -> isize
    where
        T: Sized,
    {
        // Delegate to the (equally deprecated) `*const T` implementation.
        #[allow(deprecated_in_future, deprecated)]
        (self as *const T).wrapping_offset_from(origin)
    }
469
    /// Calculates the offset from a pointer (convenience for `.offset(count as isize)`).
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined
    /// Behavior:
    ///
    /// * Both the starting and resulting pointer must be either in bounds or one
    ///   byte past the end of the same allocated object. Note that in Rust,
    ///   every (stack-allocated) variable is considered a separate allocated object.
    ///
    /// * The computed offset, **in bytes**, cannot overflow an `isize`.
    ///
    /// * The offset being in bounds cannot rely on "wrapping around" the address
    ///   space. That is, the infinite-precision sum must fit in a `usize`.
    ///
    /// The compiler and standard library generally tries to ensure allocations
    /// never reach a size where an offset is a concern. For instance, `Vec`
    /// and `Box` ensure they never allocate more than `isize::MAX` bytes, so
    /// `vec.as_ptr().add(vec.len())` is always safe.
    ///
    /// Most platforms fundamentally can't even construct such an allocation.
    /// For instance, no known 64-bit platform can ever serve a request
    /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
    /// However, some 32-bit and 16-bit platforms may successfully serve a request for
    /// more than `isize::MAX` bytes with things like Physical Address
    /// Extension. As such, memory acquired directly from allocators or memory
    /// mapped files *may* be too large to handle with this function.
    ///
    /// Consider using [`wrapping_add`] instead if these constraints are
    /// difficult to satisfy. The only advantage of this method is that it
    /// enables more aggressive compiler optimizations.
    ///
    /// [`wrapping_add`]: #method.wrapping_add
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let s: &str = "123";
    /// let ptr: *const u8 = s.as_ptr();
    ///
    /// unsafe {
    ///     println!("{}", *ptr.add(1) as char);
    ///     println!("{}", *ptr.add(2) as char);
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_unstable(feature = "const_ptr_offset", issue = "71499")]
    #[inline]
    pub const unsafe fn add(self, count: usize) -> Self
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `offset`
        // (`add` shares the same safety requirements as `offset`).
        unsafe { self.offset(count as isize) }
    }
532
    /// Calculates the offset from a pointer (convenience for
    /// `.offset((count as isize).wrapping_neg())`).
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined
    /// Behavior:
    ///
    /// * Both the starting and resulting pointer must be either in bounds or one
    ///   byte past the end of the same allocated object. Note that in Rust,
    ///   every (stack-allocated) variable is considered a separate allocated object.
    ///
    /// * The computed offset cannot exceed `isize::MAX` **bytes**.
    ///
    /// * The offset being in bounds cannot rely on "wrapping around" the address
    ///   space. That is, the infinite-precision sum must fit in a usize.
    ///
    /// The compiler and standard library generally tries to ensure allocations
    /// never reach a size where an offset is a concern. For instance, `Vec`
    /// and `Box` ensure they never allocate more than `isize::MAX` bytes, so
    /// `vec.as_ptr().add(vec.len()).sub(vec.len())` is always safe.
    ///
    /// Most platforms fundamentally can't even construct such an allocation.
    /// For instance, no known 64-bit platform can ever serve a request
    /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
    /// However, some 32-bit and 16-bit platforms may successfully serve a request for
    /// more than `isize::MAX` bytes with things like Physical Address
    /// Extension. As such, memory acquired directly from allocators or memory
    /// mapped files *may* be too large to handle with this function.
    ///
    /// Consider using [`wrapping_sub`] instead if these constraints are
    /// difficult to satisfy. The only advantage of this method is that it
    /// enables more aggressive compiler optimizations.
    ///
    /// [`wrapping_sub`]: #method.wrapping_sub
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let s: &str = "123";
    ///
    /// unsafe {
    ///     let end: *const u8 = s.as_ptr().add(3);
    ///     println!("{}", *end.sub(1) as char);
    ///     println!("{}", *end.sub(2) as char);
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_unstable(feature = "const_ptr_offset", issue = "71499")]
    #[inline]
    pub const unsafe fn sub(self, count: usize) -> Self
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `offset`
        // (`sub` is `offset` with the count negated).
        unsafe { self.offset((count as isize).wrapping_neg()) }
    }
596
    /// Calculates the offset from a pointer using wrapping arithmetic.
    /// (convenience for `.wrapping_offset(count as isize)`)
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// The resulting pointer does not need to be in bounds, but it is
    /// potentially hazardous to dereference (which requires `unsafe`).
    ///
    /// In particular, the resulting pointer remains attached to the same allocated
    /// object that `self` points to. It may *not* be used to access a
    /// different allocated object. Note that in Rust,
    /// every (stack-allocated) variable is considered a separate allocated object.
    ///
    /// Compared to [`add`], this method basically delays the requirement of staying
    /// within the same allocated object: [`add`] is immediate Undefined Behavior when
    /// crossing object boundaries; `wrapping_add` produces a pointer but still leads
    /// to Undefined Behavior if that pointer is dereferenced. [`add`] can be optimized
    /// better and is thus preferable in performance-sensitive code.
    ///
    /// If you need to cross object boundaries, cast the pointer to an integer and
    /// do the arithmetic there.
    ///
    /// [`add`]: #method.add
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// // Iterate using a raw pointer in increments of two elements
    /// let data = [1u8, 2, 3, 4, 5];
    /// let mut ptr: *const u8 = data.as_ptr();
    /// let step = 2;
    /// let end_rounded_up = ptr.wrapping_add(6);
    ///
    /// // This loop prints "1, 3, 5, "
    /// while ptr != end_rounded_up {
    ///     unsafe {
    ///         print!("{}, ", *ptr);
    ///     }
    ///     ptr = ptr.wrapping_add(step);
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_unstable(feature = "const_ptr_offset", issue = "71499")]
    #[inline]
    pub const fn wrapping_add(self, count: usize) -> Self
    where
        T: Sized,
    {
        // Wrapping pointer arithmetic is always defined, hence no `unsafe`.
        self.wrapping_offset(count as isize)
    }
653
    /// Calculates the offset from a pointer using wrapping arithmetic.
    /// (convenience for `.wrapping_offset((count as isize).wrapping_neg())`)
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// The resulting pointer does not need to be in bounds, but it is
    /// potentially hazardous to dereference (which requires `unsafe`).
    ///
    /// In particular, the resulting pointer remains attached to the same allocated
    /// object that `self` points to. It may *not* be used to access a
    /// different allocated object. Note that in Rust,
    /// every (stack-allocated) variable is considered a separate allocated object.
    ///
    /// Compared to [`sub`], this method basically delays the requirement of staying
    /// within the same allocated object: [`sub`] is immediate Undefined Behavior when
    /// crossing object boundaries; `wrapping_sub` produces a pointer but still leads
    /// to Undefined Behavior if that pointer is dereferenced. [`sub`] can be optimized
    /// better and is thus preferable in performance-sensitive code.
    ///
    /// If you need to cross object boundaries, cast the pointer to an integer and
    /// do the arithmetic there.
    ///
    /// [`sub`]: #method.sub
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// // Iterate using a raw pointer in increments of two elements (backwards)
    /// let data = [1u8, 2, 3, 4, 5];
    /// let mut ptr: *const u8 = data.as_ptr();
    /// let start_rounded_down = ptr.wrapping_sub(2);
    /// ptr = ptr.wrapping_add(4);
    /// let step = 2;
    /// // This loop prints "5, 3, 1, "
    /// while ptr != start_rounded_down {
    ///     unsafe {
    ///         print!("{}, ", *ptr);
    ///     }
    ///     ptr = ptr.wrapping_sub(step);
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_unstable(feature = "const_ptr_offset", issue = "71499")]
    #[inline]
    pub const fn wrapping_sub(self, count: usize) -> Self
    where
        T: Sized,
    {
        // Wrapping pointer arithmetic is always defined, hence no `unsafe`.
        self.wrapping_offset((count as isize).wrapping_neg())
    }
710
    /// Reads the value from `self` without moving it. This leaves the
    /// memory in `self` unchanged.
    ///
    /// See [`ptr::read`] for safety concerns and examples.
    ///
    /// [`ptr::read`]: ./ptr/fn.read.html
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    pub unsafe fn read(self) -> T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `read`.
        unsafe { read(self) }
    }
726
    /// Performs a volatile read of the value from `self` without moving it. This
    /// leaves the memory in `self` unchanged.
    ///
    /// Volatile operations are intended to act on I/O memory, and are guaranteed
    /// to not be elided or reordered by the compiler across other volatile
    /// operations.
    ///
    /// See [`ptr::read_volatile`] for safety concerns and examples.
    ///
    /// [`ptr::read_volatile`]: ./ptr/fn.read_volatile.html
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    pub unsafe fn read_volatile(self) -> T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `read_volatile`
        // (this is a thin wrapper around `ptr::read_volatile`).
        unsafe { read_volatile(self) }
    }
746
    /// Reads the value from `self` without moving it. This leaves the
    /// memory in `self` unchanged.
    ///
    /// Unlike `read`, the pointer may be unaligned.
    ///
    /// See [`ptr::read_unaligned`] for safety concerns and examples.
    ///
    /// [`ptr::read_unaligned`]: ./ptr/fn.read_unaligned.html
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    pub unsafe fn read_unaligned(self) -> T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `read_unaligned`
        // (this is a thin wrapper around `ptr::read_unaligned`).
        unsafe { read_unaligned(self) }
    }
764
    /// Copies `count * size_of<T>` bytes from `self` to `dest`. The source
    /// and destination may overlap.
    ///
    /// NOTE: this has the *same* argument order as [`ptr::copy`].
    ///
    /// See [`ptr::copy`] for safety concerns and examples.
    ///
    /// [`ptr::copy`]: ./ptr/fn.copy.html
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    pub unsafe fn copy_to(self, dest: *mut T, count: usize)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `copy`;
        // `self` is the source and `dest` is the destination.
        unsafe { copy(self, dest, count) }
    }
782
    /// Copies `count * size_of<T>` bytes from `self` to `dest`. The source
    /// and destination may *not* overlap.
    ///
    /// NOTE: this has the *same* argument order as [`ptr::copy_nonoverlapping`].
    ///
    /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
    ///
    /// [`ptr::copy_nonoverlapping`]: ./ptr/fn.copy_nonoverlapping.html
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    pub unsafe fn copy_to_nonoverlapping(self, dest: *mut T, count: usize)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for
        // `copy_nonoverlapping`; `self` is the source, `dest` the destination.
        unsafe { copy_nonoverlapping(self, dest, count) }
    }
800
801 /// Copies `count * size_of<T>` bytes from `src` to `self`. The source
802 /// and destination may overlap.
803 ///
804 /// NOTE: this has the *opposite* argument order of [`ptr::copy`].
805 ///
806 /// See [`ptr::copy`] for safety concerns and examples.
807 ///
808 /// [`ptr::copy`]: ./ptr/fn.copy.html
809 #[stable(feature = "pointer_methods", since = "1.26.0")]
810 #[inline]
811 pub unsafe fn copy_from(self, src: *const T, count: usize)
812 where
813 T: Sized,
814 {
815 // SAFETY: the caller must uphold the safety contract for `copy`.
816 unsafe { copy(src, self, count) }
817 }
818
819 /// Copies `count * size_of<T>` bytes from `src` to `self`. The source
820 /// and destination may *not* overlap.
821 ///
822 /// NOTE: this has the *opposite* argument order of [`ptr::copy_nonoverlapping`].
823 ///
824 /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
825 ///
826 /// [`ptr::copy_nonoverlapping`]: ./ptr/fn.copy_nonoverlapping.html
827 #[stable(feature = "pointer_methods", since = "1.26.0")]
828 #[inline]
829 pub unsafe fn copy_from_nonoverlapping(self, src: *const T, count: usize)
830 where
831 T: Sized,
832 {
833 // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
834 unsafe { copy_nonoverlapping(src, self, count) }
835 }
836
837 /// Executes the destructor (if any) of the pointed-to value.
838 ///
839 /// See [`ptr::drop_in_place`] for safety concerns and examples.
840 ///
841 /// [`ptr::drop_in_place`]: ./ptr/fn.drop_in_place.html
842 #[stable(feature = "pointer_methods", since = "1.26.0")]
843 #[inline]
844 pub unsafe fn drop_in_place(self) {
845 // SAFETY: the caller must uphold the safety contract for `drop_in_place`.
846 unsafe { drop_in_place(self) }
847 }
848
849 /// Overwrites a memory location with the given value without reading or
850 /// dropping the old value.
851 ///
852 /// See [`ptr::write`] for safety concerns and examples.
853 ///
854 /// [`ptr::write`]: ./ptr/fn.write.html
855 #[stable(feature = "pointer_methods", since = "1.26.0")]
856 #[inline]
857 pub unsafe fn write(self, val: T)
858 where
859 T: Sized,
860 {
861 // SAFETY: the caller must uphold the safety contract for `write`.
862 unsafe { write(self, val) }
863 }
864
865 /// Invokes memset on the specified pointer, setting `count * size_of::<T>()`
866 /// bytes of memory starting at `self` to `val`.
867 ///
868 /// See [`ptr::write_bytes`] for safety concerns and examples.
869 ///
870 /// [`ptr::write_bytes`]: ./ptr/fn.write_bytes.html
871 #[stable(feature = "pointer_methods", since = "1.26.0")]
872 #[inline]
873 pub unsafe fn write_bytes(self, val: u8, count: usize)
874 where
875 T: Sized,
876 {
877 // SAFETY: the caller must uphold the safety contract for `write_bytes`.
878 unsafe { write_bytes(self, val, count) }
879 }
880
881 /// Performs a volatile write of a memory location with the given value without
882 /// reading or dropping the old value.
883 ///
884 /// Volatile operations are intended to act on I/O memory, and are guaranteed
885 /// to not be elided or reordered by the compiler across other volatile
886 /// operations.
887 ///
888 /// See [`ptr::write_volatile`] for safety concerns and examples.
889 ///
890 /// [`ptr::write_volatile`]: ./ptr/fn.write_volatile.html
891 #[stable(feature = "pointer_methods", since = "1.26.0")]
892 #[inline]
893 pub unsafe fn write_volatile(self, val: T)
894 where
895 T: Sized,
896 {
897 // SAFETY: the caller must uphold the safety contract for `write_volatile`.
898 unsafe { write_volatile(self, val) }
899 }
900
901 /// Overwrites a memory location with the given value without reading or
902 /// dropping the old value.
903 ///
904 /// Unlike `write`, the pointer may be unaligned.
905 ///
906 /// See [`ptr::write_unaligned`] for safety concerns and examples.
907 ///
908 /// [`ptr::write_unaligned`]: ./ptr/fn.write_unaligned.html
909 #[stable(feature = "pointer_methods", since = "1.26.0")]
910 #[inline]
911 pub unsafe fn write_unaligned(self, val: T)
912 where
913 T: Sized,
914 {
915 // SAFETY: the caller must uphold the safety contract for `write_unaligned`.
916 unsafe { write_unaligned(self, val) }
917 }
918
    /// Replaces the value at `self` with `src`, returning the old
    /// value, without dropping either.
    ///
    /// See [`ptr::replace`] for safety concerns and examples.
    ///
    /// [`ptr::replace`]: ./ptr/fn.replace.html
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    pub unsafe fn replace(self, src: T) -> T
    where
        T: Sized,
    {
        // Thin wrapper over the module-level free function; the old value is
        // returned by value, which is why `T: Sized` is required.
        // SAFETY: the caller must uphold the safety contract for `replace`.
        unsafe { replace(self, src) }
    }
934
935 /// Swaps the values at two mutable locations of the same type, without
936 /// deinitializing either. They may overlap, unlike `mem::swap` which is
937 /// otherwise equivalent.
938 ///
939 /// See [`ptr::swap`] for safety concerns and examples.
940 ///
941 /// [`ptr::swap`]: ./ptr/fn.swap.html
942 #[stable(feature = "pointer_methods", since = "1.26.0")]
943 #[inline]
944 pub unsafe fn swap(self, with: *mut T)
945 where
946 T: Sized,
947 {
948 // SAFETY: the caller must uphold the safety contract for `swap`.
949 unsafe { swap(self, with) }
950 }
951
952 /// Computes the offset that needs to be applied to the pointer in order to make it aligned to
953 /// `align`.
954 ///
955 /// If it is not possible to align the pointer, the implementation returns
956 /// `usize::MAX`. It is permissible for the implementation to *always*
957 /// return `usize::MAX`. Only your algorithm's performance can depend
958 /// on getting a usable offset here, not its correctness.
959 ///
960 /// The offset is expressed in number of `T` elements, and not bytes. The value returned can be
961 /// used with the `wrapping_add` method.
962 ///
963 /// There are no guarantees whatsoever that offsetting the pointer will not overflow or go
964 /// beyond the allocation that the pointer points into. It is up to the caller to ensure that
965 /// the returned offset is correct in all terms other than alignment.
966 ///
967 /// # Panics
968 ///
969 /// The function panics if `align` is not a power-of-two.
970 ///
971 /// # Examples
972 ///
973 /// Accessing adjacent `u8` as `u16`
974 ///
975 /// ```
976 /// # fn foo(n: usize) {
977 /// # use std::mem::align_of;
978 /// # unsafe {
979 /// let x = [5u8, 6u8, 7u8, 8u8, 9u8];
980 /// let ptr = &x[n] as *const u8;
981 /// let offset = ptr.align_offset(align_of::<u16>());
982 /// if offset < x.len() - n - 1 {
983 /// let u16_ptr = ptr.add(offset) as *const u16;
984 /// assert_ne!(*u16_ptr, 500);
985 /// } else {
986 /// // while the pointer can be aligned via `offset`, it would point
987 /// // outside the allocation
988 /// }
989 /// # } }
990 /// ```
991 #[stable(feature = "align_offset", since = "1.36.0")]
992 pub fn align_offset(self, align: usize) -> usize
993 where
994 T: Sized,
995 {
996 if !align.is_power_of_two() {
997 panic!("align_offset: align is not a power-of-two");
998 }
999 // SAFETY: `align` has been checked to be a power of 2 above
1000 unsafe { align_offset(self, align) }
1001 }
1002 }
1003
#[lang = "mut_slice_ptr"]
impl<T> *mut [T] {
    /// Returns the length of a raw slice.
    ///
    /// The returned value is the number of **elements**, not the number of bytes.
    ///
    /// This function is safe, even when the raw slice cannot be cast to a slice
    /// reference because the pointer is null or unaligned.
    ///
    /// # Examples
    ///
    /// ```rust
    /// #![feature(slice_ptr_len)]
    ///
    /// use std::ptr;
    ///
    /// let slice: *mut [i8] = ptr::slice_from_raw_parts_mut(ptr::null_mut(), 3);
    /// assert_eq!(slice.len(), 3);
    /// ```
    #[inline]
    #[unstable(feature = "slice_ptr_len", issue = "71146")]
    #[rustc_const_unstable(feature = "const_slice_ptr_len", issue = "71146")]
    pub const fn len(self) -> usize {
        // Reads the `len` half of the fat pointer by reinterpreting it
        // through the `Repr` union; no memory behind the pointer is touched.
        // SAFETY: this is safe because `*const [T]` and `FatPtr<T>` have the same layout.
        // Only `std` can make this guarantee.
        unsafe { Repr { rust_mut: self }.raw }.len
    }
}
1032
// Equality for pointers
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> PartialEq for *mut T {
    #[inline]
    fn eq(&self, other: &*mut T) -> bool {
        // Built-in comparison of the raw pointer values. Note that for fat
        // pointers this considers more than just the data address (see the
        // `is_null` docs), so two null fat pointers may still compare unequal.
        *self == *other
    }
}
1041
// Raw-pointer equality is a total equivalence relation, so `Eq` is just a
// marker impl with no methods.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Eq for *mut T {}
1044
1045 #[stable(feature = "rust1", since = "1.0.0")]
1046 impl<T: ?Sized> Ord for *mut T {
1047 #[inline]
1048 fn cmp(&self, other: &*mut T) -> Ordering {
1049 if self < other {
1050 Less
1051 } else if self == other {
1052 Equal
1053 } else {
1054 Greater
1055 }
1056 }
1057 }
1058
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> PartialOrd for *mut T {
    #[inline]
    fn partial_cmp(&self, other: &*mut T) -> Option<Ordering> {
        // Pointer ordering is total (see the `Ord` impl), so this always
        // returns `Some`.
        Some(self.cmp(other))
    }

    // The individual operators below are overridden to use the primitive
    // pointer comparisons directly instead of routing through `partial_cmp`.
    #[inline]
    fn lt(&self, other: &*mut T) -> bool {
        *self < *other
    }

    #[inline]
    fn le(&self, other: &*mut T) -> bool {
        *self <= *other
    }

    #[inline]
    fn gt(&self, other: &*mut T) -> bool {
        *self > *other
    }

    #[inline]
    fn ge(&self, other: &*mut T) -> bool {
        *self >= *other
    }
}