1 use super::*;
2 use crate::cmp::Ordering::{self, Equal, Greater, Less};
3 use crate::intrinsics;
4 use crate::slice::{self, SliceIndex};
5
6 #[lang = "mut_ptr"]
7 impl<T: ?Sized> *mut T {
8 /// Returns `true` if the pointer is null.
9 ///
10 /// Note that unsized types have many possible null pointers, as only the
11 /// raw data pointer is considered, not their length, vtable, etc.
12 /// Therefore, two pointers that are null may still not compare equal to
13 /// each other.
14 ///
15 /// ## Behavior during const evaluation
16 ///
17 /// When this function is used during const evaluation, it may return `false` for pointers
18 /// that turn out to be null at runtime. Specifically, when a pointer to some memory
19 /// is offset beyond its bounds in such a way that the resulting pointer is null,
20 /// the function will still return `false`. There is no way for CTFE to know
21 /// the absolute position of that memory, so we cannot tell if the pointer is
22 /// null or not.
23 ///
24 /// # Examples
25 ///
26 /// Basic usage:
27 ///
28 /// ```
29 /// let mut s = [1, 2, 3];
30 /// let ptr: *mut u32 = s.as_mut_ptr();
31 /// assert!(!ptr.is_null());
32 /// ```
33 #[stable(feature = "rust1", since = "1.0.0")]
34 #[rustc_const_unstable(feature = "const_ptr_is_null", issue = "74939")]
35 #[inline]
36 pub const fn is_null(self) -> bool {
        // Compare via a cast to a thin pointer, so that for fat pointers
        // only the "data" part is considered for null-ness.
39 (self as *mut u8).guaranteed_eq(null_mut())
40 }
41
42 /// Casts to a pointer of another type.
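    ///
    /// # Examples
    ///
    /// A brief sketch of viewing a value's memory through a differently typed pointer:
    ///
    /// ```
    /// let mut value = 0x0102_0304u32;
    /// let ptr: *mut u32 = &mut value;
    /// // View the same memory as bytes; dereferencing still requires `unsafe`.
    /// let byte_ptr: *mut u8 = ptr.cast::<u8>();
    /// let first_byte = unsafe { *byte_ptr };
    /// assert!(first_byte == 0x01 || first_byte == 0x04); // depends on endianness
    /// ```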
43 #[stable(feature = "ptr_cast", since = "1.38.0")]
44 #[rustc_const_stable(feature = "const_ptr_cast", since = "1.38.0")]
45 #[inline]
46 pub const fn cast<U>(self) -> *mut U {
47 self as _
48 }
49
    /// Decompose a (possibly wide) pointer into its address and metadata components.
51 ///
52 /// The pointer can be later reconstructed with [`from_raw_parts_mut`].
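    ///
    /// # Examples
    ///
    /// A minimal sketch, assuming the unstable `ptr_metadata` feature:
    ///
    /// ```
    /// #![feature(ptr_metadata)]
    ///
    /// let mut array = [1i32, 2, 3];
    /// let ptr: *mut [i32] = &mut array[..];
    /// let (data, len) = ptr.to_raw_parts();
    /// assert_eq!(len, 3);
    /// // Rebuild the wide pointer from its parts.
    /// let rebuilt: *mut [i32] = std::ptr::from_raw_parts_mut(data, len);
    /// assert_eq!(unsafe { &*rebuilt }, &[1, 2, 3]);
    /// ```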
53 #[cfg(not(bootstrap))]
54 #[unstable(feature = "ptr_metadata", issue = "81513")]
55 #[rustc_const_unstable(feature = "ptr_metadata", issue = "81513")]
56 #[inline]
57 pub const fn to_raw_parts(self) -> (*mut (), <T as super::Pointee>::Metadata) {
58 (self.cast(), super::metadata(self))
59 }
60
61 /// Returns `None` if the pointer is null, or else returns a shared reference to
62 /// the value wrapped in `Some`. If the value may be uninitialized, [`as_uninit_ref`]
63 /// must be used instead.
64 ///
65 /// For the mutable counterpart see [`as_mut`].
66 ///
67 /// [`as_uninit_ref`]: #method.as_uninit_ref-1
68 /// [`as_mut`]: #method.as_mut
69 ///
70 /// # Safety
71 ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// all of the following are true:
74 ///
75 /// * The pointer must be properly aligned.
76 ///
    /// * It must be "dereferenceable" in the sense defined in [the module documentation].
78 ///
79 /// * The pointer must point to an initialized instance of `T`.
80 ///
81 /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
82 /// arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
83 /// In particular, for the duration of this lifetime, the memory the pointer points to must
84 /// not get mutated (except inside `UnsafeCell`).
85 ///
86 /// This applies even if the result of this method is unused!
    /// (The part about being initialized is not yet fully decided, but until
    /// it is, the only safe approach is to ensure that the value is indeed initialized.)
89 ///
90 /// [the module documentation]: crate::ptr#safety
91 ///
92 /// # Examples
93 ///
94 /// Basic usage:
95 ///
96 /// ```
97 /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
98 ///
99 /// unsafe {
100 /// if let Some(val_back) = ptr.as_ref() {
101 /// println!("We got back the value: {}!", val_back);
102 /// }
103 /// }
104 /// ```
105 ///
106 /// # Null-unchecked version
107 ///
108 /// If you are sure the pointer can never be null and are looking for some kind of
109 /// `as_ref_unchecked` that returns the `&T` instead of `Option<&T>`, know that you can
110 /// dereference the pointer directly.
111 ///
112 /// ```
113 /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
114 ///
115 /// unsafe {
116 /// let val_back = &*ptr;
117 /// println!("We got back the value: {}!", val_back);
118 /// }
119 /// ```
120 #[stable(feature = "ptr_as_ref", since = "1.9.0")]
121 #[inline]
122 pub unsafe fn as_ref<'a>(self) -> Option<&'a T> {
123 // SAFETY: the caller must guarantee that `self` is valid for a
124 // reference if it isn't null.
125 if self.is_null() { None } else { unsafe { Some(&*self) } }
126 }
127
128 /// Returns `None` if the pointer is null, or else returns a shared reference to
129 /// the value wrapped in `Some`. In contrast to [`as_ref`], this does not require
130 /// that the value has to be initialized.
131 ///
132 /// For the mutable counterpart see [`as_uninit_mut`].
133 ///
134 /// [`as_ref`]: #method.as_ref-1
135 /// [`as_uninit_mut`]: #method.as_uninit_mut
136 ///
137 /// # Safety
138 ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// all of the following are true:
141 ///
142 /// * The pointer must be properly aligned.
143 ///
    /// * It must be "dereferenceable" in the sense defined in [the module documentation].
145 ///
146 /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
147 /// arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
148 /// In particular, for the duration of this lifetime, the memory the pointer points to must
149 /// not get mutated (except inside `UnsafeCell`).
150 ///
151 /// This applies even if the result of this method is unused!
152 ///
153 /// [the module documentation]: crate::ptr#safety
154 ///
155 /// # Examples
156 ///
157 /// Basic usage:
158 ///
159 /// ```
160 /// #![feature(ptr_as_uninit)]
161 ///
162 /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
163 ///
164 /// unsafe {
165 /// if let Some(val_back) = ptr.as_uninit_ref() {
166 /// println!("We got back the value: {}!", val_back.assume_init());
167 /// }
168 /// }
169 /// ```
170 #[inline]
171 #[unstable(feature = "ptr_as_uninit", issue = "75402")]
172 pub unsafe fn as_uninit_ref<'a>(self) -> Option<&'a MaybeUninit<T>>
173 where
174 T: Sized,
175 {
176 // SAFETY: the caller must guarantee that `self` meets all the
177 // requirements for a reference.
178 if self.is_null() { None } else { Some(unsafe { &*(self as *const MaybeUninit<T>) }) }
179 }
180
181 /// Calculates the offset from a pointer.
182 ///
183 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
184 /// offset of `3 * size_of::<T>()` bytes.
185 ///
186 /// # Safety
187 ///
188 /// If any of the following conditions are violated, the result is Undefined
189 /// Behavior:
190 ///
191 /// * Both the starting and resulting pointer must be either in bounds or one
192 /// byte past the end of the same allocated object. Note that in Rust,
193 /// every (stack-allocated) variable is considered a separate allocated object.
194 ///
195 /// * The computed offset, **in bytes**, cannot overflow an `isize`.
196 ///
197 /// * The offset being in bounds cannot rely on "wrapping around" the address
    ///   space. That is, the infinite-precision sum, **in bytes**, must fit in a `usize`.
199 ///
    /// The compiler and standard library generally try to ensure allocations
201 /// never reach a size where an offset is a concern. For instance, `Vec`
202 /// and `Box` ensure they never allocate more than `isize::MAX` bytes, so
203 /// `vec.as_ptr().add(vec.len())` is always safe.
204 ///
205 /// Most platforms fundamentally can't even construct such an allocation.
206 /// For instance, no known 64-bit platform can ever serve a request
207 /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
208 /// However, some 32-bit and 16-bit platforms may successfully serve a request for
209 /// more than `isize::MAX` bytes with things like Physical Address
210 /// Extension. As such, memory acquired directly from allocators or memory
211 /// mapped files *may* be too large to handle with this function.
212 ///
213 /// Consider using [`wrapping_offset`] instead if these constraints are
214 /// difficult to satisfy. The only advantage of this method is that it
215 /// enables more aggressive compiler optimizations.
216 ///
217 /// [`wrapping_offset`]: #method.wrapping_offset
218 ///
219 /// # Examples
220 ///
221 /// Basic usage:
222 ///
223 /// ```
224 /// let mut s = [1, 2, 3];
225 /// let ptr: *mut u32 = s.as_mut_ptr();
226 ///
227 /// unsafe {
228 /// println!("{}", *ptr.offset(1));
229 /// println!("{}", *ptr.offset(2));
230 /// }
231 /// ```
232 #[stable(feature = "rust1", since = "1.0.0")]
233 #[must_use = "returns a new pointer rather than modifying its argument"]
234 #[rustc_const_unstable(feature = "const_ptr_offset", issue = "71499")]
235 #[inline]
236 pub const unsafe fn offset(self, count: isize) -> *mut T
237 where
238 T: Sized,
239 {
240 // SAFETY: the caller must uphold the safety contract for `offset`.
241 // The obtained pointer is valid for writes since the caller must
242 // guarantee that it points to the same allocated object as `self`.
243 unsafe { intrinsics::offset(self, count) as *mut T }
244 }
245
246 /// Calculates the offset from a pointer using wrapping arithmetic.
247 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
248 /// offset of `3 * size_of::<T>()` bytes.
249 ///
250 /// # Safety
251 ///
252 /// This operation itself is always safe, but using the resulting pointer is not.
253 ///
254 /// The resulting pointer remains attached to the same allocated object that `self` points to.
255 /// It may *not* be used to access a different allocated object. Note that in Rust, every
256 /// (stack-allocated) variable is considered a separate allocated object.
257 ///
258 /// In other words, `let z = x.wrapping_offset((y as isize) - (x as isize))` does *not* make `z`
259 /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
260 /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
261 /// `x` and `y` point into the same allocated object.
262 ///
263 /// Compared to [`offset`], this method basically delays the requirement of staying within the
264 /// same allocated object: [`offset`] is immediate Undefined Behavior when crossing object
265 /// boundaries; `wrapping_offset` produces a pointer but still leads to Undefined Behavior if a
266 /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`offset`]
267 /// can be optimized better and is thus preferable in performance-sensitive code.
268 ///
269 /// The delayed check only considers the value of the pointer that was dereferenced, not the
270 /// intermediate values used during the computation of the final result. For example,
271 /// `x.wrapping_offset(o).wrapping_offset(o.wrapping_neg())` is always the same as `x`. In other
272 /// words, leaving the allocated object and then re-entering it later is permitted.
273 ///
274 /// If you need to cross object boundaries, cast the pointer to an integer and
275 /// do the arithmetic there.
276 ///
277 /// [`offset`]: #method.offset
278 ///
279 /// # Examples
280 ///
281 /// Basic usage:
282 ///
283 /// ```
284 /// // Iterate using a raw pointer in increments of two elements
285 /// let mut data = [1u8, 2, 3, 4, 5];
286 /// let mut ptr: *mut u8 = data.as_mut_ptr();
287 /// let step = 2;
288 /// let end_rounded_up = ptr.wrapping_offset(6);
289 ///
290 /// while ptr != end_rounded_up {
291 /// unsafe {
292 /// *ptr = 0;
293 /// }
294 /// ptr = ptr.wrapping_offset(step);
295 /// }
296 /// assert_eq!(&data, &[0, 2, 0, 4, 0]);
297 /// ```
298 #[stable(feature = "ptr_wrapping_offset", since = "1.16.0")]
299 #[must_use = "returns a new pointer rather than modifying its argument"]
300 #[rustc_const_unstable(feature = "const_ptr_offset", issue = "71499")]
301 #[inline]
302 pub const fn wrapping_offset(self, count: isize) -> *mut T
303 where
304 T: Sized,
305 {
306 // SAFETY: the `arith_offset` intrinsic has no prerequisites to be called.
307 unsafe { intrinsics::arith_offset(self, count) as *mut T }
308 }
309
310 /// Returns `None` if the pointer is null, or else returns a unique reference to
311 /// the value wrapped in `Some`. If the value may be uninitialized, [`as_uninit_mut`]
312 /// must be used instead.
313 ///
314 /// For the shared counterpart see [`as_ref`].
315 ///
316 /// [`as_uninit_mut`]: #method.as_uninit_mut
317 /// [`as_ref`]: #method.as_ref-1
318 ///
319 /// # Safety
320 ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// all of the following are true:
323 ///
324 /// * The pointer must be properly aligned.
325 ///
    /// * It must be "dereferenceable" in the sense defined in [the module documentation].
327 ///
328 /// * The pointer must point to an initialized instance of `T`.
329 ///
330 /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
331 /// arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
332 /// In particular, for the duration of this lifetime, the memory the pointer points to must
333 /// not get accessed (read or written) through any other pointer.
334 ///
335 /// This applies even if the result of this method is unused!
    /// (The part about being initialized is not yet fully decided, but until
    /// it is, the only safe approach is to ensure that the value is indeed initialized.)
338 ///
339 /// [the module documentation]: crate::ptr#safety
340 ///
341 /// # Examples
342 ///
343 /// Basic usage:
344 ///
345 /// ```
346 /// let mut s = [1, 2, 3];
347 /// let ptr: *mut u32 = s.as_mut_ptr();
348 /// let first_value = unsafe { ptr.as_mut().unwrap() };
349 /// *first_value = 4;
350 /// # assert_eq!(s, [4, 2, 3]);
351 /// println!("{:?}", s); // It'll print: "[4, 2, 3]".
352 /// ```
353 ///
354 /// # Null-unchecked version
355 ///
356 /// If you are sure the pointer can never be null and are looking for some kind of
357 /// `as_mut_unchecked` that returns the `&mut T` instead of `Option<&mut T>`, know that
358 /// you can dereference the pointer directly.
359 ///
360 /// ```
361 /// let mut s = [1, 2, 3];
362 /// let ptr: *mut u32 = s.as_mut_ptr();
363 /// let first_value = unsafe { &mut *ptr };
364 /// *first_value = 4;
365 /// # assert_eq!(s, [4, 2, 3]);
366 /// println!("{:?}", s); // It'll print: "[4, 2, 3]".
367 /// ```
368 #[stable(feature = "ptr_as_ref", since = "1.9.0")]
369 #[inline]
370 pub unsafe fn as_mut<'a>(self) -> Option<&'a mut T> {
        // SAFETY: the caller must guarantee that `self` is valid for
        // a mutable reference if it isn't null.
373 if self.is_null() { None } else { unsafe { Some(&mut *self) } }
374 }
375
376 /// Returns `None` if the pointer is null, or else returns a unique reference to
377 /// the value wrapped in `Some`. In contrast to [`as_mut`], this does not require
378 /// that the value has to be initialized.
379 ///
380 /// For the shared counterpart see [`as_uninit_ref`].
381 ///
382 /// [`as_mut`]: #method.as_mut
383 /// [`as_uninit_ref`]: #method.as_uninit_ref-1
384 ///
385 /// # Safety
386 ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// all of the following are true:
389 ///
390 /// * The pointer must be properly aligned.
391 ///
    /// * It must be "dereferenceable" in the sense defined in [the module documentation].
393 ///
394 /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
395 /// arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
396 /// In particular, for the duration of this lifetime, the memory the pointer points to must
397 /// not get accessed (read or written) through any other pointer.
398 ///
399 /// This applies even if the result of this method is unused!
400 ///
401 /// [the module documentation]: crate::ptr#safety
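    ///
    /// # Examples
    ///
    /// A minimal sketch, assuming the unstable `ptr_as_uninit` feature:
    ///
    /// ```
    /// #![feature(ptr_as_uninit)]
    ///
    /// use std::mem::MaybeUninit;
    ///
    /// let mut storage = MaybeUninit::<u8>::uninit();
    /// let ptr: *mut u8 = storage.as_mut_ptr();
    ///
    /// unsafe {
    ///     if let Some(slot) = ptr.as_uninit_mut() {
    ///         *slot = MaybeUninit::new(42);
    ///     }
    ///     assert_eq!(storage.assume_init(), 42);
    /// }
    /// ```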
402 #[inline]
403 #[unstable(feature = "ptr_as_uninit", issue = "75402")]
404 pub unsafe fn as_uninit_mut<'a>(self) -> Option<&'a mut MaybeUninit<T>>
405 where
406 T: Sized,
407 {
408 // SAFETY: the caller must guarantee that `self` meets all the
409 // requirements for a reference.
410 if self.is_null() { None } else { Some(unsafe { &mut *(self as *mut MaybeUninit<T>) }) }
411 }
412
413 /// Returns whether two pointers are guaranteed to be equal.
414 ///
415 /// At runtime this function behaves like `self == other`.
416 /// However, in some contexts (e.g., compile-time evaluation),
417 /// it is not always possible to determine equality of two pointers, so this function may
418 /// spuriously return `false` for pointers that later actually turn out to be equal.
419 /// But when it returns `true`, the pointers are guaranteed to be equal.
420 ///
421 /// This function is the mirror of [`guaranteed_ne`], but not its inverse. There are pointer
422 /// comparisons for which both functions return `false`.
423 ///
424 /// [`guaranteed_ne`]: #method.guaranteed_ne
425 ///
426 /// The return value may change depending on the compiler version and unsafe code may not
427 /// rely on the result of this function for soundness. It is suggested to only use this function
428 /// for performance optimizations where spurious `false` return values by this function do not
429 /// affect the outcome, but just the performance.
430 /// The consequences of using this method to make runtime and compile-time code behave
431 /// differently have not been explored. This method should not be used to introduce such
432 /// differences, and it should also not be stabilized before we have a better understanding
433 /// of this issue.
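    ///
    /// # Examples
    ///
    /// A small runtime-only sketch, assuming the unstable `const_raw_ptr_comparison` feature:
    ///
    /// ```
    /// #![feature(const_raw_ptr_comparison)]
    ///
    /// let mut x = 0u8;
    /// let ptr: *mut u8 = &mut x;
    /// // At runtime this behaves like `ptr == ptr`.
    /// assert!(ptr.guaranteed_eq(ptr));
    /// ```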
434 #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
435 #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
436 #[inline]
437 pub const fn guaranteed_eq(self, other: *mut T) -> bool
438 where
439 T: Sized,
440 {
441 intrinsics::ptr_guaranteed_eq(self as *const _, other as *const _)
442 }
443
444 /// Returns whether two pointers are guaranteed to be unequal.
445 ///
446 /// At runtime this function behaves like `self != other`.
447 /// However, in some contexts (e.g., compile-time evaluation),
448 /// it is not always possible to determine the inequality of two pointers, so this function may
449 /// spuriously return `false` for pointers that later actually turn out to be unequal.
450 /// But when it returns `true`, the pointers are guaranteed to be unequal.
451 ///
452 /// This function is the mirror of [`guaranteed_eq`], but not its inverse. There are pointer
453 /// comparisons for which both functions return `false`.
454 ///
455 /// [`guaranteed_eq`]: #method.guaranteed_eq
456 ///
457 /// The return value may change depending on the compiler version and unsafe code may not
458 /// rely on the result of this function for soundness. It is suggested to only use this function
459 /// for performance optimizations where spurious `false` return values by this function do not
460 /// affect the outcome, but just the performance.
461 /// The consequences of using this method to make runtime and compile-time code behave
462 /// differently have not been explored. This method should not be used to introduce such
463 /// differences, and it should also not be stabilized before we have a better understanding
464 /// of this issue.
465 #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
466 #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
467 #[inline]
468 pub const unsafe fn guaranteed_ne(self, other: *mut T) -> bool
469 where
470 T: Sized,
471 {
472 intrinsics::ptr_guaranteed_ne(self as *const _, other as *const _)
473 }
474
475 /// Calculates the distance between two pointers. The returned value is in
476 /// units of T: the distance in bytes is divided by `mem::size_of::<T>()`.
477 ///
478 /// This function is the inverse of [`offset`].
479 ///
480 /// [`offset`]: #method.offset-1
481 ///
482 /// # Safety
483 ///
484 /// If any of the following conditions are violated, the result is Undefined
485 /// Behavior:
486 ///
487 /// * Both the starting and other pointer must be either in bounds or one
488 /// byte past the end of the same allocated object. Note that in Rust,
489 /// every (stack-allocated) variable is considered a separate allocated object.
490 ///
491 /// * Both pointers must be *derived from* a pointer to the same object.
492 /// (See below for an example.)
493 ///
494 /// * The distance between the pointers, in bytes, must be an exact multiple
495 /// of the size of `T`.
496 ///
497 /// * The distance between the pointers, **in bytes**, cannot overflow an `isize`.
498 ///
499 /// * The distance being in bounds cannot rely on "wrapping around" the address space.
500 ///
501 /// Rust types are never larger than `isize::MAX` and Rust allocations never wrap around the
502 /// address space, so two pointers within some value of any Rust type `T` will always satisfy
503 /// the last two conditions. The standard library also generally ensures that allocations
504 /// never reach a size where an offset is a concern. For instance, `Vec` and `Box` ensure they
505 /// never allocate more than `isize::MAX` bytes, so `ptr_into_vec.offset_from(vec.as_ptr())`
506 /// always satisfies the last two conditions.
507 ///
508 /// Most platforms fundamentally can't even construct such a large allocation.
509 /// For instance, no known 64-bit platform can ever serve a request
510 /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
511 /// However, some 32-bit and 16-bit platforms may successfully serve a request for
512 /// more than `isize::MAX` bytes with things like Physical Address
513 /// Extension. As such, memory acquired directly from allocators or memory
514 /// mapped files *may* be too large to handle with this function.
515 /// (Note that [`offset`] and [`add`] also have a similar limitation and hence cannot be used on
516 /// such large allocations either.)
517 ///
518 /// [`add`]: #method.add
519 ///
520 /// # Panics
521 ///
522 /// This function panics if `T` is a Zero-Sized Type ("ZST").
523 ///
524 /// # Examples
525 ///
526 /// Basic usage:
527 ///
528 /// ```
529 /// let mut a = [0; 5];
530 /// let ptr1: *mut i32 = &mut a[1];
531 /// let ptr2: *mut i32 = &mut a[3];
532 /// unsafe {
533 /// assert_eq!(ptr2.offset_from(ptr1), 2);
534 /// assert_eq!(ptr1.offset_from(ptr2), -2);
535 /// assert_eq!(ptr1.offset(2), ptr2);
536 /// assert_eq!(ptr2.offset(-2), ptr1);
537 /// }
538 /// ```
539 ///
540 /// *Incorrect* usage:
541 ///
542 /// ```rust,no_run
543 /// let ptr1 = Box::into_raw(Box::new(0u8));
544 /// let ptr2 = Box::into_raw(Box::new(1u8));
545 /// let diff = (ptr2 as isize).wrapping_sub(ptr1 as isize);
546 /// // Make ptr2_other an "alias" of ptr2, but derived from ptr1.
547 /// let ptr2_other = (ptr1 as *mut u8).wrapping_offset(diff);
548 /// assert_eq!(ptr2 as usize, ptr2_other as usize);
549 /// // Since ptr2_other and ptr2 are derived from pointers to different objects,
550 /// // computing their offset is undefined behavior, even though
551 /// // they point to the same address!
552 /// unsafe {
553 /// let zero = ptr2_other.offset_from(ptr2); // Undefined Behavior
554 /// }
555 /// ```
556 #[stable(feature = "ptr_offset_from", since = "1.47.0")]
557 #[rustc_const_unstable(feature = "const_ptr_offset_from", issue = "41079")]
558 #[inline]
559 pub const unsafe fn offset_from(self, origin: *const T) -> isize
560 where
561 T: Sized,
562 {
563 // SAFETY: the caller must uphold the safety contract for `offset_from`.
564 unsafe { (self as *const T).offset_from(origin) }
565 }
566
567 /// Calculates the offset from a pointer (convenience for `.offset(count as isize)`).
568 ///
569 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
570 /// offset of `3 * size_of::<T>()` bytes.
571 ///
572 /// # Safety
573 ///
574 /// If any of the following conditions are violated, the result is Undefined
575 /// Behavior:
576 ///
577 /// * Both the starting and resulting pointer must be either in bounds or one
578 /// byte past the end of the same allocated object. Note that in Rust,
579 /// every (stack-allocated) variable is considered a separate allocated object.
580 ///
581 /// * The computed offset, **in bytes**, cannot overflow an `isize`.
582 ///
583 /// * The offset being in bounds cannot rely on "wrapping around" the address
584 /// space. That is, the infinite-precision sum must fit in a `usize`.
585 ///
    /// The compiler and standard library generally try to ensure allocations
587 /// never reach a size where an offset is a concern. For instance, `Vec`
588 /// and `Box` ensure they never allocate more than `isize::MAX` bytes, so
589 /// `vec.as_ptr().add(vec.len())` is always safe.
590 ///
591 /// Most platforms fundamentally can't even construct such an allocation.
592 /// For instance, no known 64-bit platform can ever serve a request
593 /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
594 /// However, some 32-bit and 16-bit platforms may successfully serve a request for
595 /// more than `isize::MAX` bytes with things like Physical Address
596 /// Extension. As such, memory acquired directly from allocators or memory
597 /// mapped files *may* be too large to handle with this function.
598 ///
599 /// Consider using [`wrapping_add`] instead if these constraints are
600 /// difficult to satisfy. The only advantage of this method is that it
601 /// enables more aggressive compiler optimizations.
602 ///
603 /// [`wrapping_add`]: #method.wrapping_add
604 ///
605 /// # Examples
606 ///
607 /// Basic usage:
608 ///
609 /// ```
610 /// let s: &str = "123";
611 /// let ptr: *const u8 = s.as_ptr();
612 ///
613 /// unsafe {
614 /// println!("{}", *ptr.add(1) as char);
615 /// println!("{}", *ptr.add(2) as char);
616 /// }
617 /// ```
618 #[stable(feature = "pointer_methods", since = "1.26.0")]
619 #[must_use = "returns a new pointer rather than modifying its argument"]
620 #[rustc_const_unstable(feature = "const_ptr_offset", issue = "71499")]
621 #[inline]
622 pub const unsafe fn add(self, count: usize) -> Self
623 where
624 T: Sized,
625 {
626 // SAFETY: the caller must uphold the safety contract for `offset`.
627 unsafe { self.offset(count as isize) }
628 }
629
630 /// Calculates the offset from a pointer (convenience for
631 /// `.offset((count as isize).wrapping_neg())`).
632 ///
633 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
634 /// offset of `3 * size_of::<T>()` bytes.
635 ///
636 /// # Safety
637 ///
638 /// If any of the following conditions are violated, the result is Undefined
639 /// Behavior:
640 ///
641 /// * Both the starting and resulting pointer must be either in bounds or one
642 /// byte past the end of the same allocated object. Note that in Rust,
643 /// every (stack-allocated) variable is considered a separate allocated object.
644 ///
645 /// * The computed offset cannot exceed `isize::MAX` **bytes**.
646 ///
647 /// * The offset being in bounds cannot rely on "wrapping around" the address
    ///   space. That is, the infinite-precision sum must fit in a `usize`.
649 ///
    /// The compiler and standard library generally try to ensure allocations
651 /// never reach a size where an offset is a concern. For instance, `Vec`
652 /// and `Box` ensure they never allocate more than `isize::MAX` bytes, so
653 /// `vec.as_ptr().add(vec.len()).sub(vec.len())` is always safe.
654 ///
655 /// Most platforms fundamentally can't even construct such an allocation.
656 /// For instance, no known 64-bit platform can ever serve a request
657 /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
658 /// However, some 32-bit and 16-bit platforms may successfully serve a request for
659 /// more than `isize::MAX` bytes with things like Physical Address
660 /// Extension. As such, memory acquired directly from allocators or memory
661 /// mapped files *may* be too large to handle with this function.
662 ///
663 /// Consider using [`wrapping_sub`] instead if these constraints are
664 /// difficult to satisfy. The only advantage of this method is that it
665 /// enables more aggressive compiler optimizations.
666 ///
667 /// [`wrapping_sub`]: #method.wrapping_sub
668 ///
669 /// # Examples
670 ///
671 /// Basic usage:
672 ///
673 /// ```
674 /// let s: &str = "123";
675 ///
676 /// unsafe {
677 /// let end: *const u8 = s.as_ptr().add(3);
678 /// println!("{}", *end.sub(1) as char);
679 /// println!("{}", *end.sub(2) as char);
680 /// }
681 /// ```
682 #[stable(feature = "pointer_methods", since = "1.26.0")]
683 #[must_use = "returns a new pointer rather than modifying its argument"]
684 #[rustc_const_unstable(feature = "const_ptr_offset", issue = "71499")]
685 #[inline]
686 pub const unsafe fn sub(self, count: usize) -> Self
687 where
688 T: Sized,
689 {
690 // SAFETY: the caller must uphold the safety contract for `offset`.
691 unsafe { self.offset((count as isize).wrapping_neg()) }
692 }
693
694 /// Calculates the offset from a pointer using wrapping arithmetic.
695 /// (convenience for `.wrapping_offset(count as isize)`)
696 ///
697 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
698 /// offset of `3 * size_of::<T>()` bytes.
699 ///
700 /// # Safety
701 ///
702 /// This operation itself is always safe, but using the resulting pointer is not.
703 ///
704 /// The resulting pointer remains attached to the same allocated object that `self` points to.
705 /// It may *not* be used to access a different allocated object. Note that in Rust, every
706 /// (stack-allocated) variable is considered a separate allocated object.
707 ///
708 /// In other words, `let z = x.wrapping_add((y as usize) - (x as usize))` does *not* make `z`
709 /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
710 /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
711 /// `x` and `y` point into the same allocated object.
712 ///
713 /// Compared to [`add`], this method basically delays the requirement of staying within the
714 /// same allocated object: [`add`] is immediate Undefined Behavior when crossing object
715 /// boundaries; `wrapping_add` produces a pointer but still leads to Undefined Behavior if a
716 /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`add`]
717 /// can be optimized better and is thus preferable in performance-sensitive code.
718 ///
719 /// The delayed check only considers the value of the pointer that was dereferenced, not the
720 /// intermediate values used during the computation of the final result. For example,
721 /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
722 /// allocated object and then re-entering it later is permitted.
723 ///
724 /// If you need to cross object boundaries, cast the pointer to an integer and
725 /// do the arithmetic there.
726 ///
727 /// [`add`]: #method.add
728 ///
729 /// # Examples
730 ///
731 /// Basic usage:
732 ///
733 /// ```
734 /// // Iterate using a raw pointer in increments of two elements
735 /// let data = [1u8, 2, 3, 4, 5];
736 /// let mut ptr: *const u8 = data.as_ptr();
737 /// let step = 2;
738 /// let end_rounded_up = ptr.wrapping_add(6);
739 ///
740 /// // This loop prints "1, 3, 5, "
741 /// while ptr != end_rounded_up {
742 /// unsafe {
743 /// print!("{}, ", *ptr);
744 /// }
745 /// ptr = ptr.wrapping_add(step);
746 /// }
747 /// ```
748 #[stable(feature = "pointer_methods", since = "1.26.0")]
749 #[must_use = "returns a new pointer rather than modifying its argument"]
750 #[rustc_const_unstable(feature = "const_ptr_offset", issue = "71499")]
751 #[inline]
752 pub const fn wrapping_add(self, count: usize) -> Self
753 where
754 T: Sized,
755 {
756 self.wrapping_offset(count as isize)
757 }
758
759 /// Calculates the offset from a pointer using wrapping arithmetic.
760 /// (convenience for `.wrapping_offset((count as isize).wrapping_neg())`)
761 ///
762 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
763 /// offset of `3 * size_of::<T>()` bytes.
764 ///
765 /// # Safety
766 ///
767 /// This operation itself is always safe, but using the resulting pointer is not.
768 ///
769 /// The resulting pointer remains attached to the same allocated object that `self` points to.
770 /// It may *not* be used to access a different allocated object. Note that in Rust, every
771 /// (stack-allocated) variable is considered a separate allocated object.
772 ///
773 /// In other words, `let z = x.wrapping_sub((x as usize) - (y as usize))` does *not* make `z`
774 /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
775 /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
776 /// `x` and `y` point into the same allocated object.
777 ///
778 /// Compared to [`sub`], this method basically delays the requirement of staying within the
779 /// same allocated object: [`sub`] is immediate Undefined Behavior when crossing object
780 /// boundaries; `wrapping_sub` produces a pointer but still leads to Undefined Behavior if a
781 /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`sub`]
782 /// can be optimized better and is thus preferable in performance-sensitive code.
783 ///
784 /// The delayed check only considers the value of the pointer that was dereferenced, not the
785 /// intermediate values used during the computation of the final result. For example,
786 /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
787 /// allocated object and then re-entering it later is permitted.
788 ///
789 /// If you need to cross object boundaries, cast the pointer to an integer and
790 /// do the arithmetic there.
791 ///
792 /// [`sub`]: #method.sub
793 ///
794 /// # Examples
795 ///
796 /// Basic usage:
797 ///
798 /// ```
799 /// // Iterate using a raw pointer in increments of two elements (backwards)
800 /// let data = [1u8, 2, 3, 4, 5];
801 /// let mut ptr: *const u8 = data.as_ptr();
802 /// let start_rounded_down = ptr.wrapping_sub(2);
803 /// ptr = ptr.wrapping_add(4);
804 /// let step = 2;
805 /// // This loop prints "5, 3, 1, "
806 /// while ptr != start_rounded_down {
807 /// unsafe {
808 /// print!("{}, ", *ptr);
809 /// }
810 /// ptr = ptr.wrapping_sub(step);
811 /// }
812 /// ```
813 #[stable(feature = "pointer_methods", since = "1.26.0")]
814 #[must_use = "returns a new pointer rather than modifying its argument"]
815 #[rustc_const_unstable(feature = "const_ptr_offset", issue = "71499")]
816 #[inline]
817 pub const fn wrapping_sub(self, count: usize) -> Self
818 where
819 T: Sized,
820 {
821 self.wrapping_offset((count as isize).wrapping_neg())
822 }
823
    /// Sets the pointer value to `val`.
825 ///
826 /// In case `self` is a (fat) pointer to an unsized type, this operation
827 /// will only affect the pointer part, whereas for (thin) pointers to
828 /// sized types, this has the same effect as a simple assignment.
829 ///
830 /// The resulting pointer will have provenance of `val`, i.e., for a fat
831 /// pointer, this operation is semantically the same as creating a new
832 /// fat pointer with the data pointer value of `val` but the metadata of
833 /// `self`.
834 ///
835 /// # Examples
836 ///
837 /// This function is primarily useful for allowing byte-wise pointer
838 /// arithmetic on potentially fat pointers:
839 ///
840 /// ```
841 /// #![feature(set_ptr_value)]
842 /// # use core::fmt::Debug;
843 /// let mut arr: [i32; 3] = [1, 2, 3];
844 /// let mut ptr = &mut arr[0] as *mut dyn Debug;
845 /// let thin = ptr as *mut u8;
846 /// unsafe {
847 /// ptr = ptr.set_ptr_value(thin.add(8));
848 /// # assert_eq!(*(ptr as *mut i32), 3);
849 /// println!("{:?}", &*ptr); // will print "3"
850 /// }
851 /// ```
852 #[unstable(feature = "set_ptr_value", issue = "75091")]
853 #[must_use = "returns a new pointer rather than modifying its argument"]
854 #[inline]
855 pub fn set_ptr_value(mut self, val: *mut u8) -> Self {
856 let thin = &mut self as *mut *mut T as *mut *mut u8;
        // SAFETY: In case of a thin pointer, this operation is identical
858 // to a simple assignment. In case of a fat pointer, with the current
859 // fat pointer layout implementation, the first field of such a
860 // pointer is always the data pointer, which is likewise assigned.
861 unsafe { *thin = val };
862 self
863 }
864
865 /// Reads the value from `self` without moving it. This leaves the
866 /// memory in `self` unchanged.
867 ///
868 /// See [`ptr::read`] for safety concerns and examples.
869 ///
870 /// [`ptr::read`]: crate::ptr::read()
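    ///
    /// # Examples
    ///
    /// A brief sketch of reading an initialized value through a raw pointer:
    ///
    /// ```
    /// let mut x = 12u32;
    /// let ptr: *mut u32 = &mut x;
    /// // SAFETY: `ptr` is valid, properly aligned, and points to an initialized `u32`.
    /// assert_eq!(unsafe { ptr.read() }, 12);
    /// ```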
871 #[stable(feature = "pointer_methods", since = "1.26.0")]
872 #[rustc_const_unstable(feature = "const_ptr_read", issue = "80377")]
873 #[inline]
874 pub const unsafe fn read(self) -> T
875 where
876 T: Sized,
877 {
        // SAFETY: the caller must uphold the safety contract for `read`.
879 unsafe { read(self) }
880 }
881
882 /// Performs a volatile read of the value from `self` without moving it. This
883 /// leaves the memory in `self` unchanged.
884 ///
885 /// Volatile operations are intended to act on I/O memory, and are guaranteed
886 /// to not be elided or reordered by the compiler across other volatile
887 /// operations.
888 ///
889 /// See [`ptr::read_volatile`] for safety concerns and examples.
890 ///
891 /// [`ptr::read_volatile`]: crate::ptr::read_volatile()
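    ///
    /// # Examples
    ///
    /// A brief sketch; ordinary memory is used here purely for illustration:
    ///
    /// ```
    /// let mut x = 7u32;
    /// let ptr: *mut u32 = &mut x;
    /// // SAFETY: `ptr` is valid and aligned; the volatile read simply cannot be elided.
    /// assert_eq!(unsafe { ptr.read_volatile() }, 7);
    /// ```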
892 #[stable(feature = "pointer_methods", since = "1.26.0")]
893 #[inline]
894 pub unsafe fn read_volatile(self) -> T
895 where
896 T: Sized,
897 {
898 // SAFETY: the caller must uphold the safety contract for `read_volatile`.
899 unsafe { read_volatile(self) }
900 }
901
902 /// Reads the value from `self` without moving it. This leaves the
903 /// memory in `self` unchanged.
904 ///
905 /// Unlike `read`, the pointer may be unaligned.
906 ///
907 /// See [`ptr::read_unaligned`] for safety concerns and examples.
908 ///
909 /// [`ptr::read_unaligned`]: crate::ptr::read_unaligned()
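    ///
    /// # Examples
    ///
    /// A brief sketch reading a potentially misaligned `u32` out of a byte buffer:
    ///
    /// ```
    /// let mut buf = [0u8; 8];
    /// // An address offset by one byte is generally not aligned for `u32`.
    /// let ptr = buf[1..5].as_mut_ptr() as *mut u32;
    /// unsafe {
    ///     // SAFETY: `ptr` is valid for 4 bytes; alignment is not required here.
    ///     ptr.write_unaligned(0xDDCC_BBAA);
    ///     assert_eq!(ptr.read_unaligned(), 0xDDCC_BBAA);
    /// }
    /// ```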
910 #[stable(feature = "pointer_methods", since = "1.26.0")]
911 #[rustc_const_unstable(feature = "const_ptr_read", issue = "80377")]
912 #[inline]
913 pub const unsafe fn read_unaligned(self) -> T
914 where
915 T: Sized,
916 {
917 // SAFETY: the caller must uphold the safety contract for `read_unaligned`.
918 unsafe { read_unaligned(self) }
919 }
920
    /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
922 /// and destination may overlap.
923 ///
924 /// NOTE: this has the *same* argument order as [`ptr::copy`].
925 ///
926 /// See [`ptr::copy`] for safety concerns and examples.
927 ///
928 /// [`ptr::copy`]: crate::ptr::copy()
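    ///
    /// # Examples
    ///
    /// A brief usage sketch:
    ///
    /// ```
    /// let mut src = [1u8, 2, 3];
    /// let mut dst = [0u8; 3];
    /// let src_ptr: *mut u8 = src.as_mut_ptr();
    /// // SAFETY: both pointers are valid for 3 elements of `u8`.
    /// unsafe { src_ptr.copy_to(dst.as_mut_ptr(), 3) };
    /// assert_eq!(dst, [1, 2, 3]);
    /// ```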
929 #[rustc_const_unstable(feature = "const_intrinsic_copy", issue = "80697")]
930 #[stable(feature = "pointer_methods", since = "1.26.0")]
931 #[inline]
932 pub const unsafe fn copy_to(self, dest: *mut T, count: usize)
933 where
934 T: Sized,
935 {
936 // SAFETY: the caller must uphold the safety contract for `copy`.
937 unsafe { copy(self, dest, count) }
938 }
939
    /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
941 /// and destination may *not* overlap.
942 ///
943 /// NOTE: this has the *same* argument order as [`ptr::copy_nonoverlapping`].
944 ///
945 /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
946 ///
947 /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
948 #[rustc_const_unstable(feature = "const_intrinsic_copy", issue = "80697")]
949 #[stable(feature = "pointer_methods", since = "1.26.0")]
950 #[inline]
951 pub const unsafe fn copy_to_nonoverlapping(self, dest: *mut T, count: usize)
952 where
953 T: Sized,
954 {
955 // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
956 unsafe { copy_nonoverlapping(self, dest, count) }
957 }
958
    /// Copies `count * size_of::<T>()` bytes from `src` to `self`. The source
960 /// and destination may overlap.
961 ///
962 /// NOTE: this has the *opposite* argument order of [`ptr::copy`].
963 ///
964 /// See [`ptr::copy`] for safety concerns and examples.
965 ///
966 /// [`ptr::copy`]: crate::ptr::copy()
967 #[rustc_const_unstable(feature = "const_intrinsic_copy", issue = "80697")]
968 #[stable(feature = "pointer_methods", since = "1.26.0")]
969 #[inline]
970 pub const unsafe fn copy_from(self, src: *const T, count: usize)
971 where
972 T: Sized,
973 {
974 // SAFETY: the caller must uphold the safety contract for `copy`.
975 unsafe { copy(src, self, count) }
976 }
977
    /// Copies `count * size_of::<T>()` bytes from `src` to `self`. The source
979 /// and destination may *not* overlap.
980 ///
981 /// NOTE: this has the *opposite* argument order of [`ptr::copy_nonoverlapping`].
982 ///
983 /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
984 ///
985 /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
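    ///
    /// # Examples
    ///
    /// A brief usage sketch:
    ///
    /// ```
    /// let src = [1u8, 2, 3];
    /// let mut dst = [0u8; 3];
    /// let dst_ptr: *mut u8 = dst.as_mut_ptr();
    /// // SAFETY: the regions are distinct and each is valid for 3 elements of `u8`.
    /// unsafe { dst_ptr.copy_from_nonoverlapping(src.as_ptr(), 3) };
    /// assert_eq!(dst, [1, 2, 3]);
    /// ```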
986 #[rustc_const_unstable(feature = "const_intrinsic_copy", issue = "80697")]
987 #[stable(feature = "pointer_methods", since = "1.26.0")]
988 #[inline]
989 pub const unsafe fn copy_from_nonoverlapping(self, src: *const T, count: usize)
990 where
991 T: Sized,
992 {
993 // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
994 unsafe { copy_nonoverlapping(src, self, count) }
995 }
996
997 /// Executes the destructor (if any) of the pointed-to value.
998 ///
999 /// See [`ptr::drop_in_place`] for safety concerns and examples.
1000 ///
1001 /// [`ptr::drop_in_place`]: crate::ptr::drop_in_place()
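    ///
    /// # Examples
    ///
    /// A brief sketch using `ManuallyDrop` so the value is not dropped twice:
    ///
    /// ```
    /// use std::mem::ManuallyDrop;
    ///
    /// let mut slot = ManuallyDrop::new(String::from("hello"));
    /// let ptr: *mut String = &mut *slot;
    /// // SAFETY: `ptr` points to an initialized `String` that is never used or dropped again.
    /// unsafe { ptr.drop_in_place() };
    /// ```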
1002 #[stable(feature = "pointer_methods", since = "1.26.0")]
1003 #[inline]
1004 pub unsafe fn drop_in_place(self) {
1005 // SAFETY: the caller must uphold the safety contract for `drop_in_place`.
1006 unsafe { drop_in_place(self) }
1007 }
1008
1009 /// Overwrites a memory location with the given value without reading or
1010 /// dropping the old value.
1011 ///
1012 /// See [`ptr::write`] for safety concerns and examples.
1013 ///
1014 /// [`ptr::write`]: crate::ptr::write()
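    ///
    /// # Examples
    ///
    /// A brief usage sketch:
    ///
    /// ```
    /// let mut x = 0u32;
    /// let ptr: *mut u32 = &mut x;
    /// // SAFETY: `ptr` is valid and properly aligned.
    /// unsafe { ptr.write(5) };
    /// assert_eq!(x, 5);
    /// ```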
1015 #[stable(feature = "pointer_methods", since = "1.26.0")]
1016 #[inline]
1017 pub unsafe fn write(self, val: T)
1018 where
1019 T: Sized,
1020 {
1021 // SAFETY: the caller must uphold the safety contract for `write`.
1022 unsafe { write(self, val) }
1023 }
1024
1025 /// Invokes memset on the specified pointer, setting `count * size_of::<T>()`
1026 /// bytes of memory starting at `self` to `val`.
1027 ///
1028 /// See [`ptr::write_bytes`] for safety concerns and examples.
1029 ///
1030 /// [`ptr::write_bytes`]: crate::ptr::write_bytes()
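    ///
    /// # Examples
    ///
    /// A brief usage sketch:
    ///
    /// ```
    /// let mut buf = [0u8; 4];
    /// let ptr: *mut u8 = buf.as_mut_ptr();
    /// // SAFETY: `ptr` is valid for writes of 4 bytes.
    /// unsafe { ptr.write_bytes(0xff, 4) };
    /// assert_eq!(buf, [0xff; 4]);
    /// ```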
1031 #[stable(feature = "pointer_methods", since = "1.26.0")]
1032 #[inline]
1033 pub unsafe fn write_bytes(self, val: u8, count: usize)
1034 where
1035 T: Sized,
1036 {
1037 // SAFETY: the caller must uphold the safety contract for `write_bytes`.
1038 unsafe { write_bytes(self, val, count) }
1039 }
1040
1041 /// Performs a volatile write of a memory location with the given value without
1042 /// reading or dropping the old value.
1043 ///
1044 /// Volatile operations are intended to act on I/O memory, and are guaranteed
1045 /// to not be elided or reordered by the compiler across other volatile
1046 /// operations.
1047 ///
1048 /// See [`ptr::write_volatile`] for safety concerns and examples.
1049 ///
1050 /// [`ptr::write_volatile`]: crate::ptr::write_volatile()
1051 #[stable(feature = "pointer_methods", since = "1.26.0")]
1052 #[inline]
1053 pub unsafe fn write_volatile(self, val: T)
1054 where
1055 T: Sized,
1056 {
1057 // SAFETY: the caller must uphold the safety contract for `write_volatile`.
1058 unsafe { write_volatile(self, val) }
1059 }
1060
1061 /// Overwrites a memory location with the given value without reading or
1062 /// dropping the old value.
1063 ///
1064 /// Unlike `write`, the pointer may be unaligned.
1065 ///
1066 /// See [`ptr::write_unaligned`] for safety concerns and examples.
1067 ///
1068 /// [`ptr::write_unaligned`]: crate::ptr::write_unaligned()
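    ///
    /// # Examples
    ///
    /// A brief sketch writing to a field of a `#[repr(packed)]` struct:
    ///
    /// ```
    /// #[repr(packed)]
    /// struct Packed {
    ///     _tag: u8,
    ///     value: u32,
    /// }
    ///
    /// let mut packed = Packed { _tag: 0, value: 0 };
    /// // `packed.value` may be unaligned, so go through a raw pointer.
    /// let ptr: *mut u32 = std::ptr::addr_of_mut!(packed.value);
    /// unsafe {
    ///     ptr.write_unaligned(42);
    ///     assert_eq!(ptr.read_unaligned(), 42);
    /// }
    /// ```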
1069 #[stable(feature = "pointer_methods", since = "1.26.0")]
1070 #[rustc_const_unstable(feature = "const_ptr_write", issue = "none")]
1071 #[inline]
1072 pub const unsafe fn write_unaligned(self, val: T)
1073 where
1074 T: Sized,
1075 {
1076 // SAFETY: the caller must uphold the safety contract for `write_unaligned`.
1077 unsafe { write_unaligned(self, val) }
1078 }
1079
1080 /// Replaces the value at `self` with `src`, returning the old
1081 /// value, without dropping either.
1082 ///
1083 /// See [`ptr::replace`] for safety concerns and examples.
1084 ///
1085 /// [`ptr::replace`]: crate::ptr::replace()
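    ///
    /// # Examples
    ///
    /// A brief usage sketch:
    ///
    /// ```
    /// let mut x = 1u32;
    /// let ptr: *mut u32 = &mut x;
    /// // SAFETY: `ptr` is valid, aligned, and points to an initialized value.
    /// let old = unsafe { ptr.replace(2) };
    /// assert_eq!(old, 1);
    /// assert_eq!(x, 2);
    /// ```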
1086 #[stable(feature = "pointer_methods", since = "1.26.0")]
1087 #[inline]
1088 pub unsafe fn replace(self, src: T) -> T
1089 where
1090 T: Sized,
1091 {
1092 // SAFETY: the caller must uphold the safety contract for `replace`.
1093 unsafe { replace(self, src) }
1094 }
1095
1096 /// Swaps the values at two mutable locations of the same type, without
1097 /// deinitializing either. They may overlap, unlike `mem::swap` which is
1098 /// otherwise equivalent.
1099 ///
1100 /// See [`ptr::swap`] for safety concerns and examples.
1101 ///
1102 /// [`ptr::swap`]: crate::ptr::swap()
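    ///
    /// # Examples
    ///
    /// A brief usage sketch:
    ///
    /// ```
    /// let mut a = 1u32;
    /// let mut b = 2u32;
    /// let pa: *mut u32 = &mut a;
    /// let pb: *mut u32 = &mut b;
    /// // SAFETY: both pointers are valid and properly aligned.
    /// unsafe { pa.swap(pb) };
    /// assert_eq!((a, b), (2, 1));
    /// ```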
1103 #[stable(feature = "pointer_methods", since = "1.26.0")]
1104 #[inline]
1105 pub unsafe fn swap(self, with: *mut T)
1106 where
1107 T: Sized,
1108 {
1109 // SAFETY: the caller must uphold the safety contract for `swap`.
1110 unsafe { swap(self, with) }
1111 }
1112
1113 /// Computes the offset that needs to be applied to the pointer in order to make it aligned to
1114 /// `align`.
1115 ///
1116 /// If it is not possible to align the pointer, the implementation returns
1117 /// `usize::MAX`. It is permissible for the implementation to *always*
1118 /// return `usize::MAX`. Only your algorithm's performance can depend
1119 /// on getting a usable offset here, not its correctness.
1120 ///
1121 /// The offset is expressed in number of `T` elements, and not bytes. The value returned can be
1122 /// used with the `wrapping_add` method.
1123 ///
1124 /// There are no guarantees whatsoever that offsetting the pointer will not overflow or go
1125 /// beyond the allocation that the pointer points into. It is up to the caller to ensure that
1126 /// the returned offset is correct in all terms other than alignment.
1127 ///
1128 /// # Panics
1129 ///
1130 /// The function panics if `align` is not a power-of-two.
1131 ///
1132 /// # Examples
1133 ///
1134 /// Accessing adjacent `u8` as `u16`
1135 ///
1136 /// ```
1137 /// # fn foo(n: usize) {
1138 /// # use std::mem::align_of;
1139 /// # unsafe {
1140 /// let x = [5u8, 6u8, 7u8, 8u8, 9u8];
1141 /// let ptr = x.as_ptr().add(n) as *const u8;
1142 /// let offset = ptr.align_offset(align_of::<u16>());
1143 /// if offset < x.len() - n - 1 {
1144 /// let u16_ptr = ptr.add(offset) as *const u16;
1145 /// assert_ne!(*u16_ptr, 500);
1146 /// } else {
1147 /// // while the pointer can be aligned via `offset`, it would point
1148 /// // outside the allocation
1149 /// }
1150 /// # } }
1151 /// ```
1152 #[stable(feature = "align_offset", since = "1.36.0")]
1153 pub fn align_offset(self, align: usize) -> usize
1154 where
1155 T: Sized,
1156 {
1157 if !align.is_power_of_two() {
1158 panic!("align_offset: align is not a power-of-two");
1159 }
1160 // SAFETY: `align` has been checked to be a power of 2 above
1161 unsafe { align_offset(self, align) }
1162 }
1163 }
1164
1165 #[lang = "mut_slice_ptr"]
1166 impl<T> *mut [T] {
1167 /// Returns the length of a raw slice.
1168 ///
1169 /// The returned value is the number of **elements**, not the number of bytes.
1170 ///
1171 /// This function is safe, even when the raw slice cannot be cast to a slice
1172 /// reference because the pointer is null or unaligned.
1173 ///
1174 /// # Examples
1175 ///
1176 /// ```rust
1177 /// #![feature(slice_ptr_len)]
1178 /// use std::ptr;
1179 ///
1180 /// let slice: *mut [i8] = ptr::slice_from_raw_parts_mut(ptr::null_mut(), 3);
1181 /// assert_eq!(slice.len(), 3);
1182 /// ```
1183 #[inline]
1184 #[unstable(feature = "slice_ptr_len", issue = "71146")]
1185 #[rustc_const_unstable(feature = "const_slice_ptr_len", issue = "71146")]
1186 pub const fn len(self) -> usize {
1187 #[cfg(bootstrap)]
1188 {
            // SAFETY: this is safe because `*mut [T]` and `FatPtr<T>` have the same layout.
1190 // Only `std` can make this guarantee.
1191 unsafe { Repr { rust_mut: self }.raw }.len
1192 }
1193 #[cfg(not(bootstrap))]
1194 metadata(self)
1195 }
1196
1197 /// Returns a raw pointer to the slice's buffer.
1198 ///
1199 /// This is equivalent to casting `self` to `*mut T`, but more type-safe.
1200 ///
1201 /// # Examples
1202 ///
1203 /// ```rust
1204 /// #![feature(slice_ptr_get)]
1205 /// use std::ptr;
1206 ///
1207 /// let slice: *mut [i8] = ptr::slice_from_raw_parts_mut(ptr::null_mut(), 3);
1208 /// assert_eq!(slice.as_mut_ptr(), 0 as *mut i8);
1209 /// ```
1210 #[inline]
1211 #[unstable(feature = "slice_ptr_get", issue = "74265")]
1212 #[rustc_const_unstable(feature = "slice_ptr_get", issue = "74265")]
1213 pub const fn as_mut_ptr(self) -> *mut T {
1214 self as *mut T
1215 }
1216
1217 /// Returns a raw pointer to an element or subslice, without doing bounds
1218 /// checking.
1219 ///
    /// Calling this method with an out-of-bounds index or when `self` is not dereferenceable
1221 /// is *[undefined behavior]* even if the resulting pointer is not used.
1222 ///
1223 /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
1224 ///
1225 /// # Examples
1226 ///
1227 /// ```
1228 /// #![feature(slice_ptr_get)]
1229 ///
1230 /// let x = &mut [1, 2, 4] as *mut [i32];
1231 ///
1232 /// unsafe {
1233 /// assert_eq!(x.get_unchecked_mut(1), x.as_mut_ptr().add(1));
1234 /// }
1235 /// ```
1236 #[unstable(feature = "slice_ptr_get", issue = "74265")]
1237 #[inline]
1238 pub unsafe fn get_unchecked_mut<I>(self, index: I) -> *mut I::Output
1239 where
1240 I: SliceIndex<[T]>,
1241 {
        // SAFETY: the caller ensures that `self` is dereferenceable and `index` in-bounds.
1243 unsafe { index.get_unchecked_mut(self) }
1244 }
1245
1246 /// Returns `None` if the pointer is null, or else returns a shared slice to
1247 /// the value wrapped in `Some`. In contrast to [`as_ref`], this does not require
1248 /// that the value has to be initialized.
1249 ///
1250 /// For the mutable counterpart see [`as_uninit_slice_mut`].
1251 ///
1252 /// [`as_ref`]: #method.as_ref-1
1253 /// [`as_uninit_slice_mut`]: #method.as_uninit_slice_mut
1254 ///
1255 /// # Safety
1256 ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// all of the following are true:
1259 ///
1260 /// * The pointer must be [valid] for reads for `ptr.len() * mem::size_of::<T>()` many bytes,
1261 /// and it must be properly aligned. This means in particular:
1262 ///
1263 /// * The entire memory range of this slice must be contained within a single allocated object!
1264 /// Slices can never span across multiple allocated objects.
1265 ///
1266 /// * The pointer must be aligned even for zero-length slices. One
1267 /// reason for this is that enum layout optimizations may rely on references
1268 /// (including slices of any length) being aligned and non-null to distinguish
1269 /// them from other data. You can obtain a pointer that is usable as `data`
1270 /// for zero-length slices using [`NonNull::dangling()`].
1271 ///
1272 /// * The total size `ptr.len() * mem::size_of::<T>()` of the slice must be no larger than `isize::MAX`.
1273 /// See the safety documentation of [`pointer::offset`].
1274 ///
1275 /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
1276 /// arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
1277 /// In particular, for the duration of this lifetime, the memory the pointer points to must
1278 /// not get mutated (except inside `UnsafeCell`).
1279 ///
1280 /// This applies even if the result of this method is unused!
1281 ///
1282 /// See also [`slice::from_raw_parts`][].
1283 ///
1284 /// [valid]: crate::ptr#safety
1285 #[inline]
1286 #[unstable(feature = "ptr_as_uninit", issue = "75402")]
1287 pub unsafe fn as_uninit_slice<'a>(self) -> Option<&'a [MaybeUninit<T>]> {
1288 if self.is_null() {
1289 None
1290 } else {
1291 // SAFETY: the caller must uphold the safety contract for `as_uninit_slice`.
1292 Some(unsafe { slice::from_raw_parts(self as *const MaybeUninit<T>, self.len()) })
1293 }
1294 }
1295
1296 /// Returns `None` if the pointer is null, or else returns a unique slice to
1297 /// the value wrapped in `Some`. In contrast to [`as_mut`], this does not require
1298 /// that the value has to be initialized.
1299 ///
1300 /// For the shared counterpart see [`as_uninit_slice`].
1301 ///
1302 /// [`as_mut`]: #method.as_mut
1303 /// [`as_uninit_slice`]: #method.as_uninit_slice-1
1304 ///
1305 /// # Safety
1306 ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// all of the following are true:
1309 ///
1310 /// * The pointer must be [valid] for reads and writes for `ptr.len() * mem::size_of::<T>()`
1311 /// many bytes, and it must be properly aligned. This means in particular:
1312 ///
1313 /// * The entire memory range of this slice must be contained within a single allocated object!
1314 /// Slices can never span across multiple allocated objects.
1315 ///
1316 /// * The pointer must be aligned even for zero-length slices. One
1317 /// reason for this is that enum layout optimizations may rely on references
1318 /// (including slices of any length) being aligned and non-null to distinguish
1319 /// them from other data. You can obtain a pointer that is usable as `data`
1320 /// for zero-length slices using [`NonNull::dangling()`].
1321 ///
1322 /// * The total size `ptr.len() * mem::size_of::<T>()` of the slice must be no larger than `isize::MAX`.
1323 /// See the safety documentation of [`pointer::offset`].
1324 ///
1325 /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
1326 /// arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
1327 /// In particular, for the duration of this lifetime, the memory the pointer points to must
1328 /// not get accessed (read or written) through any other pointer.
1329 ///
1330 /// This applies even if the result of this method is unused!
1331 ///
1332 /// See also [`slice::from_raw_parts_mut`][].
1333 ///
1334 /// [valid]: crate::ptr#safety
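    ///
    /// # Examples
    ///
    /// A minimal sketch, assuming the unstable `ptr_as_uninit` feature:
    ///
    /// ```
    /// #![feature(ptr_as_uninit)]
    ///
    /// use std::ptr;
    ///
    /// let mut data = [0u8; 3];
    /// let raw: *mut [u8] = ptr::slice_from_raw_parts_mut(data.as_mut_ptr(), 3);
    /// unsafe {
    ///     if let Some(slice) = raw.as_uninit_slice_mut() {
    ///         assert_eq!(slice.len(), 3);
    ///     }
    /// }
    /// ```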
1335 #[inline]
1336 #[unstable(feature = "ptr_as_uninit", issue = "75402")]
1337 pub unsafe fn as_uninit_slice_mut<'a>(self) -> Option<&'a mut [MaybeUninit<T>]> {
1338 if self.is_null() {
1339 None
1340 } else {
1341 // SAFETY: the caller must uphold the safety contract for `as_uninit_slice_mut`.
1342 Some(unsafe { slice::from_raw_parts_mut(self as *mut MaybeUninit<T>, self.len()) })
1343 }
1344 }
1345 }
1346
1347 // Equality for pointers
1348 #[stable(feature = "rust1", since = "1.0.0")]
1349 impl<T: ?Sized> PartialEq for *mut T {
1350 #[inline]
1351 fn eq(&self, other: &*mut T) -> bool {
1352 *self == *other
1353 }
1354 }
1355
1356 #[stable(feature = "rust1", since = "1.0.0")]
1357 impl<T: ?Sized> Eq for *mut T {}
1358
1359 #[stable(feature = "rust1", since = "1.0.0")]
1360 impl<T: ?Sized> Ord for *mut T {
1361 #[inline]
1362 fn cmp(&self, other: &*mut T) -> Ordering {
1363 if self < other {
1364 Less
1365 } else if self == other {
1366 Equal
1367 } else {
1368 Greater
1369 }
1370 }
1371 }
1372
1373 #[stable(feature = "rust1", since = "1.0.0")]
1374 impl<T: ?Sized> PartialOrd for *mut T {
1375 #[inline]
1376 fn partial_cmp(&self, other: &*mut T) -> Option<Ordering> {
1377 Some(self.cmp(other))
1378 }
1379
1380 #[inline]
1381 fn lt(&self, other: &*mut T) -> bool {
1382 *self < *other
1383 }
1384
1385 #[inline]
1386 fn le(&self, other: &*mut T) -> bool {
1387 *self <= *other
1388 }
1389
1390 #[inline]
1391 fn gt(&self, other: &*mut T) -> bool {
1392 *self > *other
1393 }
1394
1395 #[inline]
1396 fn ge(&self, other: &*mut T) -> bool {
1397 *self >= *other
1398 }
1399 }