// library/core/src/ptr/mut_ptr.rs (Rust 1.64.0)
1use super::*;
2use crate::cmp::Ordering::{self, Equal, Greater, Less};
3use crate::intrinsics;
3dfed10e 4use crate::slice::{self, SliceIndex};
6impl<T: ?Sized> *mut T {
7 /// Returns `true` if the pointer is null.
8 ///
9 /// Note that unsized types have many possible null pointers, as only the
10 /// raw data pointer is considered, not their length, vtable, etc.
11 /// Therefore, two pointers that are null may still not compare equal to
12 /// each other.
13 ///
14 /// ## Behavior during const evaluation
15 ///
16 /// When this function is used during const evaluation, it may return `false` for pointers
17 /// that turn out to be null at runtime. Specifically, when a pointer to some memory
18 /// is offset beyond its bounds in such a way that the resulting pointer is null,
19 /// the function will still return `false`. There is no way for CTFE to know
20 /// the absolute position of that memory, so we cannot tell if the pointer is
21 /// null or not.
22 ///
23 /// # Examples
24 ///
25 /// Basic usage:
26 ///
27 /// ```
28 /// let mut s = [1, 2, 3];
29 /// let ptr: *mut u32 = s.as_mut_ptr();
30 /// assert!(!ptr.is_null());
31 /// ```
32 #[stable(feature = "rust1", since = "1.0.0")]
3dfed10e 33 #[rustc_const_unstable(feature = "const_ptr_is_null", issue = "74939")]
dfeec247 34 #[inline]
3dfed10e 35 pub const fn is_null(self) -> bool {
36 // Compare via a cast to a thin pointer, so fat pointers are only
37 // considering their "data" part for null-ness.
3dfed10e 38 (self as *mut u8).guaranteed_eq(null_mut())
39 }
40
41 /// Casts to a pointer of another type.
42 #[stable(feature = "ptr_cast", since = "1.38.0")]
43 #[rustc_const_stable(feature = "const_ptr_cast", since = "1.38.0")]
17df50a5 44 #[inline(always)]
45 pub const fn cast<U>(self) -> *mut U {
46 self as _
47 }
48
49 /// Use the pointer value in a new pointer of another type.
50 ///
51 /// In case `val` is a (fat) pointer to an unsized type, this operation
52 /// will ignore the pointer part, whereas for (thin) pointers to sized
53 /// types, this has the same effect as a simple cast.
54 ///
55 /// The resulting pointer will have provenance of `self`, i.e., for a fat
56 /// pointer, this operation is semantically the same as creating a new
57 /// fat pointer with the data pointer value of `self` but the metadata of
58 /// `val`.
59 ///
60 /// # Examples
61 ///
62 /// This function is primarily useful for allowing byte-wise pointer
63 /// arithmetic on potentially fat pointers:
64 ///
65 /// ```
66 /// #![feature(set_ptr_value)]
67 /// # use core::fmt::Debug;
68 /// let mut arr: [i32; 3] = [1, 2, 3];
69 /// let mut ptr = arr.as_mut_ptr() as *mut dyn Debug;
70 /// let thin = ptr as *mut u8;
71 /// unsafe {
72 /// ptr = thin.add(8).with_metadata_of(ptr);
73 /// # assert_eq!(*(ptr as *mut i32), 3);
74 /// println!("{:?}", &*ptr); // will print "3"
75 /// }
76 /// ```
77 #[unstable(feature = "set_ptr_value", issue = "75091")]
78 #[must_use = "returns a new pointer rather than modifying its argument"]
79 #[inline]
80 pub fn with_metadata_of<U>(self, mut val: *mut U) -> *mut U
81 where
82 U: ?Sized,
83 {
84 let target = &mut val as *mut *mut U as *mut *mut u8;
85 // SAFETY: In case of a thin pointer, this operation is identical
86 // to a simple assignment. In case of a fat pointer, with the current
87 // fat pointer layout implementation, the first field of such a
88 // pointer is always the data pointer, which is likewise assigned.
89 unsafe { *target = self as *mut u8 };
90 val
91 }
92
93 /// Changes constness without changing the type.
94 ///
95 /// This is a bit safer than `as` because it wouldn't silently change the type if the code is
96 /// refactored.
97 ///
98 /// While not strictly required (`*mut T` coerces to `*const T`), this is provided for symmetry
064997fb 99 /// with [`cast_mut`] on `*const T` and may have documentation value if used instead of implicit
5099ac24 100 /// coercion.
101 ///
102 /// [`cast_mut`]: #method.cast_mut
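///
/// # Examples
///
/// A minimal sketch of typical usage (illustrative only):
///
/// ```
/// #![feature(ptr_const_cast)]
/// let mut x = 0i32;
/// let m: *mut i32 = &mut x;
/// let c: *const i32 = m.cast_const();
/// assert_eq!(c, m as *const i32);
/// ```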
103 #[unstable(feature = "ptr_const_cast", issue = "92675")]
104 #[rustc_const_unstable(feature = "ptr_const_cast", issue = "92675")]
064997fb 105 pub const fn cast_const(self) -> *const T {
106 self as _
107 }
108
109 /// Casts a pointer to its raw bits.
110 ///
111 /// This is equivalent to `as usize`, but is more specific to enhance readability.
112 /// The inverse method is [`from_bits`](#method.from_bits-1).
113 ///
114 /// In particular, `*p as usize` and `p as usize` will both compile for
115 /// pointers to numeric types but do very different things, so using this
116 /// helps emphasize that reading the bits was intentional.
117 ///
118 /// # Examples
119 ///
120 /// ```
121 /// #![feature(ptr_to_from_bits)]
122 /// let mut array = [13, 42];
123 /// let mut it = array.iter_mut();
124 /// let p0: *mut i32 = it.next().unwrap();
125 /// assert_eq!(<*mut _>::from_bits(p0.to_bits()), p0);
126 /// let p1: *mut i32 = it.next().unwrap();
127 /// assert_eq!(p1.to_bits() - p0.to_bits(), 4);
128 /// ```
129 #[unstable(feature = "ptr_to_from_bits", issue = "91126")]
130 pub fn to_bits(self) -> usize
131 where
132 T: Sized,
133 {
134 self as usize
135 }
136
137 /// Creates a pointer from its raw bits.
138 ///
139 /// This is equivalent to `as *mut T`, but is more specific to enhance readability.
140 /// The inverse method is [`to_bits`](#method.to_bits-1).
141 ///
142 /// # Examples
143 ///
144 /// ```
145 /// #![feature(ptr_to_from_bits)]
146 /// use std::ptr::NonNull;
147 /// let dangling: *mut u8 = NonNull::dangling().as_ptr();
148 /// assert_eq!(<*mut u8>::from_bits(1), dangling);
149 /// ```
150 #[unstable(feature = "ptr_to_from_bits", issue = "91126")]
151 pub fn from_bits(bits: usize) -> Self
152 where
153 T: Sized,
154 {
155 bits as Self
156 }
157
158 /// Gets the "address" portion of the pointer.
159 ///
160 /// This is similar to `self as usize`, which semantically discards *provenance* and
161 /// *address-space* information. However, unlike `self as usize`, casting the returned address
162 /// back to a pointer yields [`invalid`][], which is undefined behavior to dereference. To
163 /// properly restore the lost information and obtain a dereferenceable pointer, use
164 /// [`with_addr`][pointer::with_addr] or [`map_addr`][pointer::map_addr].
165 ///
166 /// If using those APIs is not possible because there is no way to preserve a pointer with the
167 /// required provenance, use [`expose_addr`][pointer::expose_addr] and
168 /// [`from_exposed_addr_mut`][from_exposed_addr_mut] instead. However, note that this makes
169 /// your code less portable and less amenable to tools that check for compliance with the Rust
170 /// memory model.
171 ///
172 /// On most platforms this will produce a value with the same bytes as the original
173 /// pointer, because all the bytes are dedicated to describing the address.
174 /// Platforms which need to store additional information in the pointer may
175 /// perform a change of representation to produce a value containing only the address
176 /// portion of the pointer. What that means is up to the platform to define.
177 ///
178 /// This API and its claimed semantics are part of the Strict Provenance experiment, and as such
179 /// might change in the future (including possibly weakening this so it becomes wholly
180 /// equivalent to `self as usize`). See the [module documentation][crate::ptr] for details.
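///
/// # Examples
///
/// A small illustrative sketch (assumes the unstable `strict_provenance` feature):
///
/// ```
/// #![feature(strict_provenance)]
/// let mut x = 5u32;
/// let ptr: *mut u32 = &mut x;
/// // References are always aligned, so the low bits of the address are zero.
/// assert_eq!(ptr.addr() % core::mem::align_of::<u32>(), 0);
/// ```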
181 #[must_use]
182 #[inline]
183 #[unstable(feature = "strict_provenance", issue = "95228")]
184 pub fn addr(self) -> usize
185 where
186 T: Sized,
187 {
188 // FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic.
189 // SAFETY: Pointer-to-integer transmutes are valid (if you are okay with losing the
190 // provenance).
191 unsafe { mem::transmute(self) }
192 }
193
194 /// Gets the "address" portion of the pointer, and 'exposes' the "provenance" part for future
195 /// use in [`from_exposed_addr`][].
196 ///
197 /// This is equivalent to `self as usize`, which semantically discards *provenance* and
198 /// *address-space* information. Furthermore, this (like the `as` cast) has the implicit
199 /// side-effect of marking the provenance as 'exposed', so on platforms that support it you can
200 /// later call [`from_exposed_addr_mut`][] to reconstitute the original pointer including its
201 /// provenance. (Reconstructing address space information, if required, is your responsibility.)
202 ///
203 /// Using this method means that code is *not* following Strict Provenance rules. Supporting
204 /// [`from_exposed_addr_mut`][] complicates specification and reasoning and may not be supported
205 /// by tools that help you to stay conformant with the Rust memory model, so it is recommended
206 /// to use [`addr`][pointer::addr] wherever possible.
207 ///
208 /// On most platforms this will produce a value with the same bytes as the original pointer,
209 /// because all the bytes are dedicated to describing the address. Platforms which need to store
210 /// additional information in the pointer may not support this operation, since the 'expose'
211 /// side-effect which is required for [`from_exposed_addr_mut`][] to work is typically not
212 /// available.
213 ///
214 /// This API and its claimed semantics are part of the Strict Provenance experiment, see the
215 /// [module documentation][crate::ptr] for details.
216 ///
217 /// [`from_exposed_addr_mut`]: from_exposed_addr_mut
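///
/// # Examples
///
/// A round-trip sketch (illustrative only; [`from_exposed_addr_mut`] is likewise unstable):
///
/// ```
/// #![feature(strict_provenance)]
/// use core::ptr;
///
/// let mut x = 7u32;
/// let p: *mut u32 = &mut x;
/// let addr = p.expose_addr();
/// // On platforms that support it, the exposed provenance can be picked back up.
/// let q: *mut u32 = ptr::from_exposed_addr_mut(addr);
/// unsafe { assert_eq!(*q, 7) };
/// ```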
218 #[must_use]
219 #[inline]
220 #[unstable(feature = "strict_provenance", issue = "95228")]
221 pub fn expose_addr(self) -> usize
222 where
223 T: Sized,
224 {
225 // FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic.
226 self as usize
227 }
228
229 /// Creates a new pointer with the given address.
230 ///
231 /// This performs the same operation as an `addr as ptr` cast, but copies
232 /// the *address-space* and *provenance* of `self` to the new pointer.
233 /// This allows us to dynamically preserve and propagate this important
234 /// information in a way that is otherwise impossible with a unary cast.
235 ///
236 /// This is equivalent to using [`wrapping_offset`][pointer::wrapping_offset] to offset
237 /// `self` to the given address, and therefore has all the same capabilities and restrictions.
238 ///
239 /// This API and its claimed semantics are part of the Strict Provenance experiment,
240 /// see the [module documentation][crate::ptr] for details.
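///
/// # Examples
///
/// A minimal sketch (illustrative only): stepping to the next element while keeping provenance.
///
/// ```
/// #![feature(strict_provenance)]
/// let mut a = [1u8, 2, 3];
/// let p: *mut u8 = a.as_mut_ptr();
/// let second = p.with_addr(p.addr() + 1);
/// unsafe { assert_eq!(*second, 2) };
/// ```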
241 #[must_use]
242 #[inline]
243 #[unstable(feature = "strict_provenance", issue = "95228")]
244 pub fn with_addr(self, addr: usize) -> Self
245 where
246 T: Sized,
247 {
248 // FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic.
249 //
250 // In the mean-time, this operation is defined to be "as if" it was
251 // a wrapping_offset, so we can emulate it as such. This should properly
252 // restore pointer provenance even under today's compiler.
253 let self_addr = self.addr() as isize;
254 let dest_addr = addr as isize;
255 let offset = dest_addr.wrapping_sub(self_addr);
256
257 // This is the canonical desugaring of this operation
258 self.cast::<u8>().wrapping_offset(offset).cast::<T>()
259 }
260
261 /// Creates a new pointer by mapping `self`'s address to a new one.
262 ///
263 /// This is a convenience for [`with_addr`][pointer::with_addr], see that method for details.
264 ///
265 /// This API and its claimed semantics are part of the Strict Provenance experiment,
266 /// see the [module documentation][crate::ptr] for details.
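///
/// # Examples
///
/// A minimal sketch (illustrative only): masking off the low bits of an address.
///
/// ```
/// #![feature(strict_provenance)]
/// let mut x = 0u64;
/// let p: *mut u64 = &mut x;
/// // A reference is always aligned, so rounding down to the alignment is a no-op here.
/// let aligned = p.map_addr(|a| a & !(core::mem::align_of::<u64>() - 1));
/// assert_eq!(aligned.addr(), p.addr());
/// ```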
267 #[must_use]
268 #[inline]
269 #[unstable(feature = "strict_provenance", issue = "95228")]
270 pub fn map_addr(self, f: impl FnOnce(usize) -> usize) -> Self
271 where
272 T: Sized,
273 {
274 self.with_addr(f(self.addr()))
275 }
276
94222f64 277 /// Decompose a (possibly wide) pointer into its address and metadata components.
278 ///
279 /// The pointer can be later reconstructed with [`from_raw_parts_mut`].
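///
/// # Examples
///
/// A sketch for a slice pointer (illustrative only; uses the same unstable `ptr_metadata` feature):
///
/// ```
/// #![feature(ptr_metadata)]
/// let mut a = [1i32, 2, 3];
/// let p: *mut [i32] = &mut a[..];
/// let (data, len) = p.to_raw_parts();
/// assert_eq!(len, 3);
/// let rebuilt: *mut [i32] = core::ptr::from_raw_parts_mut(data, len);
/// assert_eq!(rebuilt, p);
/// ```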
280 #[unstable(feature = "ptr_metadata", issue = "81513")]
281 #[rustc_const_unstable(feature = "ptr_metadata", issue = "81513")]
282 #[inline]
283 pub const fn to_raw_parts(self) -> (*mut (), <T as super::Pointee>::Metadata) {
284 (self.cast(), super::metadata(self))
285 }
286
287 /// Returns `None` if the pointer is null, or else returns a shared reference to
288 /// the value wrapped in `Some`. If the value may be uninitialized, [`as_uninit_ref`]
289 /// must be used instead.
290 ///
291 /// For the mutable counterpart see [`as_mut`].
292 ///
293 /// [`as_uninit_ref`]: #method.as_uninit_ref-1
064997fb 294 /// [`as_mut`]: #method.as_mut
295 ///
296 /// # Safety
297 ///
17df50a5 298 /// When calling this method, you have to ensure that *either* the pointer is null *or*
299 /// all of the following is true:
300 ///
301 /// * The pointer must be properly aligned.
302 ///
a2a8927a 303 /// * It must be "dereferenceable" in the sense defined in [the module documentation].
304 ///
305 /// * The pointer must point to an initialized instance of `T`.
dfeec247 306 ///
307 /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
308 /// arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
04454e1e 309 /// In particular, while this reference exists, the memory the pointer points to must
310 /// not get mutated (except inside `UnsafeCell`).
311 ///
312 /// This applies even if the result of this method is unused!
313 /// (The part about being initialized is not yet fully decided, but until
314 /// it is, the only safe approach is to ensure that they are indeed initialized.)
315 ///
3dfed10e 316 /// [the module documentation]: crate::ptr#safety
317 ///
318 /// # Examples
319 ///
320 /// Basic usage:
321 ///
322 /// ```
323 /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
324 ///
325 /// unsafe {
326 /// if let Some(val_back) = ptr.as_ref() {
5e7ed085 327 /// println!("We got back the value: {val_back}!");
328 /// }
329 /// }
330 /// ```
331 ///
332 /// # Null-unchecked version
333 ///
334 /// If you are sure the pointer can never be null and are looking for some kind of
335 /// `as_ref_unchecked` that returns the `&T` instead of `Option<&T>`, know that you can
336 /// dereference the pointer directly.
337 ///
338 /// ```
339 /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
340 ///
341 /// unsafe {
342 /// let val_back = &*ptr;
5e7ed085 343 /// println!("We got back the value: {val_back}!");
344 /// }
345 /// ```
346 #[stable(feature = "ptr_as_ref", since = "1.9.0")]
a2a8927a 347 #[rustc_const_unstable(feature = "const_ptr_as_ref", issue = "91822")]
dfeec247 348 #[inline]
a2a8927a 349 pub const unsafe fn as_ref<'a>(self) -> Option<&'a T> {
350 // SAFETY: the caller must guarantee that `self` is valid for a
351 // reference if it isn't null.
352 if self.is_null() { None } else { unsafe { Some(&*self) } }
353 }
354
355 /// Returns `None` if the pointer is null, or else returns a shared reference to
356 /// the value wrapped in `Some`. In contrast to [`as_ref`], this does not require
357 /// that the value be initialized.
358 ///
359 /// For the mutable counterpart see [`as_uninit_mut`].
360 ///
361 /// [`as_ref`]: #method.as_ref-1
362 /// [`as_uninit_mut`]: #method.as_uninit_mut
363 ///
364 /// # Safety
365 ///
17df50a5 366 /// When calling this method, you have to ensure that *either* the pointer is null *or*
367 /// all of the following is true:
368 ///
369 /// * The pointer must be properly aligned.
370 ///
a2a8927a 371 /// * It must be "dereferenceable" in the sense defined in [the module documentation].
372 ///
373 /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
374 /// arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
04454e1e 375 /// In particular, while this reference exists, the memory the pointer points to must
376 /// not get mutated (except inside `UnsafeCell`).
377 ///
378 /// This applies even if the result of this method is unused!
379 ///
380 /// [the module documentation]: crate::ptr#safety
381 ///
382 /// # Examples
383 ///
384 /// Basic usage:
385 ///
386 /// ```
387 /// #![feature(ptr_as_uninit)]
388 ///
389 /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
390 ///
391 /// unsafe {
392 /// if let Some(val_back) = ptr.as_uninit_ref() {
393 /// println!("We got back the value: {}!", val_back.assume_init());
394 /// }
395 /// }
396 /// ```
397 #[inline]
398 #[unstable(feature = "ptr_as_uninit", issue = "75402")]
399 #[rustc_const_unstable(feature = "const_ptr_as_ref", issue = "91822")]
400 pub const unsafe fn as_uninit_ref<'a>(self) -> Option<&'a MaybeUninit<T>>
401 where
402 T: Sized,
403 {
404 // SAFETY: the caller must guarantee that `self` meets all the
405 // requirements for a reference.
406 if self.is_null() { None } else { Some(unsafe { &*(self as *const MaybeUninit<T>) }) }
407 }
408
409 /// Calculates the offset from a pointer.
410 ///
411 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
412 /// offset of `3 * size_of::<T>()` bytes.
413 ///
414 /// # Safety
415 ///
416 /// If any of the following conditions are violated, the result is Undefined
417 /// Behavior:
418 ///
419 /// * Both the starting and resulting pointer must be either in bounds or one
cdc7bbd5 420 /// byte past the end of the same [allocated object].
421 ///
422 /// * The computed offset, **in bytes**, cannot overflow an `isize`.
423 ///
424 /// * The offset being in bounds cannot rely on "wrapping around" the address
425 /// space. That is, the infinite-precision sum, **in bytes**, must fit in a `usize`.
426 ///
427 /// The compiler and standard library generally try to ensure allocations
428 /// never reach a size where an offset is a concern. For instance, `Vec`
429 /// and `Box` ensure they never allocate more than `isize::MAX` bytes, so
430 /// `vec.as_ptr().add(vec.len())` is always safe.
431 ///
432 /// Most platforms fundamentally can't even construct such an allocation.
433 /// For instance, no known 64-bit platform can ever serve a request
434 /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
435 /// However, some 32-bit and 16-bit platforms may successfully serve a request for
436 /// more than `isize::MAX` bytes with things like Physical Address
437 /// Extension. As such, memory acquired directly from allocators or memory
438 /// mapped files *may* be too large to handle with this function.
439 ///
440 /// Consider using [`wrapping_offset`] instead if these constraints are
441 /// difficult to satisfy. The only advantage of this method is that it
442 /// enables more aggressive compiler optimizations.
443 ///
444 /// [`wrapping_offset`]: #method.wrapping_offset
cdc7bbd5 445 /// [allocated object]: crate::ptr#allocated-object
446 ///
447 /// # Examples
448 ///
449 /// Basic usage:
450 ///
451 /// ```
452 /// let mut s = [1, 2, 3];
453 /// let ptr: *mut u32 = s.as_mut_ptr();
454 ///
455 /// unsafe {
456 /// println!("{}", *ptr.offset(1));
457 /// println!("{}", *ptr.offset(2));
458 /// }
459 /// ```
460 #[stable(feature = "rust1", since = "1.0.0")]
f9f354fc 461 #[must_use = "returns a new pointer rather than modifying its argument"]
5e7ed085 462 #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
cdc7bbd5 463 #[inline(always)]
064997fb 464 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
f9f354fc 465 pub const unsafe fn offset(self, count: isize) -> *mut T
466 where
467 T: Sized,
468 {
469 // SAFETY: the caller must uphold the safety contract for `offset`.
470 // The obtained pointer is valid for writes since the caller must
471 // guarantee that it points to the same allocated object as `self`.
472 unsafe { intrinsics::offset(self, count) as *mut T }
473 }
474
475 /// Calculates the offset from a pointer in bytes.
476 ///
477 /// `count` is in units of **bytes**.
478 ///
479 /// This is purely a convenience for casting to a `u8` pointer and
480 /// using [offset][pointer::offset] on it. See that method for documentation
481 /// and safety requirements.
482 ///
483 /// For non-`Sized` pointees this operation changes only the data pointer,
484 /// leaving the metadata untouched.
485 #[must_use]
486 #[inline(always)]
487 #[unstable(feature = "pointer_byte_offsets", issue = "96283")]
488 #[rustc_const_unstable(feature = "const_pointer_byte_offsets", issue = "96283")]
064997fb 489 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
490 pub const unsafe fn byte_offset(self, count: isize) -> Self {
491 // SAFETY: the caller must uphold the safety contract for `offset`.
492 let this = unsafe { self.cast::<u8>().offset(count).cast::<()>() };
493 from_raw_parts_mut::<T>(this, metadata(self))
494 }
495
496 /// Calculates the offset from a pointer using wrapping arithmetic.
497 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
498 /// offset of `3 * size_of::<T>()` bytes.
499 ///
500 /// # Safety
501 ///
5869c6ff 502 /// This operation itself is always safe, but using the resulting pointer is not.
dfeec247 503 ///
94222f64 504 /// The resulting pointer "remembers" the [allocated object] that `self` points to; it must not
cdc7bbd5 505 /// be used to read or write other allocated objects.
dfeec247 506 ///
507 /// In other words, `let z = x.wrapping_offset((y as isize) - (x as isize))` does *not* make `z`
508 /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
509 /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
510 /// `x` and `y` point into the same allocated object.
dfeec247 511 ///
512 /// Compared to [`offset`], this method basically delays the requirement of staying within the
513 /// same allocated object: [`offset`] is immediate Undefined Behavior when crossing object
514 /// boundaries; `wrapping_offset` produces a pointer but still leads to Undefined Behavior if a
515 /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`offset`]
516 /// can be optimized better and is thus preferable in performance-sensitive code.
517 ///
518 /// The delayed check only considers the value of the pointer that was dereferenced, not the
519 /// intermediate values used during the computation of the final result. For example,
520 /// `x.wrapping_offset(o).wrapping_offset(o.wrapping_neg())` is always the same as `x`. In other
521 /// words, leaving the allocated object and then re-entering it later is permitted.
dfeec247 522 ///
dfeec247 523 /// [`offset`]: #method.offset
cdc7bbd5 524 /// [allocated object]: crate::ptr#allocated-object
525 ///
526 /// # Examples
527 ///
528 /// Basic usage:
529 ///
530 /// ```
531 /// // Iterate using a raw pointer in increments of two elements
532 /// let mut data = [1u8, 2, 3, 4, 5];
533 /// let mut ptr: *mut u8 = data.as_mut_ptr();
534 /// let step = 2;
535 /// let end_rounded_up = ptr.wrapping_offset(6);
536 ///
537 /// while ptr != end_rounded_up {
538 /// unsafe {
539 /// *ptr = 0;
540 /// }
541 /// ptr = ptr.wrapping_offset(step);
542 /// }
543 /// assert_eq!(&data, &[0, 2, 0, 4, 0]);
544 /// ```
545 #[stable(feature = "ptr_wrapping_offset", since = "1.16.0")]
f9f354fc 546 #[must_use = "returns a new pointer rather than modifying its argument"]
5e7ed085 547 #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
cdc7bbd5 548 #[inline(always)]
f9f354fc 549 pub const fn wrapping_offset(self, count: isize) -> *mut T
550 where
551 T: Sized,
552 {
f9f354fc 553 // SAFETY: the `arith_offset` intrinsic has no prerequisites to be called.
554 unsafe { intrinsics::arith_offset(self, count) as *mut T }
555 }
556
557 /// Calculates the offset from a pointer in bytes using wrapping arithmetic.
558 ///
559 /// `count` is in units of **bytes**.
560 ///
561 /// This is purely a convenience for casting to a `u8` pointer and
562 /// using [wrapping_offset][pointer::wrapping_offset] on it. See that method
563 /// for documentation.
564 ///
565 /// For non-`Sized` pointees this operation changes only the data pointer,
566 /// leaving the metadata untouched.
567 #[must_use]
568 #[inline(always)]
569 #[unstable(feature = "pointer_byte_offsets", issue = "96283")]
570 #[rustc_const_unstable(feature = "const_pointer_byte_offsets", issue = "96283")]
571 pub const fn wrapping_byte_offset(self, count: isize) -> Self {
572 from_raw_parts_mut::<T>(
573 self.cast::<u8>().wrapping_offset(count).cast::<()>(),
574 metadata(self),
575 )
576 }
577
578 /// Returns `None` if the pointer is null, or else returns a unique reference to
579 /// the value wrapped in `Some`. If the value may be uninitialized, [`as_uninit_mut`]
580 /// must be used instead.
dfeec247 581 ///
3dfed10e 582 /// For the shared counterpart see [`as_ref`].
dfeec247 583 ///
584 /// [`as_uninit_mut`]: #method.as_uninit_mut
585 /// [`as_ref`]: #method.as_ref-1
586 ///
587 /// # Safety
dfeec247 588 ///
17df50a5 589 /// When calling this method, you have to ensure that *either* the pointer is null *or*
dfeec247 590 /// all of the following is true:
591 ///
592 /// * The pointer must be properly aligned.
593 ///
a2a8927a 594 /// * It must be "dereferenceable" in the sense defined in [the module documentation].
595 ///
596 /// * The pointer must point to an initialized instance of `T`.
597 ///
598 /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
599 /// arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
04454e1e 600 /// In particular, while this reference exists, the memory the pointer points to must
3dfed10e 601 /// not get accessed (read or written) through any other pointer.
602 ///
603 /// This applies even if the result of this method is unused!
604 /// (The part about being initialized is not yet fully decided, but until
3dfed10e 605 /// it is, the only safe approach is to ensure that they are indeed initialized.)
dfeec247 606 ///
3dfed10e 607 /// [the module documentation]: crate::ptr#safety
608 ///
609 /// # Examples
610 ///
611 /// Basic usage:
612 ///
613 /// ```
614 /// let mut s = [1, 2, 3];
615 /// let ptr: *mut u32 = s.as_mut_ptr();
616 /// let first_value = unsafe { ptr.as_mut().unwrap() };
617 /// *first_value = 4;
3dfed10e 618 /// # assert_eq!(s, [4, 2, 3]);
5e7ed085 619 /// println!("{s:?}"); // It'll print: "[4, 2, 3]".
620 /// ```
621 ///
622 /// # Null-unchecked version
623 ///
624 /// If you are sure the pointer can never be null and are looking for some kind of
625 /// `as_mut_unchecked` that returns the `&mut T` instead of `Option<&mut T>`, know that
626 /// you can dereference the pointer directly.
627 ///
628 /// ```
629 /// let mut s = [1, 2, 3];
630 /// let ptr: *mut u32 = s.as_mut_ptr();
631 /// let first_value = unsafe { &mut *ptr };
632 /// *first_value = 4;
3dfed10e 633 /// # assert_eq!(s, [4, 2, 3]);
5e7ed085 634 /// println!("{s:?}"); // It'll print: "[4, 2, 3]".
635 /// ```
636 #[stable(feature = "ptr_as_ref", since = "1.9.0")]
a2a8927a 637 #[rustc_const_unstable(feature = "const_ptr_as_ref", issue = "91822")]
dfeec247 638 #[inline]
a2a8927a 639 pub const unsafe fn as_mut<'a>(self) -> Option<&'a mut T> {
640 // SAFETY: the caller must guarantee that `self` is valid for
641 // a mutable reference if it isn't null.
642 if self.is_null() { None } else { unsafe { Some(&mut *self) } }
643 }
644
645 /// Returns `None` if the pointer is null, or else returns a unique reference to
646 /// the value wrapped in `Some`. In contrast to [`as_mut`], this does not require
647 /// that the value be initialized.
648 ///
649 /// For the shared counterpart see [`as_uninit_ref`].
650 ///
651 /// [`as_mut`]: #method.as_mut
652 /// [`as_uninit_ref`]: #method.as_uninit_ref-1
653 ///
654 /// # Safety
655 ///
17df50a5 656 /// When calling this method, you have to ensure that *either* the pointer is null *or*
657 /// all of the following is true:
658 ///
659 /// * The pointer must be properly aligned.
660 ///
a2a8927a 661 /// * It must be "dereferenceable" in the sense defined in [the module documentation].
662 ///
663 /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
664 /// arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
04454e1e 665 /// In particular, while this reference exists, the memory the pointer points to must
666 /// not get accessed (read or written) through any other pointer.
667 ///
668 /// This applies even if the result of this method is unused!
669 ///
670 /// [the module documentation]: crate::ptr#safety
671 #[inline]
672 #[unstable(feature = "ptr_as_uninit", issue = "75402")]
673 #[rustc_const_unstable(feature = "const_ptr_as_ref", issue = "91822")]
674 pub const unsafe fn as_uninit_mut<'a>(self) -> Option<&'a mut MaybeUninit<T>>
675 where
676 T: Sized,
677 {
678 // SAFETY: the caller must guarantee that `self` meets all the
679 // requirements for a reference.
680 if self.is_null() { None } else { Some(unsafe { &mut *(self as *mut MaybeUninit<T>) }) }
681 }
682
683 /// Returns whether two pointers are guaranteed to be equal.
684 ///
685 /// At runtime this function behaves like `self == other`.
686 /// However, in some contexts (e.g., compile-time evaluation),
687 /// it is not always possible to determine equality of two pointers, so this function may
688 /// spuriously return `false` for pointers that later actually turn out to be equal.
689 /// But when it returns `true`, the pointers are guaranteed to be equal.
690 ///
691 /// This function is the mirror of [`guaranteed_ne`], but not its inverse. There are pointer
692 /// comparisons for which both functions return `false`.
693 ///
694 /// [`guaranteed_ne`]: #method.guaranteed_ne
695 ///
696 /// The return value may change depending on the compiler version and unsafe code must not
697 /// rely on the result of this function for soundness. It is suggested to only use this function
698 /// for performance optimizations where spurious `false` return values by this function do not
699 /// affect the outcome, but just the performance.
700 /// The consequences of using this method to make runtime and compile-time code behave
701 /// differently have not been explored. This method should not be used to introduce such
702 /// differences, and it should also not be stabilized before we have a better understanding
703 /// of this issue.
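///
/// # Examples
///
/// A runtime-only sketch (illustrative; at compile time the result may be a spurious `false`):
///
/// ```
/// #![feature(const_raw_ptr_comparison)]
/// let mut x = 0u8;
/// let p: *mut u8 = &mut x;
/// // At runtime this is ordinary pointer equality.
/// assert!(p.guaranteed_eq(p));
/// assert!(!p.guaranteed_eq(core::ptr::null_mut()));
/// ```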
704 #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
705 #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
706 #[inline]
707 pub const fn guaranteed_eq(self, other: *mut T) -> bool
708 where
709 T: Sized,
710 {
711 intrinsics::ptr_guaranteed_eq(self as *const _, other as *const _)
712 }
713
3dfed10e 714 /// Returns whether two pointers are guaranteed to be unequal.
715 ///
716 /// At runtime this function behaves like `self != other`.
717 /// However, in some contexts (e.g., compile-time evaluation),
718 /// it is not always possible to determine the inequality of two pointers, so this function may
719 /// spuriously return `false` for pointers that later actually turn out to be unequal.
720 /// But when it returns `true`, the pointers are guaranteed to be unequal.
721 ///
722 /// This function is the mirror of [`guaranteed_eq`], but not its inverse. There are pointer
723 /// comparisons for which both functions return `false`.
724 ///
725 /// [`guaranteed_eq`]: #method.guaranteed_eq
726 ///
727 /// The return value may change depending on the compiler version and unsafe code must not
728 /// rely on the result of this function for soundness. It is suggested to only use this function
729 /// for performance optimizations where spurious `false` return values by this function do not
730 /// affect the outcome, but just the performance.
731 /// The consequences of using this method to make runtime and compile-time code behave
732 /// differently have not been explored. This method should not be used to introduce such
733 /// differences, and it should also not be stabilized before we have a better understanding
734 /// of this issue.
735 #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
736 #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
737 #[inline]
738 pub const unsafe fn guaranteed_ne(self, other: *mut T) -> bool
739 where
740 T: Sized,
741 {
742 intrinsics::ptr_guaranteed_ne(self as *const _, other as *const _)
743 }
744
745 /// Calculates the distance between two pointers. The returned value is in
04454e1e 746 /// units of T: the distance in bytes divided by `mem::size_of::<T>()`.
747 ///
748 /// This function is the inverse of [`offset`].
749 ///
750 /// [`offset`]: #method.offset-1
751 ///
752 /// # Safety
753 ///
754 /// If any of the following conditions are violated, the result is Undefined
755 /// Behavior:
756 ///
757 /// * Both the starting and other pointer must be either in bounds or one
cdc7bbd5 758 /// byte past the end of the same [allocated object].
dfeec247 759 ///
760 /// * Both pointers must be *derived from* a pointer to the same object.
761 /// (See below for an example.)
762 ///
763 /// * The distance between the pointers, in bytes, must be an exact multiple
764 /// of the size of `T`.
765 ///
766 /// * The distance between the pointers, **in bytes**, cannot overflow an `isize`.
767 ///
768 /// * The distance being in bounds cannot rely on "wrapping around" the address space.
769 ///
770 /// Rust types are never larger than `isize::MAX` and Rust allocations never wrap around the
771 /// address space, so two pointers within some value of any Rust type `T` will always satisfy
772 /// the last two conditions. The standard library also generally ensures that allocations
773 /// never reach a size where an offset is a concern. For instance, `Vec` and `Box` ensure they
774 /// never allocate more than `isize::MAX` bytes, so `ptr_into_vec.offset_from(vec.as_ptr())`
775 /// always satisfies the last two conditions.
dfeec247 776 ///
6a06907d 777 /// Most platforms fundamentally can't even construct such a large allocation.
778 /// For instance, no known 64-bit platform can ever serve a request
779 /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
780 /// However, some 32-bit and 16-bit platforms may successfully serve a request for
781 /// more than `isize::MAX` bytes with things like Physical Address
782 /// Extension. As such, memory acquired directly from allocators or memory
783 /// mapped files *may* be too large to handle with this function.
784 /// (Note that [`offset`] and [`add`] also have a similar limitation and hence cannot be used on
785 /// such large allocations either.)
786 ///
787 /// [`add`]: #method.add
cdc7bbd5 788 /// [allocated object]: crate::ptr#allocated-object
dfeec247 789 ///
790 /// # Panics
791 ///
792 /// This function panics if `T` is a Zero-Sized Type ("ZST").
793 ///
794 /// # Examples
795 ///
796 /// Basic usage:
797 ///
798 /// ```
799 /// let mut a = [0; 5];
800 /// let ptr1: *mut i32 = &mut a[1];
801 /// let ptr2: *mut i32 = &mut a[3];
802 /// unsafe {
803 /// assert_eq!(ptr2.offset_from(ptr1), 2);
804 /// assert_eq!(ptr1.offset_from(ptr2), -2);
805 /// assert_eq!(ptr1.offset(2), ptr2);
806 /// assert_eq!(ptr2.offset(-2), ptr1);
807 /// }
808 /// ```
809 ///
810 /// *Incorrect* usage:
811 ///
812 /// ```rust,no_run
813 /// let ptr1 = Box::into_raw(Box::new(0u8));
814 /// let ptr2 = Box::into_raw(Box::new(1u8));
815 /// let diff = (ptr2 as isize).wrapping_sub(ptr1 as isize);
816 /// // Make ptr2_other an "alias" of ptr2, but derived from ptr1.
817 /// let ptr2_other = (ptr1 as *mut u8).wrapping_offset(diff);
818 /// assert_eq!(ptr2 as usize, ptr2_other as usize);
819 /// // Since ptr2_other and ptr2 are derived from pointers to different objects,
820 /// // computing their offset is undefined behavior, even though
821 /// // they point to the same address!
822 /// unsafe {
823 /// let zero = ptr2_other.offset_from(ptr2); // Undefined Behavior
824 /// }
825 /// ```
826 #[stable(feature = "ptr_offset_from", since = "1.47.0")]
5099ac24 827 #[rustc_const_unstable(feature = "const_ptr_offset_from", issue = "92980")]
17df50a5 828 #[inline(always)]
064997fb 829 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
830 pub const unsafe fn offset_from(self, origin: *const T) -> isize
831 where
832 T: Sized,
833 {
834 // SAFETY: the caller must uphold the safety contract for `offset_from`.
835 unsafe { (self as *const T).offset_from(origin) }
836 }
837
838 /// Calculates the distance between two pointers. The returned value is in
839 /// units of **bytes**.
840 ///
841 /// This is purely a convenience for casting to a `u8` pointer and
842 /// using [offset_from][pointer::offset_from] on it. See that method for
843 /// documentation and safety requirements.
844 ///
845 /// For non-`Sized` pointees this operation considers only the data pointers,
846 /// ignoring the metadata.
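///
/// # Examples
///
/// A minimal sketch (illustrative only; uses the unstable `pointer_byte_offsets` feature):
///
/// ```
/// #![feature(pointer_byte_offsets)]
/// let mut a = [0u32; 4];
/// let base: *mut u32 = a.as_mut_ptr();
/// unsafe {
///     let two_ahead = base.add(2);
///     // Two `u32` elements apart is eight bytes.
///     assert_eq!(two_ahead.byte_offset_from(base), 8);
/// }
/// ```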
847 #[inline(always)]
848 #[unstable(feature = "pointer_byte_offsets", issue = "96283")]
849 #[rustc_const_unstable(feature = "const_pointer_byte_offsets", issue = "96283")]
064997fb 850 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
851 pub const unsafe fn byte_offset_from(self, origin: *const T) -> isize {
852 // SAFETY: the caller must uphold the safety contract for `offset_from`.
853 unsafe { self.cast::<u8>().offset_from(origin.cast::<u8>()) }
854 }
855
856 /// Calculates the distance between two pointers, *where it's known that
857 /// `self` is equal to or greater than `origin`*. The returned value is in
858 /// units of T: the distance in bytes divided by `mem::size_of::<T>()`.
859 ///
860 /// This computes the same value that [`offset_from`](#method.offset_from)
861 /// would compute, but with the added precondition that the offset is
862 /// guaranteed to be non-negative. This method is equivalent to
863 /// `usize::try_from(self.offset_from(origin)).unwrap_unchecked()`,
864 /// but it provides slightly more information to the optimizer, which can
865 /// sometimes allow it to optimize slightly better with some backends.
866 ///
867 /// This method can be thought of as recovering the `count` that was passed
868 /// to [`add`](#method.add) (or, with the parameters in the other order,
869 /// to [`sub`](#method.sub)). The following are all equivalent, assuming
870 /// that their safety preconditions are met:
871 /// ```rust
872 /// # #![feature(ptr_sub_ptr)]
873 /// # unsafe fn blah(ptr: *mut i32, origin: *mut i32, count: usize) -> bool {
874 /// ptr.sub_ptr(origin) == count
875 /// # &&
876 /// origin.add(count) == ptr
877 /// # &&
878 /// ptr.sub(count) == origin
879 /// # }
880 /// ```
881 ///
882 /// # Safety
883 ///
884 /// - The distance between the pointers must be non-negative (`self >= origin`)
885 ///
886 /// - *All* the safety conditions of [`offset_from`](#method.offset_from)
887 /// apply to this method as well; see it for the full details.
888 ///
889 /// Importantly, despite the return type of this method being able to represent
890 /// a larger offset, it's still *not permitted* to pass pointers which differ
891 /// by more than `isize::MAX` *bytes*. As such, the result of this method will
892 /// always be less than or equal to `isize::MAX as usize`.
893 ///
894 /// # Panics
895 ///
896 /// This function panics if `T` is a Zero-Sized Type ("ZST").
897 ///
898 /// # Examples
899 ///
900 /// ```
901 /// #![feature(ptr_sub_ptr)]
902 ///
903 /// let mut a = [0; 5];
904 /// let p: *mut i32 = a.as_mut_ptr();
905 /// unsafe {
906 /// let ptr1: *mut i32 = p.add(1);
907 /// let ptr2: *mut i32 = p.add(3);
908 ///
909 /// assert_eq!(ptr2.sub_ptr(ptr1), 2);
910 /// assert_eq!(ptr1.add(2), ptr2);
911 /// assert_eq!(ptr2.sub(2), ptr1);
912 /// assert_eq!(ptr2.sub_ptr(ptr2), 0);
913 /// }
914 ///
915 /// // This would be incorrect, as the pointers are not correctly ordered:
916 /// // ptr1.offset_from(ptr2)
/// ```
917 #[unstable(feature = "ptr_sub_ptr", issue = "95892")]
918 #[rustc_const_unstable(feature = "const_ptr_sub_ptr", issue = "95892")]
919 #[inline]
064997fb 920 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
921 pub const unsafe fn sub_ptr(self, origin: *const T) -> usize
922 where
923 T: Sized,
924 {
925 // SAFETY: the caller must uphold the safety contract for `sub_ptr`.
926 unsafe { (self as *const T).sub_ptr(origin) }
927 }
928
929 /// Calculates the offset from a pointer (convenience for `.offset(count as isize)`).
930 ///
931 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
932 /// offset of `3 * size_of::<T>()` bytes.
933 ///
934 /// # Safety
935 ///
936 /// If any of the following conditions are violated, the result is Undefined
937 /// Behavior:
938 ///
939 /// * Both the starting and resulting pointer must be either in bounds or one
cdc7bbd5 940 /// byte past the end of the same [allocated object].
941 ///
942 /// * The computed offset, **in bytes**, cannot overflow an `isize`.
943 ///
944 /// * The offset being in bounds cannot rely on "wrapping around" the address
945 /// space. That is, the infinite-precision sum must fit in a `usize`.
946 ///
947 /// The compiler and standard library generally try to ensure allocations
948 /// never reach a size where an offset is a concern. For instance, `Vec`
949 /// and `Box` ensure they never allocate more than `isize::MAX` bytes, so
950 /// `vec.as_ptr().add(vec.len())` is always safe.
951 ///
952 /// Most platforms fundamentally can't even construct such an allocation.
953 /// For instance, no known 64-bit platform can ever serve a request
954 /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
955 /// However, some 32-bit and 16-bit platforms may successfully serve a request for
956 /// more than `isize::MAX` bytes with things like Physical Address
957 /// Extension. As such, memory acquired directly from allocators or memory
958 /// mapped files *may* be too large to handle with this function.
959 ///
960 /// Consider using [`wrapping_add`] instead if these constraints are
961 /// difficult to satisfy. The only advantage of this method is that it
962 /// enables more aggressive compiler optimizations.
963 ///
964 /// [`wrapping_add`]: #method.wrapping_add
94222f64 965 /// [allocated object]: crate::ptr#allocated-object
966 ///
967 /// # Examples
968 ///
969 /// Basic usage:
970 ///
971 /// ```
972 /// let s: &str = "123";
973 /// let ptr: *const u8 = s.as_ptr();
974 ///
975 /// unsafe {
976 /// println!("{}", *ptr.add(1) as char);
977 /// println!("{}", *ptr.add(2) as char);
978 /// }
979 /// ```
980 #[stable(feature = "pointer_methods", since = "1.26.0")]
f9f354fc 981 #[must_use = "returns a new pointer rather than modifying its argument"]
5e7ed085 982 #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
cdc7bbd5 983 #[inline(always)]
064997fb 984 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
f9f354fc 985 pub const unsafe fn add(self, count: usize) -> Self
986 where
987 T: Sized,
988 {
989 // SAFETY: the caller must uphold the safety contract for `offset`.
990 unsafe { self.offset(count as isize) }
991 }
992
993 /// Calculates the offset from a pointer in bytes (convenience for `.byte_offset(count as isize)`).
994 ///
995 /// `count` is in units of bytes.
996 ///
997 /// This is purely a convenience for casting to a `u8` pointer and
998 /// using [add][pointer::add] on it. See that method for documentation
999 /// and safety requirements.
1000 ///
1001 /// For non-`Sized` pointees this operation changes only the data pointer,
1002 /// leaving the metadata untouched.
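///
/// # Examples
///
/// A minimal sketch (illustrative only; uses the unstable `pointer_byte_offsets` feature):
///
/// ```
/// #![feature(pointer_byte_offsets)]
/// let mut a = [0u16, 7];
/// let p: *mut u16 = a.as_mut_ptr();
/// unsafe {
///     // Stepping by `size_of::<u16>()` bytes lands on the next element.
///     assert_eq!(*p.byte_add(2), 7);
/// }
/// ```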
1003 #[must_use]
1004 #[inline(always)]
1005 #[unstable(feature = "pointer_byte_offsets", issue = "96283")]
1006 #[rustc_const_unstable(feature = "const_pointer_byte_offsets", issue = "96283")]
064997fb 1007 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1008 pub const unsafe fn byte_add(self, count: usize) -> Self {
1009 // SAFETY: the caller must uphold the safety contract for `add`.
1010 let this = unsafe { self.cast::<u8>().add(count).cast::<()>() };
1011 from_raw_parts_mut::<T>(this, metadata(self))
1012 }
1013
1014 /// Calculates the offset from a pointer (convenience for
1015 /// `.offset((count as isize).wrapping_neg())`).
1016 ///
1017 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
1018 /// offset of `3 * size_of::<T>()` bytes.
1019 ///
1020 /// # Safety
1021 ///
1022 /// If any of the following conditions are violated, the result is Undefined
1023 /// Behavior:
1024 ///
1025 /// * Both the starting and resulting pointer must be either in bounds or one
cdc7bbd5 1026 /// byte past the end of the same [allocated object].
1027 ///
1028 /// * The computed offset cannot exceed `isize::MAX` **bytes**.
1029 ///
1030 /// * The offset being in bounds cannot rely on "wrapping around" the address
1031 /// space. That is, the infinite-precision sum must fit in a usize.
1032 ///
1033 /// The compiler and standard library generally try to ensure allocations
1034 /// never reach a size where an offset is a concern. For instance, `Vec`
1035 /// and `Box` ensure they never allocate more than `isize::MAX` bytes, so
1036 /// `vec.as_ptr().add(vec.len()).sub(vec.len())` is always safe.
1037 ///
1038 /// Most platforms fundamentally can't even construct such an allocation.
1039 /// For instance, no known 64-bit platform can ever serve a request
1040 /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
1041 /// However, some 32-bit and 16-bit platforms may successfully serve a request for
1042 /// more than `isize::MAX` bytes with things like Physical Address
1043 /// Extension. As such, memory acquired directly from allocators or memory
1044 /// mapped files *may* be too large to handle with this function.
1045 ///
1046 /// Consider using [`wrapping_sub`] instead if these constraints are
1047 /// difficult to satisfy. The only advantage of this method is that it
1048 /// enables more aggressive compiler optimizations.
1049 ///
1050 /// [`wrapping_sub`]: #method.wrapping_sub
cdc7bbd5 1051 /// [allocated object]: crate::ptr#allocated-object
1052 ///
1053 /// # Examples
1054 ///
1055 /// Basic usage:
1056 ///
1057 /// ```
1058 /// let s: &str = "123";
1059 ///
1060 /// unsafe {
1061 /// let end: *const u8 = s.as_ptr().add(3);
1062 /// println!("{}", *end.sub(1) as char);
1063 /// println!("{}", *end.sub(2) as char);
1064 /// }
1065 /// ```
1066 #[stable(feature = "pointer_methods", since = "1.26.0")]
f9f354fc 1067 #[must_use = "returns a new pointer rather than modifying its argument"]
5e7ed085 1068 #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
dfeec247 1069 #[inline]
064997fb 1070 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
f9f354fc 1071 pub const unsafe fn sub(self, count: usize) -> Self
1072 where
1073 T: Sized,
1074 {
1075 // SAFETY: the caller must uphold the safety contract for `offset`.
1076 unsafe { self.offset((count as isize).wrapping_neg()) }
1077 }
1078
1079 /// Calculates the offset from a pointer in bytes (convenience for
1080 /// `.byte_offset((count as isize).wrapping_neg())`).
1081 ///
1082 /// `count` is in units of bytes.
1083 ///
1084 /// This is purely a convenience for casting to a `u8` pointer and
1085 /// using [sub][pointer::sub] on it. See that method for documentation
1086 /// and safety requirements.
1087 ///
1088 /// For non-`Sized` pointees this operation changes only the data pointer,
1089 /// leaving the metadata untouched.
1090 #[must_use]
1091 #[inline(always)]
1092 #[unstable(feature = "pointer_byte_offsets", issue = "96283")]
1093 #[rustc_const_unstable(feature = "const_pointer_byte_offsets", issue = "96283")]
064997fb 1094 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1095 pub const unsafe fn byte_sub(self, count: usize) -> Self {
1096 // SAFETY: the caller must uphold the safety contract for `sub`.
1097 let this = unsafe { self.cast::<u8>().sub(count).cast::<()>() };
1098 from_raw_parts_mut::<T>(this, metadata(self))
1099 }
1100
1101 /// Calculates the offset from a pointer using wrapping arithmetic.
1102 /// (convenience for `.wrapping_offset(count as isize)`)
1103 ///
1104 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
1105 /// offset of `3 * size_of::<T>()` bytes.
1106 ///
1107 /// # Safety
1108 ///
1109 /// This operation itself is always safe, but using the resulting pointer is not.
1110 ///
94222f64 1111 /// The resulting pointer "remembers" the [allocated object] that `self` points to; it must not
cdc7bbd5 1112 /// be used to read or write other allocated objects.
1113 ///
1114 /// In other words, `let z = x.wrapping_add((y as usize) - (x as usize))` does *not* make `z`
1115 /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
1116 /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
1117 /// `x` and `y` point into the same allocated object.
dfeec247 1118 ///
1119 /// Compared to [`add`], this method basically delays the requirement of staying within the
1120 /// same allocated object: [`add`] is immediate Undefined Behavior when crossing object
1121 /// boundaries; `wrapping_add` produces a pointer but still leads to Undefined Behavior if a
1122 /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`add`]
1123 /// can be optimized better and is thus preferable in performance-sensitive code.
dfeec247 1124 ///
1125 /// The delayed check only considers the value of the pointer that was dereferenced, not the
1126 /// intermediate values used during the computation of the final result. For example,
1127 /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
1128 /// allocated object and then re-entering it later is permitted.
dfeec247 1129 ///
dfeec247 1130 /// [`add`]: #method.add
cdc7bbd5 1131 /// [allocated object]: crate::ptr#allocated-object
1132 ///
1133 /// # Examples
1134 ///
1135 /// Basic usage:
1136 ///
1137 /// ```
1138 /// // Iterate using a raw pointer in increments of two elements
1139 /// let data = [1u8, 2, 3, 4, 5];
1140 /// let mut ptr: *const u8 = data.as_ptr();
1141 /// let step = 2;
1142 /// let end_rounded_up = ptr.wrapping_add(6);
1143 ///
1144 /// // This loop prints "1, 3, 5, "
1145 /// while ptr != end_rounded_up {
1146 /// unsafe {
1147 /// print!("{}, ", *ptr);
1148 /// }
1149 /// ptr = ptr.wrapping_add(step);
1150 /// }
1151 /// ```
1152 #[stable(feature = "pointer_methods", since = "1.26.0")]
f9f354fc 1153 #[must_use = "returns a new pointer rather than modifying its argument"]
5e7ed085 1154 #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
cdc7bbd5 1155 #[inline(always)]
f9f354fc 1156 pub const fn wrapping_add(self, count: usize) -> Self
1157 where
1158 T: Sized,
1159 {
1160 self.wrapping_offset(count as isize)
1161 }
1162
1163 /// Calculates the offset from a pointer in bytes using wrapping arithmetic.
1164 /// (convenience for `.wrapping_byte_offset(count as isize)`)
1165 ///
1166 /// `count` is in units of bytes.
1167 ///
1168 /// This is purely a convenience for casting to a `u8` pointer and
1169 /// using [wrapping_add][pointer::wrapping_add] on it. See that method for documentation.
1170 ///
1171 /// For non-`Sized` pointees this operation changes only the data pointer,
1172 /// leaving the metadata untouched.
1173 #[must_use]
1174 #[inline(always)]
1175 #[unstable(feature = "pointer_byte_offsets", issue = "96283")]
1176 #[rustc_const_unstable(feature = "const_pointer_byte_offsets", issue = "96283")]
1177 pub const fn wrapping_byte_add(self, count: usize) -> Self {
1178 from_raw_parts_mut::<T>(self.cast::<u8>().wrapping_add(count).cast::<()>(), metadata(self))
1179 }
1180
dfeec247 1181 /// Calculates the offset from a pointer using wrapping arithmetic.
5869c6ff 1182 /// (convenience for `.wrapping_offset((count as isize).wrapping_neg())`)
1183 ///
1184 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
1185 /// offset of `3 * size_of::<T>()` bytes.
1186 ///
1187 /// # Safety
1188 ///
1189 /// This operation itself is always safe, but using the resulting pointer is not.
1190 ///
94222f64 1191 /// The resulting pointer "remembers" the [allocated object] that `self` points to; it must not
cdc7bbd5 1192 /// be used to read or write other allocated objects.
1193 ///
1194 /// In other words, `let z = x.wrapping_sub((x as usize) - (y as usize))` does *not* make `z`
1195 /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
1196 /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
1197 /// `x` and `y` point into the same allocated object.
dfeec247 1198 ///
1199 /// Compared to [`sub`], this method basically delays the requirement of staying within the
1200 /// same allocated object: [`sub`] is immediate Undefined Behavior when crossing object
1201 /// boundaries; `wrapping_sub` produces a pointer but still leads to Undefined Behavior if a
1202 /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`sub`]
1203 /// can be optimized better and is thus preferable in performance-sensitive code.
dfeec247 1204 ///
1205 /// The delayed check only considers the value of the pointer that was dereferenced, not the
1206 /// intermediate values used during the computation of the final result. For example,
1207 /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
1208 /// allocated object and then re-entering it later is permitted.
dfeec247 1209 ///
dfeec247 1210 /// [`sub`]: #method.sub
cdc7bbd5 1211 /// [allocated object]: crate::ptr#allocated-object
1212 ///
1213 /// # Examples
1214 ///
1215 /// Basic usage:
1216 ///
1217 /// ```
1218 /// // Iterate using a raw pointer in increments of two elements (backwards)
1219 /// let data = [1u8, 2, 3, 4, 5];
1220 /// let mut ptr: *const u8 = data.as_ptr();
1221 /// let start_rounded_down = ptr.wrapping_sub(2);
1222 /// ptr = ptr.wrapping_add(4);
1223 /// let step = 2;
1224 /// // This loop prints "5, 3, 1, "
1225 /// while ptr != start_rounded_down {
1226 /// unsafe {
1227 /// print!("{}, ", *ptr);
1228 /// }
1229 /// ptr = ptr.wrapping_sub(step);
1230 /// }
1231 /// ```
1232 #[stable(feature = "pointer_methods", since = "1.26.0")]
f9f354fc 1233 #[must_use = "returns a new pointer rather than modifying its argument"]
5e7ed085 1234 #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
dfeec247 1235 #[inline]
f9f354fc 1236 pub const fn wrapping_sub(self, count: usize) -> Self
1237 where
1238 T: Sized,
1239 {
1240 self.wrapping_offset((count as isize).wrapping_neg())
1241 }
1242
1243 /// Calculates the offset from a pointer in bytes using wrapping arithmetic.
1244 /// (convenience for `.wrapping_byte_offset((count as isize).wrapping_neg())`)
1245 ///
1246 /// `count` is in units of bytes.
1247 ///
1248 /// This is purely a convenience for casting to a `u8` pointer and
1249 /// using [wrapping_sub][pointer::wrapping_sub] on it. See that method for documentation.
1250 ///
1251 /// For non-`Sized` pointees this operation changes only the data pointer,
1252 /// leaving the metadata untouched.
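/// # Examples
///
/// A small round-trip sketch, stepping a `u16` pointer by whole bytes:
///
/// ```
/// #![feature(pointer_byte_offsets)]
///
/// let mut x = [5u16, 6, 7];
/// let ptr: *mut u16 = x.as_mut_ptr();
/// // Two bytes forward, then two bytes back, recovers the original pointer value.
/// let forward = ptr.wrapping_byte_add(2);
/// assert_eq!(forward.wrapping_byte_sub(2), ptr);
/// ```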
1253 #[must_use]
1254 #[inline(always)]
1255 #[unstable(feature = "pointer_byte_offsets", issue = "96283")]
1256 #[rustc_const_unstable(feature = "const_pointer_byte_offsets", issue = "96283")]
1257 pub const fn wrapping_byte_sub(self, count: usize) -> Self {
1258 from_raw_parts_mut::<T>(self.cast::<u8>().wrapping_sub(count).cast::<()>(), metadata(self))
1259 }
1260
dfeec247
XL
1261 /// Reads the value from `self` without moving it. This leaves the
1262 /// memory in `self` unchanged.
1263 ///
1264 /// See [`ptr::read`] for safety concerns and examples.
1265 ///
fc512014 1266 /// [`ptr::read`]: crate::ptr::read()
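/// # Examples
///
/// A minimal sketch, reading the first element of a local array:
///
/// ```
/// let mut s = [1u32, 2, 3];
/// let ptr: *mut u32 = s.as_mut_ptr();
/// unsafe {
///     assert_eq!(ptr.read(), 1);
/// }
/// ```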
dfeec247 1267 #[stable(feature = "pointer_methods", since = "1.26.0")]
5869c6ff 1268 #[rustc_const_unstable(feature = "const_ptr_read", issue = "80377")]
17df50a5 1269 #[inline(always)]
064997fb 1270 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
5869c6ff 1271 pub const unsafe fn read(self) -> T
dfeec247
XL
1272 where
1273 T: Sized,
1274 {
f035d41b
XL
 1275 // SAFETY: the caller must uphold the safety contract for `read`.
1276 unsafe { read(self) }
dfeec247
XL
1277 }
1278
1279 /// Performs a volatile read of the value from `self` without moving it. This
1280 /// leaves the memory in `self` unchanged.
1281 ///
1282 /// Volatile operations are intended to act on I/O memory, and are guaranteed
1283 /// to not be elided or reordered by the compiler across other volatile
1284 /// operations.
1285 ///
1286 /// See [`ptr::read_volatile`] for safety concerns and examples.
1287 ///
fc512014 1288 /// [`ptr::read_volatile`]: crate::ptr::read_volatile()
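/// # Examples
///
/// On ordinary memory a volatile read simply yields the stored value; this sketch
/// only illustrates the call shape:
///
/// ```
/// let mut x = 7u32;
/// let ptr: *mut u32 = &mut x;
/// unsafe {
///     assert_eq!(ptr.read_volatile(), 7);
/// }
/// ```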
dfeec247 1289 #[stable(feature = "pointer_methods", since = "1.26.0")]
17df50a5 1290 #[inline(always)]
064997fb 1291 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
dfeec247
XL
1292 pub unsafe fn read_volatile(self) -> T
1293 where
1294 T: Sized,
1295 {
f035d41b
XL
1296 // SAFETY: the caller must uphold the safety contract for `read_volatile`.
1297 unsafe { read_volatile(self) }
dfeec247
XL
1298 }
1299
1300 /// Reads the value from `self` without moving it. This leaves the
1301 /// memory in `self` unchanged.
1302 ///
1303 /// Unlike `read`, the pointer may be unaligned.
1304 ///
1305 /// See [`ptr::read_unaligned`] for safety concerns and examples.
1306 ///
fc512014 1307 /// [`ptr::read_unaligned`]: crate::ptr::read_unaligned()
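/// # Examples
///
/// A sketch reading a `u32` that sits at an odd (hence generally unaligned) byte offset:
///
/// ```
/// let mut data = [0u8; 8];
/// data[1..5].copy_from_slice(&0x1234_5678u32.to_ne_bytes());
/// // A `*mut u32` one byte into the buffer is typically not aligned for `u32`;
/// // `read_unaligned` handles that.
/// let ptr = unsafe { data.as_mut_ptr().add(1) } as *mut u32;
/// unsafe {
///     assert_eq!(ptr.read_unaligned(), 0x1234_5678);
/// }
/// ```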
dfeec247 1308 #[stable(feature = "pointer_methods", since = "1.26.0")]
5869c6ff 1309 #[rustc_const_unstable(feature = "const_ptr_read", issue = "80377")]
17df50a5 1310 #[inline(always)]
064997fb 1311 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
5869c6ff 1312 pub const unsafe fn read_unaligned(self) -> T
dfeec247
XL
1313 where
1314 T: Sized,
1315 {
f035d41b
XL
1316 // SAFETY: the caller must uphold the safety contract for `read_unaligned`.
1317 unsafe { read_unaligned(self) }
dfeec247
XL
1318 }
1319
 1320 /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
1321 /// and destination may overlap.
1322 ///
1323 /// NOTE: this has the *same* argument order as [`ptr::copy`].
1324 ///
1325 /// See [`ptr::copy`] for safety concerns and examples.
1326 ///
fc512014 1327 /// [`ptr::copy`]: crate::ptr::copy()
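/// # Examples
///
/// A short sketch copying four bytes between two local buffers:
///
/// ```
/// let mut from = [1u8, 2, 3, 4];
/// let mut to = [0u8; 4];
/// unsafe {
///     from.as_mut_ptr().copy_to(to.as_mut_ptr(), 4);
/// }
/// assert_eq!(to, [1, 2, 3, 4]);
/// ```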
923072b8 1328 #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.63.0")]
dfeec247 1329 #[stable(feature = "pointer_methods", since = "1.26.0")]
17df50a5 1330 #[inline(always)]
064997fb 1331 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
6a06907d 1332 pub const unsafe fn copy_to(self, dest: *mut T, count: usize)
dfeec247
XL
1333 where
1334 T: Sized,
1335 {
f035d41b
XL
1336 // SAFETY: the caller must uphold the safety contract for `copy`.
1337 unsafe { copy(self, dest, count) }
dfeec247
XL
1338 }
1339
 1340 /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
1341 /// and destination may *not* overlap.
1342 ///
1343 /// NOTE: this has the *same* argument order as [`ptr::copy_nonoverlapping`].
1344 ///
1345 /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
1346 ///
fc512014 1347 /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
923072b8 1348 #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.63.0")]
dfeec247 1349 #[stable(feature = "pointer_methods", since = "1.26.0")]
17df50a5 1350 #[inline(always)]
064997fb 1351 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
6a06907d 1352 pub const unsafe fn copy_to_nonoverlapping(self, dest: *mut T, count: usize)
dfeec247
XL
1353 where
1354 T: Sized,
1355 {
f035d41b
XL
1356 // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
1357 unsafe { copy_nonoverlapping(self, dest, count) }
dfeec247
XL
1358 }
1359
 1360 /// Copies `count * size_of::<T>()` bytes from `src` to `self`. The source
1361 /// and destination may overlap.
1362 ///
1363 /// NOTE: this has the *opposite* argument order of [`ptr::copy`].
1364 ///
1365 /// See [`ptr::copy`] for safety concerns and examples.
1366 ///
fc512014 1367 /// [`ptr::copy`]: crate::ptr::copy()
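/// # Examples
///
/// A short sketch filling a zeroed buffer from a source slice:
///
/// ```
/// let src = [1u8, 2, 3, 4];
/// let mut dst = [0u8; 4];
/// unsafe {
///     dst.as_mut_ptr().copy_from(src.as_ptr(), 4);
/// }
/// assert_eq!(dst, src);
/// ```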
923072b8 1368 #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.63.0")]
dfeec247 1369 #[stable(feature = "pointer_methods", since = "1.26.0")]
17df50a5 1370 #[inline(always)]
064997fb 1371 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
6a06907d 1372 pub const unsafe fn copy_from(self, src: *const T, count: usize)
dfeec247
XL
1373 where
1374 T: Sized,
1375 {
f035d41b
XL
1376 // SAFETY: the caller must uphold the safety contract for `copy`.
1377 unsafe { copy(src, self, count) }
dfeec247
XL
1378 }
1379
 1380 /// Copies `count * size_of::<T>()` bytes from `src` to `self`. The source
1381 /// and destination may *not* overlap.
1382 ///
1383 /// NOTE: this has the *opposite* argument order of [`ptr::copy_nonoverlapping`].
1384 ///
1385 /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
1386 ///
fc512014 1387 /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
923072b8 1388 #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.63.0")]
dfeec247 1389 #[stable(feature = "pointer_methods", since = "1.26.0")]
17df50a5 1390 #[inline(always)]
064997fb 1391 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
6a06907d 1392 pub const unsafe fn copy_from_nonoverlapping(self, src: *const T, count: usize)
dfeec247
XL
1393 where
1394 T: Sized,
1395 {
f035d41b
XL
1396 // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
1397 unsafe { copy_nonoverlapping(src, self, count) }
dfeec247
XL
1398 }
1399
1400 /// Executes the destructor (if any) of the pointed-to value.
1401 ///
1402 /// See [`ptr::drop_in_place`] for safety concerns and examples.
1403 ///
fc512014 1404 /// [`ptr::drop_in_place`]: crate::ptr::drop_in_place()
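/// # Examples
///
/// A sketch running a destructor by hand for a value whose owner never drops it:
///
/// ```
/// use std::mem::MaybeUninit;
///
/// let mut slot = MaybeUninit::new(String::from("hello"));
/// let ptr: *mut String = slot.as_mut_ptr();
/// // `MaybeUninit` never drops its contents, so we run the destructor ourselves.
/// unsafe {
///     ptr.drop_in_place();
/// }
/// ```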
dfeec247 1405 #[stable(feature = "pointer_methods", since = "1.26.0")]
17df50a5 1406 #[inline(always)]
dfeec247 1407 pub unsafe fn drop_in_place(self) {
f035d41b
XL
1408 // SAFETY: the caller must uphold the safety contract for `drop_in_place`.
1409 unsafe { drop_in_place(self) }
dfeec247
XL
1410 }
1411
1412 /// Overwrites a memory location with the given value without reading or
1413 /// dropping the old value.
1414 ///
1415 /// See [`ptr::write`] for safety concerns and examples.
1416 ///
fc512014 1417 /// [`ptr::write`]: crate::ptr::write()
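/// # Examples
///
/// A minimal sketch overwriting an initialized integer:
///
/// ```
/// let mut x = 0u32;
/// let ptr: *mut u32 = &mut x;
/// unsafe {
///     ptr.write(42);
/// }
/// assert_eq!(x, 42);
/// ```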
dfeec247 1418 #[stable(feature = "pointer_methods", since = "1.26.0")]
136023e0 1419 #[rustc_const_unstable(feature = "const_ptr_write", issue = "86302")]
17df50a5 1420 #[inline(always)]
064997fb 1421 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
136023e0 1422 pub const unsafe fn write(self, val: T)
dfeec247
XL
1423 where
1424 T: Sized,
1425 {
f035d41b
XL
1426 // SAFETY: the caller must uphold the safety contract for `write`.
1427 unsafe { write(self, val) }
dfeec247
XL
1428 }
1429
1430 /// Invokes memset on the specified pointer, setting `count * size_of::<T>()`
1431 /// bytes of memory starting at `self` to `val`.
1432 ///
1433 /// See [`ptr::write_bytes`] for safety concerns and examples.
1434 ///
fc512014 1435 /// [`ptr::write_bytes`]: crate::ptr::write_bytes()
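/// # Examples
///
/// A sketch filling every byte of a `u32` buffer with the same value:
///
/// ```
/// let mut buf = [0u32; 4];
/// unsafe {
///     // Sets all 4 * size_of::<u32>() = 16 bytes to 0xfe.
///     buf.as_mut_ptr().write_bytes(0xfe, 4);
/// }
/// assert_eq!(buf, [u32::from_ne_bytes([0xfe; 4]); 4]);
/// ```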
923072b8 1436 #[doc(alias = "memset")]
dfeec247 1437 #[stable(feature = "pointer_methods", since = "1.26.0")]
a2a8927a 1438 #[rustc_const_unstable(feature = "const_ptr_write", issue = "86302")]
17df50a5 1439 #[inline(always)]
064997fb 1440 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
a2a8927a 1441 pub const unsafe fn write_bytes(self, val: u8, count: usize)
dfeec247
XL
1442 where
1443 T: Sized,
1444 {
f035d41b
XL
1445 // SAFETY: the caller must uphold the safety contract for `write_bytes`.
1446 unsafe { write_bytes(self, val, count) }
dfeec247
XL
1447 }
1448
1449 /// Performs a volatile write of a memory location with the given value without
1450 /// reading or dropping the old value.
1451 ///
1452 /// Volatile operations are intended to act on I/O memory, and are guaranteed
1453 /// to not be elided or reordered by the compiler across other volatile
1454 /// operations.
1455 ///
1456 /// See [`ptr::write_volatile`] for safety concerns and examples.
1457 ///
fc512014 1458 /// [`ptr::write_volatile`]: crate::ptr::write_volatile()
dfeec247 1459 #[stable(feature = "pointer_methods", since = "1.26.0")]
17df50a5 1460 #[inline(always)]
064997fb 1461 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
dfeec247
XL
1462 pub unsafe fn write_volatile(self, val: T)
1463 where
1464 T: Sized,
1465 {
f035d41b
XL
1466 // SAFETY: the caller must uphold the safety contract for `write_volatile`.
1467 unsafe { write_volatile(self, val) }
dfeec247
XL
1468 }
1469
1470 /// Overwrites a memory location with the given value without reading or
1471 /// dropping the old value.
1472 ///
1473 /// Unlike `write`, the pointer may be unaligned.
1474 ///
1475 /// See [`ptr::write_unaligned`] for safety concerns and examples.
1476 ///
fc512014 1477 /// [`ptr::write_unaligned`]: crate::ptr::write_unaligned()
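/// # Examples
///
/// A sketch writing a `u32` at an odd byte offset inside a byte buffer:
///
/// ```
/// let mut data = [0u8; 8];
/// // A `*mut u32` one byte into the buffer is typically not aligned for `u32`.
/// let ptr = unsafe { data.as_mut_ptr().add(1) } as *mut u32;
/// unsafe {
///     ptr.write_unaligned(0x1234_5678);
/// }
/// assert_eq!(data[1..5], 0x1234_5678u32.to_ne_bytes());
/// ```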
dfeec247 1478 #[stable(feature = "pointer_methods", since = "1.26.0")]
136023e0 1479 #[rustc_const_unstable(feature = "const_ptr_write", issue = "86302")]
17df50a5 1480 #[inline(always)]
064997fb 1481 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
6a06907d 1482 pub const unsafe fn write_unaligned(self, val: T)
dfeec247
XL
1483 where
1484 T: Sized,
1485 {
f035d41b
XL
1486 // SAFETY: the caller must uphold the safety contract for `write_unaligned`.
1487 unsafe { write_unaligned(self, val) }
dfeec247
XL
1488 }
1489
1490 /// Replaces the value at `self` with `src`, returning the old
1491 /// value, without dropping either.
1492 ///
1493 /// See [`ptr::replace`] for safety concerns and examples.
1494 ///
fc512014 1495 /// [`ptr::replace`]: crate::ptr::replace()
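/// # Examples
///
/// A minimal sketch swapping in a new value and keeping the old one:
///
/// ```
/// let mut x = 5;
/// let ptr: *mut i32 = &mut x;
/// let old = unsafe { ptr.replace(10) };
/// assert_eq!(old, 5);
/// assert_eq!(x, 10);
/// ```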
dfeec247 1496 #[stable(feature = "pointer_methods", since = "1.26.0")]
17df50a5 1497 #[inline(always)]
dfeec247
XL
1498 pub unsafe fn replace(self, src: T) -> T
1499 where
1500 T: Sized,
1501 {
f035d41b
XL
1502 // SAFETY: the caller must uphold the safety contract for `replace`.
1503 unsafe { replace(self, src) }
dfeec247
XL
1504 }
1505
1506 /// Swaps the values at two mutable locations of the same type, without
1507 /// deinitializing either. They may overlap, unlike `mem::swap` which is
1508 /// otherwise equivalent.
1509 ///
1510 /// See [`ptr::swap`] for safety concerns and examples.
1511 ///
fc512014 1512 /// [`ptr::swap`]: crate::ptr::swap()
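/// # Examples
///
/// A minimal sketch exchanging two non-overlapping locals:
///
/// ```
/// let mut a = 1u8;
/// let mut b = 2u8;
/// let pa: *mut u8 = &mut a;
/// let pb: *mut u8 = &mut b;
/// unsafe {
///     pa.swap(pb);
/// }
/// assert_eq!((a, b), (2, 1));
/// ```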
dfeec247 1513 #[stable(feature = "pointer_methods", since = "1.26.0")]
3c0e092e 1514 #[rustc_const_unstable(feature = "const_swap", issue = "83163")]
17df50a5 1515 #[inline(always)]
3c0e092e 1516 pub const unsafe fn swap(self, with: *mut T)
dfeec247
XL
1517 where
1518 T: Sized,
1519 {
f035d41b
XL
1520 // SAFETY: the caller must uphold the safety contract for `swap`.
1521 unsafe { swap(self, with) }
dfeec247
XL
1522 }
1523
1524 /// Computes the offset that needs to be applied to the pointer in order to make it aligned to
1525 /// `align`.
1526 ///
1527 /// If it is not possible to align the pointer, the implementation returns
ba9703b0
XL
1528 /// `usize::MAX`. It is permissible for the implementation to *always*
1529 /// return `usize::MAX`. Only your algorithm's performance can depend
dfeec247
XL
1530 /// on getting a usable offset here, not its correctness.
1531 ///
1532 /// The offset is expressed in number of `T` elements, and not bytes. The value returned can be
1533 /// used with the `wrapping_add` method.
1534 ///
1535 /// There are no guarantees whatsoever that offsetting the pointer will not overflow or go
1536 /// beyond the allocation that the pointer points into. It is up to the caller to ensure that
1537 /// the returned offset is correct in all terms other than alignment.
1538 ///
1539 /// # Panics
1540 ///
1541 /// The function panics if `align` is not a power-of-two.
1542 ///
1543 /// # Examples
1544 ///
1545 /// Accessing adjacent `u8` as `u16`
1546 ///
1547 /// ```
1548 /// # fn foo(n: usize) {
1549 /// # use std::mem::align_of;
1550 /// # unsafe {
1551 /// let x = [5u8, 6u8, 7u8, 8u8, 9u8];
1b1a35ee 1552 /// let ptr = x.as_ptr().add(n) as *const u8;
dfeec247
XL
1553 /// let offset = ptr.align_offset(align_of::<u16>());
1554 /// if offset < x.len() - n - 1 {
1555 /// let u16_ptr = ptr.add(offset) as *const u16;
1556 /// assert_ne!(*u16_ptr, 500);
1557 /// } else {
1558 /// // while the pointer can be aligned via `offset`, it would point
1559 /// // outside the allocation
1560 /// }
1561 /// # } }
1562 /// ```
1563 #[stable(feature = "align_offset", since = "1.36.0")]
3c0e092e
XL
1564 #[rustc_const_unstable(feature = "const_align_offset", issue = "90962")]
1565 pub const fn align_offset(self, align: usize) -> usize
dfeec247
XL
1566 where
1567 T: Sized,
1568 {
1569 if !align.is_power_of_two() {
1570 panic!("align_offset: align is not a power-of-two");
1571 }
3c0e092e
XL
1572
1573 fn rt_impl<T>(p: *mut T, align: usize) -> usize {
1574 // SAFETY: `align` has been checked to be a power of 2 above
1575 unsafe { align_offset(p, align) }
1576 }
1577
1578 const fn ctfe_impl<T>(_: *mut T, _: usize) -> usize {
1579 usize::MAX
1580 }
1581
1582 // SAFETY:
923072b8 1583 // It is permissible for `align_offset` to always return `usize::MAX`,
3c0e092e
XL
1584 // algorithm correctness can not depend on `align_offset` returning non-max values.
1585 //
1586 // As such the behaviour can't change after replacing `align_offset` with `usize::MAX`, only performance can.
1587 unsafe { intrinsics::const_eval_select((self, align), ctfe_impl, rt_impl) }
dfeec247 1588 }
923072b8
FG
1589
1590 /// Returns whether the pointer is properly aligned for `T`.
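/// # Examples
///
/// A minimal sketch; a `u64` local is always suitably aligned for `u64`:
///
/// ```
/// #![feature(pointer_is_aligned)]
///
/// let mut x = 0u64;
/// let ptr: *mut u64 = &mut x;
/// assert!(ptr.is_aligned());
/// ```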
1591 #[must_use]
1592 #[inline]
1593 #[unstable(feature = "pointer_is_aligned", issue = "96284")]
1594 pub fn is_aligned(self) -> bool
1595 where
1596 T: Sized,
1597 {
1598 self.is_aligned_to(core::mem::align_of::<T>())
1599 }
1600
1601 /// Returns whether the pointer is aligned to `align`.
1602 ///
1603 /// For non-`Sized` pointees this operation considers only the data pointer,
1604 /// ignoring the metadata.
1605 ///
1606 /// # Panics
1607 ///
1608 /// The function panics if `align` is not a power-of-two (this includes 0).
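/// # Examples
///
/// A minimal sketch; every pointer is aligned to 1, and a `u64` local is aligned
/// to `align_of::<u64>()`:
///
/// ```
/// #![feature(pointer_is_aligned)]
/// use std::mem::align_of;
///
/// let mut x = 0u64;
/// let ptr: *mut u64 = &mut x;
/// assert!(ptr.is_aligned_to(1));
/// assert!(ptr.is_aligned_to(align_of::<u64>()));
/// ```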
1609 #[must_use]
1610 #[inline]
1611 #[unstable(feature = "pointer_is_aligned", issue = "96284")]
1612 pub fn is_aligned_to(self, align: usize) -> bool {
1613 if !align.is_power_of_two() {
1614 panic!("is_aligned_to: align is not a power-of-two");
1615 }
1616
1617 // SAFETY: `is_power_of_two()` will return `false` for zero.
1618 unsafe { core::intrinsics::assume(align != 0) };
1619
1620 // Cast is needed for `T: !Sized`
1621 self.cast::<u8>().addr() % align == 0
1622 }
dfeec247
XL
1623}
1624
ba9703b0
XL
1625impl<T> *mut [T] {
1626 /// Returns the length of a raw slice.
1627 ///
1628 /// The returned value is the number of **elements**, not the number of bytes.
1629 ///
1630 /// This function is safe, even when the raw slice cannot be cast to a slice
1631 /// reference because the pointer is null or unaligned.
1632 ///
1633 /// # Examples
1634 ///
1635 /// ```rust
1636 /// #![feature(slice_ptr_len)]
ba9703b0
XL
1637 /// use std::ptr;
1638 ///
1639 /// let slice: *mut [i8] = ptr::slice_from_raw_parts_mut(ptr::null_mut(), 3);
1640 /// assert_eq!(slice.len(), 3);
1641 /// ```
17df50a5 1642 #[inline(always)]
ba9703b0
XL
1643 #[unstable(feature = "slice_ptr_len", issue = "71146")]
1644 #[rustc_const_unstable(feature = "const_slice_ptr_len", issue = "71146")]
1645 pub const fn len(self) -> usize {
6a06907d 1646 metadata(self)
ba9703b0 1647 }
3dfed10e 1648
923072b8
FG
1649 /// Returns `true` if the raw slice has a length of 0.
1650 ///
1651 /// # Examples
1652 ///
1653 /// ```
1654 /// #![feature(slice_ptr_len)]
1655 ///
1656 /// let mut a = [1, 2, 3];
1657 /// let ptr = &mut a as *mut [_];
1658 /// assert!(!ptr.is_empty());
1659 /// ```
1660 #[inline(always)]
1661 #[unstable(feature = "slice_ptr_len", issue = "71146")]
1662 #[rustc_const_unstable(feature = "const_slice_ptr_len", issue = "71146")]
1663 pub const fn is_empty(self) -> bool {
1664 self.len() == 0
1665 }
1666
1667 /// Divides one mutable raw slice into two at an index.
1668 ///
1669 /// The first will contain all indices from `[0, mid)` (excluding
1670 /// the index `mid` itself) and the second will contain all
1671 /// indices from `[mid, len)` (excluding the index `len` itself).
1672 ///
1673 /// # Panics
1674 ///
1675 /// Panics if `mid > len`.
1676 ///
1677 /// # Safety
1678 ///
1679 /// `mid` must be [in-bounds] of the underlying [allocated object].
 1680 /// This means `self` must be dereferenceable and span a single allocation
1681 /// that is at least `mid * size_of::<T>()` bytes long. Not upholding these
1682 /// requirements is *[undefined behavior]* even if the resulting pointers are not used.
1683 ///
 1684 /// Since `len` being in-bounds is not a safety invariant of `*mut [T]`, the
1685 /// safety requirements of this method are the same as for [`split_at_mut_unchecked`].
1686 /// The explicit bounds check is only as useful as `len` is correct.
1687 ///
1688 /// [`split_at_mut_unchecked`]: #method.split_at_mut_unchecked
1689 /// [in-bounds]: #method.add
1690 /// [allocated object]: crate::ptr#allocated-object
1691 /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
1692 ///
1693 /// # Examples
1694 ///
1695 /// ```
1696 /// #![feature(raw_slice_split)]
1697 /// #![feature(slice_ptr_get)]
1698 ///
1699 /// let mut v = [1, 0, 3, 0, 5, 6];
1700 /// let ptr = &mut v as *mut [_];
1701 /// unsafe {
1702 /// let (left, right) = ptr.split_at_mut(2);
1703 /// assert_eq!(&*left, [1, 0]);
1704 /// assert_eq!(&*right, [3, 0, 5, 6]);
1705 /// }
1706 /// ```
1707 #[inline(always)]
1708 #[track_caller]
1709 #[unstable(feature = "raw_slice_split", issue = "95595")]
1710 pub unsafe fn split_at_mut(self, mid: usize) -> (*mut [T], *mut [T]) {
1711 assert!(mid <= self.len());
1712 // SAFETY: The assert above is only a safety-net as long as `self.len()` is correct
1713 // The actual safety requirements of this function are the same as for `split_at_mut_unchecked`
1714 unsafe { self.split_at_mut_unchecked(mid) }
1715 }
1716
1717 /// Divides one mutable raw slice into two at an index, without doing bounds checking.
1718 ///
1719 /// The first will contain all indices from `[0, mid)` (excluding
1720 /// the index `mid` itself) and the second will contain all
1721 /// indices from `[mid, len)` (excluding the index `len` itself).
1722 ///
1723 /// # Safety
1724 ///
1725 /// `mid` must be [in-bounds] of the underlying [allocated object].
 1726 /// This means `self` must be dereferenceable and span a single allocation
1727 /// that is at least `mid * size_of::<T>()` bytes long. Not upholding these
1728 /// requirements is *[undefined behavior]* even if the resulting pointers are not used.
1729 ///
1730 /// [in-bounds]: #method.add
1731 /// [out-of-bounds index]: #method.add
1732 /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
1733 ///
1734 /// # Examples
1735 ///
1736 /// ```
1737 /// #![feature(raw_slice_split)]
1738 ///
1739 /// let mut v = [1, 0, 3, 0, 5, 6];
1740 /// // scoped to restrict the lifetime of the borrows
1741 /// unsafe {
1742 /// let ptr = &mut v as *mut [_];
1743 /// let (left, right) = ptr.split_at_mut_unchecked(2);
1744 /// assert_eq!(&*left, [1, 0]);
1745 /// assert_eq!(&*right, [3, 0, 5, 6]);
1746 /// (&mut *left)[1] = 2;
1747 /// (&mut *right)[1] = 4;
1748 /// }
1749 /// assert_eq!(v, [1, 2, 3, 4, 5, 6]);
1750 /// ```
1751 #[inline(always)]
1752 #[unstable(feature = "raw_slice_split", issue = "95595")]
1753 pub unsafe fn split_at_mut_unchecked(self, mid: usize) -> (*mut [T], *mut [T]) {
1754 let len = self.len();
1755 let ptr = self.as_mut_ptr();
1756
1757 // SAFETY: Caller must pass a valid pointer and an index that is in-bounds.
1758 let tail = unsafe { ptr.add(mid) };
1759 (
1760 crate::ptr::slice_from_raw_parts_mut(ptr, mid),
1761 crate::ptr::slice_from_raw_parts_mut(tail, len - mid),
1762 )
1763 }
1764
3dfed10e
XL
1765 /// Returns a raw pointer to the slice's buffer.
1766 ///
1767 /// This is equivalent to casting `self` to `*mut T`, but more type-safe.
1768 ///
1769 /// # Examples
1770 ///
1771 /// ```rust
1772 /// #![feature(slice_ptr_get)]
1773 /// use std::ptr;
1774 ///
1775 /// let slice: *mut [i8] = ptr::slice_from_raw_parts_mut(ptr::null_mut(), 3);
5e7ed085 1776 /// assert_eq!(slice.as_mut_ptr(), ptr::null_mut());
3dfed10e 1777 /// ```
17df50a5 1778 #[inline(always)]
3dfed10e
XL
1779 #[unstable(feature = "slice_ptr_get", issue = "74265")]
1780 #[rustc_const_unstable(feature = "slice_ptr_get", issue = "74265")]
1781 pub const fn as_mut_ptr(self) -> *mut T {
1782 self as *mut T
1783 }
1784
1785 /// Returns a raw pointer to an element or subslice, without doing bounds
1786 /// checking.
1787 ///
923072b8 1788 /// Calling this method with an [out-of-bounds index] or when `self` is not dereferenceable
3dfed10e
XL
1789 /// is *[undefined behavior]* even if the resulting pointer is not used.
1790 ///
923072b8 1791 /// [out-of-bounds index]: #method.add
3dfed10e
XL
1792 /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
1793 ///
1794 /// # Examples
1795 ///
1796 /// ```
1797 /// #![feature(slice_ptr_get)]
1798 ///
1799 /// let x = &mut [1, 2, 4] as *mut [i32];
1800 ///
1801 /// unsafe {
1802 /// assert_eq!(x.get_unchecked_mut(1), x.as_mut_ptr().add(1));
1803 /// }
1804 /// ```
1805 #[unstable(feature = "slice_ptr_get", issue = "74265")]
5e7ed085 1806 #[rustc_const_unstable(feature = "const_slice_index", issue = "none")]
17df50a5 1807 #[inline(always)]
5e7ed085 1808 pub const unsafe fn get_unchecked_mut<I>(self, index: I) -> *mut I::Output
3dfed10e 1809 where
5e7ed085 1810 I: ~const SliceIndex<[T]>,
3dfed10e 1811 {
a2a8927a 1812 // SAFETY: the caller ensures that `self` is dereferenceable and `index` in-bounds.
3dfed10e
XL
1813 unsafe { index.get_unchecked_mut(self) }
1814 }
1815
1816 /// Returns `None` if the pointer is null, or else returns a shared slice to
1817 /// the value wrapped in `Some`. In contrast to [`as_ref`], this does not require
 1818 /// the value to be initialized.
1819 ///
1820 /// For the mutable counterpart see [`as_uninit_slice_mut`].
1821 ///
1822 /// [`as_ref`]: #method.as_ref-1
1823 /// [`as_uninit_slice_mut`]: #method.as_uninit_slice_mut
1824 ///
1825 /// # Safety
1826 ///
17df50a5 1827 /// When calling this method, you have to ensure that *either* the pointer is null *or*
3dfed10e
XL
1828 /// all of the following is true:
1829 ///
1830 /// * The pointer must be [valid] for reads for `ptr.len() * mem::size_of::<T>()` many bytes,
1831 /// and it must be properly aligned. This means in particular:
1832 ///
cdc7bbd5 1833 /// * The entire memory range of this slice must be contained within a single [allocated object]!
3dfed10e
XL
1834 /// Slices can never span across multiple allocated objects.
1835 ///
1836 /// * The pointer must be aligned even for zero-length slices. One
1837 /// reason for this is that enum layout optimizations may rely on references
1838 /// (including slices of any length) being aligned and non-null to distinguish
1839 /// them from other data. You can obtain a pointer that is usable as `data`
1840 /// for zero-length slices using [`NonNull::dangling()`].
1841 ///
1842 /// * The total size `ptr.len() * mem::size_of::<T>()` of the slice must be no larger than `isize::MAX`.
1843 /// See the safety documentation of [`pointer::offset`].
1844 ///
1845 /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
1846 /// arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
04454e1e 1847 /// In particular, while this reference exists, the memory the pointer points to must
3dfed10e
XL
1848 /// not get mutated (except inside `UnsafeCell`).
1849 ///
1850 /// This applies even if the result of this method is unused!
1851 ///
1852 /// See also [`slice::from_raw_parts`][].
1853 ///
1854 /// [valid]: crate::ptr#safety
cdc7bbd5 1855 /// [allocated object]: crate::ptr#allocated-object
3dfed10e
XL
1856 #[inline]
1857 #[unstable(feature = "ptr_as_uninit", issue = "75402")]
a2a8927a
XL
1858 #[rustc_const_unstable(feature = "const_ptr_as_ref", issue = "91822")]
1859 pub const unsafe fn as_uninit_slice<'a>(self) -> Option<&'a [MaybeUninit<T>]> {
3dfed10e
XL
1860 if self.is_null() {
1861 None
1862 } else {
1863 // SAFETY: the caller must uphold the safety contract for `as_uninit_slice`.
1864 Some(unsafe { slice::from_raw_parts(self as *const MaybeUninit<T>, self.len()) })
1865 }
1866 }
1867
1868 /// Returns `None` if the pointer is null, or else returns a unique slice to
1869 /// the value wrapped in `Some`. In contrast to [`as_mut`], this does not require
 1870 /// the value to be initialized.
1871 ///
1872 /// For the shared counterpart see [`as_uninit_slice`].
1873 ///
1874 /// [`as_mut`]: #method.as_mut
1875 /// [`as_uninit_slice`]: #method.as_uninit_slice-1
1876 ///
1877 /// # Safety
1878 ///
17df50a5 1879 /// When calling this method, you have to ensure that *either* the pointer is null *or*
3dfed10e
XL
1880 /// all of the following is true:
1881 ///
1882 /// * The pointer must be [valid] for reads and writes for `ptr.len() * mem::size_of::<T>()`
1883 /// many bytes, and it must be properly aligned. This means in particular:
1884 ///
cdc7bbd5 1885 /// * The entire memory range of this slice must be contained within a single [allocated object]!
3dfed10e
XL
1886 /// Slices can never span across multiple allocated objects.
1887 ///
1888 /// * The pointer must be aligned even for zero-length slices. One
1889 /// reason for this is that enum layout optimizations may rely on references
1890 /// (including slices of any length) being aligned and non-null to distinguish
1891 /// them from other data. You can obtain a pointer that is usable as `data`
1892 /// for zero-length slices using [`NonNull::dangling()`].
1893 ///
1894 /// * The total size `ptr.len() * mem::size_of::<T>()` of the slice must be no larger than `isize::MAX`.
1895 /// See the safety documentation of [`pointer::offset`].
1896 ///
1897 /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
1898 /// arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
04454e1e 1899 /// In particular, while this reference exists, the memory the pointer points to must
3dfed10e
XL
1900 /// not get accessed (read or written) through any other pointer.
1901 ///
1902 /// This applies even if the result of this method is unused!
1903 ///
1904 /// See also [`slice::from_raw_parts_mut`][].
1905 ///
1906 /// [valid]: crate::ptr#safety
cdc7bbd5 1907 /// [allocated object]: crate::ptr#allocated-object
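/// # Examples
///
/// A sketch writing through the returned `MaybeUninit` slice into already-initialized
/// storage:
///
/// ```
/// #![feature(ptr_as_uninit)]
/// use std::mem::MaybeUninit;
/// use std::ptr;
///
/// let mut storage = [0u8; 4];
/// let raw: *mut [u8] = ptr::slice_from_raw_parts_mut(storage.as_mut_ptr(), 4);
/// unsafe {
///     let uninit: &mut [MaybeUninit<u8>] = raw.as_uninit_slice_mut().unwrap();
///     uninit[0] = MaybeUninit::new(7);
/// }
/// assert_eq!(storage[0], 7);
/// ```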
3dfed10e
XL
1908 #[inline]
1909 #[unstable(feature = "ptr_as_uninit", issue = "75402")]
a2a8927a
XL
1910 #[rustc_const_unstable(feature = "const_ptr_as_ref", issue = "91822")]
1911 pub const unsafe fn as_uninit_slice_mut<'a>(self) -> Option<&'a mut [MaybeUninit<T>]> {
3dfed10e
XL
1912 if self.is_null() {
1913 None
1914 } else {
1915 // SAFETY: the caller must uphold the safety contract for `as_uninit_slice_mut`.
1916 Some(unsafe { slice::from_raw_parts_mut(self as *mut MaybeUninit<T>, self.len()) })
1917 }
1918 }
ba9703b0
XL
1919}
1920
dfeec247
XL
1921// Equality for pointers
1922#[stable(feature = "rust1", since = "1.0.0")]
1923impl<T: ?Sized> PartialEq for *mut T {
17df50a5 1924 #[inline(always)]
dfeec247
XL
1925 fn eq(&self, other: &*mut T) -> bool {
1926 *self == *other
1927 }
1928}
1929
1930#[stable(feature = "rust1", since = "1.0.0")]
1931impl<T: ?Sized> Eq for *mut T {}
1932
1933#[stable(feature = "rust1", since = "1.0.0")]
1934impl<T: ?Sized> Ord for *mut T {
1935 #[inline]
1936 fn cmp(&self, other: &*mut T) -> Ordering {
1937 if self < other {
1938 Less
1939 } else if self == other {
1940 Equal
1941 } else {
1942 Greater
1943 }
1944 }
1945}
1946
1947#[stable(feature = "rust1", since = "1.0.0")]
1948impl<T: ?Sized> PartialOrd for *mut T {
17df50a5 1949 #[inline(always)]
dfeec247
XL
1950 fn partial_cmp(&self, other: &*mut T) -> Option<Ordering> {
1951 Some(self.cmp(other))
1952 }
1953
17df50a5 1954 #[inline(always)]
dfeec247
XL
1955 fn lt(&self, other: &*mut T) -> bool {
1956 *self < *other
1957 }
1958
17df50a5 1959 #[inline(always)]
dfeec247
XL
1960 fn le(&self, other: &*mut T) -> bool {
1961 *self <= *other
1962 }
1963
17df50a5 1964 #[inline(always)]
dfeec247
XL
1965 fn gt(&self, other: &*mut T) -> bool {
1966 *self > *other
1967 }
1968
17df50a5 1969 #[inline(always)]
dfeec247
XL
1970 fn ge(&self, other: &*mut T) -> bool {
1971 *self >= *other
1972 }
1973}