]>
Commit | Line | Data |
---|---|---|
dfeec247 XL |
1 | use super::*; |
2 | use crate::cmp::Ordering::{self, Equal, Greater, Less}; | |
3 | use crate::intrinsics; | |
4 | use crate::mem; | |
3dfed10e | 5 | use crate::slice::{self, SliceIndex}; |
dfeec247 | 6 | |
5e7ed085 | 7 | #[cfg_attr(bootstrap, lang = "const_ptr")] |
dfeec247 XL |
8 | impl<T: ?Sized> *const T { |
    /// Returns `true` if the pointer is null.
    ///
    /// Note that unsized types have many possible null pointers, as only the
    /// raw data pointer is considered, not their length, vtable, etc.
    /// Therefore, two pointers that are null may still not compare equal to
    /// each other.
    ///
    /// ## Behavior during const evaluation
    ///
    /// When this function is used during const evaluation, it may return `false` for pointers
    /// that turn out to be null at runtime. Specifically, when a pointer to some memory
    /// is offset beyond its bounds in such a way that the resulting pointer is null,
    /// the function will still return `false`. There is no way for CTFE to know
    /// the absolute position of that memory, so we cannot tell if the pointer is
    /// null or not.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let s: &str = "Follow the rabbit";
    /// let ptr: *const u8 = s.as_ptr();
    /// assert!(!ptr.is_null());
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[rustc_const_unstable(feature = "const_ptr_is_null", issue = "74939")]
    #[inline]
    pub const fn is_null(self) -> bool {
        // Compare via a cast to a thin pointer, so fat pointers are only
        // considering their "data" part for null-ness.
        // `guaranteed_eq` (rather than `==`) keeps this callable in const
        // contexts; at runtime it behaves exactly like `== null()`, which is
        // why the spurious-`false` caveat above only applies during CTFE.
        (self as *const u8).guaranteed_eq(null())
    }
42 | ||
43 | /// Casts to a pointer of another type. | |
44 | #[stable(feature = "ptr_cast", since = "1.38.0")] | |
45 | #[rustc_const_stable(feature = "const_ptr_cast", since = "1.38.0")] | |
46 | #[inline] | |
47 | pub const fn cast<U>(self) -> *const U { | |
48 | self as _ | |
49 | } | |
50 | ||
5e7ed085 FG |
    /// Use the pointer value in a new pointer of another type.
    ///
    /// In case `val` is a (fat) pointer to an unsized type, this operation
    /// will ignore the pointer part, whereas for (thin) pointers to sized
    /// types, this has the same effect as a simple cast.
    ///
    /// The resulting pointer will have provenance of `self`, i.e., for a fat
    /// pointer, this operation is semantically the same as creating a new
    /// fat pointer with the data pointer value of `self` but the metadata of
    /// `val`.
    ///
    /// # Examples
    ///
    /// This function is primarily useful for allowing byte-wise pointer
    /// arithmetic on potentially fat pointers:
    ///
    /// ```
    /// #![feature(set_ptr_value)]
    /// # use core::fmt::Debug;
    /// let arr: [i32; 3] = [1, 2, 3];
    /// let mut ptr = arr.as_ptr() as *const dyn Debug;
    /// let thin = ptr as *const u8;
    /// unsafe {
    ///     ptr = thin.add(8).with_metadata_of(ptr);
    ///     # assert_eq!(*(ptr as *const i32), 3);
    ///     println!("{:?}", &*ptr); // will print "3"
    /// }
    /// ```
    #[unstable(feature = "set_ptr_value", issue = "75091")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[inline]
    pub fn with_metadata_of<U>(self, mut val: *const U) -> *const U
    where
        U: ?Sized,
    {
        // View the local `val` as raw bytes so we can overwrite just its
        // leading (data-pointer) field, leaving any metadata untouched.
        let target = &mut val as *mut *const U as *mut *const u8;
        // SAFETY: In case of a thin pointer, this operation is identical
        // to a simple assignment. In case of a fat pointer, with the current
        // fat pointer layout implementation, the first field of such a
        // pointer is always the data pointer, which is likewise assigned.
        unsafe { *target = self as *const u8 };
        val
    }
94 | ||
5099ac24 FG |
95 | /// Changes constness without changing the type. |
96 | /// | |
97 | /// This is a bit safer than `as` because it wouldn't silently change the type if the code is | |
98 | /// refactored. | |
99 | #[unstable(feature = "ptr_const_cast", issue = "92675")] | |
100 | #[rustc_const_unstable(feature = "ptr_const_cast", issue = "92675")] | |
101 | pub const fn as_mut(self) -> *mut T { | |
102 | self as _ | |
103 | } | |
104 | ||
a2a8927a XL |
    /// Casts a pointer to its raw bits.
    ///
    /// This is equivalent to `as usize`, but is more specific to enhance readability.
    /// The inverse method is [`from_bits`](#method.from_bits).
    ///
    /// In particular, `*p as usize` and `p as usize` will both compile for
    /// pointers to numeric types but do very different things, so using this
    /// helps emphasize that reading the bits was intentional.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(ptr_to_from_bits)]
    /// let array = [13, 42];
    /// let p0: *const i32 = &array[0];
    /// assert_eq!(<*const _>::from_bits(p0.to_bits()), p0);
    /// let p1: *const i32 = &array[1];
    /// assert_eq!(p1.to_bits() - p0.to_bits(), 4);
    /// ```
    #[unstable(feature = "ptr_to_from_bits", issue = "91126")]
    pub fn to_bits(self) -> usize
    where
        T: Sized,
    {
        // A plain pointer-to-integer cast; like any `as usize` cast, this
        // semantically discards provenance information (see `addr`).
        self as usize
    }
131 | ||
    /// Creates a pointer from its raw bits.
    ///
    /// This is equivalent to `as *const T`, but is more specific to enhance readability.
    /// The inverse method is [`to_bits`](#method.to_bits).
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(ptr_to_from_bits)]
    /// use std::ptr::NonNull;
    /// let dangling: *const u8 = NonNull::dangling().as_ptr();
    /// assert_eq!(<*const u8>::from_bits(1), dangling);
    /// ```
    #[unstable(feature = "ptr_to_from_bits", issue = "91126")]
    pub fn from_bits(bits: usize) -> Self
    where
        T: Sized,
    {
        // A plain integer-to-pointer cast, as documented above.
        bits as Self
    }
152 | ||
5e7ed085 FG |
    /// Gets the "address" portion of the pointer.
    ///
    /// This is equivalent to `self as usize`, which semantically discards
    /// *provenance* and *address-space* information. To properly restore that information,
    /// use [`with_addr`][pointer::with_addr] or [`map_addr`][pointer::map_addr].
    ///
    /// On most platforms this will produce a value with the same bytes as the original
    /// pointer, because all the bytes are dedicated to describing the address.
    /// Platforms which need to store additional information in the pointer may
    /// perform a change of representation to produce a value containing only the address
    /// portion of the pointer. What that means is up to the platform to define.
    ///
    /// This API and its claimed semantics are part of the Strict Provenance experiment,
    /// see the [module documentation][crate::ptr] for details.
    #[must_use]
    #[inline]
    #[unstable(feature = "strict_provenance", issue = "95228")]
    pub fn addr(self) -> usize
    where
        T: Sized,
    {
        // FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic.
        // Until then, a plain `as` cast is the closest available spelling.
        self as usize
    }
177 | ||
178 | /// Creates a new pointer with the given address. | |
179 | /// | |
180 | /// This performs the same operation as an `addr as ptr` cast, but copies | |
181 | /// the *address-space* and *provenance* of `self` to the new pointer. | |
182 | /// This allows us to dynamically preserve and propagate this important | |
183 | /// information in a way that is otherwise impossible with a unary cast. | |
184 | /// | |
185 | /// This is equivalent to using [`wrapping_offset`][pointer::wrapping_offset] to offset | |
186 | /// `self` to the given address, and therefore has all the same capabilities and restrictions. | |
187 | /// | |
188 | /// This API and its claimed semantics are part of the Strict Provenance experiment, | |
189 | /// see the [module documentation][crate::ptr] for details. | |
190 | #[must_use] | |
191 | #[inline] | |
192 | #[unstable(feature = "strict_provenance", issue = "95228")] | |
193 | pub fn with_addr(self, addr: usize) -> Self | |
194 | where | |
195 | T: Sized, | |
196 | { | |
197 | // FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic. | |
198 | // | |
199 | // In the mean-time, this operation is defined to be "as if" it was | |
200 | // a wrapping_offset, so we can emulate it as such. This should properly | |
201 | // restore pointer provenance even under today's compiler. | |
202 | let self_addr = self.addr() as isize; | |
203 | let dest_addr = addr as isize; | |
204 | let offset = dest_addr.wrapping_sub(self_addr); | |
205 | ||
206 | // This is the canonical desugarring of this operation | |
207 | self.cast::<u8>().wrapping_offset(offset).cast::<T>() | |
208 | } | |
209 | ||
210 | /// Creates a new pointer by mapping `self`'s address to a new one. | |
211 | /// | |
212 | /// This is a convenience for [`with_addr`][pointer::with_addr], see that method for details. | |
213 | /// | |
214 | /// This API and its claimed semantics are part of the Strict Provenance experiment, | |
215 | /// see the [module documentation][crate::ptr] for details. | |
216 | #[must_use] | |
217 | #[inline] | |
218 | #[unstable(feature = "strict_provenance", issue = "95228")] | |
219 | pub fn map_addr(self, f: impl FnOnce(usize) -> usize) -> Self | |
220 | where | |
221 | T: Sized, | |
222 | { | |
223 | self.with_addr(f(self.addr())) | |
224 | } | |
225 | ||
    /// Decompose a (possibly wide) pointer into its address and metadata components.
    ///
    /// The pointer can be later reconstructed with [`from_raw_parts`].
    #[unstable(feature = "ptr_metadata", issue = "81513")]
    #[rustc_const_unstable(feature = "ptr_metadata", issue = "81513")]
    #[inline]
    pub const fn to_raw_parts(self) -> (*const (), <T as super::Pointee>::Metadata) {
        // `cast` keeps only the data-pointer half; `metadata` extracts the
        // metadata half (length, vtable, ... — or `()` for thin pointers).
        (self.cast(), metadata(self))
    }
235 | ||
3dfed10e XL |
    /// Returns `None` if the pointer is null, or else returns a shared reference to
    /// the value wrapped in `Some`. If the value may be uninitialized, [`as_uninit_ref`]
    /// must be used instead.
    ///
    /// [`as_uninit_ref`]: #method.as_uninit_ref
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// all of the following is true:
    ///
    /// * The pointer must be properly aligned.
    ///
    /// * It must be "dereferenceable" in the sense defined in [the module documentation].
    ///
    /// * The pointer must point to an initialized instance of `T`.
    ///
    /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
    ///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
    ///   In particular, for the duration of this lifetime, the memory the pointer points to must
    ///   not get mutated (except inside `UnsafeCell`).
    ///
    /// This applies even if the result of this method is unused!
    /// (The part about being initialized is not yet fully decided, but until
    /// it is, the only safe approach is to ensure that they are indeed initialized.)
    ///
    /// [the module documentation]: crate::ptr#safety
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let ptr: *const u8 = &10u8 as *const u8;
    ///
    /// unsafe {
    ///     if let Some(val_back) = ptr.as_ref() {
    ///         println!("We got back the value: {val_back}!");
    ///     }
    /// }
    /// ```
    ///
    /// # Null-unchecked version
    ///
    /// If you are sure the pointer can never be null and are looking for some kind of
    /// `as_ref_unchecked` that returns the `&T` instead of `Option<&T>`, know that you can
    /// dereference the pointer directly.
    ///
    /// ```
    /// let ptr: *const u8 = &10u8 as *const u8;
    ///
    /// unsafe {
    ///     let val_back = &*ptr;
    ///     println!("We got back the value: {val_back}!");
    /// }
    /// ```
    #[stable(feature = "ptr_as_ref", since = "1.9.0")]
    #[rustc_const_unstable(feature = "const_ptr_as_ref", issue = "91822")]
    #[inline]
    pub const unsafe fn as_ref<'a>(self) -> Option<&'a T> {
        // SAFETY: the caller must guarantee that `self` is valid
        // for a reference if it isn't null.
        // (Note: `is_null` may spuriously report `false` during const
        // evaluation; see its docs. The caller's contract covers that case.)
        if self.is_null() { None } else { unsafe { Some(&*self) } }
    }
300 | ||
3dfed10e XL |
    /// Returns `None` if the pointer is null, or else returns a shared reference to
    /// the value wrapped in `Some`. In contrast to [`as_ref`], this does not require
    /// that the value has to be initialized.
    ///
    /// [`as_ref`]: #method.as_ref
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// all of the following is true:
    ///
    /// * The pointer must be properly aligned.
    ///
    /// * It must be "dereferenceable" in the sense defined in [the module documentation].
    ///
    /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
    ///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
    ///   In particular, for the duration of this lifetime, the memory the pointer points to must
    ///   not get mutated (except inside `UnsafeCell`).
    ///
    /// This applies even if the result of this method is unused!
    ///
    /// [the module documentation]: crate::ptr#safety
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// #![feature(ptr_as_uninit)]
    ///
    /// let ptr: *const u8 = &10u8 as *const u8;
    ///
    /// unsafe {
    ///     if let Some(val_back) = ptr.as_uninit_ref() {
    ///         println!("We got back the value: {}!", val_back.assume_init());
    ///     }
    /// }
    /// ```
    #[inline]
    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
    #[rustc_const_unstable(feature = "const_ptr_as_ref", issue = "91822")]
    pub const unsafe fn as_uninit_ref<'a>(self) -> Option<&'a MaybeUninit<T>>
    where
        T: Sized,
    {
        // SAFETY: the caller must guarantee that `self` meets all the
        // requirements for a reference. Casting to `MaybeUninit<T>` (same
        // layout as `T`) is what lifts the initialization requirement.
        if self.is_null() { None } else { Some(unsafe { &*(self as *const MaybeUninit<T>) }) }
    }
351 | ||
dfeec247 XL |
    /// Calculates the offset from a pointer.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined
    /// Behavior:
    ///
    /// * Both the starting and resulting pointer must be either in bounds or one
    ///   byte past the end of the same [allocated object].
    ///
    /// * The computed offset, **in bytes**, cannot overflow an `isize`.
    ///
    /// * The offset being in bounds cannot rely on "wrapping around" the address
    ///   space. That is, the infinite-precision sum, **in bytes** must fit in a usize.
    ///
    /// The compiler and standard library generally tries to ensure allocations
    /// never reach a size where an offset is a concern. For instance, `Vec`
    /// and `Box` ensure they never allocate more than `isize::MAX` bytes, so
    /// `vec.as_ptr().add(vec.len())` is always safe.
    ///
    /// Most platforms fundamentally can't even construct such an allocation.
    /// For instance, no known 64-bit platform can ever serve a request
    /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
    /// However, some 32-bit and 16-bit platforms may successfully serve a request for
    /// more than `isize::MAX` bytes with things like Physical Address
    /// Extension. As such, memory acquired directly from allocators or memory
    /// mapped files *may* be too large to handle with this function.
    ///
    /// Consider using [`wrapping_offset`] instead if these constraints are
    /// difficult to satisfy. The only advantage of this method is that it
    /// enables more aggressive compiler optimizations.
    ///
    /// [`wrapping_offset`]: #method.wrapping_offset
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let s: &str = "123";
    /// let ptr: *const u8 = s.as_ptr();
    ///
    /// unsafe {
    ///     println!("{}", *ptr.offset(1) as char);
    ///     println!("{}", *ptr.offset(2) as char);
    /// }
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    pub const unsafe fn offset(self, count: isize) -> *const T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `offset`.
        // The intrinsic is what lets LLVM assume the result stays in bounds.
        unsafe { intrinsics::offset(self, count) }
    }
414 | ||
    /// Calculates the offset from a pointer using wrapping arithmetic.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// This operation itself is always safe, but using the resulting pointer is not.
    ///
    /// The resulting pointer "remembers" the [allocated object] that `self` points to; it must not
    /// be used to read or write other allocated objects.
    ///
    /// In other words, `let z = x.wrapping_offset((y as isize) - (x as isize))` does *not* make `z`
    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
    /// `x` and `y` point into the same allocated object.
    ///
    /// Compared to [`offset`], this method basically delays the requirement of staying within the
    /// same allocated object: [`offset`] is immediate Undefined Behavior when crossing object
    /// boundaries; `wrapping_offset` produces a pointer but still leads to Undefined Behavior if a
    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`offset`]
    /// can be optimized better and is thus preferable in performance-sensitive code.
    ///
    /// The delayed check only considers the value of the pointer that was dereferenced, not the
    /// intermediate values used during the computation of the final result. For example,
    /// `x.wrapping_offset(o).wrapping_offset(o.wrapping_neg())` is always the same as `x`. In other
    /// words, leaving the allocated object and then re-entering it later is permitted.
    ///
    /// [`offset`]: #method.offset
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// // Iterate using a raw pointer in increments of two elements
    /// let data = [1u8, 2, 3, 4, 5];
    /// let mut ptr: *const u8 = data.as_ptr();
    /// let step = 2;
    /// let end_rounded_up = ptr.wrapping_offset(6);
    ///
    /// // This loop prints "1, 3, 5, "
    /// while ptr != end_rounded_up {
    ///     unsafe {
    ///         print!("{}, ", *ptr);
    ///     }
    ///     ptr = ptr.wrapping_offset(step);
    /// }
    /// ```
    #[stable(feature = "ptr_wrapping_offset", since = "1.16.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    pub const fn wrapping_offset(self, count: isize) -> *const T
    where
        T: Sized,
    {
        // SAFETY: the `arith_offset` intrinsic has no prerequisites to be called;
        // unlike `offset`, it makes no in-bounds promise to the optimizer.
        unsafe { intrinsics::arith_offset(self, count) }
    }
476 | ||
    /// Calculates the distance between two pointers. The returned value is in
    /// units of T: the distance in bytes is divided by `mem::size_of::<T>()`.
    ///
    /// This function is the inverse of [`offset`].
    ///
    /// [`offset`]: #method.offset
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined
    /// Behavior:
    ///
    /// * Both the starting and other pointer must be either in bounds or one
    ///   byte past the end of the same [allocated object].
    ///
    /// * Both pointers must be *derived from* a pointer to the same object.
    ///   (See below for an example.)
    ///
    /// * The distance between the pointers, in bytes, must be an exact multiple
    ///   of the size of `T`.
    ///
    /// * The distance between the pointers, **in bytes**, cannot overflow an `isize`.
    ///
    /// * The distance being in bounds cannot rely on "wrapping around" the address space.
    ///
    /// Rust types are never larger than `isize::MAX` and Rust allocations never wrap around the
    /// address space, so two pointers within some value of any Rust type `T` will always satisfy
    /// the last two conditions. The standard library also generally ensures that allocations
    /// never reach a size where an offset is a concern. For instance, `Vec` and `Box` ensure they
    /// never allocate more than `isize::MAX` bytes, so `ptr_into_vec.offset_from(vec.as_ptr())`
    /// always satisfies the last two conditions.
    ///
    /// Most platforms fundamentally can't even construct such a large allocation.
    /// For instance, no known 64-bit platform can ever serve a request
    /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
    /// However, some 32-bit and 16-bit platforms may successfully serve a request for
    /// more than `isize::MAX` bytes with things like Physical Address
    /// Extension. As such, memory acquired directly from allocators or memory
    /// mapped files *may* be too large to handle with this function.
    /// (Note that [`offset`] and [`add`] also have a similar limitation and hence cannot be used on
    /// such large allocations either.)
    ///
    /// [`add`]: #method.add
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Panics
    ///
    /// This function panics if `T` is a Zero-Sized Type ("ZST").
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let a = [0; 5];
    /// let ptr1: *const i32 = &a[1];
    /// let ptr2: *const i32 = &a[3];
    /// unsafe {
    ///     assert_eq!(ptr2.offset_from(ptr1), 2);
    ///     assert_eq!(ptr1.offset_from(ptr2), -2);
    ///     assert_eq!(ptr1.offset(2), ptr2);
    ///     assert_eq!(ptr2.offset(-2), ptr1);
    /// }
    /// ```
    ///
    /// *Incorrect* usage:
    ///
    /// ```rust,no_run
    /// let ptr1 = Box::into_raw(Box::new(0u8)) as *const u8;
    /// let ptr2 = Box::into_raw(Box::new(1u8)) as *const u8;
    /// let diff = (ptr2 as isize).wrapping_sub(ptr1 as isize);
    /// // Make ptr2_other an "alias" of ptr2, but derived from ptr1.
    /// let ptr2_other = (ptr1 as *const u8).wrapping_offset(diff);
    /// assert_eq!(ptr2 as usize, ptr2_other as usize);
    /// // Since ptr2_other and ptr2 are derived from pointers to different objects,
    /// // computing their offset is undefined behavior, even though
    /// // they point to the same address!
    /// unsafe {
    ///     let zero = ptr2_other.offset_from(ptr2); // Undefined Behavior
    /// }
    /// ```
    #[stable(feature = "ptr_offset_from", since = "1.47.0")]
    #[rustc_const_unstable(feature = "const_ptr_offset_from", issue = "92980")]
    #[inline]
    pub const unsafe fn offset_from(self, origin: *const T) -> isize
    where
        T: Sized,
    {
        let pointee_size = mem::size_of::<T>();
        // Reject ZSTs (a byte distance cannot be divided by a zero size);
        // sizes above `isize::MAX` cannot occur for Rust types, so the upper
        // bound documents the intrinsic's precondition rather than a
        // reachable panic.
        assert!(0 < pointee_size && pointee_size <= isize::MAX as usize);
        // SAFETY: the caller must uphold the safety contract for `ptr_offset_from`.
        unsafe { intrinsics::ptr_offset_from(self, origin) }
    }
570 | ||
    /// Returns whether two pointers are guaranteed to be equal.
    ///
    /// At runtime this function behaves like `self == other`.
    /// However, in some contexts (e.g., compile-time evaluation),
    /// it is not always possible to determine equality of two pointers, so this function may
    /// spuriously return `false` for pointers that later actually turn out to be equal.
    /// But when it returns `true`, the pointers are guaranteed to be equal.
    ///
    /// This function is the mirror of [`guaranteed_ne`], but not its inverse. There are pointer
    /// comparisons for which both functions return `false`.
    ///
    /// [`guaranteed_ne`]: #method.guaranteed_ne
    ///
    /// The return value may change depending on the compiler version and unsafe code must not
    /// rely on the result of this function for soundness. It is suggested to only use this function
    /// for performance optimizations where spurious `false` return values by this function do not
    /// affect the outcome, but just the performance.
    /// The consequences of using this method to make runtime and compile-time code behave
    /// differently have not been explored. This method should not be used to introduce such
    /// differences, and it should also not be stabilized before we have a better understanding
    /// of this issue.
    #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
    #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
    #[inline]
    pub const fn guaranteed_eq(self, other: *const T) -> bool
    where
        T: Sized,
    {
        // Forward straight to the intrinsic; CTFE may conservatively answer
        // `false` as documented above.
        intrinsics::ptr_guaranteed_eq(self, other)
    }
601 | ||
3dfed10e | 602 | /// Returns whether two pointers are guaranteed to be unequal. |
f035d41b XL |
603 | /// |
604 | /// At runtime this function behaves like `self != other`. | |
605 | /// However, in some contexts (e.g., compile-time evaluation), | |
606 | /// it is not always possible to determine the inequality of two pointers, so this function may | |
3dfed10e XL |
607 | /// spuriously return `false` for pointers that later actually turn out to be unequal. |
608 | /// But when it returns `true`, the pointers are guaranteed to be unequal. | |
f035d41b XL |
609 | /// |
610 | /// This function is the mirror of [`guaranteed_eq`], but not its inverse. There are pointer | |
611 | /// comparisons for which both functions return `false`. | |
612 | /// | |
613 | /// [`guaranteed_eq`]: #method.guaranteed_eq | |
614 | /// | |
5e7ed085 | 615 | /// The return value may change depending on the compiler version and unsafe code must not |
f035d41b XL |
616 | /// rely on the result of this function for soundness. It is suggested to only use this function |
617 | /// for performance optimizations where spurious `false` return values by this function do not | |
618 | /// affect the outcome, but just the performance. | |
619 | /// The consequences of using this method to make runtime and compile-time code behave | |
620 | /// differently have not been explored. This method should not be used to introduce such | |
621 | /// differences, and it should also not be stabilized before we have a better understanding | |
622 | /// of this issue. | |
623 | #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")] | |
624 | #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")] | |
625 | #[inline] | |
f035d41b XL |
626 | pub const fn guaranteed_ne(self, other: *const T) -> bool |
627 | where | |
628 | T: Sized, | |
629 | { | |
630 | intrinsics::ptr_guaranteed_ne(self, other) | |
dfeec247 XL |
631 | } |
632 | ||
dfeec247 XL |
    /// Calculates the offset from a pointer (convenience for `.offset(count as isize)`).
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined
    /// Behavior:
    ///
    /// * Both the starting and resulting pointer must be either in bounds or one
    ///   byte past the end of the same [allocated object].
    ///
    /// * The computed offset, **in bytes**, cannot overflow an `isize`.
    ///
    /// * The offset being in bounds cannot rely on "wrapping around" the address
    ///   space. That is, the infinite-precision sum must fit in a `usize`.
    ///
    /// The compiler and standard library generally try to ensure allocations
    /// never reach a size where an offset is a concern. For instance, `Vec`
    /// and `Box` ensure they never allocate more than `isize::MAX` bytes, so
    /// `vec.as_ptr().add(vec.len())` is always safe.
    ///
    /// Most platforms fundamentally can't even construct such an allocation.
    /// For instance, no known 64-bit platform can ever serve a request
    /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
    /// However, some 32-bit and 16-bit platforms may successfully serve a request for
    /// more than `isize::MAX` bytes with things like Physical Address
    /// Extension. As such, memory acquired directly from allocators or memory
    /// mapped files *may* be too large to handle with this function.
    ///
    /// Consider using [`wrapping_add`] instead if these constraints are
    /// difficult to satisfy. The only advantage of this method is that it
    /// enables more aggressive compiler optimizations.
    ///
    /// [`wrapping_add`]: #method.wrapping_add
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let s: &str = "123";
    /// let ptr: *const u8 = s.as_ptr();
    ///
    /// unsafe {
    ///     println!("{}", *ptr.add(1) as char);
    ///     println!("{}", *ptr.add(2) as char);
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    pub const unsafe fn add(self, count: usize) -> Self
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `offset`.
        unsafe { self.offset(count as isize) }
    }
695 | ||
696 | /// Calculates the offset from a pointer (convenience for | |
697 | /// `.offset((count as isize).wrapping_neg())`). | |
698 | /// | |
699 | /// `count` is in units of T; e.g., a `count` of 3 represents a pointer | |
700 | /// offset of `3 * size_of::<T>()` bytes. | |
701 | /// | |
702 | /// # Safety | |
703 | /// | |
704 | /// If any of the following conditions are violated, the result is Undefined | |
705 | /// Behavior: | |
706 | /// | |
707 | /// * Both the starting and resulting pointer must be either in bounds or one | |
cdc7bbd5 | 708 | /// byte past the end of the same [allocated object]. |
dfeec247 XL |
709 | /// |
710 | /// * The computed offset cannot exceed `isize::MAX` **bytes**. | |
711 | /// | |
712 | /// * The offset being in bounds cannot rely on "wrapping around" the address | |
713 | /// space. That is, the infinite-precision sum must fit in a usize. | |
714 | /// | |
715 | /// The compiler and standard library generally tries to ensure allocations | |
716 | /// never reach a size where an offset is a concern. For instance, `Vec` | |
717 | /// and `Box` ensure they never allocate more than `isize::MAX` bytes, so | |
718 | /// `vec.as_ptr().add(vec.len()).sub(vec.len())` is always safe. | |
719 | /// | |
720 | /// Most platforms fundamentally can't even construct such an allocation. | |
721 | /// For instance, no known 64-bit platform can ever serve a request | |
722 | /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space. | |
723 | /// However, some 32-bit and 16-bit platforms may successfully serve a request for | |
724 | /// more than `isize::MAX` bytes with things like Physical Address | |
725 | /// Extension. As such, memory acquired directly from allocators or memory | |
726 | /// mapped files *may* be too large to handle with this function. | |
727 | /// | |
728 | /// Consider using [`wrapping_sub`] instead if these constraints are | |
729 | /// difficult to satisfy. The only advantage of this method is that it | |
730 | /// enables more aggressive compiler optimizations. | |
731 | /// | |
732 | /// [`wrapping_sub`]: #method.wrapping_sub | |
cdc7bbd5 | 733 | /// [allocated object]: crate::ptr#allocated-object |
dfeec247 XL |
734 | /// |
735 | /// # Examples | |
736 | /// | |
737 | /// Basic usage: | |
738 | /// | |
739 | /// ``` | |
740 | /// let s: &str = "123"; | |
741 | /// | |
742 | /// unsafe { | |
743 | /// let end: *const u8 = s.as_ptr().add(3); | |
744 | /// println!("{}", *end.sub(1) as char); | |
745 | /// println!("{}", *end.sub(2) as char); | |
746 | /// } | |
747 | /// ``` | |
748 | #[stable(feature = "pointer_methods", since = "1.26.0")] | |
f9f354fc | 749 | #[must_use = "returns a new pointer rather than modifying its argument"] |
5e7ed085 | 750 | #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")] |
dfeec247 | 751 | #[inline] |
f9f354fc | 752 | pub const unsafe fn sub(self, count: usize) -> Self |
dfeec247 XL |
753 | where |
754 | T: Sized, | |
755 | { | |
f035d41b XL |
756 | // SAFETY: the caller must uphold the safety contract for `offset`. |
757 | unsafe { self.offset((count as isize).wrapping_neg()) } | |
dfeec247 XL |
758 | } |
759 | ||
    /// Calculates the offset from a pointer using wrapping arithmetic.
    /// (convenience for `.wrapping_offset(count as isize)`)
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// This operation itself is always safe, but using the resulting pointer is not.
    ///
    /// The resulting pointer "remembers" the [allocated object] that `self` points to; it must not
    /// be used to read or write other allocated objects.
    ///
    /// In other words, `let z = x.wrapping_add((y as usize) - (x as usize))` does *not* make `z`
    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
    /// `x` and `y` point into the same allocated object.
    ///
    /// Compared to [`add`], this method basically delays the requirement of staying within the
    /// same allocated object: [`add`] is immediate Undefined Behavior when crossing object
    /// boundaries; `wrapping_add` produces a pointer but still leads to Undefined Behavior if a
    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`add`]
    /// can be optimized better and is thus preferable in performance-sensitive code.
    ///
    /// The delayed check only considers the value of the pointer that was dereferenced, not the
    /// intermediate values used during the computation of the final result. For example,
    /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
    /// allocated object and then re-entering it later is permitted.
    ///
    /// [`add`]: #method.add
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// // Iterate using a raw pointer in increments of two elements
    /// let data = [1u8, 2, 3, 4, 5];
    /// let mut ptr: *const u8 = data.as_ptr();
    /// let step = 2;
    /// let end_rounded_up = ptr.wrapping_add(6);
    ///
    /// // This loop prints "1, 3, 5, "
    /// while ptr != end_rounded_up {
    ///     unsafe {
    ///         print!("{}, ", *ptr);
    ///     }
    ///     ptr = ptr.wrapping_add(step);
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    pub const fn wrapping_add(self, count: usize) -> Self
    where
        T: Sized,
    {
        self.wrapping_offset(count as isize)
    }
821 | ||
822 | /// Calculates the offset from a pointer using wrapping arithmetic. | |
5869c6ff | 823 | /// (convenience for `.wrapping_offset((count as isize).wrapping_neg())`) |
dfeec247 XL |
824 | /// |
825 | /// `count` is in units of T; e.g., a `count` of 3 represents a pointer | |
826 | /// offset of `3 * size_of::<T>()` bytes. | |
827 | /// | |
828 | /// # Safety | |
829 | /// | |
5869c6ff XL |
830 | /// This operation itself is always safe, but using the resulting pointer is not. |
831 | /// | |
94222f64 | 832 | /// The resulting pointer "remembers" the [allocated object] that `self` points to; it must not |
cdc7bbd5 | 833 | /// be used to read or write other allocated objects. |
5869c6ff XL |
834 | /// |
835 | /// In other words, `let z = x.wrapping_sub((x as usize) - (y as usize))` does *not* make `z` | |
836 | /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still | |
837 | /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless | |
838 | /// `x` and `y` point into the same allocated object. | |
dfeec247 | 839 | /// |
5869c6ff XL |
840 | /// Compared to [`sub`], this method basically delays the requirement of staying within the |
841 | /// same allocated object: [`sub`] is immediate Undefined Behavior when crossing object | |
842 | /// boundaries; `wrapping_sub` produces a pointer but still leads to Undefined Behavior if a | |
843 | /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`sub`] | |
844 | /// can be optimized better and is thus preferable in performance-sensitive code. | |
dfeec247 | 845 | /// |
5869c6ff XL |
846 | /// The delayed check only considers the value of the pointer that was dereferenced, not the |
847 | /// intermediate values used during the computation of the final result. For example, | |
848 | /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the | |
849 | /// allocated object and then re-entering it later is permitted. | |
dfeec247 | 850 | /// |
dfeec247 | 851 | /// [`sub`]: #method.sub |
cdc7bbd5 | 852 | /// [allocated object]: crate::ptr#allocated-object |
dfeec247 XL |
853 | /// |
854 | /// # Examples | |
855 | /// | |
856 | /// Basic usage: | |
857 | /// | |
858 | /// ``` | |
859 | /// // Iterate using a raw pointer in increments of two elements (backwards) | |
860 | /// let data = [1u8, 2, 3, 4, 5]; | |
861 | /// let mut ptr: *const u8 = data.as_ptr(); | |
862 | /// let start_rounded_down = ptr.wrapping_sub(2); | |
863 | /// ptr = ptr.wrapping_add(4); | |
864 | /// let step = 2; | |
865 | /// // This loop prints "5, 3, 1, " | |
866 | /// while ptr != start_rounded_down { | |
867 | /// unsafe { | |
868 | /// print!("{}, ", *ptr); | |
869 | /// } | |
870 | /// ptr = ptr.wrapping_sub(step); | |
871 | /// } | |
872 | /// ``` | |
873 | #[stable(feature = "pointer_methods", since = "1.26.0")] | |
f9f354fc | 874 | #[must_use = "returns a new pointer rather than modifying its argument"] |
5e7ed085 | 875 | #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")] |
dfeec247 | 876 | #[inline] |
f9f354fc | 877 | pub const fn wrapping_sub(self, count: usize) -> Self |
dfeec247 XL |
878 | where |
879 | T: Sized, | |
880 | { | |
881 | self.wrapping_offset((count as isize).wrapping_neg()) | |
882 | } | |
883 | ||
    /// Reads the value from `self` without moving it. This leaves the
    /// memory in `self` unchanged.
    ///
    /// See [`ptr::read`] for safety concerns and examples.
    ///
    /// [`ptr::read`]: crate::ptr::read()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_unstable(feature = "const_ptr_read", issue = "80377")]
    #[inline]
    pub const unsafe fn read(self) -> T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `read`.
        unsafe { read(self) }
    }
900 | ||
    /// Performs a volatile read of the value from `self` without moving it. This
    /// leaves the memory in `self` unchanged.
    ///
    /// Volatile operations are intended to act on I/O memory, and are guaranteed
    /// to not be elided or reordered by the compiler across other volatile
    /// operations.
    ///
    /// See [`ptr::read_volatile`] for safety concerns and examples.
    ///
    /// [`ptr::read_volatile`]: crate::ptr::read_volatile()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    pub unsafe fn read_volatile(self) -> T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `read_volatile`.
        unsafe { read_volatile(self) }
    }
920 | ||
    /// Reads the value from `self` without moving it. This leaves the
    /// memory in `self` unchanged.
    ///
    /// Unlike `read`, the pointer may be unaligned.
    ///
    /// See [`ptr::read_unaligned`] for safety concerns and examples.
    ///
    /// [`ptr::read_unaligned`]: crate::ptr::read_unaligned()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_unstable(feature = "const_ptr_read", issue = "80377")]
    #[inline]
    pub const unsafe fn read_unaligned(self) -> T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `read_unaligned`.
        unsafe { read_unaligned(self) }
    }
939 | ||
    /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
    /// and destination may overlap.
    ///
    /// NOTE: this has the *same* argument order as [`ptr::copy`].
    ///
    /// See [`ptr::copy`] for safety concerns and examples.
    ///
    /// [`ptr::copy`]: crate::ptr::copy()
    #[rustc_const_unstable(feature = "const_intrinsic_copy", issue = "80697")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    pub const unsafe fn copy_to(self, dest: *mut T, count: usize)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `copy`.
        unsafe { copy(self, dest, count) }
    }
958 | ||
    /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
    /// and destination may *not* overlap.
    ///
    /// NOTE: this has the *same* argument order as [`ptr::copy_nonoverlapping`].
    ///
    /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
    ///
    /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
    #[rustc_const_unstable(feature = "const_intrinsic_copy", issue = "80697")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    pub const unsafe fn copy_to_nonoverlapping(self, dest: *mut T, count: usize)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
        unsafe { copy_nonoverlapping(self, dest, count) }
    }
977 | ||
    /// Computes the offset that needs to be applied to the pointer in order to make it aligned to
    /// `align`.
    ///
    /// If it is not possible to align the pointer, the implementation returns
    /// `usize::MAX`. It is permissible for the implementation to *always*
    /// return `usize::MAX`. Only your algorithm's performance can depend
    /// on getting a usable offset here, not its correctness.
    ///
    /// The offset is expressed in number of `T` elements, and not bytes. The value returned can be
    /// used with the `wrapping_add` method.
    ///
    /// There are no guarantees whatsoever that offsetting the pointer will not overflow or go
    /// beyond the allocation that the pointer points into. It is up to the caller to ensure that
    /// the returned offset is correct in all terms other than alignment.
    ///
    /// # Panics
    ///
    /// The function panics if `align` is not a power-of-two.
    ///
    /// # Examples
    ///
    /// Accessing adjacent `u8` as `u16`
    ///
    /// ```
    /// # fn foo(n: usize) {
    /// # use std::mem::align_of;
    /// # unsafe {
    /// let x = [5u8, 6u8, 7u8, 8u8, 9u8];
    /// let ptr = x.as_ptr().add(n) as *const u8;
    /// let offset = ptr.align_offset(align_of::<u16>());
    /// if offset < x.len() - n - 1 {
    ///     let u16_ptr = ptr.add(offset) as *const u16;
    ///     assert_ne!(*u16_ptr, 500);
    /// } else {
    ///     // while the pointer can be aligned via `offset`, it would point
    ///     // outside the allocation
    /// }
    /// # } }
    /// ```
    #[stable(feature = "align_offset", since = "1.36.0")]
    #[rustc_const_unstable(feature = "const_align_offset", issue = "90962")]
    pub const fn align_offset(self, align: usize) -> usize
    where
        T: Sized,
    {
        if !align.is_power_of_two() {
            panic!("align_offset: align is not a power-of-two");
        }

        // Runtime path: compute a real alignment offset.
        fn rt_impl<T>(p: *const T, align: usize) -> usize {
            // SAFETY: `align` has been checked to be a power of 2 above
            unsafe { align_offset(p, align) }
        }

        // Compile-time path: pointer addresses are not known during CTFE,
        // so always report "cannot be aligned".
        const fn ctfe_impl<T>(_: *const T, _: usize) -> usize {
            usize::MAX
        }

        // SAFETY:
        // It is permissible for `align_offset` to always return `usize::MAX`,
        // algorithm correctness cannot depend on `align_offset` returning non-max values.
        //
        // As such the behavior can't change after replacing `align_offset` with `usize::MAX`, only performance can.
        unsafe { intrinsics::const_eval_select((self, align), ctfe_impl, rt_impl) }
    }
1043 | } | |
1044 | ||
#[cfg_attr(bootstrap, lang = "const_slice_ptr")]
impl<T> *const [T] {
    /// Returns the length of a raw slice.
    ///
    /// The returned value is the number of **elements**, not the number of bytes.
    ///
    /// This function is safe, even when the raw slice cannot be cast to a slice
    /// reference because the pointer is null or unaligned.
    ///
    /// # Examples
    ///
    /// ```rust
    /// #![feature(slice_ptr_len)]
    ///
    /// use std::ptr;
    ///
    /// let slice: *const [i8] = ptr::slice_from_raw_parts(ptr::null(), 3);
    /// assert_eq!(slice.len(), 3);
    /// ```
    #[inline]
    #[unstable(feature = "slice_ptr_len", issue = "71146")]
    #[rustc_const_unstable(feature = "const_slice_ptr_len", issue = "71146")]
    pub const fn len(self) -> usize {
        // The length is stored in the pointer's metadata; no dereference occurs.
        metadata(self)
    }

    /// Returns a raw pointer to the slice's buffer.
    ///
    /// This is equivalent to casting `self` to `*const T`, but more type-safe.
    ///
    /// # Examples
    ///
    /// ```rust
    /// #![feature(slice_ptr_get)]
    /// use std::ptr;
    ///
    /// let slice: *const [i8] = ptr::slice_from_raw_parts(ptr::null(), 3);
    /// assert_eq!(slice.as_ptr(), ptr::null());
    /// ```
    #[inline]
    #[unstable(feature = "slice_ptr_get", issue = "74265")]
    #[rustc_const_unstable(feature = "slice_ptr_get", issue = "74265")]
    pub const fn as_ptr(self) -> *const T {
        self as *const T
    }

    /// Returns a raw pointer to an element or subslice, without doing bounds
    /// checking.
    ///
    /// Calling this method with an out-of-bounds index or when `self` is not dereferenceable
    /// is *[undefined behavior]* even if the resulting pointer is not used.
    ///
    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(slice_ptr_get)]
    ///
    /// let x = &[1, 2, 4] as *const [i32];
    ///
    /// unsafe {
    ///     assert_eq!(x.get_unchecked(1), x.as_ptr().add(1));
    /// }
    /// ```
    #[unstable(feature = "slice_ptr_get", issue = "74265")]
    #[rustc_const_unstable(feature = "const_slice_index", issue = "none")]
    #[inline]
    pub const unsafe fn get_unchecked<I>(self, index: I) -> *const I::Output
    where
        I: ~const SliceIndex<[T]>,
    {
        // SAFETY: the caller ensures that `self` is dereferenceable and `index` in-bounds.
        unsafe { index.get_unchecked(self) }
    }

    /// Returns `None` if the pointer is null, or else returns a shared slice to
    /// the value wrapped in `Some`. In contrast to [`as_ref`], this does not require
    /// that the value has to be initialized.
    ///
    /// [`as_ref`]: #method.as_ref
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// all of the following is true:
    ///
    /// * The pointer must be [valid] for reads for `ptr.len() * mem::size_of::<T>()` many bytes,
    ///   and it must be properly aligned. This means in particular:
    ///
    ///     * The entire memory range of this slice must be contained within a single [allocated object]!
    ///       Slices can never span across multiple allocated objects.
    ///
    ///     * The pointer must be aligned even for zero-length slices. One
    ///       reason for this is that enum layout optimizations may rely on references
    ///       (including slices of any length) being aligned and non-null to distinguish
    ///       them from other data. You can obtain a pointer that is usable as `data`
    ///       for zero-length slices using [`NonNull::dangling()`].
    ///
    /// * The total size `ptr.len() * mem::size_of::<T>()` of the slice must be no larger than `isize::MAX`.
    ///   See the safety documentation of [`pointer::offset`].
    ///
    /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
    ///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
    ///   In particular, for the duration of this lifetime, the memory the pointer points to must
    ///   not get mutated (except inside `UnsafeCell`).
    ///
    /// This applies even if the result of this method is unused!
    ///
    /// See also [`slice::from_raw_parts`][].
    ///
    /// [valid]: crate::ptr#safety
    /// [allocated object]: crate::ptr#allocated-object
    #[inline]
    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
    #[rustc_const_unstable(feature = "const_ptr_as_ref", issue = "91822")]
    pub const unsafe fn as_uninit_slice<'a>(self) -> Option<&'a [MaybeUninit<T>]> {
        if self.is_null() {
            None
        } else {
            // SAFETY: the caller must uphold the safety contract for `as_uninit_slice`.
            Some(unsafe { slice::from_raw_parts(self as *const MaybeUninit<T>, self.len()) })
        }
    }
}
1170 | ||
dfeec247 XL |
// Equality for pointers
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> PartialEq for *const T {
    #[inline]
    fn eq(&self, other: &*const T) -> bool {
        // Forwards to the primitive raw-pointer `==` comparison.
        *self == *other
    }
}
1179 | ||
// Marker impl: raw-pointer equality (above) is a full equivalence relation.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Eq for *const T {}
1182 | ||
1183 | // Comparison for pointers | |
1184 | #[stable(feature = "rust1", since = "1.0.0")] | |
1185 | impl<T: ?Sized> Ord for *const T { | |
1186 | #[inline] | |
1187 | fn cmp(&self, other: &*const T) -> Ordering { | |
1188 | if self < other { | |
1189 | Less | |
1190 | } else if self == other { | |
1191 | Equal | |
1192 | } else { | |
1193 | Greater | |
1194 | } | |
1195 | } | |
1196 | } | |
1197 | ||
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> PartialOrd for *const T {
    #[inline]
    fn partial_cmp(&self, other: &*const T) -> Option<Ordering> {
        // Pointer ordering is total (see the `Ord` impl), so this is never `None`.
        Some(self.cmp(other))
    }

    // The individual comparison operators forward directly to the primitive
    // raw-pointer comparisons instead of the default `partial_cmp`-based paths.
    #[inline]
    fn lt(&self, other: &*const T) -> bool {
        *self < *other
    }

    #[inline]
    fn le(&self, other: &*const T) -> bool {
        *self <= *other
    }

    #[inline]
    fn gt(&self, other: &*const T) -> bool {
        *self > *other
    }

    #[inline]
    fn ge(&self, other: &*const T) -> bool {
        *self >= *other
    }
}