]>
Commit | Line | Data |
---|---|---|
dfeec247 XL |
1 | use super::*; |
2 | use crate::cmp::Ordering::{self, Equal, Greater, Less}; | |
3 | use crate::intrinsics; | |
4 | use crate::mem; | |
3dfed10e | 5 | use crate::slice::{self, SliceIndex}; |
dfeec247 | 6 | |
dfeec247 XL |
7 | impl<T: ?Sized> *const T { |
8 | /// Returns `true` if the pointer is null. | |
9 | /// | |
10 | /// Note that unsized types have many possible null pointers, as only the | |
11 | /// raw data pointer is considered, not their length, vtable, etc. | |
12 | /// Therefore, two pointers that are null may still not compare equal to | |
13 | /// each other. | |
14 | /// | |
3dfed10e XL |
15 | /// ## Behavior during const evaluation |
16 | /// | |
17 | /// When this function is used during const evaluation, it may return `false` for pointers | |
18 | /// that turn out to be null at runtime. Specifically, when a pointer to some memory | |
19 | /// is offset beyond its bounds in such a way that the resulting pointer is null, | |
20 | /// the function will still return `false`. There is no way for CTFE to know | |
21 | /// the absolute position of that memory, so we cannot tell if the pointer is | |
22 | /// null or not. | |
23 | /// | |
dfeec247 XL |
24 | /// # Examples |
25 | /// | |
26 | /// Basic usage: | |
27 | /// | |
28 | /// ``` | |
29 | /// let s: &str = "Follow the rabbit"; | |
30 | /// let ptr: *const u8 = s.as_ptr(); | |
31 | /// assert!(!ptr.is_null()); | |
32 | /// ``` | |
33 | #[stable(feature = "rust1", since = "1.0.0")] | |
3dfed10e | 34 | #[rustc_const_unstable(feature = "const_ptr_is_null", issue = "74939")] |
dfeec247 | 35 | #[inline] |
3dfed10e | 36 | pub const fn is_null(self) -> bool { |
dfeec247 XL |
37 | // Compare via a cast to a thin pointer, so fat pointers are only |
38 | // considering their "data" part for null-ness. | |
f2b60f7d FG |
39 | match (self as *const u8).guaranteed_eq(null()) { |
40 | None => false, | |
41 | Some(res) => res, | |
42 | } | |
dfeec247 XL |
43 | } |
44 | ||
45 | /// Casts to a pointer of another type. | |
46 | #[stable(feature = "ptr_cast", since = "1.38.0")] | |
47 | #[rustc_const_stable(feature = "const_ptr_cast", since = "1.38.0")] | |
48 | #[inline] | |
49 | pub const fn cast<U>(self) -> *const U { | |
50 | self as _ | |
51 | } | |
52 | ||
5e7ed085 FG |
53 | /// Use the pointer value in a new pointer of another type. |
54 | /// | |
55 | /// In case `val` is a (fat) pointer to an unsized type, this operation | |
56 | /// will ignore the pointer part, whereas for (thin) pointers to sized | |
57 | /// types, this has the same effect as a simple cast. | |
58 | /// | |
59 | /// The resulting pointer will have provenance of `self`, i.e., for a fat | |
60 | /// pointer, this operation is semantically the same as creating a new | |
61 | /// fat pointer with the data pointer value of `self` but the metadata of | |
62 | /// `val`. | |
63 | /// | |
64 | /// # Examples | |
65 | /// | |
66 | /// This function is primarily useful for allowing byte-wise pointer | |
67 | /// arithmetic on potentially fat pointers: | |
68 | /// | |
69 | /// ``` | |
70 | /// #![feature(set_ptr_value)] | |
71 | /// # use core::fmt::Debug; | |
72 | /// let arr: [i32; 3] = [1, 2, 3]; | |
73 | /// let mut ptr = arr.as_ptr() as *const dyn Debug; | |
74 | /// let thin = ptr as *const u8; | |
75 | /// unsafe { | |
76 | /// ptr = thin.add(8).with_metadata_of(ptr); | |
77 | /// # assert_eq!(*(ptr as *const i32), 3); | |
78 | /// println!("{:?}", &*ptr); // will print "3" | |
79 | /// } | |
80 | /// ``` | |
81 | #[unstable(feature = "set_ptr_value", issue = "75091")] | |
82 | #[must_use = "returns a new pointer rather than modifying its argument"] | |
83 | #[inline] | |
84 | pub fn with_metadata_of<U>(self, mut val: *const U) -> *const U | |
85 | where | |
86 | U: ?Sized, | |
87 | { | |
88 | let target = &mut val as *mut *const U as *mut *const u8; | |
89 | // SAFETY: In case of a thin pointer, this operations is identical | |
90 | // to a simple assignment. In case of a fat pointer, with the current | |
91 | // fat pointer layout implementation, the first field of such a | |
92 | // pointer is always the data pointer, which is likewise assigned. | |
93 | unsafe { *target = self as *const u8 }; | |
94 | val | |
95 | } | |
96 | ||
5099ac24 FG |
97 | /// Changes constness without changing the type. |
98 | /// | |
99 | /// This is a bit safer than `as` because it wouldn't silently change the type if the code is | |
100 | /// refactored. | |
f2b60f7d FG |
101 | #[stable(feature = "ptr_const_cast", since = "1.65.0")] |
102 | #[rustc_const_stable(feature = "ptr_const_cast", since = "1.65.0")] | |
064997fb | 103 | pub const fn cast_mut(self) -> *mut T { |
5099ac24 FG |
104 | self as _ |
105 | } | |
106 | ||
a2a8927a XL |
107 | /// Casts a pointer to its raw bits. |
108 | /// | |
109 | /// This is equivalent to `as usize`, but is more specific to enhance readability. | |
110 | /// The inverse method is [`from_bits`](#method.from_bits). | |
111 | /// | |
112 | /// In particular, `*p as usize` and `p as usize` will both compile for | |
113 | /// pointers to numeric types but do very different things, so using this | |
114 | /// helps emphasize that reading the bits was intentional. | |
115 | /// | |
116 | /// # Examples | |
117 | /// | |
118 | /// ``` | |
119 | /// #![feature(ptr_to_from_bits)] | |
120 | /// let array = [13, 42]; | |
121 | /// let p0: *const i32 = &array[0]; | |
122 | /// assert_eq!(<*const _>::from_bits(p0.to_bits()), p0); | |
123 | /// let p1: *const i32 = &array[1]; | |
124 | /// assert_eq!(p1.to_bits() - p0.to_bits(), 4); | |
125 | /// ``` | |
126 | #[unstable(feature = "ptr_to_from_bits", issue = "91126")] | |
127 | pub fn to_bits(self) -> usize | |
128 | where | |
129 | T: Sized, | |
130 | { | |
131 | self as usize | |
132 | } | |
133 | ||
134 | /// Creates a pointer from its raw bits. | |
135 | /// | |
136 | /// This is equivalent to `as *const T`, but is more specific to enhance readability. | |
137 | /// The inverse method is [`to_bits`](#method.to_bits). | |
138 | /// | |
139 | /// # Examples | |
140 | /// | |
141 | /// ``` | |
142 | /// #![feature(ptr_to_from_bits)] | |
143 | /// use std::ptr::NonNull; | |
144 | /// let dangling: *const u8 = NonNull::dangling().as_ptr(); | |
145 | /// assert_eq!(<*const u8>::from_bits(1), dangling); | |
146 | /// ``` | |
147 | #[unstable(feature = "ptr_to_from_bits", issue = "91126")] | |
148 | pub fn from_bits(bits: usize) -> Self | |
149 | where | |
150 | T: Sized, | |
151 | { | |
152 | bits as Self | |
153 | } | |
154 | ||
5e7ed085 FG |
155 | /// Gets the "address" portion of the pointer. |
156 | /// | |
04454e1e FG |
157 | /// This is similar to `self as usize`, which semantically discards *provenance* and |
158 | /// *address-space* information. However, unlike `self as usize`, casting the returned address | |
159 | /// back to a pointer yields [`invalid`][], which is undefined behavior to dereference. To | |
f2b60f7d | 160 | /// properly restore the lost information and obtain a dereferenceable pointer, use |
04454e1e FG |
161 | /// [`with_addr`][pointer::with_addr] or [`map_addr`][pointer::map_addr]. |
162 | /// | |
163 | /// If using those APIs is not possible because there is no way to preserve a pointer with the | |
164 | /// required provenance, use [`expose_addr`][pointer::expose_addr] and | |
165 | /// [`from_exposed_addr`][from_exposed_addr] instead. However, note that this makes | |
166 | /// your code less portable and less amenable to tools that check for compliance with the Rust | |
167 | /// memory model. | |
5e7ed085 FG |
168 | /// |
169 | /// On most platforms this will produce a value with the same bytes as the original | |
170 | /// pointer, because all the bytes are dedicated to describing the address. | |
171 | /// Platforms which need to store additional information in the pointer may | |
172 | /// perform a change of representation to produce a value containing only the address | |
173 | /// portion of the pointer. What that means is up to the platform to define. | |
174 | /// | |
04454e1e FG |
175 | /// This API and its claimed semantics are part of the Strict Provenance experiment, and as such |
176 | /// might change in the future (including possibly weakening this so it becomes wholly | |
177 | /// equivalent to `self as usize`). See the [module documentation][crate::ptr] for details. | |
5e7ed085 FG |
178 | #[must_use] |
179 | #[inline] | |
180 | #[unstable(feature = "strict_provenance", issue = "95228")] | |
181 | pub fn addr(self) -> usize | |
182 | where | |
183 | T: Sized, | |
184 | { | |
185 | // FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic. | |
923072b8 FG |
186 | // SAFETY: Pointer-to-integer transmutes are valid (if you are okay with losing the |
187 | // provenance). | |
188 | unsafe { mem::transmute(self) } | |
5e7ed085 FG |
189 | } |
190 | ||
04454e1e FG |
191 | /// Gets the "address" portion of the pointer, and 'exposes' the "provenance" part for future |
192 | /// use in [`from_exposed_addr`][]. | |
193 | /// | |
194 | /// This is equivalent to `self as usize`, which semantically discards *provenance* and | |
195 | /// *address-space* information. Furthermore, this (like the `as` cast) has the implicit | |
196 | /// side-effect of marking the provenance as 'exposed', so on platforms that support it you can | |
197 | /// later call [`from_exposed_addr`][] to reconstitute the original pointer including its | |
198 | /// provenance. (Reconstructing address space information, if required, is your responsibility.) | |
199 | /// | |
200 | /// Using this method means that code is *not* following Strict Provenance rules. Supporting | |
201 | /// [`from_exposed_addr`][] complicates specification and reasoning and may not be supported by | |
202 | /// tools that help you to stay conformant with the Rust memory model, so it is recommended to | |
203 | /// use [`addr`][pointer::addr] wherever possible. | |
204 | /// | |
205 | /// On most platforms this will produce a value with the same bytes as the original pointer, | |
206 | /// because all the bytes are dedicated to describing the address. Platforms which need to store | |
207 | /// additional information in the pointer may not support this operation, since the 'expose' | |
208 | /// side-effect which is required for [`from_exposed_addr`][] to work is typically not | |
209 | /// available. | |
210 | /// | |
211 | /// This API and its claimed semantics are part of the Strict Provenance experiment, see the | |
212 | /// [module documentation][crate::ptr] for details. | |
213 | /// | |
214 | /// [`from_exposed_addr`]: from_exposed_addr | |
215 | #[must_use] | |
216 | #[inline] | |
217 | #[unstable(feature = "strict_provenance", issue = "95228")] | |
218 | pub fn expose_addr(self) -> usize | |
219 | where | |
220 | T: Sized, | |
221 | { | |
222 | // FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic. | |
223 | self as usize | |
224 | } | |
225 | ||
5e7ed085 FG |
226 | /// Creates a new pointer with the given address. |
227 | /// | |
228 | /// This performs the same operation as an `addr as ptr` cast, but copies | |
229 | /// the *address-space* and *provenance* of `self` to the new pointer. | |
230 | /// This allows us to dynamically preserve and propagate this important | |
231 | /// information in a way that is otherwise impossible with a unary cast. | |
232 | /// | |
233 | /// This is equivalent to using [`wrapping_offset`][pointer::wrapping_offset] to offset | |
234 | /// `self` to the given address, and therefore has all the same capabilities and restrictions. | |
235 | /// | |
236 | /// This API and its claimed semantics are part of the Strict Provenance experiment, | |
237 | /// see the [module documentation][crate::ptr] for details. | |
238 | #[must_use] | |
239 | #[inline] | |
240 | #[unstable(feature = "strict_provenance", issue = "95228")] | |
241 | pub fn with_addr(self, addr: usize) -> Self | |
242 | where | |
243 | T: Sized, | |
244 | { | |
245 | // FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic. | |
246 | // | |
247 | // In the mean-time, this operation is defined to be "as if" it was | |
248 | // a wrapping_offset, so we can emulate it as such. This should properly | |
249 | // restore pointer provenance even under today's compiler. | |
250 | let self_addr = self.addr() as isize; | |
251 | let dest_addr = addr as isize; | |
252 | let offset = dest_addr.wrapping_sub(self_addr); | |
253 | ||
254 | // This is the canonical desugarring of this operation | |
f2b60f7d | 255 | self.wrapping_byte_offset(offset) |
5e7ed085 FG |
256 | } |
257 | ||
258 | /// Creates a new pointer by mapping `self`'s address to a new one. | |
259 | /// | |
260 | /// This is a convenience for [`with_addr`][pointer::with_addr], see that method for details. | |
261 | /// | |
262 | /// This API and its claimed semantics are part of the Strict Provenance experiment, | |
263 | /// see the [module documentation][crate::ptr] for details. | |
264 | #[must_use] | |
265 | #[inline] | |
266 | #[unstable(feature = "strict_provenance", issue = "95228")] | |
267 | pub fn map_addr(self, f: impl FnOnce(usize) -> usize) -> Self | |
268 | where | |
269 | T: Sized, | |
270 | { | |
271 | self.with_addr(f(self.addr())) | |
272 | } | |
273 | ||
94222f64 | 274 | /// Decompose a (possibly wide) pointer into its address and metadata components. |
6a06907d XL |
275 | /// |
276 | /// The pointer can be later reconstructed with [`from_raw_parts`]. | |
6a06907d XL |
277 | #[unstable(feature = "ptr_metadata", issue = "81513")] |
278 | #[rustc_const_unstable(feature = "ptr_metadata", issue = "81513")] | |
279 | #[inline] | |
280 | pub const fn to_raw_parts(self) -> (*const (), <T as super::Pointee>::Metadata) { | |
281 | (self.cast(), metadata(self)) | |
282 | } | |
283 | ||
3dfed10e XL |
284 | /// Returns `None` if the pointer is null, or else returns a shared reference to |
285 | /// the value wrapped in `Some`. If the value may be uninitialized, [`as_uninit_ref`] | |
286 | /// must be used instead. | |
dfeec247 | 287 | /// |
3dfed10e | 288 | /// [`as_uninit_ref`]: #method.as_uninit_ref |
dfeec247 | 289 | /// |
3dfed10e | 290 | /// # Safety |
dfeec247 | 291 | /// |
17df50a5 | 292 | /// When calling this method, you have to ensure that *either* the pointer is null *or* |
dfeec247 | 293 | /// all of the following is true: |
3dfed10e XL |
294 | /// |
295 | /// * The pointer must be properly aligned. | |
296 | /// | |
a2a8927a | 297 | /// * It must be "dereferenceable" in the sense defined in [the module documentation]. |
3dfed10e XL |
298 | /// |
299 | /// * The pointer must point to an initialized instance of `T`. | |
300 | /// | |
301 | /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is | |
302 | /// arbitrarily chosen and does not necessarily reflect the actual lifetime of the data. | |
04454e1e | 303 | /// In particular, while this reference exists, the memory the pointer points to must |
3dfed10e | 304 | /// not get mutated (except inside `UnsafeCell`). |
dfeec247 XL |
305 | /// |
306 | /// This applies even if the result of this method is unused! | |
307 | /// (The part about being initialized is not yet fully decided, but until | |
308 | /// it is, the only safe approach is to ensure that they are indeed initialized.) | |
309 | /// | |
3dfed10e | 310 | /// [the module documentation]: crate::ptr#safety |
dfeec247 XL |
311 | /// |
312 | /// # Examples | |
313 | /// | |
314 | /// Basic usage: | |
315 | /// | |
316 | /// ``` | |
317 | /// let ptr: *const u8 = &10u8 as *const u8; | |
318 | /// | |
319 | /// unsafe { | |
320 | /// if let Some(val_back) = ptr.as_ref() { | |
5e7ed085 | 321 | /// println!("We got back the value: {val_back}!"); |
dfeec247 XL |
322 | /// } |
323 | /// } | |
324 | /// ``` | |
325 | /// | |
326 | /// # Null-unchecked version | |
327 | /// | |
328 | /// If you are sure the pointer can never be null and are looking for some kind of | |
329 | /// `as_ref_unchecked` that returns the `&T` instead of `Option<&T>`, know that you can | |
330 | /// dereference the pointer directly. | |
331 | /// | |
332 | /// ``` | |
333 | /// let ptr: *const u8 = &10u8 as *const u8; | |
334 | /// | |
335 | /// unsafe { | |
336 | /// let val_back = &*ptr; | |
5e7ed085 | 337 | /// println!("We got back the value: {val_back}!"); |
dfeec247 XL |
338 | /// } |
339 | /// ``` | |
340 | #[stable(feature = "ptr_as_ref", since = "1.9.0")] | |
a2a8927a | 341 | #[rustc_const_unstable(feature = "const_ptr_as_ref", issue = "91822")] |
dfeec247 | 342 | #[inline] |
a2a8927a | 343 | pub const unsafe fn as_ref<'a>(self) -> Option<&'a T> { |
f035d41b XL |
344 | // SAFETY: the caller must guarantee that `self` is valid |
345 | // for a reference if it isn't null. | |
346 | if self.is_null() { None } else { unsafe { Some(&*self) } } | |
dfeec247 XL |
347 | } |
348 | ||
3dfed10e XL |
349 | /// Returns `None` if the pointer is null, or else returns a shared reference to |
350 | /// the value wrapped in `Some`. In contrast to [`as_ref`], this does not require | |
351 | /// that the value has to be initialized. | |
352 | /// | |
353 | /// [`as_ref`]: #method.as_ref | |
354 | /// | |
355 | /// # Safety | |
356 | /// | |
17df50a5 | 357 | /// When calling this method, you have to ensure that *either* the pointer is null *or* |
3dfed10e XL |
358 | /// all of the following is true: |
359 | /// | |
360 | /// * The pointer must be properly aligned. | |
361 | /// | |
a2a8927a | 362 | /// * It must be "dereferenceable" in the sense defined in [the module documentation]. |
3dfed10e XL |
363 | /// |
364 | /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is | |
365 | /// arbitrarily chosen and does not necessarily reflect the actual lifetime of the data. | |
04454e1e | 366 | /// In particular, while this reference exists, the memory the pointer points to must |
3dfed10e XL |
367 | /// not get mutated (except inside `UnsafeCell`). |
368 | /// | |
369 | /// This applies even if the result of this method is unused! | |
370 | /// | |
371 | /// [the module documentation]: crate::ptr#safety | |
372 | /// | |
373 | /// # Examples | |
374 | /// | |
375 | /// Basic usage: | |
376 | /// | |
377 | /// ``` | |
378 | /// #![feature(ptr_as_uninit)] | |
379 | /// | |
380 | /// let ptr: *const u8 = &10u8 as *const u8; | |
381 | /// | |
382 | /// unsafe { | |
383 | /// if let Some(val_back) = ptr.as_uninit_ref() { | |
384 | /// println!("We got back the value: {}!", val_back.assume_init()); | |
385 | /// } | |
386 | /// } | |
387 | /// ``` | |
388 | #[inline] | |
389 | #[unstable(feature = "ptr_as_uninit", issue = "75402")] | |
a2a8927a XL |
390 | #[rustc_const_unstable(feature = "const_ptr_as_ref", issue = "91822")] |
391 | pub const unsafe fn as_uninit_ref<'a>(self) -> Option<&'a MaybeUninit<T>> | |
3dfed10e XL |
392 | where |
393 | T: Sized, | |
394 | { | |
395 | // SAFETY: the caller must guarantee that `self` meets all the | |
396 | // requirements for a reference. | |
397 | if self.is_null() { None } else { Some(unsafe { &*(self as *const MaybeUninit<T>) }) } | |
398 | } | |
399 | ||
dfeec247 XL |
400 | /// Calculates the offset from a pointer. |
401 | /// | |
402 | /// `count` is in units of T; e.g., a `count` of 3 represents a pointer | |
403 | /// offset of `3 * size_of::<T>()` bytes. | |
404 | /// | |
405 | /// # Safety | |
406 | /// | |
407 | /// If any of the following conditions are violated, the result is Undefined | |
408 | /// Behavior: | |
409 | /// | |
410 | /// * Both the starting and resulting pointer must be either in bounds or one | |
cdc7bbd5 | 411 | /// byte past the end of the same [allocated object]. |
dfeec247 XL |
412 | /// |
413 | /// * The computed offset, **in bytes**, cannot overflow an `isize`. | |
414 | /// | |
415 | /// * The offset being in bounds cannot rely on "wrapping around" the address | |
416 | /// space. That is, the infinite-precision sum, **in bytes** must fit in a usize. | |
417 | /// | |
418 | /// The compiler and standard library generally tries to ensure allocations | |
419 | /// never reach a size where an offset is a concern. For instance, `Vec` | |
420 | /// and `Box` ensure they never allocate more than `isize::MAX` bytes, so | |
421 | /// `vec.as_ptr().add(vec.len())` is always safe. | |
422 | /// | |
423 | /// Most platforms fundamentally can't even construct such an allocation. | |
424 | /// For instance, no known 64-bit platform can ever serve a request | |
425 | /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space. | |
426 | /// However, some 32-bit and 16-bit platforms may successfully serve a request for | |
427 | /// more than `isize::MAX` bytes with things like Physical Address | |
428 | /// Extension. As such, memory acquired directly from allocators or memory | |
429 | /// mapped files *may* be too large to handle with this function. | |
430 | /// | |
431 | /// Consider using [`wrapping_offset`] instead if these constraints are | |
432 | /// difficult to satisfy. The only advantage of this method is that it | |
433 | /// enables more aggressive compiler optimizations. | |
434 | /// | |
435 | /// [`wrapping_offset`]: #method.wrapping_offset | |
cdc7bbd5 | 436 | /// [allocated object]: crate::ptr#allocated-object |
dfeec247 XL |
437 | /// |
438 | /// # Examples | |
439 | /// | |
440 | /// Basic usage: | |
441 | /// | |
442 | /// ``` | |
443 | /// let s: &str = "123"; | |
444 | /// let ptr: *const u8 = s.as_ptr(); | |
445 | /// | |
446 | /// unsafe { | |
447 | /// println!("{}", *ptr.offset(1) as char); | |
448 | /// println!("{}", *ptr.offset(2) as char); | |
449 | /// } | |
450 | /// ``` | |
451 | #[stable(feature = "rust1", since = "1.0.0")] | |
f9f354fc | 452 | #[must_use = "returns a new pointer rather than modifying its argument"] |
5e7ed085 | 453 | #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")] |
cdc7bbd5 | 454 | #[inline(always)] |
064997fb | 455 | #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces |
f9f354fc | 456 | pub const unsafe fn offset(self, count: isize) -> *const T |
dfeec247 XL |
457 | where |
458 | T: Sized, | |
459 | { | |
f035d41b XL |
460 | // SAFETY: the caller must uphold the safety contract for `offset`. |
461 | unsafe { intrinsics::offset(self, count) } | |
dfeec247 XL |
462 | } |
463 | ||
923072b8 FG |
464 | /// Calculates the offset from a pointer in bytes. |
465 | /// | |
466 | /// `count` is in units of **bytes**. | |
467 | /// | |
468 | /// This is purely a convenience for casting to a `u8` pointer and | |
469 | /// using [offset][pointer::offset] on it. See that method for documentation | |
470 | /// and safety requirements. | |
471 | /// | |
472 | /// For non-`Sized` pointees this operation changes only the data pointer, | |
473 | /// leaving the metadata untouched. | |
474 | #[must_use] | |
475 | #[inline(always)] | |
476 | #[unstable(feature = "pointer_byte_offsets", issue = "96283")] | |
477 | #[rustc_const_unstable(feature = "const_pointer_byte_offsets", issue = "96283")] | |
064997fb | 478 | #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces |
923072b8 FG |
479 | pub const unsafe fn byte_offset(self, count: isize) -> Self { |
480 | // SAFETY: the caller must uphold the safety contract for `offset`. | |
481 | let this = unsafe { self.cast::<u8>().offset(count).cast::<()>() }; | |
482 | from_raw_parts::<T>(this, metadata(self)) | |
483 | } | |
484 | ||
dfeec247 XL |
485 | /// Calculates the offset from a pointer using wrapping arithmetic. |
486 | /// | |
487 | /// `count` is in units of T; e.g., a `count` of 3 represents a pointer | |
488 | /// offset of `3 * size_of::<T>()` bytes. | |
489 | /// | |
490 | /// # Safety | |
491 | /// | |
5869c6ff | 492 | /// This operation itself is always safe, but using the resulting pointer is not. |
dfeec247 | 493 | /// |
94222f64 | 494 | /// The resulting pointer "remembers" the [allocated object] that `self` points to; it must not |
cdc7bbd5 | 495 | /// be used to read or write other allocated objects. |
dfeec247 | 496 | /// |
5869c6ff XL |
497 | /// In other words, `let z = x.wrapping_offset((y as isize) - (x as isize))` does *not* make `z` |
498 | /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still | |
499 | /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless | |
500 | /// `x` and `y` point into the same allocated object. | |
dfeec247 | 501 | /// |
5869c6ff XL |
502 | /// Compared to [`offset`], this method basically delays the requirement of staying within the |
503 | /// same allocated object: [`offset`] is immediate Undefined Behavior when crossing object | |
504 | /// boundaries; `wrapping_offset` produces a pointer but still leads to Undefined Behavior if a | |
505 | /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`offset`] | |
506 | /// can be optimized better and is thus preferable in performance-sensitive code. | |
507 | /// | |
508 | /// The delayed check only considers the value of the pointer that was dereferenced, not the | |
509 | /// intermediate values used during the computation of the final result. For example, | |
510 | /// `x.wrapping_offset(o).wrapping_offset(o.wrapping_neg())` is always the same as `x`. In other | |
511 | /// words, leaving the allocated object and then re-entering it later is permitted. | |
dfeec247 | 512 | /// |
dfeec247 | 513 | /// [`offset`]: #method.offset |
cdc7bbd5 | 514 | /// [allocated object]: crate::ptr#allocated-object |
dfeec247 XL |
515 | /// |
516 | /// # Examples | |
517 | /// | |
518 | /// Basic usage: | |
519 | /// | |
520 | /// ``` | |
521 | /// // Iterate using a raw pointer in increments of two elements | |
522 | /// let data = [1u8, 2, 3, 4, 5]; | |
523 | /// let mut ptr: *const u8 = data.as_ptr(); | |
524 | /// let step = 2; | |
525 | /// let end_rounded_up = ptr.wrapping_offset(6); | |
526 | /// | |
527 | /// // This loop prints "1, 3, 5, " | |
528 | /// while ptr != end_rounded_up { | |
529 | /// unsafe { | |
530 | /// print!("{}, ", *ptr); | |
531 | /// } | |
532 | /// ptr = ptr.wrapping_offset(step); | |
533 | /// } | |
534 | /// ``` | |
535 | #[stable(feature = "ptr_wrapping_offset", since = "1.16.0")] | |
f9f354fc | 536 | #[must_use = "returns a new pointer rather than modifying its argument"] |
5e7ed085 | 537 | #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")] |
cdc7bbd5 | 538 | #[inline(always)] |
f9f354fc | 539 | pub const fn wrapping_offset(self, count: isize) -> *const T |
dfeec247 XL |
540 | where |
541 | T: Sized, | |
542 | { | |
f9f354fc | 543 | // SAFETY: the `arith_offset` intrinsic has no prerequisites to be called. |
dfeec247 XL |
544 | unsafe { intrinsics::arith_offset(self, count) } |
545 | } | |
546 | ||
923072b8 FG |
547 | /// Calculates the offset from a pointer in bytes using wrapping arithmetic. |
548 | /// | |
549 | /// `count` is in units of **bytes**. | |
550 | /// | |
551 | /// This is purely a convenience for casting to a `u8` pointer and | |
552 | /// using [wrapping_offset][pointer::wrapping_offset] on it. See that method | |
553 | /// for documentation. | |
554 | /// | |
555 | /// For non-`Sized` pointees this operation changes only the data pointer, | |
556 | /// leaving the metadata untouched. | |
557 | #[must_use] | |
558 | #[inline(always)] | |
559 | #[unstable(feature = "pointer_byte_offsets", issue = "96283")] | |
560 | #[rustc_const_unstable(feature = "const_pointer_byte_offsets", issue = "96283")] | |
561 | pub const fn wrapping_byte_offset(self, count: isize) -> Self { | |
562 | from_raw_parts::<T>(self.cast::<u8>().wrapping_offset(count).cast::<()>(), metadata(self)) | |
563 | } | |
564 | ||
f2b60f7d FG |
565 | /// Masks out bits of the pointer according to a mask. |
566 | /// | |
567 | /// This is convenience for `ptr.map_addr(|a| a & mask)`. | |
568 | /// | |
569 | /// For non-`Sized` pointees this operation changes only the data pointer, | |
570 | /// leaving the metadata untouched. | |
f2b60f7d FG |
571 | #[unstable(feature = "ptr_mask", issue = "98290")] |
572 | #[must_use = "returns a new pointer rather than modifying its argument"] | |
573 | #[inline(always)] | |
574 | pub fn mask(self, mask: usize) -> *const T { | |
575 | let this = intrinsics::ptr_mask(self.cast::<()>(), mask); | |
576 | from_raw_parts::<T>(this, metadata(self)) | |
577 | } | |
578 | ||
dfeec247 | 579 | /// Calculates the distance between two pointers. The returned value is in |
04454e1e | 580 | /// units of T: the distance in bytes divided by `mem::size_of::<T>()`. |
dfeec247 XL |
581 | /// |
582 | /// This function is the inverse of [`offset`]. | |
583 | /// | |
584 | /// [`offset`]: #method.offset | |
dfeec247 XL |
585 | /// |
586 | /// # Safety | |
587 | /// | |
588 | /// If any of the following conditions are violated, the result is Undefined | |
589 | /// Behavior: | |
590 | /// | |
591 | /// * Both the starting and other pointer must be either in bounds or one | |
cdc7bbd5 | 592 | /// byte past the end of the same [allocated object]. |
dfeec247 | 593 | /// |
3dfed10e XL |
594 | /// * Both pointers must be *derived from* a pointer to the same object. |
595 | /// (See below for an example.) | |
596 | /// | |
dfeec247 XL |
597 | /// * The distance between the pointers, in bytes, must be an exact multiple |
598 | /// of the size of `T`. | |
599 | /// | |
6a06907d XL |
600 | /// * The distance between the pointers, **in bytes**, cannot overflow an `isize`. |
601 | /// | |
dfeec247 XL |
602 | /// * The distance being in bounds cannot rely on "wrapping around" the address space. |
603 | /// | |
6a06907d XL |
604 | /// Rust types are never larger than `isize::MAX` and Rust allocations never wrap around the |
605 | /// address space, so two pointers within some value of any Rust type `T` will always satisfy | |
606 | /// the last two conditions. The standard library also generally ensures that allocations | |
607 | /// never reach a size where an offset is a concern. For instance, `Vec` and `Box` ensure they | |
608 | /// never allocate more than `isize::MAX` bytes, so `ptr_into_vec.offset_from(vec.as_ptr())` | |
609 | /// always satisfies the last two conditions. | |
dfeec247 | 610 | /// |
6a06907d | 611 | /// Most platforms fundamentally can't even construct such a large allocation. |
dfeec247 XL |
612 | /// For instance, no known 64-bit platform can ever serve a request |
613 | /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space. | |
614 | /// However, some 32-bit and 16-bit platforms may successfully serve a request for | |
615 | /// more than `isize::MAX` bytes with things like Physical Address | |
616 | /// Extension. As such, memory acquired directly from allocators or memory | |
617 | /// mapped files *may* be too large to handle with this function. | |
6a06907d XL |
618 | /// (Note that [`offset`] and [`add`] also have a similar limitation and hence cannot be used on |
619 | /// such large allocations either.) | |
620 | /// | |
621 | /// [`add`]: #method.add | |
cdc7bbd5 | 622 | /// [allocated object]: crate::ptr#allocated-object |
dfeec247 | 623 | /// |
dfeec247 XL |
624 | /// # Panics |
625 | /// | |
626 | /// This function panics if `T` is a Zero-Sized Type ("ZST"). | |
627 | /// | |
628 | /// # Examples | |
629 | /// | |
630 | /// Basic usage: | |
631 | /// | |
632 | /// ``` | |
dfeec247 XL |
633 | /// let a = [0; 5]; |
634 | /// let ptr1: *const i32 = &a[1]; | |
635 | /// let ptr2: *const i32 = &a[3]; | |
636 | /// unsafe { | |
637 | /// assert_eq!(ptr2.offset_from(ptr1), 2); | |
638 | /// assert_eq!(ptr1.offset_from(ptr2), -2); | |
639 | /// assert_eq!(ptr1.offset(2), ptr2); | |
640 | /// assert_eq!(ptr2.offset(-2), ptr1); | |
641 | /// } | |
642 | /// ``` | |
3dfed10e XL |
643 | /// |
644 | /// *Incorrect* usage: | |
645 | /// | |
646 | /// ```rust,no_run | |
647 | /// let ptr1 = Box::into_raw(Box::new(0u8)) as *const u8; | |
648 | /// let ptr2 = Box::into_raw(Box::new(1u8)) as *const u8; | |
649 | /// let diff = (ptr2 as isize).wrapping_sub(ptr1 as isize); | |
650 | /// // Make ptr2_other an "alias" of ptr2, but derived from ptr1. | |
651 | /// let ptr2_other = (ptr1 as *const u8).wrapping_offset(diff); | |
652 | /// assert_eq!(ptr2 as usize, ptr2_other as usize); | |
653 | /// // Since ptr2_other and ptr2 are derived from pointers to different objects, | |
654 | /// // computing their offset is undefined behavior, even though | |
655 | /// // they point to the same address! | |
656 | /// unsafe { | |
657 | /// let zero = ptr2_other.offset_from(ptr2); // Undefined Behavior | |
658 | /// } | |
659 | /// ``` | |
    #[stable(feature = "ptr_offset_from", since = "1.47.0")]
    #[rustc_const_stable(feature = "const_ptr_offset_from", since = "1.65.0")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn offset_from(self, origin: *const T) -> isize
    where
        T: Sized,
    {
        let pointee_size = mem::size_of::<T>();
        // Reject ZSTs (an element distance is meaningless for them, see the
        // `# Panics` section above) and pointee sizes above `isize::MAX` bytes,
        // so the element-count division performed by the intrinsic is well-defined.
        assert!(0 < pointee_size && pointee_size <= isize::MAX as usize);
        // SAFETY: the caller must uphold the safety contract for `ptr_offset_from`.
        unsafe { intrinsics::ptr_offset_from(self, origin) }
    }
673 | ||
    /// Calculates the distance between two pointers. The returned value is in
    /// units of **bytes**.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [offset_from][pointer::offset_from] on it. See that method for
    /// documentation and safety requirements.
    ///
    /// For non-`Sized` pointees this operation considers only the data pointers,
    /// ignoring the metadata.
    #[inline(always)]
    #[unstable(feature = "pointer_byte_offsets", issue = "96283")]
    #[rustc_const_unstable(feature = "const_pointer_byte_offsets", issue = "96283")]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn byte_offset_from(self, origin: *const T) -> isize {
        // SAFETY: the caller must uphold the safety contract for `offset_from`.
        // Casting both pointers to `u8` makes `offset_from` return a distance in
        // bytes; since `u8` has size 1, the ZST assert there can never fire.
        unsafe { self.cast::<u8>().offset_from(origin.cast::<u8>()) }
    }
691 | ||
    /// Calculates the distance between two pointers, *where it's known that
    /// `self` is equal to or greater than `origin`*. The returned value is in
    /// units of T: the distance in bytes is divided by `mem::size_of::<T>()`.
    ///
    /// This computes the same value that [`offset_from`](#method.offset_from)
    /// would compute, but with the added precondition that the offset is
    /// guaranteed to be non-negative. This method is equivalent to
    /// `usize::try_from(self.offset_from(origin)).unwrap_unchecked()`,
    /// but it provides slightly more information to the optimizer, which can
    /// sometimes allow it to optimize slightly better with some backends.
    ///
    /// This method can be thought of as recovering the `count` that was passed
    /// to [`add`](#method.add) (or, with the parameters in the other order,
    /// to [`sub`](#method.sub)). The following are all equivalent, assuming
    /// that their safety preconditions are met:
    /// ```rust
    /// # #![feature(ptr_sub_ptr)]
    /// # unsafe fn blah(ptr: *const i32, origin: *const i32, count: usize) -> bool {
    /// ptr.sub_ptr(origin) == count
    /// # &&
    /// origin.add(count) == ptr
    /// # &&
    /// ptr.sub(count) == origin
    /// # }
    /// ```
    ///
    /// # Safety
    ///
    /// - The distance between the pointers must be non-negative (`self >= origin`)
    ///
    /// - *All* the safety conditions of [`offset_from`](#method.offset_from)
    ///   apply to this method as well; see it for the full details.
    ///
    /// Importantly, despite the return type of this method being able to represent
    /// a larger offset, it's still *not permitted* to pass pointers which differ
    /// by more than `isize::MAX` *bytes*. As such, the result of this method will
    /// always be less than or equal to `isize::MAX as usize`.
    ///
    /// # Panics
    ///
    /// This function panics if `T` is a Zero-Sized Type ("ZST").
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(ptr_sub_ptr)]
    ///
    /// let a = [0; 5];
    /// let ptr1: *const i32 = &a[1];
    /// let ptr2: *const i32 = &a[3];
    /// unsafe {
    ///     assert_eq!(ptr2.sub_ptr(ptr1), 2);
    ///     assert_eq!(ptr1.add(2), ptr2);
    ///     assert_eq!(ptr2.sub(2), ptr1);
    ///     assert_eq!(ptr2.sub_ptr(ptr2), 0);
    /// }
    ///
    /// // This would be incorrect, as the pointers are not correctly ordered:
    /// // ptr1.sub_ptr(ptr2)
    /// ```
    #[unstable(feature = "ptr_sub_ptr", issue = "95892")]
    #[rustc_const_unstable(feature = "const_ptr_sub_ptr", issue = "95892")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn sub_ptr(self, origin: *const T) -> usize
    where
        T: Sized,
    {
        let this = self;
        // SAFETY: The comparison has no side-effects, and the intrinsic
        // does this check internally in the CTFE implementation.
        unsafe {
            assert_unsafe_precondition!(
                "ptr::sub_ptr requires `this >= origin`",
                [T](this: *const T, origin: *const T) => this >= origin
            )
        };

        let pointee_size = mem::size_of::<T>();
        // Same ZST / oversized-pointee rejection as in `offset_from`.
        assert!(0 < pointee_size && pointee_size <= isize::MAX as usize);
        // SAFETY: the caller must uphold the safety contract for `ptr_offset_from_unsigned`.
        unsafe { intrinsics::ptr_offset_from_unsigned(self, origin) }
    }
775 | ||
f035d41b XL |
776 | /// Returns whether two pointers are guaranteed to be equal. |
777 | /// | |
f2b60f7d | 778 | /// At runtime this function behaves like `Some(self == other)`. |
f035d41b XL |
779 | /// However, in some contexts (e.g., compile-time evaluation), |
780 | /// it is not always possible to determine equality of two pointers, so this function may | |
f2b60f7d FG |
781 | /// spuriously return `None` for pointers that later actually turn out to have its equality known. |
782 | /// But when it returns `Some`, the pointers' equality is guaranteed to be known. | |
f035d41b | 783 | /// |
f2b60f7d FG |
784 | /// The return value may change from `Some` to `None` and vice versa depending on the compiler |
785 | /// version and unsafe code must not | |
f035d41b | 786 | /// rely on the result of this function for soundness. It is suggested to only use this function |
f2b60f7d | 787 | /// for performance optimizations where spurious `None` return values by this function do not |
f035d41b XL |
788 | /// affect the outcome, but just the performance. |
789 | /// The consequences of using this method to make runtime and compile-time code behave | |
790 | /// differently have not been explored. This method should not be used to introduce such | |
791 | /// differences, and it should also not be stabilized before we have a better understanding | |
792 | /// of this issue. | |
793 | #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")] | |
794 | #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")] | |
795 | #[inline] | |
f2b60f7d | 796 | pub const fn guaranteed_eq(self, other: *const T) -> Option<bool> |
f035d41b XL |
797 | where |
798 | T: Sized, | |
799 | { | |
f2b60f7d FG |
800 | match intrinsics::ptr_guaranteed_cmp(self as _, other as _) { |
801 | 2 => None, | |
802 | other => Some(other == 1), | |
803 | } | |
f035d41b XL |
804 | } |
805 | ||
f2b60f7d | 806 | /// Returns whether two pointers are guaranteed to be inequal. |
f035d41b | 807 | /// |
2b03887a | 808 | /// At runtime this function behaves like `Some(self != other)`. |
f035d41b | 809 | /// However, in some contexts (e.g., compile-time evaluation), |
f2b60f7d FG |
810 | /// it is not always possible to determine inequality of two pointers, so this function may |
811 | /// spuriously return `None` for pointers that later actually turn out to have its inequality known. | |
812 | /// But when it returns `Some`, the pointers' inequality is guaranteed to be known. | |
f035d41b | 813 | /// |
f2b60f7d FG |
814 | /// The return value may change from `Some` to `None` and vice versa depending on the compiler |
815 | /// version and unsafe code must not | |
f035d41b | 816 | /// rely on the result of this function for soundness. It is suggested to only use this function |
f2b60f7d | 817 | /// for performance optimizations where spurious `None` return values by this function do not |
f035d41b XL |
818 | /// affect the outcome, but just the performance. |
819 | /// The consequences of using this method to make runtime and compile-time code behave | |
820 | /// differently have not been explored. This method should not be used to introduce such | |
821 | /// differences, and it should also not be stabilized before we have a better understanding | |
822 | /// of this issue. | |
823 | #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")] | |
824 | #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")] | |
825 | #[inline] | |
f2b60f7d | 826 | pub const fn guaranteed_ne(self, other: *const T) -> Option<bool> |
f035d41b XL |
827 | where |
828 | T: Sized, | |
829 | { | |
f2b60f7d FG |
830 | match self.guaranteed_eq(other) { |
831 | None => None, | |
832 | Some(eq) => Some(!eq), | |
833 | } | |
dfeec247 XL |
834 | } |
835 | ||
    /// Calculates the offset from a pointer (convenience for `.offset(count as isize)`).
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined
    /// Behavior:
    ///
    /// * Both the starting and resulting pointer must be either in bounds or one
    ///   byte past the end of the same [allocated object].
    ///
    /// * The computed offset, **in bytes**, cannot overflow an `isize`.
    ///
    /// * The offset being in bounds cannot rely on "wrapping around" the address
    ///   space. That is, the infinite-precision sum must fit in a `usize`.
    ///
    /// The compiler and standard library generally tries to ensure allocations
    /// never reach a size where an offset is a concern. For instance, `Vec`
    /// and `Box` ensure they never allocate more than `isize::MAX` bytes, so
    /// `vec.as_ptr().add(vec.len())` is always safe.
    ///
    /// Most platforms fundamentally can't even construct such an allocation.
    /// For instance, no known 64-bit platform can ever serve a request
    /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
    /// However, some 32-bit and 16-bit platforms may successfully serve a request for
    /// more than `isize::MAX` bytes with things like Physical Address
    /// Extension. As such, memory acquired directly from allocators or memory
    /// mapped files *may* be too large to handle with this function.
    ///
    /// Consider using [`wrapping_add`] instead if these constraints are
    /// difficult to satisfy. The only advantage of this method is that it
    /// enables more aggressive compiler optimizations.
    ///
    /// [`wrapping_add`]: #method.wrapping_add
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let s: &str = "123";
    /// let ptr: *const u8 = s.as_ptr();
    ///
    /// unsafe {
    ///     println!("{}", *ptr.add(1) as char);
    ///     println!("{}", *ptr.add(2) as char);
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn add(self, count: usize) -> Self
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `offset`.
        // A `count` large enough to make `count as isize` wrap to a negative
        // value is already excluded by the "offset fits in an `isize`"
        // precondition above (for non-ZST `T`).
        unsafe { self.offset(count as isize) }
    }
899 | ||
923072b8 FG |
900 | /// Calculates the offset from a pointer in bytes (convenience for `.byte_offset(count as isize)`). |
901 | /// | |
902 | /// `count` is in units of bytes. | |
903 | /// | |
904 | /// This is purely a convenience for casting to a `u8` pointer and | |
905 | /// using [add][pointer::add] on it. See that method for documentation | |
906 | /// and safety requirements. | |
907 | /// | |
908 | /// For non-`Sized` pointees this operation changes only the data pointer, | |
909 | /// leaving the metadata untouched. | |
910 | #[must_use] | |
911 | #[inline(always)] | |
912 | #[unstable(feature = "pointer_byte_offsets", issue = "96283")] | |
913 | #[rustc_const_unstable(feature = "const_pointer_byte_offsets", issue = "96283")] | |
064997fb | 914 | #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces |
923072b8 FG |
915 | pub const unsafe fn byte_add(self, count: usize) -> Self { |
916 | // SAFETY: the caller must uphold the safety contract for `add`. | |
917 | let this = unsafe { self.cast::<u8>().add(count).cast::<()>() }; | |
918 | from_raw_parts::<T>(this, metadata(self)) | |
919 | } | |
920 | ||
    /// Calculates the offset from a pointer (convenience for
    /// `.offset((count as isize).wrapping_neg())`).
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined
    /// Behavior:
    ///
    /// * Both the starting and resulting pointer must be either in bounds or one
    ///   byte past the end of the same [allocated object].
    ///
    /// * The computed offset cannot exceed `isize::MAX` **bytes**.
    ///
    /// * The offset being in bounds cannot rely on "wrapping around" the address
    ///   space. That is, the infinite-precision sum must fit in a usize.
    ///
    /// The compiler and standard library generally tries to ensure allocations
    /// never reach a size where an offset is a concern. For instance, `Vec`
    /// and `Box` ensure they never allocate more than `isize::MAX` bytes, so
    /// `vec.as_ptr().add(vec.len()).sub(vec.len())` is always safe.
    ///
    /// Most platforms fundamentally can't even construct such an allocation.
    /// For instance, no known 64-bit platform can ever serve a request
    /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
    /// However, some 32-bit and 16-bit platforms may successfully serve a request for
    /// more than `isize::MAX` bytes with things like Physical Address
    /// Extension. As such, memory acquired directly from allocators or memory
    /// mapped files *may* be too large to handle with this function.
    ///
    /// Consider using [`wrapping_sub`] instead if these constraints are
    /// difficult to satisfy. The only advantage of this method is that it
    /// enables more aggressive compiler optimizations.
    ///
    /// [`wrapping_sub`]: #method.wrapping_sub
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let s: &str = "123";
    ///
    /// unsafe {
    ///     let end: *const u8 = s.as_ptr().add(3);
    ///     println!("{}", *end.sub(1) as char);
    ///     println!("{}", *end.sub(2) as char);
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn sub(self, count: usize) -> Self
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `offset`.
        // `wrapping_neg` (rather than plain negation) avoids a debug-mode
        // overflow panic when `count as isize` is `isize::MIN`; such a `count`
        // already violates the safety contract, so its wrapped value is moot.
        unsafe { self.offset((count as isize).wrapping_neg()) }
    }
985 | ||
    /// Calculates the offset from a pointer in bytes (convenience for
    /// `.byte_offset((count as isize).wrapping_neg())`).
    ///
    /// `count` is in units of bytes.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [sub][pointer::sub] on it. See that method for documentation
    /// and safety requirements.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
    #[must_use]
    #[inline(always)]
    #[unstable(feature = "pointer_byte_offsets", issue = "96283")]
    #[rustc_const_unstable(feature = "const_pointer_byte_offsets", issue = "96283")]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn byte_sub(self, count: usize) -> Self {
        // SAFETY: the caller must uphold the safety contract for `sub`.
        let this = unsafe { self.cast::<u8>().sub(count).cast::<()>() };
        // Only the address part moves; the original metadata is reattached.
        from_raw_parts::<T>(this, metadata(self))
    }
1007 | ||
    /// Calculates the offset from a pointer using wrapping arithmetic.
    /// (convenience for `.wrapping_offset(count as isize)`)
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// This operation itself is always safe, but using the resulting pointer is not.
    ///
    /// The resulting pointer "remembers" the [allocated object] that `self` points to; it must not
    /// be used to read or write other allocated objects.
    ///
    /// In other words, `let z = x.wrapping_add((y as usize) - (x as usize))` does *not* make `z`
    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
    /// `x` and `y` point into the same allocated object.
    ///
    /// Compared to [`add`], this method basically delays the requirement of staying within the
    /// same allocated object: [`add`] is immediate Undefined Behavior when crossing object
    /// boundaries; `wrapping_add` produces a pointer but still leads to Undefined Behavior if a
    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`add`]
    /// can be optimized better and is thus preferable in performance-sensitive code.
    ///
    /// The delayed check only considers the value of the pointer that was dereferenced, not the
    /// intermediate values used during the computation of the final result. For example,
    /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
    /// allocated object and then re-entering it later is permitted.
    ///
    /// [`add`]: #method.add
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// // Iterate using a raw pointer in increments of two elements
    /// let data = [1u8, 2, 3, 4, 5];
    /// let mut ptr: *const u8 = data.as_ptr();
    /// let step = 2;
    /// let end_rounded_up = ptr.wrapping_add(6);
    ///
    /// // This loop prints "1, 3, 5, "
    /// while ptr != end_rounded_up {
    ///     unsafe {
    ///         print!("{}, ", *ptr);
    ///     }
    ///     ptr = ptr.wrapping_add(step);
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    pub const fn wrapping_add(self, count: usize) -> Self
    where
        T: Sized,
    {
        // `count as isize` may reinterpret very large counts as negative; that
        // is acceptable here because the arithmetic is defined to wrap anyway.
        self.wrapping_offset(count as isize)
    }
1069 | ||
923072b8 FG |
1070 | /// Calculates the offset from a pointer in bytes using wrapping arithmetic. |
1071 | /// (convenience for `.wrapping_byte_offset(count as isize)`) | |
1072 | /// | |
1073 | /// `count` is in units of bytes. | |
1074 | /// | |
1075 | /// This is purely a convenience for casting to a `u8` pointer and | |
1076 | /// using [wrapping_add][pointer::wrapping_add] on it. See that method for documentation. | |
1077 | /// | |
1078 | /// For non-`Sized` pointees this operation changes only the data pointer, | |
1079 | /// leaving the metadata untouched. | |
1080 | #[must_use] | |
1081 | #[inline(always)] | |
1082 | #[unstable(feature = "pointer_byte_offsets", issue = "96283")] | |
1083 | #[rustc_const_unstable(feature = "const_pointer_byte_offsets", issue = "96283")] | |
1084 | pub const fn wrapping_byte_add(self, count: usize) -> Self { | |
1085 | from_raw_parts::<T>(self.cast::<u8>().wrapping_add(count).cast::<()>(), metadata(self)) | |
1086 | } | |
1087 | ||
    /// Calculates the offset from a pointer using wrapping arithmetic.
    /// (convenience for `.wrapping_offset((count as isize).wrapping_neg())`)
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// This operation itself is always safe, but using the resulting pointer is not.
    ///
    /// The resulting pointer "remembers" the [allocated object] that `self` points to; it must not
    /// be used to read or write other allocated objects.
    ///
    /// In other words, `let z = x.wrapping_sub((x as usize) - (y as usize))` does *not* make `z`
    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
    /// `x` and `y` point into the same allocated object.
    ///
    /// Compared to [`sub`], this method basically delays the requirement of staying within the
    /// same allocated object: [`sub`] is immediate Undefined Behavior when crossing object
    /// boundaries; `wrapping_sub` produces a pointer but still leads to Undefined Behavior if a
    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`sub`]
    /// can be optimized better and is thus preferable in performance-sensitive code.
    ///
    /// The delayed check only considers the value of the pointer that was dereferenced, not the
    /// intermediate values used during the computation of the final result. For example,
    /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
    /// allocated object and then re-entering it later is permitted.
    ///
    /// [`sub`]: #method.sub
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// // Iterate using a raw pointer in increments of two elements (backwards)
    /// let data = [1u8, 2, 3, 4, 5];
    /// let mut ptr: *const u8 = data.as_ptr();
    /// let start_rounded_down = ptr.wrapping_sub(2);
    /// ptr = ptr.wrapping_add(4);
    /// let step = 2;
    /// // This loop prints "5, 3, 1, "
    /// while ptr != start_rounded_down {
    ///     unsafe {
    ///         print!("{}, ", *ptr);
    ///     }
    ///     ptr = ptr.wrapping_sub(step);
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline]
    pub const fn wrapping_sub(self, count: usize) -> Self
    where
        T: Sized,
    {
        // `wrapping_neg` keeps this well-defined even when `count as isize`
        // reinterprets as `isize::MIN`; the whole computation wraps by design.
        self.wrapping_offset((count as isize).wrapping_neg())
    }
1149 | ||
    /// Calculates the offset from a pointer in bytes using wrapping arithmetic.
    /// (convenience for `.wrapping_byte_offset((count as isize).wrapping_neg())`)
    ///
    /// `count` is in units of bytes.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [wrapping_sub][pointer::wrapping_sub] on it. See that method for documentation.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
    #[must_use]
    #[inline(always)]
    #[unstable(feature = "pointer_byte_offsets", issue = "96283")]
    #[rustc_const_unstable(feature = "const_pointer_byte_offsets", issue = "96283")]
    pub const fn wrapping_byte_sub(self, count: usize) -> Self {
        // Wrapping byte arithmetic on the data pointer; metadata is carried over.
        from_raw_parts::<T>(self.cast::<u8>().wrapping_sub(count).cast::<()>(), metadata(self))
    }
1167 | ||
    /// Reads the value from `self` without moving it. This leaves the
    /// memory in `self` unchanged.
    ///
    /// See [`ptr::read`] for safety concerns and examples.
    ///
    /// [`ptr::read`]: crate::ptr::read()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_unstable(feature = "const_ptr_read", issue = "80377")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn read(self) -> T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `read`.
        // Thin delegation to the free function (in scope via `use super::*`).
        unsafe { read(self) }
    }
1185 | ||
    /// Performs a volatile read of the value from `self` without moving it. This
    /// leaves the memory in `self` unchanged.
    ///
    /// Volatile operations are intended to act on I/O memory, and are guaranteed
    /// to not be elided or reordered by the compiler across other volatile
    /// operations.
    ///
    /// See [`ptr::read_volatile`] for safety concerns and examples.
    ///
    /// [`ptr::read_volatile`]: crate::ptr::read_volatile()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    // Note: not `const` — volatile accesses are inherently a runtime concept.
    pub unsafe fn read_volatile(self) -> T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `read_volatile`.
        unsafe { read_volatile(self) }
    }
1206 | ||
    /// Reads the value from `self` without moving it. This leaves the
    /// memory in `self` unchanged.
    ///
    /// Unlike `read`, the pointer may be unaligned.
    ///
    /// See [`ptr::read_unaligned`] for safety concerns and examples.
    ///
    /// [`ptr::read_unaligned`]: crate::ptr::read_unaligned()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_unstable(feature = "const_ptr_read", issue = "80377")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn read_unaligned(self) -> T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `read_unaligned`.
        // Thin delegation to the free function (in scope via `use super::*`).
        unsafe { read_unaligned(self) }
    }
1226 | ||
    /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
    /// and destination may overlap.
    ///
    /// NOTE: this has the *same* argument order as [`ptr::copy`].
    ///
    /// See [`ptr::copy`] for safety concerns and examples.
    ///
    /// [`ptr::copy`]: crate::ptr::copy()
    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.63.0")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn copy_to(self, dest: *mut T, count: usize)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `copy`.
        // Thin delegation to the free function (in scope via `use super::*`).
        unsafe { copy(self, dest, count) }
    }
1246 | ||
    /// Copies `count * size_of<T>` bytes from `self` to `dest`. The source
    /// and destination may *not* overlap.
    ///
    /// NOTE: this has the *same* argument order as [`ptr::copy_nonoverlapping`].
    ///
    /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
    ///
    /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.63.0")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn copy_to_nonoverlapping(self, dest: *mut T, count: usize)
    where
        T: Sized,
    {
        // Thin wrapper around the free function; `self` is the *source*.
        // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
        unsafe { copy_nonoverlapping(self, dest, count) }
    }
1266 | ||
    /// Computes the offset that needs to be applied to the pointer in order to make it aligned to
    /// `align`.
    ///
    /// If it is not possible to align the pointer, the implementation returns
    /// `usize::MAX`. It is permissible for the implementation to *always*
    /// return `usize::MAX`. Only your algorithm's performance can depend
    /// on getting a usable offset here, not its correctness.
    ///
    /// The offset is expressed in number of `T` elements, and not bytes. The value returned can be
    /// used with the `wrapping_add` method.
    ///
    /// There are no guarantees whatsoever that offsetting the pointer will not overflow or go
    /// beyond the allocation that the pointer points into. It is up to the caller to ensure that
    /// the returned offset is correct in all terms other than alignment.
    ///
    /// # Panics
    ///
    /// The function panics if `align` is not a power-of-two.
    ///
    /// # Examples
    ///
    /// Accessing adjacent `u8` as `u16`
    ///
    /// ```
    /// use std::mem::align_of;
    ///
    /// # unsafe {
    /// let x = [5_u8, 6, 7, 8, 9];
    /// let ptr = x.as_ptr();
    /// let offset = ptr.align_offset(align_of::<u16>());
    ///
    /// if offset < x.len() - 1 {
    ///     let u16_ptr = ptr.add(offset).cast::<u16>();
    ///     assert!(*u16_ptr == u16::from_ne_bytes([5, 6]) || *u16_ptr == u16::from_ne_bytes([6, 7]));
    /// } else {
    ///     // while the pointer can be aligned via `offset`, it would point
    ///     // outside the allocation
    /// }
    /// # }
    /// ```
    #[stable(feature = "align_offset", since = "1.36.0")]
    #[rustc_const_unstable(feature = "const_align_offset", issue = "90962")]
    pub const fn align_offset(self, align: usize) -> usize
    where
        T: Sized,
    {
        if !align.is_power_of_two() {
            panic!("align_offset: align is not a power-of-two");
        }

        // Runtime path: delegate to the real alignment computation.
        fn rt_impl<T>(p: *const T, align: usize) -> usize {
            // SAFETY: `align` has been checked to be a power of 2 above
            unsafe { align_offset(p, align) }
        }

        // Compile-time (CTFE) path: the absolute address of the allocation is
        // unknown during const evaluation, so "cannot be aligned" (`usize::MAX`)
        // is the only answer that is always correct.
        const fn ctfe_impl<T>(_: *const T, _: usize) -> usize {
            usize::MAX
        }

        // SAFETY:
        // It is permissible for `align_offset` to always return `usize::MAX`,
        // algorithm correctness can not depend on `align_offset` returning non-max values.
        //
        // As such the behaviour can't change after replacing `align_offset` with `usize::MAX`, only performance can.
        unsafe { intrinsics::const_eval_select((self, align), ctfe_impl, rt_impl) }
    }
923072b8 FG |
1333 | |
1334 | /// Returns whether the pointer is properly aligned for `T`. | |
1335 | #[must_use] | |
1336 | #[inline] | |
1337 | #[unstable(feature = "pointer_is_aligned", issue = "96284")] | |
1338 | pub fn is_aligned(self) -> bool | |
1339 | where | |
1340 | T: Sized, | |
1341 | { | |
1342 | self.is_aligned_to(core::mem::align_of::<T>()) | |
1343 | } | |
1344 | ||
1345 | /// Returns whether the pointer is aligned to `align`. | |
1346 | /// | |
1347 | /// For non-`Sized` pointees this operation considers only the data pointer, | |
1348 | /// ignoring the metadata. | |
1349 | /// | |
1350 | /// # Panics | |
1351 | /// | |
1352 | /// The function panics if `align` is not a power-of-two (this includes 0). | |
1353 | #[must_use] | |
1354 | #[inline] | |
1355 | #[unstable(feature = "pointer_is_aligned", issue = "96284")] | |
1356 | pub fn is_aligned_to(self, align: usize) -> bool { | |
1357 | if !align.is_power_of_two() { | |
1358 | panic!("is_aligned_to: align is not a power-of-two"); | |
1359 | } | |
1360 | ||
923072b8 | 1361 | // Cast is needed for `T: !Sized` |
f2b60f7d | 1362 | self.cast::<u8>().addr() & align - 1 == 0 |
923072b8 | 1363 | } |
dfeec247 XL |
1364 | } |
1365 | ||
ba9703b0 XL |
impl<T> *const [T] {
    /// Returns the length of a raw slice.
    ///
    /// The returned value is the number of **elements**, not the number of bytes.
    ///
    /// This function is safe, even when the raw slice cannot be cast to a slice
    /// reference because the pointer is null or unaligned.
    ///
    /// # Examples
    ///
    /// ```rust
    /// #![feature(slice_ptr_len)]
    ///
    /// use std::ptr;
    ///
    /// let slice: *const [i8] = ptr::slice_from_raw_parts(ptr::null(), 3);
    /// assert_eq!(slice.len(), 3);
    /// ```
    #[inline]
    #[unstable(feature = "slice_ptr_len", issue = "71146")]
    #[rustc_const_unstable(feature = "const_slice_ptr_len", issue = "71146")]
    pub const fn len(self) -> usize {
        // The element count is the pointer metadata of a slice pointer.
        metadata(self)
    }

    /// Returns a raw pointer to the slice's buffer.
    ///
    /// This is equivalent to casting `self` to `*const T`, but more type-safe.
    ///
    /// # Examples
    ///
    /// ```rust
    /// #![feature(slice_ptr_get)]
    /// use std::ptr;
    ///
    /// let slice: *const [i8] = ptr::slice_from_raw_parts(ptr::null(), 3);
    /// assert_eq!(slice.as_ptr(), ptr::null());
    /// ```
    #[inline]
    #[unstable(feature = "slice_ptr_get", issue = "74265")]
    #[rustc_const_unstable(feature = "slice_ptr_get", issue = "74265")]
    pub const fn as_ptr(self) -> *const T {
        // The cast keeps the data pointer and discards the length metadata.
        self as *const T
    }

    /// Returns a raw pointer to an element or subslice, without doing bounds
    /// checking.
    ///
    /// Calling this method with an out-of-bounds index or when `self` is not dereferenceable
    /// is *[undefined behavior]* even if the resulting pointer is not used.
    ///
    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(slice_ptr_get)]
    ///
    /// let x = &[1, 2, 4] as *const [i32];
    ///
    /// unsafe {
    ///     assert_eq!(x.get_unchecked(1), x.as_ptr().add(1));
    /// }
    /// ```
    #[unstable(feature = "slice_ptr_get", issue = "74265")]
    #[rustc_const_unstable(feature = "const_slice_index", issue = "none")]
    #[inline]
    pub const unsafe fn get_unchecked<I>(self, index: I) -> *const I::Output
    where
        I: ~const SliceIndex<[T]>,
    {
        // SAFETY: the caller ensures that `self` is dereferenceable and `index` in-bounds.
        unsafe { index.get_unchecked(self) }
    }

    /// Returns `None` if the pointer is null, or else returns a shared slice to
    /// the value wrapped in `Some`. In contrast to [`as_ref`], this does not require
    /// that the value has to be initialized.
    ///
    /// [`as_ref`]: #method.as_ref
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// all of the following is true:
    ///
    /// * The pointer must be [valid] for reads for `ptr.len() * mem::size_of::<T>()` many bytes,
    ///   and it must be properly aligned. This means in particular:
    ///
    ///     * The entire memory range of this slice must be contained within a single [allocated object]!
    ///       Slices can never span across multiple allocated objects.
    ///
    ///     * The pointer must be aligned even for zero-length slices. One
    ///       reason for this is that enum layout optimizations may rely on references
    ///       (including slices of any length) being aligned and non-null to distinguish
    ///       them from other data. You can obtain a pointer that is usable as `data`
    ///       for zero-length slices using [`NonNull::dangling()`].
    ///
    /// * The total size `ptr.len() * mem::size_of::<T>()` of the slice must be no larger than `isize::MAX`.
    ///   See the safety documentation of [`pointer::offset`].
    ///
    /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
    ///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
    ///   In particular, while this reference exists, the memory the pointer points to must
    ///   not get mutated (except inside `UnsafeCell`).
    ///
    /// This applies even if the result of this method is unused!
    ///
    /// See also [`slice::from_raw_parts`][].
    ///
    /// [valid]: crate::ptr#safety
    /// [allocated object]: crate::ptr#allocated-object
    #[inline]
    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
    #[rustc_const_unstable(feature = "const_ptr_as_ref", issue = "91822")]
    pub const unsafe fn as_uninit_slice<'a>(self) -> Option<&'a [MaybeUninit<T>]> {
        if self.is_null() {
            None
        } else {
            // SAFETY: the caller must uphold the safety contract for `as_uninit_slice`.
            Some(unsafe { slice::from_raw_parts(self as *const MaybeUninit<T>, self.len()) })
        }
    }
}
1490 | ||
dfeec247 XL |
// Equality for pointers
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> PartialEq for *const T {
    #[inline]
    fn eq(&self, other: &*const T) -> bool {
        // Built-in raw-pointer comparison on the dereferenced operands.
        *self == *other
    }
}
1499 | ||
// Pointer equality never produces "unequal to itself" values, so the
// `PartialEq` impl above can be promoted to a total equivalence (`Eq`).
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Eq for *const T {}
1502 | ||
1503 | // Comparison for pointers | |
1504 | #[stable(feature = "rust1", since = "1.0.0")] | |
1505 | impl<T: ?Sized> Ord for *const T { | |
1506 | #[inline] | |
1507 | fn cmp(&self, other: &*const T) -> Ordering { | |
1508 | if self < other { | |
1509 | Less | |
1510 | } else if self == other { | |
1511 | Equal | |
1512 | } else { | |
1513 | Greater | |
1514 | } | |
1515 | } | |
1516 | } | |
1517 | ||
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> PartialOrd for *const T {
    #[inline]
    fn partial_cmp(&self, other: &*const T) -> Option<Ordering> {
        // Pointer comparison is total, so this is always `Some`.
        Some(self.cmp(other))
    }

    // The four operator methods below are overridden to use the built-in
    // pointer comparisons directly instead of the default implementations
    // (which would go through `partial_cmp`).

    #[inline]
    fn lt(&self, other: &*const T) -> bool {
        *self < *other
    }

    #[inline]
    fn le(&self, other: &*const T) -> bool {
        *self <= *other
    }

    #[inline]
    fn gt(&self, other: &*const T) -> bool {
        *self > *other
    }

    #[inline]
    fn ge(&self, other: &*const T) -> bool {
        *self >= *other
    }
}