]>
Commit | Line | Data |
---|---|---|
dfeec247 XL |
1 | use super::*; |
2 | use crate::cmp::Ordering::{self, Equal, Greater, Less}; | |
3 | use crate::intrinsics; | |
4 | ||
5 | // ignore-tidy-undocumented-unsafe | |
6 | ||
7 | #[lang = "mut_ptr"] | |
8 | impl<T: ?Sized> *mut T { | |
9 | /// Returns `true` if the pointer is null. | |
10 | /// | |
11 | /// Note that unsized types have many possible null pointers, as only the | |
12 | /// raw data pointer is considered, not their length, vtable, etc. | |
13 | /// Therefore, two pointers that are null may still not compare equal to | |
14 | /// each other. | |
15 | /// | |
16 | /// # Examples | |
17 | /// | |
18 | /// Basic usage: | |
19 | /// | |
20 | /// ``` | |
21 | /// let mut s = [1, 2, 3]; | |
22 | /// let ptr: *mut u32 = s.as_mut_ptr(); | |
23 | /// assert!(!ptr.is_null()); | |
24 | /// ``` | |
25 | #[stable(feature = "rust1", since = "1.0.0")] | |
26 | #[inline] | |
27 | pub fn is_null(self) -> bool { | |
28 | // Compare via a cast to a thin pointer, so fat pointers are only | |
29 | // considering their "data" part for null-ness. | |
30 | (self as *mut u8) == null_mut() | |
31 | } | |
32 | ||
33 | /// Casts to a pointer of another type. | |
34 | #[stable(feature = "ptr_cast", since = "1.38.0")] | |
35 | #[rustc_const_stable(feature = "const_ptr_cast", since = "1.38.0")] | |
36 | #[inline] | |
37 | pub const fn cast<U>(self) -> *mut U { | |
38 | self as _ | |
39 | } | |
40 | ||
    /// Returns `None` if the pointer is null, or else returns a reference to
    /// the value wrapped in `Some`.
    ///
    /// # Safety
    ///
    /// While this method and its mutable counterpart are useful for
    /// null-safety, it is important to note that this is still an unsafe
    /// operation because the returned value could be pointing to invalid
    /// memory.
    ///
    /// When calling this method, you have to ensure that if the pointer is
    /// non-NULL, then it is properly aligned, dereferenceable (for the whole
    /// size of `T`) and points to an initialized instance of `T`. This applies
    /// even if the result of this method is unused!
    /// (The part about being initialized is not yet fully decided, but until
    /// it is, the only safe approach is to ensure that they are indeed initialized.)
    ///
    /// Additionally, the lifetime `'a` returned is arbitrarily chosen and does
    /// not necessarily reflect the actual lifetime of the data. It is up to the
    /// caller to ensure that for the duration of this lifetime, the memory this
    /// pointer points to does not get written to outside of `UnsafeCell<U>`.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
    ///
    /// unsafe {
    ///     if let Some(val_back) = ptr.as_ref() {
    ///         println!("We got back the value: {}!", val_back);
    ///     }
    /// }
    /// ```
    ///
    /// # Null-unchecked version
    ///
    /// If you are sure the pointer can never be null and are looking for some kind of
    /// `as_ref_unchecked` that returns the `&T` instead of `Option<&T>`, know that you can
    /// dereference the pointer directly.
    ///
    /// ```
    /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
    ///
    /// unsafe {
    ///     let val_back = &*ptr;
    ///     println!("We got back the value: {}!", val_back);
    /// }
    /// ```
    #[stable(feature = "ptr_as_ref", since = "1.9.0")]
    #[inline]
    pub unsafe fn as_ref<'a>(self) -> Option<&'a T> {
        // The caller guarantees (see Safety above) that a non-null pointer is
        // valid to dereference, so the only runtime check needed is for null.
        if self.is_null() { None } else { Some(&*self) }
    }
96 | ||
    /// Calculates the offset from a pointer.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined
    /// Behavior:
    ///
    /// * Both the starting and resulting pointer must be either in bounds or one
    ///   byte past the end of the same allocated object. Note that in Rust,
    ///   every (stack-allocated) variable is considered a separate allocated object.
    ///
    /// * The computed offset, **in bytes**, cannot overflow an `isize`.
    ///
    /// * The offset being in bounds cannot rely on "wrapping around" the address
    ///   space. That is, the infinite-precision sum, **in bytes**, must fit in a `usize`.
    ///
    /// The compiler and standard library generally tries to ensure allocations
    /// never reach a size where an offset is a concern. For instance, `Vec`
    /// and `Box` ensure they never allocate more than `isize::MAX` bytes, so
    /// `vec.as_ptr().add(vec.len())` is always safe.
    ///
    /// Most platforms fundamentally can't even construct such an allocation.
    /// For instance, no known 64-bit platform can ever serve a request
    /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
    /// However, some 32-bit and 16-bit platforms may successfully serve a request for
    /// more than `isize::MAX` bytes with things like Physical Address
    /// Extension. As such, memory acquired directly from allocators or memory
    /// mapped files *may* be too large to handle with this function.
    ///
    /// Consider using [`wrapping_offset`] instead if these constraints are
    /// difficult to satisfy. The only advantage of this method is that it
    /// enables more aggressive compiler optimizations.
    ///
    /// [`wrapping_offset`]: #method.wrapping_offset
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let mut s = [1, 2, 3];
    /// let ptr: *mut u32 = s.as_mut_ptr();
    ///
    /// unsafe {
    ///     println!("{}", *ptr.offset(1));
    ///     println!("{}", *ptr.offset(2));
    /// }
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[inline]
    pub unsafe fn offset(self, count: isize) -> *mut T
    where
        T: Sized,
    {
        // The in-bounds requirements above are exactly the contract of the
        // `offset` intrinsic; it yields `*const T`, so cast back to `*mut T`.
        intrinsics::offset(self, count) as *mut T
    }
156 | ||
    /// Calculates the offset from a pointer using wrapping arithmetic.
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// The resulting pointer does not need to be in bounds, but it is
    /// potentially hazardous to dereference (which requires `unsafe`).
    ///
    /// In particular, the resulting pointer remains attached to the same allocated
    /// object that `self` points to. It may *not* be used to access a
    /// different allocated object. Note that in Rust,
    /// every (stack-allocated) variable is considered a separate allocated object.
    ///
    /// In other words, `x.wrapping_offset(y.wrapping_offset_from(x))` is
    /// *not* the same as `y`, and dereferencing it is undefined behavior
    /// unless `x` and `y` point into the same allocated object.
    ///
    /// Compared to [`offset`], this method basically delays the requirement of staying
    /// within the same allocated object: [`offset`] is immediate Undefined Behavior when
    /// crossing object boundaries; `wrapping_offset` produces a pointer but still leads
    /// to Undefined Behavior if that pointer is dereferenced. [`offset`] can be optimized
    /// better and is thus preferable in performance-sensitive code.
    ///
    /// If you need to cross object boundaries, cast the pointer to an integer and
    /// do the arithmetic there.
    ///
    /// [`offset`]: #method.offset
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// // Iterate using a raw pointer in increments of two elements
    /// let mut data = [1u8, 2, 3, 4, 5];
    /// let mut ptr: *mut u8 = data.as_mut_ptr();
    /// let step = 2;
    /// let end_rounded_up = ptr.wrapping_offset(6);
    ///
    /// while ptr != end_rounded_up {
    ///     unsafe {
    ///         *ptr = 0;
    ///     }
    ///     ptr = ptr.wrapping_offset(step);
    /// }
    /// assert_eq!(&data, &[0, 2, 0, 4, 0]);
    /// ```
    #[stable(feature = "ptr_wrapping_offset", since = "1.16.0")]
    #[inline]
    pub fn wrapping_offset(self, count: isize) -> *mut T
    where
        T: Sized,
    {
        // SAFETY: unlike `offset`, the `arith_offset` intrinsic has no
        // in-bounds requirement — it wraps like integer arithmetic — which is
        // exactly the contract this safe method documents above.
        unsafe { intrinsics::arith_offset(self, count) as *mut T }
    }
213 | ||
    /// Returns `None` if the pointer is null, or else returns a mutable
    /// reference to the value wrapped in `Some`.
    ///
    /// # Safety
    ///
    /// As with [`as_ref`], this is unsafe because it cannot verify the validity
    /// of the returned pointer, nor can it ensure that the lifetime `'a`
    /// returned is indeed a valid lifetime for the contained data.
    ///
    /// When calling this method, you have to ensure that *either* the pointer is NULL *or*
    /// all of the following is true:
    /// - it is properly aligned
    /// - it must point to an initialized instance of T; in particular, the pointer must be
    ///   "dereferenceable" in the sense defined [here].
    ///
    /// This applies even if the result of this method is unused!
    /// (The part about being initialized is not yet fully decided, but until
    /// it is, the only safe approach is to ensure that they are indeed initialized.)
    ///
    /// Additionally, the lifetime `'a` returned is arbitrarily chosen and does
    /// not necessarily reflect the actual lifetime of the data. *You* must enforce
    /// Rust's aliasing rules. In particular, for the duration of this lifetime,
    /// the memory this pointer points to must not get accessed (read or written)
    /// through any other pointer.
    ///
    /// [here]: crate::ptr#safety
    /// [`as_ref`]: #method.as_ref
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let mut s = [1, 2, 3];
    /// let ptr: *mut u32 = s.as_mut_ptr();
    /// let first_value = unsafe { ptr.as_mut().unwrap() };
    /// *first_value = 4;
    /// println!("{:?}", s); // It'll print: "[4, 2, 3]".
    /// ```
    ///
    /// # Null-unchecked version
    ///
    /// If you are sure the pointer can never be null and are looking for some kind of
    /// `as_mut_unchecked` that returns the `&mut T` instead of `Option<&mut T>`, know that
    /// you can dereference the pointer directly.
    ///
    /// ```
    /// let mut s = [1, 2, 3];
    /// let ptr: *mut u32 = s.as_mut_ptr();
    /// let first_value = unsafe { &mut *ptr };
    /// *first_value = 4;
    /// println!("{:?}", s); // It'll print: "[4, 2, 3]".
    /// ```
    #[stable(feature = "ptr_as_ref", since = "1.9.0")]
    #[inline]
    pub unsafe fn as_mut<'a>(self) -> Option<&'a mut T> {
        // Mirror of `as_ref`: the caller guarantees a non-null pointer is
        // valid and uniquely accessible, so only the null check is done here.
        if self.is_null() { None } else { Some(&mut *self) }
    }
272 | ||
    /// Calculates the distance between two pointers. The returned value is in
    /// units of T: the distance in bytes is divided by `mem::size_of::<T>()`.
    ///
    /// This function is the inverse of [`offset`].
    ///
    /// [`offset`]: #method.offset-1
    /// [`wrapping_offset_from`]: #method.wrapping_offset_from-1
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined
    /// Behavior:
    ///
    /// * Both the starting and other pointer must be either in bounds or one
    ///   byte past the end of the same allocated object. Note that in Rust,
    ///   every (stack-allocated) variable is considered a separate allocated object.
    ///
    /// * The distance between the pointers, **in bytes**, cannot overflow an `isize`.
    ///
    /// * The distance between the pointers, in bytes, must be an exact multiple
    ///   of the size of `T`.
    ///
    /// * The distance being in bounds cannot rely on "wrapping around" the address space.
    ///
    /// The compiler and standard library generally try to ensure allocations
    /// never reach a size where an offset is a concern. For instance, `Vec`
    /// and `Box` ensure they never allocate more than `isize::MAX` bytes, so
    /// `ptr_into_vec.offset_from(vec.as_ptr())` is always safe.
    ///
    /// Most platforms fundamentally can't even construct such an allocation.
    /// For instance, no known 64-bit platform can ever serve a request
    /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
    /// However, some 32-bit and 16-bit platforms may successfully serve a request for
    /// more than `isize::MAX` bytes with things like Physical Address
    /// Extension. As such, memory acquired directly from allocators or memory
    /// mapped files *may* be too large to handle with this function.
    ///
    /// Consider using [`wrapping_offset_from`] instead if these constraints are
    /// difficult to satisfy. The only advantage of this method is that it
    /// enables more aggressive compiler optimizations.
    ///
    /// # Panics
    ///
    /// This function panics if `T` is a Zero-Sized Type ("ZST").
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// #![feature(ptr_offset_from)]
    ///
    /// let mut a = [0; 5];
    /// let ptr1: *mut i32 = &mut a[1];
    /// let ptr2: *mut i32 = &mut a[3];
    /// unsafe {
    ///     assert_eq!(ptr2.offset_from(ptr1), 2);
    ///     assert_eq!(ptr1.offset_from(ptr2), -2);
    ///     assert_eq!(ptr1.offset(2), ptr2);
    ///     assert_eq!(ptr2.offset(-2), ptr1);
    /// }
    /// ```
    #[unstable(feature = "ptr_offset_from", issue = "41079")]
    #[rustc_const_unstable(feature = "const_ptr_offset_from", issue = "41079")]
    #[inline]
    pub const unsafe fn offset_from(self, origin: *const T) -> isize
    where
        T: Sized,
    {
        // Mutability is irrelevant to the distance computation, so delegate
        // to the `*const T` implementation of `offset_from`.
        (self as *const T).offset_from(origin)
    }
344 | ||
    /// Calculates the distance between two pointers. The returned value is in
    /// units of T: the distance in bytes is divided by `mem::size_of::<T>()`.
    ///
    /// If the address difference between the two pointers is not a multiple of
    /// `mem::size_of::<T>()` then the result of the division is rounded towards
    /// zero.
    ///
    /// Though this method is safe for any two pointers, note that its result
    /// will be mostly useless if the two pointers aren't into the same allocated
    /// object, for example if they point to two different local variables.
    ///
    /// # Panics
    ///
    /// This function panics if `T` is a zero-sized type.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// #![feature(ptr_wrapping_offset_from)]
    ///
    /// let mut a = [0; 5];
    /// let ptr1: *mut i32 = &mut a[1];
    /// let ptr2: *mut i32 = &mut a[3];
    /// assert_eq!(ptr2.wrapping_offset_from(ptr1), 2);
    /// assert_eq!(ptr1.wrapping_offset_from(ptr2), -2);
    /// assert_eq!(ptr1.wrapping_offset(2), ptr2);
    /// assert_eq!(ptr2.wrapping_offset(-2), ptr1);
    ///
    /// let ptr1: *mut i32 = 3 as _;
    /// let ptr2: *mut i32 = 13 as _;
    /// assert_eq!(ptr2.wrapping_offset_from(ptr1), 2);
    /// ```
    #[unstable(feature = "ptr_wrapping_offset_from", issue = "41079")]
    #[inline]
    pub fn wrapping_offset_from(self, origin: *const T) -> isize
    where
        T: Sized,
    {
        // Mutability is irrelevant here; delegate to the `*const T` version.
        (self as *const T).wrapping_offset_from(origin)
    }
387 | ||
    /// Calculates the offset from a pointer (convenience for `.offset(count as isize)`).
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined
    /// Behavior:
    ///
    /// * Both the starting and resulting pointer must be either in bounds or one
    ///   byte past the end of the same allocated object. Note that in Rust,
    ///   every (stack-allocated) variable is considered a separate allocated object.
    ///
    /// * The computed offset, **in bytes**, cannot overflow an `isize`.
    ///
    /// * The offset being in bounds cannot rely on "wrapping around" the address
    ///   space. That is, the infinite-precision sum must fit in a `usize`.
    ///
    /// The compiler and standard library generally tries to ensure allocations
    /// never reach a size where an offset is a concern. For instance, `Vec`
    /// and `Box` ensure they never allocate more than `isize::MAX` bytes, so
    /// `vec.as_ptr().add(vec.len())` is always safe.
    ///
    /// Most platforms fundamentally can't even construct such an allocation.
    /// For instance, no known 64-bit platform can ever serve a request
    /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
    /// However, some 32-bit and 16-bit platforms may successfully serve a request for
    /// more than `isize::MAX` bytes with things like Physical Address
    /// Extension. As such, memory acquired directly from allocators or memory
    /// mapped files *may* be too large to handle with this function.
    ///
    /// Consider using [`wrapping_add`] instead if these constraints are
    /// difficult to satisfy. The only advantage of this method is that it
    /// enables more aggressive compiler optimizations.
    ///
    /// [`wrapping_add`]: #method.wrapping_add
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let s: &str = "123";
    /// let ptr: *const u8 = s.as_ptr();
    ///
    /// unsafe {
    ///     println!("{}", *ptr.add(1) as char);
    ///     println!("{}", *ptr.add(2) as char);
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    pub unsafe fn add(self, count: usize) -> Self
    where
        T: Sized,
    {
        // Non-negative convenience form of `offset`; the Safety contract
        // above matches `offset`'s, restricting `count` to a forward step.
        self.offset(count as isize)
    }
447 | ||
    /// Calculates the offset from a pointer (convenience for
    /// `.offset((count as isize).wrapping_neg())`).
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined
    /// Behavior:
    ///
    /// * Both the starting and resulting pointer must be either in bounds or one
    ///   byte past the end of the same allocated object. Note that in Rust,
    ///   every (stack-allocated) variable is considered a separate allocated object.
    ///
    /// * The computed offset cannot exceed `isize::MAX` **bytes**.
    ///
    /// * The offset being in bounds cannot rely on "wrapping around" the address
    ///   space. That is, the infinite-precision sum must fit in a usize.
    ///
    /// The compiler and standard library generally tries to ensure allocations
    /// never reach a size where an offset is a concern. For instance, `Vec`
    /// and `Box` ensure they never allocate more than `isize::MAX` bytes, so
    /// `vec.as_ptr().add(vec.len()).sub(vec.len())` is always safe.
    ///
    /// Most platforms fundamentally can't even construct such an allocation.
    /// For instance, no known 64-bit platform can ever serve a request
    /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
    /// However, some 32-bit and 16-bit platforms may successfully serve a request for
    /// more than `isize::MAX` bytes with things like Physical Address
    /// Extension. As such, memory acquired directly from allocators or memory
    /// mapped files *may* be too large to handle with this function.
    ///
    /// Consider using [`wrapping_sub`] instead if these constraints are
    /// difficult to satisfy. The only advantage of this method is that it
    /// enables more aggressive compiler optimizations.
    ///
    /// [`wrapping_sub`]: #method.wrapping_sub
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let s: &str = "123";
    ///
    /// unsafe {
    ///     let end: *const u8 = s.as_ptr().add(3);
    ///     println!("{}", *end.sub(1) as char);
    ///     println!("{}", *end.sub(2) as char);
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    pub unsafe fn sub(self, count: usize) -> Self
    where
        T: Sized,
    {
        // `wrapping_neg` negates without the overflow check that plain `-`
        // would perform when `count as isize` happens to be `isize::MIN`.
        self.offset((count as isize).wrapping_neg())
    }
508 | ||
    /// Calculates the offset from a pointer using wrapping arithmetic.
    /// (convenience for `.wrapping_offset(count as isize)`)
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// The resulting pointer does not need to be in bounds, but it is
    /// potentially hazardous to dereference (which requires `unsafe`).
    ///
    /// In particular, the resulting pointer remains attached to the same allocated
    /// object that `self` points to. It may *not* be used to access a
    /// different allocated object. Note that in Rust,
    /// every (stack-allocated) variable is considered a separate allocated object.
    ///
    /// Compared to [`add`], this method basically delays the requirement of staying
    /// within the same allocated object: [`add`] is immediate Undefined Behavior when
    /// crossing object boundaries; `wrapping_add` produces a pointer but still leads
    /// to Undefined Behavior if that pointer is dereferenced. [`add`] can be optimized
    /// better and is thus preferable in performance-sensitive code.
    ///
    /// If you need to cross object boundaries, cast the pointer to an integer and
    /// do the arithmetic there.
    ///
    /// [`add`]: #method.add
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// // Iterate using a raw pointer in increments of two elements
    /// let data = [1u8, 2, 3, 4, 5];
    /// let mut ptr: *const u8 = data.as_ptr();
    /// let step = 2;
    /// let end_rounded_up = ptr.wrapping_add(6);
    ///
    /// // This loop prints "1, 3, 5, "
    /// while ptr != end_rounded_up {
    ///     unsafe {
    ///         print!("{}, ", *ptr);
    ///     }
    ///     ptr = ptr.wrapping_add(step);
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    pub fn wrapping_add(self, count: usize) -> Self
    where
        T: Sized,
    {
        // Non-negative convenience form of `wrapping_offset`.
        self.wrapping_offset(count as isize)
    }
563 | ||
    /// Calculates the offset from a pointer using wrapping arithmetic.
    /// (convenience for `.wrapping_offset((count as isize).wrapping_neg())`)
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// The resulting pointer does not need to be in bounds, but it is
    /// potentially hazardous to dereference (which requires `unsafe`).
    ///
    /// In particular, the resulting pointer remains attached to the same allocated
    /// object that `self` points to. It may *not* be used to access a
    /// different allocated object. Note that in Rust,
    /// every (stack-allocated) variable is considered a separate allocated object.
    ///
    /// Compared to [`sub`], this method basically delays the requirement of staying
    /// within the same allocated object: [`sub`] is immediate Undefined Behavior when
    /// crossing object boundaries; `wrapping_sub` produces a pointer but still leads
    /// to Undefined Behavior if that pointer is dereferenced. [`sub`] can be optimized
    /// better and is thus preferable in performance-sensitive code.
    ///
    /// If you need to cross object boundaries, cast the pointer to an integer and
    /// do the arithmetic there.
    ///
    /// [`sub`]: #method.sub
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// // Iterate using a raw pointer in increments of two elements (backwards)
    /// let data = [1u8, 2, 3, 4, 5];
    /// let mut ptr: *const u8 = data.as_ptr();
    /// let start_rounded_down = ptr.wrapping_sub(2);
    /// ptr = ptr.wrapping_add(4);
    /// let step = 2;
    /// // This loop prints "5, 3, 1, "
    /// while ptr != start_rounded_down {
    ///     unsafe {
    ///         print!("{}, ", *ptr);
    ///     }
    ///     ptr = ptr.wrapping_sub(step);
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    pub fn wrapping_sub(self, count: usize) -> Self
    where
        T: Sized,
    {
        // `wrapping_neg` negates without the overflow check that plain `-`
        // would perform when `count as isize` happens to be `isize::MIN`.
        self.wrapping_offset((count as isize).wrapping_neg())
    }
618 | ||
    /// Reads the value from `self` without moving it. This leaves the
    /// memory in `self` unchanged.
    ///
    /// See [`ptr::read`] for safety concerns and examples.
    ///
    /// [`ptr::read`]: ./ptr/fn.read.html
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    pub unsafe fn read(self) -> T
    where
        T: Sized,
    {
        // Thin method-syntax wrapper; the full safety contract lives on the
        // free function `ptr::read`.
        read(self)
    }
633 | ||
    /// Performs a volatile read of the value from `self` without moving it. This
    /// leaves the memory in `self` unchanged.
    ///
    /// Volatile operations are intended to act on I/O memory, and are guaranteed
    /// to not be elided or reordered by the compiler across other volatile
    /// operations.
    ///
    /// See [`ptr::read_volatile`] for safety concerns and examples.
    ///
    /// [`ptr::read_volatile`]: ./ptr/fn.read_volatile.html
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    pub unsafe fn read_volatile(self) -> T
    where
        T: Sized,
    {
        // Thin method-syntax wrapper around the free function
        // `ptr::read_volatile`, which documents the safety requirements.
        read_volatile(self)
    }
652 | ||
    /// Reads the value from `self` without moving it. This leaves the
    /// memory in `self` unchanged.
    ///
    /// Unlike `read`, the pointer may be unaligned.
    ///
    /// See [`ptr::read_unaligned`] for safety concerns and examples.
    ///
    /// [`ptr::read_unaligned`]: ./ptr/fn.read_unaligned.html
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    pub unsafe fn read_unaligned(self) -> T
    where
        T: Sized,
    {
        // Thin method-syntax wrapper around the free function
        // `ptr::read_unaligned`, which documents the safety requirements.
        read_unaligned(self)
    }
669 | ||
    /// Copies `count * size_of<T>` bytes from `self` to `dest`. The source
    /// and destination may overlap.
    ///
    /// NOTE: this has the *same* argument order as [`ptr::copy`].
    ///
    /// See [`ptr::copy`] for safety concerns and examples.
    ///
    /// [`ptr::copy`]: ./ptr/fn.copy.html
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    pub unsafe fn copy_to(self, dest: *mut T, count: usize)
    where
        T: Sized,
    {
        // `self` is the source: arguments pass through to `ptr::copy`
        // in the same (src, dst, count) order.
        copy(self, dest, count)
    }
686 | ||
    /// Copies `count * size_of<T>` bytes from `self` to `dest`. The source
    /// and destination may *not* overlap.
    ///
    /// NOTE: this has the *same* argument order as [`ptr::copy_nonoverlapping`].
    ///
    /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
    ///
    /// [`ptr::copy_nonoverlapping`]: ./ptr/fn.copy_nonoverlapping.html
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    pub unsafe fn copy_to_nonoverlapping(self, dest: *mut T, count: usize)
    where
        T: Sized,
    {
        // `self` is the source: arguments pass through in the same
        // (src, dst, count) order as `ptr::copy_nonoverlapping`.
        copy_nonoverlapping(self, dest, count)
    }
703 | ||
    /// Copies `count * size_of<T>` bytes from `src` to `self`. The source
    /// and destination may overlap.
    ///
    /// NOTE: this has the *opposite* argument order of [`ptr::copy`].
    ///
    /// See [`ptr::copy`] for safety concerns and examples.
    ///
    /// [`ptr::copy`]: ./ptr/fn.copy.html
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    pub unsafe fn copy_from(self, src: *const T, count: usize)
    where
        T: Sized,
    {
        // `self` is the destination, so the arguments are swapped relative
        // to `ptr::copy`'s (src, dst, count) order.
        copy(src, self, count)
    }
720 | ||
    /// Copies `count * size_of<T>` bytes from `src` to `self`. The source
    /// and destination may *not* overlap.
    ///
    /// NOTE: this has the *opposite* argument order of [`ptr::copy_nonoverlapping`].
    ///
    /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
    ///
    /// [`ptr::copy_nonoverlapping`]: ./ptr/fn.copy_nonoverlapping.html
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    pub unsafe fn copy_from_nonoverlapping(self, src: *const T, count: usize)
    where
        T: Sized,
    {
        // `self` is the destination, so the arguments are swapped relative
        // to `ptr::copy_nonoverlapping`'s (src, dst, count) order.
        copy_nonoverlapping(src, self, count)
    }
737 | ||
    /// Executes the destructor (if any) of the pointed-to value.
    ///
    /// See [`ptr::drop_in_place`] for safety concerns and examples.
    ///
    /// [`ptr::drop_in_place`]: ./ptr/fn.drop_in_place.html
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    pub unsafe fn drop_in_place(self) {
        // No `T: Sized` bound, unlike the neighboring methods: this wrapper
        // also works for unsized pointees.
        drop_in_place(self)
    }
748 | ||
    /// Overwrites a memory location with the given value without reading or
    /// dropping the old value.
    ///
    /// See [`ptr::write`] for safety concerns and examples.
    ///
    /// [`ptr::write`]: ./ptr/fn.write.html
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    pub unsafe fn write(self, val: T)
    where
        T: Sized,
    {
        // Moves `val` into `*self`; per the contract above, the previous
        // contents are neither read nor dropped.
        write(self, val)
    }
763 | ||
    /// Invokes memset on the specified pointer, setting `count * size_of::<T>()`
    /// bytes of memory starting at `self` to `val`.
    ///
    /// See [`ptr::write_bytes`] for safety concerns and examples.
    ///
    /// [`ptr::write_bytes`]: ./ptr/fn.write_bytes.html
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    pub unsafe fn write_bytes(self, val: u8, count: usize)
    where
        T: Sized,
    {
        // Note: `count` is measured in units of `T`, not in bytes.
        write_bytes(self, val, count)
    }
778 | ||
    /// Performs a volatile write of a memory location with the given value without
    /// reading or dropping the old value.
    ///
    /// Volatile operations are intended to act on I/O memory, and are guaranteed
    /// to not be elided or reordered by the compiler across other volatile
    /// operations.
    ///
    /// See [`ptr::write_volatile`] for safety concerns and examples.
    ///
    /// [`ptr::write_volatile`]: ./ptr/fn.write_volatile.html
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    pub unsafe fn write_volatile(self, val: T)
    where
        T: Sized,
    {
        // Thin wrapper around the free function of the same name.
        write_volatile(self, val)
    }
797 | ||
    /// Overwrites a memory location with the given value without reading or
    /// dropping the old value.
    ///
    /// Unlike `write`, the pointer may be unaligned.
    ///
    /// See [`ptr::write_unaligned`] for safety concerns and examples.
    ///
    /// [`ptr::write_unaligned`]: ./ptr/fn.write_unaligned.html
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    pub unsafe fn write_unaligned(self, val: T)
    where
        T: Sized,
    {
        // Thin wrapper around the free function of the same name.
        write_unaligned(self, val)
    }
814 | ||
    /// Replaces the value at `self` with `src`, returning the old
    /// value, without dropping either.
    ///
    /// See [`ptr::replace`] for safety concerns and examples.
    ///
    /// [`ptr::replace`]: ./ptr/fn.replace.html
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    pub unsafe fn replace(self, src: T) -> T
    where
        T: Sized,
    {
        // Returns the previous contents of `*self`; per the contract above,
        // neither value is dropped by this call.
        replace(self, src)
    }
829 | ||
    /// Swaps the values at two mutable locations of the same type, without
    /// deinitializing either. They may overlap, unlike `mem::swap` which is
    /// otherwise equivalent.
    ///
    /// See [`ptr::swap`] for safety concerns and examples.
    ///
    /// [`ptr::swap`]: ./ptr/fn.swap.html
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    pub unsafe fn swap(self, with: *mut T)
    where
        T: Sized,
    {
        // Thin wrapper; unlike `mem::swap`, the two locations may overlap.
        swap(self, with)
    }
845 | ||
846 | /// Computes the offset that needs to be applied to the pointer in order to make it aligned to | |
847 | /// `align`. | |
848 | /// | |
849 | /// If it is not possible to align the pointer, the implementation returns | |
850 | /// `usize::max_value()`. It is permissible for the implementation to *always* | |
851 | /// return `usize::max_value()`. Only your algorithm's performance can depend | |
852 | /// on getting a usable offset here, not its correctness. | |
853 | /// | |
854 | /// The offset is expressed in number of `T` elements, and not bytes. The value returned can be | |
855 | /// used with the `wrapping_add` method. | |
856 | /// | |
857 | /// There are no guarantees whatsoever that offsetting the pointer will not overflow or go | |
858 | /// beyond the allocation that the pointer points into. It is up to the caller to ensure that | |
859 | /// the returned offset is correct in all terms other than alignment. | |
860 | /// | |
861 | /// # Panics | |
862 | /// | |
863 | /// The function panics if `align` is not a power-of-two. | |
864 | /// | |
865 | /// # Examples | |
866 | /// | |
867 | /// Accessing adjacent `u8` as `u16` | |
868 | /// | |
869 | /// ``` | |
870 | /// # fn foo(n: usize) { | |
871 | /// # use std::mem::align_of; | |
872 | /// # unsafe { | |
873 | /// let x = [5u8, 6u8, 7u8, 8u8, 9u8]; | |
874 | /// let ptr = &x[n] as *const u8; | |
875 | /// let offset = ptr.align_offset(align_of::<u16>()); | |
876 | /// if offset < x.len() - n - 1 { | |
877 | /// let u16_ptr = ptr.add(offset) as *const u16; | |
878 | /// assert_ne!(*u16_ptr, 500); | |
879 | /// } else { | |
880 | /// // while the pointer can be aligned via `offset`, it would point | |
881 | /// // outside the allocation | |
882 | /// } | |
883 | /// # } } | |
884 | /// ``` | |
885 | #[stable(feature = "align_offset", since = "1.36.0")] | |
886 | pub fn align_offset(self, align: usize) -> usize | |
887 | where | |
888 | T: Sized, | |
889 | { | |
890 | if !align.is_power_of_two() { | |
891 | panic!("align_offset: align is not a power-of-two"); | |
892 | } | |
893 | unsafe { align_offset(self, align) } | |
894 | } | |
895 | } | |
896 | ||
// Equality for pointers
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> PartialEq for *mut T {
    #[inline]
    fn eq(&self, other: &*mut T) -> bool {
        // Compare the raw pointer values directly.
        *self == *other
    }
}
905 | ||
// Marker impl: `eq` is provided by the `PartialEq` impl above.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Eq for *mut T {}
908 | ||
909 | #[stable(feature = "rust1", since = "1.0.0")] | |
910 | impl<T: ?Sized> Ord for *mut T { | |
911 | #[inline] | |
912 | fn cmp(&self, other: &*mut T) -> Ordering { | |
913 | if self < other { | |
914 | Less | |
915 | } else if self == other { | |
916 | Equal | |
917 | } else { | |
918 | Greater | |
919 | } | |
920 | } | |
921 | } | |
922 | ||
// Partial ordering for mutable raw pointers; consistent with `Ord` above.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> PartialOrd for *mut T {
    // Pointer comparison is total, so this always returns `Some`.
    #[inline]
    fn partial_cmp(&self, other: &*mut T) -> Option<Ordering> {
        Some(self.cmp(other))
    }

    // The four operator methods below compare the pointer values directly
    // instead of going through `partial_cmp`.
    #[inline]
    fn lt(&self, other: &*mut T) -> bool {
        *self < *other
    }

    #[inline]
    fn le(&self, other: &*mut T) -> bool {
        *self <= *other
    }

    #[inline]
    fn gt(&self, other: &*mut T) -> bool {
        *self > *other
    }

    #[inline]
    fn ge(&self, other: &*mut T) -> bool {
        *self >= *other
    }
}