1 // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 // FIXME: talk about offset, copy_memory, copy_nonoverlapping_memory
13 //! Raw, unsafe pointers, `*const T`, and `*mut T`.
15 //! *[See also the pointer primitive types](../../std/primitive.pointer.html).*
17 #![stable(feature = "rust1", since = "1.0.0")]
20 use ops
::CoerceUnsized
;
23 use marker
::{PhantomData, Unsize}
;
27 use cmp
::Ordering
::{self, Less, Equal, Greater}
;
29 // FIXME #19649: intrinsic docs don't render, so these have no docs :(
31 #[stable(feature = "rust1", since = "1.0.0")]
32 pub use intrinsics
::copy_nonoverlapping
;
34 #[stable(feature = "rust1", since = "1.0.0")]
35 pub use intrinsics
::copy
;
37 #[stable(feature = "rust1", since = "1.0.0")]
38 pub use intrinsics
::write_bytes
;
40 /// Executes the destructor (if any) of the pointed-to value.
42 /// This has two use cases:
44 /// * It is *required* to use `drop_in_place` to drop unsized types like
45 /// trait objects, because they can't be read out onto the stack and
48 /// * It is friendlier to the optimizer to do this over `ptr::read` when
49 /// dropping manually allocated memory (e.g. when writing Box/Rc/Vec),
50 /// as the compiler doesn't need to prove that it's sound to elide the
53 /// # Undefined Behavior
55 /// This has all the same safety problems as `ptr::read` with respect to
56 /// invalid pointers, types, and double drops.
57 #[stable(feature = "drop_in_place", since = "1.8.0")]
58 #[lang="drop_in_place"]
59 #[allow(unconditional_recursion)]
60 pub unsafe fn drop_in_place
<T
: ?Sized
>(to_drop
: *mut T
) {
61 // Code here does not matter - this is replaced by the
62 // real drop glue by the compiler.
63 drop_in_place(to_drop
);
66 /// Creates a null raw pointer.
73 /// let p: *const i32 = ptr::null();
74 /// assert!(p.is_null());
77 #[stable(feature = "rust1", since = "1.0.0")]
78 pub const fn null
<T
>() -> *const T { 0 as *const T }
80 /// Creates a null mutable raw pointer.
87 /// let p: *mut i32 = ptr::null_mut();
88 /// assert!(p.is_null());
91 #[stable(feature = "rust1", since = "1.0.0")]
92 pub const fn null_mut
<T
>() -> *mut T { 0 as *mut T }
94 /// Swaps the values at two mutable locations of the same type, without
95 /// deinitializing either. They may overlap, unlike `mem::swap` which is
96 /// otherwise equivalent.
100 /// This function copies the memory through the raw pointers passed to it
103 /// Ensure that these pointers are valid before calling `swap`.
105 #[stable(feature = "rust1", since = "1.0.0")]
106 pub unsafe fn swap
<T
>(x
: *mut T
, y
: *mut T
) {
107 // Give ourselves some scratch space to work with
108 let mut tmp
: T
= mem
::uninitialized();
111 copy_nonoverlapping(x
, &mut tmp
, 1);
112 copy(y
, x
, 1); // `x` and `y` may overlap
113 copy_nonoverlapping(&tmp
, y
, 1);
115 // y and t now point to the same thing, but we need to completely forget `tmp`
116 // because it's no longer relevant.
120 /// Swaps a sequence of values at two mutable locations of the same type.
124 /// The two arguments must each point to the beginning of `count` locations
125 /// of valid memory, and the two memory ranges must not overlap.
132 /// #![feature(swap_nonoverlapping)]
136 /// let mut x = [1, 2, 3, 4];
137 /// let mut y = [7, 8, 9];
140 /// ptr::swap_nonoverlapping(x.as_mut_ptr(), y.as_mut_ptr(), 2);
143 /// assert_eq!(x, [7, 8, 3, 4]);
144 /// assert_eq!(y, [1, 2, 9]);
147 #[unstable(feature = "swap_nonoverlapping", issue = "42818")]
148 pub unsafe fn swap_nonoverlapping
<T
>(x
: *mut T
, y
: *mut T
, count
: usize) {
149 let x
= x
as *mut u8;
150 let y
= y
as *mut u8;
151 let len
= mem
::size_of
::<T
>() * count
;
152 swap_nonoverlapping_bytes(x
, y
, len
)
156 unsafe fn swap_nonoverlapping_bytes(x
: *mut u8, y
: *mut u8, len
: usize) {
157 // The approach here is to utilize simd to swap x & y efficiently. Testing reveals
158 // that swapping either 32 bytes or 64 bytes at a time is most efficient for intel
159 // Haswell E processors. LLVM is more able to optimize if we give a struct a
160 // #[repr(simd)], even if we don't actually use this struct directly.
162 // FIXME repr(simd) broken on emscripten and redox
163 // It's also broken on big-endian powerpc64 and s390x. #42778
164 #[cfg_attr(not(any(target_os = "emscripten", target_os = "redox",
165 target_endian
= "big")),
167 struct Block(u64, u64, u64, u64);
168 struct UnalignedBlock(u64, u64, u64, u64);
170 let block_size
= mem
::size_of
::<Block
>();
172 // Loop through x & y, copying them `Block` at a time
173 // The optimizer should unroll the loop fully for most types
174 // N.B. We can't use a for loop as the `range` impl calls `mem::swap` recursively
176 while i
+ block_size
<= len
{
177 // Create some uninitialized memory as scratch space
178 // Declaring `t` here avoids aligning the stack when this loop is unused
179 let mut t
: Block
= mem
::uninitialized();
180 let t
= &mut t
as *mut _
as *mut u8;
181 let x
= x
.offset(i
as isize);
182 let y
= y
.offset(i
as isize);
184 // Swap a block of bytes of x & y, using t as a temporary buffer
185 // This should be optimized into efficient SIMD operations where available
186 copy_nonoverlapping(x
, t
, block_size
);
187 copy_nonoverlapping(y
, x
, block_size
);
188 copy_nonoverlapping(t
, y
, block_size
);
193 // Swap any remaining bytes
194 let mut t
: UnalignedBlock
= mem
::uninitialized();
197 let t
= &mut t
as *mut _
as *mut u8;
198 let x
= x
.offset(i
as isize);
199 let y
= y
.offset(i
as isize);
201 copy_nonoverlapping(x
, t
, rem
);
202 copy_nonoverlapping(y
, x
, rem
);
203 copy_nonoverlapping(t
, y
, rem
);
207 /// Replaces the value at `dest` with `src`, returning the old
208 /// value, without dropping either.
212 /// This is only unsafe because it accepts a raw pointer.
213 /// Otherwise, this operation is identical to `mem::replace`.
215 #[stable(feature = "rust1", since = "1.0.0")]
216 pub unsafe fn replace
<T
>(dest
: *mut T
, mut src
: T
) -> T
{
217 mem
::swap(&mut *dest
, &mut src
); // cannot overlap
221 /// Reads the value from `src` without moving it. This leaves the
222 /// memory in `src` unchanged.
226 /// Beyond accepting a raw pointer, this is unsafe because it semantically
227 /// moves the value out of `src` without preventing further usage of `src`.
228 /// If `T` is not `Copy`, then care must be taken to ensure that the value at
229 /// `src` is not used before the data is overwritten again (e.g. with `write`,
230 /// `zero_memory`, or `copy_memory`). Note that `*src = foo` counts as a use
231 /// because it will attempt to drop the value previously at `*src`.
233 /// The pointer must be aligned; use `read_unaligned` if that is not the case.
241 /// let y = &x as *const i32;
244 /// assert_eq!(std::ptr::read(y), 12);
248 #[stable(feature = "rust1", since = "1.0.0")]
249 pub unsafe fn read
<T
>(src
: *const T
) -> T
{
250 let mut tmp
: T
= mem
::uninitialized();
251 copy_nonoverlapping(src
, &mut tmp
, 1);
255 /// Reads the value from `src` without moving it. This leaves the
256 /// memory in `src` unchanged.
258 /// Unlike `read`, the pointer may be unaligned.
262 /// Beyond accepting a raw pointer, this is unsafe because it semantically
263 /// moves the value out of `src` without preventing further usage of `src`.
264 /// If `T` is not `Copy`, then care must be taken to ensure that the value at
265 /// `src` is not used before the data is overwritten again (e.g. with `write`,
266 /// `zero_memory`, or `copy_memory`). Note that `*src = foo` counts as a use
267 /// because it will attempt to drop the value previously at `*src`.
275 /// let y = &x as *const i32;
278 /// assert_eq!(std::ptr::read_unaligned(y), 12);
282 #[stable(feature = "ptr_unaligned", since = "1.17.0")]
283 pub unsafe fn read_unaligned
<T
>(src
: *const T
) -> T
{
284 let mut tmp
: T
= mem
::uninitialized();
285 copy_nonoverlapping(src
as *const u8,
286 &mut tmp
as *mut T
as *mut u8,
287 mem
::size_of
::<T
>());
291 /// Overwrites a memory location with the given value without reading or
292 /// dropping the old value.
296 /// This operation is marked unsafe because it accepts a raw pointer.
298 /// It does not drop the contents of `dst`. This is safe, but it could leak
299 /// allocations or resources, so care must be taken not to overwrite an object
300 /// that should be dropped.
302 /// Additionally, it does not drop `src`. Semantically, `src` is moved into the
303 /// location pointed to by `dst`.
305 /// This is appropriate for initializing uninitialized memory, or overwriting
306 /// memory that has previously been `read` from.
308 /// The pointer must be aligned; use `write_unaligned` if that is not the case.
316 /// let y = &mut x as *mut i32;
320 /// std::ptr::write(y, z);
321 /// assert_eq!(std::ptr::read(y), 12);
325 #[stable(feature = "rust1", since = "1.0.0")]
326 pub unsafe fn write
<T
>(dst
: *mut T
, src
: T
) {
327 intrinsics
::move_val_init(&mut *dst
, src
)
330 /// Overwrites a memory location with the given value without reading or
331 /// dropping the old value.
333 /// Unlike `write`, the pointer may be unaligned.
337 /// This operation is marked unsafe because it accepts a raw pointer.
339 /// It does not drop the contents of `dst`. This is safe, but it could leak
340 /// allocations or resources, so care must be taken not to overwrite an object
341 /// that should be dropped.
343 /// Additionally, it does not drop `src`. Semantically, `src` is moved into the
344 /// location pointed to by `dst`.
346 /// This is appropriate for initializing uninitialized memory, or overwriting
347 /// memory that has previously been `read` from.
355 /// let y = &mut x as *mut i32;
359 /// std::ptr::write_unaligned(y, z);
360 /// assert_eq!(std::ptr::read_unaligned(y), 12);
364 #[stable(feature = "ptr_unaligned", since = "1.17.0")]
365 pub unsafe fn write_unaligned
<T
>(dst
: *mut T
, src
: T
) {
366 copy_nonoverlapping(&src
as *const T
as *const u8,
368 mem
::size_of
::<T
>());
372 /// Performs a volatile read of the value from `src` without moving it. This
373 /// leaves the memory in `src` unchanged.
375 /// Volatile operations are intended to act on I/O memory, and are guaranteed
376 /// to not be elided or reordered by the compiler across other volatile
381 /// Rust does not currently have a rigorously and formally defined memory model,
382 /// so the precise semantics of what "volatile" means here is subject to change
383 /// over time. That being said, the semantics will almost always end up pretty
384 /// similar to [C11's definition of volatile][c11].
386 /// [c11]: http://www.open-std.org/jtc1/sc22/wg14/www/docs/n1570.pdf
390 /// Beyond accepting a raw pointer, this is unsafe because it semantically
391 /// moves the value out of `src` without preventing further usage of `src`.
392 /// If `T` is not `Copy`, then care must be taken to ensure that the value at
393 /// `src` is not used before the data is overwritten again (e.g. with `write`,
394 /// `zero_memory`, or `copy_memory`). Note that `*src = foo` counts as a use
395 /// because it will attempt to drop the value previously at `*src`.
403 /// let y = &x as *const i32;
406 /// assert_eq!(std::ptr::read_volatile(y), 12);
410 #[stable(feature = "volatile", since = "1.9.0")]
411 pub unsafe fn read_volatile
<T
>(src
: *const T
) -> T
{
412 intrinsics
::volatile_load(src
)
415 /// Performs a volatile write of a memory location with the given value without
416 /// reading or dropping the old value.
418 /// Volatile operations are intended to act on I/O memory, and are guaranteed
419 /// to not be elided or reordered by the compiler across other volatile
424 /// Rust does not currently have a rigorously and formally defined memory model,
425 /// so the precise semantics of what "volatile" means here is subject to change
426 /// over time. That being said, the semantics will almost always end up pretty
427 /// similar to [C11's definition of volatile][c11].
429 /// [c11]: http://www.open-std.org/jtc1/sc22/wg14/www/docs/n1570.pdf
433 /// This operation is marked unsafe because it accepts a raw pointer.
435 /// It does not drop the contents of `dst`. This is safe, but it could leak
436 /// allocations or resources, so care must be taken not to overwrite an object
437 /// that should be dropped.
439 /// This is appropriate for initializing uninitialized memory, or overwriting
440 /// memory that has previously been `read` from.
448 /// let y = &mut x as *mut i32;
452 /// std::ptr::write_volatile(y, z);
453 /// assert_eq!(std::ptr::read_volatile(y), 12);
457 #[stable(feature = "volatile", since = "1.9.0")]
458 pub unsafe fn write_volatile
<T
>(dst
: *mut T
, src
: T
) {
459 intrinsics
::volatile_store(dst
, src
);
462 #[lang = "const_ptr"]
463 impl<T
: ?Sized
> *const T
{
464 /// Returns `true` if the pointer is null.
471 /// let s: &str = "Follow the rabbit";
472 /// let ptr: *const u8 = s.as_ptr();
473 /// assert!(!ptr.is_null());
475 #[stable(feature = "rust1", since = "1.0.0")]
477 pub fn is_null(self) -> bool
where T
: Sized
{
481 /// Returns `None` if the pointer is null, or else returns a reference to
482 /// the value wrapped in `Some`.
486 /// While this method and its mutable counterpart are useful for
487 /// null-safety, it is important to note that this is still an unsafe
488 /// operation because the returned value could be pointing to invalid
491 /// Additionally, the lifetime `'a` returned is arbitrarily chosen and does
492 /// not necessarily reflect the actual lifetime of the data.
499 /// let ptr: *const u8 = &10u8 as *const u8;
502 /// if let Some(val_back) = ptr.as_ref() {
503 /// println!("We got back the value: {}!", val_back);
507 #[stable(feature = "ptr_as_ref", since = "1.9.0")]
509 pub unsafe fn as_ref
<'a
>(self) -> Option
<&'a T
> where T
: Sized
{
517 /// Calculates the offset from a pointer. `count` is in units of T; e.g. a
518 /// `count` of 3 represents a pointer offset of `3 * size_of::<T>()` bytes.
522 /// Both the starting and resulting pointer must be either in bounds or one
523 /// byte past the end of an allocated object. If either pointer is out of
524 /// bounds or arithmetic overflow occurs then
525 /// any further use of the returned value will result in undefined behavior.
532 /// let s: &str = "123";
533 /// let ptr: *const u8 = s.as_ptr();
536 /// println!("{}", *ptr.offset(1) as char);
537 /// println!("{}", *ptr.offset(2) as char);
540 #[stable(feature = "rust1", since = "1.0.0")]
542 pub unsafe fn offset(self, count
: isize) -> *const T
where T
: Sized
{
543 intrinsics
::offset(self, count
)
546 /// Calculates the offset from a pointer using wrapping arithmetic.
547 /// `count` is in units of T; e.g. a `count` of 3 represents a pointer
548 /// offset of `3 * size_of::<T>()` bytes.
552 /// The resulting pointer does not need to be in bounds, but it is
553 /// potentially hazardous to dereference (which requires `unsafe`).
555 /// Always use `.offset(count)` instead when possible, because `offset`
556 /// allows the compiler to optimize better.
563 /// // Iterate using a raw pointer in increments of two elements
564 /// let data = [1u8, 2, 3, 4, 5];
565 /// let mut ptr: *const u8 = data.as_ptr();
567 /// let end_rounded_up = ptr.wrapping_offset(6);
569 /// // This loop prints "1, 3, 5, "
570 /// while ptr != end_rounded_up {
572 /// print!("{}, ", *ptr);
574 /// ptr = ptr.wrapping_offset(step);
577 #[stable(feature = "ptr_wrapping_offset", since = "1.16.0")]
579 pub fn wrapping_offset(self, count
: isize) -> *const T
where T
: Sized
{
581 intrinsics
::arith_offset(self, count
)
585 /// Calculates the distance between two pointers. The returned value is in
586 /// units of T: the distance in bytes is divided by `mem::size_of::<T>()`.
588 /// If the address different between the two pointers ia not a multiple of
589 /// `mem::size_of::<T>()` then the result of the division is rounded towards
592 /// This function returns `None` if `T` is a zero-sized typed.
599 /// #![feature(offset_to)]
603 /// let ptr1: *const i32 = &a[1];
604 /// let ptr2: *const i32 = &a[3];
605 /// assert_eq!(ptr1.offset_to(ptr2), Some(2));
606 /// assert_eq!(ptr2.offset_to(ptr1), Some(-2));
607 /// assert_eq!(unsafe { ptr1.offset(2) }, ptr2);
608 /// assert_eq!(unsafe { ptr2.offset(-2) }, ptr1);
611 #[unstable(feature = "offset_to", issue = "41079")]
613 pub fn offset_to(self, other
: *const T
) -> Option
<isize> where T
: Sized
{
614 let size
= mem
::size_of
::<T
>();
618 let diff
= (other
as isize).wrapping_sub(self as isize);
619 Some(diff
/ size
as isize)
625 impl<T
: ?Sized
> *mut T
{
626 /// Returns `true` if the pointer is null.
633 /// let mut s = [1, 2, 3];
634 /// let ptr: *mut u32 = s.as_mut_ptr();
635 /// assert!(!ptr.is_null());
637 #[stable(feature = "rust1", since = "1.0.0")]
639 pub fn is_null(self) -> bool
where T
: Sized
{
643 /// Returns `None` if the pointer is null, or else returns a reference to
644 /// the value wrapped in `Some`.
648 /// While this method and its mutable counterpart are useful for
649 /// null-safety, it is important to note that this is still an unsafe
650 /// operation because the returned value could be pointing to invalid
653 /// Additionally, the lifetime `'a` returned is arbitrarily chosen and does
654 /// not necessarily reflect the actual lifetime of the data.
661 /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
664 /// if let Some(val_back) = ptr.as_ref() {
665 /// println!("We got back the value: {}!", val_back);
669 #[stable(feature = "ptr_as_ref", since = "1.9.0")]
671 pub unsafe fn as_ref
<'a
>(self) -> Option
<&'a T
> where T
: Sized
{
679 /// Calculates the offset from a pointer. `count` is in units of T; e.g. a
680 /// `count` of 3 represents a pointer offset of `3 * size_of::<T>()` bytes.
684 /// The offset must be in-bounds of the object, or one-byte-past-the-end.
685 /// Otherwise `offset` invokes Undefined Behavior, regardless of whether
686 /// the pointer is used.
693 /// let mut s = [1, 2, 3];
694 /// let ptr: *mut u32 = s.as_mut_ptr();
697 /// println!("{}", *ptr.offset(1));
698 /// println!("{}", *ptr.offset(2));
701 #[stable(feature = "rust1", since = "1.0.0")]
703 pub unsafe fn offset(self, count
: isize) -> *mut T
where T
: Sized
{
704 intrinsics
::offset(self, count
) as *mut T
707 /// Calculates the offset from a pointer using wrapping arithmetic.
708 /// `count` is in units of T; e.g. a `count` of 3 represents a pointer
709 /// offset of `3 * size_of::<T>()` bytes.
713 /// The resulting pointer does not need to be in bounds, but it is
714 /// potentially hazardous to dereference (which requires `unsafe`).
716 /// Always use `.offset(count)` instead when possible, because `offset`
717 /// allows the compiler to optimize better.
724 /// // Iterate using a raw pointer in increments of two elements
725 /// let mut data = [1u8, 2, 3, 4, 5];
726 /// let mut ptr: *mut u8 = data.as_mut_ptr();
728 /// let end_rounded_up = ptr.wrapping_offset(6);
730 /// while ptr != end_rounded_up {
734 /// ptr = ptr.wrapping_offset(step);
736 /// assert_eq!(&data, &[0, 2, 0, 4, 0]);
738 #[stable(feature = "ptr_wrapping_offset", since = "1.16.0")]
740 pub fn wrapping_offset(self, count
: isize) -> *mut T
where T
: Sized
{
742 intrinsics
::arith_offset(self, count
) as *mut T
746 /// Returns `None` if the pointer is null, or else returns a mutable
747 /// reference to the value wrapped in `Some`.
751 /// As with `as_ref`, this is unsafe because it cannot verify the validity
752 /// of the returned pointer, nor can it ensure that the lifetime `'a`
753 /// returned is indeed a valid lifetime for the contained data.
760 /// let mut s = [1, 2, 3];
761 /// let ptr: *mut u32 = s.as_mut_ptr();
762 /// let first_value = unsafe { ptr.as_mut().unwrap() };
763 /// *first_value = 4;
764 /// println!("{:?}", s); // It'll print: "[4, 2, 3]".
766 #[stable(feature = "ptr_as_ref", since = "1.9.0")]
768 pub unsafe fn as_mut
<'a
>(self) -> Option
<&'a
mut T
> where T
: Sized
{
776 /// Calculates the distance between two pointers. The returned value is in
777 /// units of T: the distance in bytes is divided by `mem::size_of::<T>()`.
779 /// If the address different between the two pointers ia not a multiple of
780 /// `mem::size_of::<T>()` then the result of the division is rounded towards
783 /// This function returns `None` if `T` is a zero-sized typed.
790 /// #![feature(offset_to)]
793 /// let mut a = [0; 5];
794 /// let ptr1: *mut i32 = &mut a[1];
795 /// let ptr2: *mut i32 = &mut a[3];
796 /// assert_eq!(ptr1.offset_to(ptr2), Some(2));
797 /// assert_eq!(ptr2.offset_to(ptr1), Some(-2));
798 /// assert_eq!(unsafe { ptr1.offset(2) }, ptr2);
799 /// assert_eq!(unsafe { ptr2.offset(-2) }, ptr1);
802 #[unstable(feature = "offset_to", issue = "41079")]
804 pub fn offset_to(self, other
: *const T
) -> Option
<isize> where T
: Sized
{
805 let size
= mem
::size_of
::<T
>();
809 let diff
= (other
as isize).wrapping_sub(self as isize);
810 Some(diff
/ size
as isize)
815 // Equality for pointers
816 #[stable(feature = "rust1", since = "1.0.0")]
817 impl<T
: ?Sized
> PartialEq
for *const T
{
819 fn eq(&self, other
: &*const T
) -> bool { *self == *other }
822 #[stable(feature = "rust1", since = "1.0.0")]
823 impl<T
: ?Sized
> Eq
for *const T {}
825 #[stable(feature = "rust1", since = "1.0.0")]
826 impl<T
: ?Sized
> PartialEq
for *mut T
{
828 fn eq(&self, other
: &*mut T
) -> bool { *self == *other }
831 #[stable(feature = "rust1", since = "1.0.0")]
832 impl<T
: ?Sized
> Eq
for *mut T {}
834 /// Compare raw pointers for equality.
836 /// This is the same as using the `==` operator, but less generic:
837 /// the arguments have to be `*const T` raw pointers,
838 /// not anything that implements `PartialEq`.
840 /// This can be used to compare `&T` references (which coerce to `*const T` implicitly)
841 /// by their address rather than comparing the values they point to
842 /// (which is what the `PartialEq for &T` implementation does).
850 /// let other_five = 5;
851 /// let five_ref = &five;
852 /// let same_five_ref = &five;
853 /// let other_five_ref = &other_five;
855 /// assert!(five_ref == same_five_ref);
856 /// assert!(five_ref == other_five_ref);
858 /// assert!(ptr::eq(five_ref, same_five_ref));
859 /// assert!(!ptr::eq(five_ref, other_five_ref));
861 #[stable(feature = "ptr_eq", since = "1.17.0")]
863 pub fn eq
<T
: ?Sized
>(a
: *const T
, b
: *const T
) -> bool
{
867 #[stable(feature = "rust1", since = "1.0.0")]
868 impl<T
: ?Sized
> Clone
for *const T
{
870 fn clone(&self) -> *const T
{
875 #[stable(feature = "rust1", since = "1.0.0")]
876 impl<T
: ?Sized
> Clone
for *mut T
{
878 fn clone(&self) -> *mut T
{
// Impls for function pointers.
//
// For each concrete fn-pointer type handed in as `$FnTy`, this macro emits
// Clone/PartialEq/Eq/PartialOrd/Ord/Hash/Pointer/Debug impls. All comparison
// and hashing is done on the code address (`*self as usize`), never by
// calling the function.
macro_rules! fnptr_impls_safety_abi {
    ($FnTy: ty, $($Arg: ident),*) => {
        #[stable(feature = "rust1", since = "1.0.0")]
        impl<Ret, $($Arg),*> Clone for $FnTy {
            #[inline]
            fn clone(&self) -> Self {
                // Function pointers are plain values; copy them bitwise.
                *self
            }
        }

        #[stable(feature = "fnptr_impls", since = "1.4.0")]
        impl<Ret, $($Arg),*> PartialEq for $FnTy {
            #[inline]
            fn eq(&self, other: &Self) -> bool {
                *self as usize == *other as usize
            }
        }

        #[stable(feature = "fnptr_impls", since = "1.4.0")]
        impl<Ret, $($Arg),*> Eq for $FnTy {}

        #[stable(feature = "fnptr_impls", since = "1.4.0")]
        impl<Ret, $($Arg),*> PartialOrd for $FnTy {
            #[inline]
            fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
                (*self as usize).partial_cmp(&(*other as usize))
            }
        }

        #[stable(feature = "fnptr_impls", since = "1.4.0")]
        impl<Ret, $($Arg),*> Ord for $FnTy {
            #[inline]
            fn cmp(&self, other: &Self) -> Ordering {
                (*self as usize).cmp(&(*other as usize))
            }
        }

        #[stable(feature = "fnptr_impls", since = "1.4.0")]
        impl<Ret, $($Arg),*> hash::Hash for $FnTy {
            fn hash<HH: hash::Hasher>(&self, state: &mut HH) {
                state.write_usize(*self as usize)
            }
        }

        #[stable(feature = "fnptr_impls", since = "1.4.0")]
        impl<Ret, $($Arg),*> fmt::Pointer for $FnTy {
            fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
                fmt::Pointer::fmt(&(*self as *const ()), f)
            }
        }

        #[stable(feature = "fnptr_impls", since = "1.4.0")]
        impl<Ret, $($Arg),*> fmt::Debug for $FnTy {
            fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
                // Debug output for fn pointers is the address, same as Pointer.
                fmt::Pointer::fmt(&(*self as *const ()), f)
            }
        }
    }
}
// Expands `fnptr_impls_safety_abi!` over every safety/ABI/variadic combination
// for a given argument-identifier list. The zero-argument arm omits the
// C-variadic forms, since variadic functions require at least one parameter.
macro_rules! fnptr_impls_args {
    ($($Arg: ident),+) => {
        fnptr_impls_safety_abi! { extern "Rust" fn($($Arg),*) -> Ret, $($Arg),* }
        fnptr_impls_safety_abi! { extern "C" fn($($Arg),*) -> Ret, $($Arg),* }
        fnptr_impls_safety_abi! { extern "C" fn($($Arg),* , ...) -> Ret, $($Arg),* }
        fnptr_impls_safety_abi! { unsafe extern "Rust" fn($($Arg),*) -> Ret, $($Arg),* }
        fnptr_impls_safety_abi! { unsafe extern "C" fn($($Arg),*) -> Ret, $($Arg),* }
        fnptr_impls_safety_abi! { unsafe extern "C" fn($($Arg),* , ...) -> Ret, $($Arg),* }
    };
    () => {
        // No variadic functions with 0 parameters
        fnptr_impls_safety_abi! { extern "Rust" fn() -> Ret, }
        fnptr_impls_safety_abi! { extern "C" fn() -> Ret, }
        fnptr_impls_safety_abi! { unsafe extern "Rust" fn() -> Ret, }
        fnptr_impls_safety_abi! { unsafe extern "C" fn() -> Ret, }
    };
}
962 fnptr_impls_args
! { }
963 fnptr_impls_args
! { A }
964 fnptr_impls_args
! { A, B }
965 fnptr_impls_args
! { A, B, C }
966 fnptr_impls_args
! { A, B, C, D }
967 fnptr_impls_args
! { A, B, C, D, E }
968 fnptr_impls_args
! { A, B, C, D, E, F }
969 fnptr_impls_args
! { A, B, C, D, E, F, G }
970 fnptr_impls_args
! { A, B, C, D, E, F, G, H }
971 fnptr_impls_args
! { A, B, C, D, E, F, G, H, I }
972 fnptr_impls_args
! { A, B, C, D, E, F, G, H, I, J }
973 fnptr_impls_args
! { A, B, C, D, E, F, G, H, I, J, K }
974 fnptr_impls_args
! { A, B, C, D, E, F, G, H, I, J, K, L }
976 // Comparison for pointers
977 #[stable(feature = "rust1", since = "1.0.0")]
978 impl<T
: ?Sized
> Ord
for *const T
{
980 fn cmp(&self, other
: &*const T
) -> Ordering
{
983 } else if self == other
{
991 #[stable(feature = "rust1", since = "1.0.0")]
992 impl<T
: ?Sized
> PartialOrd
for *const T
{
994 fn partial_cmp(&self, other
: &*const T
) -> Option
<Ordering
> {
995 Some(self.cmp(other
))
999 fn lt(&self, other
: &*const T
) -> bool { *self < *other }
1002 fn le(&self, other
: &*const T
) -> bool { *self <= *other }
1005 fn gt(&self, other
: &*const T
) -> bool { *self > *other }
1008 fn ge(&self, other
: &*const T
) -> bool { *self >= *other }
1011 #[stable(feature = "rust1", since = "1.0.0")]
1012 impl<T
: ?Sized
> Ord
for *mut T
{
1014 fn cmp(&self, other
: &*mut T
) -> Ordering
{
1017 } else if self == other
{
1025 #[stable(feature = "rust1", since = "1.0.0")]
1026 impl<T
: ?Sized
> PartialOrd
for *mut T
{
1028 fn partial_cmp(&self, other
: &*mut T
) -> Option
<Ordering
> {
1029 Some(self.cmp(other
))
1033 fn lt(&self, other
: &*mut T
) -> bool { *self < *other }
1036 fn le(&self, other
: &*mut T
) -> bool { *self <= *other }
1039 fn gt(&self, other
: &*mut T
) -> bool { *self > *other }
1042 fn ge(&self, other
: &*mut T
) -> bool { *self >= *other }
1045 /// A wrapper around a raw non-null `*mut T` that indicates that the possessor
1046 /// of this wrapper owns the referent. Useful for building abstractions like
1047 /// `Box<T>`, `Vec<T>`, `String`, and `HashMap<K, V>`.
1049 /// Unlike `*mut T`, `Unique<T>` behaves "as if" it were an instance of `T`.
1050 /// It implements `Send`/`Sync` if `T` is `Send`/`Sync`. It also implies
1051 /// the kind of strong aliasing guarantees an instance of `T` can expect:
1052 /// the referent of the pointer should not be modified without a unique path to
1053 /// its owning Unique.
1055 /// If you're uncertain of whether it's correct to use `Unique` for your purposes,
1056 /// consider using `Shared`, which has weaker semantics.
1058 /// Unlike `*mut T`, the pointer must always be non-null, even if the pointer
1059 /// is never dereferenced. This is so that enums may use this forbidden value
1060 /// as a discriminant -- `Option<Unique<T>>` has the same size as `Unique<T>`.
1061 /// However the pointer may still dangle if it isn't dereferenced.
1063 /// Unlike `*mut T`, `Unique<T>` is covariant over `T`. This should always be correct
1064 /// for any type which upholds Unique's aliasing requirements.
1065 #[allow(missing_debug_implementations)]
1066 #[unstable(feature = "unique", reason = "needs an RFC to flesh out design",
1068 pub struct Unique
<T
: ?Sized
> {
1069 pointer
: NonZero
<*const T
>,
1070 // NOTE: this marker has no consequences for variance, but is necessary
1071 // for dropck to understand that we logically own a `T`.
1073 // For details, see:
1074 // https://github.com/rust-lang/rfcs/blob/master/text/0769-sound-generic-drop.md#phantom-data
1075 _marker
: PhantomData
<T
>,
1078 /// `Unique` pointers are `Send` if `T` is `Send` because the data they
1079 /// reference is unaliased. Note that this aliasing invariant is
1080 /// unenforced by the type system; the abstraction using the
1081 /// `Unique` must enforce it.
1082 #[unstable(feature = "unique", issue = "27730")]
1083 unsafe impl<T
: Send
+ ?Sized
> Send
for Unique
<T
> { }
1085 /// `Unique` pointers are `Sync` if `T` is `Sync` because the data they
1086 /// reference is unaliased. Note that this aliasing invariant is
1087 /// unenforced by the type system; the abstraction using the
1088 /// `Unique` must enforce it.
1089 #[unstable(feature = "unique", issue = "27730")]
1090 unsafe impl<T
: Sync
+ ?Sized
> Sync
for Unique
<T
> { }
1092 #[unstable(feature = "unique", issue = "27730")]
1093 impl<T
: Sized
> Unique
<T
> {
1094 /// Creates a new `Unique` that is dangling, but well-aligned.
1096 /// This is useful for initializing types which lazily allocate, like
1097 /// `Vec::new` does.
1098 pub fn empty() -> Self {
1100 let ptr
= mem
::align_of
::<T
>() as *mut T
;
1106 #[unstable(feature = "unique", issue = "27730")]
1107 impl<T
: ?Sized
> Unique
<T
> {
1108 /// Creates a new `Unique`.
1112 /// `ptr` must be non-null.
1113 pub const unsafe fn new(ptr
: *mut T
) -> Unique
<T
> {
1114 Unique { pointer: NonZero::new(ptr), _marker: PhantomData }
1117 /// Acquires the underlying `*mut` pointer.
1118 pub fn as_ptr(self) -> *mut T
{
1119 self.pointer
.get() as *mut T
1122 /// Dereferences the content.
1124 /// The resulting lifetime is bound to self so this behaves "as if"
1125 /// it were actually an instance of T that is getting borrowed. If a longer
1126 /// (unbound) lifetime is needed, use `&*my_ptr.ptr()`.
1127 pub unsafe fn as_ref(&self) -> &T
{
1131 /// Mutably dereferences the content.
1133 /// The resulting lifetime is bound to self so this behaves "as if"
1134 /// it were actually an instance of T that is getting borrowed. If a longer
1135 /// (unbound) lifetime is needed, use `&mut *my_ptr.ptr()`.
1136 pub unsafe fn as_mut(&mut self) -> &mut T
{
1141 #[unstable(feature = "shared", issue = "27730")]
1142 impl<T
: ?Sized
> Clone
for Unique
<T
> {
1143 fn clone(&self) -> Self {
1148 #[unstable(feature = "shared", issue = "27730")]
1149 impl<T
: ?Sized
> Copy
for Unique
<T
> { }
1151 #[unstable(feature = "unique", issue = "27730")]
1152 impl<T
: ?Sized
, U
: ?Sized
> CoerceUnsized
<Unique
<U
>> for Unique
<T
> where T
: Unsize
<U
> { }
1154 #[unstable(feature = "unique", issue = "27730")]
1155 impl<T
: ?Sized
> fmt
::Pointer
for Unique
<T
> {
1156 fn fmt(&self, f
: &mut fmt
::Formatter
) -> fmt
::Result
{
1157 fmt
::Pointer
::fmt(&self.as_ptr(), f
)
1161 /// A wrapper around a raw `*mut T` that indicates that the possessor
1162 /// of this wrapper has shared ownership of the referent. Useful for
1163 /// building abstractions like `Rc<T>`, `Arc<T>`, or doubly-linked lists, which
1164 /// internally use aliased raw pointers to manage the memory that they own.
1166 /// This is similar to `Unique`, except that it doesn't make any aliasing
1167 /// guarantees, and doesn't derive Send and Sync. Note that unlike `&T`,
1168 /// Shared has no special mutability requirements. Shared may mutate data
1169 /// aliased by other Shared pointers. More precise rules require Rust to
1170 /// develop an actual aliasing model.
1172 /// Unlike `*mut T`, the pointer must always be non-null, even if the pointer
1173 /// is never dereferenced. This is so that enums may use this forbidden value
1174 /// as a discriminant -- `Option<Shared<T>>` has the same size as `Shared<T>`.
1175 /// However the pointer may still dangle if it isn't dereferenced.
1177 /// Unlike `*mut T`, `Shared<T>` is covariant over `T`. If this is incorrect
1178 /// for your use case, you should include some PhantomData in your type to
1179 /// provide invariance, such as `PhantomData<Cell<T>>` or `PhantomData<&'a mut T>`.
1180 /// Usually this won't be necessary; covariance is correct for Rc, Arc, and LinkedList
1181 /// because they provide a public API that follows the normal shared XOR mutable
1183 #[allow(missing_debug_implementations)]
1184 #[unstable(feature = "shared", reason = "needs an RFC to flesh out design",
1186 pub struct Shared
<T
: ?Sized
> {
1187 pointer
: NonZero
<*const T
>,
1188 // NOTE: this marker has no consequences for variance, but is necessary
1189 // for dropck to understand that we logically own a `T`.
1191 // For details, see:
1192 // https://github.com/rust-lang/rfcs/blob/master/text/0769-sound-generic-drop.md#phantom-data
1193 _marker
: PhantomData
<T
>,
1196 /// `Shared` pointers are not `Send` because the data they reference may be aliased.
1197 // NB: This impl is unnecessary, but should provide better error messages.
1198 #[unstable(feature = "shared", issue = "27730")]
1199 impl<T
: ?Sized
> !Send
for Shared
<T
> { }
1201 /// `Shared` pointers are not `Sync` because the data they reference may be aliased.
1202 // NB: This impl is unnecessary, but should provide better error messages.
1203 #[unstable(feature = "shared", issue = "27730")]
1204 impl<T
: ?Sized
> !Sync
for Shared
<T
> { }
// NOTE(review): fragment — the tail of `empty()` (the value produced from
// `ptr`, plus closing braces) was dropped by the extraction; code below is
// kept byte-identical. Leading numeric tokens are fused original line numbers.
1206 #[unstable(feature = "shared", issue = "27730")]
1207 impl<T
: Sized
> Shared
<T
> {
1208 /// Creates a new `Shared` that is dangling, but well-aligned.
1210 /// This is useful for initializing types which lazily allocate, like
1211 /// `Vec::new` does.
// Uses the type's alignment as the address, yielding a dangling but
// well-aligned `*mut T`.
1212 pub fn empty() -> Self {
1214 let ptr
= mem
::align_of
::<T
>() as *mut T
;
// NOTE(review): fragment — method bodies and closing braces are missing
// (original numbering jumps, e.g. 1228 -> 1231, 1241 -> 1245); code below is
// kept byte-identical.
1220 #[unstable(feature = "shared", issue = "27730")]
1221 impl<T
: ?Sized
> Shared
<T
> {
1222 /// Creates a new `Shared`.
1226 /// `ptr` must be non-null.
// Wraps the raw pointer in `NonZero` (non-null is the caller's obligation —
// hence `unsafe`) and marks logical ownership of a `T` with `PhantomData`.
1227 pub unsafe fn new(ptr
: *mut T
) -> Self {
1228 Shared { pointer: NonZero::new(ptr), _marker: PhantomData }
1231 /// Acquires the underlying `*mut` pointer.
// Unwraps the `NonZero` and casts the stored `*const T` back to `*mut T`.
1232 pub fn as_ptr(self) -> *mut T
{
1233 self.pointer
.get() as *mut T
1236 /// Dereferences the content.
1238 /// The resulting lifetime is bound to self so this behaves "as if"
1239 /// it were actually an instance of T that is getting borrowed. If a longer
1240 /// (unbound) lifetime is needed, use `&*my_ptr.ptr()`.
// NOTE(review): body missing from this extraction — presumably
// `&*self.as_ptr()`; confirm against the full file.
1241 pub unsafe fn as_ref(&self) -> &T
{
1245 /// Mutably dereferences the content.
1247 /// The resulting lifetime is bound to self so this behaves "as if"
1248 /// it were actually an instance of T that is getting borrowed. If a longer
1249 /// (unbound) lifetime is needed, use `&mut *my_ptr.ptr_mut()`.
// NOTE(review): body missing from this extraction — presumably
// `&mut *self.as_ptr()`; confirm against the full file.
1250 pub unsafe fn as_mut(&mut self) -> &mut T
{
1254 /// Acquires the underlying pointer as a `*mut` pointer.
// Deprecated alias kept for backward compatibility; body missing from this
// extraction (presumably delegates to `as_ptr`).
1255 #[rustc_deprecated(since = "1.19", reason = "renamed to `as_ptr` for ergonomics/consistency")]
1256 #[unstable(feature = "shared", issue = "27730")]
1257 pub unsafe fn as_mut_ptr(&self) -> *mut T
{
// `Clone` for `Shared<T>`: duplicates the wrapper only — the pointee stays
// aliased, never copied. NOTE(review): the `clone` body was dropped by the
// extraction (presumably `*self`); code below is kept byte-identical.
1262 #[unstable(feature = "shared", issue = "27730")]
1263 impl<T
: ?Sized
> Clone
for Shared
<T
> {
1264 fn clone(&self) -> Self {
1269 #[unstable(feature = "shared", issue = "27730")]
1270 impl<T
: ?Sized
> Copy
for Shared
<T
> { }
1272 #[unstable(feature = "shared", issue = "27730")]
1273 impl<T
: ?Sized
, U
: ?Sized
> CoerceUnsized
<Shared
<U
>> for Shared
<T
> where T
: Unsize
<U
> { }
// NOTE(review): this impl runs past the end of the extracted chunk — its
// closing braces are not visible; code below is kept byte-identical.
// Formats a `Shared<T>` like the raw pointer it wraps, delegating to the
// `fmt::Pointer` impl for `*mut T` via `as_ptr()`.
1275 #[unstable(feature = "shared", issue = "27730")]
1276 impl<T
: ?Sized
> fmt
::Pointer
for Shared
<T
> {
1277 fn fmt(&self, f
: &mut fmt
::Formatter
) -> fmt
::Result
{
1278 fmt
::Pointer
::fmt(&self.as_ptr(), f
)