// NOTE(review): stray `cfg` attribute — the item it gates (original lines 5-8,
// not visible in this chunk) is missing; presumably an unwind-only `use` or
// item. Confirm against the full file.
4 #[cfg(panic = "unwind")]
// NOTE(review): extraction-mangled fragment; the enclosing `#[test] fn` header
// (original lines 5-8) is not visible, so the body is wrapped in a
// reconstructed function (named per the sibling `size_of_val_basic`).
/// Fixed-width integer types have exactly their advertised byte sizes.
fn size_of_basic() {
    assert_eq!(size_of::<u8>(), 1);
    assert_eq!(size_of::<u16>(), 2);
    assert_eq!(size_of::<u32>(), 4);
    assert_eq!(size_of::<u64>(), 8);
}
// NOTE(review): three pointer-width-gated fragments; the single line between
// each `cfg` and its asserts (orig lines 17, 24, 31 — presumably a fn header)
// is not visible, so each pair is wrapped in a reconstructed function.

/// On 16-bit targets, `usize` and thin raw pointers occupy 2 bytes.
#[cfg(target_pointer_width = "16")]
fn size_of_16() {
    assert_eq!(size_of::<usize>(), 2);
    assert_eq!(size_of::<*const usize>(), 2);
}

/// On 32-bit targets, `usize` and thin raw pointers occupy 4 bytes.
#[cfg(target_pointer_width = "32")]
fn size_of_32() {
    assert_eq!(size_of::<usize>(), 4);
    assert_eq!(size_of::<*const usize>(), 4);
}

/// On 64-bit targets, `usize` and thin raw pointers occupy 8 bytes.
#[cfg(target_pointer_width = "64")]
fn size_of_64() {
    assert_eq!(size_of::<usize>(), 8);
    assert_eq!(size_of::<*const usize>(), 8);
}
/// `size_of_val` on a reference to a sized value reports the pointee type's size.
fn size_of_val_basic() {
    assert_eq!(size_of_val(&1u8), 1);
    assert_eq!(size_of_val(&1u16), 2);
    assert_eq!(size_of_val(&1u32), 4);
    assert_eq!(size_of_val(&1u64), 8);
}
// NOTE(review): the enclosing `#[test] fn` header (orig lines 44-45) is not
// visible; wrapped in a reconstructed, conventionally named function.
/// Primitive integers are aligned to their own size (as asserted by the
/// original test for u8/u16/u32).
fn align_of_basic() {
    assert_eq!(align_of::<u8>(), 1);
    assert_eq!(align_of::<u16>(), 2);
    assert_eq!(align_of::<u32>(), 4);
}
// NOTE(review): three pointer-width-gated fragments; the line between each
// `cfg` and its asserts (orig 53, 60, 67 — presumably a fn header) is not
// visible, so each pair is wrapped in a reconstructed function.

/// On 16-bit targets, `usize` and thin raw pointers are 2-byte aligned.
#[cfg(target_pointer_width = "16")]
fn align_of_16() {
    assert_eq!(align_of::<usize>(), 2);
    assert_eq!(align_of::<*const usize>(), 2);
}

/// On 32-bit targets, `usize` and thin raw pointers are 4-byte aligned.
#[cfg(target_pointer_width = "32")]
fn align_of_32() {
    assert_eq!(align_of::<usize>(), 4);
    assert_eq!(align_of::<*const usize>(), 4);
}

/// On 64-bit targets, `usize` and thin raw pointers are 8-byte aligned.
#[cfg(target_pointer_width = "64")]
fn align_of_64() {
    assert_eq!(align_of::<usize>(), 8);
    assert_eq!(align_of::<*const usize>(), 8);
}
/// `align_of_val` on a reference to a sized value reports the pointee type's
/// alignment.
fn align_of_val_basic() {
    assert_eq!(align_of_val(&1u8), 1);
    assert_eq!(align_of_val(&1u16), 2);
    assert_eq!(align_of_val(&1u32), 4);
}
80 fn align_of_val_raw_packed() {
85 let storage
= [0u8; 4];
86 let b
: *const B
= ptr
::from_raw_parts(storage
.as_ptr().cast(), 1);
87 assert_eq
!(unsafe { align_of_val_raw(b) }
, 1);
89 const ALIGN_OF_VAL_RAW
: usize = {
90 let storage
= [0u8; 4];
91 let b
: *const B
= ptr
::from_raw_parts(storage
.as_ptr().cast(), 1);
92 unsafe { align_of_val_raw(b) }
94 assert_eq
!(ALIGN_OF_VAL_RAW
, 1);
// NOTE(review): only orig lines 101 and 103 of this test are visible. The
// missing setup (orig ~99-100) and the matching assert on `x` are
// reconstructed: `x` must start at 31337 for the visible assert to hold;
// `y`'s initial value (42) is assumed — confirm against the full file.
/// `swap` exchanges the contents of the two mutable references.
fn test_swap() {
    let mut x = 31337;
    let mut y = 42;
    swap(&mut x, &mut y);
    assert_eq!(x, 42);
    assert_eq!(y, 31337);
}
// NOTE(review): the enclosing `#[test] fn` header (orig ~106-107) is not
// visible; wrapped in a reconstructed function named per the file's `test_*`
// convention.
/// `replace` moves the new value into the slot and returns the old one.
fn test_replace() {
    let mut x = Some("test".to_string());
    let y = replace(&mut x, None);
    assert!(x.is_none());
    assert!(y.is_some());
}
/// `transmute_copy` between identical types is the identity.
fn test_transmute_copy() {
    assert_eq!(1, unsafe { transmute_copy(&1) });
}
/// `transmute_copy` may shrink — reading a `u8` out of a `u64` source is
/// allowed (only "Dst larger than Src" is rejected, per the panic message
/// checked elsewhere in this file).
fn test_transmute_copy_shrink() {
    assert_eq!(0_u8, unsafe { transmute_copy(&0_u64) });
}
125 fn test_transmute_copy_unaligned() {
133 let u
= Unaligned
::default();
134 assert_eq
!(0_u64, unsafe { transmute_copy(&u.b) }
);
// Growing `transmute_copy` (Dst larger than Src) must panic rather than read
// out of bounds; the unwind payload is the fixed &'static str message below.
// NOTE(review): heavily elided fragment — orig lines 140-141, 144-146,
// 148-149, 151 and 153-157 (closure close, `match` scrutinee, arm glue and fn
// close) are not visible in this chunk; visible lines kept byte-identical.
138 #[cfg(panic = "unwind")]
139 fn test_transmute_copy_grow_panics() {
// Run the oversized copy under catch_unwind and capture the panic payload.
142 let err
= panic
::catch_unwind(panic
::AssertUnwindSafe(|| unsafe {
143 let _unused
: u64 = transmute_copy(&1_u8);
// A successful (non-panicking) run would be a bug.
147 Ok(_
) => unreachable
!(),
// Downcast the payload to the expected &'static str message.
150 .downcast
::<&'
static str>()
152 if *s
== "cannot transmute_copy if Dst is larger than Src" {
// Unknown payloads are re-thrown so the real failure stays visible.
158 .unwrap_or_else(|p
| panic
::resume_unwind(p
));
165 fn test_discriminant_send_sync() {
174 fn is_send_sync
<T
: Send
+ Sync
>() {}
176 is_send_sync
::<Discriminant
<Regular
>>();
177 is_send_sync
::<Discriminant
<NotSendSync
>>();
/// `MaybeUninit::new(..).assume_init()` round-trips in const evaluation.
fn assume_init_good() {
    const TRUE: bool = unsafe { MaybeUninit::<bool>::new(true).assume_init() };
    // NOTE(review): the assertion consuming `TRUE` (orig lines 183+) is not
    // visible in this chunk.
}
188 fn uninit_array_assume_init() {
189 let mut array
= [MaybeUninit
::<i16>::uninit(); 5];
196 let array
= unsafe { array.transpose().assume_init() }
;
198 assert_eq
!(array
, [3, 1, 4, 1, 5]);
200 let [] = unsafe { [MaybeUninit::<!>::uninit(); 0].transpose().assume_init() }
;
204 fn uninit_write_slice() {
205 let mut dst
= [MaybeUninit
::new(255); 64];
208 assert_eq
!(MaybeUninit
::write_slice(&mut dst
, &src
), &src
);
212 #[should_panic(expected = "source slice length (32) does not match destination slice length (64)")]
213 fn uninit_write_slice_panic_lt() {
214 let mut dst
= [MaybeUninit
::uninit(); 64];
217 MaybeUninit
::write_slice(&mut dst
, &src
);
221 #[should_panic(expected = "source slice length (128) does not match destination slice length (64)")]
222 fn uninit_write_slice_panic_gt() {
223 let mut dst
= [MaybeUninit
::uninit(); 64];
226 MaybeUninit
::write_slice(&mut dst
, &src
);
230 fn uninit_clone_from_slice() {
231 let mut dst
= [MaybeUninit
::new(255); 64];
234 assert_eq
!(MaybeUninit
::write_slice_cloned(&mut dst
, &src
), &src
);
238 #[should_panic(expected = "destination and source slices have different lengths")]
239 fn uninit_write_slice_cloned_panic_lt() {
240 let mut dst
= [MaybeUninit
::uninit(); 64];
243 MaybeUninit
::write_slice_cloned(&mut dst
, &src
);
247 #[should_panic(expected = "destination and source slices have different lengths")]
248 fn uninit_write_slice_cloned_panic_gt() {
249 let mut dst
= [MaybeUninit
::uninit(); 64];
252 MaybeUninit
::write_slice_cloned(&mut dst
, &src
);
// A panic in the middle of `write_slice_cloned` must drop the already-cloned
// elements (observed via the `Rc` strong count) and propagate the panic.
// NOTE(review): heavily elided fragment — orig lines 258-259, 261-265, 268,
// 272-275, 277-278, 283-285, 290-291 and 294-305 are not visible in this
// chunk; visible lines kept byte-identical.
256 #[cfg(panic = "unwind")]
257 fn uninit_write_slice_cloned_mid_panic() {
// Clone either bumps the shared Rc or panics, depending on the variant.
260 enum IncrementOrPanic
{
266 impl Clone
for IncrementOrPanic
{
267 fn clone(&self) -> Self {
269 Self::Increment(rc
) => Self::Increment(rc
.clone()),
270 Self::ExpectedPanic
=> panic
!("expected panic on clone"),
271 Self::UnexpectedPanic
=> panic
!("unexpected panic on clone"),
// Shared counter used to observe how many clones survived or were dropped.
276 let rc
= Rc
::new(());
// Destination starts fully uninitialized (four slots).
279 MaybeUninit
::uninit(),
280 MaybeUninit
::uninit(),
281 MaybeUninit
::uninit(),
282 MaybeUninit
::uninit(),
// Source: two clonable elements, then the planned panic, then a variant whose
// clone must never be reached.
286 IncrementOrPanic
::Increment(rc
.clone()),
287 IncrementOrPanic
::Increment(rc
.clone()),
288 IncrementOrPanic
::ExpectedPanic
,
289 IncrementOrPanic
::UnexpectedPanic
,
// The mid-clone panic is captured with catch_unwind...
292 let err
= panic
::catch_unwind(panic
::AssertUnwindSafe(|| {
293 MaybeUninit
::write_slice_cloned(&mut dst
, &src
);
// A non-panicking run would be a bug.
299 Ok(_
) => unreachable
!(),
// ...and must carry exactly the "expected panic on clone" message; anything
// else is re-thrown.
302 .downcast
::<&'
static str>()
303 .and_then(|s
| if *s
== "expected panic on clone" { Ok(s) }
else { Err(s) }
)
304 .unwrap_or_else(|p
| panic
::resume_unwind(p
));
// Every clone made before the panic must have been dropped again, leaving
// only the original Rc.
306 assert_eq
!(Rc
::strong_count(&rc
), 1)
// `write_slice_cloned` must not drop the (uninitialized) destination slots;
// if it did, the bomb Drop impl below would panic.
// NOTE(review): elided fragment — orig lines 313-317 (the bomb type and the
// start of its Drop impl), 319-321, 323-324 (the `src` definition) and 326+
// (fn close) are not visible in this chunk; visible lines kept byte-identical.
312 fn uninit_write_slice_cloned_no_drop() {
// Body of the Drop impl: reaching it at all is the failure.
318 panic
!("dropped a bomb! kaboom")
322 let mut dst
= [MaybeUninit
::uninit()];
325 MaybeUninit
::write_slice_cloned(&mut dst
, &src
);
/// `assume_init_read` works in const evaluation and yields the written value.
fn uninit_const_assume_init_read() {
    const FOO: u32 = unsafe { MaybeUninit::new(42).assume_init_read() };
    // NOTE(review): the assertion consuming `FOO` (orig lines 333+) is not
    // visible in this chunk.
}
// Field-by-field initialization of a `MaybeUninit<Foo>` through raw pointers
// must be usable in const context, in either field order.
// NOTE(review): elided fragment — orig lines 338-345 (struct `Foo`), 349-352
// (the `init_x` call and final `assume_init`), and the `unsafe` wrappers /
// closing braces of the helpers (orig 354, 356-358, 360, 362-364) are not
// visible in this chunk; visible lines kept byte-identical.
337 fn const_maybe_uninit() {
340 #[derive(Debug, PartialEq)]
// Built at const-eval time by writing each field individually.
346 const FIELD_BY_FIELD
: Foo
= unsafe {
347 let mut val
= MaybeUninit
::uninit();
348 init_y(&mut val
); // order shouldn't matter
// Writes Foo.x in place through addr_of_mut!, avoiding a reference to
// uninitialized memory.
353 const fn init_x(foo
: &mut MaybeUninit
<Foo
>) {
355 *ptr
::addr_of_mut
!((*foo
.as_mut_ptr()).x
) = 1;
// Same for Foo.y.
359 const fn init_y(foo
: &mut MaybeUninit
<Foo
>) {
361 *ptr
::addr_of_mut
!((*foo
.as_mut_ptr()).y
) = 2;
// The piecewise-initialized constant must equal the plainly constructed value.
365 assert_eq
!(FIELD_BY_FIELD
, Foo { x: 1, y: 2 }
);
380 assert_eq
!(offset_of
!(Foo
, x
), 0);
381 assert_eq
!(offset_of
!(Foo
, y
), 2);
382 assert_eq
!(offset_of
!(Foo
, z
.0), 4);
383 assert_eq
!(offset_of
!(Foo
, z
.1), 5);
385 // Layout of tuples is unstable
386 assert
!(offset_of
!((u8, u16), 0) <= size_of
::<(u8, u16)>() - 1);
387 assert
!(offset_of
!((u8, u16), 1) <= size_of
::<(u8, u16)>() - 2);
398 // Ensure that this type of generics works
399 fn offs_of_z
<T
>() -> usize {
400 offset_of
!(Generic
<T
>, z
)
403 assert_eq
!(offset_of
!(Generic
<u8>, z
), 8);
404 assert_eq
!(offs_of_z
::<u8>(), 8);
406 // Ensure that it works with the implicit lifetime in `Box<dyn Trait + '_>`.
407 assert_eq
!(offset_of
!(Generic
<Box
<dyn Trait
>>, z
), 8);
411 fn offset_of_union() {
420 #[derive(Copy, Clone)]
423 assert_eq
!(offset_of
!(Foo
, x
), 0);
424 assert_eq
!(offset_of
!(Foo
, y
), 0);
425 assert_eq
!(offset_of
!(Foo
, z
.0), 0);
426 assert_eq
!(offset_of
!(Foo
, z
.1), 1);
458 assert_eq
!(offset_of
!(Alpha
, x
), 0);
459 assert_eq
!(offset_of
!(Alpha
, y
), 2);
461 assert_eq
!(offset_of
!(Beta
, x
), 0);
462 assert_eq
!(offset_of
!(Beta
, y
), 2);
464 assert_eq
!(offset_of
!(Gamma
, x
), 0);
465 assert_eq
!(offset_of
!(Gamma
, y
), 2);
469 fn offset_of_packed() {
476 assert_eq
!(offset_of
!(Foo
, x
), 0);
477 assert_eq
!(offset_of
!(Foo
, y
), 1);
481 fn offset_of_projection() {
492 impl Projector
for () {
496 assert_eq
!(offset_of
!(<() as Projector
>::Type
, x
), 0);
497 assert_eq
!(offset_of
!(<() as Projector
>::Type
, y
), 2);
501 fn offset_of_alias() {
510 assert_eq
!(offset_of
!(Bar
, x
), 0);
511 assert_eq
!(offset_of
!(Bar
, y
), 2);
515 fn const_offset_of() {
522 const X_OFFSET
: usize = offset_of
!(Foo
, x
);
523 const Y_OFFSET
: usize = offset_of
!(Foo
, y
);
525 assert_eq
!(X_OFFSET
, 0);
526 assert_eq
!(Y_OFFSET
, 2);
530 fn offset_of_without_const_promotion() {
532 struct Foo
<SuppressConstPromotion
> {
535 _scp
: SuppressConstPromotion
,
538 // Normally, offset_of is always const promoted.
539 // The generic parameter prevents this from happening.
540 // This is needed to test the codegen impl of offset_of
541 fn inner
<SuppressConstPromotion
>() {
542 assert_eq
!(offset_of
!(Foo
<SuppressConstPromotion
>, x
), 0);
543 assert_eq
!(offset_of
!(Foo
<SuppressConstPromotion
>, y
), 2);
550 fn offset_of_addr() {
561 let base
= Foo { x: 0, y: 0, z: Bar(0, 0) }
;
563 assert_eq
!(ptr
::addr_of
!(base
).addr() + offset_of
!(Foo
, x
), ptr
::addr_of
!(base
.x
).addr());
564 assert_eq
!(ptr
::addr_of
!(base
).addr() + offset_of
!(Foo
, y
), ptr
::addr_of
!(base
.y
).addr());
565 assert_eq
!(ptr
::addr_of
!(base
).addr() + offset_of
!(Foo
, z
.0), ptr
::addr_of
!(base
.z
.0).addr());
566 assert_eq
!(ptr
::addr_of
!(base
).addr() + offset_of
!(Foo
, z
.1), ptr
::addr_of
!(base
.z
.1).addr());
// NOTE(review): elided fragment — orig lines 572-573 and 575-580 (the rest of
// struct `Foo`) plus 583 and 586 are not visible in this chunk; visible lines
// kept byte-identical.
570 fn const_maybe_uninit_zeroed() {
571 // Sanity check for `MaybeUninit::zeroed` in a realistic const situation (plugin array term)
// Only visible field of `Foo`: an Option of a reference, for which all-zero
// bytes are a valid None — presumably the remaining fields (orig 575-580) are
// likewise zero-valid so that `zeroed().assume_init()` is sound; confirm in
// the full file.
574 a
: Option
<&'
static str>,
// Newtype so the raw pointer can live in a `static` (needs a Sync wrapper).
581 struct FooPtr(*const Foo
);
582 unsafe impl Sync
for FooPtr {}
// A one-element array of zeroed Foo, promoted to a static, exposed as *const.
584 static UNINIT
: FooPtr
= FooPtr([unsafe { MaybeUninit::zeroed().assume_init() }
].as_ptr());
585 const SIZE
: usize = size_of
::<Foo
>();
// Reinterpret the pointee as raw bytes: every byte must be zero.
587 assert_eq
!(unsafe { (*UNINIT.0.cast::<[[u8; SIZE]; 1]>())[0] }
, [0u8; SIZE
]);