]> git.proxmox.com Git - rustc.git/blob - library/core/tests/ptr.rs
New upstream version 1.50.0+dfsg1
[rustc.git] / library / core / tests / ptr.rs
1 use core::cell::RefCell;
2 use core::ptr::*;
3
#[test]
fn test_const_from_raw_parts() {
    // `slice_from_raw_parts` must be usable in a const context and round-trip
    // a slice's (pointer, length) pair back to an identical slice.
    const SLICE: &[u8] = &[1, 2, 3, 4];
    const FROM_RAW: &[u8] = unsafe { &*slice_from_raw_parts(SLICE.as_ptr(), SLICE.len()) };
    assert_eq!(SLICE, FROM_RAW);

    // At runtime, a shorter length yields the corresponding prefix.
    let data = &[1, 2, 3, 4, 5];
    let prefix = unsafe { &*slice_from_raw_parts(data.as_ptr(), 2) };
    assert_eq!(&data[..2], prefix);
}
14
#[test]
fn test() {
    unsafe {
        // View a two-field struct through a pointer to its first field.
        struct Pair {
            fst: isize,
            snd: isize,
        }
        let mut pair = Pair { fst: 10, snd: 20 };
        let pair_ptr: *mut Pair = &mut pair;
        let fst_ptr: *mut isize = pair_ptr as *mut isize;
        assert_eq!(*fst_ptr, 10);

        // A write through the field pointer is visible through the struct...
        *fst_ptr = 30;
        assert_eq!(*fst_ptr, 30);
        assert_eq!(pair.fst, 30);

        // ...and a whole-struct write is visible through the field pointer.
        *pair_ptr = Pair { fst: 50, snd: 60 };
        assert_eq!(*fst_ptr, 50);
        assert_eq!(pair.fst, 50);
        assert_eq!(pair.snd, 60);

        // `copy` moves single elements between arbitrary offsets in two buffers.
        let src = vec![32000u16, 32001u16, 32002u16];
        let mut dst = vec![0u16, 0u16, 0u16];

        copy(src.as_ptr().offset(1), dst.as_mut_ptr().offset(1), 1);
        assert_eq!(dst, [0u16, 32001u16, 0u16]);
        copy(src.as_ptr().offset(2), dst.as_mut_ptr(), 1);
        assert_eq!(dst, [32002u16, 32001u16, 0u16]);
        copy(src.as_ptr(), dst.as_mut_ptr().offset(2), 1);
        assert_eq!(dst, [32002u16, 32001u16, 32000u16]);
    }
}
46
#[test]
fn test_is_null() {
    // Thin pointers: only the literal null address is null; one step away
    // (via `wrapping_offset`, which never needs in-bounds pointers) is not.
    let p: *const isize = null();
    assert!(p.is_null());
    assert!(!p.wrapping_offset(1).is_null());

    let mp: *mut isize = null_mut();
    assert!(mp.is_null());
    assert!(!mp.wrapping_offset(1).is_null());

    // Fat pointers to slices: null-ness depends only on the data pointer,
    // not on the length metadata — even an empty slice is non-null.
    let s: &mut [u8] = &mut [1, 2, 3];
    let cs: *const [u8] = s;
    assert!(!cs.is_null());
    let ms: *mut [u8] = s;
    assert!(!ms.is_null());

    let cz: *const [u8] = &[];
    assert!(!cz.is_null());
    let mz: *mut [u8] = &mut [];
    assert!(!mz.is_null());

    // A null array pointer unsized to a slice pointer is still null.
    let ncs: *const [u8] = null::<[u8; 3]>();
    assert!(ncs.is_null());
    let nms: *mut [u8] = null_mut::<[u8; 3]>();
    assert!(nms.is_null());

    // Fat pointers to trait objects behave the same way.
    let ci: *const dyn ToString = &3;
    assert!(!ci.is_null());
    let mi: *mut dyn ToString = &mut 3;
    assert!(!mi.is_null());

    let nci: *const dyn ToString = null::<isize>();
    assert!(nci.is_null());
    let nmi: *mut dyn ToString = null_mut::<isize>();
    assert!(nmi.is_null());
}
94
#[test]
fn test_as_ref() {
    unsafe {
        // Null thin pointers become `None`; valid ones become `Some(&T)`.
        let n: *const isize = null();
        assert!(n.as_ref().is_none());

        let v: *const isize = &2;
        assert_eq!(*v.as_ref().unwrap(), 2);

        let n: *mut isize = null_mut();
        assert!(n.as_ref().is_none());

        let v: *mut isize = &mut 2;
        assert_eq!(*v.as_ref().unwrap(), 2);

        // The lifetime of the returned reference is inferred at the use site,
        // not tied to the scope the pointer was created in.
        let u = 2isize;
        {
            let inner = &u as *const isize;
            assert_eq!(*inner.as_ref().unwrap(), 2);
        }

        // Fat pointers to slices: length metadata survives the round trip.
        let s: &mut [u8] = &mut [1, 2, 3];
        let cs: *const [u8] = s;
        assert_eq!(cs.as_ref(), Some(&*s));

        let ms: *mut [u8] = s;
        assert_eq!(ms.as_ref(), Some(&*s));

        let cz: *const [u8] = &[];
        assert_eq!(cz.as_ref(), Some(&[][..]));

        let mz: *mut [u8] = &mut [];
        assert_eq!(mz.as_ref(), Some(&[][..]));

        let ncs: *const [u8] = null::<[u8; 3]>();
        assert!(ncs.as_ref().is_none());

        let nms: *mut [u8] = null_mut::<[u8; 3]>();
        assert!(nms.as_ref().is_none());

        // Fat pointers to trait objects follow the same rule.
        let ci: *const dyn ToString = &3;
        assert!(ci.as_ref().is_some());

        let mi: *mut dyn ToString = &mut 3;
        assert!(mi.as_ref().is_some());

        let nci: *const dyn ToString = null::<isize>();
        assert!(nci.as_ref().is_none());

        let nmi: *mut dyn ToString = null_mut::<isize>();
        assert!(nmi.as_ref().is_none());
    }
}
151
#[test]
fn test_as_mut() {
    unsafe {
        // Null yields `None`; a valid pointer yields `Some(&mut T)`.
        let n: *mut isize = null_mut();
        assert!(n.as_mut().is_none());

        let v: *mut isize = &mut 2;
        assert_eq!(*v.as_mut().unwrap(), 2);

        // The returned reference's lifetime is inferred at the use site.
        let mut u = 2isize;
        {
            let inner = &mut u as *mut isize;
            assert_eq!(*inner.as_mut().unwrap(), 2);
        }

        // Fat pointers to slices keep their length metadata.
        let s: &mut [u8] = &mut [1, 2, 3];
        let ms: *mut [u8] = s;
        assert_eq!(ms.as_mut(), Some(&mut [1, 2, 3][..]));

        let mz: *mut [u8] = &mut [];
        assert_eq!(mz.as_mut(), Some(&mut [][..]));

        let nms: *mut [u8] = null_mut::<[u8; 3]>();
        assert!(nms.as_mut().is_none());

        // Fat pointers to trait objects follow the same rule.
        let mi: *mut dyn ToString = &mut 3;
        assert!(mi.as_mut().is_some());

        let nmi: *mut dyn ToString = null_mut::<isize>();
        assert!(nmi.as_mut().is_none());
    }
}
187
#[test]
fn test_ptr_addition() {
    unsafe {
        // Walk a buffer by repeatedly offsetting a raw pointer until it
        // reaches the one-past-the-end pointer.
        let xs = vec![5; 16];
        let mut cur = xs.as_ptr();
        let end = cur.offset(16);

        while cur != end {
            assert_eq!(*cur, 5);
            cur = cur.offset(1);
        }

        // Same walk with a mutable pointer, bumping every element.
        let mut xs_mut = xs;
        let mut m_cur = xs_mut.as_mut_ptr();
        let m_end = m_cur.offset(16);

        while m_cur != m_end {
            *m_cur += 5;
            m_cur = m_cur.offset(1);
        }

        assert_eq!(xs_mut, vec![10; 16]);
    }
}
212
#[test]
fn test_ptr_subtraction() {
    unsafe {
        let xs = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
        let base = xs.as_ptr();

        // Read elements back-to-front via positive offsets from the base.
        for idx in (0..=9isize).rev() {
            assert_eq!(*base.offset(idx), idx);
        }

        // Double every element, walking backwards with `offset` from the
        // start of the (moved, but not reallocated) buffer.
        let mut xs_mut = xs;
        let start = xs_mut.as_mut_ptr();
        for idx in (0..=9isize).rev() {
            let cur = start.offset(idx);
            *cur += *cur;
        }

        assert_eq!(xs_mut, [0, 2, 4, 6, 8, 10, 12, 14, 16, 18]);
    }
}
240
#[test]
fn test_set_memory() {
    // `write_bytes` must fill every byte of the buffer with the given value.
    let mut buf = [0u8; 20];
    unsafe {
        write_bytes(buf.as_mut_ptr(), 5u8, buf.len());
    }
    assert_eq!(buf, [5u8; 20]);
}
250
#[test]
fn test_unsized_nonnull() {
    // `NonNull` must support fat (slice) pointers and hand back the same slice.
    let xs: &[i32] = &[1, 2, 3];
    let ptr = unsafe { NonNull::new_unchecked(xs as *const [i32] as *mut [i32]) };
    let ys = unsafe { ptr.as_ref() };
    assert_eq!(ys, &[1, 2, 3][..]);
}
259
#[test]
#[allow(warnings)]
// Have a symbol for the test below. It doesn’t need to be an actual variadic function, match the
// ABI, or even point to an actual executable code, because the function itself is never invoked.
#[no_mangle]
pub fn test_variadic_fnptr() {
    // Checks that variadic `extern "C"` function pointers support
    // Clone, PartialEq, PartialOrd and Hash.
    use core::hash::{Hash, SipHasher};
    // This extern declaration resolves to this very test's `#[no_mangle]`
    // symbol above, so the pointer is valid even though the signature is a lie.
    extern "C" {
        fn test_variadic_fnptr(_: u64, ...) -> f64;
    }
    let p: unsafe extern "C" fn(u64, ...) -> f64 = test_variadic_fnptr;
    // `clone` on a fn pointer is just a copy; the two must compare equal
    // and neither may order strictly before the other.
    let q = p.clone();
    assert_eq!(p, q);
    assert!(!(p < q));
    let mut s = SipHasher::new();
    // NOTE(review): `hash` returns `()`, so this assert only verifies that
    // hashing both pointers runs; it does not compare resulting hash values.
    assert_eq!(p.hash(&mut s), q.hash(&mut s));
}
277
#[test]
fn write_unaligned_drop() {
    // Records the id of every `Dropper` dropped on this thread.
    thread_local! {
        static DROPS: RefCell<Vec<u32>> = RefCell::new(Vec::new());
    }

    struct Dropper(u32);

    impl Drop for Dropper {
        fn drop(&mut self) {
            DROPS.with(|d| d.borrow_mut().push(self.0));
        }
    }

    {
        let src = Dropper(0);
        let mut dst = Dropper(1);
        // `write_unaligned` moves `src` into `dst` WITHOUT dropping the old
        // value of `dst`, so Dropper(1) must never be recorded.
        unsafe {
            write_unaligned(&mut dst, src);
        }
    }
    // Only the written value (id 0) is dropped when the scope ends.
    DROPS.with(|d| assert_eq!(*d.borrow(), [0]));
}
301
#[test]
fn align_offset_zst() {
    // For pointers of stride = 0, the pointer is already aligned or it cannot
    // be aligned at all, because no amount of elements will align the pointer.
    // The original walked powers of two via `next_power_of_two`; iterating the
    // shift amounts visits the same alignments 1, 2, 4, …, 512.
    for shift in 0..10 {
        let align = 1usize << shift;
        // An address equal to the alignment is already aligned: offset 0.
        assert_eq!((align as *const ()).align_offset(align), 0);
        if align != 1 {
            // A misaligned ZST pointer can never be fixed up: `!0` signals
            // "impossible".
            assert_eq!(((align + 1) as *const ()).align_offset(align), !0);
        }
    }
}
315
#[test]
fn align_offset_stride1() {
    // For pointers of stride = 1 the pointer can always be aligned; the
    // returned offset equals the number of bytes to the next boundary.
    // Alignments visited are the powers of two 1, 2, 4, …, 512, exactly as
    // the original `next_power_of_two` loop produced.
    for shift in 0..10 {
        let align = 1usize << shift;
        for ptr in 1..2 * align {
            let rem = ptr % align;
            let expected = if rem == 0 { 0 } else { align - rem };
            assert_eq!(
                (ptr as *const u8).align_offset(align),
                expected,
                "ptr = {}, align = {}, size = 1",
                ptr,
                align
            );
        }
    }
}
336
#[test]
fn align_offset_weird_strides() {
    // Packed structs give a range of "odd" sizes (3..=10 bytes) to exercise
    // `align_offset` with strides that are not powers of two.
    #[repr(packed)]
    struct A3(u16, u8);
    struct A4(u32);
    #[repr(packed)]
    struct A5(u32, u8);
    #[repr(packed)]
    struct A6(u32, u16);
    #[repr(packed)]
    struct A7(u32, u16, u8);
    #[repr(packed)]
    struct A8(u32, u32);
    #[repr(packed)]
    struct A9(u32, u32, u8);
    #[repr(packed)]
    struct A10(u32, u32, u16);

    // Returns true when `align_offset` disagrees with a brute-force search
    // for the first aligned element.
    unsafe fn test_weird_stride<T>(ptr: *const T, align: usize) -> bool {
        let addr = ptr as usize;
        let stride = ::std::mem::size_of::<T>();
        // Naive but definitely correct: try each element count in turn;
        // `usize::MAX` means the requested alignment is unreachable.
        let expected = (0..align)
            .find(|&el| (addr + el * stride) % align == 0)
            .unwrap_or(usize::MAX);
        let got = ptr.align_offset(align);
        if got != expected {
            eprintln!(
                "aligning {:p} (with stride of {}) to {}, expected {}, got {}",
                ptr, stride, align, expected, got
            );
            return true;
        }
        false
    }

    // Verify the real algorithm against the naive implementation for every
    // combination of (type, pointer value, power-of-two alignment).
    let mut failed = false;
    // Miri is too slow for the full range.
    let limit = if cfg!(miri) { 32 } else { 1024 };
    let mut align = 1;
    while align < limit {
        for ptr in 1usize..4 * align {
            unsafe {
                failed |= test_weird_stride::<A3>(ptr as *const A3, align);
                failed |= test_weird_stride::<A4>(ptr as *const A4, align);
                failed |= test_weird_stride::<A5>(ptr as *const A5, align);
                failed |= test_weird_stride::<A6>(ptr as *const A6, align);
                failed |= test_weird_stride::<A7>(ptr as *const A7, align);
                failed |= test_weird_stride::<A8>(ptr as *const A8, align);
                failed |= test_weird_stride::<A9>(ptr as *const A9, align);
                failed |= test_weird_stride::<A10>(ptr as *const A10, align);
            }
        }
        align = (align + 1).next_power_of_two();
    }
    assert!(!failed);
}
403
#[test]
fn offset_from() {
    // Distances between in-bounds pointers are measured in elements, signed
    // by direction; `offset` round-trips the distance.
    let mut a = [0; 5];
    let first: *mut i32 = &mut a[1];
    let second: *mut i32 = &mut a[3];
    unsafe {
        assert_eq!(second.offset_from(first), 2);
        assert_eq!(first.offset_from(second), -2);
        assert_eq!(first.offset(2), second);
        assert_eq!(second.offset(-2), first);
    }
}
415 }