use core::cell::RefCell;
use core::ptr::*;

#[test]
fn test() {
    unsafe {
        struct Pair {
            fst: isize,
            snd: isize,
        };
        let mut p = Pair { fst: 10, snd: 20 };
        let pptr: *mut Pair = &mut p;
        let iptr: *mut isize = pptr as *mut isize;
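        // The cast makes `iptr` point at the first field of `p`; the reads and writes
        // below rely on `fst` living at offset 0 of `Pair`.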
        assert_eq!(*iptr, 10);
        *iptr = 30;
        assert_eq!(*iptr, 30);
        assert_eq!(p.fst, 30);

        *pptr = Pair { fst: 50, snd: 60 };
        assert_eq!(*iptr, 50);
        assert_eq!(p.fst, 50);
        assert_eq!(p.snd, 60);

        let v0 = vec![32000u16, 32001u16, 32002u16];
        let mut v1 = vec![0u16, 0u16, 0u16];

        copy(v0.as_ptr().offset(1), v1.as_mut_ptr().offset(1), 1);
        assert!((v1[0] == 0u16 && v1[1] == 32001u16 && v1[2] == 0u16));
        copy(v0.as_ptr().offset(2), v1.as_mut_ptr(), 1);
        assert!((v1[0] == 32002u16 && v1[1] == 32001u16 && v1[2] == 0u16));
        copy(v0.as_ptr(), v1.as_mut_ptr().offset(2), 1);
        assert!((v1[0] == 32002u16 && v1[1] == 32001u16 && v1[2] == 32000u16));
    }
}

#[test]
fn test_is_null() {
    let p: *const isize = null();
    assert!(p.is_null());

    let q = p.wrapping_offset(1);
    assert!(!q.is_null());

    let mp: *mut isize = null_mut();
    assert!(mp.is_null());

    let mq = mp.wrapping_offset(1);
    assert!(!mq.is_null());

    // Pointers to unsized types -- slices
    let s: &mut [u8] = &mut [1, 2, 3];
    let cs: *const [u8] = s;
    assert!(!cs.is_null());

    let ms: *mut [u8] = s;
    assert!(!ms.is_null());

    let cz: *const [u8] = &[];
    assert!(!cz.is_null());

    let mz: *mut [u8] = &mut [];
    assert!(!mz.is_null());

    let ncs: *const [u8] = null::<[u8; 3]>();
    assert!(ncs.is_null());

    let nms: *mut [u8] = null_mut::<[u8; 3]>();
    assert!(nms.is_null());

    // Pointers to unsized types -- trait objects
    let ci: *const dyn ToString = &3;
    assert!(!ci.is_null());

    let mi: *mut dyn ToString = &mut 3;
    assert!(!mi.is_null());

    let nci: *const dyn ToString = null::<isize>();
    assert!(nci.is_null());

    let nmi: *mut dyn ToString = null_mut::<isize>();
    assert!(nmi.is_null());
}

#[test]
fn test_as_ref() {
    unsafe {
        let p: *const isize = null();
        assert_eq!(p.as_ref(), None);

        let q: *const isize = &2;
        assert_eq!(q.as_ref().unwrap(), &2);

        let p: *mut isize = null_mut();
        assert_eq!(p.as_ref(), None);

        let q: *mut isize = &mut 2;
        assert_eq!(q.as_ref().unwrap(), &2);

        // Lifetime inference
        let u = 2isize;
        {
            let p = &u as *const isize;
            assert_eq!(p.as_ref().unwrap(), &2);
        }

        // Pointers to unsized types -- slices
        let s: &mut [u8] = &mut [1, 2, 3];
        let cs: *const [u8] = s;
        assert_eq!(cs.as_ref(), Some(&*s));

        let ms: *mut [u8] = s;
        assert_eq!(ms.as_ref(), Some(&*s));

        let cz: *const [u8] = &[];
        assert_eq!(cz.as_ref(), Some(&[][..]));

        let mz: *mut [u8] = &mut [];
        assert_eq!(mz.as_ref(), Some(&[][..]));

        let ncs: *const [u8] = null::<[u8; 3]>();
        assert_eq!(ncs.as_ref(), None);

        let nms: *mut [u8] = null_mut::<[u8; 3]>();
        assert_eq!(nms.as_ref(), None);

        // Pointers to unsized types -- trait objects
        let ci: *const dyn ToString = &3;
        assert!(ci.as_ref().is_some());

        let mi: *mut dyn ToString = &mut 3;
        assert!(mi.as_ref().is_some());

        let nci: *const dyn ToString = null::<isize>();
        assert!(nci.as_ref().is_none());

        let nmi: *mut dyn ToString = null_mut::<isize>();
        assert!(nmi.as_ref().is_none());
    }
}

#[test]
fn test_as_mut() {
    unsafe {
        let p: *mut isize = null_mut();
        assert!(p.as_mut() == None);

        let q: *mut isize = &mut 2;
        assert!(q.as_mut().unwrap() == &mut 2);

        // Lifetime inference
        let mut u = 2isize;
        {
            let p = &mut u as *mut isize;
            assert!(p.as_mut().unwrap() == &mut 2);
        }

        // Pointers to unsized types -- slices
        let s: &mut [u8] = &mut [1, 2, 3];
        let ms: *mut [u8] = s;
        assert_eq!(ms.as_mut(), Some(&mut [1, 2, 3][..]));

        let mz: *mut [u8] = &mut [];
        assert_eq!(mz.as_mut(), Some(&mut [][..]));

        let nms: *mut [u8] = null_mut::<[u8; 3]>();
        assert_eq!(nms.as_mut(), None);

        // Pointers to unsized types -- trait objects
        let mi: *mut dyn ToString = &mut 3;
        assert!(mi.as_mut().is_some());

        let nmi: *mut dyn ToString = null_mut::<isize>();
        assert!(nmi.as_mut().is_none());
    }
}

#[test]
fn test_ptr_addition() {
    unsafe {
        let xs = vec![5; 16];
        let mut ptr = xs.as_ptr();
        let end = ptr.offset(16);

        while ptr < end {
            assert_eq!(*ptr, 5);
            ptr = ptr.offset(1);
        }

        let mut xs_mut = xs;
        let mut m_ptr = xs_mut.as_mut_ptr();
        let m_end = m_ptr.offset(16);

        while m_ptr < m_end {
            *m_ptr += 5;
            m_ptr = m_ptr.offset(1);
        }

        assert!(xs_mut == vec![10; 16]);
    }
}

#[test]
fn test_ptr_subtraction() {
    unsafe {
        let xs = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
        let mut idx = 9;
        let ptr = xs.as_ptr();

        while idx >= 0 {
            assert_eq!(*(ptr.offset(idx as isize)), idx as isize);
            idx = idx - 1;
        }

        let mut xs_mut = xs;
        let m_start = xs_mut.as_mut_ptr();
        let mut m_ptr = m_start.offset(9);

        loop {
            *m_ptr += *m_ptr;
            if m_ptr == m_start {
                break;
            }
            m_ptr = m_ptr.offset(-1);
        }

        assert_eq!(xs_mut, [0, 2, 4, 6, 8, 10, 12, 14, 16, 18]);
    }
}

#[test]
fn test_set_memory() {
    let mut xs = [0u8; 20];
    let ptr = xs.as_mut_ptr();
    unsafe {
        write_bytes(ptr, 5u8, xs.len());
    }
    assert!(xs == [5u8; 20]);
}

#[test]
fn test_unsized_nonnull() {
    let xs: &[i32] = &[1, 2, 3];
    let ptr = unsafe { NonNull::new_unchecked(xs as *const [i32] as *mut [i32]) };
    let ys = unsafe { ptr.as_ref() };
    let zs: &[i32] = &[1, 2, 3];
    assert!(ys == zs);
}

#[test]
#[allow(warnings)]
// Have a symbol for the test below. It doesn’t need to be an actual variadic function, match the
// ABI, or even point to actual executable code, because the function itself is never invoked.
#[no_mangle]
pub fn test_variadic_fnptr() {
    use core::hash::{Hash, SipHasher};
    extern "C" {
        fn test_variadic_fnptr(_: u64, ...) -> f64;
    }
    let p: unsafe extern "C" fn(u64, ...) -> f64 = test_variadic_fnptr;
    let q = p.clone();
    assert_eq!(p, q);
    assert!(!(p < q));
    let mut s = SipHasher::new();
    assert_eq!(p.hash(&mut s), q.hash(&mut s));
}

#[test]
fn write_unaligned_drop() {
    thread_local! {
        static DROPS: RefCell<Vec<u32>> = RefCell::new(Vec::new());
    }

    struct Dropper(u32);

    impl Drop for Dropper {
        fn drop(&mut self) {
            DROPS.with(|d| d.borrow_mut().push(self.0));
        }
    }

    {
        let c = Dropper(0);
        let mut t = Dropper(1);
        unsafe {
            write_unaligned(&mut t, c);
        }
    }
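    // `write_unaligned` moved `c` into `t` without reading or dropping the old value of `t`,
    // so the only drop recorded is the moved-in `Dropper(0)` (when `t` left the inner scope);
    // the original `Dropper(1)` is leaked.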
    DROPS.with(|d| assert_eq!(*d.borrow(), [0]));
}

#[test]
#[cfg_attr(miri, ignore)] // Miri does not compute a maximal `mid` for `align_offset`
fn align_offset_zst() {
    // For pointers of stride = 0, the pointer is already aligned or it cannot be aligned at
    // all, because no amount of elements will align the pointer.
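    // E.g. a zero-stride pointer at address 5 can never become 4-aligned no matter how many
    // zero-sized elements are stepped over, so `align_offset(4)` reports `!0` (usize::MAX).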
    let mut p = 1;
    while p < 1024 {
        assert_eq!((p as *const ()).align_offset(p), 0);
        if p != 1 {
            assert_eq!(((p + 1) as *const ()).align_offset(p), !0);
        }
        p = (p + 1).next_power_of_two();
    }
}

#[test]
#[cfg_attr(miri, ignore)] // Miri does not compute a maximal `mid` for `align_offset`
fn align_offset_stride1() {
    // For pointers of stride = 1, the pointer can always be aligned. The offset is equal to
    // number of bytes.
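    // E.g. a byte pointer at address 5 is 3 bytes away from the next 4-aligned address (8),
    // so `align_offset(4)` returns 3.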
    let mut align = 1;
    while align < 1024 {
        for ptr in 1..2 * align {
            let expected = ptr % align;
            let offset = if expected == 0 { 0 } else { align - expected };
            assert_eq!(
                (ptr as *const u8).align_offset(align),
                offset,
                "ptr = {}, align = {}, size = 1",
                ptr,
                align
            );
        }
        align = (align + 1).next_power_of_two();
    }
}

#[test]
#[cfg_attr(miri, ignore)] // Miri is too slow
fn align_offset_weird_strides() {
    #[repr(packed)]
    struct A3(u16, u8);
    struct A4(u32);
    #[repr(packed)]
    struct A5(u32, u8);
    #[repr(packed)]
    struct A6(u32, u16);
    #[repr(packed)]
    struct A7(u32, u16, u8);
    #[repr(packed)]
    struct A8(u32, u32);
    #[repr(packed)]
    struct A9(u32, u32, u8);
    #[repr(packed)]
    struct A10(u32, u32, u16);

    unsafe fn test_weird_stride<T>(ptr: *const T, align: usize) -> bool {
        let numptr = ptr as usize;
        let mut expected = usize::max_value();
        // Naive but definitely correct way to find the *first* aligned element of stride::<T>.
        for el in 0..align {
            if (numptr + el * ::std::mem::size_of::<T>()) % align == 0 {
                expected = el;
                break;
            }
        }
        let got = ptr.align_offset(align);
        if got != expected {
            eprintln!(
                "aligning {:p} (with stride of {}) to {}, expected {}, got {}",
                ptr,
                ::std::mem::size_of::<T>(),
                align,
                expected,
                got
            );
            return true;
        }
        return false;
    }

    // For pointers of stride != 1, we verify the algorithm against the naivest possible
    // implementation
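    // E.g. `A3` is 3 bytes, so starting at address 1 with `align = 4` the candidate elements
    // sit at addresses 1, 4, 7, ...; the first aligned one is element 1, and `align_offset`
    // is expected to return 1.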
    let mut align = 1;
    let mut x = false;
    while align < 1024 {
        for ptr in 1usize..4 * align {
            unsafe {
                x |= test_weird_stride::<A3>(ptr as *const A3, align);
                x |= test_weird_stride::<A4>(ptr as *const A4, align);
                x |= test_weird_stride::<A5>(ptr as *const A5, align);
                x |= test_weird_stride::<A6>(ptr as *const A6, align);
                x |= test_weird_stride::<A7>(ptr as *const A7, align);
                x |= test_weird_stride::<A8>(ptr as *const A8, align);
                x |= test_weird_stride::<A9>(ptr as *const A9, align);
                x |= test_weird_stride::<A10>(ptr as *const A10, align);
            }
        }
        align = (align + 1).next_power_of_two();
    }
    assert!(!x);
}