// run-pass
// ignore-emscripten
// (compiletest directives: the test must build and run successfully; it is
// skipped on Emscripten targets — presumably because the masked gather/scatter
// lowering is unsupported there. TODO confirm against compiletest docs.)

// Test that the simd_{gather,scatter} intrinsics produce the correct results.

// `platform_intrinsics` is required for the `extern "platform-intrinsic"`
// declarations below; `repr_simd` for the `#[repr(simd)]` vector type.
#![feature(repr_simd, platform_intrinsics)]
// The vector type below is deliberately named `x4` (lowercase), which would
// otherwise trip the camel-case lint.
#![allow(non_camel_case_types)]
/// A generic 4-lane SIMD vector: four elements of `T` with SIMD layout.
/// Used both for data lanes (`x4<f32>`, `x4<i32>`) and for lanes of
/// pointers (`x4<*const f32>`, `x4<*mut f32>`) in the tests below.
#[repr(simd)]
#[derive(Copy, Clone, PartialEq, Debug)]
struct x4<T>(pub T, pub T, pub T, pub T);
extern "platform-intrinsic" {
    // Masked gather: for each lane, if the mask lane is set, load through the
    // corresponding pointer in `y`; otherwise keep the lane from `x` (the
    // default). (Demonstrated below: the disabled lane comes back as -3.)
    fn simd_gather<T, U, V>(x: T, y: U, z: V) -> T;
    // Masked scatter: for each lane with the mask set, store the lane of `x`
    // through the corresponding pointer in `y`; masked-off lanes are skipped.
    fn simd_scatter<T, U, V>(x: T, y: U, z: V) -> ();
}
fn main() {
    // Backing storage the pointer lanes will read from / write into.
    let mut x = [0_f32, 1., 2., 3., 4., 5., 6., 7.];

    // Per-lane default returned by gather where the mask is off.
    let default = x4(-3_f32, -3., -3., -3.);
    // Expected gather result: x[0], x[2], <default>, x[6] — lane 2 is masked off.
    let s_strided = x4(0_f32, 2., -3., 6.);
    // Mask lanes: all-bits-set (-1) = active, 0 = inactive. Lane 2 is disabled.
    let mask = x4(-1_i32, -1, 0, -1);

    // reading from *const
    unsafe {
        let pointer = &x[0] as *const f32;
        // Stride-2 pointers into `x`: &x[0], &x[2], &x[4], &x[6].
        // The cast on the first element fixes the tuple's element type.
        let pointers = x4(
            pointer.offset(0) as *const f32,
            pointer.offset(2),
            pointer.offset(4),
            pointer.offset(6)
        );

        let r_strided = simd_gather(default, pointers, mask);

        assert_eq!(r_strided, s_strided);
    }

    // reading from *mut — same gather, but through mutable pointers.
    unsafe {
        let pointer = &mut x[0] as *mut f32;
        let pointers = x4(
            pointer.offset(0) as *mut f32,
            pointer.offset(2),
            pointer.offset(4),
            pointer.offset(6)
        );

        let r_strided = simd_gather(default, pointers, mask);

        assert_eq!(r_strided, s_strided);
    }

    // writing to *mut
    unsafe {
        let pointer = &mut x[0] as *mut f32;
        let pointers = x4(
            pointer.offset(0) as *mut f32,
            pointer.offset(2),
            pointer.offset(4),
            pointer.offset(6)
        );

        let values = x4(42_f32, 43_f32, 44_f32, 45_f32);
        simd_scatter(values, pointers, mask);

        // x[0], x[2], x[6] were overwritten; x[4] kept its old value because
        // lane 2 of the mask is off (44 was never stored).
        assert_eq!(x, [42., 1., 43., 3., 4., 5., 45., 7.]);
    }

    // test modifying array of *const f32
    // (Lanes are now pointers themselves; the f32 values in `x` no longer
    // matter — only the addresses are compared.)
    let mut y = [
        &x[0] as *const f32,
        &x[1] as *const f32,
        &x[2] as *const f32,
        &x[3] as *const f32,
        &x[4] as *const f32,
        &x[5] as *const f32,
        &x[6] as *const f32,
        &x[7] as *const f32
    ];

    // Shadow the earlier f32 vectors with pointer-valued equivalents.
    let default = x4(y[0], y[0], y[0], y[0]);
    // Expected: y[0], y[2], <default = y[0]>, y[6] — lane 2 masked off again.
    let s_strided = x4(y[0], y[2], y[0], y[6]);

    // reading from *const
    unsafe {
        let pointer = &y[0] as *const *const f32;
        let pointers = x4(
            pointer.offset(0) as *const *const f32,
            pointer.offset(2),
            pointer.offset(4),
            pointer.offset(6)
        );

        let r_strided = simd_gather(default, pointers, mask);

        assert_eq!(r_strided, s_strided);
    }

    // reading from *mut
    unsafe {
        let pointer = &mut y[0] as *mut *const f32;
        let pointers = x4(
            pointer.offset(0) as *mut *const f32,
            pointer.offset(2),
            pointer.offset(4),
            pointer.offset(6)
        );

        let r_strided = simd_gather(default, pointers, mask);

        assert_eq!(r_strided, s_strided);
    }

    // writing to *mut
    unsafe {
        let pointer = &mut y[0] as *mut *const f32;
        let pointers = x4(
            pointer.offset(0) as *mut *const f32,
            pointer.offset(2),
            pointer.offset(4),
            pointer.offset(6)
        );

        // Scatter pointer values into y[0], y[2], y[6]; y[4] is masked off.
        let values = x4(y[7], y[6], y[5], y[1]);
        simd_scatter(values, pointers, mask);

        // y[0] = old y[7] (&x[7]), y[2] = old y[6] (&x[6]), y[4] unchanged
        // (&x[4], lane 2 masked), y[6] = old y[1] (&x[1]).
        let s = [
            &x[7] as *const f32,
            &x[1] as *const f32,
            &x[6] as *const f32,
            &x[3] as *const f32,
            &x[4] as *const f32,
            &x[5] as *const f32,
            &x[1] as *const f32,
            &x[7] as *const f32
        ];
        assert_eq!(y, s);
    }
}