// library/stdarch/crates/core_arch/src/arm_shared/crypto.rs
use crate::core_arch::arm_shared::{uint32x4_t, uint8x16_t};

#[allow(improper_ctypes)]
extern "unadjusted" {
    #[cfg_attr(target_arch = "aarch64", link_name = "llvm.aarch64.crypto.aese")]
    #[cfg_attr(target_arch = "arm", link_name = "llvm.arm.neon.aese")]
    fn vaeseq_u8_(data: uint8x16_t, key: uint8x16_t) -> uint8x16_t;
    #[cfg_attr(target_arch = "aarch64", link_name = "llvm.aarch64.crypto.aesd")]
    #[cfg_attr(target_arch = "arm", link_name = "llvm.arm.neon.aesd")]
    fn vaesdq_u8_(data: uint8x16_t, key: uint8x16_t) -> uint8x16_t;
    #[cfg_attr(target_arch = "aarch64", link_name = "llvm.aarch64.crypto.aesmc")]
    #[cfg_attr(target_arch = "arm", link_name = "llvm.arm.neon.aesmc")]
    fn vaesmcq_u8_(data: uint8x16_t) -> uint8x16_t;
    #[cfg_attr(target_arch = "aarch64", link_name = "llvm.aarch64.crypto.aesimc")]
    #[cfg_attr(target_arch = "arm", link_name = "llvm.arm.neon.aesimc")]
    fn vaesimcq_u8_(data: uint8x16_t) -> uint8x16_t;
    #[cfg_attr(target_arch = "aarch64", link_name = "llvm.aarch64.crypto.sha1h")]
    #[cfg_attr(target_arch = "arm", link_name = "llvm.arm.neon.sha1h")]
    fn vsha1h_u32_(hash_e: u32) -> u32;
    #[cfg_attr(target_arch = "aarch64", link_name = "llvm.aarch64.crypto.sha1su0")]
    #[cfg_attr(target_arch = "arm", link_name = "llvm.arm.neon.sha1su0")]
    fn vsha1su0q_u32_(w0_3: uint32x4_t, w4_7: uint32x4_t, w8_11: uint32x4_t) -> uint32x4_t;
    #[cfg_attr(target_arch = "aarch64", link_name = "llvm.aarch64.crypto.sha1su1")]
    #[cfg_attr(target_arch = "arm", link_name = "llvm.arm.neon.sha1su1")]
    fn vsha1su1q_u32_(tw0_3: uint32x4_t, w12_15: uint32x4_t) -> uint32x4_t;
    #[cfg_attr(target_arch = "aarch64", link_name = "llvm.aarch64.crypto.sha1c")]
    #[cfg_attr(target_arch = "arm", link_name = "llvm.arm.neon.sha1c")]
    fn vsha1cq_u32_(hash_abcd: uint32x4_t, hash_e: u32, wk: uint32x4_t) -> uint32x4_t;
    #[cfg_attr(target_arch = "aarch64", link_name = "llvm.aarch64.crypto.sha1p")]
    #[cfg_attr(target_arch = "arm", link_name = "llvm.arm.neon.sha1p")]
    fn vsha1pq_u32_(hash_abcd: uint32x4_t, hash_e: u32, wk: uint32x4_t) -> uint32x4_t;
    #[cfg_attr(target_arch = "aarch64", link_name = "llvm.aarch64.crypto.sha1m")]
    #[cfg_attr(target_arch = "arm", link_name = "llvm.arm.neon.sha1m")]
    fn vsha1mq_u32_(hash_abcd: uint32x4_t, hash_e: u32, wk: uint32x4_t) -> uint32x4_t;
    #[cfg_attr(target_arch = "aarch64", link_name = "llvm.aarch64.crypto.sha256h")]
    #[cfg_attr(target_arch = "arm", link_name = "llvm.arm.neon.sha256h")]
    fn vsha256hq_u32_(hash_abcd: uint32x4_t, hash_efgh: uint32x4_t, wk: uint32x4_t) -> uint32x4_t;
    #[cfg_attr(target_arch = "aarch64", link_name = "llvm.aarch64.crypto.sha256h2")]
    #[cfg_attr(target_arch = "arm", link_name = "llvm.arm.neon.sha256h2")]
    fn vsha256h2q_u32_(hash_efgh: uint32x4_t, hash_abcd: uint32x4_t, wk: uint32x4_t) -> uint32x4_t;
    #[cfg_attr(target_arch = "aarch64", link_name = "llvm.aarch64.crypto.sha256su0")]
    #[cfg_attr(target_arch = "arm", link_name = "llvm.arm.neon.sha256su0")]
    fn vsha256su0q_u32_(w0_3: uint32x4_t, w4_7: uint32x4_t) -> uint32x4_t;
    #[cfg_attr(target_arch = "aarch64", link_name = "llvm.aarch64.crypto.sha256su1")]
    #[cfg_attr(target_arch = "arm", link_name = "llvm.arm.neon.sha256su1")]
    fn vsha256su1q_u32_(tw0_3: uint32x4_t, w8_11: uint32x4_t, w12_15: uint32x4_t) -> uint32x4_t;
}

#[cfg(test)]
use stdarch_test::assert_instr;

// TODO: Use AES for ARM when the minimum LLVM version includes b8baa2a9132498ea286dbb0d03f005760ecc6fdb

/// AES single round encryption.
#[inline]
#[cfg_attr(not(target_arch = "arm"), target_feature(enable = "aes"))]
#[cfg_attr(target_arch = "arm", target_feature(enable = "crypto,v8"))]
#[cfg_attr(test, assert_instr(aese))]
pub unsafe fn vaeseq_u8(data: uint8x16_t, key: uint8x16_t) -> uint8x16_t {
    vaeseq_u8_(data, key)
}

/// AES single round decryption.
#[inline]
#[cfg_attr(not(target_arch = "arm"), target_feature(enable = "aes"))]
#[cfg_attr(target_arch = "arm", target_feature(enable = "crypto,v8"))]
#[cfg_attr(test, assert_instr(aesd))]
pub unsafe fn vaesdq_u8(data: uint8x16_t, key: uint8x16_t) -> uint8x16_t {
    vaesdq_u8_(data, key)
}

/// AES mix columns.
#[inline]
#[cfg_attr(not(target_arch = "arm"), target_feature(enable = "aes"))]
#[cfg_attr(target_arch = "arm", target_feature(enable = "crypto,v8"))]
#[cfg_attr(test, assert_instr(aesmc))]
pub unsafe fn vaesmcq_u8(data: uint8x16_t) -> uint8x16_t {
    vaesmcq_u8_(data)
}

/// AES inverse mix columns.
#[inline]
#[cfg_attr(not(target_arch = "arm"), target_feature(enable = "aes"))]
#[cfg_attr(target_arch = "arm", target_feature(enable = "crypto,v8"))]
#[cfg_attr(test, assert_instr(aesimc))]
pub unsafe fn vaesimcq_u8(data: uint8x16_t) -> uint8x16_t {
    vaesimcq_u8_(data)
}

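// Usage sketch (illustrative, not part of the upstream file): how the four
// AES intrinsics above compose into one AES-128 block encryption, assuming
// the 11 round keys were already expanded in software. AESE performs
// AddRoundKey + SubBytes + ShiftRows in a single instruction, so MixColumns
// (AESMC) is chained separately and the final round omits it, finishing with
// a plain XOR of the last round key.
#[allow(dead_code)]
#[cfg_attr(not(target_arch = "arm"), target_feature(enable = "aes"))]
#[cfg_attr(target_arch = "arm", target_feature(enable = "crypto,v8"))]
unsafe fn aes128_encrypt_block_sketch(block: uint8x16_t, rk: &[uint8x16_t; 11]) -> uint8x16_t {
    let mut state = block;
    // Rounds 1-9: AddRoundKey + SubBytes + ShiftRows, then MixColumns.
    for i in 0..9 {
        state = vaesmcq_u8(vaeseq_u8(state, rk[i]));
    }
    // Round 10 has no MixColumns step.
    state = vaeseq_u8(state, rk[9]);
    // Final AddRoundKey, spelled as an integer XOR to stay self-contained
    // (the NEON API would use veorq_u8).
    core::mem::transmute::<u128, uint8x16_t>(
        core::mem::transmute::<uint8x16_t, u128>(state)
            ^ core::mem::transmute::<uint8x16_t, u128>(rk[10]),
    )
}
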
/// SHA1 fixed rotate.
#[inline]
#[cfg_attr(not(target_arch = "arm"), target_feature(enable = "sha2"))]
#[cfg_attr(target_arch = "arm", target_feature(enable = "crypto,v8"))]
#[cfg_attr(test, assert_instr(sha1h))]
pub unsafe fn vsha1h_u32(hash_e: u32) -> u32 {
    vsha1h_u32_(hash_e)
}

/// SHA1 hash update accelerator, choose.
#[inline]
#[cfg_attr(not(target_arch = "arm"), target_feature(enable = "sha2"))]
#[cfg_attr(target_arch = "arm", target_feature(enable = "crypto,v8"))]
#[cfg_attr(test, assert_instr(sha1c))]
pub unsafe fn vsha1cq_u32(hash_abcd: uint32x4_t, hash_e: u32, wk: uint32x4_t) -> uint32x4_t {
    vsha1cq_u32_(hash_abcd, hash_e, wk)
}

/// SHA1 hash update accelerator, majority.
#[inline]
#[cfg_attr(not(target_arch = "arm"), target_feature(enable = "sha2"))]
#[cfg_attr(target_arch = "arm", target_feature(enable = "crypto,v8"))]
#[cfg_attr(test, assert_instr(sha1m))]
pub unsafe fn vsha1mq_u32(hash_abcd: uint32x4_t, hash_e: u32, wk: uint32x4_t) -> uint32x4_t {
    vsha1mq_u32_(hash_abcd, hash_e, wk)
}

/// SHA1 hash update accelerator, parity.
#[inline]
#[cfg_attr(not(target_arch = "arm"), target_feature(enable = "sha2"))]
#[cfg_attr(target_arch = "arm", target_feature(enable = "crypto,v8"))]
#[cfg_attr(test, assert_instr(sha1p))]
pub unsafe fn vsha1pq_u32(hash_abcd: uint32x4_t, hash_e: u32, wk: uint32x4_t) -> uint32x4_t {
    vsha1pq_u32_(hash_abcd, hash_e, wk)
}

/// SHA1 schedule update accelerator, first part.
#[inline]
#[cfg_attr(not(target_arch = "arm"), target_feature(enable = "sha2"))]
#[cfg_attr(target_arch = "arm", target_feature(enable = "crypto,v8"))]
#[cfg_attr(test, assert_instr(sha1su0))]
pub unsafe fn vsha1su0q_u32(w0_3: uint32x4_t, w4_7: uint32x4_t, w8_11: uint32x4_t) -> uint32x4_t {
    vsha1su0q_u32_(w0_3, w4_7, w8_11)
}

/// SHA1 schedule update accelerator, second part.
#[inline]
#[cfg_attr(not(target_arch = "arm"), target_feature(enable = "sha2"))]
#[cfg_attr(target_arch = "arm", target_feature(enable = "crypto,v8"))]
#[cfg_attr(test, assert_instr(sha1su1))]
pub unsafe fn vsha1su1q_u32(tw0_3: uint32x4_t, w12_15: uint32x4_t) -> uint32x4_t {
    vsha1su1q_u32_(tw0_3, w12_15)
}

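// Usage sketch (illustrative, not part of the upstream file): one group of
// four SHA-1 rounds plus one message-schedule extension. `wk` is assumed to
// already hold w[i..i+4] with the round constant added (vaddq_u32 in the
// NEON API). Rounds 0-19 use the "choose" update shown here (SHA1C);
// rounds 20-39 and 60-79 would call vsha1pq_u32, rounds 40-59 vsha1mq_u32.
#[allow(dead_code)]
#[cfg_attr(not(target_arch = "arm"), target_feature(enable = "sha2"))]
#[cfg_attr(target_arch = "arm", target_feature(enable = "crypto,v8"))]
unsafe fn sha1_round_group_sketch(
    abcd: uint32x4_t,
    e: u32,
    wk: uint32x4_t,
    w0_3: uint32x4_t,
    w4_7: uint32x4_t,
    w8_11: uint32x4_t,
    w12_15: uint32x4_t,
) -> (uint32x4_t, u32, uint32x4_t) {
    // SHA1H rotates the current `a` lane to produce the next group's `e`;
    // the lane extract is done via transmute here to stay self-contained
    // (the NEON API would use vgetq_lane_u32).
    let a = core::mem::transmute::<uint32x4_t, [u32; 4]>(abcd)[0];
    let next_e = vsha1h_u32(a);
    // Four rounds of the "choose" state update.
    let next_abcd = vsha1cq_u32(abcd, e, wk);
    // Schedule extension: SHA1SU0 yields a partial value (`tw0_3`) that
    // SHA1SU1 completes into w[16..20].
    let w16_19 = vsha1su1q_u32(vsha1su0q_u32(w0_3, w4_7, w8_11), w12_15);
    (next_abcd, next_e, w16_19)
}
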
/// SHA256 hash update accelerator.
#[inline]
#[cfg_attr(not(target_arch = "arm"), target_feature(enable = "sha2"))]
#[cfg_attr(target_arch = "arm", target_feature(enable = "crypto,v8"))]
#[cfg_attr(test, assert_instr(sha256h))]
pub unsafe fn vsha256hq_u32(
    hash_abcd: uint32x4_t,
    hash_efgh: uint32x4_t,
    wk: uint32x4_t,
) -> uint32x4_t {
    vsha256hq_u32_(hash_abcd, hash_efgh, wk)
}

/// SHA256 hash update accelerator, upper part.
#[inline]
#[cfg_attr(not(target_arch = "arm"), target_feature(enable = "sha2"))]
#[cfg_attr(target_arch = "arm", target_feature(enable = "crypto,v8"))]
#[cfg_attr(test, assert_instr(sha256h2))]
pub unsafe fn vsha256h2q_u32(
    hash_efgh: uint32x4_t,
    hash_abcd: uint32x4_t,
    wk: uint32x4_t,
) -> uint32x4_t {
    vsha256h2q_u32_(hash_efgh, hash_abcd, wk)
}

/// SHA256 schedule update accelerator, first part.
#[inline]
#[cfg_attr(not(target_arch = "arm"), target_feature(enable = "sha2"))]
#[cfg_attr(target_arch = "arm", target_feature(enable = "crypto,v8"))]
#[cfg_attr(test, assert_instr(sha256su0))]
pub unsafe fn vsha256su0q_u32(w0_3: uint32x4_t, w4_7: uint32x4_t) -> uint32x4_t {
    vsha256su0q_u32_(w0_3, w4_7)
}

/// SHA256 schedule update accelerator, second part.
#[inline]
#[cfg_attr(not(target_arch = "arm"), target_feature(enable = "sha2"))]
#[cfg_attr(target_arch = "arm", target_feature(enable = "crypto,v8"))]
#[cfg_attr(test, assert_instr(sha256su1))]
pub unsafe fn vsha256su1q_u32(
    tw0_3: uint32x4_t,
    w8_11: uint32x4_t,
    w12_15: uint32x4_t,
) -> uint32x4_t {
    vsha256su1q_u32_(tw0_3, w8_11, w12_15)
}

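// Usage sketch (illustrative, not part of the upstream file): one SHA-256
// "quad round" plus one schedule extension. `wk` is assumed to already hold
// four schedule words with their round constants added (vaddq_u32 in the
// NEON API). Note the operand order: SHA256H takes (abcd, efgh) while
// SHA256H2 takes efgh first and the *old* abcd second.
#[allow(dead_code)]
#[cfg_attr(not(target_arch = "arm"), target_feature(enable = "sha2"))]
#[cfg_attr(target_arch = "arm", target_feature(enable = "crypto,v8"))]
unsafe fn sha256_quad_round_sketch(
    hash_abcd: uint32x4_t,
    hash_efgh: uint32x4_t,
    wk: uint32x4_t,
    w0_3: uint32x4_t,
    w4_7: uint32x4_t,
    w8_11: uint32x4_t,
    w12_15: uint32x4_t,
) -> (uint32x4_t, uint32x4_t, uint32x4_t) {
    // Four rounds of state update; EFGH must be updated with the ABCD value
    // from *before* this quad round.
    let abcd = vsha256hq_u32(hash_abcd, hash_efgh, wk);
    let efgh = vsha256h2q_u32(hash_efgh, hash_abcd, wk);
    // Schedule extension: SHA256SU0 yields a partial value (`tw0_3`) that
    // SHA256SU1 completes into w[16..20].
    let w16_19 = vsha256su1q_u32(vsha256su0q_u32(w0_3, w4_7), w8_11, w12_15);
    (abcd, efgh, w16_19)
}
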
#[cfg(test)]
mod tests {
    use crate::core_arch::{arm_shared::*, simd::*};
    use std::mem;
    use stdarch_test::simd_test;

    #[cfg_attr(target_arch = "arm", simd_test(enable = "crypto"))]
    #[cfg_attr(not(target_arch = "arm"), simd_test(enable = "aes"))]
    unsafe fn test_vaeseq_u8() {
        let data = mem::transmute(u8x16::new(1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8));
        let key = mem::transmute(u8x16::new(0, 1, 2, 3, 4, 5, 6, 7, 0, 1, 2, 3, 4, 5, 6, 7));
        let r: u8x16 = mem::transmute(vaeseq_u8(data, key));
        assert_eq!(
            r,
            u8x16::new(
                124, 123, 124, 118, 124, 123, 124, 197, 124, 123, 124, 118, 124, 123, 124, 197
            )
        );
    }

    #[cfg_attr(target_arch = "arm", simd_test(enable = "crypto"))]
    #[cfg_attr(not(target_arch = "arm"), simd_test(enable = "aes"))]
    unsafe fn test_vaesdq_u8() {
        let data = mem::transmute(u8x16::new(1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8));
        let key = mem::transmute(u8x16::new(0, 1, 2, 3, 4, 5, 6, 7, 0, 1, 2, 3, 4, 5, 6, 7));
        let r: u8x16 = mem::transmute(vaesdq_u8(data, key));
        assert_eq!(
            r,
            u8x16::new(9, 213, 9, 251, 9, 213, 9, 56, 9, 213, 9, 251, 9, 213, 9, 56)
        );
    }

    #[cfg_attr(target_arch = "arm", simd_test(enable = "crypto"))]
    #[cfg_attr(not(target_arch = "arm"), simd_test(enable = "aes"))]
    unsafe fn test_vaesmcq_u8() {
        let data = mem::transmute(u8x16::new(1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8));
        let r: u8x16 = mem::transmute(vaesmcq_u8(data));
        assert_eq!(
            r,
            u8x16::new(3, 4, 9, 10, 15, 8, 21, 30, 3, 4, 9, 10, 15, 8, 21, 30)
        );
    }

    #[cfg_attr(target_arch = "arm", simd_test(enable = "crypto"))]
    #[cfg_attr(not(target_arch = "arm"), simd_test(enable = "aes"))]
    unsafe fn test_vaesimcq_u8() {
        let data = mem::transmute(u8x16::new(1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8));
        let r: u8x16 = mem::transmute(vaesimcq_u8(data));
        assert_eq!(
            r,
            u8x16::new(43, 60, 33, 50, 103, 80, 125, 70, 43, 60, 33, 50, 103, 80, 125, 70)
        );
    }

    #[cfg_attr(target_arch = "arm", simd_test(enable = "crypto"))]
    #[cfg_attr(not(target_arch = "arm"), simd_test(enable = "sha2"))]
    unsafe fn test_vsha1h_u32() {
        assert_eq!(vsha1h_u32(0x1234), 0x048d);
        assert_eq!(vsha1h_u32(0x5678), 0x159e);
    }

    #[cfg_attr(target_arch = "arm", simd_test(enable = "crypto"))]
    #[cfg_attr(not(target_arch = "arm"), simd_test(enable = "sha2"))]
    unsafe fn test_vsha1su0q_u32() {
        let r: u32x4 = mem::transmute(vsha1su0q_u32(
            mem::transmute(u32x4::new(0x1234_u32, 0x5678_u32, 0x9abc_u32, 0xdef0_u32)),
            mem::transmute(u32x4::new(0x1234_u32, 0x5678_u32, 0x9abc_u32, 0xdef0_u32)),
            mem::transmute(u32x4::new(0x1234_u32, 0x5678_u32, 0x9abc_u32, 0xdef0_u32)),
        ));
        assert_eq!(r, u32x4::new(0x9abc, 0xdef0, 0x1234, 0x5678));
    }

    #[cfg_attr(target_arch = "arm", simd_test(enable = "crypto"))]
    #[cfg_attr(not(target_arch = "arm"), simd_test(enable = "sha2"))]
    unsafe fn test_vsha1su1q_u32() {
        let r: u32x4 = mem::transmute(vsha1su1q_u32(
            mem::transmute(u32x4::new(0x1234, 0x5678, 0x9abc, 0xdef0)),
            mem::transmute(u32x4::new(0x1234, 0x5678, 0x9abc, 0xdef0)),
        ));
        assert_eq!(
            r,
            u32x4::new(0x00008898, 0x00019988, 0x00008898, 0x0000acd0)
        );
    }

    #[cfg_attr(target_arch = "arm", simd_test(enable = "crypto"))]
    #[cfg_attr(not(target_arch = "arm"), simd_test(enable = "sha2"))]
    unsafe fn test_vsha1cq_u32() {
        let r: u32x4 = mem::transmute(vsha1cq_u32(
            mem::transmute(u32x4::new(0x1234, 0x5678, 0x9abc, 0xdef0)),
            0x1234,
            mem::transmute(u32x4::new(0x1234, 0x5678, 0x9abc, 0xdef0)),
        ));
        assert_eq!(
            r,
            u32x4::new(0x8a32cbd8, 0x0c518a96, 0x0018a081, 0x0000c168)
        );
    }

    #[cfg_attr(target_arch = "arm", simd_test(enable = "crypto"))]
    #[cfg_attr(not(target_arch = "arm"), simd_test(enable = "sha2"))]
    unsafe fn test_vsha1pq_u32() {
        let r: u32x4 = mem::transmute(vsha1pq_u32(
            mem::transmute(u32x4::new(0x1234, 0x5678, 0x9abc, 0xdef0)),
            0x1234,
            mem::transmute(u32x4::new(0x1234, 0x5678, 0x9abc, 0xdef0)),
        ));
        assert_eq!(
            r,
            u32x4::new(0x469f0ba3, 0x0a326147, 0x80145d7f, 0x00009f47)
        );
    }

    #[cfg_attr(target_arch = "arm", simd_test(enable = "crypto"))]
    #[cfg_attr(not(target_arch = "arm"), simd_test(enable = "sha2"))]
    unsafe fn test_vsha1mq_u32() {
        let r: u32x4 = mem::transmute(vsha1mq_u32(
            mem::transmute(u32x4::new(0x1234, 0x5678, 0x9abc, 0xdef0)),
            0x1234,
            mem::transmute(u32x4::new(0x1234, 0x5678, 0x9abc, 0xdef0)),
        ));
        assert_eq!(
            r,
            u32x4::new(0xaa39693b, 0x0d51bf84, 0x001aa109, 0x0000d278)
        );
    }

    #[cfg_attr(target_arch = "arm", simd_test(enable = "crypto"))]
    #[cfg_attr(not(target_arch = "arm"), simd_test(enable = "sha2"))]
    unsafe fn test_vsha256hq_u32() {
        let r: u32x4 = mem::transmute(vsha256hq_u32(
            mem::transmute(u32x4::new(0x1234, 0x5678, 0x9abc, 0xdef0)),
            mem::transmute(u32x4::new(0x1234, 0x5678, 0x9abc, 0xdef0)),
            mem::transmute(u32x4::new(0x1234, 0x5678, 0x9abc, 0xdef0)),
        ));
        assert_eq!(
            r,
            u32x4::new(0x05e9aaa8, 0xec5f4c02, 0x20a1ea61, 0x28738cef)
        );
    }

    #[cfg_attr(target_arch = "arm", simd_test(enable = "crypto"))]
    #[cfg_attr(not(target_arch = "arm"), simd_test(enable = "sha2"))]
    unsafe fn test_vsha256h2q_u32() {
        let r: u32x4 = mem::transmute(vsha256h2q_u32(
            mem::transmute(u32x4::new(0x1234, 0x5678, 0x9abc, 0xdef0)),
            mem::transmute(u32x4::new(0x1234, 0x5678, 0x9abc, 0xdef0)),
            mem::transmute(u32x4::new(0x1234, 0x5678, 0x9abc, 0xdef0)),
        ));
        assert_eq!(
            r,
            u32x4::new(0x3745362e, 0x2fb51d00, 0xbd4c529b, 0x968b8516)
        );
    }

    #[cfg_attr(target_arch = "arm", simd_test(enable = "crypto"))]
    #[cfg_attr(not(target_arch = "arm"), simd_test(enable = "sha2"))]
    unsafe fn test_vsha256su0q_u32() {
        let r: u32x4 = mem::transmute(vsha256su0q_u32(
            mem::transmute(u32x4::new(0x1234, 0x5678, 0x9abc, 0xdef0)),
            mem::transmute(u32x4::new(0x1234, 0x5678, 0x9abc, 0xdef0)),
        ));
        assert_eq!(
            r,
            u32x4::new(0xe59e1c97, 0x5eaf68da, 0xd7bcb51f, 0x6c8de152)
        );
    }

    #[cfg_attr(target_arch = "arm", simd_test(enable = "crypto"))]
    #[cfg_attr(not(target_arch = "arm"), simd_test(enable = "sha2"))]
    unsafe fn test_vsha256su1q_u32() {
        let r: u32x4 = mem::transmute(vsha256su1q_u32(
            mem::transmute(u32x4::new(0x1234, 0x5678, 0x9abc, 0xdef0)),
            mem::transmute(u32x4::new(0x1234, 0x5678, 0x9abc, 0xdef0)),
            mem::transmute(u32x4::new(0x1234, 0x5678, 0x9abc, 0xdef0)),
        ));
        assert_eq!(
            r,
            u32x4::new(0x5e09e8d2, 0x74a6f16b, 0xc966606b, 0xa686ee9f)
        );
    }
}