/*
 * linux/arch/arm64/crypto/aes-glue.c - wrapper code for ARMv8 AES
 *
 * Copyright (C) 2013 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <asm/neon.h>
#include <asm/hwcap.h>
#include <crypto/aes.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <crypto/xts.h>

#include "aes-ce-setkey.h"

#ifdef USE_V8_CRYPTO_EXTENSIONS
#define MODE			"ce"
#define PRIO			300
#define aes_setkey		ce_aes_setkey
#define aes_expandkey		ce_aes_expandkey
#define aes_ecb_encrypt		ce_aes_ecb_encrypt
#define aes_ecb_decrypt		ce_aes_ecb_decrypt
#define aes_cbc_encrypt		ce_aes_cbc_encrypt
#define aes_cbc_decrypt		ce_aes_cbc_decrypt
#define aes_ctr_encrypt		ce_aes_ctr_encrypt
#define aes_xts_encrypt		ce_aes_xts_encrypt
#define aes_xts_decrypt		ce_aes_xts_decrypt
MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 Crypto Extensions");
#else
#define MODE			"neon"
#define PRIO			200
#define aes_setkey		crypto_aes_set_key
#define aes_expandkey		crypto_aes_expand_key
#define aes_ecb_encrypt		neon_aes_ecb_encrypt
#define aes_ecb_decrypt		neon_aes_ecb_decrypt
#define aes_cbc_encrypt		neon_aes_cbc_encrypt
#define aes_cbc_decrypt		neon_aes_cbc_decrypt
#define aes_ctr_encrypt		neon_aes_ctr_encrypt
#define aes_xts_encrypt		neon_aes_xts_encrypt
#define aes_xts_decrypt		neon_aes_xts_decrypt
MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 NEON");
MODULE_ALIAS_CRYPTO("ecb(aes)");
MODULE_ALIAS_CRYPTO("cbc(aes)");
MODULE_ALIAS_CRYPTO("ctr(aes)");
MODULE_ALIAS_CRYPTO("xts(aes)");
#endif
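
/*
 * Note: this file is built twice: once with USE_V8_CRYPTO_EXTENSIONS
 * defined, yielding the Crypto Extensions flavour, and once without it
 * for the plain NEON fallback, so the aes_* names above bind to
 * different asm implementations in each build.
 */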

MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");

/* defined in aes-modes.S */
asmlinkage void aes_ecb_encrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, int first);
asmlinkage void aes_ecb_decrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, int first);

asmlinkage void aes_cbc_encrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, u8 iv[], int first);
asmlinkage void aes_cbc_decrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, u8 iv[], int first);

asmlinkage void aes_ctr_encrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, u8 ctr[], int first);

asmlinkage void aes_xts_encrypt(u8 out[], u8 const in[], u8 const rk1[],
				int rounds, int blocks, u8 const rk2[], u8 iv[],
				int first);
asmlinkage void aes_xts_decrypt(u8 out[], u8 const in[], u8 const rk1[],
				int rounds, int blocks, u8 const rk2[], u8 iv[],
				int first);
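
/*
 * The asm routines take the expanded round keys and the round count
 * (6 + key_length / 4, i.e. 10/12/14 rounds for 128/192/256-bit keys).
 * 'first' is nonzero on the first call of a walk, telling the asm code
 * to load the round keys and IV/tweak into NEON registers; they remain
 * live across subsequent calls within the same
 * kernel_neon_begin()/kernel_neon_end() section.
 */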

struct crypto_aes_xts_ctx {
	struct crypto_aes_ctx key1;
	struct crypto_aes_ctx __aligned(8) key2;
};
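
/*
 * XTS carries two independent AES keys: key1 encrypts the data blocks
 * and key2 encrypts the sector tweak, so the combined key passed to
 * .setkey is twice the usual AES key size.
 */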

static int skcipher_aes_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
			       unsigned int key_len)
{
	return aes_setkey(crypto_skcipher_tfm(tfm), in_key, key_len);
}

static int xts_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int ret;

	ret = xts_verify_key(tfm, in_key, key_len);
	if (ret)
		return ret;

	ret = aes_expandkey(&ctx->key1, in_key, key_len / 2);
	if (!ret)
		ret = aes_expandkey(&ctx->key2, &in_key[key_len / 2],
				    key_len / 2);
	if (!ret)
		return 0;

	crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}
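
/*
 * The mode handlers below all follow the same pattern: map the
 * request's scatterlists via the skcipher walk API, process each
 * chunk's full blocks with the NEON unit enabled, and report any
 * partial block back to the walk through skcipher_walk_done().
 */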

static int ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_enc, rounds, blocks, first);
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();
	return err;
}

static int ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_dec, rounds, blocks, first);
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();
	return err;
}

static int cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_enc, rounds, blocks, walk.iv,
				first);
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();
	return err;
}

static int cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_dec, rounds, blocks, walk.iv,
				first);
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();
	return err;
}

static int ctr_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	int blocks;

	err = skcipher_walk_virt(&walk, req, true);

	first = 1;
	kernel_neon_begin();
	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		aes_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_enc, rounds, blocks, walk.iv,
				first);
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
		first = 0;
	}
	if (walk.nbytes) {
		u8 __aligned(8) tail[AES_BLOCK_SIZE];
		unsigned int nbytes = walk.nbytes;
		u8 *tdst = walk.dst.virt.addr;
		u8 *tsrc = walk.src.virt.addr;

		/*
		 * Minimum alignment is 8 bytes, so if nbytes is <= 8, we need
		 * to tell aes_ctr_encrypt() to only read half a block.
		 */
		blocks = (nbytes <= 8) ? -1 : 1;

		aes_ctr_encrypt(tail, tsrc, (u8 *)ctx->key_enc, rounds,
				blocks, walk.iv, first);
		/* copy only the processed bytes out of the bounce buffer */
		memcpy(tdst, tail, nbytes);
		err = skcipher_walk_done(&walk, 0);
	}
	kernel_neon_end();

	return err;
}
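
/*
 * The XTS handlers pass both expanded keys down to the asm code. Note
 * that the tweak is always generated with key2's *encryption* schedule
 * (key2.key_enc), even on the decrypt path.
 */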

static int xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key1.key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key1.key_enc, rounds, blocks,
				(u8 *)ctx->key2.key_enc, walk.iv, first);
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();

	return err;
}

static int xts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key1.key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key1.key_dec, rounds, blocks,
				(u8 *)ctx->key2.key_enc, walk.iv, first);
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();

	return err;
}

static struct skcipher_alg aes_algs[] = { {
	.base = {
		.cra_name		= "__ecb(aes)",
		.cra_driver_name	= "__ecb-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_alignmask		= 7,
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= ecb_encrypt,
	.decrypt	= ecb_decrypt,
}, {
	.base = {
		.cra_name		= "__cbc(aes)",
		.cra_driver_name	= "__cbc-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_alignmask		= 7,
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= cbc_encrypt,
	.decrypt	= cbc_decrypt,
}, {
	.base = {
		.cra_name		= "__ctr(aes)",
		.cra_driver_name	= "__ctr-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_alignmask		= 7,
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.chunksize	= AES_BLOCK_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= ctr_encrypt,
	.decrypt	= ctr_encrypt,	/* CTR decryption is the same operation */
}, {
	.base = {
		.cra_name		= "ctr(aes)",
		.cra_driver_name	= "ctr-aes-" MODE,
		.cra_priority		= PRIO - 1,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_alignmask		= 7,
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.chunksize	= AES_BLOCK_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= ctr_encrypt,
	.decrypt	= ctr_encrypt,
}, {
	.base = {
		.cra_name		= "__xts(aes)",
		.cra_driver_name	= "__xts-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_xts_ctx),
		.cra_alignmask		= 7,
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= 2 * AES_MIN_KEY_SIZE,
	.max_keysize	= 2 * AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.setkey		= xts_set_key,
	.encrypt	= xts_encrypt,
	.decrypt	= xts_decrypt,
} };
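
/*
 * The algorithms flagged CRYPTO_ALG_INTERNAL (the "__" prefixed ones)
 * may only be invoked with the NEON unit already usable; aes_init()
 * below wraps each of them in a simd skcipher instance that defers to
 * an asynchronous helper (cryptd) when the NEON is unavailable.
 */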

static struct simd_skcipher_alg *aes_simd_algs[ARRAY_SIZE(aes_algs)];

static void aes_exit(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(aes_simd_algs); i++)
		if (aes_simd_algs[i])
			simd_skcipher_free(aes_simd_algs[i]);

	crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
}

static int __init aes_init(void)
{
	struct simd_skcipher_alg *simd;
	const char *basename;
	const char *algname;
	const char *drvname;
	int err;
	int i;

	err = crypto_register_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
	if (err)
		return err;

	for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
		if (!(aes_algs[i].base.cra_flags & CRYPTO_ALG_INTERNAL))
			continue;

		/* skip the "__" prefix to derive the public names */
		algname = aes_algs[i].base.cra_name + 2;
		drvname = aes_algs[i].base.cra_driver_name + 2;
		basename = aes_algs[i].base.cra_driver_name;
		simd = simd_skcipher_create_compat(algname, drvname, basename);
		err = PTR_ERR(simd);
		if (IS_ERR(simd))
			goto unregister_simds;

		aes_simd_algs[i] = simd;
	}

	return 0;

unregister_simds:
	aes_exit();
	return err;
}
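
/*
 * The Crypto Extensions build only loads when the CPU advertises the
 * AES feature; the plain NEON build has no such dependency and
 * initializes unconditionally.
 */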
#ifdef USE_V8_CRYPTO_EXTENSIONS
module_cpu_feature_match(AES, aes_init);
#else
module_init(aes_init);
#endif
module_exit(aes_exit);