// SPDX-License-Identifier: GPL-2.0-only
/*
 * linux/arch/arm64/crypto/aes-glue.c - wrapper code for ARMv8 AES
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */
#include <asm/neon.h>
#include <asm/hwcap.h>
#include <asm/simd.h>
#include <crypto/aes.h>
#include <crypto/ctr.h>
#include <crypto/sha.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <crypto/xts.h>

#include "aes-ce-setkey.h"
#ifdef USE_V8_CRYPTO_EXTENSIONS
#define MODE			"ce"
#define PRIO			300
#define aes_expandkey		ce_aes_expandkey
#define aes_ecb_encrypt		ce_aes_ecb_encrypt
#define aes_ecb_decrypt		ce_aes_ecb_decrypt
#define aes_cbc_encrypt		ce_aes_cbc_encrypt
#define aes_cbc_decrypt		ce_aes_cbc_decrypt
#define aes_cbc_cts_encrypt	ce_aes_cbc_cts_encrypt
#define aes_cbc_cts_decrypt	ce_aes_cbc_cts_decrypt
#define aes_essiv_cbc_encrypt	ce_aes_essiv_cbc_encrypt
#define aes_essiv_cbc_decrypt	ce_aes_essiv_cbc_decrypt
#define aes_ctr_encrypt		ce_aes_ctr_encrypt
#define aes_xts_encrypt		ce_aes_xts_encrypt
#define aes_xts_decrypt		ce_aes_xts_decrypt
#define aes_mac_update		ce_aes_mac_update
MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 Crypto Extensions");
#else
#define MODE			"neon"
#define PRIO			200
#define aes_ecb_encrypt		neon_aes_ecb_encrypt
#define aes_ecb_decrypt		neon_aes_ecb_decrypt
#define aes_cbc_encrypt		neon_aes_cbc_encrypt
#define aes_cbc_decrypt		neon_aes_cbc_decrypt
#define aes_cbc_cts_encrypt	neon_aes_cbc_cts_encrypt
#define aes_cbc_cts_decrypt	neon_aes_cbc_cts_decrypt
#define aes_essiv_cbc_encrypt	neon_aes_essiv_cbc_encrypt
#define aes_essiv_cbc_decrypt	neon_aes_essiv_cbc_decrypt
#define aes_ctr_encrypt		neon_aes_ctr_encrypt
#define aes_xts_encrypt		neon_aes_xts_encrypt
#define aes_xts_decrypt		neon_aes_xts_decrypt
#define aes_mac_update		neon_aes_mac_update
MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 NEON");
#endif
#if defined(USE_V8_CRYPTO_EXTENSIONS) || !defined(CONFIG_CRYPTO_AES_ARM64_BS)
MODULE_ALIAS_CRYPTO("ecb(aes)");
MODULE_ALIAS_CRYPTO("cbc(aes)");
MODULE_ALIAS_CRYPTO("ctr(aes)");
MODULE_ALIAS_CRYPTO("xts(aes)");
#endif
64 MODULE_ALIAS_CRYPTO("cts(cbc(aes))");
65 MODULE_ALIAS_CRYPTO("essiv(cbc(aes),sha256)");
66 MODULE_ALIAS_CRYPTO("cmac(aes)");
67 MODULE_ALIAS_CRYPTO("xcbc(aes)");
68 MODULE_ALIAS_CRYPTO("cbcmac(aes)");
70 MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
71 MODULE_LICENSE("GPL v2");
/* defined in aes-modes.S */
asmlinkage void aes_ecb_encrypt(u8 out[], u8 const in[], u32 const rk[],
				int rounds, int blocks);
asmlinkage void aes_ecb_decrypt(u8 out[], u8 const in[], u32 const rk[],
				int rounds, int blocks);

asmlinkage void aes_cbc_encrypt(u8 out[], u8 const in[], u32 const rk[],
				int rounds, int blocks, u8 iv[]);
asmlinkage void aes_cbc_decrypt(u8 out[], u8 const in[], u32 const rk[],
				int rounds, int blocks, u8 iv[]);

asmlinkage void aes_cbc_cts_encrypt(u8 out[], u8 const in[], u32 const rk[],
				    int rounds, int bytes, u8 const iv[]);
asmlinkage void aes_cbc_cts_decrypt(u8 out[], u8 const in[], u32 const rk[],
				    int rounds, int bytes, u8 const iv[]);

asmlinkage void aes_ctr_encrypt(u8 out[], u8 const in[], u32 const rk[],
				int rounds, int blocks, u8 ctr[]);

asmlinkage void aes_xts_encrypt(u8 out[], u8 const in[], u32 const rk1[],
				int rounds, int blocks, u32 const rk2[], u8 iv[],
				int first);
asmlinkage void aes_xts_decrypt(u8 out[], u8 const in[], u32 const rk1[],
				int rounds, int blocks, u32 const rk2[], u8 iv[],
				int first);

asmlinkage void aes_essiv_cbc_encrypt(u8 out[], u8 const in[], u32 const rk1[],
				      int rounds, int blocks, u8 iv[],
				      u32 const rk2[]);
asmlinkage void aes_essiv_cbc_decrypt(u8 out[], u8 const in[], u32 const rk1[],
				      int rounds, int blocks, u8 iv[],
				      u32 const rk2[]);

asmlinkage void aes_mac_update(u8 const in[], u32 const rk[], int rounds,
			       int blocks, u8 dg[], int enc_before,
			       int enc_after);
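
/*
 * All of the routines above operate on the round keys produced by
 * aes_expandkey() and take the round count as an explicit argument:
 * 10, 12 or 14 rounds for AES-128/192/256, which the callers below
 * derive from the key size in bytes as 6 + key_length / 4.
 */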
struct cts_cbc_req_ctx {
	struct scatterlist sg_src[2];
	struct scatterlist sg_dst[2];
	struct skcipher_request subreq;
};

struct crypto_aes_xts_ctx {
	struct crypto_aes_ctx key1;
	struct crypto_aes_ctx __aligned(8) key2;
};

struct crypto_aes_essiv_cbc_ctx {
	struct crypto_aes_ctx key1;
	struct crypto_aes_ctx __aligned(8) key2;
	struct crypto_shash *hash;
};

struct mac_tfm_ctx {
	struct crypto_aes_ctx key;
	u8 __aligned(8) consts[];
};

struct mac_desc_ctx {
	unsigned int len;
	u8 dg[AES_BLOCK_SIZE];
};
static int skcipher_aes_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
			       unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int ret;

	ret = aes_expandkey(ctx, in_key, key_len);
	if (ret)
		crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return ret;
}
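
/*
 * An XTS key is twice the AES key size: the first half (key1) encrypts the
 * data, the second half (key2) encrypts the tweak. xts_verify_key() vets
 * the combined key before the two halves are expanded separately below.
 */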
static int __maybe_unused xts_set_key(struct crypto_skcipher *tfm,
				      const u8 *in_key, unsigned int key_len)
{
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int ret;

	ret = xts_verify_key(tfm, in_key, key_len);
	if (ret)
		return ret;

	ret = aes_expandkey(&ctx->key1, in_key, key_len / 2);
	if (!ret)
		ret = aes_expandkey(&ctx->key2, &in_key[key_len / 2],
				    key_len / 2);
	if (!ret)
		return 0;

	crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}
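
/*
 * For ESSIV, the IV-encryption key (key2) is not supplied by the caller:
 * the setkey routine below derives it as the SHA-256 digest of the data
 * key, so a predictable IV (e.g. a sector number) can later be turned into
 * an unpredictable one by encrypting it with key2.
 */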
static int __maybe_unused essiv_cbc_set_key(struct crypto_skcipher *tfm,
					    const u8 *in_key,
					    unsigned int key_len)
{
	struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	SHASH_DESC_ON_STACK(desc, ctx->hash);
	u8 digest[SHA256_DIGEST_SIZE];
	int ret;

	ret = aes_expandkey(&ctx->key1, in_key, key_len);
	if (ret)
		goto out;

	desc->tfm = ctx->hash;
	crypto_shash_digest(desc, in_key, key_len, digest);

	ret = aes_expandkey(&ctx->key2, digest, sizeof(digest));
	if (ret)
		goto out;

	return 0;
out:
	crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}
static int __maybe_unused ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key_enc, rounds, blocks);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}
static int __maybe_unused ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key_dec, rounds, blocks);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}
static int cbc_encrypt_walk(struct skcipher_request *req,
			    struct skcipher_walk *walk)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err = 0, rounds = 6 + ctx->key_length / 4;
	unsigned int blocks;

	while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		aes_cbc_encrypt(walk->dst.virt.addr, walk->src.virt.addr,
				ctx->key_enc, rounds, blocks, walk->iv);
		kernel_neon_end();
		err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE);
	}
	return err;
}
static int __maybe_unused cbc_encrypt(struct skcipher_request *req)
{
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;
	return cbc_encrypt_walk(req, &walk);
}
static int cbc_decrypt_walk(struct skcipher_request *req,
			    struct skcipher_walk *walk)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err = 0, rounds = 6 + ctx->key_length / 4;
	unsigned int blocks;

	while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		aes_cbc_decrypt(walk->dst.virt.addr, walk->src.virt.addr,
				ctx->key_dec, rounds, blocks, walk->iv);
		kernel_neon_end();
		err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE);
	}
	return err;
}
static int __maybe_unused cbc_decrypt(struct skcipher_request *req)
{
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;
	return cbc_decrypt_walk(req, &walk);
}
static int cts_cbc_init_tfm(struct crypto_skcipher *tfm)
{
	crypto_skcipher_set_reqsize(tfm, sizeof(struct cts_cbc_req_ctx));
	return 0;
}
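
/*
 * CBC with ciphertext stealing: all but the last two blocks are handled as
 * plain CBC via a subrequest, and the aes_cbc_cts_{en,de}crypt routines then
 * process the final full block together with the partial tail in one call.
 */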
static int cts_cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct cts_cbc_req_ctx *rctx = skcipher_request_ctx(req);
	int err, rounds = 6 + ctx->key_length / 4;
	int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;
	struct scatterlist *src = req->src, *dst = req->dst;
	struct skcipher_walk walk;

	skcipher_request_set_tfm(&rctx->subreq, tfm);

	if (req->cryptlen <= AES_BLOCK_SIZE) {
		if (req->cryptlen < AES_BLOCK_SIZE)
			return -EINVAL;
		cbc_blocks = 1;
	}

	if (cbc_blocks > 0) {
		skcipher_request_set_crypt(&rctx->subreq, req->src, req->dst,
					   cbc_blocks * AES_BLOCK_SIZE,
					   req->iv);

		err = skcipher_walk_virt(&walk, &rctx->subreq, false) ?:
		      cbc_encrypt_walk(&rctx->subreq, &walk);
		if (err)
			return err;

		if (req->cryptlen == AES_BLOCK_SIZE)
			return 0;

		dst = src = scatterwalk_ffwd(rctx->sg_src, req->src,
					     rctx->subreq.cryptlen);
		if (req->dst != req->src)
			dst = scatterwalk_ffwd(rctx->sg_dst, req->dst,
					       rctx->subreq.cryptlen);
	}

	/* handle ciphertext stealing */
	skcipher_request_set_crypt(&rctx->subreq, src, dst,
				   req->cryptlen - cbc_blocks * AES_BLOCK_SIZE,
				   req->iv);

	err = skcipher_walk_virt(&walk, &rctx->subreq, false);
	if (err)
		return err;

	kernel_neon_begin();
	aes_cbc_cts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
			    ctx->key_enc, rounds, walk.nbytes, walk.iv);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}
static int cts_cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct cts_cbc_req_ctx *rctx = skcipher_request_ctx(req);
	int err, rounds = 6 + ctx->key_length / 4;
	int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;
	struct scatterlist *src = req->src, *dst = req->dst;
	struct skcipher_walk walk;

	skcipher_request_set_tfm(&rctx->subreq, tfm);

	if (req->cryptlen <= AES_BLOCK_SIZE) {
		if (req->cryptlen < AES_BLOCK_SIZE)
			return -EINVAL;
		cbc_blocks = 1;
	}

	if (cbc_blocks > 0) {
		skcipher_request_set_crypt(&rctx->subreq, req->src, req->dst,
					   cbc_blocks * AES_BLOCK_SIZE,
					   req->iv);

		err = skcipher_walk_virt(&walk, &rctx->subreq, false) ?:
		      cbc_decrypt_walk(&rctx->subreq, &walk);
		if (err)
			return err;

		if (req->cryptlen == AES_BLOCK_SIZE)
			return 0;

		dst = src = scatterwalk_ffwd(rctx->sg_src, req->src,
					     rctx->subreq.cryptlen);
		if (req->dst != req->src)
			dst = scatterwalk_ffwd(rctx->sg_dst, req->dst,
					       rctx->subreq.cryptlen);
	}

	/* handle ciphertext stealing */
	skcipher_request_set_crypt(&rctx->subreq, src, dst,
				   req->cryptlen - cbc_blocks * AES_BLOCK_SIZE,
				   req->iv);

	err = skcipher_walk_virt(&walk, &rctx->subreq, false);
	if (err)
		return err;

	kernel_neon_begin();
	aes_cbc_cts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
			    ctx->key_dec, rounds, walk.nbytes, walk.iv);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}
static int __maybe_unused essiv_cbc_init_tfm(struct crypto_skcipher *tfm)
{
	struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);

	ctx->hash = crypto_alloc_shash("sha256", 0, 0);

	return PTR_ERR_OR_ZERO(ctx->hash);
}

static void __maybe_unused essiv_cbc_exit_tfm(struct crypto_skcipher *tfm)
{
	struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_shash(ctx->hash);
}
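
/*
 * The aes_essiv_cbc_* routines encrypt req->iv (e.g. the sector number)
 * with key2 to form the actual ESSIV before running CBC over the first
 * chunk of the walk; any remaining chunks continue as plain CBC via the
 * cbc_*_walk() helpers, using the IV chained so far.
 */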
static int __maybe_unused essiv_cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key1.key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	blocks = walk.nbytes / AES_BLOCK_SIZE;
	if (blocks) {
		kernel_neon_begin();
		aes_essiv_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				      ctx->key1.key_enc, rounds, blocks,
				      req->iv, ctx->key2.key_enc);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err ?: cbc_encrypt_walk(req, &walk);
}
static int __maybe_unused essiv_cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key1.key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	blocks = walk.nbytes / AES_BLOCK_SIZE;
	if (blocks) {
		kernel_neon_begin();
		aes_essiv_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				      ctx->key1.key_dec, rounds, blocks,
				      req->iv, ctx->key2.key_enc);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err ?: cbc_decrypt_walk(req, &walk);
}
static int ctr_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		aes_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key_enc, rounds, blocks, walk.iv);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	if (walk.nbytes) {
		u8 __aligned(8) tail[AES_BLOCK_SIZE];
		unsigned int nbytes = walk.nbytes;
		u8 *tdst = walk.dst.virt.addr;
		u8 *tsrc = walk.src.virt.addr;

		/*
		 * Tell aes_ctr_encrypt() to process a tail block.
		 */
		blocks = -1;

		kernel_neon_begin();
		aes_ctr_encrypt(tail, NULL, ctx->key_enc, rounds,
				blocks, walk.iv);
		kernel_neon_end();
		crypto_xor_cpy(tdst, tsrc, tail, nbytes);
		err = skcipher_walk_done(&walk, 0);
	}

	return err;
}
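
/*
 * Scalar fallback, used via crypto_ctr_encrypt_walk() whenever the NEON
 * unit is not usable in the current context; see ctr_encrypt_sync() below.
 */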
static void ctr_encrypt_one(struct crypto_skcipher *tfm, const u8 *src, u8 *dst)
{
	const struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	unsigned long flags;

	/*
	 * Temporarily disable interrupts to avoid races where
	 * cachelines are evicted when the CPU is interrupted
	 * to do something else.
	 */
	local_irq_save(flags);
	aes_encrypt(ctx, dst, src);
	local_irq_restore(flags);
}
static int __maybe_unused ctr_encrypt_sync(struct skcipher_request *req)
{
	if (!crypto_simd_usable())
		return crypto_ctr_encrypt_walk(req, ctr_encrypt_one);

	return ctr_encrypt(req);
}
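
/*
 * The 'first' flag tells the XTS asm routines to derive the initial tweak
 * by encrypting walk.iv with key2 on the first pass through the loop;
 * subsequent passes pick up the tweak state left behind in walk.iv.
 */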
static int __maybe_unused xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key1.key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		kernel_neon_begin();
		aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key1.key_enc, rounds, blocks,
				ctx->key2.key_enc, walk.iv, first);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}

	return err;
}
static int __maybe_unused xts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key1.key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		kernel_neon_begin();
		aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key1.key_dec, rounds, blocks,
				ctx->key2.key_enc, walk.iv, first);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}

	return err;
}
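
/*
 * The "__" prefixed algorithms below are marked CRYPTO_ALG_INTERNAL: they
 * must not be called from contexts where NEON is unavailable, so they are
 * only reachable through the simd wrappers created in aes_init().
 */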
static struct skcipher_alg aes_algs[] = { {
#if defined(USE_V8_CRYPTO_EXTENSIONS) || !defined(CONFIG_CRYPTO_AES_ARM64_BS)
	.base = {
		.cra_name		= "__ecb(aes)",
		.cra_driver_name	= "__ecb-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= ecb_encrypt,
	.decrypt	= ecb_decrypt,
}, {
	.base = {
		.cra_name		= "__cbc(aes)",
		.cra_driver_name	= "__cbc-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= cbc_encrypt,
	.decrypt	= cbc_decrypt,
}, {
	.base = {
		.cra_name		= "__ctr(aes)",
		.cra_driver_name	= "__ctr-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.chunksize	= AES_BLOCK_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= ctr_encrypt,
	.decrypt	= ctr_encrypt,
}, {
	.base = {
		.cra_name		= "ctr(aes)",
		.cra_driver_name	= "ctr-aes-" MODE,
		.cra_priority		= PRIO - 1,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.chunksize	= AES_BLOCK_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= ctr_encrypt_sync,
	.decrypt	= ctr_encrypt_sync,
}, {
	.base = {
		.cra_name		= "__xts(aes)",
		.cra_driver_name	= "__xts-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_xts_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= 2 * AES_MIN_KEY_SIZE,
	.max_keysize	= 2 * AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.setkey		= xts_set_key,
	.encrypt	= xts_encrypt,
	.decrypt	= xts_decrypt,
}, {
#endif
	.base = {
		.cra_name		= "__cts(cbc(aes))",
		.cra_driver_name	= "__cts-cbc-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.walksize	= 2 * AES_BLOCK_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= cts_cbc_encrypt,
	.decrypt	= cts_cbc_decrypt,
	.init		= cts_cbc_init_tfm,
}, {
	.base = {
		.cra_name		= "__essiv(cbc(aes),sha256)",
		.cra_driver_name	= "__essiv-cbc-aes-sha256-" MODE,
		.cra_priority		= PRIO + 1,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_essiv_cbc_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.setkey		= essiv_cbc_set_key,
	.encrypt	= essiv_cbc_encrypt,
	.decrypt	= essiv_cbc_decrypt,
	.init		= essiv_cbc_init_tfm,
	.exit		= essiv_cbc_exit_tfm,
} };
static int cbcmac_setkey(struct crypto_shash *tfm, const u8 *in_key,
			 unsigned int key_len)
{
	struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	int err;

	err = aes_expandkey(&ctx->key, in_key, key_len);
	if (err)
		crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);

	return err;
}
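
/*
 * Doubling in GF(2^128): shift the 128-bit value left by one bit and, if a
 * carry falls out of the top bit, reduce by the field polynomial
 * x^128 + x^7 + x^2 + x + 1, represented by the 0x87 constant below.
 */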
static void cmac_gf128_mul_by_x(be128 *y, const be128 *x)
{
	u64 a = be64_to_cpu(x->a);
	u64 b = be64_to_cpu(x->b);

	y->a = cpu_to_be64((a << 1) | (b >> 63));
	y->b = cpu_to_be64((b << 1) ^ ((a >> 63) ? 0x87 : 0));
}
static int cmac_setkey(struct crypto_shash *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	be128 *consts = (be128 *)ctx->consts;
	int rounds = 6 + key_len / 4;
	int err;

	err = cbcmac_setkey(tfm, in_key, key_len);
	if (err)
		return err;

	/* encrypt the zero vector */
	kernel_neon_begin();
	aes_ecb_encrypt(ctx->consts, (u8[AES_BLOCK_SIZE]){}, ctx->key.key_enc,
			rounds, 1);
	kernel_neon_end();

	cmac_gf128_mul_by_x(consts, consts);
	cmac_gf128_mul_by_x(consts + 1, consts);

	return 0;
}
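
/*
 * XCBC-AES (RFC 3566) derives three keys by encrypting the constant blocks
 * 0x01..01, 0x02..02 and 0x03..03 under the user key: the first replaces
 * the CBC-MAC key, and the other two stay in ctx->consts for masking the
 * final block.
 */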
static int xcbc_setkey(struct crypto_shash *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	static u8 const ks[3][AES_BLOCK_SIZE] = {
		{ [0 ... AES_BLOCK_SIZE - 1] = 0x1 },
		{ [0 ... AES_BLOCK_SIZE - 1] = 0x2 },
		{ [0 ... AES_BLOCK_SIZE - 1] = 0x3 },
	};

	struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	int rounds = 6 + key_len / 4;
	u8 key[AES_BLOCK_SIZE];
	int err;

	err = cbcmac_setkey(tfm, in_key, key_len);
	if (err)
		return err;

	kernel_neon_begin();
	aes_ecb_encrypt(key, ks[0], ctx->key.key_enc, rounds, 1);
	aes_ecb_encrypt(ctx->consts, ks[1], ctx->key.key_enc, rounds, 2);
	kernel_neon_end();

	return cbcmac_setkey(tfm, key, sizeof(key));
}
static int mac_init(struct shash_desc *desc)
{
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);

	memset(ctx->dg, 0, AES_BLOCK_SIZE);
	ctx->len = 0;

	return 0;
}
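
/*
 * dg[] carries the running CBC-MAC state. enc_before requests that the
 * pending block be encrypted before new input is XORed in, and enc_after
 * that the last block be encrypted on the way out; leaving the final block
 * unencrypted lets cmac_final() XOR in a subkey first.
 */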
static void mac_do_update(struct crypto_aes_ctx *ctx, u8 const in[], int blocks,
			  u8 dg[], int enc_before, int enc_after)
{
	int rounds = 6 + ctx->key_length / 4;

	if (crypto_simd_usable()) {
		kernel_neon_begin();
		aes_mac_update(in, ctx->key_enc, rounds, blocks, dg, enc_before,
			       enc_after);
		kernel_neon_end();
	} else {
		if (enc_before)
			aes_encrypt(ctx, dg, dg);

		while (blocks--) {
			crypto_xor(dg, in, AES_BLOCK_SIZE);
			in += AES_BLOCK_SIZE;

			if (blocks || enc_after)
				aes_encrypt(ctx, dg, dg);
		}
	}
}
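
/*
 * mac_update() buffers partial blocks in dg[] and deliberately leaves the
 * encryption of the last complete block deferred (ctx->len is then set to
 * AES_BLOCK_SIZE) until more data or final() arrives, since only then is
 * it known how that block must be handled.
 */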
static int mac_update(struct shash_desc *desc, const u8 *p, unsigned int len)
{
	struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);

	while (len > 0) {
		unsigned int l;

		if ((ctx->len % AES_BLOCK_SIZE) == 0 &&
		    (ctx->len + len) > AES_BLOCK_SIZE) {

			int blocks = len / AES_BLOCK_SIZE;

			len %= AES_BLOCK_SIZE;

			mac_do_update(&tctx->key, p, blocks, ctx->dg,
				      (ctx->len != 0), (len != 0));

			p += blocks * AES_BLOCK_SIZE;

			if (!len) {
				ctx->len = AES_BLOCK_SIZE;
				break;
			}
			ctx->len = 0;
		}

		l = min(len, AES_BLOCK_SIZE - ctx->len);

		if (l <= AES_BLOCK_SIZE) {
			crypto_xor(ctx->dg + ctx->len, p, l);
			ctx->len += l;
			len -= l;
			p += l;
		}
	}

	return 0;
}
static int cbcmac_final(struct shash_desc *desc, u8 *out)
{
	struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);

	mac_do_update(&tctx->key, NULL, 0, ctx->dg, (ctx->len != 0), 0);

	memcpy(out, ctx->dg, AES_BLOCK_SIZE);

	return 0;
}
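
/*
 * cmac_final(): a final block that is complete is XORed with subkey K1
 * (consts); an incomplete one gets the 10* bit padding (the 0x80 byte)
 * and is XORed with K2 (consts + AES_BLOCK_SIZE), as RFC 4493 specifies.
 */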
static int cmac_final(struct shash_desc *desc, u8 *out)
{
	struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);
	u8 *consts = tctx->consts;

	if (ctx->len != AES_BLOCK_SIZE) {
		ctx->dg[ctx->len] ^= 0x80;
		consts += AES_BLOCK_SIZE;
	}

	mac_do_update(&tctx->key, consts, 1, ctx->dg, 0, 1);

	memcpy(out, ctx->dg, AES_BLOCK_SIZE);

	return 0;
}
static struct shash_alg mac_algs[] = { {
	.base.cra_name		= "cmac(aes)",
	.base.cra_driver_name	= "cmac-aes-" MODE,
	.base.cra_priority	= PRIO,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct mac_tfm_ctx) +
				  2 * AES_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,

	.digestsize		= AES_BLOCK_SIZE,
	.init			= mac_init,
	.update			= mac_update,
	.final			= cmac_final,
	.setkey			= cmac_setkey,
	.descsize		= sizeof(struct mac_desc_ctx),
}, {
	.base.cra_name		= "xcbc(aes)",
	.base.cra_driver_name	= "xcbc-aes-" MODE,
	.base.cra_priority	= PRIO,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct mac_tfm_ctx) +
				  2 * AES_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,

	.digestsize		= AES_BLOCK_SIZE,
	.init			= mac_init,
	.update			= mac_update,
	.final			= cmac_final,
	.setkey			= xcbc_setkey,
	.descsize		= sizeof(struct mac_desc_ctx),
}, {
	.base.cra_name		= "cbcmac(aes)",
	.base.cra_driver_name	= "cbcmac-aes-" MODE,
	.base.cra_priority	= PRIO,
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct mac_tfm_ctx),
	.base.cra_module	= THIS_MODULE,

	.digestsize		= AES_BLOCK_SIZE,
	.init			= mac_init,
	.update			= mac_update,
	.final			= cbcmac_final,
	.setkey			= cbcmac_setkey,
	.descsize		= sizeof(struct mac_desc_ctx),
} };
static struct simd_skcipher_alg *aes_simd_algs[ARRAY_SIZE(aes_algs)];
static void aes_exit(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(aes_simd_algs); i++)
		if (aes_simd_algs[i])
			simd_skcipher_free(aes_simd_algs[i]);

	crypto_unregister_shashes(mac_algs, ARRAY_SIZE(mac_algs));
	crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
}
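
/*
 * For each CRYPTO_ALG_INTERNAL cipher, create a simd wrapper whose names
 * simply drop the "__" prefix (hence the "+ 2" below). The wrapper calls
 * the internal algorithm directly when SIMD is usable and defers to cryptd
 * otherwise.
 */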
static int __init aes_init(void)
{
	struct simd_skcipher_alg *simd;
	const char *basename;
	const char *algname;
	const char *drvname;
	int err;
	int i;

	err = crypto_register_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
	if (err)
		return err;

	err = crypto_register_shashes(mac_algs, ARRAY_SIZE(mac_algs));
	if (err)
		goto unregister_ciphers;

	for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
		if (!(aes_algs[i].base.cra_flags & CRYPTO_ALG_INTERNAL))
			continue;

		algname = aes_algs[i].base.cra_name + 2;
		drvname = aes_algs[i].base.cra_driver_name + 2;
		basename = aes_algs[i].base.cra_driver_name;
		simd = simd_skcipher_create_compat(algname, drvname, basename);
		err = PTR_ERR(simd);
		if (IS_ERR(simd))
			goto unregister_simds;

		aes_simd_algs[i] = simd;
	}

	return 0;

unregister_simds:
	aes_exit();
	return err;
unregister_ciphers:
	crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
	return err;
}
#ifdef USE_V8_CRYPTO_EXTENSIONS
module_cpu_feature_match(AES, aes_init);
#else
module_init(aes_init);
EXPORT_SYMBOL(neon_aes_ecb_encrypt);
EXPORT_SYMBOL(neon_aes_cbc_encrypt);
#endif
module_exit(aes_exit);