/*
 * Support for Intel AES-NI instructions. This file contains glue
 * code; the real AES implementation is in intel-aes_asm.S.
 *
 * Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 *
 * Added RFC4106 AES-GCM support for 128-bit keys under the AEAD
 * interface for 64-bit kernels.
 *    Authors: Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */
#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/module.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/cryptd.h>
#include <crypto/ctr.h>
#include <crypto/b128ops.h>
#include <crypto/lrw.h>
#include <crypto/xts.h>
#include <asm/cpu_device_id.h>
#include <asm/crypto/aes.h>
#include <crypto/ablk_helper.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <linux/workqueue.h>
#include <linux/spinlock.h>
#include <asm/crypto/glue_helper.h>

#if defined(CONFIG_CRYPTO_PCBC) || defined(CONFIG_CRYPTO_PCBC_MODULE)
#define HAS_PCBC
#endif
/* This data is stored at the end of the crypto_tfm struct.
 * It's a type of per "session" data storage location.
 * This needs to be 16 byte aligned.
 */
struct aesni_rfc4106_gcm_ctx {
	u8 hash_subkey[16];
	struct crypto_aes_ctx aes_key_expanded;
	u8 nonce[4];
	struct cryptd_aead *cryptd_tfm;
};

struct aesni_gcm_set_hash_subkey_result {
	int err;
	struct completion completion;
};

struct aesni_hash_subkey_req_data {
	u8 iv[16];
	struct aesni_gcm_set_hash_subkey_result result;
	struct scatterlist sg;
};

#define AESNI_ALIGN	(16)
#define AES_BLOCK_MASK	(~(AES_BLOCK_SIZE - 1))
#define RFC4106_HASH_SUBKEY_SIZE 16
struct aesni_lrw_ctx {
	struct lrw_table_ctx lrw_table;
	u8 raw_aes_ctx[sizeof(struct crypto_aes_ctx) + AESNI_ALIGN - 1];
};

struct aesni_xts_ctx {
	u8 raw_tweak_ctx[sizeof(struct crypto_aes_ctx) + AESNI_ALIGN - 1];
	u8 raw_crypt_ctx[sizeof(struct crypto_aes_ctx) + AESNI_ALIGN - 1];
};
asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
			     unsigned int key_len);
asmlinkage void aesni_enc(struct crypto_aes_ctx *ctx, u8 *out,
			  const u8 *in);
asmlinkage void aesni_dec(struct crypto_aes_ctx *ctx, u8 *out,
			  const u8 *in);
asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_ecb_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_cbc_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
int crypto_fpu_init(void);
void crypto_fpu_exit(void);

#define AVX_GEN2_OPTSIZE 640
#define AVX_GEN4_OPTSIZE 4096
asmlinkage void aesni_ctr_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);

asmlinkage void aesni_xts_crypt8(struct crypto_aes_ctx *ctx, u8 *out,
				 const u8 *in, bool enc, u8 *iv);
/* asmlinkage void aesni_gcm_enc()
 * void *ctx,  AES Key schedule. Starts on a 16 byte boundary.
 * u8 *out, Ciphertext output. Encrypt in-place is allowed.
 * const u8 *in, Plaintext input
 * unsigned long plaintext_len, Length of data in bytes for encryption.
 * u8 *iv, Pre-counter block j0: 4 byte salt (from Security Association)
 *         concatenated with 8 byte Initialisation Vector (from IPSec ESP
 *         Payload) concatenated with 0x00000001. 16-byte aligned pointer.
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 * const u8 *aad, Additional Authentication Data (AAD)
 * unsigned long aad_len, Length of AAD in bytes. With RFC4106 this is going
 *         to be 8 or 12 bytes
 * u8 *auth_tag, Authenticated Tag output.
 * unsigned long auth_tag_len), Authenticated Tag Length in bytes.
 *         Valid values are 16 (most likely), 12 or 8.
 */
asmlinkage void aesni_gcm_enc(void *ctx, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);
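
/*
 * For reference (editorial sketch, not part of the asm interface): the j0
 * pre-counter block that __driver_rfc4106_encrypt() below builds for this
 * routine is laid out as
 *
 *	u8 j0[16];
 *	memcpy(j0, ctx->nonce, 4);              4-byte salt from the SA
 *	memcpy(j0 + 4, req->iv, 8);             8-byte explicit ESP IV
 *	*(__be32 *)(j0 + 12) = cpu_to_be32(1);  initial counter value
 */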
/* asmlinkage void aesni_gcm_dec()
 * void *ctx, AES Key schedule. Starts on a 16 byte boundary.
 * u8 *out, Plaintext output. Decrypt in-place is allowed.
 * const u8 *in, Ciphertext input
 * unsigned long ciphertext_len, Length of data in bytes for decryption.
 * u8 *iv, Pre-counter block j0: 4 byte salt (from Security Association)
 *         concatenated with 8 byte Initialisation Vector (from IPSec ESP
 *         Payload) concatenated with 0x00000001. 16-byte aligned pointer.
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 * const u8 *aad, Additional Authentication Data (AAD)
 * unsigned long aad_len, Length of AAD in bytes. With RFC4106 this is going
 *         to be 8 or 12 bytes
 * u8 *auth_tag, Authenticated Tag output.
 * unsigned long auth_tag_len) Authenticated Tag Length in bytes.
 *         Valid values are 16 (most likely), 12 or 8.
 */
asmlinkage void aesni_gcm_dec(void *ctx, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);
#ifdef CONFIG_AS_AVX
/*
 * asmlinkage void aesni_gcm_precomp_avx_gen2()
 * gcm_data *my_ctx_data, context data
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 */
asmlinkage void aesni_gcm_precomp_avx_gen2(void *my_ctx_data, u8 *hash_subkey);

asmlinkage void aesni_gcm_enc_avx_gen2(void *ctx, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

asmlinkage void aesni_gcm_dec_avx_gen2(void *ctx, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);
static void aesni_gcm_enc_avx(void *ctx, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len)
{
	if (plaintext_len < AVX_GEN2_OPTSIZE) {
		aesni_gcm_enc(ctx, out, in, plaintext_len, iv, hash_subkey, aad,
				aad_len, auth_tag, auth_tag_len);
	} else {
		aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
		aesni_gcm_enc_avx_gen2(ctx, out, in, plaintext_len, iv, aad,
					aad_len, auth_tag, auth_tag_len);
	}
}
static void aesni_gcm_dec_avx(void *ctx, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len)
{
	if (ciphertext_len < AVX_GEN2_OPTSIZE) {
		aesni_gcm_dec(ctx, out, in, ciphertext_len, iv, hash_subkey, aad,
				aad_len, auth_tag, auth_tag_len);
	} else {
		aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
		aesni_gcm_dec_avx_gen2(ctx, out, in, ciphertext_len, iv, aad,
					aad_len, auth_tag, auth_tag_len);
	}
}
#endif
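
/*
 * Rationale for the wrappers above: below AVX_GEN2_OPTSIZE (640 bytes) the
 * AVX entry and precompute overhead outweighs its per-block gain, so the
 * plain SSE routines are used instead. The AVX2 wrappers below apply the
 * same idea with a second threshold at AVX_GEN4_OPTSIZE (4096 bytes).
 */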
#ifdef CONFIG_AS_AVX2
/*
 * asmlinkage void aesni_gcm_precomp_avx_gen4()
 * gcm_data *my_ctx_data, context data
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 */
asmlinkage void aesni_gcm_precomp_avx_gen4(void *my_ctx_data, u8 *hash_subkey);

asmlinkage void aesni_gcm_enc_avx_gen4(void *ctx, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

asmlinkage void aesni_gcm_dec_avx_gen4(void *ctx, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);
static void aesni_gcm_enc_avx2(void *ctx, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len)
{
	if (plaintext_len < AVX_GEN2_OPTSIZE) {
		aesni_gcm_enc(ctx, out, in, plaintext_len, iv, hash_subkey, aad,
				aad_len, auth_tag, auth_tag_len);
	} else if (plaintext_len < AVX_GEN4_OPTSIZE) {
		aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
		aesni_gcm_enc_avx_gen2(ctx, out, in, plaintext_len, iv, aad,
					aad_len, auth_tag, auth_tag_len);
	} else {
		aesni_gcm_precomp_avx_gen4(ctx, hash_subkey);
		aesni_gcm_enc_avx_gen4(ctx, out, in, plaintext_len, iv, aad,
					aad_len, auth_tag, auth_tag_len);
	}
}
static void aesni_gcm_dec_avx2(void *ctx, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len)
{
	if (ciphertext_len < AVX_GEN2_OPTSIZE) {
		aesni_gcm_dec(ctx, out, in, ciphertext_len, iv, hash_subkey,
				aad, aad_len, auth_tag, auth_tag_len);
	} else if (ciphertext_len < AVX_GEN4_OPTSIZE) {
		aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
		aesni_gcm_dec_avx_gen2(ctx, out, in, ciphertext_len, iv, aad,
					aad_len, auth_tag, auth_tag_len);
	} else {
		aesni_gcm_precomp_avx_gen4(ctx, hash_subkey);
		aesni_gcm_dec_avx_gen4(ctx, out, in, ciphertext_len, iv, aad,
					aad_len, auth_tag, auth_tag_len);
	}
}
#endif
static void (*aesni_gcm_enc_tfm)(void *ctx, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

static void (*aesni_gcm_dec_tfm)(void *ctx, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);
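
/*
 * These indirect pointers are assigned once in aesni_init(), which picks
 * the fastest gcm_enc/dec implementation (SSE, AVX or AVX2) that both the
 * assembler and the CPU support.
 */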
static inline struct
aesni_rfc4106_gcm_ctx *aesni_rfc4106_gcm_ctx_get(struct crypto_aead *tfm)
{
	return (struct aesni_rfc4106_gcm_ctx *)
		PTR_ALIGN((u8 *)
		crypto_tfm_ctx(crypto_aead_tfm(tfm)), AESNI_ALIGN);
}

static inline struct crypto_aes_ctx *aes_ctx(void *raw_ctx)
{
	unsigned long addr = (unsigned long)raw_ctx;
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return (struct crypto_aes_ctx *)ALIGN(addr, align);
}
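
/*
 * This is why the raw_*_ctx buffers above are oversized by AESNI_ALIGN - 1:
 * the tfm context is not guaranteed to be 16-byte aligned, so aes_ctx()
 * rounds the raw pointer up to the next 16-byte boundary, e.g. (sketch)
 *
 *	u8 raw[sizeof(struct crypto_aes_ctx) + AESNI_ALIGN - 1];
 *	struct crypto_aes_ctx *ctx = aes_ctx(raw);   16-byte aligned
 */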
static int aes_set_key_common(struct crypto_tfm *tfm, void *raw_ctx,
			      const u8 *in_key, unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = aes_ctx(raw_ctx);
	u32 *flags = &tfm->crt_flags;
	int err;

	if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
	    key_len != AES_KEYSIZE_256) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	if (!irq_fpu_usable())
		err = crypto_aes_expand_key(ctx, in_key, key_len);
	else {
		kernel_fpu_begin();
		err = aesni_set_key(ctx, in_key, key_len);
		kernel_fpu_end();
	}

	return err;
}
static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	return aes_set_key_common(tfm, crypto_tfm_ctx(tfm), in_key, key_len);
}
static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (!irq_fpu_usable())
		crypto_aes_encrypt_x86(ctx, dst, src);
	else {
		kernel_fpu_begin();
		aesni_enc(ctx, dst, src);
		kernel_fpu_end();
	}
}
static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (!irq_fpu_usable())
		crypto_aes_decrypt_x86(ctx, dst, src);
	else {
		kernel_fpu_begin();
		aesni_dec(ctx, dst, src);
		kernel_fpu_end();
	}
}
static void __aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	aesni_enc(ctx, dst, src);
}

static void __aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	aesni_dec(ctx, dst, src);
}
static int ecb_encrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}
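
/*
 * The remaining blkcipher handlers follow the same walk pattern as
 * ecb_encrypt() above: each chunk is processed in multiples of
 * AES_BLOCK_SIZE and the sub-block remainder is handed back to
 * blkcipher_walk_done(), which continues the walk on the next
 * scatterlist segment.
 */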
static int ecb_decrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}
static int cbc_encrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}
static int cbc_decrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}
static void ctr_crypt_final(struct crypto_aes_ctx *ctx,
			    struct blkcipher_walk *walk)
{
	u8 *ctrblk = walk->iv;
	u8 keystream[AES_BLOCK_SIZE];
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	aesni_enc(ctx, keystream, ctrblk);
	crypto_xor(keystream, src, nbytes);
	memcpy(dst, keystream, nbytes);
	crypto_inc(ctrblk, AES_BLOCK_SIZE);
}
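
/*
 * ctr_crypt_final() covers the trailing partial block: CTR mode is a
 * stream cipher, so the tail is produced by encrypting the counter block
 * once and XORing just the nbytes < AES_BLOCK_SIZE that remain.
 */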
static int ctr_crypt(struct blkcipher_desc *desc,
		     struct scatterlist *dst, struct scatterlist *src,
		     unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
		aesni_ctr_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	if (walk.nbytes) {
		ctr_crypt_final(ctx, &walk);
		err = blkcipher_walk_done(desc, &walk, 0);
	}
	kernel_fpu_end();

	return err;
}
static int ablk_ecb_init(struct crypto_tfm *tfm)
{
	return ablk_init_common(tfm, "__driver-ecb-aes-aesni");
}

static int ablk_cbc_init(struct crypto_tfm *tfm)
{
	return ablk_init_common(tfm, "__driver-cbc-aes-aesni");
}

#ifdef CONFIG_X86_64
static int ablk_ctr_init(struct crypto_tfm *tfm)
{
	return ablk_init_common(tfm, "__driver-ctr-aes-aesni");
}
#endif

#ifdef HAS_PCBC
static int ablk_pcbc_init(struct crypto_tfm *tfm)
{
	return ablk_init_common(tfm, "fpu(pcbc(__driver-aes-aesni))");
}
#endif
static void lrw_xts_encrypt_callback(void *ctx, u8 *blks, unsigned int nbytes)
{
	aesni_ecb_enc(ctx, blks, blks, nbytes);
}

static void lrw_xts_decrypt_callback(void *ctx, u8 *blks, unsigned int nbytes)
{
	aesni_ecb_dec(ctx, blks, blks, nbytes);
}
static int lrw_aesni_setkey(struct crypto_tfm *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct aesni_lrw_ctx *ctx = crypto_tfm_ctx(tfm);
	int err;

	err = aes_set_key_common(tfm, ctx->raw_aes_ctx, key,
				 keylen - AES_BLOCK_SIZE);
	if (err)
		return err;

	return lrw_init_table(&ctx->lrw_table, key + keylen - AES_BLOCK_SIZE);
}
static void lrw_aesni_exit_tfm(struct crypto_tfm *tfm)
{
	struct aesni_lrw_ctx *ctx = crypto_tfm_ctx(tfm);

	lrw_free_table(&ctx->lrw_table);
}
static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct aesni_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[8];
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = aes_ctx(ctx->raw_aes_ctx),
		.crypt_fn = lrw_xts_encrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	kernel_fpu_end();

	return ret;
}
static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct aesni_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[8];
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = aes_ctx(ctx->raw_aes_ctx),
		.crypt_fn = lrw_xts_decrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	kernel_fpu_end();

	return ret;
}
static int xts_aesni_setkey(struct crypto_tfm *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct aesni_xts_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;
	int err;

	/* key consists of keys of equal size concatenated, therefore
	 * the length must be even
	 */
	if (keylen % 2) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	/* first half of xts-key is for crypt */
	err = aes_set_key_common(tfm, ctx->raw_crypt_ctx, key, keylen / 2);
	if (err)
		return err;

	/* second half of xts-key is for tweak */
	return aes_set_key_common(tfm, ctx->raw_tweak_ctx, key + keylen / 2,
				  keylen / 2);
}
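
/*
 * Example: a 64-byte xts(aes) key sets up two AES-256 schedules; bytes
 * 0..31 key the data cipher (raw_crypt_ctx) and bytes 32..63 key the
 * tweak cipher (raw_tweak_ctx).
 */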
static void aesni_xts_tweak(void *ctx, u8 *out, const u8 *in)
{
	aesni_enc(ctx, out, in);
}

#ifdef CONFIG_X86_64

static void aesni_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv, GLUE_FUNC_CAST(aesni_enc));
}

static void aesni_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv, GLUE_FUNC_CAST(aesni_dec));
}

static void aesni_xts_enc8(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	aesni_xts_crypt8(ctx, (u8 *)dst, (const u8 *)src, true, (u8 *)iv);
}

static void aesni_xts_dec8(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	aesni_xts_crypt8(ctx, (u8 *)dst, (const u8 *)src, false, (u8 *)iv);
}
static const struct common_glue_ctx aesni_enc_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = 1,

	.funcs = { {
		.num_blocks = 8,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_enc8) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_enc) }
	} }
};

static const struct common_glue_ctx aesni_dec_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = 1,

	.funcs = { {
		.num_blocks = 8,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_dec8) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_dec) }
	} }
};
static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct aesni_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&aesni_enc_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(aesni_xts_tweak),
				     aes_ctx(ctx->raw_tweak_ctx),
				     aes_ctx(ctx->raw_crypt_ctx));
}

static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct aesni_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&aesni_dec_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(aesni_xts_tweak),
				     aes_ctx(ctx->raw_tweak_ctx),
				     aes_ctx(ctx->raw_crypt_ctx));
}
#else

static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct aesni_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[8];
	struct xts_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.tweak_ctx = aes_ctx(ctx->raw_tweak_ctx),
		.tweak_fn = aesni_xts_tweak,
		.crypt_ctx = aes_ctx(ctx->raw_crypt_ctx),
		.crypt_fn = lrw_xts_encrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	ret = xts_crypt(desc, dst, src, nbytes, &req);
	kernel_fpu_end();

	return ret;
}

static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct aesni_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[8];
	struct xts_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.tweak_ctx = aes_ctx(ctx->raw_tweak_ctx),
		.tweak_fn = aesni_xts_tweak,
		.crypt_ctx = aes_ctx(ctx->raw_crypt_ctx),
		.crypt_fn = lrw_xts_decrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	ret = xts_crypt(desc, dst, src, nbytes, &req);
	kernel_fpu_end();

	return ret;
}

#endif
static int rfc4106_init(struct crypto_tfm *tfm)
{
	struct cryptd_aead *cryptd_tfm;
	struct aesni_rfc4106_gcm_ctx *ctx = (struct aesni_rfc4106_gcm_ctx *)
		PTR_ALIGN((u8 *)crypto_tfm_ctx(tfm), AESNI_ALIGN);
	struct crypto_aead *cryptd_child;
	struct aesni_rfc4106_gcm_ctx *child_ctx;
	cryptd_tfm = cryptd_alloc_aead("__driver-gcm-aes-aesni", 0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);

	cryptd_child = cryptd_aead_child(cryptd_tfm);
	child_ctx = aesni_rfc4106_gcm_ctx_get(cryptd_child);
	memcpy(child_ctx, ctx, sizeof(*ctx));
	ctx->cryptd_tfm = cryptd_tfm;
	tfm->crt_aead.reqsize = sizeof(struct aead_request)
		+ crypto_aead_reqsize(&cryptd_tfm->base);
	return 0;
}
static void rfc4106_exit(struct crypto_tfm *tfm)
{
	struct aesni_rfc4106_gcm_ctx *ctx =
		(struct aesni_rfc4106_gcm_ctx *)
		PTR_ALIGN((u8 *)crypto_tfm_ctx(tfm), AESNI_ALIGN);
	if (!IS_ERR(ctx->cryptd_tfm))
		cryptd_free_aead(ctx->cryptd_tfm);
}
static void
rfc4106_set_hash_subkey_done(struct crypto_async_request *req, int err)
{
	struct aesni_gcm_set_hash_subkey_result *result = req->data;

	if (err == -EINPROGRESS)
		return;
	result->err = err;
	complete(&result->completion);
}
static int
rfc4106_set_hash_subkey(u8 *hash_subkey, const u8 *key, unsigned int key_len)
{
	struct crypto_ablkcipher *ctr_tfm;
	struct ablkcipher_request *req;
	int ret = -EINVAL;
	struct aesni_hash_subkey_req_data *req_data;

	ctr_tfm = crypto_alloc_ablkcipher("ctr(aes)", 0, 0);
	if (IS_ERR(ctr_tfm))
		return PTR_ERR(ctr_tfm);

	crypto_ablkcipher_clear_flags(ctr_tfm, ~0);

	ret = crypto_ablkcipher_setkey(ctr_tfm, key, key_len);
	if (ret)
		goto out_free_ablkcipher;

	ret = -ENOMEM;
	req = ablkcipher_request_alloc(ctr_tfm, GFP_KERNEL);
	if (!req)
		goto out_free_ablkcipher;

	req_data = kmalloc(sizeof(*req_data), GFP_KERNEL);
	if (!req_data)
		goto out_free_request;

	memset(req_data->iv, 0, sizeof(req_data->iv));

	/* Clear the data in the hash sub key container to zero.*/
	/* We want to cipher all zeros to create the hash sub key. */
	memset(hash_subkey, 0, RFC4106_HASH_SUBKEY_SIZE);

	init_completion(&req_data->result.completion);
	sg_init_one(&req_data->sg, hash_subkey, RFC4106_HASH_SUBKEY_SIZE);
	ablkcipher_request_set_tfm(req, ctr_tfm);
	ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP |
					CRYPTO_TFM_REQ_MAY_BACKLOG,
					rfc4106_set_hash_subkey_done,
					&req_data->result);

	ablkcipher_request_set_crypt(req, &req_data->sg,
		&req_data->sg, RFC4106_HASH_SUBKEY_SIZE, req_data->iv);

	ret = crypto_ablkcipher_encrypt(req);
	if (ret == -EINPROGRESS || ret == -EBUSY) {
		ret = wait_for_completion_interruptible
			(&req_data->result.completion);
		if (!ret)
			ret = req_data->result.err;
	}
	kfree(req_data);
out_free_request:
	ablkcipher_request_free(req);
out_free_ablkcipher:
	crypto_free_ablkcipher(ctr_tfm);
	return ret;
}
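
/*
 * The value computed above is the GHASH subkey H = E_K(0^128): hash_subkey
 * is zeroed and then run through ctr(aes) with an all-zero counter block,
 * which amounts to a single AES encryption of a zero block under the
 * session key.
 */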
static int rfc4106_set_key(struct crypto_aead *parent, const u8 *key,
			   unsigned int key_len)
{
	int ret = 0;
	struct crypto_tfm *tfm = crypto_aead_tfm(parent);
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(parent);
	struct crypto_aead *cryptd_child = cryptd_aead_child(ctx->cryptd_tfm);
	struct aesni_rfc4106_gcm_ctx *child_ctx =
				 aesni_rfc4106_gcm_ctx_get(cryptd_child);
	u8 *new_key_align, *new_key_mem = NULL;

	if (key_len < 4) {
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	/* Account for 4 byte nonce at the end. */
	key_len -= 4;
	if (key_len != AES_KEYSIZE_128) {
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	memcpy(ctx->nonce, key + key_len, sizeof(ctx->nonce));
	/* This must be on a 16 byte boundary! */
	if ((unsigned long)(&(ctx->aes_key_expanded.key_enc[0])) % AESNI_ALIGN)
		return -EINVAL;

	if ((unsigned long)key % AESNI_ALIGN) {
		/* key is not aligned: use an auxiliary aligned pointer */
		new_key_mem = kmalloc(key_len + AESNI_ALIGN, GFP_KERNEL);
		if (!new_key_mem)
			return -ENOMEM;

		new_key_align = PTR_ALIGN(new_key_mem, AESNI_ALIGN);
		memcpy(new_key_align, key, key_len);
		key = new_key_align;
	}

	if (!irq_fpu_usable())
		ret = crypto_aes_expand_key(&(ctx->aes_key_expanded),
		key, key_len);
	else {
		kernel_fpu_begin();
		ret = aesni_set_key(&(ctx->aes_key_expanded), key, key_len);
		kernel_fpu_end();
	}
	/* This must be on a 16 byte boundary! */
	if ((unsigned long)(&(ctx->hash_subkey[0])) % AESNI_ALIGN) {
		ret = -EINVAL;
		goto exit;
	}
	ret = rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
	memcpy(child_ctx, ctx, sizeof(*ctx));
exit:
	kfree(new_key_mem);
	return ret;
}
/* This is the Integrity Check Value (aka the authentication tag) length,
 * which can be 8, 12 or 16 bytes long. */
static int rfc4106_set_authsize(struct crypto_aead *parent,
				unsigned int authsize)
{
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(parent);
	struct crypto_aead *cryptd_child = cryptd_aead_child(ctx->cryptd_tfm);

	switch (authsize) {
	case 8:
	case 12:
	case 16:
		break;
	default:
		return -EINVAL;
	}
	crypto_aead_crt(parent)->authsize = authsize;
	crypto_aead_crt(cryptd_child)->authsize = authsize;
	return 0;
}
static int rfc4106_encrypt(struct aead_request *req)
{
	int ret;
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);

	if (!irq_fpu_usable()) {
		struct aead_request *cryptd_req =
			(struct aead_request *) aead_request_ctx(req);
		memcpy(cryptd_req, req, sizeof(*req));
		aead_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
		return crypto_aead_encrypt(cryptd_req);
	} else {
		struct crypto_aead *cryptd_child =
			cryptd_aead_child(ctx->cryptd_tfm);
		kernel_fpu_begin();
		ret = cryptd_child->base.crt_aead.encrypt(req);
		kernel_fpu_end();
		return ret;
	}
}
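
/*
 * When the FPU is unusable (e.g. the request arrives in interrupt
 * context), encrypt and decrypt are bounced to the cryptd workqueue,
 * which re-enters the __driver variants from process context where
 * kernel_fpu_begin() is safe.
 */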
static int rfc4106_decrypt(struct aead_request *req)
{
	int ret;
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);

	if (!irq_fpu_usable()) {
		struct aead_request *cryptd_req =
			(struct aead_request *) aead_request_ctx(req);
		memcpy(cryptd_req, req, sizeof(*req));
		aead_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
		return crypto_aead_decrypt(cryptd_req);
	} else {
		struct crypto_aead *cryptd_child =
			cryptd_aead_child(ctx->cryptd_tfm);
		kernel_fpu_begin();
		ret = cryptd_child->base.crt_aead.decrypt(req);
		kernel_fpu_end();
		return ret;
	}
}
static int __driver_rfc4106_encrypt(struct aead_request *req)
{
	u8 one_entry_in_sg = 0;
	u8 *src, *dst, *assoc;
	__be32 counter = cpu_to_be32(1);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	unsigned long auth_tag_len = crypto_aead_authsize(tfm);
	u8 iv_tab[16 + AESNI_ALIGN];
	u8 *iv = (u8 *) PTR_ALIGN((u8 *)iv_tab, AESNI_ALIGN);
	struct scatter_walk src_sg_walk;
	struct scatter_walk assoc_sg_walk;
	struct scatter_walk dst_sg_walk;
	unsigned int i;

	/* Assuming we are supporting rfc4106 64-bit extended
	 * sequence numbers, the AAD length must be 8 or 12 bytes. */
	if (unlikely(req->assoclen != 8 && req->assoclen != 12))
		return -EINVAL;
	/* IV below built */
	for (i = 0; i < 4; i++)
		*(iv+i) = ctx->nonce[i];
	for (i = 0; i < 8; i++)
		*(iv+4+i) = req->iv[i];
	*((__be32 *)(iv+12)) = counter;

	if ((sg_is_last(req->src)) && (sg_is_last(req->assoc))) {
		one_entry_in_sg = 1;
		scatterwalk_start(&src_sg_walk, req->src);
		scatterwalk_start(&assoc_sg_walk, req->assoc);
		src = scatterwalk_map(&src_sg_walk);
		assoc = scatterwalk_map(&assoc_sg_walk);
		dst = src;
		if (unlikely(req->src != req->dst)) {
			scatterwalk_start(&dst_sg_walk, req->dst);
			dst = scatterwalk_map(&dst_sg_walk);
		}
	} else {
		/* Allocate memory for src, dst, assoc */
		src = kmalloc(req->cryptlen + auth_tag_len + req->assoclen,
			GFP_ATOMIC);
		if (unlikely(!src))
			return -ENOMEM;
		assoc = (src + req->cryptlen + auth_tag_len);
		scatterwalk_map_and_copy(src, req->src, 0, req->cryptlen, 0);
		scatterwalk_map_and_copy(assoc, req->assoc, 0,
			req->assoclen, 0);
		dst = src;
	}

	aesni_gcm_enc_tfm(aes_ctx, dst, src, (unsigned long)req->cryptlen, iv,
		ctx->hash_subkey, assoc, (unsigned long)req->assoclen, dst
		+ ((unsigned long)req->cryptlen), auth_tag_len);

	/* The authTag (aka the Integrity Check Value) needs to be written
	 * back to the packet. */
	if (one_entry_in_sg) {
		if (unlikely(req->src != req->dst)) {
			scatterwalk_unmap(dst);
			scatterwalk_done(&dst_sg_walk, 0, 0);
		}
		scatterwalk_unmap(src);
		scatterwalk_unmap(assoc);
		scatterwalk_done(&src_sg_walk, 0, 0);
		scatterwalk_done(&assoc_sg_walk, 0, 0);
	} else {
		scatterwalk_map_and_copy(dst, req->dst, 0,
			req->cryptlen + auth_tag_len, 1);
		kfree(src);
	}
	return 0;
}
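
/*
 * Note the two paths above: a single-entry scatterlist is mapped and
 * processed in place, while fragmented input is first linearised into a
 * kmalloc'd bounce buffer, because the gcm asm routines require
 * contiguous buffers.
 */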
static int __driver_rfc4106_decrypt(struct aead_request *req)
{
	u8 one_entry_in_sg = 0;
	u8 *src, *dst, *assoc;
	unsigned long tempCipherLen = 0;
	__be32 counter = cpu_to_be32(1);
	int retval = 0;
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	unsigned long auth_tag_len = crypto_aead_authsize(tfm);
	u8 iv_and_authTag[32 + AESNI_ALIGN];
	u8 *iv = (u8 *) PTR_ALIGN((u8 *)iv_and_authTag, AESNI_ALIGN);
	u8 *authTag = iv + 16;
	struct scatter_walk src_sg_walk;
	struct scatter_walk assoc_sg_walk;
	struct scatter_walk dst_sg_walk;
	unsigned int i;

	if (unlikely((req->cryptlen < auth_tag_len) ||
		(req->assoclen != 8 && req->assoclen != 12)))
		return -EINVAL;
	/* Assuming we are supporting rfc4106 64-bit extended
	 * sequence numbers, the AAD length must be 8 or 12 bytes. */

	tempCipherLen = (unsigned long)(req->cryptlen - auth_tag_len);
	/* IV below built */
	for (i = 0; i < 4; i++)
		*(iv+i) = ctx->nonce[i];
	for (i = 0; i < 8; i++)
		*(iv+4+i) = req->iv[i];
	*((__be32 *)(iv+12)) = counter;

	if ((sg_is_last(req->src)) && (sg_is_last(req->assoc))) {
		one_entry_in_sg = 1;
		scatterwalk_start(&src_sg_walk, req->src);
		scatterwalk_start(&assoc_sg_walk, req->assoc);
		src = scatterwalk_map(&src_sg_walk);
		assoc = scatterwalk_map(&assoc_sg_walk);
		dst = src;
		if (unlikely(req->src != req->dst)) {
			scatterwalk_start(&dst_sg_walk, req->dst);
			dst = scatterwalk_map(&dst_sg_walk);
		}
	} else {
		/* Allocate memory for src, dst, assoc */
		src = kmalloc(req->cryptlen + req->assoclen, GFP_ATOMIC);
		if (!src)
			return -ENOMEM;
		/* The buffer holds the ciphertext followed by the AAD, so
		 * the AAD copy must start right after req->cryptlen bytes. */
		assoc = (src + req->cryptlen);
		scatterwalk_map_and_copy(src, req->src, 0, req->cryptlen, 0);
		scatterwalk_map_and_copy(assoc, req->assoc, 0,
			req->assoclen, 0);
		dst = src;
	}

	aesni_gcm_dec_tfm(aes_ctx, dst, src, tempCipherLen, iv,
		ctx->hash_subkey, assoc, (unsigned long)req->assoclen,
		authTag, auth_tag_len);

	/* Compare generated tag with passed in tag. */
	retval = crypto_memneq(src + tempCipherLen, authTag, auth_tag_len) ?
		-EBADMSG : 0;

	if (one_entry_in_sg) {
		if (unlikely(req->src != req->dst)) {
			scatterwalk_unmap(dst);
			scatterwalk_done(&dst_sg_walk, 0, 0);
		}
		scatterwalk_unmap(src);
		scatterwalk_unmap(assoc);
		scatterwalk_done(&src_sg_walk, 0, 0);
		scatterwalk_done(&assoc_sg_walk, 0, 0);
	} else {
		/* Only the plaintext (without the tag) is copied back. */
		scatterwalk_map_and_copy(dst, req->dst, 0, tempCipherLen, 1);
		kfree(src);
	}
	return retval;
}
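
/*
 * The tag verification above deliberately uses crypto_memneq() rather than
 * memcmp(): it runs in constant time, so a forger learns nothing from how
 * quickly mismatched tags are rejected.
 */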
static struct crypto_alg aesni_algs[] = { {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-aesni",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx) +
				  AESNI_ALIGN - 1,
	.cra_module		= THIS_MODULE,
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= aes_encrypt,
			.cia_decrypt		= aes_decrypt
		}
	}
}, {
	.cra_name		= "__aes-aesni",
	.cra_driver_name	= "__driver-aes-aesni",
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx) +
				  AESNI_ALIGN - 1,
	.cra_module		= THIS_MODULE,
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= __aes_encrypt,
			.cia_decrypt		= __aes_decrypt
		}
	}
}, {
	.cra_name		= "__ecb-aes-aesni",
	.cra_driver_name	= "__driver-ecb-aes-aesni",
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx) +
				  AESNI_ALIGN - 1,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= ecb_encrypt,
			.decrypt	= ecb_decrypt,
		},
	},
}, {
	.cra_name		= "__cbc-aes-aesni",
	.cra_driver_name	= "__driver-cbc-aes-aesni",
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx) +
				  AESNI_ALIGN - 1,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= cbc_encrypt,
			.decrypt	= cbc_decrypt,
		},
	},
}, {
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "ecb-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_ecb_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "cbc-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_cbc_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
#ifdef CONFIG_X86_64
}, {
	.cra_name		= "__ctr-aes-aesni",
	.cra_driver_name	= "__driver-ctr-aes-aesni",
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx) +
				  AESNI_ALIGN - 1,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= ctr_crypt,
			.decrypt	= ctr_crypt,
		},
	},
}, {
	.cra_name		= "ctr(aes)",
	.cra_driver_name	= "ctr-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_ctr_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			/* CTR decryption is the same keystream XOR */
			.decrypt	= ablk_encrypt,
		},
	},
}, {
	.cra_name		= "__gcm-aes-aesni",
	.cra_driver_name	= "__driver-gcm-aes-aesni",
	.cra_flags		= CRYPTO_ALG_TYPE_AEAD,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct aesni_rfc4106_gcm_ctx) +
				  AESNI_ALIGN,
	.cra_type		= &crypto_aead_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.aead = {
			.encrypt	= __driver_rfc4106_encrypt,
			.decrypt	= __driver_rfc4106_decrypt,
		},
	},
}, {
	.cra_name		= "rfc4106(gcm(aes))",
	.cra_driver_name	= "rfc4106-gcm-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct aesni_rfc4106_gcm_ctx) +
				  AESNI_ALIGN,
	.cra_type		= &crypto_nivaead_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= rfc4106_init,
	.cra_exit		= rfc4106_exit,
	.cra_u = {
		.aead = {
			.setkey		= rfc4106_set_key,
			.setauthsize	= rfc4106_set_authsize,
			.encrypt	= rfc4106_encrypt,
			.decrypt	= rfc4106_decrypt,
			.geniv		= "seqiv",
			.ivsize		= 8,
			.maxauthsize	= 16,
		},
	},
#endif
#ifdef HAS_PCBC
}, {
	.cra_name		= "pcbc(aes)",
	.cra_driver_name	= "pcbc-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_pcbc_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
#endif
}, {
	.cra_name		= "__lrw-aes-aesni",
	.cra_driver_name	= "__driver-lrw-aes-aesni",
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct aesni_lrw_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_exit		= lrw_aesni_exit_tfm,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE + AES_BLOCK_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE + AES_BLOCK_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= lrw_aesni_setkey,
			.encrypt	= lrw_encrypt,
			.decrypt	= lrw_decrypt,
		},
	},
}, {
	.cra_name		= "__xts-aes-aesni",
	.cra_driver_name	= "__driver-xts-aes-aesni",
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct aesni_xts_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= 2 * AES_MIN_KEY_SIZE,
			.max_keysize	= 2 * AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= xts_aesni_setkey,
			.encrypt	= xts_encrypt,
			.decrypt	= xts_decrypt,
		},
	},
}, {
	.cra_name		= "lrw(aes)",
	.cra_driver_name	= "lrw-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE + AES_BLOCK_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE + AES_BLOCK_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "xts(aes)",
	.cra_driver_name	= "xts-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= 2 * AES_MIN_KEY_SIZE,
			.max_keysize	= 2 * AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
} };
static const struct x86_cpu_id aesni_cpu_id[] = {
	X86_FEATURE_MATCH(X86_FEATURE_AES),
	{}
};
MODULE_DEVICE_TABLE(x86cpu, aesni_cpu_id);
static int __init aesni_init(void)
{
	int err;

	if (!x86_match_cpu(aesni_cpu_id))
		return -ENODEV;
#ifdef CONFIG_X86_64
#ifdef CONFIG_AS_AVX2
	if (boot_cpu_has(X86_FEATURE_AVX2)) {
		pr_info("AVX2 version of gcm_enc/dec engaged.\n");
		aesni_gcm_enc_tfm = aesni_gcm_enc_avx2;
		aesni_gcm_dec_tfm = aesni_gcm_dec_avx2;
	} else
#endif
#ifdef CONFIG_AS_AVX
	if (boot_cpu_has(X86_FEATURE_AVX)) {
		pr_info("AVX version of gcm_enc/dec engaged.\n");
		aesni_gcm_enc_tfm = aesni_gcm_enc_avx;
		aesni_gcm_dec_tfm = aesni_gcm_dec_avx;
	} else
#endif
	{
		pr_info("SSE version of gcm_enc/dec engaged.\n");
		aesni_gcm_enc_tfm = aesni_gcm_enc;
		aesni_gcm_dec_tfm = aesni_gcm_dec;
	}
#endif

	err = crypto_fpu_init();
	if (err)
		return err;

	return crypto_register_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
}
static void __exit aesni_exit(void)
{
	crypto_unregister_algs(aesni_algs, ARRAY_SIZE(aesni_algs));

	crypto_fpu_exit();
}

module_init(aesni_init);
module_exit(aesni_exit);

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, Intel AES-NI instructions optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS("aes");