// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Support for Intel AES-NI instructions. This file contains glue
 * code; the real AES implementation is in aesni-intel_asm.S.
 *
 * Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 *
 * Added RFC4106 AES-GCM support for 128-bit keys under the AEAD
 * interface for 64-bit kernels.
 *    Authors: Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */
#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/module.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/ctr.h>
#include <crypto/b128ops.h>
#include <crypto/gcm.h>
#include <crypto/xts.h>
#include <asm/cpu_device_id.h>
#include <asm/crypto/aes.h>
#include <asm/simd.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/workqueue.h>
#include <linux/spinlock.h>
#ifdef CONFIG_X86_64
#include <asm/crypto/glue_helper.h>
#endif
#define AESNI_ALIGN	16
#define AESNI_ALIGN_ATTR __attribute__ ((__aligned__(AESNI_ALIGN)))
#define AES_BLOCK_MASK	(~(AES_BLOCK_SIZE - 1))
#define RFC4106_HASH_SUBKEY_SIZE 16
#define AESNI_ALIGN_EXTRA ((AESNI_ALIGN - 1) & ~(CRYPTO_MINALIGN - 1))
#define CRYPTO_AES_CTX_SIZE (sizeof(struct crypto_aes_ctx) + AESNI_ALIGN_EXTRA)
#define XTS_AES_CTX_SIZE (sizeof(struct aesni_xts_ctx) + AESNI_ALIGN_EXTRA)
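
/*
 * Worked example (illustrative only; CRYPTO_MINALIGN is arch-dependent and
 * a value of 8 is assumed here): AESNI_ALIGN_EXTRA = (16 - 1) & ~(8 - 1) = 8,
 * so CRYPTO_AES_CTX_SIZE reserves sizeof(struct crypto_aes_ctx) + 8 bytes.
 * That slack lets aes_ctx() below round any minimally aligned context
 * pointer up to the next 16-byte boundary without overrunning the
 * allocation.
 */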
/* This data is stored at the end of the crypto_tfm struct.
 * It's a type of per "session" data storage location.
 * This needs to be 16 byte aligned.
 */
struct aesni_rfc4106_gcm_ctx {
	u8 hash_subkey[16] AESNI_ALIGN_ATTR;
	struct crypto_aes_ctx aes_key_expanded AESNI_ALIGN_ATTR;
	u8 nonce[4];
};

struct generic_gcmaes_ctx {
	u8 hash_subkey[16] AESNI_ALIGN_ATTR;
	struct crypto_aes_ctx aes_key_expanded AESNI_ALIGN_ATTR;
};

struct aesni_xts_ctx {
	u8 raw_tweak_ctx[sizeof(struct crypto_aes_ctx)] AESNI_ALIGN_ATTR;
	u8 raw_crypt_ctx[sizeof(struct crypto_aes_ctx)] AESNI_ALIGN_ATTR;
};
#define GCM_BLOCK_LEN 16

struct gcm_context_data {
	/* init, update and finalize context data */
	u8 aad_hash[GCM_BLOCK_LEN];
	u64 aad_length;
	u64 in_length;
	u8 partial_block_enc_key[GCM_BLOCK_LEN];
	u8 orig_IV[GCM_BLOCK_LEN];
	u8 current_counter[GCM_BLOCK_LEN];
	u64 partial_block_len;
	u64 unused;
	u8 hash_keys[GCM_BLOCK_LEN * 16];
};
asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
			     unsigned int key_len);
asmlinkage void aesni_enc(struct crypto_aes_ctx *ctx, u8 *out,
			  const u8 *in);
asmlinkage void aesni_dec(struct crypto_aes_ctx *ctx, u8 *out,
			  const u8 *in);
asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_ecb_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_cbc_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
#define AVX_GEN2_OPTSIZE 640
#define AVX_GEN4_OPTSIZE 4096

#ifdef CONFIG_X86_64

static void (*aesni_ctr_enc_tfm)(struct crypto_aes_ctx *ctx, u8 *out,
				 const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_ctr_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);

asmlinkage void aesni_xts_crypt8(struct crypto_aes_ctx *ctx, u8 *out,
				 const u8 *in, bool enc, u8 *iv);
/* asmlinkage void aesni_gcm_enc()
 * void *ctx, AES Key schedule. Starts on a 16 byte boundary.
 * struct gcm_context_data. May be uninitialized.
 * u8 *out, Ciphertext output. Encrypt in-place is allowed.
 * const u8 *in, Plaintext input
 * unsigned long plaintext_len, Length of data in bytes for encryption.
 * u8 *iv, Pre-counter block j0: 12 byte IV concatenated with 0x00000001.
 *         16-byte aligned pointer.
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 * const u8 *aad, Additional Authentication Data (AAD)
 * unsigned long aad_len, Length of AAD in bytes.
 * u8 *auth_tag, Authenticated Tag output.
 * unsigned long auth_tag_len), Authenticated Tag Length in bytes.
 *         Valid values are 16 (most likely), 12 or 8.
 */
asmlinkage void aesni_gcm_enc(void *ctx,
			struct gcm_context_data *gdata, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);
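
/*
 * Hypothetical call sketch (buffer names are local to this comment, not
 * part of the driver): a 60-byte in-place encryption with a full 16-byte
 * tag would look like
 *
 *	aesni_gcm_enc(aes_ctx, &data, buf, buf, 60, iv,
 *		      hash_subkey, aad, aad_len, tag, 16);
 *
 * with iv already holding the pre-counter block j0 described above.
 */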
/* asmlinkage void aesni_gcm_dec()
 * void *ctx, AES Key schedule. Starts on a 16 byte boundary.
 * struct gcm_context_data. May be uninitialized.
 * u8 *out, Plaintext output. Decrypt in-place is allowed.
 * const u8 *in, Ciphertext input
 * unsigned long ciphertext_len, Length of data in bytes for decryption.
 * u8 *iv, Pre-counter block j0: 12 byte IV concatenated with 0x00000001.
 *         16-byte aligned pointer.
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 * const u8 *aad, Additional Authentication Data (AAD)
 * unsigned long aad_len, Length of AAD in bytes. With RFC4106 this is going
 *         to be 8 or 12 bytes
 * u8 *auth_tag, Authenticated Tag output.
 * unsigned long auth_tag_len) Authenticated Tag Length in bytes.
 *         Valid values are 16 (most likely), 12 or 8.
 */
asmlinkage void aesni_gcm_dec(void *ctx,
			struct gcm_context_data *gdata, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);
/* Scatter / Gather routines, with args similar to above */
asmlinkage void aesni_gcm_init(void *ctx,
			       struct gcm_context_data *gdata,
			       u8 *iv,
			       u8 *hash_subkey, const u8 *aad,
			       unsigned long aad_len);
asmlinkage void aesni_gcm_enc_update(void *ctx,
				     struct gcm_context_data *gdata, u8 *out,
				     const u8 *in, unsigned long plaintext_len);
asmlinkage void aesni_gcm_dec_update(void *ctx,
				     struct gcm_context_data *gdata, u8 *out,
				     const u8 *in,
				     unsigned long ciphertext_len);
asmlinkage void aesni_gcm_finalize(void *ctx,
				   struct gcm_context_data *gdata,
				   u8 *auth_tag, unsigned long auth_tag_len);
static const struct aesni_gcm_tfm_s {
	void (*init)(void *ctx, struct gcm_context_data *gdata, u8 *iv,
		     u8 *hash_subkey, const u8 *aad, unsigned long aad_len);
	void (*enc_update)(void *ctx, struct gcm_context_data *gdata, u8 *out,
			   const u8 *in, unsigned long plaintext_len);
	void (*dec_update)(void *ctx, struct gcm_context_data *gdata, u8 *out,
			   const u8 *in, unsigned long ciphertext_len);
	void (*finalize)(void *ctx, struct gcm_context_data *gdata,
			 u8 *auth_tag, unsigned long auth_tag_len);
} *aesni_gcm_tfm;
static const struct aesni_gcm_tfm_s aesni_gcm_tfm_sse = {
	.init = &aesni_gcm_init,
	.enc_update = &aesni_gcm_enc_update,
	.dec_update = &aesni_gcm_dec_update,
	.finalize = &aesni_gcm_finalize,
};
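
/*
 * The dispatch table above gives every GCM code path one calling
 * convention, so gcmaes_crypt_by_sg() below can pick an implementation at
 * run time, e.g. (sketch):
 *
 *	const struct aesni_gcm_tfm_s *gcm_tfm = aesni_gcm_tfm;
 *	gcm_tfm->init(aes_ctx, &data, iv, hash_subkey, assoc, assoclen);
 */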
#ifdef CONFIG_AS_AVX
asmlinkage void aes_ctr_enc_128_avx_by8(const u8 *in, u8 *iv,
		void *keys, u8 *out, unsigned int num_bytes);
asmlinkage void aes_ctr_enc_192_avx_by8(const u8 *in, u8 *iv,
		void *keys, u8 *out, unsigned int num_bytes);
asmlinkage void aes_ctr_enc_256_avx_by8(const u8 *in, u8 *iv,
		void *keys, u8 *out, unsigned int num_bytes);
/*
 * asmlinkage void aesni_gcm_init_avx_gen2()
 * gcm_data *my_ctx_data, context data
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 */
asmlinkage void aesni_gcm_init_avx_gen2(void *my_ctx_data,
					struct gcm_context_data *gdata,
					u8 *iv,
					u8 *hash_subkey,
					const u8 *aad,
					unsigned long aad_len);

asmlinkage void aesni_gcm_enc_update_avx_gen2(void *ctx,
				     struct gcm_context_data *gdata, u8 *out,
				     const u8 *in, unsigned long plaintext_len);
asmlinkage void aesni_gcm_dec_update_avx_gen2(void *ctx,
				     struct gcm_context_data *gdata, u8 *out,
				     const u8 *in,
				     unsigned long ciphertext_len);
asmlinkage void aesni_gcm_finalize_avx_gen2(void *ctx,
				   struct gcm_context_data *gdata,
				   u8 *auth_tag, unsigned long auth_tag_len);

asmlinkage void aesni_gcm_enc_avx_gen2(void *ctx,
				struct gcm_context_data *gdata, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

asmlinkage void aesni_gcm_dec_avx_gen2(void *ctx,
				struct gcm_context_data *gdata, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

static const struct aesni_gcm_tfm_s aesni_gcm_tfm_avx_gen2 = {
	.init = &aesni_gcm_init_avx_gen2,
	.enc_update = &aesni_gcm_enc_update_avx_gen2,
	.dec_update = &aesni_gcm_dec_update_avx_gen2,
	.finalize = &aesni_gcm_finalize_avx_gen2,
};
#endif
#ifdef CONFIG_AS_AVX2
/*
 * asmlinkage void aesni_gcm_init_avx_gen4()
 * gcm_data *my_ctx_data, context data
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 */
asmlinkage void aesni_gcm_init_avx_gen4(void *my_ctx_data,
					struct gcm_context_data *gdata,
					u8 *iv,
					u8 *hash_subkey,
					const u8 *aad,
					unsigned long aad_len);

asmlinkage void aesni_gcm_enc_update_avx_gen4(void *ctx,
				     struct gcm_context_data *gdata, u8 *out,
				     const u8 *in, unsigned long plaintext_len);
asmlinkage void aesni_gcm_dec_update_avx_gen4(void *ctx,
				     struct gcm_context_data *gdata, u8 *out,
				     const u8 *in,
				     unsigned long ciphertext_len);
asmlinkage void aesni_gcm_finalize_avx_gen4(void *ctx,
				   struct gcm_context_data *gdata,
				   u8 *auth_tag, unsigned long auth_tag_len);

asmlinkage void aesni_gcm_enc_avx_gen4(void *ctx,
				struct gcm_context_data *gdata, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

asmlinkage void aesni_gcm_dec_avx_gen4(void *ctx,
				struct gcm_context_data *gdata, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

static const struct aesni_gcm_tfm_s aesni_gcm_tfm_avx_gen4 = {
	.init = &aesni_gcm_init_avx_gen4,
	.enc_update = &aesni_gcm_enc_update_avx_gen4,
	.dec_update = &aesni_gcm_dec_update_avx_gen4,
	.finalize = &aesni_gcm_finalize_avx_gen4,
};
#endif
static inline struct
aesni_rfc4106_gcm_ctx *aesni_rfc4106_gcm_ctx_get(struct crypto_aead *tfm)
{
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return PTR_ALIGN(crypto_aead_ctx(tfm), align);
}

static inline struct
generic_gcmaes_ctx *generic_gcmaes_ctx_get(struct crypto_aead *tfm)
{
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return PTR_ALIGN(crypto_aead_ctx(tfm), align);
}
#endif

static inline struct crypto_aes_ctx *aes_ctx(void *raw_ctx)
{
	unsigned long addr = (unsigned long)raw_ctx;
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return (struct crypto_aes_ctx *)ALIGN(addr, align);
}
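
/*
 * Example (assuming crypto_tfm_ctx_alignment() returns 8): a raw context
 * pointer ending in ...08 is rounded by ALIGN(addr, 16) up to ...10, the
 * next 16-byte boundary; the AESNI_ALIGN_EXTRA slack reserved in
 * CRYPTO_AES_CTX_SIZE guarantees the rounded pointer still has a full
 * struct crypto_aes_ctx of usable space behind it.
 */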
static int aes_set_key_common(struct crypto_tfm *tfm, void *raw_ctx,
			      const u8 *in_key, unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = aes_ctx(raw_ctx);
	u32 *flags = &tfm->crt_flags;
	int err;

	if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
	    key_len != AES_KEYSIZE_256) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	if (!crypto_simd_usable())
		err = crypto_aes_expand_key(ctx, in_key, key_len);
	else {
		kernel_fpu_begin();
		err = aesni_set_key(ctx, in_key, key_len);
		kernel_fpu_end();
	}

	return err;
}
static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	return aes_set_key_common(tfm, crypto_tfm_ctx(tfm), in_key, key_len);
}
static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (!crypto_simd_usable())
		crypto_aes_encrypt_x86(ctx, dst, src);
	else {
		kernel_fpu_begin();
		aesni_enc(ctx, dst, src);
		kernel_fpu_end();
	}
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (!crypto_simd_usable())
		crypto_aes_decrypt_x86(ctx, dst, src);
	else {
		kernel_fpu_begin();
		aesni_dec(ctx, dst, src);
		kernel_fpu_end();
	}
}

static void __aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	aesni_enc(ctx, dst, src);
}

static void __aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	aesni_dec(ctx, dst, src);
}
static int aesni_skcipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
				 unsigned int len)
{
	return aes_set_key_common(crypto_skcipher_tfm(tfm),
				  crypto_skcipher_ctx(tfm), key, len);
}
static int ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

static int ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}
static int cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

static int cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}
#ifdef CONFIG_X86_64
static void ctr_crypt_final(struct crypto_aes_ctx *ctx,
			    struct skcipher_walk *walk)
{
	u8 *ctrblk = walk->iv;
	u8 keystream[AES_BLOCK_SIZE];
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	aesni_enc(ctx, keystream, ctrblk);
	crypto_xor_cpy(dst, keystream, src, nbytes);

	crypto_inc(ctrblk, AES_BLOCK_SIZE);
}
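
/*
 * Example: for a 30-byte CTR request the bulk loop in ctr_crypt() below
 * handles the first 16 bytes, then ctr_crypt_final() encrypts one more
 * counter block and XORs only the remaining 14 keystream bytes into the
 * tail, so the output is exactly as long as the input.
 */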
#ifdef CONFIG_AS_AVX
static void aesni_ctr_enc_avx_tfm(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv)
{
	/*
	 * Based on key length, override with the by8 version of ctr mode
	 * encryption/decryption for improved performance.
	 * aes_set_key_common() ensures that the key length is one of
	 * {128, 192, 256} bits.
	 */
	if (ctx->key_length == AES_KEYSIZE_128)
		aes_ctr_enc_128_avx_by8(in, iv, (void *)ctx, out, len);
	else if (ctx->key_length == AES_KEYSIZE_192)
		aes_ctr_enc_192_avx_by8(in, iv, (void *)ctx, out, len);
	else
		aes_ctr_enc_256_avx_by8(in, iv, (void *)ctx, out, len);
}
#endif
static int ctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
		aesni_ctr_enc_tfm(ctx, walk.dst.virt.addr, walk.src.virt.addr,
				  nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	if (walk.nbytes) {
		ctr_crypt_final(ctx, &walk);
		err = skcipher_walk_done(&walk, 0);
	}
	kernel_fpu_end();

	return err;
}
static int xts_aesni_setkey(struct crypto_skcipher *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err;

	err = xts_verify_key(tfm, key, keylen);
	if (err)
		return err;

	keylen /= 2;

	/* first half of xts-key is for crypt */
	err = aes_set_key_common(crypto_skcipher_tfm(tfm), ctx->raw_crypt_ctx,
				 key, keylen);
	if (err)
		return err;

	/* second half of xts-key is for tweak */
	return aes_set_key_common(crypto_skcipher_tfm(tfm), ctx->raw_tweak_ctx,
				  key + keylen, keylen);
}
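
/*
 * Example: a 64-byte xts(aes) key produces two independent AES-256
 * schedules; after keylen /= 2 above, key points at the crypt half and
 * key + keylen at the tweak half.
 */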
static void aesni_xts_tweak(void *ctx, u8 *out, const u8 *in)
{
	aesni_enc(ctx, out, in);
}

static void aesni_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv, GLUE_FUNC_CAST(aesni_enc));
}

static void aesni_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv, GLUE_FUNC_CAST(aesni_dec));
}

static void aesni_xts_enc8(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	aesni_xts_crypt8(ctx, (u8 *)dst, (const u8 *)src, true, (u8 *)iv);
}

static void aesni_xts_dec8(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	aesni_xts_crypt8(ctx, (u8 *)dst, (const u8 *)src, false, (u8 *)iv);
}
static const struct common_glue_ctx aesni_enc_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = 1,

	.funcs = { {
		.num_blocks = 8,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_enc8) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_enc) }
	} }
};

static const struct common_glue_ctx aesni_dec_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = 1,

	.funcs = { {
		.num_blocks = 8,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_dec8) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_dec) }
	} }
};
static int xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	return glue_xts_req_128bit(&aesni_enc_xts, req,
				   XTS_TWEAK_CAST(aesni_xts_tweak),
				   aes_ctx(ctx->raw_tweak_ctx),
				   aes_ctx(ctx->raw_crypt_ctx));
}

static int xts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	return glue_xts_req_128bit(&aesni_dec_xts, req,
				   XTS_TWEAK_CAST(aesni_xts_tweak),
				   aes_ctx(ctx->raw_tweak_ctx),
				   aes_ctx(ctx->raw_crypt_ctx));
}
static int
rfc4106_set_hash_subkey(u8 *hash_subkey, const u8 *key, unsigned int key_len)
{
	struct crypto_cipher *tfm;
	int ret;

	tfm = crypto_alloc_cipher("aes", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	ret = crypto_cipher_setkey(tfm, key, key_len);
	if (ret)
		goto out_free_cipher;

	/* Clear the data in the hash sub key container to zero.*/
	/* We want to cipher all zeros to create the hash sub key. */
	memset(hash_subkey, 0, RFC4106_HASH_SUBKEY_SIZE);

	crypto_cipher_encrypt_one(tfm, hash_subkey, hash_subkey);

out_free_cipher:
	crypto_free_cipher(tfm);
	return ret;
}
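
/*
 * In GHASH terms this computes the hash subkey H = AES-K(0^128): the
 * cipher is keyed with the AES key and run once over an all-zero block,
 * exactly as specified for GCM in NIST SP 800-38D.
 */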
static int common_rfc4106_set_key(struct crypto_aead *aead, const u8 *key,
				  unsigned int key_len)
{
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(aead);

	if (key_len < 4) {
		crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	/*Account for 4 byte nonce at the end.*/
	key_len -= 4;

	memcpy(ctx->nonce, key + key_len, sizeof(ctx->nonce));

	return aes_set_key_common(crypto_aead_tfm(aead),
				  &ctx->aes_key_expanded, key, key_len) ?:
	       rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
}
/* This is the Integrity Check Value (aka the authentication tag) length and
 * can be 8, 12 or 16 bytes long.
 */
static int common_rfc4106_set_authsize(struct crypto_aead *aead,
				       unsigned int authsize)
{
	switch (authsize) {
	case 8:
	case 12:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

static int generic_gcmaes_set_authsize(struct crypto_aead *tfm,
				       unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 8:
	case 12:
	case 13:
	case 14:
	case 15:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}
static int gcmaes_crypt_by_sg(bool enc, struct aead_request *req,
			      unsigned int assoclen, u8 *hash_subkey,
			      u8 *iv, void *aes_ctx)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	unsigned long auth_tag_len = crypto_aead_authsize(tfm);
	const struct aesni_gcm_tfm_s *gcm_tfm = aesni_gcm_tfm;
	struct gcm_context_data data AESNI_ALIGN_ATTR;
	struct scatter_walk dst_sg_walk = {};
	unsigned long left = req->cryptlen;
	unsigned long len, srclen, dstlen;
	struct scatter_walk assoc_sg_walk;
	struct scatter_walk src_sg_walk;
	struct scatterlist src_start[2];
	struct scatterlist dst_start[2];
	struct scatterlist *src_sg;
	struct scatterlist *dst_sg;
	u8 *src, *dst, *assoc;
	u8 *assocmem = NULL;
	u8 authTag[16];

	if (!enc)
		left -= auth_tag_len;

#ifdef CONFIG_AS_AVX2
	if (left < AVX_GEN4_OPTSIZE && gcm_tfm == &aesni_gcm_tfm_avx_gen4)
		gcm_tfm = &aesni_gcm_tfm_avx_gen2;
#endif
#ifdef CONFIG_AS_AVX
	if (left < AVX_GEN2_OPTSIZE && gcm_tfm == &aesni_gcm_tfm_avx_gen2)
		gcm_tfm = &aesni_gcm_tfm_sse;
#endif
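
	/*
	 * Example: a 512-byte request on an AVX2-capable CPU falls back from
	 * avx_gen4 to avx_gen2 (512 < 4096) and then to the SSE path
	 * (512 < 640); the wider code paths only pay off on larger inputs.
	 */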
	/* Linearize assoc, if not already linear */
	if (req->src->length >= assoclen && req->src->length &&
		(!PageHighMem(sg_page(req->src)) ||
			req->src->offset + req->src->length <= PAGE_SIZE)) {
		scatterwalk_start(&assoc_sg_walk, req->src);
		assoc = scatterwalk_map(&assoc_sg_walk);
	} else {
		/* assoc can be any length, so must be on heap */
		assocmem = kmalloc(assoclen, GFP_ATOMIC);
		if (unlikely(!assocmem))
			return -ENOMEM;
		assoc = assocmem;

		scatterwalk_map_and_copy(assoc, req->src, 0, assoclen, 0);
	}

	if (left) {
		src_sg = scatterwalk_ffwd(src_start, req->src, req->assoclen);
		scatterwalk_start(&src_sg_walk, src_sg);
		if (req->src != req->dst) {
			dst_sg = scatterwalk_ffwd(dst_start, req->dst,
						  req->assoclen);
			scatterwalk_start(&dst_sg_walk, dst_sg);
		}
	}

	kernel_fpu_begin();
	gcm_tfm->init(aes_ctx, &data, iv,
		      hash_subkey, assoc, assoclen);
	if (req->src != req->dst) {
		while (left) {
			src = scatterwalk_map(&src_sg_walk);
			dst = scatterwalk_map(&dst_sg_walk);
			srclen = scatterwalk_clamp(&src_sg_walk, left);
			dstlen = scatterwalk_clamp(&dst_sg_walk, left);
			len = min(srclen, dstlen);

			if (enc)
				gcm_tfm->enc_update(aes_ctx, &data,
						    dst, src, len);
			else
				gcm_tfm->dec_update(aes_ctx, &data,
						    dst, src, len);

			left -= len;

			scatterwalk_unmap(src);
			scatterwalk_unmap(dst);
			scatterwalk_advance(&src_sg_walk, len);
			scatterwalk_advance(&dst_sg_walk, len);
			scatterwalk_done(&src_sg_walk, 0, left);
			scatterwalk_done(&dst_sg_walk, 1, left);
		}
	} else {
		while (left) {
			dst = src = scatterwalk_map(&src_sg_walk);
			len = scatterwalk_clamp(&src_sg_walk, left);

			if (enc)
				gcm_tfm->enc_update(aes_ctx, &data,
						    src, src, len);
			else
				gcm_tfm->dec_update(aes_ctx, &data,
						    src, src, len);

			left -= len;
			scatterwalk_unmap(src);
			scatterwalk_advance(&src_sg_walk, len);
			scatterwalk_done(&src_sg_walk, 1, left);
		}
	}
	gcm_tfm->finalize(aes_ctx, &data, authTag, auth_tag_len);
	kernel_fpu_end();

	if (!assocmem)
		scatterwalk_unmap(assoc);
	else
		kfree(assocmem);

	if (!enc) {
		u8 authTagMsg[16];

		/* Copy out original authTag */
		scatterwalk_map_and_copy(authTagMsg, req->src,
					 req->assoclen + req->cryptlen -
					 auth_tag_len,
					 auth_tag_len, 0);

		/* Compare generated tag with passed in tag. */
		return crypto_memneq(authTagMsg, authTag, auth_tag_len) ?
			-EBADMSG : 0;
	}

	/* Copy in the authTag */
	scatterwalk_map_and_copy(authTag, req->dst,
				 req->assoclen + req->cryptlen,
				 auth_tag_len, 1);

	return 0;
}
static int gcmaes_encrypt(struct aead_request *req, unsigned int assoclen,
			  u8 *hash_subkey, u8 *iv, void *aes_ctx)
{
	return gcmaes_crypt_by_sg(true, req, assoclen, hash_subkey, iv,
				  aes_ctx);
}

static int gcmaes_decrypt(struct aead_request *req, unsigned int assoclen,
			  u8 *hash_subkey, u8 *iv, void *aes_ctx)
{
	return gcmaes_crypt_by_sg(false, req, assoclen, hash_subkey, iv,
				  aes_ctx);
}
static int helper_rfc4106_encrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
	unsigned int i;
	__be32 counter = cpu_to_be32(1);

	/* Assuming we are supporting rfc4106 64-bit extended sequence
	 * numbers, the AAD length must be 16 or 20 bytes.
	 */
	if (unlikely(req->assoclen != 16 && req->assoclen != 20))
		return -EINVAL;

	/* IV below built */
	for (i = 0; i < 4; i++)
		*(iv+i) = ctx->nonce[i];
	for (i = 0; i < 8; i++)
		*(iv+4+i) = req->iv[i];
	*((__be32 *)(iv+12)) = counter;
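
	/*
	 * Resulting j0 layout: nonce[4] || explicit IV[8] || 0x00000001.
	 * E.g. with nonce ca:fe:ba:be and IV fa:ce:db:ad:de:ca:f8:88 (the
	 * familiar GCM test-vector values), the counter block is
	 * cafebabe facedbad decaf888 00000001.
	 */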
	return gcmaes_encrypt(req, req->assoclen - 8, ctx->hash_subkey, iv,
			      aes_ctx);
}
static int helper_rfc4106_decrypt(struct aead_request *req)
{
	__be32 counter = cpu_to_be32(1);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
	unsigned int i;

	if (unlikely(req->assoclen != 16 && req->assoclen != 20))
		return -EINVAL;

	/* Assuming we are supporting rfc4106 64-bit extended sequence
	 * numbers, the AAD length must be 16 or 20 bytes.
	 */

	/* IV below built */
	for (i = 0; i < 4; i++)
		*(iv+i) = ctx->nonce[i];
	for (i = 0; i < 8; i++)
		*(iv+4+i) = req->iv[i];
	*((__be32 *)(iv+12)) = counter;

	return gcmaes_decrypt(req, req->assoclen - 8, ctx->hash_subkey, iv,
			      aes_ctx);
}
static struct crypto_alg aesni_algs[] = { {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-aesni",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
	.cra_module		= THIS_MODULE,
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= aes_encrypt,
			.cia_decrypt		= aes_decrypt
		}
	}
}, {
	.cra_name		= "__aes",
	.cra_driver_name	= "__aes-aesni",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER | CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
	.cra_module		= THIS_MODULE,
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= __aes_encrypt,
			.cia_decrypt		= __aes_decrypt
		}
	}
} };
[] = {
962 .cra_name
= "__ecb(aes)",
963 .cra_driver_name
= "__ecb-aes-aesni",
965 .cra_flags
= CRYPTO_ALG_INTERNAL
,
966 .cra_blocksize
= AES_BLOCK_SIZE
,
967 .cra_ctxsize
= CRYPTO_AES_CTX_SIZE
,
968 .cra_module
= THIS_MODULE
,
970 .min_keysize
= AES_MIN_KEY_SIZE
,
971 .max_keysize
= AES_MAX_KEY_SIZE
,
972 .setkey
= aesni_skcipher_setkey
,
973 .encrypt
= ecb_encrypt
,
974 .decrypt
= ecb_decrypt
,
977 .cra_name
= "__cbc(aes)",
978 .cra_driver_name
= "__cbc-aes-aesni",
980 .cra_flags
= CRYPTO_ALG_INTERNAL
,
981 .cra_blocksize
= AES_BLOCK_SIZE
,
982 .cra_ctxsize
= CRYPTO_AES_CTX_SIZE
,
983 .cra_module
= THIS_MODULE
,
985 .min_keysize
= AES_MIN_KEY_SIZE
,
986 .max_keysize
= AES_MAX_KEY_SIZE
,
987 .ivsize
= AES_BLOCK_SIZE
,
988 .setkey
= aesni_skcipher_setkey
,
989 .encrypt
= cbc_encrypt
,
990 .decrypt
= cbc_decrypt
,
994 .cra_name
= "__ctr(aes)",
995 .cra_driver_name
= "__ctr-aes-aesni",
997 .cra_flags
= CRYPTO_ALG_INTERNAL
,
999 .cra_ctxsize
= CRYPTO_AES_CTX_SIZE
,
1000 .cra_module
= THIS_MODULE
,
1002 .min_keysize
= AES_MIN_KEY_SIZE
,
1003 .max_keysize
= AES_MAX_KEY_SIZE
,
1004 .ivsize
= AES_BLOCK_SIZE
,
1005 .chunksize
= AES_BLOCK_SIZE
,
1006 .setkey
= aesni_skcipher_setkey
,
1007 .encrypt
= ctr_crypt
,
1008 .decrypt
= ctr_crypt
,
1011 .cra_name
= "__xts(aes)",
1012 .cra_driver_name
= "__xts-aes-aesni",
1013 .cra_priority
= 401,
1014 .cra_flags
= CRYPTO_ALG_INTERNAL
,
1015 .cra_blocksize
= AES_BLOCK_SIZE
,
1016 .cra_ctxsize
= XTS_AES_CTX_SIZE
,
1017 .cra_module
= THIS_MODULE
,
1019 .min_keysize
= 2 * AES_MIN_KEY_SIZE
,
1020 .max_keysize
= 2 * AES_MAX_KEY_SIZE
,
1021 .ivsize
= AES_BLOCK_SIZE
,
1022 .setkey
= xts_aesni_setkey
,
1023 .encrypt
= xts_encrypt
,
1024 .decrypt
= xts_decrypt
,
1030 struct simd_skcipher_alg
*aesni_simd_skciphers
[ARRAY_SIZE(aesni_skciphers
)];
#ifdef CONFIG_X86_64
static int generic_gcmaes_set_key(struct crypto_aead *aead, const u8 *key,
				  unsigned int key_len)
{
	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(aead);

	return aes_set_key_common(crypto_aead_tfm(aead),
				  &ctx->aes_key_expanded, key, key_len) ?:
	       rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
}

static int generic_gcmaes_encrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
	__be32 counter = cpu_to_be32(1);

	memcpy(iv, req->iv, 12);
	*((__be32 *)(iv+12)) = counter;

	return gcmaes_encrypt(req, req->assoclen, ctx->hash_subkey, iv,
			      aes_ctx);
}

static int generic_gcmaes_decrypt(struct aead_request *req)
{
	__be32 counter = cpu_to_be32(1);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));

	memcpy(iv, req->iv, 12);
	*((__be32 *)(iv+12)) = counter;

	return gcmaes_decrypt(req, req->assoclen, ctx->hash_subkey, iv,
			      aes_ctx);
}
static struct aead_alg aesni_aeads[] = { {
	.setkey			= common_rfc4106_set_key,
	.setauthsize		= common_rfc4106_set_authsize,
	.encrypt		= helper_rfc4106_encrypt,
	.decrypt		= helper_rfc4106_decrypt,
	.ivsize			= GCM_RFC4106_IV_SIZE,
	.maxauthsize		= 16,
	.base = {
		.cra_name		= "__rfc4106(gcm(aes))",
		.cra_driver_name	= "__rfc4106-gcm-aesni",
		.cra_priority		= 400,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct aesni_rfc4106_gcm_ctx),
		.cra_alignmask		= AESNI_ALIGN - 1,
		.cra_module		= THIS_MODULE,
	},
}, {
	.setkey			= generic_gcmaes_set_key,
	.setauthsize		= generic_gcmaes_set_authsize,
	.encrypt		= generic_gcmaes_encrypt,
	.decrypt		= generic_gcmaes_decrypt,
	.ivsize			= GCM_AES_IV_SIZE,
	.maxauthsize		= 16,
	.base = {
		.cra_name		= "__gcm(aes)",
		.cra_driver_name	= "__generic-gcm-aesni",
		.cra_priority		= 400,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct generic_gcmaes_ctx),
		.cra_alignmask		= AESNI_ALIGN - 1,
		.cra_module		= THIS_MODULE,
	},
} };
#else
static struct aead_alg aesni_aeads[0];
#endif

static struct simd_aead_alg *aesni_simd_aeads[ARRAY_SIZE(aesni_aeads)];
static const struct x86_cpu_id aesni_cpu_id[] = {
	X86_FEATURE_MATCH(X86_FEATURE_AES),
	{}
};
MODULE_DEVICE_TABLE(x86cpu, aesni_cpu_id);
static int __init aesni_init(void)
{
	int err;

	if (!x86_match_cpu(aesni_cpu_id))
		return -ENODEV;
#ifdef CONFIG_X86_64
#ifdef CONFIG_AS_AVX2
	if (boot_cpu_has(X86_FEATURE_AVX2)) {
		pr_info("AVX2 version of gcm_enc/dec engaged.\n");
		aesni_gcm_tfm = &aesni_gcm_tfm_avx_gen4;
	} else
#endif
#ifdef CONFIG_AS_AVX
	if (boot_cpu_has(X86_FEATURE_AVX)) {
		pr_info("AVX version of gcm_enc/dec engaged.\n");
		aesni_gcm_tfm = &aesni_gcm_tfm_avx_gen2;
	} else
#endif
	{
		pr_info("SSE version of gcm_enc/dec engaged.\n");
		aesni_gcm_tfm = &aesni_gcm_tfm_sse;
	}
	aesni_ctr_enc_tfm = aesni_ctr_enc;
#ifdef CONFIG_AS_AVX
	if (boot_cpu_has(X86_FEATURE_AVX)) {
		/* optimize performance of ctr mode encryption transform */
		aesni_ctr_enc_tfm = aesni_ctr_enc_avx_tfm;
		pr_info("AES CTR mode by8 optimization enabled\n");
	}
#endif
#endif

	err = crypto_register_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
	if (err)
		return err;

	err = simd_register_skciphers_compat(aesni_skciphers,
					     ARRAY_SIZE(aesni_skciphers),
					     aesni_simd_skciphers);
	if (err)
		goto unregister_algs;

	err = simd_register_aeads_compat(aesni_aeads, ARRAY_SIZE(aesni_aeads),
					 aesni_simd_aeads);
	if (err)
		goto unregister_skciphers;

	return 0;

unregister_skciphers:
	simd_unregister_skciphers(aesni_skciphers, ARRAY_SIZE(aesni_skciphers),
				  aesni_simd_skciphers);
unregister_algs:
	crypto_unregister_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
	return err;
}

static void __exit aesni_exit(void)
{
	simd_unregister_aeads(aesni_aeads, ARRAY_SIZE(aesni_aeads),
			      aesni_simd_aeads);
	simd_unregister_skciphers(aesni_skciphers, ARRAY_SIZE(aesni_skciphers),
				  aesni_simd_skciphers);
	crypto_unregister_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
}

late_initcall(aesni_init);
module_exit(aesni_exit);
MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, Intel AES-NI instructions optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS_CRYPTO("aes");