/*
 * Support for Intel AES-NI instructions. This file contains glue
 * code; the real AES implementation is in aesni-intel_asm.S.
 *
 * Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 *
 * Added RFC4106 AES-GCM support for 128-bit keys under the AEAD
 * interface for 64-bit kernels.
 *    Authors: Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */

#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/module.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/cryptd.h>
#include <crypto/ctr.h>
#include <crypto/b128ops.h>
#include <crypto/gcm.h>
#include <crypto/xts.h>
#include <asm/cpu_device_id.h>
#include <asm/fpu/api.h>
#include <asm/crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/workqueue.h>
#include <linux/spinlock.h>
#ifdef CONFIG_X86_64
#include <asm/crypto/glue_helper.h>
#endif

#define AESNI_ALIGN		16
#define AESNI_ALIGN_ATTR	__attribute__ ((__aligned__(AESNI_ALIGN)))
#define AES_BLOCK_MASK		(~(AES_BLOCK_SIZE - 1))
#define RFC4106_HASH_SUBKEY_SIZE 16
#define AESNI_ALIGN_EXTRA	((AESNI_ALIGN - 1) & ~(CRYPTO_MINALIGN - 1))
#define CRYPTO_AES_CTX_SIZE	(sizeof(struct crypto_aes_ctx) + AESNI_ALIGN_EXTRA)
#define XTS_AES_CTX_SIZE	(sizeof(struct aesni_xts_ctx) + AESNI_ALIGN_EXTRA)

/*
 * This data is stored at the end of the crypto_tfm struct.
 * It is per-"session" data storage and needs to be 16-byte aligned.
 */
struct aesni_rfc4106_gcm_ctx {
	u8 hash_subkey[16] AESNI_ALIGN_ATTR;
	struct crypto_aes_ctx aes_key_expanded AESNI_ALIGN_ATTR;
	u8 nonce[4];
};

struct generic_gcmaes_ctx {
	u8 hash_subkey[16] AESNI_ALIGN_ATTR;
	struct crypto_aes_ctx aes_key_expanded AESNI_ALIGN_ATTR;
};

struct aesni_xts_ctx {
	u8 raw_tweak_ctx[sizeof(struct crypto_aes_ctx)] AESNI_ALIGN_ATTR;
	u8 raw_crypt_ctx[sizeof(struct crypto_aes_ctx)] AESNI_ALIGN_ATTR;
};

#define GCM_BLOCK_LEN 16

struct gcm_context_data {
	/* init, update and finalize context data */
	u8 aad_hash[GCM_BLOCK_LEN];
	u64 aad_length;
	u64 in_length;
	u8 partial_block_enc_key[GCM_BLOCK_LEN];
	u8 orig_IV[GCM_BLOCK_LEN];
	u8 current_counter[GCM_BLOCK_LEN];
	u64 partial_block_len;
	u64 unused;
	u8 hash_keys[GCM_BLOCK_LEN * 16];
};

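/*
 * The low-level primitives below are implemented in AES-NI assembly
 * (arch/x86/crypto/aesni-intel_asm.S and the AVX/AVX2 variants). They
 * clobber SIMD register state, so callers in this file bracket them
 * with kernel_fpu_begin()/kernel_fpu_end().
 */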
asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
			     unsigned int key_len);
asmlinkage void aesni_enc(struct crypto_aes_ctx *ctx, u8 *out,
			  const u8 *in);
asmlinkage void aesni_dec(struct crypto_aes_ctx *ctx, u8 *out,
			  const u8 *in);
asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_ecb_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_cbc_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);

#define AVX_GEN2_OPTSIZE 640
#define AVX_GEN4_OPTSIZE 4096

#ifdef CONFIG_X86_64

static void (*aesni_ctr_enc_tfm)(struct crypto_aes_ctx *ctx, u8 *out,
				 const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_ctr_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);

asmlinkage void aesni_xts_crypt8(struct crypto_aes_ctx *ctx, u8 *out,
				 const u8 *in, bool enc, u8 *iv);

/* asmlinkage void aesni_gcm_enc()
 * void *ctx, AES Key schedule. Starts on a 16 byte boundary.
 * struct gcm_context_data *gdata, May be uninitialized.
 * u8 *out, Ciphertext output. Encrypt in-place is allowed.
 * const u8 *in, Plaintext input.
 * unsigned long plaintext_len, Length of data in bytes for encryption.
 * u8 *iv, Pre-counter block j0: 12 byte IV concatenated with 0x00000001.
 *         16-byte aligned pointer.
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 * const u8 *aad, Additional Authentication Data (AAD).
 * unsigned long aad_len, Length of AAD in bytes.
 * u8 *auth_tag, Authenticated Tag output.
 * unsigned long auth_tag_len, Authenticated Tag Length in bytes.
 *         Valid values are 16 (most likely), 12 or 8.
 */
asmlinkage void aesni_gcm_enc(void *ctx,
			struct gcm_context_data *gdata, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

/* asmlinkage void aesni_gcm_dec()
 * void *ctx, AES Key schedule. Starts on a 16 byte boundary.
 * struct gcm_context_data *gdata, May be uninitialized.
 * u8 *out, Plaintext output. Decrypt in-place is allowed.
 * const u8 *in, Ciphertext input.
 * unsigned long ciphertext_len, Length of data in bytes for decryption.
 * u8 *iv, Pre-counter block j0: 12 byte IV concatenated with 0x00000001.
 *         16-byte aligned pointer.
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 * const u8 *aad, Additional Authentication Data (AAD).
 * unsigned long aad_len, Length of AAD in bytes. With RFC4106 this is going
 *         to be 8 or 12 bytes.
 * u8 *auth_tag, Authenticated Tag output.
 * unsigned long auth_tag_len, Authenticated Tag Length in bytes.
 *         Valid values are 16 (most likely), 12 or 8.
 */
asmlinkage void aesni_gcm_dec(void *ctx,
			struct gcm_context_data *gdata, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

/* Scatter/gather routines, with args similar to above */
asmlinkage void aesni_gcm_init(void *ctx,
			       struct gcm_context_data *gdata,
			       u8 *iv,
			       u8 *hash_subkey, const u8 *aad,
			       unsigned long aad_len);
asmlinkage void aesni_gcm_enc_update(void *ctx,
				     struct gcm_context_data *gdata, u8 *out,
				     const u8 *in, unsigned long plaintext_len);
asmlinkage void aesni_gcm_dec_update(void *ctx,
				     struct gcm_context_data *gdata, u8 *out,
				     const u8 *in,
				     unsigned long ciphertext_len);
asmlinkage void aesni_gcm_finalize(void *ctx,
				   struct gcm_context_data *gdata,
				   u8 *auth_tag, unsigned long auth_tag_len);

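/*
 * Dispatch table over the SSE, AVX and AVX2 GCM implementations. The
 * aesni_gcm_tfm pointer is selected once at module init from CPU
 * features; gcmaes_crypt_by_sg() may still drop to a lighter variant
 * for short inputs (see AVX_GEN2_OPTSIZE/AVX_GEN4_OPTSIZE).
 */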
static const struct aesni_gcm_tfm_s {
	void (*init)(void *ctx, struct gcm_context_data *gdata, u8 *iv,
		     u8 *hash_subkey, const u8 *aad, unsigned long aad_len);
	void (*enc_update)(void *ctx, struct gcm_context_data *gdata, u8 *out,
			   const u8 *in, unsigned long plaintext_len);
	void (*dec_update)(void *ctx, struct gcm_context_data *gdata, u8 *out,
			   const u8 *in, unsigned long ciphertext_len);
	void (*finalize)(void *ctx, struct gcm_context_data *gdata,
			 u8 *auth_tag, unsigned long auth_tag_len);
} *aesni_gcm_tfm;

static const struct aesni_gcm_tfm_s aesni_gcm_tfm_sse = {
	.init = &aesni_gcm_init,
	.enc_update = &aesni_gcm_enc_update,
	.dec_update = &aesni_gcm_dec_update,
	.finalize = &aesni_gcm_finalize,
};

#ifdef CONFIG_AS_AVX
asmlinkage void aes_ctr_enc_128_avx_by8(const u8 *in, u8 *iv,
		void *keys, u8 *out, unsigned int num_bytes);
asmlinkage void aes_ctr_enc_192_avx_by8(const u8 *in, u8 *iv,
		void *keys, u8 *out, unsigned int num_bytes);
asmlinkage void aes_ctr_enc_256_avx_by8(const u8 *in, u8 *iv,
		void *keys, u8 *out, unsigned int num_bytes);
/*
 * asmlinkage void aesni_gcm_init_avx_gen2()
 * gcm_data *my_ctx_data, context data
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 */
asmlinkage void aesni_gcm_init_avx_gen2(void *my_ctx_data,
					struct gcm_context_data *gdata,
					u8 *iv,
					u8 *hash_subkey,
					const u8 *aad,
					unsigned long aad_len);

asmlinkage void aesni_gcm_enc_update_avx_gen2(void *ctx,
				struct gcm_context_data *gdata, u8 *out,
				const u8 *in, unsigned long plaintext_len);
asmlinkage void aesni_gcm_dec_update_avx_gen2(void *ctx,
				struct gcm_context_data *gdata, u8 *out,
				const u8 *in,
				unsigned long ciphertext_len);
asmlinkage void aesni_gcm_finalize_avx_gen2(void *ctx,
				struct gcm_context_data *gdata,
				u8 *auth_tag, unsigned long auth_tag_len);

asmlinkage void aesni_gcm_enc_avx_gen2(void *ctx,
			struct gcm_context_data *gdata, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

asmlinkage void aesni_gcm_dec_avx_gen2(void *ctx,
			struct gcm_context_data *gdata, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

static const struct aesni_gcm_tfm_s aesni_gcm_tfm_avx_gen2 = {
	.init = &aesni_gcm_init_avx_gen2,
	.enc_update = &aesni_gcm_enc_update_avx_gen2,
	.dec_update = &aesni_gcm_dec_update_avx_gen2,
	.finalize = &aesni_gcm_finalize_avx_gen2,
};

#endif

#ifdef CONFIG_AS_AVX2
/*
 * asmlinkage void aesni_gcm_init_avx_gen4()
 * gcm_data *my_ctx_data, context data
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 */
asmlinkage void aesni_gcm_init_avx_gen4(void *my_ctx_data,
					struct gcm_context_data *gdata,
					u8 *iv,
					u8 *hash_subkey,
					const u8 *aad,
					unsigned long aad_len);

asmlinkage void aesni_gcm_enc_update_avx_gen4(void *ctx,
				struct gcm_context_data *gdata, u8 *out,
				const u8 *in, unsigned long plaintext_len);
asmlinkage void aesni_gcm_dec_update_avx_gen4(void *ctx,
				struct gcm_context_data *gdata, u8 *out,
				const u8 *in,
				unsigned long ciphertext_len);
asmlinkage void aesni_gcm_finalize_avx_gen4(void *ctx,
				struct gcm_context_data *gdata,
				u8 *auth_tag, unsigned long auth_tag_len);

asmlinkage void aesni_gcm_enc_avx_gen4(void *ctx,
			struct gcm_context_data *gdata, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

asmlinkage void aesni_gcm_dec_avx_gen4(void *ctx,
			struct gcm_context_data *gdata, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

static const struct aesni_gcm_tfm_s aesni_gcm_tfm_avx_gen4 = {
	.init = &aesni_gcm_init_avx_gen4,
	.enc_update = &aesni_gcm_enc_update_avx_gen4,
	.dec_update = &aesni_gcm_dec_update_avx_gen4,
	.finalize = &aesni_gcm_finalize_avx_gen4,
};

#endif

static inline struct
aesni_rfc4106_gcm_ctx *aesni_rfc4106_gcm_ctx_get(struct crypto_aead *tfm)
{
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return PTR_ALIGN(crypto_aead_ctx(tfm), align);
}

static inline struct
generic_gcmaes_ctx *generic_gcmaes_ctx_get(struct crypto_aead *tfm)
{
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return PTR_ALIGN(crypto_aead_ctx(tfm), align);
}
#endif

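/*
 * The crypto API only guarantees CRYPTO_MINALIGN for tfm context memory,
 * so the helpers above and below round the raw context pointer up to
 * AESNI_ALIGN (16 bytes). CRYPTO_AES_CTX_SIZE and XTS_AES_CTX_SIZE
 * reserve AESNI_ALIGN_EXTRA bytes of slack so the aligned pointer stays
 * within the allocation.
 */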
static inline struct crypto_aes_ctx *aes_ctx(void *raw_ctx)
{
	unsigned long addr = (unsigned long)raw_ctx;
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return (struct crypto_aes_ctx *)ALIGN(addr, align);
}

static int aes_set_key_common(struct crypto_tfm *tfm, void *raw_ctx,
			      const u8 *in_key, unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = aes_ctx(raw_ctx);
	u32 *flags = &tfm->crt_flags;
	int err;

	if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
	    key_len != AES_KEYSIZE_256) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	if (!irq_fpu_usable())
		err = crypto_aes_expand_key(ctx, in_key, key_len);
	else {
		kernel_fpu_begin();
		err = aesni_set_key(ctx, in_key, key_len);
		kernel_fpu_end();
	}

	return err;
}

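/*
 * Pattern used throughout this file: when the FPU is not usable in the
 * current context (irq_fpu_usable() is false), fall back to the generic
 * C implementation; otherwise run the AES-NI version inside a
 * kernel_fpu_begin()/kernel_fpu_end() section.
 */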
static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	return aes_set_key_common(tfm, crypto_tfm_ctx(tfm), in_key, key_len);
}

static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (!irq_fpu_usable())
		crypto_aes_encrypt_x86(ctx, dst, src);
	else {
		kernel_fpu_begin();
		aesni_enc(ctx, dst, src);
		kernel_fpu_end();
	}
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (!irq_fpu_usable())
		crypto_aes_decrypt_x86(ctx, dst, src);
	else {
		kernel_fpu_begin();
		aesni_dec(ctx, dst, src);
		kernel_fpu_end();
	}
}

static void __aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	aesni_enc(ctx, dst, src);
}

static void __aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	aesni_dec(ctx, dst, src);
}

static int aesni_skcipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
				 unsigned int len)
{
	return aes_set_key_common(crypto_skcipher_tfm(tfm),
				  crypto_skcipher_ctx(tfm), key, len);
}

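/*
 * The ECB/CBC request handlers below share one shape: walk the request
 * with skcipher_walk_virt(), process whole blocks (nbytes &
 * AES_BLOCK_MASK) per step, and hand the unprocessed remainder back via
 * skcipher_walk_done(). The walk is marked atomic (third argument true)
 * because the loop runs under kernel_fpu_begin(), which disables
 * preemption.
 */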
static int ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

static int ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

static int cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

static int cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

#ifdef CONFIG_X86_64
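/*
 * ctr_crypt_final() handles a trailing partial block: encrypt the
 * counter block once more, XOR only the remaining keystream bytes into
 * the output, then advance the counter.
 */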
static void ctr_crypt_final(struct crypto_aes_ctx *ctx,
			    struct skcipher_walk *walk)
{
	u8 *ctrblk = walk->iv;
	u8 keystream[AES_BLOCK_SIZE];
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	aesni_enc(ctx, keystream, ctrblk);
	crypto_xor_cpy(dst, keystream, src, nbytes);

	crypto_inc(ctrblk, AES_BLOCK_SIZE);
}

#ifdef CONFIG_AS_AVX
static void aesni_ctr_enc_avx_tfm(struct crypto_aes_ctx *ctx, u8 *out,
				  const u8 *in, unsigned int len, u8 *iv)
{
	/*
	 * Based on the key length, use the by8 version of CTR mode
	 * encryption/decryption for improved performance.
	 * aes_set_key_common() ensures that the key length is one of
	 * {128,192,256} bits.
	 */
	if (ctx->key_length == AES_KEYSIZE_128)
		aes_ctr_enc_128_avx_by8(in, iv, (void *)ctx, out, len);
	else if (ctx->key_length == AES_KEYSIZE_192)
		aes_ctr_enc_192_avx_by8(in, iv, (void *)ctx, out, len);
	else
		aes_ctr_enc_256_avx_by8(in, iv, (void *)ctx, out, len);
}
#endif

static int ctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
		aesni_ctr_enc_tfm(ctx, walk.dst.virt.addr, walk.src.virt.addr,
				  nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	if (walk.nbytes) {
		ctr_crypt_final(ctx, &walk);
		err = skcipher_walk_done(&walk, 0);
	}
	kernel_fpu_end();

	return err;
}

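/*
 * XTS takes a double-length key. xts_verify_key() validates it (e.g.
 * rejecting identical halves in FIPS mode), then the first half keys
 * the data cipher and the second half keys the tweak cipher.
 */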
static int xts_aesni_setkey(struct crypto_skcipher *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err;

	err = xts_verify_key(tfm, key, keylen);
	if (err)
		return err;

	keylen /= 2;

	/* first half of xts-key is for crypt */
	err = aes_set_key_common(crypto_skcipher_tfm(tfm), ctx->raw_crypt_ctx,
				 key, keylen);
	if (err)
		return err;

	/* second half of xts-key is for tweak */
	return aes_set_key_common(crypto_skcipher_tfm(tfm), ctx->raw_tweak_ctx,
				  key + keylen, keylen);
}

static void aesni_xts_tweak(void *ctx, u8 *out, const u8 *in)
{
	aesni_enc(ctx, out, in);
}

static void aesni_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv, GLUE_FUNC_CAST(aesni_enc));
}

static void aesni_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv, GLUE_FUNC_CAST(aesni_dec));
}

static void aesni_xts_enc8(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	aesni_xts_crypt8(ctx, (u8 *)dst, (const u8 *)src, true, (u8 *)iv);
}

static void aesni_xts_dec8(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	aesni_xts_crypt8(ctx, (u8 *)dst, (const u8 *)src, false, (u8 *)iv);
}

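/*
 * Glue tables for the common XTS walker: prefer the eight-blocks-at-a-time
 * assembly path, falling back to single blocks for the tail.
 * fpu_blocks_limit = 1 means even a single block is worth an FPU section.
 */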
static const struct common_glue_ctx aesni_enc_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = 1,

	.funcs = { {
		.num_blocks = 8,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_enc8) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_enc) }
	} }
};

static const struct common_glue_ctx aesni_dec_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = 1,

	.funcs = { {
		.num_blocks = 8,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_dec8) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_dec) }
	} }
};

static int xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	return glue_xts_req_128bit(&aesni_enc_xts, req,
				   XTS_TWEAK_CAST(aesni_xts_tweak),
				   aes_ctx(ctx->raw_tweak_ctx),
				   aes_ctx(ctx->raw_crypt_ctx));
}

static int xts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	return glue_xts_req_128bit(&aesni_dec_xts, req,
				   XTS_TWEAK_CAST(aesni_xts_tweak),
				   aes_ctx(ctx->raw_tweak_ctx),
				   aes_ctx(ctx->raw_crypt_ctx));
}

static int rfc4106_init(struct crypto_aead *aead)
{
	struct cryptd_aead *cryptd_tfm;
	struct cryptd_aead **ctx = crypto_aead_ctx(aead);

	cryptd_tfm = cryptd_alloc_aead("__driver-gcm-aes-aesni",
				       CRYPTO_ALG_INTERNAL,
				       CRYPTO_ALG_INTERNAL);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);

	*ctx = cryptd_tfm;
	crypto_aead_set_reqsize(aead, crypto_aead_reqsize(&cryptd_tfm->base));
	return 0;
}

static void rfc4106_exit(struct crypto_aead *aead)
{
	struct cryptd_aead **ctx = crypto_aead_ctx(aead);

	cryptd_free_aead(*ctx);
}

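/*
 * The GHASH subkey H is the block cipher applied to the all-zero block:
 * H = AES_K(0^128) (NIST SP 800-38D). rfc4106_set_hash_subkey() derives
 * it by allocating a plain "aes" cipher, keying it, and encrypting a
 * zeroed buffer in place.
 */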
static int
rfc4106_set_hash_subkey(u8 *hash_subkey, const u8 *key, unsigned int key_len)
{
	struct crypto_cipher *tfm;
	int ret;

	tfm = crypto_alloc_cipher("aes", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	ret = crypto_cipher_setkey(tfm, key, key_len);
	if (ret)
		goto out_free_cipher;

	/*
	 * Clear the hash sub key container to zero: ciphering all zeros
	 * creates the hash sub key.
	 */
	memset(hash_subkey, 0, RFC4106_HASH_SUBKEY_SIZE);

	crypto_cipher_encrypt_one(tfm, hash_subkey, hash_subkey);

out_free_cipher:
	crypto_free_cipher(tfm);
	return ret;
}

static int common_rfc4106_set_key(struct crypto_aead *aead, const u8 *key,
				  unsigned int key_len)
{
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(aead);

	if (key_len < 4) {
		crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	/* Account for the 4-byte nonce at the end. */
	key_len -= 4;

	memcpy(ctx->nonce, key + key_len, sizeof(ctx->nonce));

	return aes_set_key_common(crypto_aead_tfm(aead),
				  &ctx->aes_key_expanded, key, key_len) ?:
	       rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
}

static int gcmaes_wrapper_set_key(struct crypto_aead *parent, const u8 *key,
				  unsigned int key_len)
{
	struct cryptd_aead **ctx = crypto_aead_ctx(parent);
	struct cryptd_aead *cryptd_tfm = *ctx;

	return crypto_aead_setkey(&cryptd_tfm->base, key, key_len);
}

static int common_rfc4106_set_authsize(struct crypto_aead *aead,
				       unsigned int authsize)
{
	switch (authsize) {
	case 8:
	case 12:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

/* This is the Integrity Check Value (aka the authentication tag) length
 * and can be 8, 12 or 16 bytes long. */
static int gcmaes_wrapper_set_authsize(struct crypto_aead *parent,
				       unsigned int authsize)
{
	struct cryptd_aead **ctx = crypto_aead_ctx(parent);
	struct cryptd_aead *cryptd_tfm = *ctx;

	return crypto_aead_setauthsize(&cryptd_tfm->base, authsize);
}

static int generic_gcmaes_set_authsize(struct crypto_aead *tfm,
				       unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 8:
	case 12:
	case 13:
	case 14:
	case 15:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

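/*
 * Core scatter/gather GCM path shared by RFC4106 and plain gcm(aes):
 * linearize the AAD (mapped directly when it already sits in one lowmem
 * segment, otherwise bounced through a GFP_ATOMIC heap buffer), stream
 * the payload through the init/update/finalize interface under a single
 * FPU section, then write out the tag (encrypt) or compare it in
 * constant time with crypto_memneq() (decrypt).
 */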
static int gcmaes_crypt_by_sg(bool enc, struct aead_request *req,
			      unsigned int assoclen, u8 *hash_subkey,
			      u8 *iv, void *aes_ctx)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	unsigned long auth_tag_len = crypto_aead_authsize(tfm);
	const struct aesni_gcm_tfm_s *gcm_tfm = aesni_gcm_tfm;
	struct gcm_context_data data AESNI_ALIGN_ATTR;
	struct scatter_walk dst_sg_walk = {};
	unsigned long left = req->cryptlen;
	unsigned long len, srclen, dstlen;
	struct scatter_walk assoc_sg_walk;
	struct scatter_walk src_sg_walk;
	struct scatterlist src_start[2];
	struct scatterlist dst_start[2];
	struct scatterlist *src_sg;
	struct scatterlist *dst_sg;
	u8 *src, *dst, *assoc;
	u8 *assocmem = NULL;
	u8 authTag[16];

	if (!enc)
		left -= auth_tag_len;

#ifdef CONFIG_AS_AVX2
	if (left < AVX_GEN4_OPTSIZE && gcm_tfm == &aesni_gcm_tfm_avx_gen4)
		gcm_tfm = &aesni_gcm_tfm_avx_gen2;
#endif
#ifdef CONFIG_AS_AVX
	if (left < AVX_GEN2_OPTSIZE && gcm_tfm == &aesni_gcm_tfm_avx_gen2)
		gcm_tfm = &aesni_gcm_tfm_sse;
#endif

	/* Linearize assoc, if not already linear */
	if (req->src->length >= assoclen && req->src->length &&
	    (!PageHighMem(sg_page(req->src)) ||
	     req->src->offset + req->src->length <= PAGE_SIZE)) {
		scatterwalk_start(&assoc_sg_walk, req->src);
		assoc = scatterwalk_map(&assoc_sg_walk);
	} else {
		/* assoc can be any length, so must be on heap */
		assocmem = kmalloc(assoclen, GFP_ATOMIC);
		if (unlikely(!assocmem))
			return -ENOMEM;
		assoc = assocmem;

		scatterwalk_map_and_copy(assoc, req->src, 0, assoclen, 0);
	}

	if (left) {
		src_sg = scatterwalk_ffwd(src_start, req->src, req->assoclen);
		scatterwalk_start(&src_sg_walk, src_sg);
		if (req->src != req->dst) {
			dst_sg = scatterwalk_ffwd(dst_start, req->dst,
						  req->assoclen);
			scatterwalk_start(&dst_sg_walk, dst_sg);
		}
	}

	kernel_fpu_begin();
	gcm_tfm->init(aes_ctx, &data, iv,
		      hash_subkey, assoc, assoclen);
	if (req->src != req->dst) {
		while (left) {
			src = scatterwalk_map(&src_sg_walk);
			dst = scatterwalk_map(&dst_sg_walk);
			srclen = scatterwalk_clamp(&src_sg_walk, left);
			dstlen = scatterwalk_clamp(&dst_sg_walk, left);
			len = min(srclen, dstlen);
			if (len) {
				if (enc)
					gcm_tfm->enc_update(aes_ctx, &data,
							    dst, src, len);
				else
					gcm_tfm->dec_update(aes_ctx, &data,
							    dst, src, len);
			}
			left -= len;

			scatterwalk_unmap(src);
			scatterwalk_unmap(dst);
			scatterwalk_advance(&src_sg_walk, len);
			scatterwalk_advance(&dst_sg_walk, len);
			scatterwalk_done(&src_sg_walk, 0, left);
			scatterwalk_done(&dst_sg_walk, 1, left);
		}
	} else {
		while (left) {
			dst = src = scatterwalk_map(&src_sg_walk);
			len = scatterwalk_clamp(&src_sg_walk, left);
			if (len) {
				if (enc)
					gcm_tfm->enc_update(aes_ctx, &data,
							    src, src, len);
				else
					gcm_tfm->dec_update(aes_ctx, &data,
							    src, src, len);
			}
			left -= len;
			scatterwalk_unmap(src);
			scatterwalk_advance(&src_sg_walk, len);
			scatterwalk_done(&src_sg_walk, 1, left);
		}
	}
	gcm_tfm->finalize(aes_ctx, &data, authTag, auth_tag_len);
	kernel_fpu_end();

	if (!assocmem)
		scatterwalk_unmap(assoc);
	else
		kfree(assocmem);

	if (!enc) {
		u8 authTagMsg[16];

		/* Copy out original authTag */
		scatterwalk_map_and_copy(authTagMsg, req->src,
					 req->assoclen + req->cryptlen -
					 auth_tag_len,
					 auth_tag_len, 0);

		/* Compare generated tag with passed in tag. */
		return crypto_memneq(authTagMsg, authTag, auth_tag_len) ?
			-EBADMSG : 0;
	}

	/* Copy in the authTag */
	scatterwalk_map_and_copy(authTag, req->dst,
				 req->assoclen + req->cryptlen,
				 auth_tag_len, 1);

	return 0;
}

static int gcmaes_encrypt(struct aead_request *req, unsigned int assoclen,
			  u8 *hash_subkey, u8 *iv, void *aes_ctx)
{
	return gcmaes_crypt_by_sg(true, req, assoclen, hash_subkey, iv,
				  aes_ctx);
}

static int gcmaes_decrypt(struct aead_request *req, unsigned int assoclen,
			  u8 *hash_subkey, u8 *iv, void *aes_ctx)
{
	return gcmaes_crypt_by_sg(false, req, assoclen, hash_subkey, iv,
				  aes_ctx);
}

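/*
 * RFC4106 builds the 16-byte pre-counter block j0 as:
 *   iv[0..3]   salt ("nonce") captured at setkey time,
 *   iv[4..11]  the 8-byte per-request explicit IV,
 *   iv[12..15] a 32-bit big-endian block counter initialised to 1.
 */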
static int helper_rfc4106_encrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
	unsigned int i;
	__be32 counter = cpu_to_be32(1);

	/*
	 * Assuming we are supporting rfc4106 64-bit extended sequence
	 * numbers, the AAD length needs to equal 16 or 20 bytes.
	 */
	if (unlikely(req->assoclen != 16 && req->assoclen != 20))
		return -EINVAL;

	/* Build the IV (see layout above). */
	for (i = 0; i < 4; i++)
		*(iv+i) = ctx->nonce[i];
	for (i = 0; i < 8; i++)
		*(iv+4+i) = req->iv[i];
	*((__be32 *)(iv+12)) = counter;

	return gcmaes_encrypt(req, req->assoclen - 8, ctx->hash_subkey, iv,
			      aes_ctx);
}

static int helper_rfc4106_decrypt(struct aead_request *req)
{
	__be32 counter = cpu_to_be32(1);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
	unsigned int i;

	/*
	 * Assuming we are supporting rfc4106 64-bit extended sequence
	 * numbers, the AAD length needs to equal 16 or 20 bytes.
	 */
	if (unlikely(req->assoclen != 16 && req->assoclen != 20))
		return -EINVAL;

	/* Build the IV (see layout above). */
	for (i = 0; i < 4; i++)
		*(iv+i) = ctx->nonce[i];
	for (i = 0; i < 8; i++)
		*(iv+4+i) = req->iv[i];
	*((__be32 *)(iv+12)) = counter;

	return gcmaes_decrypt(req, req->assoclen - 8, ctx->hash_subkey, iv,
			      aes_ctx);
}

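/*
 * The cryptd wrappers below run the request directly on the cryptd
 * child when the FPU is usable and either we are not in atomic context
 * or nothing is already queued in cryptd (to preserve request
 * ordering); otherwise the request goes through the cryptd async path.
 */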
static int gcmaes_wrapper_encrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct cryptd_aead **ctx = crypto_aead_ctx(tfm);
	struct cryptd_aead *cryptd_tfm = *ctx;

	tfm = &cryptd_tfm->base;
	if (irq_fpu_usable() && (!in_atomic() ||
				 !cryptd_aead_queued(cryptd_tfm)))
		tfm = cryptd_aead_child(cryptd_tfm);

	aead_request_set_tfm(req, tfm);

	return crypto_aead_encrypt(req);
}

static int gcmaes_wrapper_decrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct cryptd_aead **ctx = crypto_aead_ctx(tfm);
	struct cryptd_aead *cryptd_tfm = *ctx;

	tfm = &cryptd_tfm->base;
	if (irq_fpu_usable() && (!in_atomic() ||
				 !cryptd_aead_queued(cryptd_tfm)))
		tfm = cryptd_aead_child(cryptd_tfm);

	aead_request_set_tfm(req, tfm);

	return crypto_aead_decrypt(req);
}
#endif

static struct crypto_alg aesni_algs[] = { {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-aesni",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
	.cra_module		= THIS_MODULE,
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= aes_encrypt,
			.cia_decrypt		= aes_decrypt
		}
	}
}, {
	.cra_name		= "__aes",
	.cra_driver_name	= "__aes-aesni",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER | CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
	.cra_module		= THIS_MODULE,
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= __aes_encrypt,
			.cia_decrypt		= __aes_decrypt
		}
	}
} };

static struct skcipher_alg aesni_skciphers[] = {
	{
		.base = {
			.cra_name		= "__ecb(aes)",
			.cra_driver_name	= "__ecb-aes-aesni",
			.cra_priority		= 400,
			.cra_flags		= CRYPTO_ALG_INTERNAL,
			.cra_blocksize		= AES_BLOCK_SIZE,
			.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.setkey		= aesni_skcipher_setkey,
		.encrypt	= ecb_encrypt,
		.decrypt	= ecb_decrypt,
	}, {
		.base = {
			.cra_name		= "__cbc(aes)",
			.cra_driver_name	= "__cbc-aes-aesni",
			.cra_priority		= 400,
			.cra_flags		= CRYPTO_ALG_INTERNAL,
			.cra_blocksize		= AES_BLOCK_SIZE,
			.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= aesni_skcipher_setkey,
		.encrypt	= cbc_encrypt,
		.decrypt	= cbc_decrypt,
#ifdef CONFIG_X86_64
	}, {
		.base = {
			.cra_name		= "__ctr(aes)",
			.cra_driver_name	= "__ctr-aes-aesni",
			.cra_priority		= 400,
			.cra_flags		= CRYPTO_ALG_INTERNAL,
			.cra_blocksize		= 1,
			.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.chunksize	= AES_BLOCK_SIZE,
		.setkey		= aesni_skcipher_setkey,
		.encrypt	= ctr_crypt,
		.decrypt	= ctr_crypt,
	}, {
		.base = {
			.cra_name		= "__xts(aes)",
			.cra_driver_name	= "__xts-aes-aesni",
			.cra_priority		= 401,
			.cra_flags		= CRYPTO_ALG_INTERNAL,
			.cra_blocksize		= AES_BLOCK_SIZE,
			.cra_ctxsize		= XTS_AES_CTX_SIZE,
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= 2 * AES_MIN_KEY_SIZE,
		.max_keysize	= 2 * AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= xts_aesni_setkey,
		.encrypt	= xts_encrypt,
		.decrypt	= xts_decrypt,
#endif
	}
};

static
struct simd_skcipher_alg *aesni_simd_skciphers[ARRAY_SIZE(aesni_skciphers)];

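/*
 * Filled in by simd_register_skciphers_compat(): one SIMD wrapper per
 * internal "__..." skcipher above; the wrappers are the async
 * algorithms (e.g. "ctr(aes)", "xts(aes)") that users actually
 * allocate.
 */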
#ifdef CONFIG_X86_64
static int generic_gcmaes_set_key(struct crypto_aead *aead, const u8 *key,
				  unsigned int key_len)
{
	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(aead);

	return aes_set_key_common(crypto_aead_tfm(aead),
				  &ctx->aes_key_expanded, key, key_len) ?:
	       rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
}

static int generic_gcmaes_encrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
	__be32 counter = cpu_to_be32(1);

	memcpy(iv, req->iv, 12);
	*((__be32 *)(iv+12)) = counter;

	return gcmaes_encrypt(req, req->assoclen, ctx->hash_subkey, iv,
			      aes_ctx);
}

static int generic_gcmaes_decrypt(struct aead_request *req)
{
	__be32 counter = cpu_to_be32(1);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));

	memcpy(iv, req->iv, 12);
	*((__be32 *)(iv+12)) = counter;

	return gcmaes_decrypt(req, req->assoclen, ctx->hash_subkey, iv,
			      aes_ctx);
}

static int generic_gcmaes_init(struct crypto_aead *aead)
{
	struct cryptd_aead *cryptd_tfm;
	struct cryptd_aead **ctx = crypto_aead_ctx(aead);

	cryptd_tfm = cryptd_alloc_aead("__driver-generic-gcm-aes-aesni",
				       CRYPTO_ALG_INTERNAL,
				       CRYPTO_ALG_INTERNAL);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);

	*ctx = cryptd_tfm;
	crypto_aead_set_reqsize(aead, crypto_aead_reqsize(&cryptd_tfm->base));

	return 0;
}

static void generic_gcmaes_exit(struct crypto_aead *aead)
{
	struct cryptd_aead **ctx = crypto_aead_ctx(aead);

	cryptd_free_aead(*ctx);
}

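/*
 * Two AEAD pairs are registered below: an internal "__..." helper that
 * does the real work plus a visible cryptd-backed wrapper, for
 * rfc4106(gcm(aes)) and for plain gcm(aes). A usage sketch (illustrative
 * only, not part of this file): a consumer allocates the exposed name,
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
 *
 * and the crypto core can resolve it to "generic-gcm-aesni" below.
 */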
static struct aead_alg aesni_aead_algs[] = { {
	.setkey			= common_rfc4106_set_key,
	.setauthsize		= common_rfc4106_set_authsize,
	.encrypt		= helper_rfc4106_encrypt,
	.decrypt		= helper_rfc4106_decrypt,
	.ivsize			= GCM_RFC4106_IV_SIZE,
	.maxauthsize		= 16,
	.base = {
		.cra_name		= "__gcm-aes-aesni",
		.cra_driver_name	= "__driver-gcm-aes-aesni",
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct aesni_rfc4106_gcm_ctx),
		.cra_alignmask		= AESNI_ALIGN - 1,
		.cra_module		= THIS_MODULE,
	},
}, {
	.init			= rfc4106_init,
	.exit			= rfc4106_exit,
	.setkey			= gcmaes_wrapper_set_key,
	.setauthsize		= gcmaes_wrapper_set_authsize,
	.encrypt		= gcmaes_wrapper_encrypt,
	.decrypt		= gcmaes_wrapper_decrypt,
	.ivsize			= GCM_RFC4106_IV_SIZE,
	.maxauthsize		= 16,
	.base = {
		.cra_name		= "rfc4106(gcm(aes))",
		.cra_driver_name	= "rfc4106-gcm-aesni",
		.cra_priority		= 400,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct cryptd_aead *),
		.cra_module		= THIS_MODULE,
	},
}, {
	.setkey			= generic_gcmaes_set_key,
	.setauthsize		= generic_gcmaes_set_authsize,
	.encrypt		= generic_gcmaes_encrypt,
	.decrypt		= generic_gcmaes_decrypt,
	.ivsize			= GCM_AES_IV_SIZE,
	.maxauthsize		= 16,
	.base = {
		.cra_name		= "__generic-gcm-aes-aesni",
		.cra_driver_name	= "__driver-generic-gcm-aes-aesni",
		.cra_priority		= 0,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct generic_gcmaes_ctx),
		.cra_alignmask		= AESNI_ALIGN - 1,
		.cra_module		= THIS_MODULE,
	},
}, {
	.init			= generic_gcmaes_init,
	.exit			= generic_gcmaes_exit,
	.setkey			= gcmaes_wrapper_set_key,
	.setauthsize		= gcmaes_wrapper_set_authsize,
	.encrypt		= gcmaes_wrapper_encrypt,
	.decrypt		= gcmaes_wrapper_decrypt,
	.ivsize			= GCM_AES_IV_SIZE,
	.maxauthsize		= 16,
	.base = {
		.cra_name		= "gcm(aes)",
		.cra_driver_name	= "generic-gcm-aesni",
		.cra_priority		= 400,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct cryptd_aead *),
		.cra_module		= THIS_MODULE,
	},
} };
#else
static struct aead_alg aesni_aead_algs[0];
#endif

static const struct x86_cpu_id aesni_cpu_id[] = {
	X86_FEATURE_MATCH(X86_FEATURE_AES),
	{}
};
MODULE_DEVICE_TABLE(x86cpu, aesni_cpu_id);

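/*
 * Module init: pick the best GCM and CTR implementations for this CPU,
 * then register the bare ciphers, the SIMD skciphers and the AEADs,
 * unwinding in reverse order on failure (aesni_exit() mirrors this).
 */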
static int __init aesni_init(void)
{
	int err;

	if (!x86_match_cpu(aesni_cpu_id))
		return -ENODEV;
#ifdef CONFIG_X86_64
#ifdef CONFIG_AS_AVX2
	if (boot_cpu_has(X86_FEATURE_AVX2)) {
		pr_info("AVX2 version of gcm_enc/dec engaged.\n");
		aesni_gcm_tfm = &aesni_gcm_tfm_avx_gen4;
	} else
#endif
#ifdef CONFIG_AS_AVX
	if (boot_cpu_has(X86_FEATURE_AVX)) {
		pr_info("AVX version of gcm_enc/dec engaged.\n");
		aesni_gcm_tfm = &aesni_gcm_tfm_avx_gen2;
	} else
#endif
	{
		pr_info("SSE version of gcm_enc/dec engaged.\n");
		aesni_gcm_tfm = &aesni_gcm_tfm_sse;
	}
	aesni_ctr_enc_tfm = aesni_ctr_enc;
#ifdef CONFIG_AS_AVX
	if (boot_cpu_has(X86_FEATURE_AVX)) {
		/* optimize performance of ctr mode encryption transform */
		aesni_ctr_enc_tfm = aesni_ctr_enc_avx_tfm;
		pr_info("AES CTR mode by8 optimization enabled\n");
	}
#endif
#endif

	err = crypto_register_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
	if (err)
		return err;

	err = simd_register_skciphers_compat(aesni_skciphers,
					     ARRAY_SIZE(aesni_skciphers),
					     aesni_simd_skciphers);
	if (err)
		goto unregister_algs;

	err = crypto_register_aeads(aesni_aead_algs,
				    ARRAY_SIZE(aesni_aead_algs));
	if (err)
		goto unregister_skciphers;

	return 0;

unregister_skciphers:
	simd_unregister_skciphers(aesni_skciphers, ARRAY_SIZE(aesni_skciphers),
				  aesni_simd_skciphers);
unregister_algs:
	crypto_unregister_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
	return err;
}

static void __exit aesni_exit(void)
{
	crypto_unregister_aeads(aesni_aead_algs, ARRAY_SIZE(aesni_aead_algs));
	simd_unregister_skciphers(aesni_skciphers, ARRAY_SIZE(aesni_skciphers),
				  aesni_simd_skciphers);
	crypto_unregister_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
}

late_initcall(aesni_init);
module_exit(aesni_exit);

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, Intel AES-NI instructions optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS_CRYPTO("aes");