/*
 * Support for Intel AES-NI instructions. This file contains glue
 * code, the real AES implementation is in intel-aes_asm.S.
 *
 * Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 *
 * Added RFC4106 AES-GCM support for 128-bit keys under the AEAD
 * interface for 64-bit kernels.
 *    Authors: Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */

#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/module.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/cryptd.h>
#include <crypto/ctr.h>
#include <crypto/b128ops.h>
#include <crypto/gcm.h>
#include <crypto/xts.h>
#include <asm/cpu_device_id.h>
#include <asm/fpu/api.h>
#include <asm/crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/workqueue.h>
#include <linux/spinlock.h>
#ifdef CONFIG_X86_64
#include <asm/crypto/glue_helper.h>
#endif

#define AESNI_ALIGN	16
#define AESNI_ALIGN_ATTR __attribute__ ((__aligned__(AESNI_ALIGN)))
#define AES_BLOCK_MASK	(~(AES_BLOCK_SIZE - 1))
#define RFC4106_HASH_SUBKEY_SIZE 16
#define AESNI_ALIGN_EXTRA ((AESNI_ALIGN - 1) & ~(CRYPTO_MINALIGN - 1))
#define CRYPTO_AES_CTX_SIZE (sizeof(struct crypto_aes_ctx) + AESNI_ALIGN_EXTRA)
#define XTS_AES_CTX_SIZE (sizeof(struct aesni_xts_ctx) + AESNI_ALIGN_EXTRA)
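
/*
 * The *_CTX_SIZE values reserve AESNI_ALIGN_EXTRA bytes of slack beyond the
 * context structure itself: the crypto API only guarantees CRYPTO_MINALIGN,
 * so aes_ctx() below rounds the raw context pointer up to a 16-byte
 * boundary inside that slack before handing it to the asm routines.
 */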

/* This data is stored at the end of the crypto_tfm struct.
 * It's a type of per "session" data storage location.
 * This needs to be 16 byte aligned.
 */
struct aesni_rfc4106_gcm_ctx {
	u8 hash_subkey[16] AESNI_ALIGN_ATTR;
	struct crypto_aes_ctx aes_key_expanded AESNI_ALIGN_ATTR;
	u8 nonce[4];
};

struct generic_gcmaes_ctx {
	u8 hash_subkey[16] AESNI_ALIGN_ATTR;
	struct crypto_aes_ctx aes_key_expanded AESNI_ALIGN_ATTR;
};

struct aesni_xts_ctx {
	u8 raw_tweak_ctx[sizeof(struct crypto_aes_ctx)] AESNI_ALIGN_ATTR;
	u8 raw_crypt_ctx[sizeof(struct crypto_aes_ctx)] AESNI_ALIGN_ATTR;
};

#define GCM_BLOCK_LEN 16

struct gcm_context_data {
	/* init, update and finalize context data */
	u8 aad_hash[GCM_BLOCK_LEN];
	u64 aad_length;
	u64 in_length;
	u8 partial_block_enc_key[GCM_BLOCK_LEN];
	u8 orig_IV[GCM_BLOCK_LEN];
	u8 current_counter[GCM_BLOCK_LEN];
	u64 partial_block_len;
	u64 unused;
	u8 hash_keys[GCM_BLOCK_LEN * 8];
};
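
/*
 * This state is passed straight into the GCM assembler routines, which
 * presumably address it by fixed byte offsets; it should not be rearranged
 * without checking the corresponding .S files.
 */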

asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
			     unsigned int key_len);
asmlinkage void aesni_enc(struct crypto_aes_ctx *ctx, u8 *out,
			  const u8 *in);
asmlinkage void aesni_dec(struct crypto_aes_ctx *ctx, u8 *out,
			  const u8 *in);
asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_ecb_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_cbc_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);

int crypto_fpu_init(void);
void crypto_fpu_exit(void);

#define AVX_GEN2_OPTSIZE 640
#define AVX_GEN4_OPTSIZE 4096

#ifdef CONFIG_X86_64

static void (*aesni_ctr_enc_tfm)(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_ctr_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);

asmlinkage void aesni_xts_crypt8(struct crypto_aes_ctx *ctx, u8 *out,
				 const u8 *in, bool enc, u8 *iv);

/* asmlinkage void aesni_gcm_enc()
 * void *ctx, AES Key schedule. Starts on a 16 byte boundary.
 * struct gcm_context_data. May be uninitialized.
 * u8 *out, Ciphertext output. Encrypt in-place is allowed.
 * const u8 *in, Plaintext input
 * unsigned long plaintext_len, Length of data in bytes for encryption.
 * u8 *iv, Pre-counter block j0: 12 byte IV concatenated with 0x00000001.
 *         16-byte aligned pointer.
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 * const u8 *aad, Additional Authentication Data (AAD)
 * unsigned long aad_len, Length of AAD in bytes.
 * u8 *auth_tag, Authenticated Tag output.
 * unsigned long auth_tag_len), Authenticated Tag Length in bytes.
 *         Valid values are 16 (most likely), 12 or 8.
 */
asmlinkage void aesni_gcm_enc(void *ctx,
			struct gcm_context_data *gdata, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

/* asmlinkage void aesni_gcm_dec()
 * void *ctx, AES Key schedule. Starts on a 16 byte boundary.
 * struct gcm_context_data. May be uninitialized.
 * u8 *out, Plaintext output. Decrypt in-place is allowed.
 * const u8 *in, Ciphertext input
 * unsigned long ciphertext_len, Length of data in bytes for decryption.
 * u8 *iv, Pre-counter block j0: 12 byte IV concatenated with 0x00000001.
 *         16-byte aligned pointer.
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 * const u8 *aad, Additional Authentication Data (AAD)
 * unsigned long aad_len, Length of AAD in bytes. With RFC4106 this is going
 *         to be 8 or 12 bytes
 * u8 *auth_tag, Authenticated Tag output.
 * unsigned long auth_tag_len) Authenticated Tag Length in bytes.
 *         Valid values are 16 (most likely), 12 or 8.
 */
asmlinkage void aesni_gcm_dec(void *ctx,
			struct gcm_context_data *gdata, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

/* Scatter / Gather routines, with args similar to above */
asmlinkage void aesni_gcm_init(void *ctx,
			       struct gcm_context_data *gdata,
			       u8 *iv,
			       u8 *hash_subkey, const u8 *aad,
			       unsigned long aad_len);
asmlinkage void aesni_gcm_enc_update(void *ctx,
				     struct gcm_context_data *gdata, u8 *out,
				     const u8 *in, unsigned long plaintext_len);
asmlinkage void aesni_gcm_dec_update(void *ctx,
				     struct gcm_context_data *gdata, u8 *out,
				     const u8 *in,
				     unsigned long ciphertext_len);
asmlinkage void aesni_gcm_finalize(void *ctx,
				   struct gcm_context_data *gdata,
				   u8 *auth_tag, unsigned long auth_tag_len);

#ifdef CONFIG_AS_AVX
asmlinkage void aes_ctr_enc_128_avx_by8(const u8 *in, u8 *iv,
		void *keys, u8 *out, unsigned int num_bytes);
asmlinkage void aes_ctr_enc_192_avx_by8(const u8 *in, u8 *iv,
		void *keys, u8 *out, unsigned int num_bytes);
asmlinkage void aes_ctr_enc_256_avx_by8(const u8 *in, u8 *iv,
		void *keys, u8 *out, unsigned int num_bytes);
/*
 * asmlinkage void aesni_gcm_precomp_avx_gen2()
 * gcm_data *my_ctx_data, context data
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 */
asmlinkage void aesni_gcm_precomp_avx_gen2(void *my_ctx_data, u8 *hash_subkey);

asmlinkage void aesni_gcm_enc_avx_gen2(void *ctx, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

asmlinkage void aesni_gcm_dec_avx_gen2(void *ctx, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

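/*
 * The AVX gen2 path only pays off for 128-bit keys and payloads of at
 * least AVX_GEN2_OPTSIZE bytes; anything smaller falls back to the SSE
 * routine, which also handles 192- and 256-bit keys.
 */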
static void aesni_gcm_enc_avx(void *ctx,
			struct gcm_context_data *data, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len)
{
	struct crypto_aes_ctx *aes_ctx = (struct crypto_aes_ctx *)ctx;

	if ((plaintext_len < AVX_GEN2_OPTSIZE) ||
	    (aes_ctx->key_length != AES_KEYSIZE_128)) {
		aesni_gcm_enc(ctx, data, out, in,
			      plaintext_len, iv, hash_subkey, aad,
			      aad_len, auth_tag, auth_tag_len);
	} else {
		aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
		aesni_gcm_enc_avx_gen2(ctx, out, in, plaintext_len, iv, aad,
				       aad_len, auth_tag, auth_tag_len);
	}
}

static void aesni_gcm_dec_avx(void *ctx,
			struct gcm_context_data *data, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len)
{
	struct crypto_aes_ctx *aes_ctx = (struct crypto_aes_ctx *)ctx;

	if ((ciphertext_len < AVX_GEN2_OPTSIZE) ||
	    (aes_ctx->key_length != AES_KEYSIZE_128)) {
		aesni_gcm_dec(ctx, data, out, in,
			      ciphertext_len, iv, hash_subkey, aad,
			      aad_len, auth_tag, auth_tag_len);
	} else {
		aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
		aesni_gcm_dec_avx_gen2(ctx, out, in, ciphertext_len, iv, aad,
				       aad_len, auth_tag, auth_tag_len);
	}
}
#endif

#ifdef CONFIG_AS_AVX2
/*
 * asmlinkage void aesni_gcm_precomp_avx_gen4()
 * gcm_data *my_ctx_data, context data
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 */
asmlinkage void aesni_gcm_precomp_avx_gen4(void *my_ctx_data, u8 *hash_subkey);

asmlinkage void aesni_gcm_enc_avx_gen4(void *ctx, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

asmlinkage void aesni_gcm_dec_avx_gen4(void *ctx, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

static void aesni_gcm_enc_avx2(void *ctx,
			struct gcm_context_data *data, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len)
{
	struct crypto_aes_ctx *aes_ctx = (struct crypto_aes_ctx *)ctx;

	if ((plaintext_len < AVX_GEN2_OPTSIZE) ||
	    (aes_ctx->key_length != AES_KEYSIZE_128)) {
		aesni_gcm_enc(ctx, data, out, in,
			      plaintext_len, iv, hash_subkey, aad,
			      aad_len, auth_tag, auth_tag_len);
	} else if (plaintext_len < AVX_GEN4_OPTSIZE) {
		aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
		aesni_gcm_enc_avx_gen2(ctx, out, in, plaintext_len, iv, aad,
				       aad_len, auth_tag, auth_tag_len);
	} else {
		aesni_gcm_precomp_avx_gen4(ctx, hash_subkey);
		aesni_gcm_enc_avx_gen4(ctx, out, in, plaintext_len, iv, aad,
				       aad_len, auth_tag, auth_tag_len);
	}
}

static void aesni_gcm_dec_avx2(void *ctx,
			struct gcm_context_data *data, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len)
{
	struct crypto_aes_ctx *aes_ctx = (struct crypto_aes_ctx *)ctx;

	if ((ciphertext_len < AVX_GEN2_OPTSIZE) ||
	    (aes_ctx->key_length != AES_KEYSIZE_128)) {
		aesni_gcm_dec(ctx, data, out, in,
			      ciphertext_len, iv, hash_subkey,
			      aad, aad_len, auth_tag, auth_tag_len);
	} else if (ciphertext_len < AVX_GEN4_OPTSIZE) {
		aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
		aesni_gcm_dec_avx_gen2(ctx, out, in, ciphertext_len, iv, aad,
				       aad_len, auth_tag, auth_tag_len);
	} else {
		aesni_gcm_precomp_avx_gen4(ctx, hash_subkey);
		aesni_gcm_dec_avx_gen4(ctx, out, in, ciphertext_len, iv, aad,
				       aad_len, auth_tag, auth_tag_len);
	}
}
#endif

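/*
 * The GCM enc/dec entry points below are bound once at module init, after
 * probing for AVX/AVX2, so per-request dispatch is a single indirect call.
 */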
static void (*aesni_gcm_enc_tfm)(void *ctx,
				 struct gcm_context_data *data, u8 *out,
				 const u8 *in, unsigned long plaintext_len,
				 u8 *iv, u8 *hash_subkey, const u8 *aad,
				 unsigned long aad_len, u8 *auth_tag,
				 unsigned long auth_tag_len);

static void (*aesni_gcm_dec_tfm)(void *ctx,
				 struct gcm_context_data *data, u8 *out,
				 const u8 *in, unsigned long ciphertext_len,
				 u8 *iv, u8 *hash_subkey, const u8 *aad,
				 unsigned long aad_len, u8 *auth_tag,
				 unsigned long auth_tag_len);

static inline struct
aesni_rfc4106_gcm_ctx *aesni_rfc4106_gcm_ctx_get(struct crypto_aead *tfm)
{
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return PTR_ALIGN(crypto_aead_ctx(tfm), align);
}

static inline struct
generic_gcmaes_ctx *generic_gcmaes_ctx_get(struct crypto_aead *tfm)
{
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return PTR_ALIGN(crypto_aead_ctx(tfm), align);
}
#endif

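/*
 * Round the raw tfm context pointer up to AESNI_ALIGN. When the crypto
 * API's own context alignment already satisfies 16 bytes, the pointer is
 * returned unchanged (align = 1).
 */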
static inline struct crypto_aes_ctx *aes_ctx(void *raw_ctx)
{
	unsigned long addr = (unsigned long)raw_ctx;
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return (struct crypto_aes_ctx *)ALIGN(addr, align);
}

static int aes_set_key_common(struct crypto_tfm *tfm, void *raw_ctx,
			      const u8 *in_key, unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = aes_ctx(raw_ctx);
	u32 *flags = &tfm->crt_flags;
	int err;

	if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
	    key_len != AES_KEYSIZE_256) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	if (!irq_fpu_usable())
		err = crypto_aes_expand_key(ctx, in_key, key_len);
	else {
		kernel_fpu_begin();
		err = aesni_set_key(ctx, in_key, key_len);
		kernel_fpu_end();
	}

	return err;
}

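/*
 * The single-block cipher paths below check irq_fpu_usable(): in contexts
 * where the FPU cannot be touched (e.g. hard IRQ), they fall back to the
 * generic C implementation instead of the AES-NI instructions.
 */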
static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	return aes_set_key_common(tfm, crypto_tfm_ctx(tfm), in_key, key_len);
}

static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (!irq_fpu_usable())
		crypto_aes_encrypt_x86(ctx, dst, src);
	else {
		kernel_fpu_begin();
		aesni_enc(ctx, dst, src);
		kernel_fpu_end();
	}
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (!irq_fpu_usable())
		crypto_aes_decrypt_x86(ctx, dst, src);
	else {
		kernel_fpu_begin();
		aesni_dec(ctx, dst, src);
		kernel_fpu_end();
	}
}

static void __aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	aesni_enc(ctx, dst, src);
}

static void __aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	aesni_dec(ctx, dst, src);
}

static int aesni_skcipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
				 unsigned int len)
{
	return aes_set_key_common(crypto_skcipher_tfm(tfm),
				  crypto_skcipher_ctx(tfm), key, len);
}

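/*
 * The skcipher handlers below all follow the same shape: walk the request's
 * scatterlists in virtually-mapped chunks, hand each whole-block run to the
 * asm routine, and report the tail bytes back via skcipher_walk_done().
 */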
static int ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

static int ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

static int cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

static int cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

#ifdef CONFIG_X86_64
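/*
 * CTR tail handling: encrypt the current counter block once and XOR the
 * resulting keystream with the final, sub-block-sized remainder.
 */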
static void ctr_crypt_final(struct crypto_aes_ctx *ctx,
			    struct skcipher_walk *walk)
{
	u8 *ctrblk = walk->iv;
	u8 keystream[AES_BLOCK_SIZE];
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	aesni_enc(ctx, keystream, ctrblk);
	crypto_xor_cpy(dst, keystream, src, nbytes);

	crypto_inc(ctrblk, AES_BLOCK_SIZE);
}

#ifdef CONFIG_AS_AVX
static void aesni_ctr_enc_avx_tfm(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv)
{
	/*
	 * based on key length, override with the by8 version
	 * of ctr mode encryption/decryption for improved performance
	 * aes_set_key_common() ensures that key length is one of
	 * {128,192,256}
	 */
	if (ctx->key_length == AES_KEYSIZE_128)
		aes_ctr_enc_128_avx_by8(in, iv, (void *)ctx, out, len);
	else if (ctx->key_length == AES_KEYSIZE_192)
		aes_ctr_enc_192_avx_by8(in, iv, (void *)ctx, out, len);
	else
		aes_ctr_enc_256_avx_by8(in, iv, (void *)ctx, out, len);
}
#endif

static int ctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
		aesni_ctr_enc_tfm(ctx, walk.dst.virt.addr, walk.src.virt.addr,
				  nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	if (walk.nbytes) {
		ctr_crypt_final(ctx, &walk);
		err = skcipher_walk_done(&walk, 0);
	}
	kernel_fpu_end();

	return err;
}

static int xts_aesni_setkey(struct crypto_skcipher *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err;

	err = xts_verify_key(tfm, key, keylen);
	if (err)
		return err;

	keylen /= 2;

	/* first half of xts-key is for crypt */
	err = aes_set_key_common(crypto_skcipher_tfm(tfm), ctx->raw_crypt_ctx,
				 key, keylen);
	if (err)
		return err;

	/* second half of xts-key is for tweak */
	return aes_set_key_common(crypto_skcipher_tfm(tfm), ctx->raw_tweak_ctx,
				  key + keylen, keylen);
}

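/*
 * Thin adapters so the generic XTS glue code, which works on u128/le128
 * blocks, can call into the AES-NI single-block and 8-block routines.
 */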
static void aesni_xts_tweak(void *ctx, u8 *out, const u8 *in)
{
	aesni_enc(ctx, out, in);
}

static void aesni_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv, GLUE_FUNC_CAST(aesni_enc));
}

static void aesni_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv, GLUE_FUNC_CAST(aesni_dec));
}

static void aesni_xts_enc8(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	aesni_xts_crypt8(ctx, (u8 *)dst, (const u8 *)src, true, (u8 *)iv);
}

static void aesni_xts_dec8(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	aesni_xts_crypt8(ctx, (u8 *)dst, (const u8 *)src, false, (u8 *)iv);
}

static const struct common_glue_ctx aesni_enc_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = 1,

	.funcs = { {
		.num_blocks = 8,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_enc8) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_enc) }
	} }
};

static const struct common_glue_ctx aesni_dec_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = 1,

	.funcs = { {
		.num_blocks = 8,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_dec8) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_dec) }
	} }
};

static int xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	return glue_xts_req_128bit(&aesni_enc_xts, req,
				   XTS_TWEAK_CAST(aesni_xts_tweak),
				   aes_ctx(ctx->raw_tweak_ctx),
				   aes_ctx(ctx->raw_crypt_ctx));
}

static int xts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	return glue_xts_req_128bit(&aesni_dec_xts, req,
				   XTS_TWEAK_CAST(aesni_xts_tweak),
				   aes_ctx(ctx->raw_tweak_ctx),
				   aes_ctx(ctx->raw_crypt_ctx));
}

static int rfc4106_init(struct crypto_aead *aead)
{
	struct cryptd_aead *cryptd_tfm;
	struct cryptd_aead **ctx = crypto_aead_ctx(aead);

	cryptd_tfm = cryptd_alloc_aead("__driver-gcm-aes-aesni",
				       CRYPTO_ALG_INTERNAL,
				       CRYPTO_ALG_INTERNAL);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);

	*ctx = cryptd_tfm;
	crypto_aead_set_reqsize(aead, crypto_aead_reqsize(&cryptd_tfm->base));
	return 0;
}

static void rfc4106_exit(struct crypto_aead *aead)
{
	struct cryptd_aead **ctx = crypto_aead_ctx(aead);

	cryptd_free_aead(*ctx);
}

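/*
 * Derive the GHASH subkey H = AES-K(0^128): key a plain AES cipher, zero
 * the output buffer, and encrypt that all-zero block in place.
 */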
static int
rfc4106_set_hash_subkey(u8 *hash_subkey, const u8 *key, unsigned int key_len)
{
	struct crypto_cipher *tfm;
	int ret;

	tfm = crypto_alloc_cipher("aes", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	ret = crypto_cipher_setkey(tfm, key, key_len);
	if (ret)
		goto out_free_cipher;

	/* Clear the data in the hash sub key container to zero.*/
	/* We want to cipher all zeros to create the hash sub key. */
	memset(hash_subkey, 0, RFC4106_HASH_SUBKEY_SIZE);

	crypto_cipher_encrypt_one(tfm, hash_subkey, hash_subkey);

out_free_cipher:
	crypto_free_cipher(tfm);
	return ret;
}

static int common_rfc4106_set_key(struct crypto_aead *aead, const u8 *key,
				  unsigned int key_len)
{
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(aead);

	if (key_len < 4) {
		crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	/* Account for the 4 byte nonce at the end. */
	key_len -= 4;

	memcpy(ctx->nonce, key + key_len, sizeof(ctx->nonce));

	return aes_set_key_common(crypto_aead_tfm(aead),
				  &ctx->aes_key_expanded, key, key_len) ?:
	       rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
}

static int gcmaes_wrapper_set_key(struct crypto_aead *parent, const u8 *key,
				  unsigned int key_len)
{
	struct cryptd_aead **ctx = crypto_aead_ctx(parent);
	struct cryptd_aead *cryptd_tfm = *ctx;

	return crypto_aead_setkey(&cryptd_tfm->base, key, key_len);
}

static int common_rfc4106_set_authsize(struct crypto_aead *aead,
				       unsigned int authsize)
{
	switch (authsize) {
	case 8:
	case 12:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

/* This is the Integrity Check Value (aka the authentication tag) length,
 * which can be 8, 12 or 16 bytes. */
static int gcmaes_wrapper_set_authsize(struct crypto_aead *parent,
				       unsigned int authsize)
{
	struct cryptd_aead **ctx = crypto_aead_ctx(parent);
	struct cryptd_aead *cryptd_tfm = *ctx;

	return crypto_aead_setauthsize(&cryptd_tfm->base, authsize);
}

775 | static int generic_gcmaes_set_authsize(struct crypto_aead *tfm, |
776 | unsigned int authsize) | |
777 | { | |
778 | switch (authsize) { | |
779 | case 4: | |
780 | case 8: | |
781 | case 12: | |
782 | case 13: | |
783 | case 14: | |
784 | case 15: | |
785 | case 16: | |
786 | break; | |
787 | default: | |
788 | return -EINVAL; | |
789 | } | |
790 | ||
791 | return 0; | |
792 | } | |
793 | ||
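/*
 * Stream the request through the incremental init/update/finalize asm
 * interface. The associated data is linearized first (mapped directly when
 * it already sits in one low-memory page, copied to a heap buffer
 * otherwise), then the payload is walked scatterlist entry by entry.
 */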
static int gcmaes_crypt_by_sg(bool enc, struct aead_request *req,
			      unsigned int assoclen, u8 *hash_subkey,
			      u8 *iv, void *aes_ctx)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	unsigned long auth_tag_len = crypto_aead_authsize(tfm);
	struct gcm_context_data data AESNI_ALIGN_ATTR;
	struct scatter_walk dst_sg_walk = {};
	unsigned long left = req->cryptlen;
	unsigned long len, srclen, dstlen;
	struct scatter_walk assoc_sg_walk;
	struct scatter_walk src_sg_walk;
	struct scatterlist src_start[2];
	struct scatterlist dst_start[2];
	struct scatterlist *src_sg;
	struct scatterlist *dst_sg;
	u8 *src, *dst, *assoc;
	u8 *assocmem = NULL;
	u8 authTag[16];

	if (!enc)
		left -= auth_tag_len;

	/* Linearize assoc, if not already linear */
	if (req->src->length >= assoclen && req->src->length &&
	    (!PageHighMem(sg_page(req->src)) ||
	     req->src->offset + req->src->length <= PAGE_SIZE)) {
		scatterwalk_start(&assoc_sg_walk, req->src);
		assoc = scatterwalk_map(&assoc_sg_walk);
	} else {
		/* assoc can be any length, so must be on heap */
		assocmem = kmalloc(assoclen, GFP_ATOMIC);
		if (unlikely(!assocmem))
			return -ENOMEM;
		assoc = assocmem;

		scatterwalk_map_and_copy(assoc, req->src, 0, assoclen, 0);
	}

	src_sg = scatterwalk_ffwd(src_start, req->src, req->assoclen);
	scatterwalk_start(&src_sg_walk, src_sg);
	if (req->src != req->dst) {
		dst_sg = scatterwalk_ffwd(dst_start, req->dst, req->assoclen);
		scatterwalk_start(&dst_sg_walk, dst_sg);
	}

	kernel_fpu_begin();
	aesni_gcm_init(aes_ctx, &data, iv,
		       hash_subkey, assoc, assoclen);
	if (req->src != req->dst) {
		while (left) {
			src = scatterwalk_map(&src_sg_walk);
			dst = scatterwalk_map(&dst_sg_walk);
			srclen = scatterwalk_clamp(&src_sg_walk, left);
			dstlen = scatterwalk_clamp(&dst_sg_walk, left);
			len = min(srclen, dstlen);
			if (len) {
				if (enc)
					aesni_gcm_enc_update(aes_ctx, &data,
							     dst, src, len);
				else
					aesni_gcm_dec_update(aes_ctx, &data,
							     dst, src, len);
			}
			left -= len;

			scatterwalk_unmap(src);
			scatterwalk_unmap(dst);
			scatterwalk_advance(&src_sg_walk, len);
			scatterwalk_advance(&dst_sg_walk, len);
			scatterwalk_done(&src_sg_walk, 0, left);
			scatterwalk_done(&dst_sg_walk, 1, left);
		}
	} else {
		while (left) {
			dst = src = scatterwalk_map(&src_sg_walk);
			len = scatterwalk_clamp(&src_sg_walk, left);
			if (len) {
				if (enc)
					aesni_gcm_enc_update(aes_ctx, &data,
							     src, src, len);
				else
					aesni_gcm_dec_update(aes_ctx, &data,
							     src, src, len);
			}
			left -= len;
			scatterwalk_unmap(src);
			scatterwalk_advance(&src_sg_walk, len);
			scatterwalk_done(&src_sg_walk, 1, left);
		}
	}
	aesni_gcm_finalize(aes_ctx, &data, authTag, auth_tag_len);
	kernel_fpu_end();

	if (!assocmem)
		scatterwalk_unmap(assoc);
	else
		kfree(assocmem);

	if (!enc) {
		u8 authTagMsg[16];

		/* Copy out original authTag */
		scatterwalk_map_and_copy(authTagMsg, req->src,
					 req->assoclen + req->cryptlen -
					 auth_tag_len,
					 auth_tag_len, 0);

		/* Compare generated tag with passed in tag. */
		return crypto_memneq(authTagMsg, authTag, auth_tag_len) ?
			-EBADMSG : 0;
	}

	/* Copy in the authTag */
	scatterwalk_map_and_copy(authTag, req->dst,
				 req->assoclen + req->cryptlen,
				 auth_tag_len, 1);

	return 0;
}

static int gcmaes_encrypt(struct aead_request *req, unsigned int assoclen,
			  u8 *hash_subkey, u8 *iv, void *aes_ctx)
{
	u8 one_entry_in_sg = 0;
	u8 *src, *dst, *assoc;
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	unsigned long auth_tag_len = crypto_aead_authsize(tfm);
	struct scatter_walk src_sg_walk;
	struct scatter_walk dst_sg_walk = {};
	struct gcm_context_data data AESNI_ALIGN_ATTR;

	if (((struct crypto_aes_ctx *)aes_ctx)->key_length != AES_KEYSIZE_128 ||
	    aesni_gcm_enc_tfm == aesni_gcm_enc ||
	    req->cryptlen < AVX_GEN2_OPTSIZE) {
		return gcmaes_crypt_by_sg(true, req, assoclen, hash_subkey, iv,
					  aes_ctx);
	}
	if (sg_is_last(req->src) &&
	    (!PageHighMem(sg_page(req->src)) ||
	     req->src->offset + req->src->length <= PAGE_SIZE) &&
	    sg_is_last(req->dst) &&
	    (!PageHighMem(sg_page(req->dst)) ||
	     req->dst->offset + req->dst->length <= PAGE_SIZE)) {
		one_entry_in_sg = 1;
		scatterwalk_start(&src_sg_walk, req->src);
		assoc = scatterwalk_map(&src_sg_walk);
		src = assoc + req->assoclen;
		dst = src;
		if (unlikely(req->src != req->dst)) {
			scatterwalk_start(&dst_sg_walk, req->dst);
			dst = scatterwalk_map(&dst_sg_walk) + req->assoclen;
		}
	} else {
		/* Allocate memory for src, dst, assoc */
		assoc = kmalloc(req->cryptlen + auth_tag_len + req->assoclen,
			GFP_ATOMIC);
		if (unlikely(!assoc))
			return -ENOMEM;
		scatterwalk_map_and_copy(assoc, req->src, 0,
					 req->assoclen + req->cryptlen, 0);
		src = assoc + req->assoclen;
		dst = src;
	}

	kernel_fpu_begin();
	aesni_gcm_enc_tfm(aes_ctx, &data, dst, src, req->cryptlen, iv,
			  hash_subkey, assoc, assoclen,
			  dst + req->cryptlen, auth_tag_len);
	kernel_fpu_end();

	/* The authTag (aka the Integrity Check Value) needs to be written
	 * back to the packet. */
	if (one_entry_in_sg) {
		if (unlikely(req->src != req->dst)) {
			scatterwalk_unmap(dst - req->assoclen);
			scatterwalk_advance(&dst_sg_walk, req->dst->length);
			scatterwalk_done(&dst_sg_walk, 1, 0);
		}
		scatterwalk_unmap(assoc);
		scatterwalk_advance(&src_sg_walk, req->src->length);
		scatterwalk_done(&src_sg_walk, req->src == req->dst, 0);
	} else {
		scatterwalk_map_and_copy(dst, req->dst, req->assoclen,
					 req->cryptlen + auth_tag_len, 1);
		kfree(assoc);
	}
	return 0;
}

static int gcmaes_decrypt(struct aead_request *req, unsigned int assoclen,
			  u8 *hash_subkey, u8 *iv, void *aes_ctx)
{
	u8 one_entry_in_sg = 0;
	u8 *src, *dst, *assoc;
	unsigned long tempCipherLen = 0;
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	unsigned long auth_tag_len = crypto_aead_authsize(tfm);
	u8 authTag[16];
	struct scatter_walk src_sg_walk;
	struct scatter_walk dst_sg_walk = {};
	struct gcm_context_data data AESNI_ALIGN_ATTR;
	int retval = 0;

	if (((struct crypto_aes_ctx *)aes_ctx)->key_length != AES_KEYSIZE_128 ||
	    aesni_gcm_enc_tfm == aesni_gcm_enc ||
	    req->cryptlen < AVX_GEN2_OPTSIZE) {
		return gcmaes_crypt_by_sg(false, req, assoclen, hash_subkey, iv,
					  aes_ctx);
	}
	tempCipherLen = (unsigned long)(req->cryptlen - auth_tag_len);

	if (sg_is_last(req->src) &&
	    (!PageHighMem(sg_page(req->src)) ||
	     req->src->offset + req->src->length <= PAGE_SIZE) &&
	    sg_is_last(req->dst) && req->dst->length &&
	    (!PageHighMem(sg_page(req->dst)) ||
	     req->dst->offset + req->dst->length <= PAGE_SIZE)) {
		one_entry_in_sg = 1;
		scatterwalk_start(&src_sg_walk, req->src);
		assoc = scatterwalk_map(&src_sg_walk);
		src = assoc + req->assoclen;
		dst = src;
		if (unlikely(req->src != req->dst)) {
			scatterwalk_start(&dst_sg_walk, req->dst);
			dst = scatterwalk_map(&dst_sg_walk) + req->assoclen;
		}
	} else {
		/* Allocate memory for src, dst, assoc */
		assoc = kmalloc(req->cryptlen + req->assoclen, GFP_ATOMIC);
		if (!assoc)
			return -ENOMEM;
		scatterwalk_map_and_copy(assoc, req->src, 0,
					 req->assoclen + req->cryptlen, 0);
		src = assoc + req->assoclen;
		dst = src;
	}

	kernel_fpu_begin();
	aesni_gcm_dec_tfm(aes_ctx, &data, dst, src, tempCipherLen, iv,
			  hash_subkey, assoc, assoclen,
			  authTag, auth_tag_len);
	kernel_fpu_end();

	/* Compare generated tag with passed in tag. */
	retval = crypto_memneq(src + tempCipherLen, authTag, auth_tag_len) ?
		 -EBADMSG : 0;

	if (one_entry_in_sg) {
		if (unlikely(req->src != req->dst)) {
			scatterwalk_unmap(dst - req->assoclen);
			scatterwalk_advance(&dst_sg_walk, req->dst->length);
			scatterwalk_done(&dst_sg_walk, 1, 0);
		}
		scatterwalk_unmap(assoc);
		scatterwalk_advance(&src_sg_walk, req->src->length);
		scatterwalk_done(&src_sg_walk, req->src == req->dst, 0);
	} else {
		scatterwalk_map_and_copy(dst, req->dst, req->assoclen,
					 tempCipherLen, 1);
		kfree(assoc);
	}
	return retval;
}

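/*
 * RFC 4106 builds the 16-byte pre-counter block as
 * salt (4 bytes, taken from the end of the key) || explicit IV (8 bytes)
 * || 0x00000001.
 */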
static int helper_rfc4106_encrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
	unsigned int i;
	__be32 counter = cpu_to_be32(1);

	/* Assuming we are supporting rfc4106 64-bit extended
	 * sequence numbers, we need to have the AAD length equal
	 * to 16 or 20 bytes.
	 */
	if (unlikely(req->assoclen != 16 && req->assoclen != 20))
		return -EINVAL;

	/* IV below built */
	for (i = 0; i < 4; i++)
		*(iv+i) = ctx->nonce[i];
	for (i = 0; i < 8; i++)
		*(iv+4+i) = req->iv[i];
	*((__be32 *)(iv+12)) = counter;

	return gcmaes_encrypt(req, req->assoclen - 8, ctx->hash_subkey, iv,
			      aes_ctx);
}

static int helper_rfc4106_decrypt(struct aead_request *req)
{
	__be32 counter = cpu_to_be32(1);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
	unsigned int i;

	if (unlikely(req->assoclen != 16 && req->assoclen != 20))
		return -EINVAL;

	/* Assuming we are supporting rfc4106 64-bit extended
	 * sequence numbers, we need to have the AAD length
	 * equal to 16 or 20 bytes.
	 */

	/* IV below built */
	for (i = 0; i < 4; i++)
		*(iv+i) = ctx->nonce[i];
	for (i = 0; i < 8; i++)
		*(iv+4+i) = req->iv[i];
	*((__be32 *)(iv+12)) = counter;

	return gcmaes_decrypt(req, req->assoclen - 8, ctx->hash_subkey, iv,
			      aes_ctx);
}

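/*
 * The async wrappers route through cryptd only when they must: if the FPU
 * is usable and we are either not in atomic context or nothing is already
 * queued on the cryptd context (which would reorder requests), call the
 * synchronous child algorithm directly.
 */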
static int gcmaes_wrapper_encrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct cryptd_aead **ctx = crypto_aead_ctx(tfm);
	struct cryptd_aead *cryptd_tfm = *ctx;

	tfm = &cryptd_tfm->base;
	if (irq_fpu_usable() && (!in_atomic() ||
				 !cryptd_aead_queued(cryptd_tfm)))
		tfm = cryptd_aead_child(cryptd_tfm);

	aead_request_set_tfm(req, tfm);

	return crypto_aead_encrypt(req);
}

static int gcmaes_wrapper_decrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct cryptd_aead **ctx = crypto_aead_ctx(tfm);
	struct cryptd_aead *cryptd_tfm = *ctx;

	tfm = &cryptd_tfm->base;
	if (irq_fpu_usable() && (!in_atomic() ||
				 !cryptd_aead_queued(cryptd_tfm)))
		tfm = cryptd_aead_child(cryptd_tfm);

	aead_request_set_tfm(req, tfm);

	return crypto_aead_decrypt(req);
}
#endif

static struct crypto_alg aesni_algs[] = { {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-aesni",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
	.cra_module		= THIS_MODULE,
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= aes_encrypt,
			.cia_decrypt		= aes_decrypt
		}
	}
}, {
	.cra_name		= "__aes",
	.cra_driver_name	= "__aes-aesni",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER | CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
	.cra_module		= THIS_MODULE,
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= __aes_encrypt,
			.cia_decrypt		= __aes_decrypt
		}
	}
} };

static struct skcipher_alg aesni_skciphers[] = {
	{
		.base = {
			.cra_name		= "__ecb(aes)",
			.cra_driver_name	= "__ecb-aes-aesni",
			.cra_priority		= 400,
			.cra_flags		= CRYPTO_ALG_INTERNAL,
			.cra_blocksize		= AES_BLOCK_SIZE,
			.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.setkey		= aesni_skcipher_setkey,
		.encrypt	= ecb_encrypt,
		.decrypt	= ecb_decrypt,
	}, {
		.base = {
			.cra_name		= "__cbc(aes)",
			.cra_driver_name	= "__cbc-aes-aesni",
			.cra_priority		= 400,
			.cra_flags		= CRYPTO_ALG_INTERNAL,
			.cra_blocksize		= AES_BLOCK_SIZE,
			.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= aesni_skcipher_setkey,
		.encrypt	= cbc_encrypt,
		.decrypt	= cbc_decrypt,
#ifdef CONFIG_X86_64
	}, {
		.base = {
			.cra_name		= "__ctr(aes)",
			.cra_driver_name	= "__ctr-aes-aesni",
			.cra_priority		= 400,
			.cra_flags		= CRYPTO_ALG_INTERNAL,
			.cra_blocksize		= 1,
			.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.chunksize	= AES_BLOCK_SIZE,
		.setkey		= aesni_skcipher_setkey,
		.encrypt	= ctr_crypt,
		.decrypt	= ctr_crypt,
	}, {
		.base = {
			.cra_name		= "__xts(aes)",
			.cra_driver_name	= "__xts-aes-aesni",
			.cra_priority		= 401,
			.cra_flags		= CRYPTO_ALG_INTERNAL,
			.cra_blocksize		= AES_BLOCK_SIZE,
			.cra_ctxsize		= XTS_AES_CTX_SIZE,
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= 2 * AES_MIN_KEY_SIZE,
		.max_keysize	= 2 * AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= xts_aesni_setkey,
		.encrypt	= xts_encrypt,
		.decrypt	= xts_decrypt,
#endif
	}
};

static
struct simd_skcipher_alg *aesni_simd_skciphers[ARRAY_SIZE(aesni_skciphers)];

#ifdef CONFIG_X86_64
static int generic_gcmaes_set_key(struct crypto_aead *aead, const u8 *key,
				  unsigned int key_len)
{
	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(aead);

	return aes_set_key_common(crypto_aead_tfm(aead),
				  &ctx->aes_key_expanded, key, key_len) ?:
	       rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
}

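/*
 * Plain gcm(aes) takes a full 12-byte IV from the request; the pre-counter
 * block is simply IV || 0x00000001 (the 96-bit-IV GCM fast path).
 */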
static int generic_gcmaes_encrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
	__be32 counter = cpu_to_be32(1);

	memcpy(iv, req->iv, 12);
	*((__be32 *)(iv+12)) = counter;

	return gcmaes_encrypt(req, req->assoclen, ctx->hash_subkey, iv,
			      aes_ctx);
}

static int generic_gcmaes_decrypt(struct aead_request *req)
{
	__be32 counter = cpu_to_be32(1);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));

	memcpy(iv, req->iv, 12);
	*((__be32 *)(iv+12)) = counter;

	return gcmaes_decrypt(req, req->assoclen, ctx->hash_subkey, iv,
			      aes_ctx);
}

static int generic_gcmaes_init(struct crypto_aead *aead)
{
	struct cryptd_aead *cryptd_tfm;
	struct cryptd_aead **ctx = crypto_aead_ctx(aead);

	cryptd_tfm = cryptd_alloc_aead("__driver-generic-gcm-aes-aesni",
				       CRYPTO_ALG_INTERNAL,
				       CRYPTO_ALG_INTERNAL);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);

	*ctx = cryptd_tfm;
	crypto_aead_set_reqsize(aead, crypto_aead_reqsize(&cryptd_tfm->base));

	return 0;
}

static void generic_gcmaes_exit(struct crypto_aead *aead)
{
	struct cryptd_aead **ctx = crypto_aead_ctx(aead);

	cryptd_free_aead(*ctx);
}

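/*
 * Each GCM mode is registered twice: an __-prefixed internal synchronous
 * algorithm that does the work, and a public async wrapper that feeds it
 * through cryptd when the FPU is not directly usable.
 */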
static struct aead_alg aesni_aead_algs[] = { {
	.setkey			= common_rfc4106_set_key,
	.setauthsize		= common_rfc4106_set_authsize,
	.encrypt		= helper_rfc4106_encrypt,
	.decrypt		= helper_rfc4106_decrypt,
	.ivsize			= GCM_RFC4106_IV_SIZE,
	.maxauthsize		= 16,
	.base = {
		.cra_name		= "__gcm-aes-aesni",
		.cra_driver_name	= "__driver-gcm-aes-aesni",
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct aesni_rfc4106_gcm_ctx),
		.cra_alignmask		= AESNI_ALIGN - 1,
		.cra_module		= THIS_MODULE,
	},
}, {
	.init			= rfc4106_init,
	.exit			= rfc4106_exit,
	.setkey			= gcmaes_wrapper_set_key,
	.setauthsize		= gcmaes_wrapper_set_authsize,
	.encrypt		= gcmaes_wrapper_encrypt,
	.decrypt		= gcmaes_wrapper_decrypt,
	.ivsize			= GCM_RFC4106_IV_SIZE,
	.maxauthsize		= 16,
	.base = {
		.cra_name		= "rfc4106(gcm(aes))",
		.cra_driver_name	= "rfc4106-gcm-aesni",
		.cra_priority		= 400,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct cryptd_aead *),
		.cra_module		= THIS_MODULE,
	},
}, {
	.setkey			= generic_gcmaes_set_key,
	.setauthsize		= generic_gcmaes_set_authsize,
	.encrypt		= generic_gcmaes_encrypt,
	.decrypt		= generic_gcmaes_decrypt,
	.ivsize			= GCM_AES_IV_SIZE,
	.maxauthsize		= 16,
	.base = {
		.cra_name		= "__generic-gcm-aes-aesni",
		.cra_driver_name	= "__driver-generic-gcm-aes-aesni",
		.cra_priority		= 0,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct generic_gcmaes_ctx),
		.cra_alignmask		= AESNI_ALIGN - 1,
		.cra_module		= THIS_MODULE,
	},
}, {
	.init			= generic_gcmaes_init,
	.exit			= generic_gcmaes_exit,
	.setkey			= gcmaes_wrapper_set_key,
	.setauthsize		= gcmaes_wrapper_set_authsize,
	.encrypt		= gcmaes_wrapper_encrypt,
	.decrypt		= gcmaes_wrapper_decrypt,
	.ivsize			= GCM_AES_IV_SIZE,
	.maxauthsize		= 16,
	.base = {
		.cra_name		= "gcm(aes)",
		.cra_driver_name	= "generic-gcm-aesni",
		.cra_priority		= 400,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct cryptd_aead *),
		.cra_module		= THIS_MODULE,
	},
} };
#else
static struct aead_alg aesni_aead_algs[0];
#endif


static const struct x86_cpu_id aesni_cpu_id[] = {
	X86_FEATURE_MATCH(X86_FEATURE_AES),
	{}
};
MODULE_DEVICE_TABLE(x86cpu, aesni_cpu_id);

static void aesni_free_simds(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(aesni_simd_skciphers) &&
		    aesni_simd_skciphers[i]; i++)
		simd_skcipher_free(aesni_simd_skciphers[i]);
}

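/*
 * Module init: probe CPU features once, bind the GCM and CTR function
 * pointers to the best available implementation, then register the cipher,
 * skcipher and AEAD algorithms and wrap each internal skcipher in a SIMD
 * helper (the cra_name/cra_driver_name "__" prefixes are stripped to form
 * the public names).
 */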
static int __init aesni_init(void)
{
	struct simd_skcipher_alg *simd;
	const char *basename;
	const char *algname;
	const char *drvname;
	int err;
	int i;

	if (!x86_match_cpu(aesni_cpu_id))
		return -ENODEV;
#ifdef CONFIG_X86_64
#ifdef CONFIG_AS_AVX2
	if (boot_cpu_has(X86_FEATURE_AVX2)) {
		pr_info("AVX2 version of gcm_enc/dec engaged.\n");
		aesni_gcm_enc_tfm = aesni_gcm_enc_avx2;
		aesni_gcm_dec_tfm = aesni_gcm_dec_avx2;
	} else
#endif
#ifdef CONFIG_AS_AVX
	if (boot_cpu_has(X86_FEATURE_AVX)) {
		pr_info("AVX version of gcm_enc/dec engaged.\n");
		aesni_gcm_enc_tfm = aesni_gcm_enc_avx;
		aesni_gcm_dec_tfm = aesni_gcm_dec_avx;
	} else
#endif
	{
		pr_info("SSE version of gcm_enc/dec engaged.\n");
		aesni_gcm_enc_tfm = aesni_gcm_enc;
		aesni_gcm_dec_tfm = aesni_gcm_dec;
	}
	aesni_ctr_enc_tfm = aesni_ctr_enc;
#ifdef CONFIG_AS_AVX
	if (boot_cpu_has(X86_FEATURE_AVX)) {
		/* optimize performance of ctr mode encryption transform */
		aesni_ctr_enc_tfm = aesni_ctr_enc_avx_tfm;
		pr_info("AES CTR mode by8 optimization enabled\n");
	}
#endif
#endif

	err = crypto_fpu_init();
	if (err)
		return err;

	err = crypto_register_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
	if (err)
		goto fpu_exit;

	err = crypto_register_skciphers(aesni_skciphers,
					ARRAY_SIZE(aesni_skciphers));
	if (err)
		goto unregister_algs;

	err = crypto_register_aeads(aesni_aead_algs,
				    ARRAY_SIZE(aesni_aead_algs));
	if (err)
		goto unregister_skciphers;

	for (i = 0; i < ARRAY_SIZE(aesni_skciphers); i++) {
		algname = aesni_skciphers[i].base.cra_name + 2;
		drvname = aesni_skciphers[i].base.cra_driver_name + 2;
		basename = aesni_skciphers[i].base.cra_driver_name;
		simd = simd_skcipher_create_compat(algname, drvname, basename);
		err = PTR_ERR(simd);
		if (IS_ERR(simd))
			goto unregister_simds;

		aesni_simd_skciphers[i] = simd;
	}

	return 0;

unregister_simds:
	aesni_free_simds();
	crypto_unregister_aeads(aesni_aead_algs, ARRAY_SIZE(aesni_aead_algs));
unregister_skciphers:
	crypto_unregister_skciphers(aesni_skciphers,
				    ARRAY_SIZE(aesni_skciphers));
unregister_algs:
	crypto_unregister_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
fpu_exit:
	crypto_fpu_exit();
	return err;
}

static void __exit aesni_exit(void)
{
	aesni_free_simds();
	crypto_unregister_aeads(aesni_aead_algs, ARRAY_SIZE(aesni_aead_algs));
	crypto_unregister_skciphers(aesni_skciphers,
				    ARRAY_SIZE(aesni_skciphers));
	crypto_unregister_algs(aesni_algs, ARRAY_SIZE(aesni_algs));

	crypto_fpu_exit();
}

late_initcall(aesni_init);
module_exit(aesni_exit);

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, Intel AES-NI instructions optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS_CRYPTO("aes");