/*
 * arch/x86/crypto/camellia_aesni_avx_glue.c
 * (snapshot taken after commit "x86/fpu: Rename fpu/xsave.h to fpu/xstate.h")
 */
1 /*
2 * Glue Code for x86_64/AVX/AES-NI assembler optimized version of Camellia
3 *
4 * Copyright © 2012-2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
5 *
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation; either version 2 of the License, or
9 * (at your option) any later version.
10 *
11 */
12
13 #include <linux/module.h>
14 #include <linux/types.h>
15 #include <linux/crypto.h>
16 #include <linux/err.h>
17 #include <crypto/ablk_helper.h>
18 #include <crypto/algapi.h>
19 #include <crypto/ctr.h>
20 #include <crypto/lrw.h>
21 #include <crypto/xts.h>
22 #include <asm/xcr.h>
23 #include <asm/fpu/xstate.h>
24 #include <asm/crypto/camellia.h>
25 #include <asm/crypto/glue_helper.h>
26
/* Widest parallelism offered by the AVX/AES-NI assembler routines. */
#define CAMELLIA_AESNI_PARALLEL_BLOCKS 16

/*
 * 16-way parallel cipher functions (avx/aes-ni).  Implemented in the
 * companion assembler file; exported so that other glue code (presumably
 * the AVX2 variant — confirm against the callers) can reuse them.
 */

/* ECB encrypt 16 blocks: src -> dst (may be the same buffer). */
asmlinkage void camellia_ecb_enc_16way(struct camellia_ctx *ctx, u8 *dst,
				       const u8 *src);
EXPORT_SYMBOL_GPL(camellia_ecb_enc_16way);

/* ECB decrypt 16 blocks. */
asmlinkage void camellia_ecb_dec_16way(struct camellia_ctx *ctx, u8 *dst,
				       const u8 *src);
EXPORT_SYMBOL_GPL(camellia_ecb_dec_16way);

/* CBC decrypt 16 blocks (CBC decryption parallelizes; encryption does not). */
asmlinkage void camellia_cbc_dec_16way(struct camellia_ctx *ctx, u8 *dst,
				       const u8 *src);
EXPORT_SYMBOL_GPL(camellia_cbc_dec_16way);

/* CTR crypt 16 blocks, advancing the counter in @iv. */
asmlinkage void camellia_ctr_16way(struct camellia_ctx *ctx, u8 *dst,
				   const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(camellia_ctr_16way);

/* XTS encrypt 16 blocks using the running tweak in @iv. */
asmlinkage void camellia_xts_enc_16way(struct camellia_ctx *ctx, u8 *dst,
				       const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(camellia_xts_enc_16way);

/* XTS decrypt 16 blocks using the running tweak in @iv. */
asmlinkage void camellia_xts_dec_16way(struct camellia_ctx *ctx, u8 *dst,
				       const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(camellia_xts_dec_16way);
53
/*
 * XTS-encrypt a single 128-bit block: scalar fallback used for the tail
 * that the 16-way assembler path cannot cover.
 */
void camellia_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
				  GLUE_FUNC_CAST(camellia_enc_blk));
}
EXPORT_SYMBOL_GPL(camellia_xts_enc);
60
/*
 * XTS-decrypt a single 128-bit block: scalar fallback for the tail that
 * the 16-way assembler path cannot cover.
 */
void camellia_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
				  GLUE_FUNC_CAST(camellia_dec_blk));
}
EXPORT_SYMBOL_GPL(camellia_xts_dec);
67
/*
 * ECB encryption dispatch table: entries ordered widest-first (16-way
 * AVX/AES-NI, then 2-way, then single block).  fpu_blocks_limit is the
 * block count at/above which the glue helper enables the FPU.
 */
static const struct common_glue_ctx camellia_enc = {
	.num_funcs = 3,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_enc_16way) }
	}, {
		.num_blocks = 2,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk_2way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk) }
	} }
};
83
/*
 * CTR dispatch table: 16-way AVX/AES-NI, then 2-way, then single-block
 * fallback.  CTR is its own inverse, so one table serves both directions.
 */
static const struct common_glue_ctx camellia_ctr = {
	.num_funcs = 3,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_ctr_16way) }
	}, {
		.num_blocks = 2,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_crypt_ctr_2way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_crypt_ctr) }
	} }
};
99
/* XTS encryption dispatch: 16-way assembler path plus single-block tail. */
static const struct common_glue_ctx camellia_enc_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_enc_16way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_enc) }
	} }
};
112
/* ECB decryption dispatch table: mirror of camellia_enc, widest-first. */
static const struct common_glue_ctx camellia_dec = {
	.num_funcs = 3,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_dec_16way) }
	}, {
		.num_blocks = 2,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk_2way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk) }
	} }
};
128
/*
 * CBC decryption dispatch table.  Only decryption has a parallel table:
 * CBC encryption is serial by construction and goes through
 * glue_cbc_encrypt_128bit() with the single-block cipher instead.
 */
static const struct common_glue_ctx camellia_dec_cbc = {
	.num_funcs = 3,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_cbc_dec_16way) }
	}, {
		.num_blocks = 2,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_decrypt_cbc_2way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_dec_blk) }
	} }
};
144
/* XTS decryption dispatch: 16-way assembler path plus single-block tail. */
static const struct common_glue_ctx camellia_dec_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_dec_16way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_dec) }
	} }
};
157
/* blkcipher .encrypt for ECB: walk the scatterlists via the glue helper. */
static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&camellia_enc, desc, dst, src, nbytes);
}
163
/* blkcipher .decrypt for ECB. */
static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&camellia_dec, desc, dst, src, nbytes);
}
169
/*
 * blkcipher .encrypt for CBC.  Serial: each block chains into the next,
 * so the single-block cipher is used rather than a parallel table.
 */
static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_encrypt_128bit(GLUE_FUNC_CAST(camellia_enc_blk), desc,
				       dst, src, nbytes);
}
176
/* blkcipher .decrypt for CBC: parallelizable, uses the dispatch table. */
static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_decrypt_128bit(&camellia_dec_cbc, desc, dst, src,
				       nbytes);
}
183
/* blkcipher .encrypt AND .decrypt for CTR (the mode is its own inverse). */
static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		     struct scatterlist *src, unsigned int nbytes)
{
	return glue_ctr_crypt_128bit(&camellia_ctr, desc, dst, src, nbytes);
}
189
/*
 * Enable the FPU if @nbytes is large enough to make the 16-way path
 * worthwhile and it is not already enabled.  Returns the new FPU state,
 * which the caller must eventually pass to camellia_fpu_end().
 */
static inline bool camellia_fpu_begin(bool fpu_enabled, unsigned int nbytes)
{
	return glue_fpu_begin(CAMELLIA_BLOCK_SIZE,
			      CAMELLIA_AESNI_PARALLEL_BLOCKS, NULL, fpu_enabled,
			      nbytes);
}
196
/* Release the FPU if camellia_fpu_begin() enabled it (no-op otherwise). */
static inline void camellia_fpu_end(bool fpu_enabled)
{
	glue_fpu_end(fpu_enabled);
}
201
/*
 * blkcipher .setkey: delegate to the generic camellia key schedule,
 * letting it report weak-key/bad-length conditions via crt_flags.
 */
static int camellia_setkey(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	return __camellia_setkey(crypto_tfm_ctx(tfm), in_key, key_len,
				 &tfm->crt_flags);
}
208
/*
 * Per-request state threaded through the LRW callbacks so that the FPU
 * can be enabled lazily inside the walk and torn down once by the caller.
 */
struct crypt_priv {
	struct camellia_ctx *ctx;	/* key schedule for the inner cipher */
	bool fpu_enabled;		/* updated by camellia_fpu_begin() */
};
213
214 static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
215 {
216 const unsigned int bsize = CAMELLIA_BLOCK_SIZE;
217 struct crypt_priv *ctx = priv;
218 int i;
219
220 ctx->fpu_enabled = camellia_fpu_begin(ctx->fpu_enabled, nbytes);
221
222 if (nbytes >= CAMELLIA_AESNI_PARALLEL_BLOCKS * bsize) {
223 camellia_ecb_enc_16way(ctx->ctx, srcdst, srcdst);
224 srcdst += bsize * CAMELLIA_AESNI_PARALLEL_BLOCKS;
225 nbytes -= bsize * CAMELLIA_AESNI_PARALLEL_BLOCKS;
226 }
227
228 while (nbytes >= CAMELLIA_PARALLEL_BLOCKS * bsize) {
229 camellia_enc_blk_2way(ctx->ctx, srcdst, srcdst);
230 srcdst += bsize * CAMELLIA_PARALLEL_BLOCKS;
231 nbytes -= bsize * CAMELLIA_PARALLEL_BLOCKS;
232 }
233
234 for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
235 camellia_enc_blk(ctx->ctx, srcdst, srcdst);
236 }
237
238 static void decrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
239 {
240 const unsigned int bsize = CAMELLIA_BLOCK_SIZE;
241 struct crypt_priv *ctx = priv;
242 int i;
243
244 ctx->fpu_enabled = camellia_fpu_begin(ctx->fpu_enabled, nbytes);
245
246 if (nbytes >= CAMELLIA_AESNI_PARALLEL_BLOCKS * bsize) {
247 camellia_ecb_dec_16way(ctx->ctx, srcdst, srcdst);
248 srcdst += bsize * CAMELLIA_AESNI_PARALLEL_BLOCKS;
249 nbytes -= bsize * CAMELLIA_AESNI_PARALLEL_BLOCKS;
250 }
251
252 while (nbytes >= CAMELLIA_PARALLEL_BLOCKS * bsize) {
253 camellia_dec_blk_2way(ctx->ctx, srcdst, srcdst);
254 srcdst += bsize * CAMELLIA_PARALLEL_BLOCKS;
255 nbytes -= bsize * CAMELLIA_PARALLEL_BLOCKS;
256 }
257
258 for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
259 camellia_dec_blk(ctx->ctx, srcdst, srcdst);
260 }
261
/*
 * blkcipher .encrypt for LRW.  Provides a stack tweak buffer big enough
 * for one 16-way chunk and runs the generic LRW walk, which calls
 * encrypt_callback() per segment.  MAY_SLEEP is cleared so the walk
 * cannot sleep while the FPU section opened by the callback is active.
 */
static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct camellia_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[CAMELLIA_AESNI_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->camellia_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = encrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	/* The callback may have enabled the FPU; release it exactly once. */
	camellia_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}
287
/*
 * blkcipher .decrypt for LRW.  Identical shape to lrw_encrypt() but
 * drives decrypt_callback() through the generic LRW walk.
 */
static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct camellia_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[CAMELLIA_AESNI_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->camellia_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = decrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	/* The callback may have enabled the FPU; release it exactly once. */
	camellia_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}
313
/*
 * blkcipher .encrypt for XTS: tweak_ctx generates the tweaks (always via
 * the encryption direction, per XTS), crypt_ctx encrypts the data.
 */
static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct camellia_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&camellia_enc_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(camellia_enc_blk),
				     &ctx->tweak_ctx, &ctx->crypt_ctx);
}
323
/*
 * blkcipher .decrypt for XTS.  Note: camellia_enc_blk here is correct —
 * XTS always derives the tweak stream with the ENCRYPTION function, even
 * when the payload is being decrypted.
 */
static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct camellia_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&camellia_dec_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(camellia_enc_blk),
				     &ctx->tweak_ctx, &ctx->crypt_ctx);
}
333
/*
 * Algorithm registrations.  The first five "__"-prefixed entries are the
 * internal synchronous blkcipher implementations (CRYPTO_ALG_INTERNAL,
 * priority 0) that require a usable FPU; the last five are the public
 * async wrappers (priority 400) built on the ablk_helper, which routes
 * requests to the internal algorithms (deferring to a helper thread when
 * the FPU is unavailable — see ablk_helper for the exact mechanism).
 */
static struct crypto_alg cmll_algs[10] = { {
	/* Internal ECB implementation. */
	.cra_name		= "__ecb-camellia-aesni",
	.cra_driver_name	= "__driver-ecb-camellia-aesni",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct camellia_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE,
			.setkey		= camellia_setkey,
			.encrypt	= ecb_encrypt,
			.decrypt	= ecb_decrypt,
		},
	},
}, {
	/* Internal CBC implementation. */
	.cra_name		= "__cbc-camellia-aesni",
	.cra_driver_name	= "__driver-cbc-camellia-aesni",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct camellia_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE,
			.setkey		= camellia_setkey,
			.encrypt	= cbc_encrypt,
			.decrypt	= cbc_decrypt,
		},
	},
}, {
	/* Internal CTR implementation; stream mode, so blocksize is 1. */
	.cra_name		= "__ctr-camellia-aesni",
	.cra_driver_name	= "__driver-ctr-camellia-aesni",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct camellia_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE,
			.ivsize		= CAMELLIA_BLOCK_SIZE,
			.setkey		= camellia_setkey,
			/* CTR is symmetric: same routine both ways. */
			.encrypt	= ctr_crypt,
			.decrypt	= ctr_crypt,
		},
	},
}, {
	/* Internal LRW implementation; key carries an extra tweak block. */
	.cra_name		= "__lrw-camellia-aesni",
	.cra_driver_name	= "__driver-lrw-camellia-aesni",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct camellia_lrw_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	/* Frees the LRW multiplication table built by lrw_camellia_setkey. */
	.cra_exit		= lrw_camellia_exit_tfm,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE +
					  CAMELLIA_BLOCK_SIZE,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE +
					  CAMELLIA_BLOCK_SIZE,
			.ivsize		= CAMELLIA_BLOCK_SIZE,
			.setkey		= lrw_camellia_setkey,
			.encrypt	= lrw_encrypt,
			.decrypt	= lrw_decrypt,
		},
	},
}, {
	/* Internal XTS implementation; double-length key (data + tweak). */
	.cra_name		= "__xts-camellia-aesni",
	.cra_driver_name	= "__driver-xts-camellia-aesni",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct camellia_xts_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE * 2,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE * 2,
			.ivsize		= CAMELLIA_BLOCK_SIZE,
			.setkey		= xts_camellia_setkey,
			.encrypt	= xts_encrypt,
			.decrypt	= xts_decrypt,
		},
	},
}, {
	/* Public async ECB wrapper. */
	.cra_name		= "ecb(camellia)",
	.cra_driver_name	= "ecb-camellia-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	/* Public async CBC wrapper. */
	.cra_name		= "cbc(camellia)",
	.cra_driver_name	= "cbc-camellia-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE,
			.ivsize		= CAMELLIA_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			/*
			 * Synchronous path for encryption; presumably because
			 * CBC encryption is serial — matches the other AVX
			 * glue drivers, but confirm against ablk_helper.
			 */
			.encrypt	= __ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	/* Public async CTR wrapper. */
	.cra_name		= "ctr(camellia)",
	.cra_driver_name	= "ctr-camellia-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE,
			.ivsize		= CAMELLIA_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			/* CTR decryption IS encryption — intentional. */
			.decrypt	= ablk_encrypt,
			.geniv		= "chainiv",
		},
	},
}, {
	/* Public async LRW wrapper. */
	.cra_name		= "lrw(camellia)",
	.cra_driver_name	= "lrw-camellia-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE +
					  CAMELLIA_BLOCK_SIZE,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE +
					  CAMELLIA_BLOCK_SIZE,
			.ivsize		= CAMELLIA_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	/* Public async XTS wrapper. */
	.cra_name		= "xts(camellia)",
	.cra_driver_name	= "xts-camellia-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE * 2,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE * 2,
			.ivsize		= CAMELLIA_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
} };
553
/*
 * Module init: require AVX, AES-NI and OSXSAVE in CPUID, and verify via
 * XGETBV that the OS has enabled SSE and YMM state saving in XCR0 —
 * without that, AVX registers cannot be used safely.  Only then register
 * all ten algorithm variants.
 */
static int __init camellia_aesni_init(void)
{
	u64 xcr0;

	if (!cpu_has_avx || !cpu_has_aes || !cpu_has_osxsave) {
		pr_info("AVX or AES-NI instructions are not detected.\n");
		return -ENODEV;
	}

	/* CPU supports AVX, but the OS may not have enabled YMM state. */
	xcr0 = xgetbv(XCR_XFEATURE_ENABLED_MASK);
	if ((xcr0 & (XSTATE_SSE | XSTATE_YMM)) != (XSTATE_SSE | XSTATE_YMM)) {
		pr_info("AVX detected but unusable.\n");
		return -ENODEV;
	}

	return crypto_register_algs(cmll_algs, ARRAY_SIZE(cmll_algs));
}
571
/* Module exit: unregister everything camellia_aesni_init() registered. */
static void __exit camellia_aesni_fini(void)
{
	crypto_unregister_algs(cmll_algs, ARRAY_SIZE(cmll_algs));
}
576
module_init(camellia_aesni_init);
module_exit(camellia_aesni_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Camellia Cipher Algorithm, AES-NI/AVX optimized");
/* Allow autoloading when "camellia" (or the legacy asm alias) is requested. */
MODULE_ALIAS_CRYPTO("camellia");
MODULE_ALIAS_CRYPTO("camellia-asm");