/*
 * Glue Code for x86_64/AVX2 assembler optimized version of Serpent
 *
 * Copyright © 2012-2013 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 */

#include <linux/module.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/ablk_helper.h>
#include <crypto/algapi.h>
#include <crypto/ctr.h>
#include <crypto/lrw.h>
#include <crypto/xts.h>
#include <crypto/serpent.h>
#include <asm/xcr.h>
#include <asm/xsave.h>
#include <asm/crypto/serpent-avx.h>
#include <asm/crypto/glue_helper.h>

#define SERPENT_AVX2_PARALLEL_BLOCKS 16

/* 16-way AVX2 parallel cipher functions */
asmlinkage void serpent_ecb_enc_16way(struct serpent_ctx *ctx, u8 *dst,
				      const u8 *src);
asmlinkage void serpent_ecb_dec_16way(struct serpent_ctx *ctx, u8 *dst,
				      const u8 *src);
asmlinkage void serpent_cbc_dec_16way(void *ctx, u128 *dst, const u128 *src);

asmlinkage void serpent_ctr_16way(void *ctx, u128 *dst, const u128 *src,
				  le128 *iv);
asmlinkage void serpent_xts_enc_16way(struct serpent_ctx *ctx, u8 *dst,
				      const u8 *src, le128 *iv);
asmlinkage void serpent_xts_dec_16way(struct serpent_ctx *ctx, u8 *dst,
				      const u8 *src, le128 *iv);

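/*
 * Note: dispatch tables for the glue_helper framework.  Entries are ordered
 * by descending ->num_blocks; the helper picks the widest implementation
 * that still fits the remaining data (16-way AVX2, then 8-way AVX, then the
 * scalar single-block fallback).  fpu_blocks_limit is 8 to match the reused
 * 8-way AVX routines: the FPU is only claimed once at least 8 blocks are
 * queued.
 */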
static const struct common_glue_ctx serpent_enc = {
	.num_funcs = 3,
	.fpu_blocks_limit = 8,

	.funcs = { {
		.num_blocks = 16,
		.fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_enc_16way) }
	}, {
		.num_blocks = 8,
		.fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_enc_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_encrypt) }
	} }
};

static const struct common_glue_ctx serpent_ctr = {
	.num_funcs = 3,
	.fpu_blocks_limit = 8,

	.funcs = { {
		.num_blocks = 16,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(serpent_ctr_16way) }
	}, {
		.num_blocks = 8,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(serpent_ctr_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(__serpent_crypt_ctr) }
	} }
};

static const struct common_glue_ctx serpent_enc_xts = {
	.num_funcs = 3,
	.fpu_blocks_limit = 8,

	.funcs = { {
		.num_blocks = 16,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_enc_16way) }
	}, {
		.num_blocks = 8,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_enc_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_enc) }
	} }
};

static const struct common_glue_ctx serpent_dec = {
	.num_funcs = 3,
	.fpu_blocks_limit = 8,

	.funcs = { {
		.num_blocks = 16,
		.fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_dec_16way) }
	}, {
		.num_blocks = 8,
		.fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_dec_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_decrypt) }
	} }
};

static const struct common_glue_ctx serpent_dec_cbc = {
	.num_funcs = 3,
	.fpu_blocks_limit = 8,

	.funcs = { {
		.num_blocks = 16,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(serpent_cbc_dec_16way) }
	}, {
		.num_blocks = 8,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(serpent_cbc_dec_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(__serpent_decrypt) }
	} }
};

static const struct common_glue_ctx serpent_dec_xts = {
	.num_funcs = 3,
	.fpu_blocks_limit = 8,

	.funcs = { {
		.num_blocks = 16,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_dec_16way) }
	}, {
		.num_blocks = 8,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_dec_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_dec) }
	} }
};

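/*
 * blkcipher entry points.  These hand the scatterlists to the generic
 * 128-bit glue code, which walks the data and brackets the vectorized
 * calls with FPU begin/end sections according to fpu_blocks_limit above.
 */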
static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&serpent_enc, desc, dst, src, nbytes);
}

static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&serpent_dec, desc, dst, src, nbytes);
}

static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_encrypt_128bit(GLUE_FUNC_CAST(__serpent_encrypt), desc,
				       dst, src, nbytes);
}

static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_decrypt_128bit(&serpent_dec_cbc, desc, dst, src,
				       nbytes);
}

static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		     struct scatterlist *src, unsigned int nbytes)
{
	return glue_ctr_crypt_128bit(&serpent_ctr, desc, dst, src, nbytes);
}

static inline bool serpent_fpu_begin(bool fpu_enabled, unsigned int nbytes)
{
	/*
	 * Since the 8-way AVX functions are reused, start using the FPU
	 * at 8 parallel blocks.
	 */
	return glue_fpu_begin(SERPENT_BLOCK_SIZE, 8, NULL, fpu_enabled, nbytes);
}

static inline void serpent_fpu_end(bool fpu_enabled)
{
	glue_fpu_end(fpu_enabled);
}

struct crypt_priv {
	struct serpent_ctx *ctx;
	bool fpu_enabled;
};

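/*
 * Callbacks for lrw_crypt().  They encrypt/decrypt the tweaked buffer in
 * place, preferring the 16-way AVX2 path, then the 8-way AVX path, and
 * finally the scalar routine for the remaining blocks.  crypt_priv threads
 * the FPU state across successive calls so the caller can release the FPU
 * once the whole request is done.
 */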
static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
	const unsigned int bsize = SERPENT_BLOCK_SIZE;
	struct crypt_priv *ctx = priv;
	int i;

	ctx->fpu_enabled = serpent_fpu_begin(ctx->fpu_enabled, nbytes);

	if (nbytes >= SERPENT_AVX2_PARALLEL_BLOCKS * bsize) {
		serpent_ecb_enc_16way(ctx->ctx, srcdst, srcdst);
		srcdst += bsize * SERPENT_AVX2_PARALLEL_BLOCKS;
		nbytes -= bsize * SERPENT_AVX2_PARALLEL_BLOCKS;
	}

	while (nbytes >= SERPENT_PARALLEL_BLOCKS * bsize) {
		serpent_ecb_enc_8way_avx(ctx->ctx, srcdst, srcdst);
		srcdst += bsize * SERPENT_PARALLEL_BLOCKS;
		nbytes -= bsize * SERPENT_PARALLEL_BLOCKS;
	}

	for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
		__serpent_encrypt(ctx->ctx, srcdst, srcdst);
}

static void decrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
	const unsigned int bsize = SERPENT_BLOCK_SIZE;
	struct crypt_priv *ctx = priv;
	int i;

	ctx->fpu_enabled = serpent_fpu_begin(ctx->fpu_enabled, nbytes);

	if (nbytes >= SERPENT_AVX2_PARALLEL_BLOCKS * bsize) {
		serpent_ecb_dec_16way(ctx->ctx, srcdst, srcdst);
		srcdst += bsize * SERPENT_AVX2_PARALLEL_BLOCKS;
		nbytes -= bsize * SERPENT_AVX2_PARALLEL_BLOCKS;
	}

	while (nbytes >= SERPENT_PARALLEL_BLOCKS * bsize) {
		serpent_ecb_dec_8way_avx(ctx->ctx, srcdst, srcdst);
		srcdst += bsize * SERPENT_PARALLEL_BLOCKS;
		nbytes -= bsize * SERPENT_PARALLEL_BLOCKS;
	}

	for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
		__serpent_decrypt(ctx->ctx, srcdst, srcdst);
}

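/*
 * lrw_crypt() works on the on-stack tweak buffer below, sized for one full
 * 16-block AVX2 batch, and calls back into the routines above.  Sleeping
 * is disallowed (CRYPTO_TFM_REQ_MAY_SLEEP cleared) because the callbacks
 * run inside an FPU section, which must not be preempted by a sleep.
 */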
static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct serpent_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[SERPENT_AVX2_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->serpent_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = encrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	serpent_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}

static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct serpent_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[SERPENT_AVX2_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->serpent_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = decrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	serpent_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}

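/*
 * For XTS the bulk data goes through the vectorized dispatch tables, while
 * the tweak itself is always computed with the scalar __serpent_encrypt()
 * on the separate tweak key (ctx->tweak_ctx).
 */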
static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct serpent_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&serpent_enc_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(__serpent_encrypt),
				     &ctx->tweak_ctx, &ctx->crypt_ctx);
}

static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct serpent_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&serpent_dec_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(__serpent_encrypt),
				     &ctx->tweak_ctx, &ctx->crypt_ctx);
}

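/*
 * The first five algorithms are synchronous helper ciphers.  They are
 * marked CRYPTO_ALG_INTERNAL (and use the "__" name prefix) so they are
 * never handed out to general users, only to the ablk_helper wrappers
 * below, which defer to cryptd when the FPU is unusable in the caller's
 * context.
 */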
static struct crypto_alg srp_algs[10] = { {
	.cra_name		= "__ecb-serpent-avx2",
	.cra_driver_name	= "__driver-ecb-serpent-avx2",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(srp_algs[0].cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.setkey		= serpent_setkey,
			.encrypt	= ecb_encrypt,
			.decrypt	= ecb_decrypt,
		},
	},
}, {
	.cra_name		= "__cbc-serpent-avx2",
	.cra_driver_name	= "__driver-cbc-serpent-avx2",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(srp_algs[1].cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.setkey		= serpent_setkey,
			.encrypt	= cbc_encrypt,
			.decrypt	= cbc_decrypt,
		},
	},
}, {
	.cra_name		= "__ctr-serpent-avx2",
	.cra_driver_name	= "__driver-ctr-serpent-avx2",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct serpent_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(srp_algs[2].cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= serpent_setkey,
			.encrypt	= ctr_crypt,
			.decrypt	= ctr_crypt,
		},
	},
}, {
	.cra_name		= "__lrw-serpent-avx2",
	.cra_driver_name	= "__driver-lrw-serpent-avx2",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_lrw_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(srp_algs[3].cra_list),
	.cra_exit		= lrw_serpent_exit_tfm,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE +
					  SERPENT_BLOCK_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE +
					  SERPENT_BLOCK_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= lrw_serpent_setkey,
			.encrypt	= lrw_encrypt,
			.decrypt	= lrw_decrypt,
		},
	},
}, {
	.cra_name		= "__xts-serpent-avx2",
	.cra_driver_name	= "__driver-xts-serpent-avx2",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_xts_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(srp_algs[4].cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE * 2,
			.max_keysize	= SERPENT_MAX_KEY_SIZE * 2,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= xts_serpent_setkey,
			.encrypt	= xts_encrypt,
			.decrypt	= xts_decrypt,
		},
	},
}, {
	.cra_name		= "ecb(serpent)",
	.cra_driver_name	= "ecb-serpent-avx2",
	.cra_priority		= 600,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(srp_algs[5].cra_list),
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "cbc(serpent)",
	.cra_driver_name	= "cbc-serpent-avx2",
	.cra_priority		= 600,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(srp_algs[6].cra_list),
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= __ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "ctr(serpent)",
	.cra_driver_name	= "ctr-serpent-avx2",
	.cra_priority		= 600,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(srp_algs[7].cra_list),
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_encrypt,
			.geniv		= "chainiv",
		},
	},
}, {
	.cra_name		= "lrw(serpent)",
	.cra_driver_name	= "lrw-serpent-avx2",
	.cra_priority		= 600,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(srp_algs[8].cra_list),
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE +
					  SERPENT_BLOCK_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE +
					  SERPENT_BLOCK_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "xts(serpent)",
	.cra_driver_name	= "xts-serpent-avx2",
	.cra_priority		= 600,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(srp_algs[9].cra_list),
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE * 2,
			.max_keysize	= SERPENT_MAX_KEY_SIZE * 2,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
} };

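/*
 * Module init: require the AVX2 and OSXSAVE CPUID bits, then verify via
 * XGETBV that the OS actually saves/restores both the SSE (XMM) and AVX
 * (YMM) register state, before registering all ten algorithms.
 */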
static int __init init(void)
{
	u64 xcr0;

	if (!cpu_has_avx2 || !cpu_has_osxsave) {
		pr_info("AVX2 instructions are not detected.\n");
		return -ENODEV;
	}

	xcr0 = xgetbv(XCR_XFEATURE_ENABLED_MASK);
	if ((xcr0 & (XSTATE_SSE | XSTATE_YMM)) != (XSTATE_SSE | XSTATE_YMM)) {
		pr_info("AVX detected but unusable.\n");
		return -ENODEV;
	}

	return crypto_register_algs(srp_algs, ARRAY_SIZE(srp_algs));
}

static void __exit fini(void)
{
	crypto_unregister_algs(srp_algs, ARRAY_SIZE(srp_algs));
}

module_init(init);
module_exit(fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Serpent Cipher Algorithm, AVX2 optimized");
MODULE_ALIAS_CRYPTO("serpent");
MODULE_ALIAS_CRYPTO("serpent-asm");