]> git.proxmox.com Git - mirror_ubuntu-bionic-kernel.git/blame - arch/x86/crypto/serpent_sse2_glue.c
crypto: serpent_sse2 - mark Serpent SSE2 helper ciphers
[mirror_ubuntu-bionic-kernel.git] / arch / x86 / crypto / serpent_sse2_glue.c
CommitLineData
937c30d7
JK
1/*
2 * Glue Code for SSE2 assembler versions of Serpent Cipher
3 *
4 * Copyright (c) 2011 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
5 *
6 * Glue code based on aesni-intel_glue.c by:
7 * Copyright (C) 2008, Intel Corp.
8 * Author: Huang Ying <ying.huang@intel.com>
9 *
10 * CBC & ECB parts based on code (crypto/cbc.c,ecb.c) by:
11 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
12 * CTR part based on code (crypto/ctr.c) by:
13 * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
14 *
15 * This program is free software; you can redistribute it and/or modify
16 * it under the terms of the GNU General Public License as published by
17 * the Free Software Foundation; either version 2 of the License, or
18 * (at your option) any later version.
19 *
20 * This program is distributed in the hope that it will be useful,
21 * but WITHOUT ANY WARRANTY; without even the implied warranty of
22 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
23 * GNU General Public License for more details.
24 *
25 * You should have received a copy of the GNU General Public License
26 * along with this program; if not, write to the Free Software
27 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
28 * USA
29 *
30 */
31
32#include <linux/module.h>
33#include <linux/hardirq.h>
34#include <linux/types.h>
35#include <linux/crypto.h>
36#include <linux/err.h>
801201aa 37#include <crypto/ablk_helper.h>
937c30d7
JK
38#include <crypto/algapi.h>
39#include <crypto/serpent.h>
40#include <crypto/cryptd.h>
41#include <crypto/b128ops.h>
42#include <crypto/ctr.h>
18482053 43#include <crypto/lrw.h>
5962f8b6 44#include <crypto/xts.h>
d4af0e9d 45#include <asm/crypto/serpent-sse2.h>
596d8750 46#include <asm/crypto/glue_helper.h>
937c30d7 47
e81792fb
JK
48static void serpent_decrypt_cbc_xway(void *ctx, u128 *dst, const u128 *src)
49{
50 u128 ivs[SERPENT_PARALLEL_BLOCKS - 1];
51 unsigned int j;
52
53 for (j = 0; j < SERPENT_PARALLEL_BLOCKS - 1; j++)
54 ivs[j] = src[j];
55
56 serpent_dec_blk_xway(ctx, (u8 *)dst, (u8 *)src);
57
58 for (j = 0; j < SERPENT_PARALLEL_BLOCKS - 1; j++)
59 u128_xor(dst + (j + 1), dst + (j + 1), ivs + j);
60}
61
58990986 62static void serpent_crypt_ctr(void *ctx, u128 *dst, const u128 *src, le128 *iv)
e81792fb
JK
63{
64 be128 ctrblk;
65
58990986
JK
66 le128_to_be128(&ctrblk, iv);
67 le128_inc(iv);
e81792fb
JK
68
69 __serpent_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk);
70 u128_xor(dst, src, (u128 *)&ctrblk);
71}
72
73static void serpent_crypt_ctr_xway(void *ctx, u128 *dst, const u128 *src,
58990986 74 le128 *iv)
e81792fb
JK
75{
76 be128 ctrblks[SERPENT_PARALLEL_BLOCKS];
77 unsigned int i;
78
79 for (i = 0; i < SERPENT_PARALLEL_BLOCKS; i++) {
80 if (dst != src)
81 dst[i] = src[i];
82
58990986
JK
83 le128_to_be128(&ctrblks[i], iv);
84 le128_inc(iv);
e81792fb
JK
85 }
86
87 serpent_enc_blk_xway_xor(ctx, (u8 *)dst, (u8 *)ctrblks);
88}
89
/*
 * ECB-encrypt dispatch table for the common glue code: use the
 * multi-block SSE2 path while a full parallel batch remains, then fall
 * back to the generic one-block cipher for the tail.
 */
static const struct common_glue_ctx serpent_enc = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(serpent_enc_blk_xway) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_encrypt) }
	} }
};
102
/*
 * CTR dispatch table: parallel counter-mode helper for full batches,
 * single-block helper for the tail.
 */
static const struct common_glue_ctx serpent_ctr = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(serpent_crypt_ctr_xway) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(serpent_crypt_ctr) }
	} }
};
115
/*
 * ECB-decrypt dispatch table: mirror of serpent_enc using the decrypt
 * primitives.
 */
static const struct common_glue_ctx serpent_dec = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(serpent_dec_blk_xway) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_decrypt) }
	} }
};
128
/*
 * CBC-decrypt dispatch table: the parallel helper does the multi-block
 * decrypt plus inner chaining; the one-block fallback is the bare
 * cipher (the glue code performs the XOR chaining itself).
 */
static const struct common_glue_ctx serpent_dec_cbc = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(serpent_decrypt_cbc_xway) }
	}, {
		.num_blocks = 1,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(__serpent_decrypt) }
	} }
};
141
/* blkcipher .encrypt for __ecb-serpent-sse2: dispatch via glue table. */
static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&serpent_enc, desc, dst, src, nbytes);
}
147
/* blkcipher .decrypt for __ecb-serpent-sse2: dispatch via glue table. */
static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&serpent_dec, desc, dst, src, nbytes);
}
153
/*
 * blkcipher .encrypt for __cbc-serpent-sse2.  CBC encryption is
 * serial (each block depends on the previous ciphertext), so there is
 * no parallel SSE2 path; the glue code chains the one-block cipher.
 */
static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_encrypt_128bit(GLUE_FUNC_CAST(__serpent_encrypt), desc,
				       dst, src, nbytes);
}
160
/* blkcipher .decrypt for __cbc-serpent-sse2: parallel path available. */
static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_decrypt_128bit(&serpent_dec_cbc, desc, dst, src,
				       nbytes);
}
167
/* Used for both .encrypt and .decrypt: CTR is its own inverse. */
static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		     struct scatterlist *src, unsigned int nbytes)
{
	return glue_ctr_crypt_128bit(&serpent_ctr, desc, dst, src, nbytes);
}
173
/*
 * Enable the FPU for this request when worthwhile (at least one full
 * parallel batch remaining); returns the updated fpu_enabled state,
 * which the caller must keep and eventually pass to serpent_fpu_end().
 */
static inline bool serpent_fpu_begin(bool fpu_enabled, unsigned int nbytes)
{
	return glue_fpu_begin(SERPENT_BLOCK_SIZE, SERPENT_PARALLEL_BLOCKS,
			      NULL, fpu_enabled, nbytes);
}
179
/* Release the FPU if serpent_fpu_begin() enabled it (no-op otherwise). */
static inline void serpent_fpu_end(bool fpu_enabled)
{
	glue_fpu_end(fpu_enabled);
}
184
18482053
JK
/*
 * Per-request state threaded through the LRW/XTS crypt callbacks: the
 * cipher context plus whether this request has already enabled the FPU
 * (so one FPU section can span multiple callback invocations).
 */
struct crypt_priv {
	struct serpent_ctx *ctx;
	bool fpu_enabled;
};
189
190static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
191{
192 const unsigned int bsize = SERPENT_BLOCK_SIZE;
193 struct crypt_priv *ctx = priv;
194 int i;
195
196 ctx->fpu_enabled = serpent_fpu_begin(ctx->fpu_enabled, nbytes);
197
198 if (nbytes == bsize * SERPENT_PARALLEL_BLOCKS) {
199 serpent_enc_blk_xway(ctx->ctx, srcdst, srcdst);
200 return;
201 }
202
203 for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
204 __serpent_encrypt(ctx->ctx, srcdst, srcdst);
205}
206
207static void decrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
208{
209 const unsigned int bsize = SERPENT_BLOCK_SIZE;
210 struct crypt_priv *ctx = priv;
211 int i;
212
213 ctx->fpu_enabled = serpent_fpu_begin(ctx->fpu_enabled, nbytes);
214
215 if (nbytes == bsize * SERPENT_PARALLEL_BLOCKS) {
216 serpent_dec_blk_xway(ctx->ctx, srcdst, srcdst);
217 return;
218 }
219
220 for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
221 __serpent_decrypt(ctx->ctx, srcdst, srcdst);
222}
223
/* LRW transform context: tweak multiplication table + cipher key. */
struct serpent_lrw_ctx {
	struct lrw_table_ctx lrw_table;
	struct serpent_ctx serpent_ctx;
};
228
/*
 * LRW setkey: the final SERPENT_BLOCK_SIZE bytes of the key material
 * are the LRW tweak key; everything before that keys the cipher.
 * (min/max_keysize in the registration already guarantee keylen is
 * large enough for the split.)
 */
static int lrw_serpent_setkey(struct crypto_tfm *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct serpent_lrw_ctx *ctx = crypto_tfm_ctx(tfm);
	int err;

	err = __serpent_setkey(&ctx->serpent_ctx, key, keylen -
							SERPENT_BLOCK_SIZE);
	if (err)
		return err;

	return lrw_init_table(&ctx->lrw_table, key + keylen -
						SERPENT_BLOCK_SIZE);
}
243
/*
 * blkcipher .encrypt for lrw(serpent): delegate the LRW tweak
 * arithmetic to the generic lrw_crypt() and supply encrypt_callback()
 * for the actual block work.  buf gives lrw_crypt() scratch space for
 * one full parallel batch.  MAY_SLEEP is cleared because the callback
 * may run inside the FPU section, where sleeping is not allowed.
 */
static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct serpent_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[SERPENT_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->serpent_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = encrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	/* End the FPU section the callbacks may have opened. */
	serpent_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}
269
/*
 * blkcipher .decrypt for lrw(serpent): identical to lrw_encrypt()
 * except that decrypt_callback() does the block work.
 */
static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct serpent_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[SERPENT_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->serpent_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = decrypt_callback,
	};
	int ret;

	/* Callbacks may hold the FPU; sleeping is not allowed there. */
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	serpent_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}
295
/* .cra_exit for lrw(serpent): release the LRW table state. */
static void lrw_exit_tfm(struct crypto_tfm *tfm)
{
	struct serpent_lrw_ctx *ctx = crypto_tfm_ctx(tfm);

	lrw_free_table(&ctx->lrw_table);
}
302
5962f8b6
JK
/* XTS transform context: one key schedule for the tweak, one for data. */
struct serpent_xts_ctx {
	struct serpent_ctx tweak_ctx;
	struct serpent_ctx crypt_ctx;
};
307
/*
 * XTS setkey: split the supplied key into two equal halves — data key
 * first, tweak key second — and expand each into its own schedule.
 * Rejects odd key lengths with CRYPTO_TFM_RES_BAD_KEY_LEN.
 */
static int xts_serpent_setkey(struct crypto_tfm *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct serpent_xts_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;
	int err;

	/* key consists of keys of equal size concatenated, therefore
	 * the length must be even
	 */
	if (keylen % 2) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	/* first half of xts-key is for crypt */
	err = __serpent_setkey(&ctx->crypt_ctx, key, keylen / 2);
	if (err)
		return err;

	/* second half of xts-key is for tweak */
	return __serpent_setkey(&ctx->tweak_ctx, key + keylen / 2, keylen / 2);
}
331
/*
 * blkcipher .encrypt for xts(serpent): generic xts_crypt() computes
 * the tweaks (encrypting them with tweak_ctx via tweak_fn) and calls
 * encrypt_callback() for the data blocks.  buf is scratch space for
 * one full parallel batch; MAY_SLEEP is cleared because the callback
 * may run inside the FPU section, where sleeping is not allowed.
 */
static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct serpent_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[SERPENT_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->crypt_ctx,
		.fpu_enabled = false,
	};
	struct xts_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.tweak_ctx = &ctx->tweak_ctx,
		.tweak_fn = XTS_TWEAK_CAST(__serpent_encrypt),
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = encrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = xts_crypt(desc, dst, src, nbytes, &req);
	/* End the FPU section the callbacks may have opened. */
	serpent_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}
358
/*
 * blkcipher .decrypt for xts(serpent): identical to xts_encrypt()
 * except decrypt_callback() does the data-block work (the tweak is
 * always computed with the *encrypt* direction, per XTS).
 */
static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct serpent_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[SERPENT_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->crypt_ctx,
		.fpu_enabled = false,
	};
	struct xts_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.tweak_ctx = &ctx->tweak_ctx,
		.tweak_fn = XTS_TWEAK_CAST(__serpent_encrypt),
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = decrypt_callback,
	};
	int ret;

	/* Callbacks may hold the FPU; sleeping is not allowed there. */
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = xts_crypt(desc, dst, src, nbytes, &req);
	serpent_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}
385
35474c3b
JK
/*
 * Ten algorithm registrations: five synchronous helper ciphers marked
 * CRYPTO_ALG_INTERNAL (not directly selectable by users) followed by
 * five public async wrappers that reach them through the ablk/cryptd
 * helper machinery.
 */
static struct crypto_alg serpent_algs[10] = { {
	/* Internal synchronous ECB helper. */
	.cra_name = "__ecb-serpent-sse2",
	.cra_driver_name = "__driver-ecb-serpent-sse2",
	.cra_priority = 0,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
		     CRYPTO_ALG_INTERNAL,
	.cra_blocksize = SERPENT_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct serpent_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = SERPENT_MIN_KEY_SIZE,
			.max_keysize = SERPENT_MAX_KEY_SIZE,
			.setkey = serpent_setkey,
			.encrypt = ecb_encrypt,
			.decrypt = ecb_decrypt,
		},
	},
}, {
	/* Internal synchronous CBC helper. */
	.cra_name = "__cbc-serpent-sse2",
	.cra_driver_name = "__driver-cbc-serpent-sse2",
	.cra_priority = 0,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
		     CRYPTO_ALG_INTERNAL,
	.cra_blocksize = SERPENT_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct serpent_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = SERPENT_MIN_KEY_SIZE,
			.max_keysize = SERPENT_MAX_KEY_SIZE,
			.setkey = serpent_setkey,
			.encrypt = cbc_encrypt,
			.decrypt = cbc_decrypt,
		},
	},
}, {
	/* Internal synchronous CTR helper; blocksize 1 (stream mode). */
	.cra_name = "__ctr-serpent-sse2",
	.cra_driver_name = "__driver-ctr-serpent-sse2",
	.cra_priority = 0,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
		     CRYPTO_ALG_INTERNAL,
	.cra_blocksize = 1,
	.cra_ctxsize = sizeof(struct serpent_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = SERPENT_MIN_KEY_SIZE,
			.max_keysize = SERPENT_MAX_KEY_SIZE,
			.ivsize = SERPENT_BLOCK_SIZE,
			.setkey = serpent_setkey,
			/* CTR is its own inverse: same op both ways. */
			.encrypt = ctr_crypt,
			.decrypt = ctr_crypt,
		},
	},
}, {
	/* Internal synchronous LRW helper; key carries an extra
	 * block-sized tweak key (see lrw_serpent_setkey). */
	.cra_name = "__lrw-serpent-sse2",
	.cra_driver_name = "__driver-lrw-serpent-sse2",
	.cra_priority = 0,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
		     CRYPTO_ALG_INTERNAL,
	.cra_blocksize = SERPENT_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct serpent_lrw_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_exit = lrw_exit_tfm,
	.cra_u = {
		.blkcipher = {
			.min_keysize = SERPENT_MIN_KEY_SIZE +
				       SERPENT_BLOCK_SIZE,
			.max_keysize = SERPENT_MAX_KEY_SIZE +
				       SERPENT_BLOCK_SIZE,
			.ivsize = SERPENT_BLOCK_SIZE,
			.setkey = lrw_serpent_setkey,
			.encrypt = lrw_encrypt,
			.decrypt = lrw_decrypt,
		},
	},
}, {
	/* Internal synchronous XTS helper; doubled key size (data key
	 * + tweak key, see xts_serpent_setkey). */
	.cra_name = "__xts-serpent-sse2",
	.cra_driver_name = "__driver-xts-serpent-sse2",
	.cra_priority = 0,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
		     CRYPTO_ALG_INTERNAL,
	.cra_blocksize = SERPENT_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct serpent_xts_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = SERPENT_MIN_KEY_SIZE * 2,
			.max_keysize = SERPENT_MAX_KEY_SIZE * 2,
			.ivsize = SERPENT_BLOCK_SIZE,
			.setkey = xts_serpent_setkey,
			.encrypt = xts_encrypt,
			.decrypt = xts_decrypt,
		},
	},
}, {
	/* Public async wrapper around the internal ECB helper. */
	.cra_name = "ecb(serpent)",
	.cra_driver_name = "ecb-serpent-sse2",
	.cra_priority = 400,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize = SERPENT_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct async_helper_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = ablk_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = SERPENT_MIN_KEY_SIZE,
			.max_keysize = SERPENT_MAX_KEY_SIZE,
			.setkey = ablk_set_key,
			.encrypt = ablk_encrypt,
			.decrypt = ablk_decrypt,
		},
	},
}, {
	/* Public async CBC wrapper. */
	.cra_name = "cbc(serpent)",
	.cra_driver_name = "cbc-serpent-sse2",
	.cra_priority = 400,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize = SERPENT_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct async_helper_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = ablk_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = SERPENT_MIN_KEY_SIZE,
			.max_keysize = SERPENT_MAX_KEY_SIZE,
			.ivsize = SERPENT_BLOCK_SIZE,
			/* NOTE(review): encrypt uses the synchronous
			 * __ablk_encrypt path, decrypt the async one —
			 * presumably intentional (matches other x86
			 * glue drivers); confirm against ablk_helper. */
			.setkey = ablk_set_key,
			.encrypt = __ablk_encrypt,
			.decrypt = ablk_decrypt,
		},
	},
}, {
	/* Public async CTR wrapper. */
	.cra_name = "ctr(serpent)",
	.cra_driver_name = "ctr-serpent-sse2",
	.cra_priority = 400,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize = 1,
	.cra_ctxsize = sizeof(struct async_helper_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = ablk_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = SERPENT_MIN_KEY_SIZE,
			.max_keysize = SERPENT_MAX_KEY_SIZE,
			.ivsize = SERPENT_BLOCK_SIZE,
			.setkey = ablk_set_key,
			/* CTR decrypt == encrypt, hence ablk_encrypt
			 * for both directions. */
			.encrypt = ablk_encrypt,
			.decrypt = ablk_encrypt,
			.geniv = "chainiv",
		},
	},
}, {
	/* Public async LRW wrapper. */
	.cra_name = "lrw(serpent)",
	.cra_driver_name = "lrw-serpent-sse2",
	.cra_priority = 400,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize = SERPENT_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct async_helper_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = ablk_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = SERPENT_MIN_KEY_SIZE +
				       SERPENT_BLOCK_SIZE,
			.max_keysize = SERPENT_MAX_KEY_SIZE +
				       SERPENT_BLOCK_SIZE,
			.ivsize = SERPENT_BLOCK_SIZE,
			.setkey = ablk_set_key,
			.encrypt = ablk_encrypt,
			.decrypt = ablk_decrypt,
		},
	},
}, {
	/* Public async XTS wrapper. */
	.cra_name = "xts(serpent)",
	.cra_driver_name = "xts-serpent-sse2",
	.cra_priority = 400,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize = SERPENT_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct async_helper_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = ablk_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = SERPENT_MIN_KEY_SIZE * 2,
			.max_keysize = SERPENT_MAX_KEY_SIZE * 2,
			.ivsize = SERPENT_BLOCK_SIZE,
			.setkey = ablk_set_key,
			.encrypt = ablk_encrypt,
			.decrypt = ablk_decrypt,
		},
	},
} };
5962f8b6 605
937c30d7
JK
/*
 * Module init: bail out with -ENODEV when the CPU lacks SSE2 (xmm2),
 * so a generic serpent implementation can be used instead; otherwise
 * register all ten algorithms at once.
 */
static int __init serpent_sse2_init(void)
{
	if (!cpu_has_xmm2) {
		printk(KERN_INFO "SSE2 instructions are not detected.\n");
		return -ENODEV;
	}

	return crypto_register_algs(serpent_algs, ARRAY_SIZE(serpent_algs));
}
615
/* Module exit: unregister everything serpent_sse2_init() registered. */
static void __exit serpent_sse2_exit(void)
{
	crypto_unregister_algs(serpent_algs, ARRAY_SIZE(serpent_algs));
}
620
module_init(serpent_sse2_init);
module_exit(serpent_sse2_exit);

MODULE_DESCRIPTION("Serpent Cipher Algorithm, SSE2 optimized");
MODULE_LICENSE("GPL");
/* Allow module autoloading when the "serpent" algorithm is requested. */
MODULE_ALIAS_CRYPTO("serpent");