/*
 * Source: arch/s390/crypto/des_s390.c (mirror_ubuntu-artful-kernel)
 * Relevant commit: "s390/crypto: Don't panic after crypto instruction failures"
 */
1 /*
2 * Cryptographic API.
3 *
4 * s390 implementation of the DES Cipher Algorithm.
5 *
6 * Copyright IBM Corp. 2003, 2011
7 * Author(s): Thomas Spatzier
8 * Jan Glauber (jan.glauber@de.ibm.com)
9 *
10 * This program is free software; you can redistribute it and/or modify
11 * it under the terms of the GNU General Public License as published by
12 * the Free Software Foundation; either version 2 of the License, or
13 * (at your option) any later version.
14 *
15 */
16
17 #include <linux/init.h>
18 #include <linux/module.h>
19 #include <linux/crypto.h>
20 #include <crypto/algapi.h>
21 #include <crypto/des.h>
22
23 #include "crypt_s390.h"
24
#define DES3_KEY_SIZE	(3 * DES_KEY_SIZE)

/*
 * One page used by the CTR paths to hold a run of pre-computed counter
 * blocks; allocated in des_s390_init() when KMCTR is available.
 * NOTE(review): this buffer is global and no lock is visible in this
 * file — confirm concurrent CTR requests cannot race on it.
 */
static u8 *ctrblk;

/* Per-tfm context, shared by the DES and 3DES algorithms. */
struct s390_des_ctx {
	u8 iv[DES_BLOCK_SIZE];	/* CBC chaining value */
	u8 key[DES3_KEY_SIZE];	/* 8 bytes (DES) or 24 bytes (3DES) of key */
};
33
34 static int des_setkey(struct crypto_tfm *tfm, const u8 *key,
35 unsigned int key_len)
36 {
37 struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);
38 u32 *flags = &tfm->crt_flags;
39 u32 tmp[DES_EXPKEY_WORDS];
40
41 /* check for weak keys */
42 if (!des_ekey(tmp, key) && (*flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
43 *flags |= CRYPTO_TFM_RES_WEAK_KEY;
44 return -EINVAL;
45 }
46
47 memcpy(ctx->key, key, key_len);
48 return 0;
49 }
50
51 static void des_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
52 {
53 struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);
54
55 crypt_s390_km(KM_DEA_ENCRYPT, ctx->key, out, in, DES_BLOCK_SIZE);
56 }
57
58 static void des_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
59 {
60 struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);
61
62 crypt_s390_km(KM_DEA_DECRYPT, ctx->key, out, in, DES_BLOCK_SIZE);
63 }
64
/* Registration for the plain single-block "des" cipher. */
static struct crypto_alg des_alg = {
	.cra_name		=	"des",
	.cra_driver_name	=	"des-s390",
	.cra_priority		=	CRYPT_S390_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		=	DES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_des_ctx),
	.cra_module		=	THIS_MODULE,
	.cra_u			=	{
		.cipher = {
			.cia_min_keysize	=	DES_KEY_SIZE,
			.cia_max_keysize	=	DES_KEY_SIZE,
			.cia_setkey		=	des_setkey,
			.cia_encrypt		=	des_encrypt,
			.cia_decrypt		=	des_decrypt,
		}
	}
};
83
/*
 * Walk the scatterlists and run the CPACF KM instruction (func) over all
 * complete DES blocks with the given raw key (ECB, no chaining).
 *
 * Returns 0 on success, -EIO when the crypto instruction failed or
 * processed fewer bytes than requested, or a blkcipher walk error.
 */
static int ecb_desall_crypt(struct blkcipher_desc *desc, long func,
			    u8 *key, struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes;

	while ((nbytes = walk->nbytes)) {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(DES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_km(func, key, out, in, n);
		/* report instruction failure instead of panicking */
		if (ret < 0 || ret != n)
			return -EIO;

		/* leftover partial block, if any, stays for the next round */
		nbytes &= DES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	}

	return ret;
}
106
/*
 * Walk the scatterlists and run the CPACF KMC instruction (func) over all
 * complete DES blocks, chaining through iv (CBC mode).
 *
 * iv points at the context's iv buffer; the walk's IV is copied in before
 * processing and the updated chaining value is copied back out on success.
 * NOTE(review): the context iv buffer is shared by all requests on the
 * same tfm — confirm the crypto layer serializes requests per tfm.
 *
 * Returns 0 on success, -EIO on a crypto instruction failure, or a
 * blkcipher walk error.
 */
static int cbc_desall_crypt(struct blkcipher_desc *desc, long func,
			    u8 *iv, struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes = walk->nbytes;

	if (!nbytes)
		goto out;

	memcpy(iv, walk->iv, DES_BLOCK_SIZE);
	do {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(DES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_kmc(func, iv, out, in, n);
		/* report instruction failure instead of panicking */
		if (ret < 0 || ret != n)
			return -EIO;

		nbytes &= DES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	} while ((nbytes = walk->nbytes));
	memcpy(walk->iv, iv, DES_BLOCK_SIZE);

out:
	return ret;
}
135
136 static int ecb_des_encrypt(struct blkcipher_desc *desc,
137 struct scatterlist *dst, struct scatterlist *src,
138 unsigned int nbytes)
139 {
140 struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
141 struct blkcipher_walk walk;
142
143 blkcipher_walk_init(&walk, dst, src, nbytes);
144 return ecb_desall_crypt(desc, KM_DEA_ENCRYPT, ctx->key, &walk);
145 }
146
147 static int ecb_des_decrypt(struct blkcipher_desc *desc,
148 struct scatterlist *dst, struct scatterlist *src,
149 unsigned int nbytes)
150 {
151 struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
152 struct blkcipher_walk walk;
153
154 blkcipher_walk_init(&walk, dst, src, nbytes);
155 return ecb_desall_crypt(desc, KM_DEA_DECRYPT, ctx->key, &walk);
156 }
157
/* Registration for the ecb(des) blkcipher. */
static struct crypto_alg ecb_des_alg = {
	.cra_name		=	"ecb(des)",
	.cra_driver_name	=	"ecb-des-s390",
	.cra_priority		=	CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		=	DES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_des_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize	=	DES_KEY_SIZE,
			.max_keysize	=	DES_KEY_SIZE,
			.setkey		=	des_setkey,
			.encrypt	=	ecb_des_encrypt,
			.decrypt	=	ecb_des_decrypt,
		}
	}
};
177
178 static int cbc_des_encrypt(struct blkcipher_desc *desc,
179 struct scatterlist *dst, struct scatterlist *src,
180 unsigned int nbytes)
181 {
182 struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
183 struct blkcipher_walk walk;
184
185 blkcipher_walk_init(&walk, dst, src, nbytes);
186 return cbc_desall_crypt(desc, KMC_DEA_ENCRYPT, ctx->iv, &walk);
187 }
188
189 static int cbc_des_decrypt(struct blkcipher_desc *desc,
190 struct scatterlist *dst, struct scatterlist *src,
191 unsigned int nbytes)
192 {
193 struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
194 struct blkcipher_walk walk;
195
196 blkcipher_walk_init(&walk, dst, src, nbytes);
197 return cbc_desall_crypt(desc, KMC_DEA_DECRYPT, ctx->iv, &walk);
198 }
199
/* Registration for the cbc(des) blkcipher. */
static struct crypto_alg cbc_des_alg = {
	.cra_name		=	"cbc(des)",
	.cra_driver_name	=	"cbc-des-s390",
	.cra_priority		=	CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		=	DES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_des_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize	=	DES_KEY_SIZE,
			.max_keysize	=	DES_KEY_SIZE,
			.ivsize		=	DES_BLOCK_SIZE,
			.setkey		=	des_setkey,
			.encrypt	=	cbc_des_encrypt,
			.decrypt	=	cbc_des_decrypt,
		}
	}
};
220
/*
 * RFC2451:
 *
 *   For DES-EDE3, there is no known need to reject weak or
 *   complementation keys.  Any weakness is obviated by the use of
 *   multiple keys.
 *
 *   However, if the first two or last two independent 64-bit keys are
 *   equal (k1 == k2 or k2 == k3), then the DES3 operation is simply the
 *   same as DES.  Implementers MUST reject keys that exhibit this
 *   property.
 *
 */
/*
 * Set a 24-byte DES-EDE3 key.
 *
 * NOTE(review): the k1 == k2 / k2 == k3 degenerate-key rejection below is
 * only applied when CRYPTO_TFM_REQ_WEAK_KEY is set, while the RFC text
 * above says such keys MUST be rejected — confirm this matches the crypto
 * API's expected weak-key semantics.
 */
static int des3_setkey(struct crypto_tfm *tfm, const u8 *key,
		       unsigned int key_len)
{
	struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;

	/* reject if k1 == k2 or k2 == k3 (3DES degrades to single DES) */
	if (!(memcmp(key, &key[DES_KEY_SIZE], DES_KEY_SIZE) &&
	    memcmp(&key[DES_KEY_SIZE], &key[DES_KEY_SIZE * 2],
		   DES_KEY_SIZE)) &&
	    (*flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
		*flags |= CRYPTO_TFM_RES_WEAK_KEY;
		return -EINVAL;
	}
	memcpy(ctx->key, key, key_len);
	return 0;
}
250
251 static void des3_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
252 {
253 struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);
254
255 crypt_s390_km(KM_TDEA_192_ENCRYPT, ctx->key, dst, src, DES_BLOCK_SIZE);
256 }
257
258 static void des3_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
259 {
260 struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);
261
262 crypt_s390_km(KM_TDEA_192_DECRYPT, ctx->key, dst, src, DES_BLOCK_SIZE);
263 }
264
/* Registration for the plain single-block "des3_ede" cipher. */
static struct crypto_alg des3_alg = {
	.cra_name		=	"des3_ede",
	.cra_driver_name	=	"des3_ede-s390",
	.cra_priority		=	CRYPT_S390_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		=	DES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_des_ctx),
	.cra_module		=	THIS_MODULE,
	.cra_u			=	{
		.cipher = {
			.cia_min_keysize	=	DES3_KEY_SIZE,
			.cia_max_keysize	=	DES3_KEY_SIZE,
			.cia_setkey		=	des3_setkey,
			.cia_encrypt		=	des3_encrypt,
			.cia_decrypt		=	des3_decrypt,
		}
	}
};
283
284 static int ecb_des3_encrypt(struct blkcipher_desc *desc,
285 struct scatterlist *dst, struct scatterlist *src,
286 unsigned int nbytes)
287 {
288 struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
289 struct blkcipher_walk walk;
290
291 blkcipher_walk_init(&walk, dst, src, nbytes);
292 return ecb_desall_crypt(desc, KM_TDEA_192_ENCRYPT, ctx->key, &walk);
293 }
294
295 static int ecb_des3_decrypt(struct blkcipher_desc *desc,
296 struct scatterlist *dst, struct scatterlist *src,
297 unsigned int nbytes)
298 {
299 struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
300 struct blkcipher_walk walk;
301
302 blkcipher_walk_init(&walk, dst, src, nbytes);
303 return ecb_desall_crypt(desc, KM_TDEA_192_DECRYPT, ctx->key, &walk);
304 }
305
/* Registration for the ecb(des3_ede) blkcipher. */
static struct crypto_alg ecb_des3_alg = {
	.cra_name		=	"ecb(des3_ede)",
	.cra_driver_name	=	"ecb-des3_ede-s390",
	.cra_priority		=	CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		=	DES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_des_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize	=	DES3_KEY_SIZE,
			.max_keysize	=	DES3_KEY_SIZE,
			.setkey		=	des3_setkey,
			.encrypt	=	ecb_des3_encrypt,
			.decrypt	=	ecb_des3_decrypt,
		}
	}
};
325
326 static int cbc_des3_encrypt(struct blkcipher_desc *desc,
327 struct scatterlist *dst, struct scatterlist *src,
328 unsigned int nbytes)
329 {
330 struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
331 struct blkcipher_walk walk;
332
333 blkcipher_walk_init(&walk, dst, src, nbytes);
334 return cbc_desall_crypt(desc, KMC_TDEA_192_ENCRYPT, ctx->iv, &walk);
335 }
336
337 static int cbc_des3_decrypt(struct blkcipher_desc *desc,
338 struct scatterlist *dst, struct scatterlist *src,
339 unsigned int nbytes)
340 {
341 struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
342 struct blkcipher_walk walk;
343
344 blkcipher_walk_init(&walk, dst, src, nbytes);
345 return cbc_desall_crypt(desc, KMC_TDEA_192_DECRYPT, ctx->iv, &walk);
346 }
347
/* Registration for the cbc(des3_ede) blkcipher. */
static struct crypto_alg cbc_des3_alg = {
	.cra_name		=	"cbc(des3_ede)",
	.cra_driver_name	=	"cbc-des3_ede-s390",
	.cra_priority		=	CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		=	DES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_des_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize	=	DES3_KEY_SIZE,
			.max_keysize	=	DES3_KEY_SIZE,
			.ivsize		=	DES_BLOCK_SIZE,
			.setkey		=	des3_setkey,
			.encrypt	=	cbc_des3_encrypt,
			.decrypt	=	cbc_des3_decrypt,
		}
	}
};
368
/*
 * CTR mode for DES/3DES using the CPACF KMCTR instruction.
 *
 * The shared page ctrblk is filled with a run of successive counter
 * values so KMCTR can process up to PAGE_SIZE bytes per invocation.
 * NOTE(review): ctrblk is a single global buffer and no lock is taken in
 * this file — confirm concurrent CTR requests cannot race on it.
 *
 * Returns 0 on success, -EIO on a crypto instruction failure, or a
 * blkcipher walk error.
 */
static int ctr_desall_crypt(struct blkcipher_desc *desc, long func,
			    struct s390_des_ctx *ctx, struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt_block(desc, walk, DES_BLOCK_SIZE);
	unsigned int i, n, nbytes;
	u8 buf[DES_BLOCK_SIZE];	/* bounce buffer for the final partial block */
	u8 *out, *in;

	/* start the counter run from the caller's IV */
	memcpy(ctrblk, walk->iv, DES_BLOCK_SIZE);
	while ((nbytes = walk->nbytes) >= DES_BLOCK_SIZE) {
		out = walk->dst.virt.addr;
		in = walk->src.virt.addr;
		while (nbytes >= DES_BLOCK_SIZE) {
			/* align to block size, max. PAGE_SIZE */
			n = (nbytes > PAGE_SIZE) ? PAGE_SIZE :
				nbytes & ~(DES_BLOCK_SIZE - 1);
			/* fill ctrblk with n/8 successive counter values */
			for (i = DES_BLOCK_SIZE; i < n; i += DES_BLOCK_SIZE) {
				memcpy(ctrblk + i, ctrblk + i - DES_BLOCK_SIZE,
				       DES_BLOCK_SIZE);
				crypto_inc(ctrblk + i, DES_BLOCK_SIZE);
			}
			ret = crypt_s390_kmctr(func, ctx->key, out, in, n, ctrblk);
			/* report instruction failure instead of panicking */
			if (ret < 0 || ret != n)
				return -EIO;
			if (n > DES_BLOCK_SIZE)
				/* carry the last used counter back to slot 0 */
				memcpy(ctrblk, ctrblk + n - DES_BLOCK_SIZE,
				       DES_BLOCK_SIZE);
			crypto_inc(ctrblk, DES_BLOCK_SIZE);
			out += n;
			in += n;
			nbytes -= n;
		}
		ret = blkcipher_walk_done(desc, walk, nbytes);
	}

	/* final block may be < DES_BLOCK_SIZE, copy only nbytes */
	if (nbytes) {
		out = walk->dst.virt.addr;
		in = walk->src.virt.addr;
		ret = crypt_s390_kmctr(func, ctx->key, buf, in,
				       DES_BLOCK_SIZE, ctrblk);
		if (ret < 0 || ret != DES_BLOCK_SIZE)
			return -EIO;
		memcpy(out, buf, nbytes);
		crypto_inc(ctrblk, DES_BLOCK_SIZE);
		ret = blkcipher_walk_done(desc, walk, 0);
	}
	/* hand the updated counter back to the caller */
	memcpy(walk->iv, ctrblk, DES_BLOCK_SIZE);
	return ret;
}
419
420 static int ctr_des_encrypt(struct blkcipher_desc *desc,
421 struct scatterlist *dst, struct scatterlist *src,
422 unsigned int nbytes)
423 {
424 struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
425 struct blkcipher_walk walk;
426
427 blkcipher_walk_init(&walk, dst, src, nbytes);
428 return ctr_desall_crypt(desc, KMCTR_DEA_ENCRYPT, ctx, &walk);
429 }
430
431 static int ctr_des_decrypt(struct blkcipher_desc *desc,
432 struct scatterlist *dst, struct scatterlist *src,
433 unsigned int nbytes)
434 {
435 struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
436 struct blkcipher_walk walk;
437
438 blkcipher_walk_init(&walk, dst, src, nbytes);
439 return ctr_desall_crypt(desc, KMCTR_DEA_DECRYPT, ctx, &walk);
440 }
441
/* Registration for the ctr(des) blkcipher (stream mode: blocksize 1). */
static struct crypto_alg ctr_des_alg = {
	.cra_name		=	"ctr(des)",
	.cra_driver_name	=	"ctr-des-s390",
	.cra_priority		=	CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		=	1,
	.cra_ctxsize		=	sizeof(struct s390_des_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize	=	DES_KEY_SIZE,
			.max_keysize	=	DES_KEY_SIZE,
			.ivsize		=	DES_BLOCK_SIZE,
			.setkey		=	des_setkey,
			.encrypt	=	ctr_des_encrypt,
			.decrypt	=	ctr_des_decrypt,
		}
	}
};
462
463 static int ctr_des3_encrypt(struct blkcipher_desc *desc,
464 struct scatterlist *dst, struct scatterlist *src,
465 unsigned int nbytes)
466 {
467 struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
468 struct blkcipher_walk walk;
469
470 blkcipher_walk_init(&walk, dst, src, nbytes);
471 return ctr_desall_crypt(desc, KMCTR_TDEA_192_ENCRYPT, ctx, &walk);
472 }
473
474 static int ctr_des3_decrypt(struct blkcipher_desc *desc,
475 struct scatterlist *dst, struct scatterlist *src,
476 unsigned int nbytes)
477 {
478 struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
479 struct blkcipher_walk walk;
480
481 blkcipher_walk_init(&walk, dst, src, nbytes);
482 return ctr_desall_crypt(desc, KMCTR_TDEA_192_DECRYPT, ctx, &walk);
483 }
484
/* Registration for the ctr(des3_ede) blkcipher (stream mode: blocksize 1). */
static struct crypto_alg ctr_des3_alg = {
	.cra_name		=	"ctr(des3_ede)",
	.cra_driver_name	=	"ctr-des3_ede-s390",
	.cra_priority		=	CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		=	1,
	.cra_ctxsize		=	sizeof(struct s390_des_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize	=	DES3_KEY_SIZE,
			.max_keysize	=	DES3_KEY_SIZE,
			.ivsize		=	DES_BLOCK_SIZE,
			.setkey		=	des3_setkey,
			.encrypt	=	ctr_des3_encrypt,
			.decrypt	=	ctr_des3_decrypt,
		}
	}
};
505
/*
 * Module init: probe the CPACF facilities and register the algorithms.
 *
 * The base KM/KMC algorithms (des, ecb/cbc for des and des3_ede) are
 * mandatory; without them the module bails with -EOPNOTSUPP.  The CTR
 * algorithms are registered only when KMCTR (MSA4) is available, in
 * which case a page is also allocated for the counter-block buffer.
 *
 * On any registration failure, everything registered so far is unwound
 * via the goto chain (labels are in exact reverse registration order)
 * and the error is returned.
 */
static int __init des_s390_init(void)
{
	int ret;

	if (!crypt_s390_func_available(KM_DEA_ENCRYPT, CRYPT_S390_MSA) ||
	    !crypt_s390_func_available(KM_TDEA_192_ENCRYPT, CRYPT_S390_MSA))
		return -EOPNOTSUPP;

	ret = crypto_register_alg(&des_alg);
	if (ret)
		goto des_err;
	ret = crypto_register_alg(&ecb_des_alg);
	if (ret)
		goto ecb_des_err;
	ret = crypto_register_alg(&cbc_des_alg);
	if (ret)
		goto cbc_des_err;
	ret = crypto_register_alg(&des3_alg);
	if (ret)
		goto des3_err;
	ret = crypto_register_alg(&ecb_des3_alg);
	if (ret)
		goto ecb_des3_err;
	ret = crypto_register_alg(&cbc_des3_alg);
	if (ret)
		goto cbc_des3_err;

	/* CTR support is optional: it needs the MSA4 KMCTR instruction */
	if (crypt_s390_func_available(KMCTR_DEA_ENCRYPT,
				      CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
	    crypt_s390_func_available(KMCTR_TDEA_192_ENCRYPT,
				      CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
		ret = crypto_register_alg(&ctr_des_alg);
		if (ret)
			goto ctr_des_err;
		ret = crypto_register_alg(&ctr_des3_alg);
		if (ret)
			goto ctr_des3_err;
		/* shared counter-block page used by ctr_desall_crypt() */
		ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
		if (!ctrblk) {
			ret = -ENOMEM;
			goto ctr_mem_err;
		}
	}
out:
	return ret;

	/* unwind in reverse registration order */
ctr_mem_err:
	crypto_unregister_alg(&ctr_des3_alg);
ctr_des3_err:
	crypto_unregister_alg(&ctr_des_alg);
ctr_des_err:
	crypto_unregister_alg(&cbc_des3_alg);
cbc_des3_err:
	crypto_unregister_alg(&ecb_des3_alg);
ecb_des3_err:
	crypto_unregister_alg(&des3_alg);
des3_err:
	crypto_unregister_alg(&cbc_des_alg);
cbc_des_err:
	crypto_unregister_alg(&ecb_des_alg);
ecb_des_err:
	crypto_unregister_alg(&des_alg);
des_err:
	goto out;
}
571
/*
 * Module exit: unregister everything des_s390_init() registered.
 *
 * A non-NULL ctrblk indicates the optional CTR algorithms were
 * registered (and the counter page allocated), so they are torn down
 * first.
 */
static void __exit des_s390_exit(void)
{
	if (ctrblk) {
		crypto_unregister_alg(&ctr_des_alg);
		crypto_unregister_alg(&ctr_des3_alg);
		free_page((unsigned long) ctrblk);
	}
	crypto_unregister_alg(&cbc_des3_alg);
	crypto_unregister_alg(&ecb_des3_alg);
	crypto_unregister_alg(&des3_alg);
	crypto_unregister_alg(&cbc_des_alg);
	crypto_unregister_alg(&ecb_des_alg);
	crypto_unregister_alg(&des_alg);
}
586
module_init(des_s390_init);
module_exit(des_s390_exit);

/* allow auto-loading by the generic algorithm names */
MODULE_ALIAS("des");
MODULE_ALIAS("des3_ede");

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("DES & Triple DES EDE Cipher Algorithms");