/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/cryptouser.h>
#include <net/netlink.h>

#include "internal.h"

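/*
 * Overview: the shash interface computes digests synchronously over
 * linear buffers.  A minimal usage sketch follows (illustrative only;
 * "sha256" merely names some registered shash algorithm, and error
 * handling is trimmed):
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sha256", 0, 0);
 *	SHASH_DESC_ON_STACK(desc, tfm);
 *	u8 out[32];
 *
 *	desc->tfm = tfm;
 *	desc->flags = 0;
 *	crypto_shash_digest(desc, data, len, out);
 *	crypto_free_shash(tfm);
 */
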
static const struct crypto_type crypto_shash_type;

int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
		    unsigned int keylen)
{
	return -ENOSYS;
}
EXPORT_SYMBOL_GPL(shash_no_setkey);

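/*
 * When the caller's key buffer does not satisfy the algorithm's
 * alignmask, bounce the key through a kmalloc'd buffer aligned to
 * alignmask + 1, then wipe the copy with kzfree() so no key material
 * lingers on the heap.
 */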
static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned long absize;
	u8 *buffer, *alignbuffer;
	int err;

	absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
	buffer = kmalloc(absize, GFP_KERNEL);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	err = shash->setkey(tfm, alignbuffer, keylen);
	kzfree(buffer);
	return err;
}

int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)key & alignmask)
		return shash_setkey_unaligned(tfm, key, keylen);

	return shash->setkey(tfm, key, keylen);
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);

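/*
 * The update/final/finup/digest entry points below share one pattern:
 * call straight into the algorithm when the caller's pointers already
 * satisfy the alignmask, otherwise stage the data through a suitably
 * aligned on-stack buffer and zero that buffer afterwards.
 */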
static inline unsigned int shash_align_buffer_size(unsigned len,
						   unsigned long mask)
{
	typedef u8 __attribute__ ((aligned)) u8_aligned;
	return len + (mask & ~(__alignof__(u8_aligned) - 1));
}

static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned int unaligned_len = alignmask + 1 -
				     ((unsigned long)data & alignmask);
	u8 ubuf[shash_align_buffer_size(unaligned_len, alignmask)]
		__attribute__ ((aligned));
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (unaligned_len > len)
		unaligned_len = len;

	memcpy(buf, data, unaligned_len);
	err = shash->update(desc, buf, unaligned_len);
	memset(buf, 0, unaligned_len);

	return err ?:
	       shash->update(desc, data + unaligned_len, len - unaligned_len);
}

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)data & alignmask)
		return shash_update_unaligned(desc, data, len);

	return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	u8 ubuf[shash_align_buffer_size(ds, alignmask)]
		__attribute__ ((aligned));
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	err = shash->final(desc, buf);
	if (err)
		goto out;

	memcpy(out, buf, ds);

out:
	memset(buf, 0, ds);
	return err;
}

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)out & alignmask)
		return shash_final_unaligned(desc, out);

	return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);

static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	return crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_finup_unaligned(desc, data, len, out);

	return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len, u8 *out)
{
	return crypto_shash_init(desc) ?:
	       crypto_shash_finup(desc, data, len, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_digest_unaligned(desc, data, len, out);

	return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);

static int shash_default_export(struct shash_desc *desc, void *out)
{
	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_default_import(struct shash_desc *desc, const void *in)
{
	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
	return 0;
}

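/*
 * The shash_async_* helpers expose a synchronous shash algorithm
 * through the asynchronous ahash interface: the ahash tfm context
 * holds a crypto_shash pointer, and each request carries a shash_desc
 * in its request context (sized via crt->reqsize below).
 */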
static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

	return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return crypto_shash_init(desc);
}

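/*
 * Feed a scatterlist-backed ahash request into an shash descriptor by
 * walking it one mapped segment at a time; crypto_hash_walk_done()
 * consumes the error code returned by the preceding update and
 * otherwise yields the next segment length.
 */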
int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

static int shash_async_update(struct ahash_request *req)
{
	return shash_ahash_update(req, ahash_request_ctx(req));
}

static int shash_async_final(struct ahash_request *req)
{
	return crypto_shash_final(ahash_request_ctx(req), req->result);
}

int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

static int shash_async_finup(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return shash_ahash_finup(req, desc);
}

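/*
 * Digest fast path: if the request data lies entirely within a single
 * page of the first scatterlist entry, map it atomically and hash it
 * in one call; otherwise fall back to a full init plus finup walk.
 */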
int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	unsigned int nbytes = req->nbytes;
	struct scatterlist *sg;
	unsigned int offset;
	int err;

	if (nbytes &&
	    (sg = req->src, offset = sg->offset,
	     nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
		void *data;

		data = kmap_atomic(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		kunmap_atomic(data);
		crypto_yield(desc->flags);
	} else
		err = crypto_shash_init(desc) ?:
		      shash_ahash_finup(req, desc);

	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static int shash_async_digest(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return shash_ahash_digest(req, desc);
}

static int shash_async_export(struct ahash_request *req, void *out)
{
	return crypto_shash_export(ahash_request_ctx(req), out);
}

static int shash_async_import(struct ahash_request *req, const void *in)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return crypto_shash_import(desc, in);
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}

int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	*ctx = shash;
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final = shash_async_final;
	crt->finup = shash_async_finup;
	crt->digest = shash_async_digest;
	crt->setkey = shash_async_setkey;

	crt->has_setkey = alg->setkey != shash_no_setkey;

	if (alg->export)
		crt->export = shash_async_export;
	if (alg->import)
		crt->import = shash_async_import;

	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}

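/*
 * The shash_compat_* helpers back the legacy crypto_hash interface
 * with an shash algorithm; the compat tfm context stores a pointer to
 * a heap-allocated shash_desc shared by all operations on the tfm.
 */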
static int shash_compat_setkey(struct crypto_hash *tfm, const u8 *key,
			       unsigned int keylen)
{
	struct shash_desc **descp = crypto_hash_ctx(tfm);
	struct shash_desc *desc = *descp;

	return crypto_shash_setkey(desc->tfm, key, keylen);
}

static int shash_compat_init(struct hash_desc *hdesc)
{
	struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
	struct shash_desc *desc = *descp;

	desc->flags = hdesc->flags;

	return crypto_shash_init(desc);
}

static int shash_compat_update(struct hash_desc *hdesc, struct scatterlist *sg,
			       unsigned int len)
{
	struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
	struct shash_desc *desc = *descp;
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first_compat(hdesc, &walk, sg, len);
	     nbytes > 0; nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}

static int shash_compat_final(struct hash_desc *hdesc, u8 *out)
{
	struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);

	return crypto_shash_final(*descp, out);
}

static int shash_compat_digest(struct hash_desc *hdesc, struct scatterlist *sg,
			       unsigned int nbytes, u8 *out)
{
	unsigned int offset = sg->offset;
	int err;

	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
		struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
		struct shash_desc *desc = *descp;
		void *data;

		desc->flags = hdesc->flags;

		data = kmap_atomic(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes, out);
		kunmap_atomic(data);
		crypto_yield(desc->flags);
		goto out;
	}

	err = shash_compat_init(hdesc);
	if (err)
		goto out;

	err = shash_compat_update(hdesc, sg, nbytes);
	if (err)
		goto out;

	err = shash_compat_final(hdesc, out);

out:
	return err;
}

static void crypto_exit_shash_ops_compat(struct crypto_tfm *tfm)
{
	struct shash_desc **descp = crypto_tfm_ctx(tfm);
	struct shash_desc *desc = *descp;

	crypto_free_shash(desc->tfm);
	kzfree(desc);
}

static int crypto_init_shash_ops_compat(struct crypto_tfm *tfm)
{
	struct hash_tfm *crt = &tfm->crt_hash;
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct shash_desc **descp = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;
	struct shash_desc *desc;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(shash),
		       GFP_KERNEL);
	if (!desc) {
		crypto_free_shash(shash);
		return -ENOMEM;
	}

	*descp = desc;
	desc->tfm = shash;
	tfm->exit = crypto_exit_shash_ops_compat;

	crt->init = shash_compat_init;
	crt->update = shash_compat_update;
	crt->final = shash_compat_final;
	crt->digest = shash_compat_digest;
	crt->setkey = shash_compat_setkey;

	crt->digestsize = alg->digestsize;

	return 0;
}

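/*
 * crypto_type glue: lookups through the legacy CRYPTO_ALG_TYPE_HASH
 * interface get the compat ops wired up here, while native shash
 * users go through crypto_shash_init_tfm() below.
 */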
static int crypto_init_shash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	switch (mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_HASH_MASK:
		return crypto_init_shash_ops_compat(tfm);
	}

	return -EINVAL;
}

static unsigned int crypto_shash_ctxsize(struct crypto_alg *alg, u32 type,
					 u32 mask)
{
	switch (mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_HASH_MASK:
		return sizeof(struct shash_desc *);
	}

	return 0;
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);

	hash->descsize = crypto_shash_alg(hash)->descsize;
	return 0;
}

#ifdef CONFIG_NET
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;
	struct shash_alg *salg = __crypto_shash_alg(alg);

	strncpy(rhash.type, "shash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = salg->digestsize;

	if (nla_put(skb, CRYPTOCFGA_REPORT_HASH,
		    sizeof(struct crypto_report_hash), &rhash))
		goto nla_put_failure;
	return 0;

nla_put_failure:
	return -EMSGSIZE;
}
#else
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	return -ENOSYS;
}
#endif

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__attribute__ ((unused));
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}

static const struct crypto_type crypto_shash_type = {
	.ctxsize = crypto_shash_ctxsize,
	.extsize = crypto_alg_extsize,
	.init = crypto_init_shash_ops,
	.init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
	.report = crypto_shash_report,
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);

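/*
 * Common preparation for both standalone algorithms and template
 * instances: sanity-check the sizes against PAGE_SIZE / 8 and fill in
 * default finup/digest/export/import/setkey implementations so every
 * registered shash provides the full operation set.
 */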
static int shash_prepare_alg(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > PAGE_SIZE / 8 ||
	    alg->descsize > PAGE_SIZE / 8 ||
	    alg->statesize > PAGE_SIZE / 8)
		return -EINVAL;

	base->cra_type = &crypto_shash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	if (!alg->finup)
		alg->finup = shash_finup_unaligned;
	if (!alg->digest)
		alg->digest = shash_digest_unaligned;
	if (!alg->export) {
		alg->export = shash_default_export;
		alg->import = shash_default_import;
		alg->statesize = alg->descsize;
	}
	if (!alg->setkey)
		alg->setkey = shash_no_setkey;

	return 0;
}

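/*
 * A hedged registration sketch for a driver (the "xyz" algorithm and
 * its helpers are hypothetical; real drivers also supply init/update/
 * final implementations and sensible cra_* values):
 *
 *	static struct shash_alg xyz_alg = {
 *		.digestsize	= 32,
 *		.descsize	= sizeof(struct xyz_desc_ctx),
 *		.init		= xyz_init,
 *		.update		= xyz_update,
 *		.final		= xyz_final,
 *		.base		= {
 *			.cra_name	 = "xyz",
 *			.cra_driver_name = "xyz-generic",
 *			.cra_blocksize	 = 64,
 *			.cra_module	 = THIS_MODULE,
 *		},
 *	};
 *
 *	err = crypto_register_shash(&xyz_alg);
 */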
int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = shash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);

int crypto_unregister_shash(struct shash_alg *alg)
{
	return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

int crypto_register_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_shash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_shashes);

int crypto_unregister_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = count - 1; i >= 0; --i) {
		ret = crypto_unregister_shash(&algs[i]);
		if (ret)
			pr_err("Failed to unregister %s %s: %d\n",
			       algs[i].base.cra_driver_name,
			       algs[i].base.cra_name, ret);
	}

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_shashes);

int shash_register_instance(struct crypto_template *tmpl,
			    struct shash_instance *inst)
{
	int err;

	err = shash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_instance(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(shash_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_free_instance);

int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
			    struct shash_alg *alg,
			    struct crypto_instance *inst)
{
	return crypto_init_spawn2(&spawn->base, &alg->base, inst,
				  &crypto_shash_type);
}
EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);

struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
	return IS_ERR(alg) ? ERR_CAST(alg) :
	       container_of(alg, struct shash_alg, base);
}
EXPORT_SYMBOL_GPL(shash_attr_alg);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");