/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/cryptouser.h>
#include <net/netlink.h>

#include "internal.h"
static const struct crypto_type crypto_shash_type;

static int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
			   unsigned int keylen)
{
	return -ENOSYS;
}
static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned long absize;
	u8 *buffer, *alignbuffer;
	int err;

	absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
	buffer = kmalloc(absize, GFP_KERNEL);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	err = shash->setkey(tfm, alignbuffer, keylen);
	kzfree(buffer);
	return err;
}
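/*
 * The exported entry points below all follow the same pattern: when the
 * caller's pointers already satisfy the algorithm's alignment mask, the
 * shash_alg callback is invoked directly; otherwise a *_unaligned() helper
 * handles the misaligned case, typically by bouncing the data through a
 * suitably aligned temporary buffer first.
 */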
int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)key & alignmask)
		return shash_setkey_unaligned(tfm, key, keylen);

	return shash->setkey(tfm, key, keylen);
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);
static inline unsigned int shash_align_buffer_size(unsigned len,
						   unsigned long mask)
{
	typedef u8 __attribute__ ((aligned)) u8_aligned;
	return len + (mask & ~(__alignof__(u8_aligned) - 1));
}
static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned int unaligned_len = alignmask + 1 -
				     ((unsigned long)data & alignmask);
	u8 ubuf[shash_align_buffer_size(unaligned_len, alignmask)]
		__attribute__ ((aligned));
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (unaligned_len > len)
		unaligned_len = len;

	memcpy(buf, data, unaligned_len);
	err = shash->update(desc, buf, unaligned_len);
	memset(buf, 0, unaligned_len);

	return err ?:
	       shash->update(desc, data + unaligned_len, len - unaligned_len);
}
int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)data & alignmask)
		return shash_update_unaligned(desc, data, len);

	return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);
static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	u8 ubuf[shash_align_buffer_size(ds, alignmask)]
		__attribute__ ((aligned));
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	err = shash->final(desc, buf);
	if (err)
		goto out;

	memcpy(out, buf, ds);

out:
	memset(buf, 0, ds);
	return err;
}
int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)out & alignmask)
		return shash_final_unaligned(desc, out);

	return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);
static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	return crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}
int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_finup_unaligned(desc, data, len, out);

	return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);
static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len, u8 *out)
{
	return crypto_shash_init(desc) ?:
	       crypto_shash_finup(desc, data, len, out);
}
int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_digest_unaligned(desc, data, len, out);

	return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);
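/*
 * Typical one-shot use of the API above (illustrative sketch only, not part
 * of this file): allocate a tfm, place a struct shash_desc followed by
 * crypto_shash_descsize() bytes of request context in memory, point
 * desc->tfm at the tfm and call crypto_shash_digest().  The "sha256" name
 * is just an example:
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sha256", 0, 0);
 *	struct shash_desc *desc;
 *	int err;
 *
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(tfm),
 *		       GFP_KERNEL);
 *	if (!desc)
 *		goto out_free_tfm;
 *	desc->tfm = tfm;
 *	desc->flags = 0;
 *	err = crypto_shash_digest(desc, data, len, out);
 *	kfree(desc);
 * out_free_tfm:
 *	crypto_free_shash(tfm);
 */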
static int shash_default_export(struct shash_desc *desc, void *out)
{
	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_default_import(struct shash_desc *desc, const void *in)
{
	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
	return 0;
}
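/*
 * The shash_async_* and shash_ahash_* helpers below expose a synchronous
 * shash algorithm through the asynchronous ahash interface: the ahash
 * request context holds a shash_desc, and scatterlist input is walked with
 * crypto_hash_walk so each mapped chunk can be fed to the shash callbacks.
 */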
static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

	return crypto_shash_setkey(*ctx, key, keylen);
}
static int shash_async_init(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return crypto_shash_init(desc);
}
int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);
static int shash_async_update(struct ahash_request *req)
{
	return shash_ahash_update(req, ahash_request_ctx(req));
}

static int shash_async_final(struct ahash_request *req)
{
	return crypto_shash_final(ahash_request_ctx(req), req->result);
}
int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);
static int shash_async_finup(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return shash_ahash_finup(req, desc);
}
int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	struct scatterlist *sg = req->src;
	unsigned int offset = sg->offset;
	unsigned int nbytes = req->nbytes;
	int err;

	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
		void *data;

		data = kmap_atomic(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		kunmap_atomic(data);
		crypto_yield(desc->flags);
	} else
		err = crypto_shash_init(desc) ?:
		      shash_ahash_finup(req, desc);

	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);
static int shash_async_digest(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return shash_ahash_digest(req, desc);
}
static int shash_async_export(struct ahash_request *req, void *out)
{
	return crypto_shash_export(ahash_request_ctx(req), out);
}

static int shash_async_import(struct ahash_request *req, const void *in)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return crypto_shash_import(desc, in);
}
static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}
int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	*ctx = shash;
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final = shash_async_final;
	crt->finup = shash_async_finup;
	crt->digest = shash_async_digest;

	if (alg->setkey)
		crt->setkey = shash_async_setkey;
	if (alg->export)
		crt->export = shash_async_export;
	if (alg->import)
		crt->import = shash_async_import;

	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}
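/*
 * The shash_compat_* helpers below provide the legacy crypto_hash
 * (hash_desc based) interface on top of a shash algorithm, for callers that
 * have not yet been converted to the ahash/shash APIs.
 */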
static int shash_compat_setkey(struct crypto_hash *tfm, const u8 *key,
			       unsigned int keylen)
{
	struct shash_desc **descp = crypto_hash_ctx(tfm);
	struct shash_desc *desc = *descp;

	return crypto_shash_setkey(desc->tfm, key, keylen);
}
static int shash_compat_init(struct hash_desc *hdesc)
{
	struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
	struct shash_desc *desc = *descp;

	desc->flags = hdesc->flags;

	return crypto_shash_init(desc);
}
static int shash_compat_update(struct hash_desc *hdesc, struct scatterlist *sg,
			       unsigned int len)
{
	struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
	struct shash_desc *desc = *descp;
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first_compat(hdesc, &walk, sg, len);
	     nbytes > 0; nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
static int shash_compat_final(struct hash_desc *hdesc, u8 *out)
{
	struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);

	return crypto_shash_final(*descp, out);
}
static int shash_compat_digest(struct hash_desc *hdesc, struct scatterlist *sg,
			       unsigned int nbytes, u8 *out)
{
	unsigned int offset = sg->offset;
	int err;

	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
		struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
		struct shash_desc *desc = *descp;
		void *data;

		desc->flags = hdesc->flags;

		data = kmap_atomic(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes, out);
		kunmap_atomic(data);
		crypto_yield(desc->flags);
		goto out;
	}

	err = shash_compat_init(hdesc);
	if (err)
		goto out;

	err = shash_compat_update(hdesc, sg, nbytes);
	if (err)
		goto out;

	err = shash_compat_final(hdesc, out);

out:
	return err;
}
static void crypto_exit_shash_ops_compat(struct crypto_tfm *tfm)
{
	struct shash_desc **descp = crypto_tfm_ctx(tfm);
	struct shash_desc *desc = *descp;

	crypto_free_shash(desc->tfm);
	kzfree(desc);
}
static int crypto_init_shash_ops_compat(struct crypto_tfm *tfm)
{
	struct hash_tfm *crt = &tfm->crt_hash;
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct shash_desc **descp = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;
	struct shash_desc *desc;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(shash),
		       GFP_KERNEL);
	if (!desc) {
		crypto_free_shash(shash);
		return -ENOMEM;
	}

	*descp = desc;
	desc->tfm = shash;
	tfm->exit = crypto_exit_shash_ops_compat;

	crt->init = shash_compat_init;
	crt->update = shash_compat_update;
	crt->final  = shash_compat_final;
	crt->digest = shash_compat_digest;
	crt->setkey = shash_compat_setkey;

	crt->digestsize = alg->digestsize;

	return 0;
}
static int crypto_init_shash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	switch (mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_HASH_MASK:
		return crypto_init_shash_ops_compat(tfm);
	}

	return -EINVAL;
}
static unsigned int crypto_shash_ctxsize(struct crypto_alg *alg, u32 type,
					 u32 mask)
{
	switch (mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_HASH_MASK:
		return sizeof(struct shash_desc *);
	}

	return 0;
}
static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);

	hash->descsize = crypto_shash_alg(hash)->descsize;
	return 0;
}
static unsigned int crypto_shash_extsize(struct crypto_alg *alg)
{
	return alg->cra_ctxsize;
}
#ifdef CONFIG_NET
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;
	struct shash_alg *salg = __crypto_shash_alg(alg);

	strncpy(rhash.type, "shash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = salg->digestsize;

	if (nla_put(skb, CRYPTOCFGA_REPORT_HASH,
		    sizeof(struct crypto_report_hash), &rhash))
		goto nla_put_failure;
	return 0;

nla_put_failure:
	return -EMSGSIZE;
}
#else
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	return -ENOSYS;
}
#endif
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__attribute__ ((unused));
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}
static const struct crypto_type crypto_shash_type = {
	.ctxsize = crypto_shash_ctxsize,
	.extsize = crypto_shash_extsize,
	.init = crypto_init_shash_ops,
	.init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
	.report = crypto_shash_report,
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};
struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);
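/*
 * shash_prepare_alg() validates an algorithm's size limits and fills in
 * default implementations (finup, digest, export/import, setkey) for any
 * callback the algorithm did not provide, before it is registered.
 */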
static int shash_prepare_alg(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > PAGE_SIZE / 8 ||
	    alg->descsize > PAGE_SIZE / 8 ||
	    alg->statesize > PAGE_SIZE / 8)
		return -EINVAL;

	base->cra_type = &crypto_shash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	if (!alg->finup)
		alg->finup = shash_finup_unaligned;
	if (!alg->digest)
		alg->digest = shash_digest_unaligned;
	if (!alg->export) {
		alg->export = shash_default_export;
		alg->import = shash_default_import;
		alg->statesize = alg->descsize;
	}
	if (!alg->setkey)
		alg->setkey = shash_no_setkey;

	return 0;
}
int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = shash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);
int crypto_unregister_shash(struct shash_alg *alg)
{
	return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);
int crypto_register_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_shash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_shashes);
int crypto_unregister_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = count - 1; i >= 0; --i) {
		ret = crypto_unregister_shash(&algs[i]);
		if (ret)
			pr_err("Failed to unregister %s %s: %d\n",
			       algs[i].base.cra_driver_name,
			       algs[i].base.cra_name, ret);
	}

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_shashes);
int shash_register_instance(struct crypto_template *tmpl,
			    struct shash_instance *inst)
{
	int err;

	err = shash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);
void shash_free_instance(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(shash_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_free_instance);
int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
			    struct shash_alg *alg,
			    struct crypto_instance *inst)
{
	return crypto_init_spawn2(&spawn->base, &alg->base, inst,
				  &crypto_shash_type);
}
EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);
struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
	return IS_ERR(alg) ? ERR_CAST(alg) :
	       container_of(alg, struct shash_alg, base);
}
EXPORT_SYMBOL_GPL(shash_attr_alg);
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");