/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>

#include "internal.h"

static const struct crypto_type crypto_shash_type;

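/*
 * Setkey handling for misaligned callers: when the key pointer does not
 * satisfy the algorithm's alignment mask, the key is copied into a
 * kmalloc'd buffer aligned to alignmask + 1, the algorithm's ->setkey()
 * is invoked on that copy, and the copy is wiped before being freed.
 */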
static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned long absize;
	u8 *buffer, *alignbuffer;
	int err;

	absize = keylen + (alignmask & ~(CRYPTO_MINALIGN - 1));
	buffer = kmalloc(absize, GFP_KERNEL);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	err = shash->setkey(tfm, alignbuffer, keylen);
	memset(alignbuffer, 0, keylen);
	kfree(buffer);
	return err;
}

int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)key & alignmask)
		return shash_setkey_unaligned(tfm, key, keylen);

	return shash->setkey(tfm, key, keylen);
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);

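/*
 * Unaligned update handling: the bytes needed to reach the next aligned
 * boundary are first copied into an aligned stack buffer and hashed from
 * there; the rest of the data is then hashed directly in place.
 * shash_align_buffer_size() pads the stack buffer so it can be aligned.
 */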
static inline unsigned int shash_align_buffer_size(unsigned len,
						   unsigned long mask)
{
	return len + (mask & ~(__alignof__(u8 __attribute__ ((aligned))) - 1));
}

static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned int unaligned_len = alignmask + 1 -
				     ((unsigned long)data & alignmask);
	u8 buf[shash_align_buffer_size(unaligned_len, alignmask)]
		__attribute__ ((aligned));

	if (unaligned_len > len)
		unaligned_len = len;

	memcpy(buf, data, unaligned_len);

	return shash->update(desc, buf, unaligned_len) ?:
	       shash->update(desc, data + unaligned_len, len - unaligned_len);
}

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)data & alignmask)
		return shash_update_unaligned(desc, data, len);

	return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

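/*
 * Unaligned final handling: the digest is produced into an aligned stack
 * buffer and then copied out to the caller's (misaligned) destination.
 */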
static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	u8 buf[shash_align_buffer_size(ds, alignmask)]
		__attribute__ ((aligned));
	int err;

	err = shash->final(desc, buf);
	memcpy(out, buf, ds);
	return err;
}

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)out & alignmask)
		return shash_final_unaligned(desc, out);

	return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);

static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	return crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_finup_unaligned(desc, data, len, out);

	return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len, u8 *out)
{
	return crypto_shash_init(desc) ?:
	       crypto_shash_finup(desc, data, len, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_digest_unaligned(desc, data, len, out);

	return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);

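/*
 * Algorithms that do not implement export/import of partial hash state
 * fall back to these stubs, which simply return -ENOSYS.
 */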
static int shash_no_export(struct shash_desc *desc, void *out)
{
	return -ENOSYS;
}

static int shash_no_import(struct shash_desc *desc, const void *in)
{
	return -ENOSYS;
}

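/*
 * ahash front end: a crypto_shash handle is stored in the ahash transform
 * context and each ahash request carries a shash_desc in its request
 * context, so asynchronous users are served synchronously by the
 * underlying shash algorithm.
 */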
static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

	return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return crypto_shash_init(desc);
}

static int shash_async_update(struct ahash_request *req)
{
	struct shash_desc *desc = ahash_request_ctx(req);
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}

static int shash_async_final(struct ahash_request *req)
{
	return crypto_shash_final(ahash_request_ctx(req), req->result);
}

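/*
 * Digest fast path: if the request fits within the first scatterlist
 * entry (a single page), the page is kmapped and hashed in one call;
 * otherwise the request falls back to init/update/final over the
 * generic hash walk.
 */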
static int shash_async_digest(struct ahash_request *req)
{
	struct scatterlist *sg = req->src;
	unsigned int offset = sg->offset;
	unsigned int nbytes = req->nbytes;
	int err;

	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
		struct crypto_shash **ctx =
			crypto_ahash_ctx(crypto_ahash_reqtfm(req));
		struct shash_desc *desc = ahash_request_ctx(req);
		void *data;

		desc->tfm = *ctx;
		desc->flags = req->base.flags;

		data = crypto_kmap(sg_page(sg), 0);
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		crypto_kunmap(data, 0);
		crypto_yield(desc->flags);
		goto out;
	}

	err = shash_async_init(req);
	if (err)
		goto out;

	err = shash_async_update(req);
	if (err)
		goto out;

	err = shash_async_final(req);

out:
	return err;
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}

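/*
 * The async init path below allocates a crypto_shash transform for the
 * same algorithm and stores the pointer in the ahash transform context;
 * the exit handler above frees it again.
 */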
static int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct ahash_tfm *crt = &tfm->crt_ahash;
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	*ctx = shash;
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final = shash_async_final;
	crt->digest = shash_async_digest;
	crt->setkey = shash_async_setkey;

	crt->digestsize = alg->digestsize;
	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}

static int shash_compat_setkey(struct crypto_hash *tfm, const u8 *key,
			       unsigned int keylen)
{
	struct shash_desc *desc = crypto_hash_ctx(tfm);

	return crypto_shash_setkey(desc->tfm, key, keylen);
}

static int shash_compat_init(struct hash_desc *hdesc)
{
	struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);

	desc->flags = hdesc->flags;

	return crypto_shash_init(desc);
}

static int shash_compat_update(struct hash_desc *hdesc, struct scatterlist *sg,
			       unsigned int len)
{
	struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first_compat(hdesc, &walk, sg, len);
	     nbytes > 0; nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}

static int shash_compat_final(struct hash_desc *hdesc, u8 *out)
{
	return crypto_shash_final(crypto_hash_ctx(hdesc->tfm), out);
}

static int shash_compat_digest(struct hash_desc *hdesc, struct scatterlist *sg,
			       unsigned int nbytes, u8 *out)
{
	unsigned int offset = sg->offset;
	int err;

	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
		struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);
		void *data;

		desc->flags = hdesc->flags;

		data = crypto_kmap(sg_page(sg), 0);
		err = crypto_shash_digest(desc, data + offset, nbytes, out);
		crypto_kunmap(data, 0);
		crypto_yield(desc->flags);
		goto out;
	}

	err = shash_compat_init(hdesc);
	if (err)
		goto out;

	err = shash_compat_update(hdesc, sg, nbytes);
	if (err)
		goto out;

	err = shash_compat_final(hdesc, out);

out:
	return err;
}

static void crypto_exit_shash_ops_compat(struct crypto_tfm *tfm)
{
	struct shash_desc *desc = crypto_tfm_ctx(tfm);

	crypto_free_shash(desc->tfm);
}

static int crypto_init_shash_ops_compat(struct crypto_tfm *tfm)
{
	struct hash_tfm *crt = &tfm->crt_hash;
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct shash_desc *desc = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	desc->tfm = shash;
	tfm->exit = crypto_exit_shash_ops_compat;

	crt->init = shash_compat_init;
	crt->update = shash_compat_update;
	crt->final = shash_compat_final;
	crt->digest = shash_compat_digest;
	crt->setkey = shash_compat_setkey;

	crt->digestsize = alg->digestsize;

	return 0;
}

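/*
 * Frontend dispatch: the type mask recorded for the transform selects
 * whether a shash algorithm is instantiated behind the legacy hash
 * interface or the ahash interface.
 */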
static int crypto_init_shash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	switch (mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_HASH_MASK:
		return crypto_init_shash_ops_compat(tfm);
	case CRYPTO_ALG_TYPE_AHASH_MASK:
		return crypto_init_shash_ops_async(tfm);
	}

	return -EINVAL;
}

static unsigned int crypto_shash_ctxsize(struct crypto_alg *alg, u32 type,
					 u32 mask)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	switch (mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_HASH_MASK:
		return sizeof(struct shash_desc) + salg->descsize;
	case CRYPTO_ALG_TYPE_AHASH_MASK:
		return sizeof(struct crypto_shash *);
	}

	return 0;
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm,
				 const struct crypto_type *frontend)
{
	return 0;
}

static unsigned int crypto_shash_extsize(struct crypto_alg *alg,
					 const struct crypto_type *frontend)
{
	return alg->cra_ctxsize;
}

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__attribute__ ((unused));
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
	seq_printf(m, "descsize     : %u\n", salg->descsize);
}

static const struct crypto_type crypto_shash_type = {
	.ctxsize = crypto_shash_ctxsize,
	.extsize = crypto_shash_extsize,
	.init = crypto_init_shash_ops,
	.init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);

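/*
 * Typical use from other kernel code (illustrative sketch only; error
 * handling abbreviated, "sha256" is just an example algorithm name,
 * data/len/out are caller-provided):
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sha256", 0, 0);
 *	struct shash_desc *desc;
 *
 *	desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(tfm),
 *		       GFP_KERNEL);
 *	desc->tfm = tfm;
 *	desc->flags = 0;
 *	crypto_shash_digest(desc, data, len, out);
 *	kfree(desc);
 *	crypto_free_shash(tfm);
 */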
static int shash_prepare_alg(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > PAGE_SIZE / 8 ||
	    alg->descsize > PAGE_SIZE / 8 ||
	    alg->statesize > PAGE_SIZE / 8)
		return -EINVAL;

	base->cra_type = &crypto_shash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	if (!alg->finup)
		alg->finup = shash_finup_unaligned;
	if (!alg->digest)
		alg->digest = shash_digest_unaligned;
	if (!alg->import)
		alg->import = shash_no_import;
	if (!alg->export)
		alg->export = shash_no_export;

	return 0;
}

int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = shash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);

int crypto_unregister_shash(struct shash_alg *alg)
{
	return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

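/*
 * Template support: instances built by templates register through
 * shash_register_instance(), hold references on their underlying
 * algorithms via shash spawns, and are torn down with
 * shash_free_instance().
 */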
int shash_register_instance(struct crypto_template *tmpl,
			    struct shash_instance *inst)
{
	int err;

	err = shash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_instance(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(shash_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_free_instance);

int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
			    struct shash_alg *alg,
			    struct crypto_instance *inst)
{
	return crypto_init_spawn2(&spawn->base, &alg->base, inst,
				  &crypto_shash_type);
}
EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);

struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
	return IS_ERR(alg) ? ERR_CAST(alg) :
	       container_of(alg, struct shash_alg, base);
}
EXPORT_SYMBOL_GPL(shash_attr_alg);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");