crypto: shash - Move finup/digest null checks to registration time
/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>

#include "internal.h"

static const struct crypto_type crypto_shash_type;

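/*
 * Algorithms declare an alignmask for the buffers they can operate on
 * directly.  The *_unaligned() helpers below bounce misaligned keys,
 * data heads and digests through a suitably aligned temporary buffer
 * before invoking the algorithm's own hooks.
 */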
static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
                                  unsigned int keylen)
{
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);
        unsigned long absize;
        u8 *buffer, *alignbuffer;
        int err;

        absize = keylen + (alignmask & ~(CRYPTO_MINALIGN - 1));
        buffer = kmalloc(absize, GFP_KERNEL);
        if (!buffer)
                return -ENOMEM;

        alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
        memcpy(alignbuffer, key, keylen);
        err = shash->setkey(tfm, alignbuffer, keylen);
        memset(alignbuffer, 0, keylen);
        kfree(buffer);
        return err;
}

int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
                        unsigned int keylen)
{
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if (!shash->setkey)
                return -ENOSYS;

        if ((unsigned long)key & alignmask)
                return shash_setkey_unaligned(tfm, key, keylen);

        return shash->setkey(tfm, key, keylen);
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);

static inline unsigned int shash_align_buffer_size(unsigned len,
                                                   unsigned long mask)
{
        return len + (mask & ~(__alignof__(u8 __attribute__ ((aligned))) - 1));
}

static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
                                  unsigned int len)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);
        unsigned int unaligned_len = alignmask + 1 -
                                     ((unsigned long)data & alignmask);
        u8 buf[shash_align_buffer_size(unaligned_len, alignmask)]
                __attribute__ ((aligned));

        if (unaligned_len > len)
                unaligned_len = len;

        memcpy(buf, data, unaligned_len);

        return shash->update(desc, buf, unaligned_len) ?:
               shash->update(desc, data + unaligned_len, len - unaligned_len);
}

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
                        unsigned int len)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if ((unsigned long)data & alignmask)
                return shash_update_unaligned(desc, data, len);

        return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
        struct crypto_shash *tfm = desc->tfm;
        unsigned long alignmask = crypto_shash_alignmask(tfm);
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned int ds = crypto_shash_digestsize(tfm);
        u8 buf[shash_align_buffer_size(ds, alignmask)]
                __attribute__ ((aligned));
        int err;

        err = shash->final(desc, buf);
        memcpy(out, buf, ds);
        return err;
}

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if ((unsigned long)out & alignmask)
                return shash_final_unaligned(desc, out);

        return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);

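/*
 * Default ->finup() and ->digest() for algorithms that do not provide
 * their own.  shash_prepare_alg() installs these at registration time
 * (see below), so crypto_shash_finup() and crypto_shash_digest() can
 * call the hooks unconditionally instead of checking for NULL.
 */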
static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
                                 unsigned int len, u8 *out)
{
        return crypto_shash_update(desc, data, len) ?:
               crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
                       unsigned int len, u8 *out)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if (((unsigned long)data | (unsigned long)out) & alignmask)
                return shash_finup_unaligned(desc, data, len, out);

        return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
                                  unsigned int len, u8 *out)
{
        return crypto_shash_init(desc) ?:
               crypto_shash_finup(desc, data, len, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
                        unsigned int len, u8 *out)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if (((unsigned long)data | (unsigned long)out) & alignmask)
                return shash_digest_unaligned(desc, data, len, out);

        return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);

static int shash_no_export(struct shash_desc *desc, void *out)
{
        return -ENOSYS;
}

static int shash_no_import(struct shash_desc *desc, const void *in)
{
        return -ENOSYS;
}

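/*
 * Adapters that expose a synchronous shash algorithm through the
 * asynchronous ahash interface.  The ahash tfm context holds a pointer
 * to the underlying shash tfm, and the request context holds the
 * shash_desc.
 */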
static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
                              unsigned int keylen)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

        return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
        struct shash_desc *desc = ahash_request_ctx(req);

        desc->tfm = *ctx;
        desc->flags = req->base.flags;

        return crypto_shash_init(desc);
}

static int shash_async_update(struct ahash_request *req)
{
        struct shash_desc *desc = ahash_request_ctx(req);
        struct crypto_hash_walk walk;
        int nbytes;

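        /*
         * Walk the scatterlist one mapped chunk at a time.  An error
         * from crypto_shash_update() is passed back into
         * crypto_hash_walk_done(), which ends the walk, so the loop
         * exits with nbytes <= 0.
         */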
        for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
             nbytes = crypto_hash_walk_done(&walk, nbytes))
                nbytes = crypto_shash_update(desc, walk.data, nbytes);

        return nbytes;
}

static int shash_async_final(struct ahash_request *req)
{
        return crypto_shash_final(ahash_request_ctx(req), req->result);
}

static int shash_async_digest(struct ahash_request *req)
{
        struct scatterlist *sg = req->src;
        unsigned int offset = sg->offset;
        unsigned int nbytes = req->nbytes;
        int err;

        if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
                struct crypto_shash **ctx =
                        crypto_ahash_ctx(crypto_ahash_reqtfm(req));
                struct shash_desc *desc = ahash_request_ctx(req);
                void *data;

                desc->tfm = *ctx;
                desc->flags = req->base.flags;

                data = crypto_kmap(sg_page(sg), 0);
                err = crypto_shash_digest(desc, data + offset, nbytes,
                                          req->result);
                crypto_kunmap(data, 0);
                crypto_yield(desc->flags);
                goto out;
        }

        err = shash_async_init(req);
        if (err)
                goto out;

        err = shash_async_update(req);
        if (err)
                goto out;

        err = shash_async_final(req);

out:
        return err;
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
        struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

        crypto_free_shash(*ctx);
}

static int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
        struct crypto_alg *calg = tfm->__crt_alg;
        struct shash_alg *alg = __crypto_shash_alg(calg);
        struct ahash_tfm *crt = &tfm->crt_ahash;
        struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
        struct crypto_shash *shash;

        if (!crypto_mod_get(calg))
                return -EAGAIN;

        shash = crypto_create_tfm(calg, &crypto_shash_type);
        if (IS_ERR(shash)) {
                crypto_mod_put(calg);
                return PTR_ERR(shash);
        }

        *ctx = shash;
        tfm->exit = crypto_exit_shash_ops_async;

        crt->init = shash_async_init;
        crt->update = shash_async_update;
        crt->final = shash_async_final;
        crt->digest = shash_async_digest;
        crt->setkey = shash_async_setkey;

        crt->digestsize = alg->digestsize;
        crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

        return 0;
}

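/*
 * Adapters for the legacy synchronous crypto_hash interface.  Here the
 * hash tfm context itself is the shash_desc, with the underlying shash
 * tfm hanging off desc->tfm.
 */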
static int shash_compat_setkey(struct crypto_hash *tfm, const u8 *key,
                               unsigned int keylen)
{
        struct shash_desc *desc = crypto_hash_ctx(tfm);

        return crypto_shash_setkey(desc->tfm, key, keylen);
}

static int shash_compat_init(struct hash_desc *hdesc)
{
        struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);

        desc->flags = hdesc->flags;

        return crypto_shash_init(desc);
}

static int shash_compat_update(struct hash_desc *hdesc, struct scatterlist *sg,
                               unsigned int len)
{
        struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);
        struct crypto_hash_walk walk;
        int nbytes;

        for (nbytes = crypto_hash_walk_first_compat(hdesc, &walk, sg, len);
             nbytes > 0; nbytes = crypto_hash_walk_done(&walk, nbytes))
                nbytes = crypto_shash_update(desc, walk.data, nbytes);

        return nbytes;
}

static int shash_compat_final(struct hash_desc *hdesc, u8 *out)
{
        return crypto_shash_final(crypto_hash_ctx(hdesc->tfm), out);
}

static int shash_compat_digest(struct hash_desc *hdesc, struct scatterlist *sg,
                               unsigned int nbytes, u8 *out)
{
        unsigned int offset = sg->offset;
        int err;

        if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
                struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);
                void *data;

                desc->flags = hdesc->flags;

                data = crypto_kmap(sg_page(sg), 0);
                err = crypto_shash_digest(desc, data + offset, nbytes, out);
                crypto_kunmap(data, 0);
                crypto_yield(desc->flags);
                goto out;
        }

        err = shash_compat_init(hdesc);
        if (err)
                goto out;

        err = shash_compat_update(hdesc, sg, nbytes);
        if (err)
                goto out;

        err = shash_compat_final(hdesc, out);

out:
        return err;
}

static void crypto_exit_shash_ops_compat(struct crypto_tfm *tfm)
{
        struct shash_desc *desc = crypto_tfm_ctx(tfm);

        crypto_free_shash(desc->tfm);
}

static int crypto_init_shash_ops_compat(struct crypto_tfm *tfm)
{
        struct hash_tfm *crt = &tfm->crt_hash;
        struct crypto_alg *calg = tfm->__crt_alg;
        struct shash_alg *alg = __crypto_shash_alg(calg);
        struct shash_desc *desc = crypto_tfm_ctx(tfm);
        struct crypto_shash *shash;

        if (!crypto_mod_get(calg))
                return -EAGAIN;

        shash = crypto_create_tfm(calg, &crypto_shash_type);
        if (IS_ERR(shash)) {
                crypto_mod_put(calg);
                return PTR_ERR(shash);
        }

        desc->tfm = shash;
        tfm->exit = crypto_exit_shash_ops_compat;

        crt->init = shash_compat_init;
        crt->update = shash_compat_update;
        crt->final = shash_compat_final;
        crt->digest = shash_compat_digest;
        crt->setkey = shash_compat_setkey;

        crt->digestsize = alg->digestsize;

        return 0;
}

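/*
 * Glue that ties shash into the generic crypto API.  Which frontend a
 * caller gets (legacy hash or ahash) is encoded in the type mask.
 */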
static int crypto_init_shash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
        switch (mask & CRYPTO_ALG_TYPE_MASK) {
        case CRYPTO_ALG_TYPE_HASH_MASK:
                return crypto_init_shash_ops_compat(tfm);
        case CRYPTO_ALG_TYPE_AHASH_MASK:
                return crypto_init_shash_ops_async(tfm);
        }

        return -EINVAL;
}

static unsigned int crypto_shash_ctxsize(struct crypto_alg *alg, u32 type,
                                         u32 mask)
{
        struct shash_alg *salg = __crypto_shash_alg(alg);

        switch (mask & CRYPTO_ALG_TYPE_MASK) {
        case CRYPTO_ALG_TYPE_HASH_MASK:
                return sizeof(struct shash_desc) + salg->descsize;
        case CRYPTO_ALG_TYPE_AHASH_MASK:
                return sizeof(struct crypto_shash *);
        }

        return 0;
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm,
                                 const struct crypto_type *frontend)
{
        return 0;
}

static unsigned int crypto_shash_extsize(struct crypto_alg *alg,
                                         const struct crypto_type *frontend)
{
        return alg->cra_ctxsize;
}

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
        __attribute__ ((unused));
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
        struct shash_alg *salg = __crypto_shash_alg(alg);

        seq_printf(m, "type         : shash\n");
        seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
        seq_printf(m, "digestsize   : %u\n", salg->digestsize);
        seq_printf(m, "descsize     : %u\n", salg->descsize);
}

static const struct crypto_type crypto_shash_type = {
        .ctxsize = crypto_shash_ctxsize,
        .extsize = crypto_shash_extsize,
        .init = crypto_init_shash_ops,
        .init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
        .show = crypto_shash_show,
#endif
        .maskclear = ~CRYPTO_ALG_TYPE_MASK,
        .maskset = CRYPTO_ALG_TYPE_MASK,
        .type = CRYPTO_ALG_TYPE_SHASH,
        .tfmsize = offsetof(struct crypto_shash, base),
};

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
                                        u32 mask)
{
        return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);
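
/*
 * A minimal usage sketch (not part of this file): allocate a shash tfm,
 * allocate a descriptor sized for it, and compute a one-shot digest.
 * "sha1" is only an example algorithm name, and error handling is
 * trimmed for brevity.
 *
 *      struct crypto_shash *tfm = crypto_alloc_shash("sha1", 0, 0);
 *      struct shash_desc *desc;
 *      u8 out[20];
 *
 *      desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(tfm),
 *                     GFP_KERNEL);
 *      desc->tfm = tfm;
 *      desc->flags = 0;
 *      crypto_shash_digest(desc, data, len, out);
 *      kfree(desc);
 *      crypto_free_shash(tfm);
 */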
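
/*
 * shash_prepare_alg() sanity-checks the algorithm's sizes and fills in
 * default hooks (finup, digest, import, export) before registration, so
 * the fast paths above never have to test them for NULL.
 */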
static int shash_prepare_alg(struct shash_alg *alg)
{
        struct crypto_alg *base = &alg->base;

        if (alg->digestsize > PAGE_SIZE / 8 ||
            alg->descsize > PAGE_SIZE / 8 ||
            alg->statesize > PAGE_SIZE / 8)
                return -EINVAL;

        base->cra_type = &crypto_shash_type;
        base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
        base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

        if (!alg->finup)
                alg->finup = shash_finup_unaligned;
        if (!alg->digest)
                alg->digest = shash_digest_unaligned;
        if (!alg->import)
                alg->import = shash_no_import;
        if (!alg->export)
                alg->export = shash_no_export;

        return 0;
}

int crypto_register_shash(struct shash_alg *alg)
{
        struct crypto_alg *base = &alg->base;
        int err;

        err = shash_prepare_alg(alg);
        if (err)
                return err;

        return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);

int crypto_unregister_shash(struct shash_alg *alg)
{
        return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

int shash_register_instance(struct crypto_template *tmpl,
                            struct shash_instance *inst)
{
        int err;

        err = shash_prepare_alg(&inst->alg);
        if (err)
                return err;

        return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_instance(struct crypto_instance *inst)
{
        crypto_drop_spawn(crypto_instance_ctx(inst));
        kfree(shash_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_free_instance);

int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
                            struct shash_alg *alg,
                            struct crypto_instance *inst)
{
        return crypto_init_spawn2(&spawn->base, &alg->base, inst,
                                  &crypto_shash_type);
}
EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);

struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
{
        struct crypto_alg *alg;

        alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
        return IS_ERR(alg) ? ERR_CAST(alg) :
               container_of(alg, struct shash_alg, base);
}
EXPORT_SYMBOL_GPL(shash_attr_alg);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");