/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>

#include "internal.h"

static const struct crypto_type crypto_shash_type;
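/*
 * setkey slow path: the caller's key pointer does not satisfy the
 * algorithm's alignment mask, so copy the key into a suitably aligned
 * heap buffer, key the transform from that copy, then wipe and free it.
 */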
static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned long absize;
	u8 *buffer, *alignbuffer;
	int err;

	absize = keylen + (alignmask & ~(CRYPTO_MINALIGN - 1));
	buffer = kmalloc(absize, GFP_KERNEL);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	err = shash->setkey(tfm, alignbuffer, keylen);
	memset(alignbuffer, 0, keylen);
	kfree(buffer);
	return err;
}
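/*
 * Set the key for a keyed hash.  Algorithms that take no key leave
 * ->setkey NULL and get -ENOSYS; misaligned keys are bounced through
 * shash_setkey_unaligned().
 */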
int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (!shash->setkey)
		return -ENOSYS;

	if ((unsigned long)key & alignmask)
		return shash_setkey_unaligned(tfm, key, keylen);

	return shash->setkey(tfm, key, keylen);
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);
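/*
 * Size of an on-stack bounce buffer: the requested length plus enough
 * slack to realign by hand, given that the compiler already aligns the
 * array to the target's default maximum alignment.
 */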
static inline unsigned int shash_align_buffer_size(unsigned len,
						   unsigned long mask)
{
	return len + (mask & ~(__alignof__(u8 __attribute__ ((aligned))) - 1));
}
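/*
 * update slow path: copy the misaligned head of the input into an
 * aligned stack buffer and hash it from there, then hash the now
 * aligned remainder of the data in place.
 */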
static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned int unaligned_len = alignmask + 1 -
				     ((unsigned long)data & alignmask);
	u8 buf[shash_align_buffer_size(unaligned_len, alignmask)]
		__attribute__ ((aligned));

	if (unaligned_len > len)
		unaligned_len = len;

	memcpy(buf, data, unaligned_len);

	return shash->update(desc, buf, unaligned_len) ?:
	       shash->update(desc, data + unaligned_len, len - unaligned_len);
}
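/*
 * Feed data into the hash state.  Aligned input goes straight to the
 * algorithm's ->update; misaligned input takes the bounce-buffer path.
 */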
int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)data & alignmask)
		return shash_update_unaligned(desc, data, len);

	return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);
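/*
 * final slow path: have the algorithm write the digest into an aligned
 * stack buffer, then copy it out to the caller's misaligned buffer.
 */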
static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	u8 buf[shash_align_buffer_size(ds, alignmask)]
		__attribute__ ((aligned));
	int err;

	err = shash->final(desc, buf);
	memcpy(out, buf, ds);
	return err;
}
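/*
 * Write out the final digest, bouncing through shash_final_unaligned()
 * when the output buffer is misaligned.
 */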
int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)out & alignmask)
		return shash_final_unaligned(desc, out);

	return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);
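/*
 * finup: update with the last chunk of data and produce the digest in
 * one call.  If either pointer is misaligned or the algorithm has no
 * ->finup hook, fall back to a generic update-then-final sequence,
 * which handles alignment itself.
 */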
static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	return crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask ||
	    !shash->finup)
		return shash_finup_unaligned(desc, data, len, out);

	return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);
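/*
 * digest: init, update and final in one call, with the same fallback
 * rule as finup: misaligned buffers or a missing ->digest hook route
 * through the generic three-step sequence.
 */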
static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len, u8 *out)
{
	return crypto_shash_init(desc) ?:
	       crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask ||
	    !shash->digest)
		return shash_digest_unaligned(desc, data, len, out);

	return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);
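/*
 * Restore a previously exported hash state by copying it over the
 * descriptor context, then let algorithms with a ->reinit hook fix up
 * any derived state.
 */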
int crypto_shash_import(struct shash_desc *desc, const u8 *in)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *alg = crypto_shash_alg(tfm);

	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(tfm));

	if (alg->reinit)
		alg->reinit(desc);

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_import);
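/*
 * The shash_async_* functions below expose a synchronous shash through
 * the asynchronous ahash interface: the ahash context stores a pointer
 * to the underlying crypto_shash and the request context stores its
 * shash_desc.
 */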
static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

	return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return crypto_shash_init(desc);
}

static int shash_async_update(struct ahash_request *req)
{
	struct shash_desc *desc = ahash_request_ctx(req);
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}

static int shash_async_final(struct ahash_request *req)
{
	return crypto_shash_final(ahash_request_ctx(req), req->result);
}
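/*
 * ahash digest: if the whole input sits in the first scatterlist entry
 * without crossing a page, map it and hash it in one shot; otherwise
 * fall back to init/update/final over a hash walk.
 */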
static int shash_async_digest(struct ahash_request *req)
{
	struct scatterlist *sg = req->src;
	unsigned int offset = sg->offset;
	unsigned int nbytes = req->nbytes;
	int err;

	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
		struct crypto_shash **ctx =
			crypto_ahash_ctx(crypto_ahash_reqtfm(req));
		struct shash_desc *desc = ahash_request_ctx(req);
		void *data;

		desc->tfm = *ctx;
		desc->flags = req->base.flags;

		data = crypto_kmap(sg_page(sg), 0);
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		crypto_kunmap(data, 0);
		crypto_yield(desc->flags);
		goto out;
	}

	err = shash_async_init(req);
	if (err)
		goto out;

	err = shash_async_update(req);
	if (err)
		goto out;

	err = shash_async_final(req);

out:
	return err;
}
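/*
 * Construction and teardown of the ahash wrapper: grab a module
 * reference, instantiate the underlying shash and point the ahash ops
 * at the shash_async_* adapters above.
 */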
static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}

static int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct ahash_tfm *crt = &tfm->crt_ahash;
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	*ctx = shash;
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final = shash_async_final;
	crt->digest = shash_async_digest;
	crt->setkey = shash_async_setkey;

	crt->digestsize = alg->digestsize;
	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}
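/*
 * The shash_compat_* functions do the same job for the legacy
 * crypto_hash interface; here the tfm context holds the shash_desc of
 * the underlying synchronous hash directly.
 */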
static int shash_compat_setkey(struct crypto_hash *tfm, const u8 *key,
			       unsigned int keylen)
{
	struct shash_desc *desc = crypto_hash_ctx(tfm);

	return crypto_shash_setkey(desc->tfm, key, keylen);
}

static int shash_compat_init(struct hash_desc *hdesc)
{
	struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);

	desc->flags = hdesc->flags;

	return crypto_shash_init(desc);
}

static int shash_compat_update(struct hash_desc *hdesc, struct scatterlist *sg,
			       unsigned int len)
{
	struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first_compat(hdesc, &walk, sg, len);
	     nbytes > 0; nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}

static int shash_compat_final(struct hash_desc *hdesc, u8 *out)
{
	return crypto_shash_final(crypto_hash_ctx(hdesc->tfm), out);
}
static int shash_compat_digest(struct hash_desc *hdesc, struct scatterlist *sg,
			       unsigned int nbytes, u8 *out)
{
	unsigned int offset = sg->offset;
	int err;

	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
		struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);
		void *data;

		desc->flags = hdesc->flags;

		data = crypto_kmap(sg_page(sg), 0);
		err = crypto_shash_digest(desc, data + offset, nbytes, out);
		crypto_kunmap(data, 0);
		crypto_yield(desc->flags);
		goto out;
	}

	err = shash_compat_init(hdesc);
	if (err)
		goto out;

	err = shash_compat_update(hdesc, sg, nbytes);
	if (err)
		goto out;

	err = shash_compat_final(hdesc, out);

out:
	return err;
}
static void crypto_exit_shash_ops_compat(struct crypto_tfm *tfm)
{
	struct shash_desc *desc = crypto_tfm_ctx(tfm);

	crypto_free_shash(desc->tfm);
}

static int crypto_init_shash_ops_compat(struct crypto_tfm *tfm)
{
	struct hash_tfm *crt = &tfm->crt_hash;
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct shash_desc *desc = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	desc->tfm = shash;
	tfm->exit = crypto_exit_shash_ops_compat;

	crt->init = shash_compat_init;
	crt->update = shash_compat_update;
	crt->final = shash_compat_final;
	crt->digest = shash_compat_digest;
	crt->setkey = shash_compat_setkey;

	crt->digestsize = alg->digestsize;

	return 0;
}
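/*
 * Pick the wrapper matching the interface the user asked for: the
 * legacy hash interface or the newer ahash interface.
 */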
static int crypto_init_shash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	switch (mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_HASH_MASK:
		return crypto_init_shash_ops_compat(tfm);
	case CRYPTO_ALG_TYPE_AHASH_MASK:
		return crypto_init_shash_ops_async(tfm);
	}

	return -EINVAL;
}
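/*
 * The context size likewise depends on the requested interface: the
 * compat wrapper embeds the whole descriptor, while the async wrapper
 * only stores a pointer to the shash transform.
 */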
static unsigned int crypto_shash_ctxsize(struct crypto_alg *alg, u32 type,
					 u32 mask)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	switch (mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_HASH_MASK:
		return sizeof(struct shash_desc) + salg->descsize;
	case CRYPTO_ALG_TYPE_AHASH_MASK:
		return sizeof(struct crypto_shash *);
	}

	return 0;
}
static int crypto_shash_init_tfm(struct crypto_tfm *tfm,
				 const struct crypto_type *frontend)
{
	return 0;
}

static unsigned int crypto_shash_extsize(struct crypto_alg *alg,
					 const struct crypto_type *frontend)
{
	return alg->cra_ctxsize;
}

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__attribute__ ((unused));
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
	seq_printf(m, "descsize     : %u\n", salg->descsize);
}
static const struct crypto_type crypto_shash_type = {
	.ctxsize = crypto_shash_ctxsize,
	.extsize = crypto_shash_extsize,
	.init = crypto_init_shash_ops,
	.init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};
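/*
 * Allocate a synchronous hash transform by algorithm name.  A minimal,
 * illustrative sketch of typical use (error handling omitted; the
 * descriptor is sized via crypto_shash_descsize()):
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sha1", 0, 0);
 *	struct shash_desc *desc = kmalloc(sizeof(*desc) +
 *					  crypto_shash_descsize(tfm),
 *					  GFP_KERNEL);
 *
 *	desc->tfm = tfm;
 *	desc->flags = 0;
 *	crypto_shash_digest(desc, data, len, out);
 *	kfree(desc);
 *	crypto_free_shash(tfm);
 */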
struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);
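/*
 * Register a shash algorithm with the crypto core, sanity-checking the
 * digest and descriptor sizes against PAGE_SIZE / 8 first.
 */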
int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > PAGE_SIZE / 8 ||
	    alg->descsize > PAGE_SIZE / 8)
		return -EINVAL;

	base->cra_type = &crypto_shash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);

int crypto_unregister_shash(struct shash_alg *alg)
{
	return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);
void shash_free_instance(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(shash_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_free_instance);
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");