/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>

#include "internal.h"

static const struct crypto_type crypto_shash_type;

static inline struct crypto_shash *__crypto_shash_cast(struct crypto_tfm *tfm)
{
        return container_of(tfm, struct crypto_shash, base);
}

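/*
 * Many shash implementations require the key, input and output pointers to
 * satisfy the algorithm's alignment mask.  The *_unaligned helpers in this
 * file bounce misaligned buffers through a suitably aligned temporary
 * buffer before calling into the algorithm.
 */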
static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
                                  unsigned int keylen)
{
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);
        unsigned long absize;
        u8 *buffer, *alignbuffer;
        int err;

        absize = keylen + (alignmask & ~(CRYPTO_MINALIGN - 1));
        buffer = kmalloc(absize, GFP_KERNEL);
        if (!buffer)
                return -ENOMEM;

        alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
        memcpy(alignbuffer, key, keylen);
        err = shash->setkey(tfm, alignbuffer, keylen);
        memset(alignbuffer, 0, keylen);
        kfree(buffer);
        return err;
}

int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
                        unsigned int keylen)
{
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if ((unsigned long)key & alignmask)
                return shash_setkey_unaligned(tfm, key, keylen);

        return shash->setkey(tfm, key, keylen);
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);

static inline unsigned int shash_align_buffer_size(unsigned len,
                                                   unsigned long mask)
{
        return len + (mask & ~(__alignof__(u8 __attribute__ ((aligned))) - 1));
}

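/*
 * Hash the leading misaligned bytes of @data out of an aligned stack buffer
 * so that the remainder can be passed through directly.  For example, with
 * an alignment mask of 7 and @data starting 3 bytes past an 8-byte
 * boundary, unaligned_len is 8 - 3 = 5: five bytes are copied and hashed
 * from the buffer, and hashing continues at the now-aligned data + 5.
 */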
static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
                                  unsigned int len)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);
        unsigned int unaligned_len = alignmask + 1 -
                                     ((unsigned long)data & alignmask);
        u8 buf[shash_align_buffer_size(unaligned_len, alignmask)]
                __attribute__ ((aligned));

        /* Never copy or hash more bytes than the caller supplied. */
        if (unaligned_len > len)
                unaligned_len = len;

        memcpy(buf, data, unaligned_len);

        return shash->update(desc, buf, unaligned_len) ?:
               shash->update(desc, data + unaligned_len, len - unaligned_len);
}

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
                        unsigned int len)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if ((unsigned long)data & alignmask)
                return shash_update_unaligned(desc, data, len);

        return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
        struct crypto_shash *tfm = desc->tfm;
        unsigned long alignmask = crypto_shash_alignmask(tfm);
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned int ds = crypto_shash_digestsize(tfm);
        u8 buf[shash_align_buffer_size(ds, alignmask)]
                __attribute__ ((aligned));
        int err;

        err = shash->final(desc, buf);
        memcpy(out, buf, ds);
        return err;
}

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if ((unsigned long)out & alignmask)
                return shash_final_unaligned(desc, out);

        return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);

static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
                                 unsigned int len, u8 *out)
{
        return crypto_shash_update(desc, data, len) ?:
               crypto_shash_final(desc, out);
}

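/*
 * finup hashes a final chunk of data and writes out the digest in a single
 * call.  If the algorithm provides no finup callback, or either pointer is
 * misaligned, fall back to a separate update followed by final.
 */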
int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
                       unsigned int len, u8 *out)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if (((unsigned long)data | (unsigned long)out) & alignmask ||
            !shash->finup)
                return shash_finup_unaligned(desc, data, len, out);

        return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
                                  unsigned int len, u8 *out)
{
        return crypto_shash_init(desc) ?:
               crypto_shash_update(desc, data, len) ?:
               crypto_shash_final(desc, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
                        unsigned int len, u8 *out)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if (((unsigned long)data | (unsigned long)out) & alignmask ||
            !shash->digest)
                return shash_digest_unaligned(desc, data, len, out);

        return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);

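/*
 * The shash_async_* functions below expose a synchronous shash algorithm
 * through the asynchronous ahash interface.  The ahash request context is
 * used to hold a shash_desc, and scatterlist input is walked with
 * crypto_hash_walk so that each mapped chunk can be fed to the shash
 * callbacks in turn.
 */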
static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
                              unsigned int keylen)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

        return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
        struct shash_desc *desc = ahash_request_ctx(req);

        desc->tfm = *ctx;
        desc->flags = req->base.flags;

        return crypto_shash_init(desc);
}

static int shash_async_update(struct ahash_request *req)
{
        struct shash_desc *desc = ahash_request_ctx(req);
        struct crypto_hash_walk walk;
        int nbytes;

        for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
             nbytes = crypto_hash_walk_done(&walk, nbytes))
                nbytes = crypto_shash_update(desc, walk.data, nbytes);

        return nbytes;
}

static int shash_async_final(struct ahash_request *req)
{
        return crypto_shash_final(ahash_request_ctx(req), req->result);
}

static int shash_async_digest(struct ahash_request *req)
{
        struct scatterlist *sg = req->src;
        unsigned int offset = sg->offset;
        unsigned int nbytes = req->nbytes;
        int err;

        if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
                struct crypto_shash **ctx =
                        crypto_ahash_ctx(crypto_ahash_reqtfm(req));
                struct shash_desc *desc = ahash_request_ctx(req);
                void *data;

                desc->tfm = *ctx;
                desc->flags = req->base.flags;

                /*
                 * The request fits within the first scatterlist entry and
                 * page: hash it directly from the mapped page.
                 */
                data = crypto_kmap(sg_page(sg), 0);
                err = crypto_shash_digest(desc, data + offset, nbytes,
                                          req->result);
                crypto_kunmap(data, 0);
                crypto_yield(desc->flags);
                goto out;
        }

        err = shash_async_init(req);
        if (err)
                goto out;

        err = shash_async_update(req);
        if (err)
                goto out;

        err = shash_async_final(req);

out:
        return err;
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
        struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

        crypto_free_shash(*ctx);
}

static int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
        struct crypto_alg *calg = tfm->__crt_alg;
        struct shash_alg *alg = __crypto_shash_alg(calg);
        struct ahash_tfm *crt = &tfm->crt_ahash;
        struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
        struct crypto_shash *shash;

        if (!crypto_mod_get(calg))
                return -EAGAIN;

        shash = __crypto_shash_cast(crypto_create_tfm(
                calg, &crypto_shash_type));
        if (IS_ERR(shash)) {
                crypto_mod_put(calg);
                return PTR_ERR(shash);
        }

        *ctx = shash;
        tfm->exit = crypto_exit_shash_ops_async;

        crt->init = shash_async_init;
        crt->update = shash_async_update;
        crt->final = shash_async_final;
        crt->digest = shash_async_digest;
        crt->setkey = shash_async_setkey;

        crt->digestsize = alg->digestsize;
        crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

        return 0;
}

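/*
 * crypto_type glue: when an shash algorithm is instantiated through another
 * hash interface (currently only ahash), install the wrapper ops defined
 * above.
 */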
static int crypto_init_shash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
        switch (mask & CRYPTO_ALG_TYPE_MASK) {
        case CRYPTO_ALG_TYPE_AHASH_MASK:
                return crypto_init_shash_ops_async(tfm);
        }

        return -EINVAL;
}

static unsigned int crypto_shash_ctxsize(struct crypto_alg *alg, u32 type,
                                         u32 mask)
{
        switch (mask & CRYPTO_ALG_TYPE_MASK) {
        case CRYPTO_ALG_TYPE_AHASH_MASK:
                return sizeof(struct crypto_shash *);
        }

        return 0;
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm,
                                 const struct crypto_type *frontend)
{
        if (frontend->type != CRYPTO_ALG_TYPE_SHASH)
                return -EINVAL;
        return 0;
}

static unsigned int crypto_shash_extsize(struct crypto_alg *alg,
                                         const struct crypto_type *frontend)
{
        return alg->cra_ctxsize;
}

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
        __attribute__ ((unused));
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
        struct shash_alg *salg = __crypto_shash_alg(alg);

        seq_printf(m, "type         : shash\n");
        seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
        seq_printf(m, "digestsize   : %u\n", salg->digestsize);
        seq_printf(m, "descsize     : %u\n", salg->descsize);
}

static const struct crypto_type crypto_shash_type = {
        .ctxsize = crypto_shash_ctxsize,
        .extsize = crypto_shash_extsize,
        .init = crypto_init_shash_ops,
        .init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
        .show = crypto_shash_show,
#endif
        .maskclear = ~CRYPTO_ALG_TYPE_MASK,
        .maskset = CRYPTO_ALG_TYPE_MASK,
        .type = CRYPTO_ALG_TYPE_SHASH,
        .tfmsize = offsetof(struct crypto_shash, base),
};

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
                                        u32 mask)
{
        return __crypto_shash_cast(
                crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask));
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);
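
/*
 * Illustrative usage sketch (not part of this file): hashing a buffer with
 * a synchronous hash.  The algorithm name "sha1", the caller-supplied
 * data/len buffer and the digest size below are assumptions made for this
 * example only.
 *
 *	struct crypto_shash *tfm;
 *	struct shash_desc *desc;
 *	u8 digest[20];
 *	int err;
 *
 *	tfm = crypto_alloc_shash("sha1", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(tfm), GFP_KERNEL);
 *	if (!desc) {
 *		crypto_free_shash(tfm);
 *		return -ENOMEM;
 *	}
 *	desc->tfm = tfm;
 *	desc->flags = 0;
 *
 *	err = crypto_shash_digest(desc, data, len, digest);
 *
 *	kfree(desc);
 *	crypto_free_shash(tfm);
 */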

int crypto_register_shash(struct shash_alg *alg)
{
        struct crypto_alg *base = &alg->base;

        if (alg->digestsize > PAGE_SIZE / 8 ||
            alg->descsize > PAGE_SIZE / 8)
                return -EINVAL;

        base->cra_type = &crypto_shash_type;
        base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
        base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

        return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);
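
/*
 * Illustrative registration sketch (not part of this file): a driver
 * providing a synchronous hash fills in a struct shash_alg and registers
 * it.  All my_* names and sizes below are hypothetical placeholders.
 *
 *	static struct shash_alg my_alg = {
 *		.digestsize	= MY_DIGEST_SIZE,
 *		.descsize	= sizeof(struct my_desc_ctx),
 *		.init		= my_init,
 *		.update		= my_update,
 *		.final		= my_final,
 *		.base		= {
 *			.cra_name	= "mydigest",
 *			.cra_blocksize	= MY_BLOCK_SIZE,
 *			.cra_module	= THIS_MODULE,
 *		},
 *	};
 *
 *	static int __init my_module_init(void)
 *	{
 *		return crypto_register_shash(&my_alg);
 *	}
 */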

int crypto_unregister_shash(struct shash_alg *alg)
{
        return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");