/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched/signal.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/completion.h>
#include "internal.h"

/* The list of all registered algorithms, protected by crypto_alg_sem. */
LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

/* Notifier chain for algorithm requests and registration events. */
BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);

/* Take a reference on @alg, pinning the module that provides it. */
struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
        return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

/* Drop a reference taken with crypto_mod_get(). */
void crypto_mod_put(struct crypto_alg *alg)
{
        struct module *module = alg->cra_module;

        crypto_alg_put(alg);
        module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);

/*
 * Test larvals are registered while an algorithm awaits its self-test
 * result; they are the only larvals with cra_driver_name set.
 */
static inline int crypto_is_test_larval(struct crypto_larval *larval)
{
        return larval->alg.cra_driver_name[0];
}

/*
 * Find the best match for @name: an exact cra_driver_name match wins
 * outright, otherwise the highest-priority cra_name match is returned.
 * Must be called with crypto_alg_sem held.
 */
static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
                                              u32 mask)
{
        struct crypto_alg *q, *alg = NULL;
        int best = -2;

        list_for_each_entry(q, &crypto_alg_list, cra_list) {
                int exact, fuzzy;

                if (crypto_is_moribund(q))
                        continue;

                if ((q->cra_flags ^ type) & mask)
                        continue;

                if (crypto_is_larval(q) &&
                    !crypto_is_test_larval((struct crypto_larval *)q) &&
                    ((struct crypto_larval *)q)->mask != mask)
                        continue;

                exact = !strcmp(q->cra_driver_name, name);
                fuzzy = !strcmp(q->cra_name, name);
                if (!exact && !(fuzzy && q->cra_priority > best))
                        continue;

                if (unlikely(!crypto_mod_get(q)))
                        continue;

                best = q->cra_priority;
                if (alg)
                        crypto_mod_put(alg);
                alg = q;

                if (exact)
                        break;
        }

        return alg;
}

static void crypto_larval_destroy(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;

        BUG_ON(!crypto_is_larval(alg));
        if (larval->adult)
                crypto_mod_put(larval->adult);
        kfree(larval);
}

struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
        struct crypto_larval *larval;

        larval = kzalloc(sizeof(*larval), GFP_KERNEL);
        if (!larval)
                return ERR_PTR(-ENOMEM);

        larval->mask = mask;
        larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
        larval->alg.cra_priority = -1;
        larval->alg.cra_destroy = crypto_larval_destroy;

        strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
        init_completion(&larval->completion);

        return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);

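/*
 * A larval is a temporary placeholder published on crypto_alg_list while
 * the real algorithm is being created or tested. Waiters sleep on
 * ->completion; once the larval is killed, ->adult (if set) points at
 * the finished algorithm.
 */
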
static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
                                            u32 mask)
{
        struct crypto_alg *alg;
        struct crypto_larval *larval;

        larval = crypto_larval_alloc(name, type, mask);
        if (IS_ERR(larval))
                return ERR_CAST(larval);

        atomic_set(&larval->alg.cra_refcnt, 2);

        down_write(&crypto_alg_sem);
        alg = __crypto_alg_lookup(name, type, mask);
        if (!alg) {
                alg = &larval->alg;
                list_add(&alg->cra_list, &crypto_alg_list);
        }
        up_write(&crypto_alg_sem);

        if (alg != &larval->alg) {
                kfree(larval);
                if (crypto_is_larval(alg))
                        alg = crypto_larval_wait(alg);
        }

        return alg;
}

void crypto_larval_kill(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;

        down_write(&crypto_alg_sem);
        list_del(&alg->cra_list);
        up_write(&crypto_alg_sem);
        complete_all(&larval->completion);
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;
        long timeout;

        timeout = wait_for_completion_killable_timeout(
                &larval->completion, 60 * HZ);

        alg = larval->adult;
        if (timeout < 0)
                alg = ERR_PTR(-EINTR);
        else if (!timeout)
                alg = ERR_PTR(-ETIMEDOUT);
        else if (!alg)
                alg = ERR_PTR(-ENOENT);
        else if (crypto_is_test_larval(larval) &&
                 !(alg->cra_flags & CRYPTO_ALG_TESTED))
                alg = ERR_PTR(-EAGAIN);
        else if (!crypto_mod_get(alg))
                alg = ERR_PTR(-EAGAIN);
        crypto_mod_put(&larval->alg);

        return alg;
}

struct crypto_alg *crypto_alg_lookup(const char *name, u32 type, u32 mask)
{
        struct crypto_alg *alg;

        down_read(&crypto_alg_sem);
        alg = __crypto_alg_lookup(name, type, mask);
        up_read(&crypto_alg_sem);

        return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_lookup);

struct crypto_alg *crypto_larval_lookup(const char *name, u32 type, u32 mask)
{
        struct crypto_alg *alg;

        if (!name)
                return ERR_PTR(-ENOENT);

        type &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
        mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);

        alg = crypto_alg_lookup(name, type, mask);
        if (!alg && !(mask & CRYPTO_NOLOAD)) {
                /* Modules advertise "crypto-<name>" via MODULE_ALIAS_CRYPTO. */
                request_module("crypto-%s", name);

                if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
                      CRYPTO_ALG_NEED_FALLBACK))
                        request_module("crypto-%s-all", name);

                alg = crypto_alg_lookup(name, type, mask);
        }

        if (alg)
                return crypto_is_larval(alg) ? crypto_larval_wait(alg) : alg;

        return crypto_larval_add(name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_larval_lookup);

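/*
 * Ask the notifier chain (typically cryptomgr) to act on @val; if nobody
 * was listening, load cryptomgr and try once more.
 */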
int crypto_probing_notify(unsigned long val, void *v)
{
        int ok;

        ok = blocking_notifier_call_chain(&crypto_chain, val, v);
        if (ok == NOTIFY_DONE) {
                request_module("cryptomgr");
                ok = blocking_notifier_call_chain(&crypto_chain, val, v);
        }

        return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);

struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
        struct crypto_alg *alg;
        struct crypto_alg *larval;
        int ok;

        if (!((type | mask) & CRYPTO_ALG_TESTED)) {
                type |= CRYPTO_ALG_TESTED;
                mask |= CRYPTO_ALG_TESTED;
        }

        /*
         * If the internal flag is set for a cipher, require a caller to
         * invoke the cipher with the internal flag to use that cipher.
         * Also, if a caller wants to allocate a cipher that may or may
         * not be an internal cipher, use type | CRYPTO_ALG_INTERNAL and
         * !(mask & CRYPTO_ALG_INTERNAL).
         */
        if (!((type | mask) & CRYPTO_ALG_INTERNAL))
                mask |= CRYPTO_ALG_INTERNAL;

        larval = crypto_larval_lookup(name, type, mask);
        if (IS_ERR(larval) || !crypto_is_larval(larval))
                return larval;

        ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

        if (ok == NOTIFY_STOP)
                alg = crypto_larval_wait(larval);
        else {
                crypto_mod_put(larval);
                alg = ERR_PTR(-ENOENT);
        }
        crypto_larval_kill(larval);
        return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);

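/*
 * Editorial example (a sketch, not part of the original file): following
 * the comment in crypto_alg_mod_lookup() above, a caller willing to take
 * either an internal or a regular implementation would pass
 *
 *      alg = crypto_alg_mod_lookup(name, type | CRYPTO_ALG_INTERNAL, mask);
 *
 * with CRYPTO_ALG_INTERNAL clear in @mask, while an ordinary caller
 * leaves the flag out of both; the function then sets it in @mask so
 * internal-only ciphers are screened out.
 */
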
static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
        const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

        if (type_obj)
                return type_obj->init(tfm, type, mask);

        switch (crypto_tfm_alg_type(tfm)) {
        case CRYPTO_ALG_TYPE_CIPHER:
                return crypto_init_cipher_ops(tfm);

        case CRYPTO_ALG_TYPE_COMPRESS:
                return crypto_init_compress_ops(tfm);

        default:
                break;
        }

        BUG();
        return -EINVAL;
}

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
        const struct crypto_type *type = tfm->__crt_alg->cra_type;

        if (type && tfm->exit)
                tfm->exit(tfm);
}

static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
        const struct crypto_type *type_obj = alg->cra_type;
        unsigned int len;

        len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
        if (type_obj)
                return len + type_obj->ctxsize(alg, type, mask);

        switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
        default:
                BUG();

        case CRYPTO_ALG_TYPE_CIPHER:
                len += crypto_cipher_ctxsize(alg);
                break;

        case CRYPTO_ALG_TYPE_COMPRESS:
                len += crypto_compress_ctxsize(alg);
                break;
        }

        return len;
}

/*
 * Mark an algorithm as dying after a fatal (-EAGAIN) initialisation
 * failure so that it is not picked again for new transforms.
 */
static void crypto_shoot_alg(struct crypto_alg *alg)
{
        down_write(&crypto_alg_sem);
        alg->cra_flags |= CRYPTO_ALG_DYING;
        up_write(&crypto_alg_sem);
}

struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
                                      u32 mask)
{
        struct crypto_tfm *tfm = NULL;
        unsigned int tfm_size;
        int err = -ENOMEM;

        tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
        tfm = kzalloc(tfm_size, GFP_KERNEL);
        if (tfm == NULL)
                goto out_err;

        tfm->__crt_alg = alg;

        err = crypto_init_ops(tfm, type, mask);
        if (err)
                goto out_free_tfm;

        if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
                goto cra_init_failed;

        goto out;

cra_init_failed:
        crypto_exit_ops(tfm);
out_free_tfm:
        if (err == -EAGAIN)
                crypto_shoot_alg(alg);
        kfree(tfm);
out_err:
        tfm = ERR_PTR(err);
out:
        return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/*
 * crypto_alloc_base - Locate algorithm and allocate transform
 * @alg_name: Name of algorithm
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 *
 * This function should not be used by new algorithm types.
 * Please use crypto_alloc_tfm instead.
 *
 * crypto_alloc_base() will first attempt to locate an already loaded
 * algorithm. If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias. If that fails it will send a query to any loaded crypto manager
 * to construct an algorithm on the fly. A refcount is grabbed on the
 * algorithm which is then associated with the new transform.
 *
 * The returned transform is of an indeterminate type. Most people
 * should use one of the more specific allocation functions such as
 * crypto_alloc_blkcipher.
 *
 * In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
        struct crypto_tfm *tfm;
        int err;

        for (;;) {
                struct crypto_alg *alg;

                alg = crypto_alg_mod_lookup(alg_name, type, mask);
                if (IS_ERR(alg)) {
                        err = PTR_ERR(alg);
                        goto err;
                }

                tfm = __crypto_alloc_tfm(alg, type, mask);
                if (!IS_ERR(tfm))
                        return tfm;

                crypto_mod_put(alg);
                err = PTR_ERR(tfm);

err:
                if (err != -EAGAIN)
                        break;
                if (fatal_signal_pending(current)) {
                        err = -EINTR;
                        break;
                }
        }

        return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);

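/*
 * Editorial example (a sketch, not part of the original file): a legacy
 * caller might use crypto_alloc_base() like this, with crypto_free_tfm()
 * releasing both the transform and its algorithm reference:
 *
 *      struct crypto_tfm *tfm = crypto_alloc_base("md5", 0, 0);
 *
 *      if (IS_ERR(tfm))
 *              return PTR_ERR(tfm);
 *      ...
 *      crypto_free_tfm(tfm);
 */
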
void *crypto_create_tfm(struct crypto_alg *alg,
                        const struct crypto_type *frontend)
{
        char *mem;
        struct crypto_tfm *tfm = NULL;
        unsigned int tfmsize;
        unsigned int total;
        int err = -ENOMEM;

        tfmsize = frontend->tfmsize;
        total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

        mem = kzalloc(total, GFP_KERNEL);
        if (mem == NULL)
                goto out_err;

        tfm = (struct crypto_tfm *)(mem + tfmsize);
        tfm->__crt_alg = alg;

        err = frontend->init_tfm(tfm);
        if (err)
                goto out_free_tfm;

        if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
                goto cra_init_failed;

        goto out;

cra_init_failed:
        crypto_exit_ops(tfm);
out_free_tfm:
        if (err == -EAGAIN)
                crypto_shoot_alg(alg);
        kfree(mem);
out_err:
        mem = ERR_PTR(err);
out:
        return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm);

/*
 * Look up an algorithm on behalf of a frontend type (e.g. shash), letting
 * the frontend adjust the type/mask bits and supply its own lookup hook.
 */
struct crypto_alg *crypto_find_alg(const char *alg_name,
                                   const struct crypto_type *frontend,
                                   u32 type, u32 mask)
{
        struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask) =
                crypto_alg_mod_lookup;

        if (frontend) {
                type &= frontend->maskclear;
                mask &= frontend->maskclear;
                type |= frontend->type;
                mask |= frontend->maskset;

                if (frontend->lookup)
                        lookup = frontend->lookup;
        }

        return lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);

/*
 * crypto_alloc_tfm - Locate algorithm and allocate transform
 * @alg_name: Name of algorithm
 * @frontend: Frontend algorithm type
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 *
 * crypto_alloc_tfm() will first attempt to locate an already loaded
 * algorithm. If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias. If that fails it will send a query to any loaded crypto manager
 * to construct an algorithm on the fly. A refcount is grabbed on the
 * algorithm which is then associated with the new transform.
 *
 * The returned transform is of an indeterminate type. Most people
 * should use one of the more specific allocation functions such as
 * crypto_alloc_blkcipher.
 *
 * In case of error the return value is an error pointer.
 */
void *crypto_alloc_tfm(const char *alg_name,
                       const struct crypto_type *frontend, u32 type, u32 mask)
{
        void *tfm;
        int err;

        for (;;) {
                struct crypto_alg *alg;

                alg = crypto_find_alg(alg_name, frontend, type, mask);
                if (IS_ERR(alg)) {
                        err = PTR_ERR(alg);
                        goto err;
                }

                tfm = crypto_create_tfm(alg, frontend);
                if (!IS_ERR(tfm))
                        return tfm;

                crypto_mod_put(alg);
                err = PTR_ERR(tfm);

err:
                if (err != -EAGAIN)
                        break;
                if (fatal_signal_pending(current)) {
                        err = -EINTR;
                        break;
                }
        }

        return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm);

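/*
 * Editorial note (a sketch, not part of the original file): the
 * type-specific allocators are thin wrappers around this function; for
 * example crypto_alloc_shash() in crypto/shash.c is essentially
 *
 *      return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
 */
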
/*
 * crypto_destroy_tfm - Free crypto transform
 * @mem: Start of tfm slab
 * @tfm: Transform to free
 *
 * This function frees up the transform and any associated resources,
 * then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
        struct crypto_alg *alg;

        if (unlikely(!mem))
                return;

        alg = tfm->__crt_alg;

        if (!tfm->exit && alg->cra_exit)
                alg->cra_exit(tfm);
        crypto_exit_ops(tfm);
        crypto_mod_put(alg);
        kzfree(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);

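/*
 * Editorial note: @mem and @tfm differ only when a frontend embeds the
 * tfm inside a larger allocation, as crypto_create_tfm() does above;
 * plain callers typically go through crypto_free_tfm(tfm), which expands
 * to crypto_destroy_tfm(tfm, tfm).
 */
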
int crypto_has_alg(const char *name, u32 type, u32 mask)
{
        int ret = 0;
        struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

        if (!IS_ERR(alg)) {
                crypto_mod_put(alg);
                ret = 1;
        }

        return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);

void crypto_req_done(struct crypto_async_request *req, int err)
{
        struct crypto_wait *wait = req->data;

        /*
         * A backlogged request reports -EINPROGRESS when it starts
         * running; only the final completion should wake the waiter.
         */
        if (err == -EINPROGRESS)
                return;

        wait->err = err;
        complete(&wait->completion);
}
EXPORT_SYMBOL_GPL(crypto_req_done);

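/*
 * Editorial example (a sketch, not part of the original file): this
 * callback pairs with struct crypto_wait to run an asynchronous request
 * synchronously:
 *
 *      DECLARE_CRYPTO_WAIT(wait);
 *
 *      skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
 *                                    CRYPTO_TFM_REQ_MAY_SLEEP,
 *                                    crypto_req_done, &wait);
 *      err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 */
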
MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");