/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/completion.h>
#include "internal.h"

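/*
 * Every registered algorithm lives on crypto_alg_list, which is
 * protected by crypto_alg_sem.  crypto_chain is a notifier chain used
 * to ask interested parties (such as cryptomgr) to construct or test
 * an algorithm on demand.
 */
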
LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

static inline struct crypto_alg *crypto_alg_get(struct crypto_alg *alg)
{
        atomic_inc(&alg->cra_refcnt);
        return alg;
}

struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
        return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
        struct module *module = alg->cra_module;

        crypto_alg_put(alg);
        module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);

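/*
 * A "larval" algorithm is a temporary stand-in registered while the
 * real ("adult") algorithm is still being loaded, instantiated or
 * tested.  Lookups that race with registration sleep on the larval's
 * completion.  Test larvals are created by the testing manager and
 * carry a driver name; plain lookup larvals do not, which is what the
 * check below relies on.
 */
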
static inline int crypto_is_test_larval(struct crypto_larval *larval)
{
        return larval->alg.cra_driver_name[0];
}

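/*
 * Lookup in __crypto_alg_lookup() below is two-tier: an exact match on
 * cra_driver_name (e.g. "aes-generic") always wins, while a match on
 * the generic cra_name (e.g. "aes") is fuzzy and only displaces an
 * earlier candidate if it has a higher cra_priority.  Exactly one
 * module/algorithm reference is held on whatever is returned.
 */
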
static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
                                              u32 mask)
{
        struct crypto_alg *q, *alg = NULL;
        int best = -2;

        list_for_each_entry(q, &crypto_alg_list, cra_list) {
                int exact, fuzzy;

                if (crypto_is_moribund(q))
                        continue;

                if ((q->cra_flags ^ type) & mask)
                        continue;

                if (crypto_is_larval(q) &&
                    !crypto_is_test_larval((struct crypto_larval *)q) &&
                    ((struct crypto_larval *)q)->mask != mask)
                        continue;

                exact = !strcmp(q->cra_driver_name, name);
                fuzzy = !strcmp(q->cra_name, name);
                if (!exact && !(fuzzy && q->cra_priority > best))
                        continue;

                if (unlikely(!crypto_mod_get(q)))
                        continue;

                best = q->cra_priority;
                if (alg)
                        crypto_mod_put(alg);
                alg = q;

                if (exact)
                        break;
        }

        return alg;
}

static void crypto_larval_destroy(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;

        BUG_ON(!crypto_is_larval(alg));
        if (larval->adult)
                crypto_mod_put(larval->adult);
        kfree(larval);
}

struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
        struct crypto_larval *larval;

        larval = kzalloc(sizeof(*larval), GFP_KERNEL);
        if (!larval)
                return ERR_PTR(-ENOMEM);

        larval->mask = mask;
        larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
        larval->alg.cra_priority = -1;
        larval->alg.cra_destroy = crypto_larval_destroy;

        strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
        init_completion(&larval->completion);

        return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);

static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
                                            u32 mask)
{
        struct crypto_alg *alg;
        struct crypto_larval *larval;

        larval = crypto_larval_alloc(name, type, mask);
        if (IS_ERR(larval))
                return ERR_CAST(larval);

        /* One reference for the registry list, one for the caller. */
        atomic_set(&larval->alg.cra_refcnt, 2);

        down_write(&crypto_alg_sem);
        alg = __crypto_alg_lookup(name, type, mask);
        if (!alg) {
                alg = &larval->alg;
                list_add(&alg->cra_list, &crypto_alg_list);
        }
        up_write(&crypto_alg_sem);

        /* Lost the race to another registrant: use its entry instead. */
        if (alg != &larval->alg) {
                kfree(larval);
                if (crypto_is_larval(alg))
                        alg = crypto_larval_wait(alg);
        }

        return alg;
}

void crypto_larval_kill(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;

        down_write(&crypto_alg_sem);
        list_del(&alg->cra_list);
        up_write(&crypto_alg_sem);
        complete_all(&larval->completion);
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);

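/*
 * crypto_larval_wait() below can fail in several ways: a signal gives
 * -EINTR, the 60 second timeout gives -ETIMEDOUT, a larval that never
 * matured gives -ENOENT, and an adult that has not yet passed testing
 * gives -EAGAIN so the caller may retry.
 */
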
static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;
        long timeout;

        timeout = wait_for_completion_interruptible_timeout(
                &larval->completion, 60 * HZ);

        alg = larval->adult;
        if (timeout < 0)
                alg = ERR_PTR(-EINTR);
        else if (!timeout)
                alg = ERR_PTR(-ETIMEDOUT);
        else if (!alg)
                alg = ERR_PTR(-ENOENT);
        else if (crypto_is_test_larval(larval) &&
                 !(alg->cra_flags & CRYPTO_ALG_TESTED))
                alg = ERR_PTR(-EAGAIN);
        else if (!crypto_mod_get(alg))
                alg = ERR_PTR(-EAGAIN);
        crypto_mod_put(&larval->alg);

        return alg;
}

struct crypto_alg *crypto_alg_lookup(const char *name, u32 type, u32 mask)
{
        struct crypto_alg *alg;

        down_read(&crypto_alg_sem);
        alg = __crypto_alg_lookup(name, type, mask);
        up_read(&crypto_alg_sem);

        return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_lookup);

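/*
 * crypto_larval_lookup() below backs a failed lookup with module
 * autoloading.  The extra "%s-all" alias is only requested when the
 * caller did not insist on an implementation that needs no fallback;
 * this is believed to avoid recursive module loads while a fallback is
 * itself being resolved.
 */
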
struct crypto_alg *crypto_larval_lookup(const char *name, u32 type, u32 mask)
{
        struct crypto_alg *alg;

        if (!name)
                return ERR_PTR(-ENOENT);

        mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
        type &= mask;

        alg = crypto_alg_lookup(name, type, mask);
        if (!alg) {
                request_module("%s", name);

                if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
                      CRYPTO_ALG_NEED_FALLBACK))
                        request_module("%s-all", name);

                alg = crypto_alg_lookup(name, type, mask);
        }

        if (alg)
                return crypto_is_larval(alg) ? crypto_larval_wait(alg) : alg;

        return crypto_larval_add(name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_larval_lookup);

int crypto_probing_notify(unsigned long val, void *v)
{
        int ok;

        ok = blocking_notifier_call_chain(&crypto_chain, val, v);
        if (ok == NOTIFY_DONE) {
                request_module("cryptomgr");
                ok = blocking_notifier_call_chain(&crypto_chain, val, v);
        }

        return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);

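/*
 * crypto_alg_mod_lookup() below is the complete resolution path: look
 * the algorithm up, autoload a module if necessary, and as a last
 * resort ask the crypto manager via the notifier chain to instantiate
 * it from a template, e.g. building "cbc(aes)" from "cbc" and "aes".
 * The caller then waits for the larval to mature.
 */
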
struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
        struct crypto_alg *alg;
        struct crypto_alg *larval;
        int ok;

        /* Unless requested otherwise, only return tested algorithms. */
        if (!((type | mask) & CRYPTO_ALG_TESTED)) {
                type |= CRYPTO_ALG_TESTED;
                mask |= CRYPTO_ALG_TESTED;
        }

        larval = crypto_larval_lookup(name, type, mask);
        if (IS_ERR(larval) || !crypto_is_larval(larval))
                return larval;

        ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

        if (ok == NOTIFY_STOP)
                alg = crypto_larval_wait(larval);
        else {
                crypto_mod_put(larval);
                alg = ERR_PTR(-ENOENT);
        }
        crypto_larval_kill(larval);
        return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);

static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
        const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

        if (type_obj)
                return type_obj->init(tfm, type, mask);

        switch (crypto_tfm_alg_type(tfm)) {
        case CRYPTO_ALG_TYPE_CIPHER:
                return crypto_init_cipher_ops(tfm);

        case CRYPTO_ALG_TYPE_COMPRESS:
                return crypto_init_compress_ops(tfm);

        default:
                break;
        }

        BUG();
        return -EINVAL;
}

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
        const struct crypto_type *type = tfm->__crt_alg->cra_type;

        if (type) {
                if (tfm->exit)
                        tfm->exit(tfm);
                return;
        }

        switch (crypto_tfm_alg_type(tfm)) {
        case CRYPTO_ALG_TYPE_CIPHER:
                crypto_exit_cipher_ops(tfm);
                break;

        case CRYPTO_ALG_TYPE_COMPRESS:
                crypto_exit_compress_ops(tfm);
                break;

        default:
                BUG();
        }
}

static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
        const struct crypto_type *type_obj = alg->cra_type;
        unsigned int len;

        len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
        if (type_obj)
                return len + type_obj->ctxsize(alg, type, mask);

        switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
        default:
                BUG();

        case CRYPTO_ALG_TYPE_CIPHER:
                len += crypto_cipher_ctxsize(alg);
                break;

        case CRYPTO_ALG_TYPE_COMPRESS:
                len += crypto_compress_ctxsize(alg);
                break;
        }

        return len;
}

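/*
 * crypto_shoot_alg() below marks an algorithm whose initialisation
 * failed as dying, so later lookups skip it (crypto_is_moribund()) and
 * an -EAGAIN retry can settle on a different implementation.
 */
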
void crypto_shoot_alg(struct crypto_alg *alg)
{
        down_write(&crypto_alg_sem);
        alg->cra_flags |= CRYPTO_ALG_DYING;
        up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);

struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
                                      u32 mask)
{
        struct crypto_tfm *tfm = NULL;
        unsigned int tfm_size;
        int err = -ENOMEM;

        tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
        tfm = kzalloc(tfm_size, GFP_KERNEL);
        if (tfm == NULL)
                goto out_err;

        tfm->__crt_alg = alg;

        err = crypto_init_ops(tfm, type, mask);
        if (err)
                goto out_free_tfm;

        if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
                goto cra_init_failed;

        goto out;

cra_init_failed:
        crypto_exit_ops(tfm);
out_free_tfm:
        if (err == -EAGAIN)
                crypto_shoot_alg(alg);
        kfree(tfm);
out_err:
        tfm = ERR_PTR(err);
out:
        return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/*
 *      crypto_alloc_base - Locate algorithm and allocate transform
 *      @alg_name: Name of algorithm
 *      @type: Type of algorithm
 *      @mask: Mask for type comparison
 *
 *      This function should not be used by new algorithm types.
 *      Please use crypto_alloc_tfm instead.
 *
 *      crypto_alloc_base() will first attempt to locate an already loaded
 *      algorithm.  If that fails and the kernel supports dynamically loadable
 *      modules, it will then attempt to load a module of the same name or
 *      alias.  If that fails it will send a query to any loaded crypto manager
 *      to construct an algorithm on the fly.  A refcount is grabbed on the
 *      algorithm which is then associated with the new transform.
 *
 *      The returned transform is of a non-determinate type.  Most people
 *      should use one of the more specific allocation functions such as
 *      crypto_alloc_blkcipher.
 *
 *      In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
        struct crypto_tfm *tfm;
        int err;

        for (;;) {
                struct crypto_alg *alg;

                alg = crypto_alg_mod_lookup(alg_name, type, mask);
                if (IS_ERR(alg)) {
                        err = PTR_ERR(alg);
                        goto err;
                }

                tfm = __crypto_alloc_tfm(alg, type, mask);
                if (!IS_ERR(tfm))
                        return tfm;

                crypto_mod_put(alg);
                err = PTR_ERR(tfm);

err:
                if (err != -EAGAIN)
                        break;
                if (signal_pending(current)) {
                        err = -EINTR;
                        break;
                }
        }

        return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);

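/*
 * Illustrative use only (a sketch, not part of the original file; it
 * assumes the crypto_free_tfm() helper from <linux/crypto.h>):
 *
 *      struct crypto_tfm *tfm;
 *
 *      tfm = crypto_alloc_base("aes", CRYPTO_ALG_TYPE_CIPHER,
 *                              CRYPTO_ALG_TYPE_MASK);
 *      if (IS_ERR(tfm))
 *              return PTR_ERR(tfm);
 *      ...
 *      crypto_free_tfm(tfm);
 */
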
void *crypto_create_tfm(struct crypto_alg *alg,
                        const struct crypto_type *frontend)
{
        char *mem;
        struct crypto_tfm *tfm = NULL;
        unsigned int tfmsize;
        unsigned int total;
        int err = -ENOMEM;

        tfmsize = frontend->tfmsize;
        total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

        mem = kzalloc(total, GFP_KERNEL);
        if (mem == NULL)
                goto out_err;

        tfm = (struct crypto_tfm *)(mem + tfmsize);
        tfm->__crt_alg = alg;

        err = frontend->init_tfm(tfm);
        if (err)
                goto out_free_tfm;

        if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
                goto cra_init_failed;

        goto out;

cra_init_failed:
        crypto_exit_ops(tfm);
out_free_tfm:
        if (err == -EAGAIN)
                crypto_shoot_alg(alg);
        kfree(mem);
out_err:
        mem = ERR_PTR(err);
out:
        return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm);

struct crypto_alg *crypto_find_alg(const char *alg_name,
                                   const struct crypto_type *frontend,
                                   u32 type, u32 mask)
{
        struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask) =
                crypto_alg_mod_lookup;

        if (frontend) {
                type &= frontend->maskclear;
                mask &= frontend->maskclear;
                type |= frontend->type;
                mask |= frontend->maskset;

                if (frontend->lookup)
                        lookup = frontend->lookup;
        }

        return lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);

/*
 *      crypto_alloc_tfm - Locate algorithm and allocate transform
 *      @alg_name: Name of algorithm
 *      @frontend: Frontend algorithm type
 *      @type: Type of algorithm
 *      @mask: Mask for type comparison
 *
 *      crypto_alloc_tfm() will first attempt to locate an already loaded
 *      algorithm.  If that fails and the kernel supports dynamically loadable
 *      modules, it will then attempt to load a module of the same name or
 *      alias.  If that fails it will send a query to any loaded crypto manager
 *      to construct an algorithm on the fly.  A refcount is grabbed on the
 *      algorithm which is then associated with the new transform.
 *
 *      The returned transform is of a non-determinate type.  Most people
 *      should use one of the more specific allocation functions such as
 *      crypto_alloc_blkcipher.
 *
 *      In case of error the return value is an error pointer.
 */
void *crypto_alloc_tfm(const char *alg_name,
                       const struct crypto_type *frontend, u32 type, u32 mask)
{
        void *tfm;
        int err;

        for (;;) {
                struct crypto_alg *alg;

                alg = crypto_find_alg(alg_name, frontend, type, mask);
                if (IS_ERR(alg)) {
                        err = PTR_ERR(alg);
                        goto err;
                }

                tfm = crypto_create_tfm(alg, frontend);
                if (!IS_ERR(tfm))
                        return tfm;

                crypto_mod_put(alg);
                err = PTR_ERR(tfm);

err:
                if (err != -EAGAIN)
                        break;
                if (signal_pending(current)) {
                        err = -EINTR;
                        break;
                }
        }

        return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm);

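/*
 * Typed allocators are thin wrappers around crypto_alloc_tfm().  As an
 * illustrative sketch (crypto_shash_type and struct crypto_shash live
 * outside this file, in crypto/shash.c and <crypto/hash.h>):
 *
 *      struct crypto_shash *crypto_alloc_shash(const char *alg_name,
 *                                              u32 type, u32 mask)
 *      {
 *              return crypto_alloc_tfm(alg_name, &crypto_shash_type,
 *                                      type, mask);
 *      }
 */
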
/*
 *      crypto_destroy_tfm - Free crypto transform
 *      @mem: Start of tfm slab
 *      @tfm: Transform to free
 *
 *      This function frees up the transform and any associated resources,
 *      then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
        struct crypto_alg *alg;

        if (unlikely(!mem))
                return;

        alg = tfm->__crt_alg;

        if (!tfm->exit && alg->cra_exit)
                alg->cra_exit(tfm);
        crypto_exit_ops(tfm);
        crypto_mod_put(alg);
        kzfree(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);

int crypto_has_alg(const char *name, u32 type, u32 mask)
{
        int ret = 0;
        struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

        if (!IS_ERR(alg)) {
                crypto_mod_put(alg);
                ret = 1;
        }

        return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);

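/*
 * Illustrative use only (a sketch):
 *
 *      if (crypto_has_alg("sha256", 0, 0))
 *              pr_info("sha256 is available\n");
 */
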
MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");