/*
 * Software async crypto daemon.
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Added AEAD support to cryptd.
 *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */

#include <crypto/internal/hash.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/cryptd.h>
#include <crypto/crypto_wq.h>
#include <linux/atomic.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/sched.h>
#include <linux/slab.h>
#define CRYPTD_MAX_CPU_QLEN 1000

struct cryptd_cpu_queue {
	struct crypto_queue queue;
	struct work_struct work;
};

struct cryptd_queue {
	struct cryptd_cpu_queue __percpu *cpu_queue;
};

struct cryptd_instance_ctx {
	struct crypto_spawn spawn;
	struct cryptd_queue *queue;
};

struct skcipherd_instance_ctx {
	struct crypto_skcipher_spawn spawn;
	struct cryptd_queue *queue;
};

struct hashd_instance_ctx {
	struct crypto_shash_spawn spawn;
	struct cryptd_queue *queue;
};

struct aead_instance_ctx {
	struct crypto_aead_spawn aead_spawn;
	struct cryptd_queue *queue;
};

struct cryptd_blkcipher_ctx {
	atomic_t refcnt;
	struct crypto_blkcipher *child;
};

struct cryptd_blkcipher_request_ctx {
	crypto_completion_t complete;
};

struct cryptd_skcipher_ctx {
	atomic_t refcnt;
	struct crypto_skcipher *child;
};

struct cryptd_skcipher_request_ctx {
	crypto_completion_t complete;
};

struct cryptd_hash_ctx {
	atomic_t refcnt;
	struct crypto_shash *child;
};

struct cryptd_hash_request_ctx {
	crypto_completion_t complete;
	struct shash_desc desc;
};

struct cryptd_aead_ctx {
	atomic_t refcnt;
	struct crypto_aead *child;
};

struct cryptd_aead_request_ctx {
	crypto_completion_t complete;
};
static void cryptd_queue_worker(struct work_struct *work);

static int cryptd_init_queue(struct cryptd_queue *queue,
			     unsigned int max_cpu_qlen)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	queue->cpu_queue = alloc_percpu(struct cryptd_cpu_queue);
	if (!queue->cpu_queue)
		return -ENOMEM;
	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		crypto_init_queue(&cpu_queue->queue, max_cpu_qlen);
		INIT_WORK(&cpu_queue->work, cryptd_queue_worker);
	}
	return 0;
}

static void cryptd_fini_queue(struct cryptd_queue *queue)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		BUG_ON(cpu_queue->queue.qlen);
	}
	free_percpu(queue->cpu_queue);
}

static int cryptd_enqueue_request(struct cryptd_queue *queue,
				  struct crypto_async_request *request)
{
	int cpu, err;
	struct cryptd_cpu_queue *cpu_queue;
	atomic_t *refcnt;
	bool may_backlog;

	cpu = get_cpu();
	cpu_queue = this_cpu_ptr(queue->cpu_queue);
	err = crypto_enqueue_request(&cpu_queue->queue, request);

	refcnt = crypto_tfm_ctx(request->tfm);
	may_backlog = request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG;

	if (err == -EBUSY && !may_backlog)
		goto out_put_cpu;

	queue_work_on(cpu, kcrypto_wq, &cpu_queue->work);

	if (!atomic_read(refcnt))
		goto out_put_cpu;

	atomic_inc(refcnt);

out_put_cpu:
	put_cpu();

	return err;
}
/* Called in workqueue context, do one real crypto operation (via
 * req->complete) and reschedule itself if there is more work to
 * do. */
static void cryptd_queue_worker(struct work_struct *work)
{
	struct cryptd_cpu_queue *cpu_queue;
	struct crypto_async_request *req, *backlog;

	cpu_queue = container_of(work, struct cryptd_cpu_queue, work);
	/*
	 * Only handle one request at a time to avoid hogging crypto workqueue.
	 * preempt_disable/enable is used to prevent being preempted by
	 * cryptd_enqueue_request(). local_bh_disable/enable is used to prevent
	 * cryptd_enqueue_request() being accessed from software interrupts.
	 */
	local_bh_disable();
	preempt_disable();
	backlog = crypto_get_backlog(&cpu_queue->queue);
	req = crypto_dequeue_request(&cpu_queue->queue);
	preempt_enable();
	local_bh_enable();

	if (!req)
		return;

	if (backlog)
		backlog->complete(backlog, -EINPROGRESS);
	req->complete(req, 0);

	if (cpu_queue->queue.qlen)
		queue_work(kcrypto_wq, &cpu_queue->work);
}

static inline struct cryptd_queue *cryptd_get_queue(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);

	return ictx->queue;
}

static inline void cryptd_check_internal(struct rtattr **tb, u32 *type,
					 u32 *mask)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return;

	*type |= algt->type & CRYPTO_ALG_INTERNAL;
	*mask |= algt->mask & CRYPTO_ALG_INTERNAL;
}
static int cryptd_blkcipher_setkey(struct crypto_ablkcipher *parent,
				   const u8 *key, unsigned int keylen)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(parent);
	struct crypto_blkcipher *child = ctx->child;
	int err;

	crypto_blkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_blkcipher_set_flags(child, crypto_ablkcipher_get_flags(parent) &
					  CRYPTO_TFM_REQ_MASK);
	err = crypto_blkcipher_setkey(child, key, keylen);
	crypto_ablkcipher_set_flags(parent, crypto_blkcipher_get_flags(child) &
					    CRYPTO_TFM_RES_MASK);
	return err;
}

static void cryptd_blkcipher_crypt(struct ablkcipher_request *req,
				   struct crypto_blkcipher *child,
				   int err,
				   int (*crypt)(struct blkcipher_desc *desc,
						struct scatterlist *dst,
						struct scatterlist *src,
						unsigned int len))
{
	struct cryptd_blkcipher_request_ctx *rctx;
	struct cryptd_blkcipher_ctx *ctx;
	struct crypto_ablkcipher *tfm;
	struct blkcipher_desc desc;
	int refcnt;

	rctx = ablkcipher_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc.tfm = child;
	desc.info = req->info;
	desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = crypt(&desc, req->dst, req->src, req->nbytes);

	req->base.complete = rctx->complete;

out:
	tfm = crypto_ablkcipher_reqtfm(req);
	ctx = crypto_ablkcipher_ctx(tfm);
	refcnt = atomic_read(&ctx->refcnt);

	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
		crypto_free_ablkcipher(tfm);
}

static void cryptd_blkcipher_encrypt(struct crypto_async_request *req, int err)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm);
	struct crypto_blkcipher *child = ctx->child;

	cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err,
			       crypto_blkcipher_crt(child)->encrypt);
}

static void cryptd_blkcipher_decrypt(struct crypto_async_request *req, int err)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm);
	struct crypto_blkcipher *child = ctx->child;

	cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err,
			       crypto_blkcipher_crt(child)->decrypt);
}

static int cryptd_blkcipher_enqueue(struct ablkcipher_request *req,
				    crypto_completion_t compl)
{
	struct cryptd_blkcipher_request_ctx *rctx = ablkcipher_request_ctx(req);
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct cryptd_queue *queue;

	queue = cryptd_get_queue(crypto_ablkcipher_tfm(tfm));
	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_blkcipher_encrypt_enqueue(struct ablkcipher_request *req)
{
	return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_encrypt);
}

static int cryptd_blkcipher_decrypt_enqueue(struct ablkcipher_request *req)
{
	return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_decrypt);
}

static int cryptd_blkcipher_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_spawn *spawn = &ictx->spawn;
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_blkcipher *cipher;

	cipher = crypto_spawn_blkcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	tfm->crt_ablkcipher.reqsize =
		sizeof(struct cryptd_blkcipher_request_ctx);
	return 0;
}

static void cryptd_blkcipher_exit_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_blkcipher(ctx->child);
}
static int cryptd_init_instance(struct crypto_instance *inst,
				struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)",
		     alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.cra_priority = alg->cra_priority + 50;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;

	return 0;
}

static void *cryptd_alloc_instance(struct crypto_alg *alg, unsigned int head,
				   unsigned int tail)
{
	char *p;
	struct crypto_instance *inst;
	int err;

	p = kzalloc(head + sizeof(*inst) + tail, GFP_KERNEL);
	if (!p)
		return ERR_PTR(-ENOMEM);

	inst = (void *)(p + head);

	err = cryptd_init_instance(inst, alg);
	if (err)
		goto out_free_inst;

out:
	return p;

out_free_inst:
	kfree(p);
	p = ERR_PTR(err);
	goto out;
}
static int cryptd_create_blkcipher(struct crypto_template *tmpl,
				   struct rtattr **tb,
				   struct cryptd_queue *queue)
{
	struct cryptd_instance_ctx *ctx;
	struct crypto_instance *inst;
	struct crypto_alg *alg;
	u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;
	int err;

	cryptd_check_internal(tb, &type, &mask);

	alg = crypto_get_attr_alg(tb, type, mask);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	inst = cryptd_alloc_instance(alg, 0, sizeof(*ctx));
	err = PTR_ERR(inst);
	if (IS_ERR(inst))
		goto out_put_alg;

	ctx = crypto_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_init_spawn(&ctx->spawn, alg, inst,
				CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
	if (err)
		goto out_free_inst;

	type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC;
	if (alg->cra_flags & CRYPTO_ALG_INTERNAL)
		type |= CRYPTO_ALG_INTERNAL;
	inst->alg.cra_flags = type;
	inst->alg.cra_type = &crypto_ablkcipher_type;

	inst->alg.cra_ablkcipher.ivsize = alg->cra_blkcipher.ivsize;
	inst->alg.cra_ablkcipher.min_keysize = alg->cra_blkcipher.min_keysize;
	inst->alg.cra_ablkcipher.max_keysize = alg->cra_blkcipher.max_keysize;

	inst->alg.cra_ablkcipher.geniv = alg->cra_blkcipher.geniv;

	inst->alg.cra_ctxsize = sizeof(struct cryptd_blkcipher_ctx);

	inst->alg.cra_init = cryptd_blkcipher_init_tfm;
	inst->alg.cra_exit = cryptd_blkcipher_exit_tfm;

	inst->alg.cra_ablkcipher.setkey = cryptd_blkcipher_setkey;
	inst->alg.cra_ablkcipher.encrypt = cryptd_blkcipher_encrypt_enqueue;
	inst->alg.cra_ablkcipher.decrypt = cryptd_blkcipher_decrypt_enqueue;

	err = crypto_register_instance(tmpl, inst);
	if (err) {
		crypto_drop_spawn(&ctx->spawn);
out_free_inst:
		kfree(inst);
	}

out_put_alg:
	crypto_mod_put(alg);
	return err;
}
static int cryptd_skcipher_setkey(struct crypto_skcipher *parent,
				  const u8 *key, unsigned int keylen)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_skcipher *child = ctx->child;
	int err;

	crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) &
					 CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(child, key, keylen);
	crypto_skcipher_set_flags(parent, crypto_skcipher_get_flags(child) &
					  CRYPTO_TFM_RES_MASK);
	return err;
}

static void cryptd_skcipher_complete(struct skcipher_request *req, int err)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	int refcnt = atomic_read(&ctx->refcnt);

	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
		crypto_free_skcipher(tfm);
}

static void cryptd_skcipher_encrypt(struct crypto_async_request *base,
				    int err)
{
	struct skcipher_request *req = skcipher_request_cast(base);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *child = ctx->child;
	SKCIPHER_REQUEST_ON_STACK(subreq, child);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	skcipher_request_set_tfm(subreq, child);
	skcipher_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
				      NULL, NULL);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);

	err = crypto_skcipher_encrypt(subreq);
	skcipher_request_zero(subreq);

	req->base.complete = rctx->complete;

out:
	cryptd_skcipher_complete(req, err);
}

static void cryptd_skcipher_decrypt(struct crypto_async_request *base,
				    int err)
{
	struct skcipher_request *req = skcipher_request_cast(base);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *child = ctx->child;
	SKCIPHER_REQUEST_ON_STACK(subreq, child);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	skcipher_request_set_tfm(subreq, child);
	skcipher_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
				      NULL, NULL);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);

	err = crypto_skcipher_decrypt(subreq);
	skcipher_request_zero(subreq);

	req->base.complete = rctx->complete;

out:
	cryptd_skcipher_complete(req, err);
}

static int cryptd_skcipher_enqueue(struct skcipher_request *req,
				   crypto_completion_t compl)
{
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_queue *queue;

	queue = cryptd_get_queue(crypto_skcipher_tfm(tfm));
	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_skcipher_encrypt_enqueue(struct skcipher_request *req)
{
	return cryptd_skcipher_enqueue(req, cryptd_skcipher_encrypt);
}

static int cryptd_skcipher_decrypt_enqueue(struct skcipher_request *req)
{
	return cryptd_skcipher_enqueue(req, cryptd_skcipher_decrypt);
}

static int cryptd_skcipher_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct skcipherd_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct crypto_skcipher_spawn *spawn = &ictx->spawn;
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *cipher;

	cipher = crypto_spawn_skcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	crypto_skcipher_set_reqsize(
		tfm, sizeof(struct cryptd_skcipher_request_ctx));
	return 0;
}

static void cryptd_skcipher_exit_tfm(struct crypto_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->child);
}

static void cryptd_skcipher_free(struct skcipher_instance *inst)
{
	struct skcipherd_instance_ctx *ctx = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(&ctx->spawn);
}
static int cryptd_create_skcipher(struct crypto_template *tmpl,
				  struct rtattr **tb,
				  struct cryptd_queue *queue)
{
	struct skcipherd_instance_ctx *ctx;
	struct skcipher_instance *inst;
	struct skcipher_alg *alg;
	const char *name;
	u32 type;
	u32 mask;
	int err;

	type = 0;
	mask = CRYPTO_ALG_ASYNC;

	cryptd_check_internal(tb, &type, &mask);

	name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(name))
		return PTR_ERR(name);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = skcipher_instance_ctx(inst);
	ctx->queue = queue;

	crypto_set_skcipher_spawn(&ctx->spawn, skcipher_crypto_instance(inst));
	err = crypto_grab_skcipher(&ctx->spawn, name, type, mask);
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_skcipher_alg(&ctx->spawn);
	err = cryptd_init_instance(skcipher_crypto_instance(inst), &alg->base);
	if (err)
		goto out_drop_skcipher;

	inst->alg.base.cra_flags = CRYPTO_ALG_ASYNC |
				   (alg->base.cra_flags & CRYPTO_ALG_INTERNAL);

	inst->alg.ivsize = crypto_skcipher_alg_ivsize(alg);
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg);
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg);
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg);

	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_skcipher_ctx);

	inst->alg.init = cryptd_skcipher_init_tfm;
	inst->alg.exit = cryptd_skcipher_exit_tfm;

	inst->alg.setkey = cryptd_skcipher_setkey;
	inst->alg.encrypt = cryptd_skcipher_encrypt_enqueue;
	inst->alg.decrypt = cryptd_skcipher_decrypt_enqueue;

	inst->free = cryptd_skcipher_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
out_drop_skcipher:
		crypto_drop_skcipher(&ctx->spawn);
out_free_inst:
		kfree(inst);
	}
	return err;
}
static int cryptd_hash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct hashd_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_shash_spawn *spawn = &ictx->spawn;
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *hash;

	hash = crypto_spawn_shash(spawn);
	if (IS_ERR(hash))
		return PTR_ERR(hash);

	ctx->child = hash;
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct cryptd_hash_request_ctx) +
				 crypto_shash_descsize(hash));
	return 0;
}

static void cryptd_hash_exit_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(ctx->child);
}

static int cryptd_hash_setkey(struct crypto_ahash *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(parent);
	struct crypto_shash *child = ctx->child;
	int err;

	crypto_shash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_shash_set_flags(child, crypto_ahash_get_flags(parent) &
				      CRYPTO_TFM_REQ_MASK);
	err = crypto_shash_setkey(child, key, keylen);
	crypto_ahash_set_flags(parent, crypto_shash_get_flags(child) &
				       CRYPTO_TFM_RES_MASK);
	return err;
}

static int cryptd_hash_enqueue(struct ahash_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_queue *queue =
		cryptd_get_queue(crypto_ahash_tfm(tfm));

	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}

static void cryptd_hash_complete(struct ahash_request *req, int err)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	int refcnt = atomic_read(&ctx->refcnt);

	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(tfm);
}

static void cryptd_hash_init(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;
	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = crypto_shash_init(desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_init_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_init);
}

static void cryptd_hash_update(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx;

	rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_update(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_update_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_update);
}

static void cryptd_hash_final(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = crypto_shash_final(&rctx->desc, req->result);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_final_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_final);
}

static void cryptd_hash_finup(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_finup(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_finup_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_finup);
}

static void cryptd_hash_digest(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;
	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = shash_ahash_digest(req, desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_digest_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_digest);
}

static int cryptd_hash_export(struct ahash_request *req, void *out)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return crypto_shash_export(&rctx->desc, out);
}

static int cryptd_hash_import(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct shash_desc *desc = cryptd_shash_desc(req);

	desc->tfm = ctx->child;
	desc->flags = req->base.flags;

	return crypto_shash_import(desc, in);
}
static int cryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
			      struct cryptd_queue *queue)
{
	struct hashd_instance_ctx *ctx;
	struct ahash_instance *inst;
	struct shash_alg *salg;
	struct crypto_alg *alg;
	u32 type = 0;
	u32 mask = 0;
	int err;

	cryptd_check_internal(tb, &type, &mask);

	salg = shash_attr_alg(tb[1], type, mask);
	if (IS_ERR(salg))
		return PTR_ERR(salg);

	alg = &salg->base;
	inst = cryptd_alloc_instance(alg, ahash_instance_headroom(),
				     sizeof(*ctx));
	err = PTR_ERR(inst);
	if (IS_ERR(inst))
		goto out_put_alg;

	ctx = ahash_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_init_shash_spawn(&ctx->spawn, salg,
				      ahash_crypto_instance(inst));
	if (err)
		goto out_free_inst;

	type = CRYPTO_ALG_ASYNC;
	if (alg->cra_flags & CRYPTO_ALG_INTERNAL)
		type |= CRYPTO_ALG_INTERNAL;
	inst->alg.halg.base.cra_flags = type;

	inst->alg.halg.digestsize = salg->digestsize;
	inst->alg.halg.statesize = salg->statesize;
	inst->alg.halg.base.cra_ctxsize = sizeof(struct cryptd_hash_ctx);

	inst->alg.halg.base.cra_init = cryptd_hash_init_tfm;
	inst->alg.halg.base.cra_exit = cryptd_hash_exit_tfm;

	inst->alg.init   = cryptd_hash_init_enqueue;
	inst->alg.update = cryptd_hash_update_enqueue;
	inst->alg.final  = cryptd_hash_final_enqueue;
	inst->alg.finup  = cryptd_hash_finup_enqueue;
	inst->alg.export = cryptd_hash_export;
	inst->alg.import = cryptd_hash_import;
	inst->alg.setkey = cryptd_hash_setkey;
	inst->alg.digest = cryptd_hash_digest_enqueue;

	err = ahash_register_instance(tmpl, inst);
	if (err) {
		crypto_drop_shash(&ctx->spawn);
out_free_inst:
		kfree(inst);
	}

out_put_alg:
	crypto_mod_put(alg);
	return err;
}
static int cryptd_aead_setkey(struct crypto_aead *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setkey(child, key, keylen);
}

static int cryptd_aead_setauthsize(struct crypto_aead *parent,
				   unsigned int authsize)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setauthsize(child, authsize);
}

static void cryptd_aead_crypt(struct aead_request *req,
			      struct crypto_aead *child,
			      int err,
			      int (*crypt)(struct aead_request *req))
{
	struct cryptd_aead_request_ctx *rctx;
	struct cryptd_aead_ctx *ctx;
	crypto_completion_t compl;
	struct crypto_aead *tfm;
	int refcnt;

	rctx = aead_request_ctx(req);
	compl = rctx->complete;

	tfm = crypto_aead_reqtfm(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;
	aead_request_set_tfm(req, child);
	err = crypt(req);

out:
	ctx = crypto_aead_ctx(tfm);
	refcnt = atomic_read(&ctx->refcnt);

	local_bh_disable();
	compl(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
		crypto_free_aead(tfm);
}

static void cryptd_aead_encrypt(struct crypto_async_request *areq, int err)
{
	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
	struct crypto_aead *child = ctx->child;
	struct aead_request *req;

	req = container_of(areq, struct aead_request, base);
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->encrypt);
}

static void cryptd_aead_decrypt(struct crypto_async_request *areq, int err)
{
	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
	struct crypto_aead *child = ctx->child;
	struct aead_request *req;

	req = container_of(areq, struct aead_request, base);
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->decrypt);
}

static int cryptd_aead_enqueue(struct aead_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_aead_request_ctx *rctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct cryptd_queue *queue = cryptd_get_queue(crypto_aead_tfm(tfm));

	rctx->complete = req->base.complete;
	req->base.complete = compl;
	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_aead_encrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_encrypt);
}

static int cryptd_aead_decrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_decrypt);
}

static int cryptd_aead_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct aead_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_aead_spawn *spawn = &ictx->aead_spawn;
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *cipher;

	cipher = crypto_spawn_aead(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	crypto_aead_set_reqsize(
		tfm, max((unsigned)sizeof(struct cryptd_aead_request_ctx),
			 crypto_aead_reqsize(cipher)));
	return 0;
}

static void cryptd_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
}
static int cryptd_create_aead(struct crypto_template *tmpl,
			      struct rtattr **tb,
			      struct cryptd_queue *queue)
{
	struct aead_instance_ctx *ctx;
	struct aead_instance *inst;
	struct aead_alg *alg;
	const char *name;
	u32 type = 0;
	u32 mask = CRYPTO_ALG_ASYNC;
	int err;

	cryptd_check_internal(tb, &type, &mask);

	name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(name))
		return PTR_ERR(name);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = aead_instance_ctx(inst);
	ctx->queue = queue;

	crypto_set_aead_spawn(&ctx->aead_spawn, aead_crypto_instance(inst));
	err = crypto_grab_aead(&ctx->aead_spawn, name, type, mask);
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_aead_alg(&ctx->aead_spawn);
	err = cryptd_init_instance(aead_crypto_instance(inst), &alg->base);
	if (err)
		goto out_drop_aead;

	inst->alg.base.cra_flags = CRYPTO_ALG_ASYNC |
				   (alg->base.cra_flags & CRYPTO_ALG_INTERNAL);
	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_aead_ctx);

	inst->alg.ivsize = crypto_aead_alg_ivsize(alg);
	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

	inst->alg.init = cryptd_aead_init_tfm;
	inst->alg.exit = cryptd_aead_exit_tfm;
	inst->alg.setkey = cryptd_aead_setkey;
	inst->alg.setauthsize = cryptd_aead_setauthsize;
	inst->alg.encrypt = cryptd_aead_encrypt_enqueue;
	inst->alg.decrypt = cryptd_aead_decrypt_enqueue;

	err = aead_register_instance(tmpl, inst);
	if (err) {
out_drop_aead:
		crypto_drop_aead(&ctx->aead_spawn);
out_free_inst:
		kfree(inst);
	}
	return err;
}
static struct cryptd_queue queue;

static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_BLKCIPHER:
		if ((algt->type & CRYPTO_ALG_TYPE_MASK) ==
		    CRYPTO_ALG_TYPE_BLKCIPHER)
			return cryptd_create_blkcipher(tmpl, tb, &queue);

		return cryptd_create_skcipher(tmpl, tb, &queue);
	case CRYPTO_ALG_TYPE_DIGEST:
		return cryptd_create_hash(tmpl, tb, &queue);
	case CRYPTO_ALG_TYPE_AEAD:
		return cryptd_create_aead(tmpl, tb, &queue);
	}

	return -EINVAL;
}

static void cryptd_free(struct crypto_instance *inst)
{
	struct cryptd_instance_ctx *ctx = crypto_instance_ctx(inst);
	struct hashd_instance_ctx *hctx = crypto_instance_ctx(inst);
	struct aead_instance_ctx *aead_ctx = crypto_instance_ctx(inst);

	switch (inst->alg.cra_flags & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_AHASH:
		crypto_drop_shash(&hctx->spawn);
		kfree(ahash_instance(inst));
		return;
	case CRYPTO_ALG_TYPE_AEAD:
		crypto_drop_aead(&aead_ctx->aead_spawn);
		kfree(aead_instance(inst));
		return;
	default:
		crypto_drop_spawn(&ctx->spawn);
		kfree(inst);
	}
}

static struct crypto_template cryptd_tmpl = {
	.name = "cryptd",
	.create = cryptd_create,
	.free = cryptd_free,
	.module = THIS_MODULE,
};
struct cryptd_ablkcipher *cryptd_alloc_ablkcipher(const char *alg_name,
						  u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_blkcipher_ctx *ctx;
	struct crypto_tfm *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	type = crypto_skcipher_type(type);
	mask &= ~CRYPTO_ALG_TYPE_MASK;
	mask |= (CRYPTO_ALG_GENIV | CRYPTO_ALG_TYPE_BLKCIPHER_MASK);
	tfm = crypto_alloc_base(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_tfm(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_tfm_ctx(tfm);
	atomic_set(&ctx->refcnt, 1);

	return __cryptd_ablkcipher_cast(__crypto_ablkcipher_cast(tfm));
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ablkcipher);

struct crypto_blkcipher *cryptd_ablkcipher_child(struct cryptd_ablkcipher *tfm)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_ablkcipher_child);

bool cryptd_ablkcipher_queued(struct cryptd_ablkcipher *tfm)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);

	return atomic_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_ablkcipher_queued);

void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);

	if (atomic_dec_and_test(&ctx->refcnt))
		crypto_free_ablkcipher(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ablkcipher);
struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name,
					      u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_skcipher_ctx *ctx;
	struct crypto_skcipher *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);

	tfm = crypto_alloc_skcipher(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);

	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_skcipher(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_skcipher_ctx(tfm);
	atomic_set(&ctx->refcnt, 1);

	return container_of(tfm, struct cryptd_skcipher, base);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_skcipher);

struct crypto_skcipher *cryptd_skcipher_child(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_skcipher_child);

bool cryptd_skcipher_queued(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	return atomic_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_skcipher_queued);

void cryptd_free_skcipher(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	if (atomic_dec_and_test(&ctx->refcnt))
		crypto_free_skcipher(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_skcipher);
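
/*
 * Usage sketch (illustrative only, not part of this file): how a caller such
 * as a SIMD cipher driver might obtain a cryptd-wrapped skcipher.  The
 * "__aes-ni-xts" algorithm name and the surrounding error handling are
 * assumptions made for this example.
 *
 *	struct cryptd_skcipher *ctfm;
 *
 *	ctfm = cryptd_alloc_skcipher("__aes-ni-xts", CRYPTO_ALG_INTERNAL,
 *				     CRYPTO_ALG_INTERNAL);
 *	if (IS_ERR(ctfm))
 *		return PTR_ERR(ctfm);
 *
 *	// Requests issued against &ctfm->base are deferred to the per-CPU
 *	// cryptd queue and completed from the workqueue; in a context where
 *	// sleeping is allowed the caller may use the synchronous child
 *	// directly via cryptd_skcipher_child(ctfm).
 *
 *	cryptd_free_skcipher(ctfm);
 */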
struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
					u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_hash_ctx *ctx;
	struct crypto_ahash *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_ahash(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_ahash(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_ahash_ctx(tfm);
	atomic_set(&ctx->refcnt, 1);

	return __cryptd_ahash_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ahash);

struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_child);

struct shash_desc *cryptd_shash_desc(struct ahash_request *req)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return &rctx->desc;
}
EXPORT_SYMBOL_GPL(cryptd_shash_desc);

bool cryptd_ahash_queued(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return atomic_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_queued);

void cryptd_free_ahash(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	if (atomic_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ahash);
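
/*
 * Usage sketch (illustrative only, not part of this file): wrapping an
 * internal SIMD hash with cryptd.  The "__sha1-ssse3" name is an assumption
 * made for this example.
 *
 *	struct cryptd_ahash *chash;
 *
 *	chash = cryptd_alloc_ahash("__sha1-ssse3", CRYPTO_ALG_INTERNAL,
 *				   CRYPTO_ALG_INTERNAL);
 *	if (IS_ERR(chash))
 *		return PTR_ERR(chash);
 *
 *	// Asynchronous requests go through &chash->base; when the caller may
 *	// sleep it can run the shash child directly, obtained via
 *	// cryptd_ahash_child() together with cryptd_shash_desc().
 *
 *	cryptd_free_ahash(chash);
 */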
struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
				      u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_aead_ctx *ctx;
	struct crypto_aead *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_aead(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_aead(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_aead_ctx(tfm);
	atomic_set(&ctx->refcnt, 1);

	return __cryptd_aead_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_aead);

struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx;
	ctx = crypto_aead_ctx(&tfm->base);
	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_aead_child);

bool cryptd_aead_queued(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);

	return atomic_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_aead_queued);

void cryptd_free_aead(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);

	if (atomic_dec_and_test(&ctx->refcnt))
		crypto_free_aead(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_aead);
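
/*
 * Usage sketch (illustrative only, not part of this file): this mirrors how a
 * SIMD AEAD driver can defer work to cryptd.  The "__gcm-aes-aesni" name is
 * an assumption made for this example.
 *
 *	struct cryptd_aead *cryptd_tfm;
 *
 *	cryptd_tfm = cryptd_alloc_aead("__gcm-aes-aesni", CRYPTO_ALG_INTERNAL,
 *				       CRYPTO_ALG_INTERNAL);
 *	if (IS_ERR(cryptd_tfm))
 *		return PTR_ERR(cryptd_tfm);
 *
 *	// Encrypt/decrypt requests against &cryptd_tfm->base are queued to
 *	// the cryptd workqueue; cryptd_aead_child() exposes the underlying
 *	// synchronous tfm for use when SIMD/FPU context is available.
 *
 *	cryptd_free_aead(cryptd_tfm);
 */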
static int __init cryptd_init(void)
{
	int err;

	err = cryptd_init_queue(&queue, CRYPTD_MAX_CPU_QLEN);
	if (err)
		return err;

	err = crypto_register_template(&cryptd_tmpl);
	if (err)
		cryptd_fini_queue(&queue);

	return err;
}

static void __exit cryptd_exit(void)
{
	cryptd_fini_queue(&queue);
	crypto_unregister_template(&cryptd_tmpl);
}

subsys_initcall(cryptd_init);
module_exit(cryptd_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Software async crypto daemon");
MODULE_ALIAS_CRYPTO("cryptd");