/*
 * Software async crypto daemon.
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Added AEAD support to cryptd.
 *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */

#include <crypto/internal/hash.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/cryptd.h>
#include <crypto/crypto_wq.h>
#include <linux/atomic.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/sched.h>
#include <linux/slab.h>

static unsigned int cryptd_max_cpu_qlen = 1000;
module_param(cryptd_max_cpu_qlen, uint, 0);
MODULE_PARM_DESC(cryptd_max_cpu_qlen, "Set cryptd Max queue depth");

struct cryptd_cpu_queue {
	struct crypto_queue queue;
	struct work_struct work;
};

struct cryptd_queue {
	struct cryptd_cpu_queue __percpu *cpu_queue;
};

struct cryptd_instance_ctx {
	struct crypto_spawn spawn;
	struct cryptd_queue *queue;
};

struct skcipherd_instance_ctx {
	struct crypto_skcipher_spawn spawn;
	struct cryptd_queue *queue;
};

struct hashd_instance_ctx {
	struct crypto_shash_spawn spawn;
	struct cryptd_queue *queue;
};

struct aead_instance_ctx {
	struct crypto_aead_spawn aead_spawn;
	struct cryptd_queue *queue;
};

struct cryptd_skcipher_ctx {
	atomic_t refcnt;
	struct crypto_sync_skcipher *child;
};

struct cryptd_skcipher_request_ctx {
	crypto_completion_t complete;
};

struct cryptd_hash_ctx {
	atomic_t refcnt;
	struct crypto_shash *child;
};

struct cryptd_hash_request_ctx {
	crypto_completion_t complete;
	struct shash_desc desc;
};

struct cryptd_aead_ctx {
	atomic_t refcnt;
	struct crypto_aead *child;
};

struct cryptd_aead_request_ctx {
	crypto_completion_t complete;
};
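
/*
 * The refcnt field in each tfm context counts outstanding users of the
 * cryptd transform: it starts at 1 when the tfm is allocated through
 * cryptd_alloc_*() and is bumped for every request sitting in a queue.
 * The completion paths drop it again and free the tfm once it reaches
 * zero, which lets cryptd_free_*() run safely while requests are still
 * in flight.
 */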

static void cryptd_queue_worker(struct work_struct *work);

static int cryptd_init_queue(struct cryptd_queue *queue,
			     unsigned int max_cpu_qlen)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	queue->cpu_queue = alloc_percpu(struct cryptd_cpu_queue);
	if (!queue->cpu_queue)
		return -ENOMEM;
	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		crypto_init_queue(&cpu_queue->queue, max_cpu_qlen);
		INIT_WORK(&cpu_queue->work, cryptd_queue_worker);
	}
	pr_info("cryptd: max_cpu_qlen set to %d\n", max_cpu_qlen);
	return 0;
}

static void cryptd_fini_queue(struct cryptd_queue *queue)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		BUG_ON(cpu_queue->queue.qlen);
	}
	free_percpu(queue->cpu_queue);
}

static int cryptd_enqueue_request(struct cryptd_queue *queue,
				  struct crypto_async_request *request)
{
	int cpu, err;
	struct cryptd_cpu_queue *cpu_queue;
	atomic_t *refcnt;

	cpu = get_cpu();
	cpu_queue = this_cpu_ptr(queue->cpu_queue);
	err = crypto_enqueue_request(&cpu_queue->queue, request);

	refcnt = crypto_tfm_ctx(request->tfm);

	if (err == -ENOSPC)
		goto out_put_cpu;

	queue_work_on(cpu, kcrypto_wq, &cpu_queue->work);

	if (!atomic_read(refcnt))
		goto out_put_cpu;

	atomic_inc(refcnt);

out_put_cpu:
	put_cpu();

	return err;
}
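
/*
 * Note the ordering above: the request is queued and the work item kicked
 * before the reference count is taken, and refcnt is only incremented when
 * it is already non-zero, i.e. when the tfm came from cryptd_alloc_*() and
 * participates in the lazy-free scheme described with the context
 * structures.
 */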

/*
 * Called in workqueue context; performs one real crypto operation (via
 * req->complete) and reschedules itself if there is more work to do.
 */
static void cryptd_queue_worker(struct work_struct *work)
{
	struct cryptd_cpu_queue *cpu_queue;
	struct crypto_async_request *req, *backlog;

	cpu_queue = container_of(work, struct cryptd_cpu_queue, work);
	/*
	 * Only handle one request at a time to avoid hogging crypto workqueue.
	 * preempt_disable/enable is used to prevent being preempted by
	 * cryptd_enqueue_request(). local_bh_disable/enable is used to prevent
	 * cryptd_enqueue_request() being accessed from software interrupts.
	 */
	local_bh_disable();
	preempt_disable();
	backlog = crypto_get_backlog(&cpu_queue->queue);
	req = crypto_dequeue_request(&cpu_queue->queue);
	preempt_enable();
	local_bh_enable();

	if (!req)
		return;

	if (backlog)
		backlog->complete(backlog, -EINPROGRESS);
	req->complete(req, 0);

	if (cpu_queue->queue.qlen)
		queue_work(kcrypto_wq, &cpu_queue->work);
}

static inline struct cryptd_queue *cryptd_get_queue(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);

	return ictx->queue;
}

static inline void cryptd_check_internal(struct rtattr **tb, u32 *type,
					 u32 *mask)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return;

	*type |= algt->type & CRYPTO_ALG_INTERNAL;
	*mask |= algt->mask & CRYPTO_ALG_INTERNAL;
}

static int cryptd_init_instance(struct crypto_instance *inst,
				struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)",
		     alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.cra_priority = alg->cra_priority + 50;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;

	return 0;
}
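
/*
 * The +50 priority bump makes the cryptd wrapper outrank the algorithm it
 * wraps, so algorithm lookups by cra_name prefer the async cryptd instance
 * once it has been registered.
 */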

static void *cryptd_alloc_instance(struct crypto_alg *alg, unsigned int head,
				   unsigned int tail)
{
	char *p;
	struct crypto_instance *inst;
	int err;

	p = kzalloc(head + sizeof(*inst) + tail, GFP_KERNEL);
	if (!p)
		return ERR_PTR(-ENOMEM);

	inst = (void *)(p + head);

	err = cryptd_init_instance(inst, alg);
	if (err)
		goto out_free_inst;

out:
	return p;

out_free_inst:
	kfree(p);
	p = ERR_PTR(err);
	goto out;
}
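
/*
 * Memory layout produced above: [head bytes | crypto_instance | tail bytes].
 * Callers such as cryptd_create_hash() pass the headroom their frontend type
 * needs (e.g. ahash_instance_headroom()) and the size of their instance
 * context as the tail.
 */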

static int cryptd_skcipher_setkey(struct crypto_skcipher *parent,
				  const u8 *key, unsigned int keylen)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_sync_skcipher *child = ctx->child;
	int err;

	crypto_sync_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_sync_skcipher_set_flags(child,
				       crypto_skcipher_get_flags(parent) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_sync_skcipher_setkey(child, key, keylen);
	crypto_skcipher_set_flags(parent,
				  crypto_sync_skcipher_get_flags(child) &
				  CRYPTO_TFM_RES_MASK);
	return err;
}

static void cryptd_skcipher_complete(struct skcipher_request *req, int err)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	int refcnt = atomic_read(&ctx->refcnt);

	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
		crypto_free_skcipher(tfm);
}

static void cryptd_skcipher_encrypt(struct crypto_async_request *base,
				    int err)
{
	struct skcipher_request *req = skcipher_request_cast(base);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_sync_skcipher *child = ctx->child;
	SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, child);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	skcipher_request_set_sync_tfm(subreq, child);
	skcipher_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
				      NULL, NULL);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);

	err = crypto_skcipher_encrypt(subreq);
	skcipher_request_zero(subreq);

	req->base.complete = rctx->complete;

out:
	cryptd_skcipher_complete(req, err);
}

static void cryptd_skcipher_decrypt(struct crypto_async_request *base,
				    int err)
{
	struct skcipher_request *req = skcipher_request_cast(base);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_sync_skcipher *child = ctx->child;
	SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, child);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	skcipher_request_set_sync_tfm(subreq, child);
	skcipher_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
				      NULL, NULL);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);

	err = crypto_skcipher_decrypt(subreq);
	skcipher_request_zero(subreq);

	req->base.complete = rctx->complete;

out:
	cryptd_skcipher_complete(req, err);
}
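
/*
 * Both crypt paths above use the same pattern: a synchronous skcipher
 * request is built on the stack for the child transform, the operation runs
 * synchronously in workqueue context, and the request is zeroed afterwards
 * so no key- or IV-derived state is left behind on the stack.
 */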

static int cryptd_skcipher_enqueue(struct skcipher_request *req,
				   crypto_completion_t compl)
{
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_queue *queue;

	queue = cryptd_get_queue(crypto_skcipher_tfm(tfm));
	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_skcipher_encrypt_enqueue(struct skcipher_request *req)
{
	return cryptd_skcipher_enqueue(req, cryptd_skcipher_encrypt);
}

static int cryptd_skcipher_decrypt_enqueue(struct skcipher_request *req)
{
	return cryptd_skcipher_enqueue(req, cryptd_skcipher_decrypt);
}
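
/*
 * Request flow for the skcipher template: the caller's encrypt/decrypt
 * entry point only swaps in cryptd's completion handler and enqueues the
 * request on the per-CPU queue; the real work happens later in
 * cryptd_queue_worker(), which invokes cryptd_skcipher_encrypt() or
 * cryptd_skcipher_decrypt() through req->base.complete.
 */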

static int cryptd_skcipher_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct skcipherd_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct crypto_skcipher_spawn *spawn = &ictx->spawn;
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *cipher;

	cipher = crypto_spawn_skcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = (struct crypto_sync_skcipher *)cipher;
	crypto_skcipher_set_reqsize(
		tfm, sizeof(struct cryptd_skcipher_request_ctx));
	return 0;
}

static void cryptd_skcipher_exit_tfm(struct crypto_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_sync_skcipher(ctx->child);
}

static void cryptd_skcipher_free(struct skcipher_instance *inst)
{
	struct skcipherd_instance_ctx *ctx = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(&ctx->spawn);
	kfree(inst);
}

static int cryptd_create_skcipher(struct crypto_template *tmpl,
				  struct rtattr **tb,
				  struct cryptd_queue *queue)
{
	struct skcipherd_instance_ctx *ctx;
	struct skcipher_instance *inst;
	struct skcipher_alg *alg;
	const char *name;
	u32 type;
	u32 mask;
	int err;

	type = 0;
	mask = CRYPTO_ALG_ASYNC;

	cryptd_check_internal(tb, &type, &mask);

	name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(name))
		return PTR_ERR(name);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = skcipher_instance_ctx(inst);
	ctx->queue = queue;

	crypto_set_skcipher_spawn(&ctx->spawn, skcipher_crypto_instance(inst));
	err = crypto_grab_skcipher(&ctx->spawn, name, type, mask);
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_skcipher_alg(&ctx->spawn);
	err = cryptd_init_instance(skcipher_crypto_instance(inst), &alg->base);
	if (err)
		goto out_drop_skcipher;

	inst->alg.base.cra_flags = CRYPTO_ALG_ASYNC |
				   (alg->base.cra_flags & CRYPTO_ALG_INTERNAL);

	inst->alg.ivsize = crypto_skcipher_alg_ivsize(alg);
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg);
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg);
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg);

	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_skcipher_ctx);

	inst->alg.init = cryptd_skcipher_init_tfm;
	inst->alg.exit = cryptd_skcipher_exit_tfm;

	inst->alg.setkey = cryptd_skcipher_setkey;
	inst->alg.encrypt = cryptd_skcipher_encrypt_enqueue;
	inst->alg.decrypt = cryptd_skcipher_decrypt_enqueue;

	inst->free = cryptd_skcipher_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
out_drop_skcipher:
		crypto_drop_skcipher(&ctx->spawn);
out_free_inst:
		kfree(inst);
	}
	return err;
}

static int cryptd_hash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct hashd_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_shash_spawn *spawn = &ictx->spawn;
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *hash;

	hash = crypto_spawn_shash(spawn);
	if (IS_ERR(hash))
		return PTR_ERR(hash);

	ctx->child = hash;
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct cryptd_hash_request_ctx) +
				 crypto_shash_descsize(hash));
	return 0;
}

static void cryptd_hash_exit_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(ctx->child);
}

static int cryptd_hash_setkey(struct crypto_ahash *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(parent);
	struct crypto_shash *child = ctx->child;
	int err;

	crypto_shash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_shash_set_flags(child, crypto_ahash_get_flags(parent) &
				      CRYPTO_TFM_REQ_MASK);
	err = crypto_shash_setkey(child, key, keylen);
	crypto_ahash_set_flags(parent, crypto_shash_get_flags(child) &
				       CRYPTO_TFM_RES_MASK);
	return err;
}

static int cryptd_hash_enqueue(struct ahash_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_queue *queue =
		cryptd_get_queue(crypto_ahash_tfm(tfm));

	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}

static void cryptd_hash_complete(struct ahash_request *req, int err)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	int refcnt = atomic_read(&ctx->refcnt);

	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(tfm);
}

static void cryptd_hash_init(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;

	err = crypto_shash_init(desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_init_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_init);
}

static void cryptd_hash_update(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx;

	rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_update(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_update_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_update);
}

static void cryptd_hash_final(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = crypto_shash_final(&rctx->desc, req->result);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_final_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_final);
}

static void cryptd_hash_finup(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_finup(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_finup_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_finup);
}

static void cryptd_hash_digest(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;

	err = shash_ahash_digest(req, desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_digest_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_digest);
}

static int cryptd_hash_export(struct ahash_request *req, void *out)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return crypto_shash_export(&rctx->desc, out);
}

static int cryptd_hash_import(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct shash_desc *desc = cryptd_shash_desc(req);

	desc->tfm = ctx->child;

	return crypto_shash_import(desc, in);
}
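
/*
 * export/import operate directly on the child shash_desc embedded in the
 * request context, so partial hash state can be saved and restored across
 * requests exactly as with the underlying synchronous algorithm.
 */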

static int cryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
			      struct cryptd_queue *queue)
{
	struct hashd_instance_ctx *ctx;
	struct ahash_instance *inst;
	struct shash_alg *salg;
	struct crypto_alg *alg;
	u32 type = 0;
	u32 mask = 0;
	int err;

	cryptd_check_internal(tb, &type, &mask);

	salg = shash_attr_alg(tb[1], type, mask);
	if (IS_ERR(salg))
		return PTR_ERR(salg);

	alg = &salg->base;
	inst = cryptd_alloc_instance(alg, ahash_instance_headroom(),
				     sizeof(*ctx));
	err = PTR_ERR(inst);
	if (IS_ERR(inst))
		goto out_put_alg;

	ctx = ahash_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_init_shash_spawn(&ctx->spawn, salg,
				      ahash_crypto_instance(inst));
	if (err)
		goto out_free_inst;

	inst->alg.halg.base.cra_flags = CRYPTO_ALG_ASYNC |
		(alg->cra_flags & (CRYPTO_ALG_INTERNAL |
				   CRYPTO_ALG_OPTIONAL_KEY));

	inst->alg.halg.digestsize = salg->digestsize;
	inst->alg.halg.statesize = salg->statesize;
	inst->alg.halg.base.cra_ctxsize = sizeof(struct cryptd_hash_ctx);

	inst->alg.halg.base.cra_init = cryptd_hash_init_tfm;
	inst->alg.halg.base.cra_exit = cryptd_hash_exit_tfm;

	inst->alg.init   = cryptd_hash_init_enqueue;
	inst->alg.update = cryptd_hash_update_enqueue;
	inst->alg.final  = cryptd_hash_final_enqueue;
	inst->alg.finup  = cryptd_hash_finup_enqueue;
	inst->alg.export = cryptd_hash_export;
	inst->alg.import = cryptd_hash_import;
	if (crypto_shash_alg_has_setkey(salg))
		inst->alg.setkey = cryptd_hash_setkey;
	inst->alg.digest = cryptd_hash_digest_enqueue;

	err = ahash_register_instance(tmpl, inst);
	if (err) {
		crypto_drop_shash(&ctx->spawn);
out_free_inst:
		kfree(inst);
	}

out_put_alg:
	crypto_mod_put(alg);
	return err;
}

static int cryptd_aead_setkey(struct crypto_aead *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setkey(child, key, keylen);
}

static int cryptd_aead_setauthsize(struct crypto_aead *parent,
				   unsigned int authsize)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setauthsize(child, authsize);
}

static void cryptd_aead_crypt(struct aead_request *req,
			      struct crypto_aead *child,
			      int err,
			      int (*crypt)(struct aead_request *req))
{
	struct cryptd_aead_request_ctx *rctx;
	struct cryptd_aead_ctx *ctx;
	crypto_completion_t compl;
	struct crypto_aead *tfm;
	int refcnt;

	rctx = aead_request_ctx(req);
	compl = rctx->complete;

	tfm = crypto_aead_reqtfm(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	aead_request_set_tfm(req, child);
	err = crypt(req);

out:
	ctx = crypto_aead_ctx(tfm);
	refcnt = atomic_read(&ctx->refcnt);

	local_bh_disable();
	compl(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
		crypto_free_aead(tfm);
}

static void cryptd_aead_encrypt(struct crypto_async_request *areq, int err)
{
	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
	struct crypto_aead *child = ctx->child;
	struct aead_request *req;

	req = container_of(areq, struct aead_request, base);
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->encrypt);
}

static void cryptd_aead_decrypt(struct crypto_async_request *areq, int err)
{
	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
	struct crypto_aead *child = ctx->child;
	struct aead_request *req;

	req = container_of(areq, struct aead_request, base);
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->decrypt);
}

static int cryptd_aead_enqueue(struct aead_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_aead_request_ctx *rctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct cryptd_queue *queue = cryptd_get_queue(crypto_aead_tfm(tfm));

	rctx->complete = req->base.complete;
	req->base.complete = compl;
	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_aead_encrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_encrypt);
}

static int cryptd_aead_decrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_decrypt);
}

static int cryptd_aead_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct aead_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_aead_spawn *spawn = &ictx->aead_spawn;
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *cipher;

	cipher = crypto_spawn_aead(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	crypto_aead_set_reqsize(
		tfm, max((unsigned)sizeof(struct cryptd_aead_request_ctx),
			 crypto_aead_reqsize(cipher)));
	return 0;
}

static void cryptd_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
}

static int cryptd_create_aead(struct crypto_template *tmpl,
			      struct rtattr **tb,
			      struct cryptd_queue *queue)
{
	struct aead_instance_ctx *ctx;
	struct aead_instance *inst;
	struct aead_alg *alg;
	const char *name;
	u32 type = 0;
	u32 mask = CRYPTO_ALG_ASYNC;
	int err;

	cryptd_check_internal(tb, &type, &mask);

	name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(name))
		return PTR_ERR(name);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = aead_instance_ctx(inst);
	ctx->queue = queue;

	crypto_set_aead_spawn(&ctx->aead_spawn, aead_crypto_instance(inst));
	err = crypto_grab_aead(&ctx->aead_spawn, name, type, mask);
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_aead_alg(&ctx->aead_spawn);
	err = cryptd_init_instance(aead_crypto_instance(inst), &alg->base);
	if (err)
		goto out_drop_aead;

	inst->alg.base.cra_flags = CRYPTO_ALG_ASYNC |
				   (alg->base.cra_flags & CRYPTO_ALG_INTERNAL);
	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_aead_ctx);

	inst->alg.ivsize = crypto_aead_alg_ivsize(alg);
	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

	inst->alg.init = cryptd_aead_init_tfm;
	inst->alg.exit = cryptd_aead_exit_tfm;
	inst->alg.setkey = cryptd_aead_setkey;
	inst->alg.setauthsize = cryptd_aead_setauthsize;
	inst->alg.encrypt = cryptd_aead_encrypt_enqueue;
	inst->alg.decrypt = cryptd_aead_decrypt_enqueue;

	err = aead_register_instance(tmpl, inst);
	if (err) {
out_drop_aead:
		crypto_drop_aead(&ctx->aead_spawn);
out_free_inst:
		kfree(inst);
	}
	return err;
}

static struct cryptd_queue queue;

static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_BLKCIPHER:
		return cryptd_create_skcipher(tmpl, tb, &queue);
	case CRYPTO_ALG_TYPE_DIGEST:
		return cryptd_create_hash(tmpl, tb, &queue);
	case CRYPTO_ALG_TYPE_AEAD:
		return cryptd_create_aead(tmpl, tb, &queue);
	}

	return -EINVAL;
}

static void cryptd_free(struct crypto_instance *inst)
{
	struct cryptd_instance_ctx *ctx = crypto_instance_ctx(inst);
	struct hashd_instance_ctx *hctx = crypto_instance_ctx(inst);
	struct aead_instance_ctx *aead_ctx = crypto_instance_ctx(inst);

	switch (inst->alg.cra_flags & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_AHASH:
		crypto_drop_shash(&hctx->spawn);
		kfree(ahash_instance(inst));
		return;
	case CRYPTO_ALG_TYPE_AEAD:
		crypto_drop_aead(&aead_ctx->aead_spawn);
		kfree(aead_instance(inst));
		return;
	default:
		crypto_drop_spawn(&ctx->spawn);
		kfree(inst);
	}
}

static struct crypto_template cryptd_tmpl = {
	.name = "cryptd",
	.create = cryptd_create,
	.free = cryptd_free,
	.module = THIS_MODULE,
};

struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name,
					      u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_skcipher_ctx *ctx;
	struct crypto_skcipher *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);

	tfm = crypto_alloc_skcipher(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);

	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_skcipher(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_skcipher_ctx(tfm);
	atomic_set(&ctx->refcnt, 1);

	return container_of(tfm, struct cryptd_skcipher, base);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_skcipher);
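
/*
 * Typical caller pattern (illustrative sketch only; the algorithm name
 * below is made up, not taken from this file):
 *
 *	struct cryptd_skcipher *ctfm;
 *
 *	ctfm = cryptd_alloc_skcipher("__aes-helper", CRYPTO_ALG_INTERNAL,
 *				     CRYPTO_ALG_INTERNAL);
 *	if (IS_ERR(ctfm))
 *		return PTR_ERR(ctfm);
 *	...
 *	cryptd_free_skcipher(ctfm);
 *
 * Drivers with an internal synchronous implementation (e.g. the x86 AES-NI
 * code) wrap it this way to get an asynchronous front end.
 */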

struct crypto_skcipher *cryptd_skcipher_child(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	return &ctx->child->base;
}
EXPORT_SYMBOL_GPL(cryptd_skcipher_child);

bool cryptd_skcipher_queued(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	return atomic_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_skcipher_queued);

void cryptd_free_skcipher(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	if (atomic_dec_and_test(&ctx->refcnt))
		crypto_free_skcipher(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_skcipher);

struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
					u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_hash_ctx *ctx;
	struct crypto_ahash *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_ahash(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_ahash(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_ahash_ctx(tfm);
	atomic_set(&ctx->refcnt, 1);

	return __cryptd_ahash_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ahash);

struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_child);

struct shash_desc *cryptd_shash_desc(struct ahash_request *req)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return &rctx->desc;
}
EXPORT_SYMBOL_GPL(cryptd_shash_desc);

bool cryptd_ahash_queued(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return atomic_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_queued);

void cryptd_free_ahash(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	if (atomic_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ahash);

struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
				      u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_aead_ctx *ctx;
	struct crypto_aead *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_aead(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_aead(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_aead_ctx(tfm);
	atomic_set(&ctx->refcnt, 1);

	return __cryptd_aead_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_aead);

struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx;

	ctx = crypto_aead_ctx(&tfm->base);
	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_aead_child);

bool cryptd_aead_queued(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);

	return atomic_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_aead_queued);

void cryptd_free_aead(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);

	if (atomic_dec_and_test(&ctx->refcnt))
		crypto_free_aead(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_aead);

static int __init cryptd_init(void)
{
	int err;

	err = cryptd_init_queue(&queue, cryptd_max_cpu_qlen);
	if (err)
		return err;

	err = crypto_register_template(&cryptd_tmpl);
	if (err)
		cryptd_fini_queue(&queue);

	return err;
}

static void __exit cryptd_exit(void)
{
	cryptd_fini_queue(&queue);
	crypto_unregister_template(&cryptd_tmpl);
}

subsys_initcall(cryptd_init);
module_exit(cryptd_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Software async crypto daemon");
MODULE_ALIAS_CRYPTO("cryptd");