// SPDX-License-Identifier: GPL-2.0-only
/*
 * pcrypt - Parallel crypto wrapper.
 *
 * Copyright (C) 2009 secunet Security Networks AG
 * Copyright (C) 2009 Steffen Klassert <steffen.klassert@secunet.com>
 */
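/*
 * pcrypt parallelizes an AEAD algorithm across CPUs via padata: requests
 * are encrypted/decrypted in parallel and then completed back in their
 * original submission order. Instances are created through the template
 * name, e.g. "pcrypt(rfc4106(gcm(aes)))" (one example; any AEAD works).
 */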
#include <crypto/algapi.h>
#include <crypto/internal/aead.h>
#include <linux/atomic.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/notifier.h>
#include <linux/kobject.h>
#include <linux/cpu.h>
#include <crypto/pcrypt.h>
struct padata_pcrypt {
	struct padata_instance *pinst;

	/*
	 * Cpumask for callback CPUs. It should be
	 * equal to the serial cpumask of the corresponding padata instance,
	 * so it is updated when padata notifies us about a serial
	 * cpumask change.
	 *
	 * cb_cpumask is protected by RCU. This fact prevents us from
	 * using cpumask_var_t directly because the actual type of
	 * cpumask_var_t depends on the kernel configuration (particularly
	 * on the CONFIG_CPUMASK_OFFSTACK macro). Depending on the
	 * configuration, cpumask_var_t may be either a pointer to struct
	 * cpumask or a variable allocated on the stack. Thus we can not
	 * safely use cpumask_var_t with RCU operations such as
	 * rcu_assign_pointer or rcu_dereference. So cpumask_var_t is
	 * wrapped with struct pcrypt_cpumask, which makes it possible to
	 * use it with RCU.
	 */
	struct pcrypt_cpumask {
		cpumask_var_t mask;
	} *cb_cpumask;
	struct notifier_block nblock;
};
static struct padata_pcrypt pencrypt;
static struct padata_pcrypt pdecrypt;
static struct kset *pcrypt_kset;
struct pcrypt_instance_ctx {
	struct crypto_aead_spawn spawn;
	atomic_t tfm_count;
};

struct pcrypt_aead_ctx {
	struct crypto_aead *child;
	unsigned int cb_cpu;
};
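/*
 * Select the CPU on which the serial completion callback will run. If the
 * caller's preferred CPU is not in the current callback cpumask, pick a
 * deterministic fallback from the mask, then queue the request via padata.
 */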
static int pcrypt_do_parallel(struct padata_priv *padata, unsigned int *cb_cpu,
			      struct padata_pcrypt *pcrypt)
{
	unsigned int cpu_index, cpu, i;
	struct pcrypt_cpumask *cpumask;

	cpu = *cb_cpu;

	rcu_read_lock_bh();
	cpumask = rcu_dereference_bh(pcrypt->cb_cpumask);
	if (cpumask_test_cpu(cpu, cpumask->mask))
		goto out;

	if (!cpumask_weight(cpumask->mask))
		goto out;

	cpu_index = cpu % cpumask_weight(cpumask->mask);

	cpu = cpumask_first(cpumask->mask);
	for (i = 0; i < cpu_index; i++)
		cpu = cpumask_next(cpu, cpumask->mask);

	*cb_cpu = cpu;

out:
	rcu_read_unlock_bh();
	return padata_do_parallel(pcrypt->pinst, padata, cpu);
}
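/* setkey()/setauthsize() are forwarded unchanged to the child AEAD. */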
static int pcrypt_aead_setkey(struct crypto_aead *parent,
			      const u8 *key, unsigned int keylen)
{
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(parent);

	return crypto_aead_setkey(ctx->child, key, keylen);
}
static int pcrypt_aead_setauthsize(struct crypto_aead *parent,
				   unsigned int authsize)
{
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(parent);

	return crypto_aead_setauthsize(ctx->child, authsize);
}
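/*
 * Serial callback: called by padata once the request's turn has come in
 * the original submission order; completes the request to the caller.
 */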
static void pcrypt_aead_serial(struct padata_priv *padata)
{
	struct pcrypt_request *preq = pcrypt_padata_request(padata);
	struct aead_request *req = pcrypt_request_ctx(preq);

	aead_request_complete(req->base.data, padata->info);
}
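/*
 * Completion callback of the (asynchronous) child AEAD: record the result
 * and queue the request for in-order serialization. MAY_SLEEP is cleared
 * because completion may run in atomic context.
 */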
static void pcrypt_aead_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;
	struct pcrypt_request *preq = aead_request_ctx(req);
	struct padata_priv *padata = pcrypt_request_padata(preq);

	padata->info = err;
	req->base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	padata_do_serial(padata);
}
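/*
 * Parallel worker for encryption, run on the padata-chosen CPU. If the
 * child returns -EINPROGRESS, pcrypt_aead_done() will serialize later.
 */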
static void pcrypt_aead_enc(struct padata_priv *padata)
{
	struct pcrypt_request *preq = pcrypt_padata_request(padata);
	struct aead_request *req = pcrypt_request_ctx(preq);

	padata->info = crypto_aead_encrypt(req);

	if (padata->info == -EINPROGRESS)
		return;

	padata_do_serial(padata);
}
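/*
 * Entry point for encryption: set up the child request as a copy of the
 * caller's and push it into the "pencrypt" padata instance. Returns
 * -EINPROGRESS on successful submission.
 */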
static int pcrypt_aead_encrypt(struct aead_request *req)
{
	int err;
	struct pcrypt_request *preq = aead_request_ctx(req);
	struct aead_request *creq = pcrypt_request_ctx(preq);
	struct padata_priv *padata = pcrypt_request_padata(preq);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(aead);
	u32 flags = aead_request_flags(req);

	memset(padata, 0, sizeof(struct padata_priv));

	padata->parallel = pcrypt_aead_enc;
	padata->serial = pcrypt_aead_serial;

	aead_request_set_tfm(creq, ctx->child);
	aead_request_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP,
				  pcrypt_aead_done, req);
	aead_request_set_crypt(creq, req->src, req->dst,
			       req->cryptlen, req->iv);
	aead_request_set_ad(creq, req->assoclen);

	err = pcrypt_do_parallel(padata, &ctx->cb_cpu, &pencrypt);
	if (!err)
		return -EINPROGRESS;

	return err;
}
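/* The decryption path mirrors encryption, using the "pdecrypt" instance. */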
static void pcrypt_aead_dec(struct padata_priv *padata)
{
	struct pcrypt_request *preq = pcrypt_padata_request(padata);
	struct aead_request *req = pcrypt_request_ctx(preq);

	padata->info = crypto_aead_decrypt(req);

	if (padata->info == -EINPROGRESS)
		return;

	padata_do_serial(padata);
}
static int pcrypt_aead_decrypt(struct aead_request *req)
{
	int err;
	struct pcrypt_request *preq = aead_request_ctx(req);
	struct aead_request *creq = pcrypt_request_ctx(preq);
	struct padata_priv *padata = pcrypt_request_padata(preq);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(aead);
	u32 flags = aead_request_flags(req);

	memset(padata, 0, sizeof(struct padata_priv));

	padata->parallel = pcrypt_aead_dec;
	padata->serial = pcrypt_aead_serial;

	aead_request_set_tfm(creq, ctx->child);
	aead_request_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP,
				  pcrypt_aead_done, req);
	aead_request_set_crypt(creq, req->src, req->dst,
			       req->cryptlen, req->iv);
	aead_request_set_ad(creq, req->assoclen);

	err = pcrypt_do_parallel(padata, &ctx->cb_cpu, &pdecrypt);
	if (!err)
		return -EINPROGRESS;

	return err;
}
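/*
 * Transform init: spread callback CPUs over the online CPUs round-robin
 * (one step of tfm_count per transform) and instantiate the child AEAD.
 */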
static int pcrypt_aead_init_tfm(struct crypto_aead *tfm)
{
	int cpu, cpu_index;
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct pcrypt_instance_ctx *ictx = aead_instance_ctx(inst);
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *cipher;

	cpu_index = (unsigned int)atomic_inc_return(&ictx->tfm_count) %
		    cpumask_weight(cpu_online_mask);

	ctx->cb_cpu = cpumask_first(cpu_online_mask);
	for (cpu = 0; cpu < cpu_index; cpu++)
		ctx->cb_cpu = cpumask_next(ctx->cb_cpu, cpu_online_mask);

	cipher = crypto_spawn_aead(&ictx->spawn);

	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	crypto_aead_set_reqsize(tfm, sizeof(struct pcrypt_request) +
				     sizeof(struct aead_request) +
				     crypto_aead_reqsize(cipher));

	return 0;
}
static void pcrypt_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
}
static void pcrypt_free(struct aead_instance *inst)
{
	struct pcrypt_instance_ctx *ctx = aead_instance_ctx(inst);

	crypto_drop_aead(&ctx->spawn);
	kfree(inst);
}
static int pcrypt_init_instance(struct crypto_instance *inst,
				struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "pcrypt(%s)", alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.cra_priority = alg->cra_priority + 100;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;

	return 0;
}
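/*
 * Create a pcrypt AEAD instance wrapping the algorithm named in the
 * template parameters, and wire up the parallel entry points.
 */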
static int pcrypt_create_aead(struct crypto_template *tmpl, struct rtattr **tb,
			      u32 type, u32 mask)
{
	struct pcrypt_instance_ctx *ctx;
	struct crypto_attr_type *algt;
	struct aead_instance *inst;
	struct aead_alg *alg;
	const char *name;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(name))
		return PTR_ERR(name);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = aead_instance_ctx(inst);
	crypto_set_aead_spawn(&ctx->spawn, aead_crypto_instance(inst));

	err = crypto_grab_aead(&ctx->spawn, name, 0, 0);
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_aead_alg(&ctx->spawn);
	err = pcrypt_init_instance(aead_crypto_instance(inst), &alg->base);
	if (err)
		goto out_drop_aead;

	inst->alg.base.cra_flags = CRYPTO_ALG_ASYNC;

	inst->alg.ivsize = crypto_aead_alg_ivsize(alg);
	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

	inst->alg.base.cra_ctxsize = sizeof(struct pcrypt_aead_ctx);

	inst->alg.init = pcrypt_aead_init_tfm;
	inst->alg.exit = pcrypt_aead_exit_tfm;

	inst->alg.setkey = pcrypt_aead_setkey;
	inst->alg.setauthsize = pcrypt_aead_setauthsize;
	inst->alg.encrypt = pcrypt_aead_encrypt;
	inst->alg.decrypt = pcrypt_aead_decrypt;

	inst->free = pcrypt_free;

	err = aead_register_instance(tmpl, inst);
	if (err)
		goto out_drop_aead;

out:
	return err;

out_drop_aead:
	crypto_drop_aead(&ctx->spawn);
out_free_inst:
	kfree(inst);
	goto out;
}
static int pcrypt_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_AEAD:
		return pcrypt_create_aead(tmpl, tb, algt->type, algt->mask);
	}

	return -EINVAL;
}
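/*
 * padata notifier: on a serial cpumask change, publish a fresh copy of
 * the mask via RCU and free the old one after a grace period.
 */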
static int pcrypt_cpumask_change_notify(struct notifier_block *self,
					unsigned long val, void *data)
{
	struct padata_pcrypt *pcrypt;
	struct pcrypt_cpumask *new_mask, *old_mask;
	struct padata_cpumask *cpumask = (struct padata_cpumask *)data;

	if (!(val & PADATA_CPU_SERIAL))
		return 0;

	pcrypt = container_of(self, struct padata_pcrypt, nblock);
	new_mask = kmalloc(sizeof(*new_mask), GFP_KERNEL);
	if (!new_mask)
		return -ENOMEM;
	if (!alloc_cpumask_var(&new_mask->mask, GFP_KERNEL)) {
		kfree(new_mask);
		return -ENOMEM;
	}

	old_mask = pcrypt->cb_cpumask;

	cpumask_copy(new_mask->mask, cpumask->cbcpu);
	rcu_assign_pointer(pcrypt->cb_cpumask, new_mask);
	synchronize_rcu();

	free_cpumask_var(old_mask->mask);
	kfree(old_mask);
	return 0;
}
static int pcrypt_sysfs_add(struct padata_instance *pinst, const char *name)
{
	int ret;

	pinst->kobj.kset = pcrypt_kset;
	ret = kobject_add(&pinst->kobj, NULL, "%s", name);
	if (!ret)
		kobject_uevent(&pinst->kobj, KOBJ_ADD);

	return ret;
}
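/*
 * Set up one direction: allocate the padata instance, seed its RCU
 * callback cpumask, register the cpumask notifier and add a sysfs node.
 */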
static int pcrypt_init_padata(struct padata_pcrypt *pcrypt,
			      const char *name)
{
	int ret = -ENOMEM;
	struct pcrypt_cpumask *mask;

	get_online_cpus();

	pcrypt->pinst = padata_alloc_possible(name);
	if (!pcrypt->pinst)
		goto err;

	mask = kmalloc(sizeof(*mask), GFP_KERNEL);
	if (!mask)
		goto err_free_padata;
	if (!alloc_cpumask_var(&mask->mask, GFP_KERNEL)) {
		kfree(mask);
		goto err_free_padata;
	}

	cpumask_and(mask->mask, cpu_possible_mask, cpu_online_mask);
	rcu_assign_pointer(pcrypt->cb_cpumask, mask);

	pcrypt->nblock.notifier_call = pcrypt_cpumask_change_notify;
	ret = padata_register_cpumask_notifier(pcrypt->pinst, &pcrypt->nblock);
	if (ret)
		goto err_free_cpumask;

	ret = pcrypt_sysfs_add(pcrypt->pinst, name);
	if (ret)
		goto err_unregister_notifier;

	put_online_cpus();

	return ret;

err_unregister_notifier:
	padata_unregister_cpumask_notifier(pcrypt->pinst, &pcrypt->nblock);
err_free_cpumask:
	free_cpumask_var(mask->mask);
	kfree(mask);
err_free_padata:
	padata_free(pcrypt->pinst);
err:
	put_online_cpus();

	return ret;
}
static void pcrypt_fini_padata(struct padata_pcrypt *pcrypt)
{
	free_cpumask_var(pcrypt->cb_cpumask->mask);
	kfree(pcrypt->cb_cpumask);

	padata_stop(pcrypt->pinst);
	padata_unregister_cpumask_notifier(pcrypt->pinst, &pcrypt->nblock);
	padata_free(pcrypt->pinst);
}
static struct crypto_template pcrypt_tmpl = {
	.name = "pcrypt",
	.create = pcrypt_create,
	.module = THIS_MODULE,
};
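/*
 * One padata instance per direction ("pencrypt"/"pdecrypt"), both shown
 * under /sys/kernel/pcrypt/.
 */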
static int __init pcrypt_init(void)
{
	int err = -ENOMEM;

	pcrypt_kset = kset_create_and_add("pcrypt", NULL, kernel_kobj);
	if (!pcrypt_kset)
		goto err;

	err = pcrypt_init_padata(&pencrypt, "pencrypt");
	if (err)
		goto err_unreg_kset;

	err = pcrypt_init_padata(&pdecrypt, "pdecrypt");
	if (err)
		goto err_deinit_pencrypt;

	padata_start(pencrypt.pinst);
	padata_start(pdecrypt.pinst);

	return crypto_register_template(&pcrypt_tmpl);

err_deinit_pencrypt:
	pcrypt_fini_padata(&pencrypt);
err_unreg_kset:
	kset_unregister(pcrypt_kset);
err:
	return err;
}
static void __exit pcrypt_exit(void)
{
	pcrypt_fini_padata(&pencrypt);
	pcrypt_fini_padata(&pdecrypt);

	kset_unregister(pcrypt_kset);
	crypto_unregister_template(&pcrypt_tmpl);
}
subsys_initcall(pcrypt_init);
module_exit(pcrypt_exit);
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Steffen Klassert <steffen.klassert@secunet.com>");
MODULE_DESCRIPTION("Parallel crypto wrapper");
MODULE_ALIAS_CRYPTO("pcrypt");