/*
 * NOTE(provenance): this file is an annotated git-blame export of
 * crypto/pcrypt.c from the mirror_ubuntu-hirsute-kernel tree
 * (via git.proxmox.com gitweb). Blame metadata — commit ids, author
 * initials, and the file's original line numbers — is interleaved
 * with the source text below.
 */
a61127c2 1// SPDX-License-Identifier: GPL-2.0-only
5068c7a8
SK
2/*
3 * pcrypt - Parallel crypto wrapper.
4 *
5 * Copyright (C) 2009 secunet Security Networks AG
6 * Copyright (C) 2009 Steffen Klassert <steffen.klassert@secunet.com>
5068c7a8
SK
7 */
8
9#include <crypto/algapi.h>
10#include <crypto/internal/aead.h>
a5a22e57 11#include <linux/atomic.h>
5068c7a8
SK
12#include <linux/err.h>
13#include <linux/init.h>
14#include <linux/module.h>
15#include <linux/slab.h>
e15bacbe 16#include <linux/notifier.h>
a3fb1e33 17#include <linux/kobject.h>
d3f64e46 18#include <linux/cpu.h>
5068c7a8
SK
19#include <crypto/pcrypt.h>
20
c57e842e 21struct padata_pcrypt {
e15bacbe 22 struct padata_instance *pinst;
e15bacbe
DK
23
24 /*
25 * Cpumask for callback CPUs. It should be
26 * equal to serial cpumask of corresponding padata instance,
27 * so it is updated when padata notifies us about serial
28 * cpumask change.
29 *
30 * cb_cpumask is protected by RCU. This fact prevents us from
31 * using cpumask_var_t directly because the actual type of
32 * cpumsak_var_t depends on kernel configuration(particularly on
33 * CONFIG_CPUMASK_OFFSTACK macro). Depending on the configuration
34 * cpumask_var_t may be either a pointer to the struct cpumask
35 * or a variable allocated on the stack. Thus we can not safely use
36 * cpumask_var_t with RCU operations such as rcu_assign_pointer or
37 * rcu_dereference. So cpumask_var_t is wrapped with struct
38 * pcrypt_cpumask which makes possible to use it with RCU.
39 */
40 struct pcrypt_cpumask {
41 cpumask_var_t mask;
42 } *cb_cpumask;
43 struct notifier_block nblock;
44};
45
c57e842e
SK
46static struct padata_pcrypt pencrypt;
47static struct padata_pcrypt pdecrypt;
a3fb1e33 48static struct kset *pcrypt_kset;
5068c7a8
SK
49
50struct pcrypt_instance_ctx {
66d948e7 51 struct crypto_aead_spawn spawn;
a5a22e57 52 atomic_t tfm_count;
5068c7a8
SK
53};
54
55struct pcrypt_aead_ctx {
56 struct crypto_aead *child;
57 unsigned int cb_cpu;
58};
59
5068c7a8
SK
60static int pcrypt_aead_setkey(struct crypto_aead *parent,
61 const u8 *key, unsigned int keylen)
62{
63 struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(parent);
64
65 return crypto_aead_setkey(ctx->child, key, keylen);
66}
67
68static int pcrypt_aead_setauthsize(struct crypto_aead *parent,
69 unsigned int authsize)
70{
71 struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(parent);
72
73 return crypto_aead_setauthsize(ctx->child, authsize);
74}
75
76static void pcrypt_aead_serial(struct padata_priv *padata)
77{
78 struct pcrypt_request *preq = pcrypt_padata_request(padata);
79 struct aead_request *req = pcrypt_request_ctx(preq);
80
81 aead_request_complete(req->base.data, padata->info);
82}
83
5068c7a8
SK
84static void pcrypt_aead_done(struct crypto_async_request *areq, int err)
85{
86 struct aead_request *req = areq->data;
87 struct pcrypt_request *preq = aead_request_ctx(req);
88 struct padata_priv *padata = pcrypt_request_padata(preq);
89
90 padata->info = err;
91 req->base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
92
93 padata_do_serial(padata);
94}
95
96static void pcrypt_aead_enc(struct padata_priv *padata)
97{
98 struct pcrypt_request *preq = pcrypt_padata_request(padata);
99 struct aead_request *req = pcrypt_request_ctx(preq);
100
101 padata->info = crypto_aead_encrypt(req);
102
5a1436be 103 if (padata->info == -EINPROGRESS)
5068c7a8
SK
104 return;
105
106 padata_do_serial(padata);
107}
108
109static int pcrypt_aead_encrypt(struct aead_request *req)
110{
111 int err;
112 struct pcrypt_request *preq = aead_request_ctx(req);
113 struct aead_request *creq = pcrypt_request_ctx(preq);
114 struct padata_priv *padata = pcrypt_request_padata(preq);
115 struct crypto_aead *aead = crypto_aead_reqtfm(req);
116 struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(aead);
117 u32 flags = aead_request_flags(req);
118
119 memset(padata, 0, sizeof(struct padata_priv));
120
121 padata->parallel = pcrypt_aead_enc;
122 padata->serial = pcrypt_aead_serial;
123
124 aead_request_set_tfm(creq, ctx->child);
125 aead_request_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP,
126 pcrypt_aead_done, req);
127 aead_request_set_crypt(creq, req->src, req->dst,
128 req->cryptlen, req->iv);
0496f560 129 aead_request_set_ad(creq, req->assoclen);
5068c7a8 130
e6ce0e08 131 err = padata_do_parallel(pencrypt.pinst, padata, &ctx->cb_cpu);
83f619f3
SK
132 if (!err)
133 return -EINPROGRESS;
5068c7a8
SK
134
135 return err;
136}
137
138static void pcrypt_aead_dec(struct padata_priv *padata)
139{
140 struct pcrypt_request *preq = pcrypt_padata_request(padata);
141 struct aead_request *req = pcrypt_request_ctx(preq);
142
143 padata->info = crypto_aead_decrypt(req);
144
5a1436be 145 if (padata->info == -EINPROGRESS)
5068c7a8
SK
146 return;
147
148 padata_do_serial(padata);
149}
150
151static int pcrypt_aead_decrypt(struct aead_request *req)
152{
153 int err;
154 struct pcrypt_request *preq = aead_request_ctx(req);
155 struct aead_request *creq = pcrypt_request_ctx(preq);
156 struct padata_priv *padata = pcrypt_request_padata(preq);
157 struct crypto_aead *aead = crypto_aead_reqtfm(req);
158 struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(aead);
159 u32 flags = aead_request_flags(req);
160
161 memset(padata, 0, sizeof(struct padata_priv));
162
163 padata->parallel = pcrypt_aead_dec;
164 padata->serial = pcrypt_aead_serial;
165
166 aead_request_set_tfm(creq, ctx->child);
167 aead_request_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP,
168 pcrypt_aead_done, req);
169 aead_request_set_crypt(creq, req->src, req->dst,
170 req->cryptlen, req->iv);
0496f560 171 aead_request_set_ad(creq, req->assoclen);
5068c7a8 172
e6ce0e08 173 err = padata_do_parallel(pdecrypt.pinst, padata, &ctx->cb_cpu);
83f619f3
SK
174 if (!err)
175 return -EINPROGRESS;
5068c7a8
SK
176
177 return err;
178}
179
0496f560 180static int pcrypt_aead_init_tfm(struct crypto_aead *tfm)
5068c7a8
SK
181{
182 int cpu, cpu_index;
0496f560
HX
183 struct aead_instance *inst = aead_alg_instance(tfm);
184 struct pcrypt_instance_ctx *ictx = aead_instance_ctx(inst);
185 struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(tfm);
5068c7a8
SK
186 struct crypto_aead *cipher;
187
a5a22e57
HX
188 cpu_index = (unsigned int)atomic_inc_return(&ictx->tfm_count) %
189 cpumask_weight(cpu_online_mask);
5068c7a8 190
fbf0ca1b 191 ctx->cb_cpu = cpumask_first(cpu_online_mask);
5068c7a8 192 for (cpu = 0; cpu < cpu_index; cpu++)
fbf0ca1b 193 ctx->cb_cpu = cpumask_next(ctx->cb_cpu, cpu_online_mask);
5068c7a8 194
0496f560 195 cipher = crypto_spawn_aead(&ictx->spawn);
5068c7a8
SK
196
197 if (IS_ERR(cipher))
198 return PTR_ERR(cipher);
199
200 ctx->child = cipher;
0496f560
HX
201 crypto_aead_set_reqsize(tfm, sizeof(struct pcrypt_request) +
202 sizeof(struct aead_request) +
203 crypto_aead_reqsize(cipher));
5068c7a8
SK
204
205 return 0;
206}
207
0496f560 208static void pcrypt_aead_exit_tfm(struct crypto_aead *tfm)
5068c7a8 209{
0496f560 210 struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(tfm);
5068c7a8
SK
211
212 crypto_free_aead(ctx->child);
213}
214
d76c6810
EB
215static void pcrypt_free(struct aead_instance *inst)
216{
217 struct pcrypt_instance_ctx *ctx = aead_instance_ctx(inst);
218
219 crypto_drop_aead(&ctx->spawn);
220 kfree(inst);
221}
222
66d948e7
HX
223static int pcrypt_init_instance(struct crypto_instance *inst,
224 struct crypto_alg *alg)
5068c7a8 225{
5068c7a8
SK
226 if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
227 "pcrypt(%s)", alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
66d948e7 228 return -ENAMETOOLONG;
5068c7a8
SK
229
230 memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);
231
5068c7a8
SK
232 inst->alg.cra_priority = alg->cra_priority + 100;
233 inst->alg.cra_blocksize = alg->cra_blocksize;
234 inst->alg.cra_alignmask = alg->cra_alignmask;
235
66d948e7 236 return 0;
5068c7a8
SK
237}
238
0496f560
HX
239static int pcrypt_create_aead(struct crypto_template *tmpl, struct rtattr **tb,
240 u32 type, u32 mask)
5068c7a8 241{
66d948e7 242 struct pcrypt_instance_ctx *ctx;
846f97df 243 struct crypto_attr_type *algt;
0496f560
HX
244 struct aead_instance *inst;
245 struct aead_alg *alg;
66d948e7
HX
246 const char *name;
247 int err;
248
846f97df
HX
249 algt = crypto_get_attr_type(tb);
250 if (IS_ERR(algt))
251 return PTR_ERR(algt);
252
66d948e7
HX
253 name = crypto_attr_alg_name(tb[1]);
254 if (IS_ERR(name))
0496f560 255 return PTR_ERR(name);
66d948e7
HX
256
257 inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
258 if (!inst)
0496f560 259 return -ENOMEM;
66d948e7 260
0496f560
HX
261 ctx = aead_instance_ctx(inst);
262 crypto_set_aead_spawn(&ctx->spawn, aead_crypto_instance(inst));
5068c7a8 263
5e4b8c1f 264 err = crypto_grab_aead(&ctx->spawn, name, 0, 0);
66d948e7
HX
265 if (err)
266 goto out_free_inst;
5068c7a8 267
0496f560
HX
268 alg = crypto_spawn_aead_alg(&ctx->spawn);
269 err = pcrypt_init_instance(aead_crypto_instance(inst), &alg->base);
66d948e7
HX
270 if (err)
271 goto out_drop_aead;
5068c7a8 272
846f97df 273 inst->alg.base.cra_flags = CRYPTO_ALG_ASYNC;
846f97df 274
0496f560
HX
275 inst->alg.ivsize = crypto_aead_alg_ivsize(alg);
276 inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);
5068c7a8 277
0496f560 278 inst->alg.base.cra_ctxsize = sizeof(struct pcrypt_aead_ctx);
5068c7a8 279
0496f560
HX
280 inst->alg.init = pcrypt_aead_init_tfm;
281 inst->alg.exit = pcrypt_aead_exit_tfm;
5068c7a8 282
0496f560
HX
283 inst->alg.setkey = pcrypt_aead_setkey;
284 inst->alg.setauthsize = pcrypt_aead_setauthsize;
285 inst->alg.encrypt = pcrypt_aead_encrypt;
286 inst->alg.decrypt = pcrypt_aead_decrypt;
5068c7a8 287
d76c6810
EB
288 inst->free = pcrypt_free;
289
0496f560
HX
290 err = aead_register_instance(tmpl, inst);
291 if (err)
292 goto out_drop_aead;
5068c7a8 293
66d948e7 294out:
0496f560 295 return err;
66d948e7
HX
296
297out_drop_aead:
298 crypto_drop_aead(&ctx->spawn);
299out_free_inst:
300 kfree(inst);
66d948e7 301 goto out;
5068c7a8
SK
302}
303
0496f560 304static int pcrypt_create(struct crypto_template *tmpl, struct rtattr **tb)
5068c7a8
SK
305{
306 struct crypto_attr_type *algt;
307
308 algt = crypto_get_attr_type(tb);
309 if (IS_ERR(algt))
0496f560 310 return PTR_ERR(algt);
5068c7a8
SK
311
312 switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
313 case CRYPTO_ALG_TYPE_AEAD:
0496f560 314 return pcrypt_create_aead(tmpl, tb, algt->type, algt->mask);
5068c7a8
SK
315 }
316
0496f560 317 return -EINVAL;
5068c7a8
SK
318}
319
e15bacbe
DK
320static int pcrypt_cpumask_change_notify(struct notifier_block *self,
321 unsigned long val, void *data)
322{
c57e842e 323 struct padata_pcrypt *pcrypt;
e15bacbe 324 struct pcrypt_cpumask *new_mask, *old_mask;
d3f64e46 325 struct padata_cpumask *cpumask = (struct padata_cpumask *)data;
e15bacbe
DK
326
327 if (!(val & PADATA_CPU_SERIAL))
328 return 0;
329
c57e842e 330 pcrypt = container_of(self, struct padata_pcrypt, nblock);
e15bacbe
DK
331 new_mask = kmalloc(sizeof(*new_mask), GFP_KERNEL);
332 if (!new_mask)
333 return -ENOMEM;
334 if (!alloc_cpumask_var(&new_mask->mask, GFP_KERNEL)) {
335 kfree(new_mask);
336 return -ENOMEM;
337 }
338
339 old_mask = pcrypt->cb_cpumask;
340
d3f64e46 341 cpumask_copy(new_mask->mask, cpumask->cbcpu);
e15bacbe 342 rcu_assign_pointer(pcrypt->cb_cpumask, new_mask);
a0076e17 343 synchronize_rcu();
e15bacbe
DK
344
345 free_cpumask_var(old_mask->mask);
346 kfree(old_mask);
347 return 0;
348}
349
a3fb1e33
DK
350static int pcrypt_sysfs_add(struct padata_instance *pinst, const char *name)
351{
352 int ret;
353
354 pinst->kobj.kset = pcrypt_kset;
b1e3874c 355 ret = kobject_add(&pinst->kobj, NULL, "%s", name);
a3fb1e33
DK
356 if (!ret)
357 kobject_uevent(&pinst->kobj, KOBJ_ADD);
358
359 return ret;
360}
361
c57e842e
SK
362static int pcrypt_init_padata(struct padata_pcrypt *pcrypt,
363 const char *name)
e15bacbe
DK
364{
365 int ret = -ENOMEM;
366 struct pcrypt_cpumask *mask;
367
d3f64e46
SK
368 get_online_cpus();
369
b128a304 370 pcrypt->pinst = padata_alloc_possible(name);
e15bacbe 371 if (!pcrypt->pinst)
b128a304 372 goto err;
e15bacbe
DK
373
374 mask = kmalloc(sizeof(*mask), GFP_KERNEL);
375 if (!mask)
376 goto err_free_padata;
377 if (!alloc_cpumask_var(&mask->mask, GFP_KERNEL)) {
378 kfree(mask);
379 goto err_free_padata;
380 }
381
fbf0ca1b 382 cpumask_and(mask->mask, cpu_possible_mask, cpu_online_mask);
e15bacbe
DK
383 rcu_assign_pointer(pcrypt->cb_cpumask, mask);
384
385 pcrypt->nblock.notifier_call = pcrypt_cpumask_change_notify;
386 ret = padata_register_cpumask_notifier(pcrypt->pinst, &pcrypt->nblock);
387 if (ret)
388 goto err_free_cpumask;
389
a3fb1e33
DK
390 ret = pcrypt_sysfs_add(pcrypt->pinst, name);
391 if (ret)
392 goto err_unregister_notifier;
393
d3f64e46
SK
394 put_online_cpus();
395
e15bacbe 396 return ret;
d3f64e46 397
a3fb1e33
DK
398err_unregister_notifier:
399 padata_unregister_cpumask_notifier(pcrypt->pinst, &pcrypt->nblock);
e15bacbe
DK
400err_free_cpumask:
401 free_cpumask_var(mask->mask);
402 kfree(mask);
403err_free_padata:
404 padata_free(pcrypt->pinst);
e15bacbe 405err:
d3f64e46
SK
406 put_online_cpus();
407
e15bacbe
DK
408 return ret;
409}
410
c57e842e 411static void pcrypt_fini_padata(struct padata_pcrypt *pcrypt)
e15bacbe
DK
412{
413 free_cpumask_var(pcrypt->cb_cpumask->mask);
414 kfree(pcrypt->cb_cpumask);
415
416 padata_stop(pcrypt->pinst);
417 padata_unregister_cpumask_notifier(pcrypt->pinst, &pcrypt->nblock);
e15bacbe
DK
418 padata_free(pcrypt->pinst);
419}
420
5068c7a8
SK
421static struct crypto_template pcrypt_tmpl = {
422 .name = "pcrypt",
0496f560 423 .create = pcrypt_create,
5068c7a8
SK
424 .module = THIS_MODULE,
425};
426
427static int __init pcrypt_init(void)
428{
a3fb1e33
DK
429 int err = -ENOMEM;
430
431 pcrypt_kset = kset_create_and_add("pcrypt", NULL, kernel_kobj);
432 if (!pcrypt_kset)
433 goto err;
5068c7a8 434
c57e842e 435 err = pcrypt_init_padata(&pencrypt, "pencrypt");
4c879170 436 if (err)
a3fb1e33 437 goto err_unreg_kset;
4c879170 438
c57e842e 439 err = pcrypt_init_padata(&pdecrypt, "pdecrypt");
4c879170 440 if (err)
e15bacbe 441 goto err_deinit_pencrypt;
4c879170 442
e15bacbe
DK
443 padata_start(pencrypt.pinst);
444 padata_start(pdecrypt.pinst);
5068c7a8 445
e15bacbe 446 return crypto_register_template(&pcrypt_tmpl);
5068c7a8 447
e15bacbe 448err_deinit_pencrypt:
c57e842e 449 pcrypt_fini_padata(&pencrypt);
a3fb1e33
DK
450err_unreg_kset:
451 kset_unregister(pcrypt_kset);
5068c7a8 452err:
4c879170 453 return err;
5068c7a8
SK
454}
455
456static void __exit pcrypt_exit(void)
457{
c57e842e
SK
458 pcrypt_fini_padata(&pencrypt);
459 pcrypt_fini_padata(&pdecrypt);
5068c7a8 460
a3fb1e33 461 kset_unregister(pcrypt_kset);
5068c7a8
SK
462 crypto_unregister_template(&pcrypt_tmpl);
463}
464
c4741b23 465subsys_initcall(pcrypt_init);
5068c7a8
SK
466module_exit(pcrypt_exit);
467
468MODULE_LICENSE("GPL");
469MODULE_AUTHOR("Steffen Klassert <steffen.klassert@secunet.com>");
470MODULE_DESCRIPTION("Parallel crypto wrapper");
4943ba16 471MODULE_ALIAS_CRYPTO("pcrypt");