/*
 * Shared crypto simd helpers
 *
 * Copyright (c) 2012 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
 * Copyright (c) 2016 Herbert Xu <herbert@gondor.apana.org.au>
 * Copyright (c) 2019 Google LLC
 *
 * Based on aesni-intel_glue.c by:
 *  Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
/*
 * Shared crypto SIMD helpers.  These functions dynamically create and register
 * an skcipher or AEAD algorithm that wraps another, internal algorithm.  The
 * wrapper ensures that the internal algorithm is only executed in a context
 * where SIMD instructions are usable, i.e. where may_use_simd() returns true.
 * If SIMD is already usable, the wrapper directly calls the internal
 * algorithm.  Otherwise it defers execution to a workqueue via cryptd.
 *
 * This is an alternative to the internal algorithm implementing a fallback
 * for the !may_use_simd() case itself.
 *
 * Note that the wrapper algorithm is asynchronous, i.e. it has the
 * CRYPTO_ALG_ASYNC flag set.  Therefore it won't be found by users who
 * explicitly allocate a synchronous algorithm.
 */
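
/*
 * Usage sketch (illustrative only, kept in a comment so nothing here is
 * compiled): a hypothetical arch module that has registered an internal
 * "__xts-aes-impl" skcipher could wrap it as follows.  The algorithm and
 * function names are assumptions made up for this example:
 *
 *	static struct simd_skcipher_alg *example_simd_alg;
 *
 *	static int __init example_mod_init(void)
 *	{
 *		// Creates and registers the async wrapper "xts(aes)" with
 *		// driver name "simd-__xts-aes-impl", which forwards to the
 *		// internal "__xts-aes-impl" either directly or via cryptd.
 *		example_simd_alg = simd_skcipher_create("xts(aes)",
 *							"__xts-aes-impl");
 *		return PTR_ERR_OR_ZERO(example_simd_alg);
 *	}
 *
 *	static void __exit example_mod_exit(void)
 *	{
 *		simd_skcipher_free(example_simd_alg);
 *	}
 */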
#include <crypto/cryptd.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/preempt.h>
#include <asm/simd.h>
/* skcipher support */

struct simd_skcipher_alg {
	const char *ialg_name;
	struct skcipher_alg alg;
};

struct simd_skcipher_ctx {
	struct cryptd_skcipher *cryptd_tfm;
};
static int simd_skcipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
				unsigned int key_len)
{
	struct simd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *child = &ctx->cryptd_tfm->base;
	int err;

	/* Forward the key to the internal algorithm held by cryptd. */
	crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(tfm) &
					 CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(child, key, key_len);
	crypto_skcipher_set_flags(tfm, crypto_skcipher_get_flags(child) &
				       CRYPTO_TFM_RES_MASK);
	return err;
}
static int simd_skcipher_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct simd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_request *subreq;
	struct crypto_skcipher *child;

	subreq = skcipher_request_ctx(req);
	*subreq = *req;

	/*
	 * Call the internal algorithm directly if SIMD is usable, but defer
	 * to cryptd if it is not, or if we are in atomic context while cryptd
	 * still has requests queued (so this request cannot overtake them).
	 */
	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_skcipher_queued(ctx->cryptd_tfm)))
		child = &ctx->cryptd_tfm->base;
	else
		child = cryptd_skcipher_child(ctx->cryptd_tfm);

	skcipher_request_set_tfm(subreq, child);

	return crypto_skcipher_encrypt(subreq);
}
static int simd_skcipher_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct simd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_request *subreq;
	struct crypto_skcipher *child;

	subreq = skcipher_request_ctx(req);
	*subreq = *req;

	/* Same dispatch rule as simd_skcipher_encrypt(). */
	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_skcipher_queued(ctx->cryptd_tfm)))
		child = &ctx->cryptd_tfm->base;
	else
		child = cryptd_skcipher_child(ctx->cryptd_tfm);

	skcipher_request_set_tfm(subreq, child);

	return crypto_skcipher_decrypt(subreq);
}
static void simd_skcipher_exit(struct crypto_skcipher *tfm)
{
	struct simd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);

	cryptd_free_skcipher(ctx->cryptd_tfm);
}
static int simd_skcipher_init(struct crypto_skcipher *tfm)
{
	struct simd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct cryptd_skcipher *cryptd_tfm;
	struct simd_skcipher_alg *salg;
	struct skcipher_alg *alg;
	unsigned reqsize;

	alg = crypto_skcipher_alg(tfm);
	salg = container_of(alg, struct simd_skcipher_alg, alg);

	cryptd_tfm = cryptd_alloc_skcipher(salg->ialg_name,
					   CRYPTO_ALG_INTERNAL,
					   CRYPTO_ALG_INTERNAL);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);

	ctx->cryptd_tfm = cryptd_tfm;

	/*
	 * The subrequest must be large enough for either path: a request to
	 * cryptd or a direct request to the internal (child) algorithm.
	 */
	reqsize = crypto_skcipher_reqsize(cryptd_skcipher_child(cryptd_tfm));
	reqsize = max(reqsize, crypto_skcipher_reqsize(&cryptd_tfm->base));
	reqsize += sizeof(struct skcipher_request);

	crypto_skcipher_set_reqsize(tfm, reqsize);

	return 0;
}
struct simd_skcipher_alg *simd_skcipher_create_compat(const char *algname,
						      const char *drvname,
						      const char *basename)
{
	struct simd_skcipher_alg *salg;
	struct crypto_skcipher *tfm;
	struct skcipher_alg *ialg;
	struct skcipher_alg *alg;
	int err;

	/* Look up the internal algorithm so its properties can be copied. */
	tfm = crypto_alloc_skcipher(basename, CRYPTO_ALG_INTERNAL,
				    CRYPTO_ALG_INTERNAL | CRYPTO_ALG_ASYNC);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);

	ialg = crypto_skcipher_alg(tfm);

	salg = kzalloc(sizeof(*salg), GFP_KERNEL);
	if (!salg) {
		salg = ERR_PTR(-ENOMEM);
		goto out_put_tfm;
	}

	salg->ialg_name = basename;
	alg = &salg->alg;

	err = -ENAMETOOLONG;
	if (snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", algname) >=
	    CRYPTO_MAX_ALG_NAME)
		goto out_free_salg;

	if (snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
		     drvname) >= CRYPTO_MAX_ALG_NAME)
		goto out_free_salg;

	alg->base.cra_flags = CRYPTO_ALG_ASYNC;
	alg->base.cra_priority = ialg->base.cra_priority;
	alg->base.cra_blocksize = ialg->base.cra_blocksize;
	alg->base.cra_alignmask = ialg->base.cra_alignmask;
	alg->base.cra_module = ialg->base.cra_module;
	alg->base.cra_ctxsize = sizeof(struct simd_skcipher_ctx);

	alg->ivsize = ialg->ivsize;
	alg->chunksize = ialg->chunksize;
	alg->min_keysize = ialg->min_keysize;
	alg->max_keysize = ialg->max_keysize;

	alg->init = simd_skcipher_init;
	alg->exit = simd_skcipher_exit;

	alg->setkey = simd_skcipher_setkey;
	alg->encrypt = simd_skcipher_encrypt;
	alg->decrypt = simd_skcipher_decrypt;

	err = crypto_register_skcipher(alg);
	if (err)
		goto out_free_salg;

out_put_tfm:
	crypto_free_skcipher(tfm);
	return salg;

out_free_salg:
	kfree(salg);
	salg = ERR_PTR(err);
	goto out_put_tfm;
}
EXPORT_SYMBOL_GPL(simd_skcipher_create_compat);
struct simd_skcipher_alg *simd_skcipher_create(const char *algname,
					       const char *basename)
{
	char drvname[CRYPTO_MAX_ALG_NAME];

	if (snprintf(drvname, CRYPTO_MAX_ALG_NAME, "simd-%s", basename) >=
	    CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-ENAMETOOLONG);

	return simd_skcipher_create_compat(algname, drvname, basename);
}
EXPORT_SYMBOL_GPL(simd_skcipher_create);
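
/*
 * For illustration (hypothetical names): the two create functions differ only
 * in how the wrapper's driver name is chosen, so
 *
 *	simd_skcipher_create("cbc(aes)", "__cbc-aes-impl");
 *
 * is equivalent to
 *
 *	simd_skcipher_create_compat("cbc(aes)", "simd-__cbc-aes-impl",
 *				    "__cbc-aes-impl");
 */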
void simd_skcipher_free(struct simd_skcipher_alg *salg)
{
	crypto_unregister_skcipher(&salg->alg);
	kfree(salg);
}
EXPORT_SYMBOL_GPL(simd_skcipher_free);
int simd_register_skciphers_compat(struct skcipher_alg *algs, int count,
				   struct simd_skcipher_alg **simd_algs)
{
	int err;
	int i;
	const char *algname;
	const char *drvname;
	const char *basename;
	struct simd_skcipher_alg *simd;

	err = crypto_register_skciphers(algs, count);
	if (err)
		return err;

	for (i = 0; i < count; i++) {
		WARN_ON(strncmp(algs[i].base.cra_name, "__", 2));
		WARN_ON(strncmp(algs[i].base.cra_driver_name, "__", 2));
		algname = algs[i].base.cra_name + 2;
		drvname = algs[i].base.cra_driver_name + 2;
		basename = algs[i].base.cra_driver_name;
		simd = simd_skcipher_create_compat(algname, drvname, basename);
		err = PTR_ERR(simd);
		if (IS_ERR(simd))
			goto err_unregister;
		simd_algs[i] = simd;
	}

	return 0;

err_unregister:
	simd_unregister_skciphers(algs, count, simd_algs);
	return err;
}
EXPORT_SYMBOL_GPL(simd_register_skciphers_compat);
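
/*
 * Registration sketch (illustrative; all identifiers below are hypothetical):
 * the internal algorithms are declared with a "__" prefix on both names and
 * with CRYPTO_ALG_INTERNAL set, and the helper derives the public wrapper
 * names by stripping that prefix:
 *
 *	static struct skcipher_alg example_algs[] = { {
 *		.base.cra_name		= "__cbc(aes)",
 *		.base.cra_driver_name	= "__cbc-aes-example",
 *		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
 *		.base.cra_blocksize	= AES_BLOCK_SIZE,
 *		.min_keysize		= AES_MIN_KEY_SIZE,
 *		.max_keysize		= AES_MAX_KEY_SIZE,
 *		.ivsize			= AES_BLOCK_SIZE,
 *		.setkey			= example_setkey,
 *		.encrypt		= example_cbc_encrypt,
 *		.decrypt		= example_cbc_decrypt,
 *	} };
 *
 *	static struct simd_skcipher_alg *example_simds[ARRAY_SIZE(example_algs)];
 *
 *	err = simd_register_skciphers_compat(example_algs,
 *					     ARRAY_SIZE(example_algs),
 *					     example_simds);
 *
 * This registers "__cbc(aes)" itself plus an async "cbc(aes)" wrapper with
 * driver name "cbc-aes-example".
 */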
void simd_unregister_skciphers(struct skcipher_alg *algs, int count,
			       struct simd_skcipher_alg **simd_algs)
{
	int i;

	crypto_unregister_skciphers(algs, count);

	for (i = 0; i < count; i++) {
		if (simd_algs[i]) {
			simd_skcipher_free(simd_algs[i]);
			simd_algs[i] = NULL;
		}
	}
}
EXPORT_SYMBOL_GPL(simd_unregister_skciphers);
/* AEAD support */

struct simd_aead_alg {
	const char *ialg_name;
	struct aead_alg alg;
};

struct simd_aead_ctx {
	struct cryptd_aead *cryptd_tfm;
};
static int simd_aead_setkey(struct crypto_aead *tfm, const u8 *key,
			    unsigned int key_len)
{
	struct simd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *child = &ctx->cryptd_tfm->base;
	int err;

	/* Forward the key to the internal algorithm held by cryptd. */
	crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(child, crypto_aead_get_flags(tfm) &
				     CRYPTO_TFM_REQ_MASK);
	err = crypto_aead_setkey(child, key, key_len);
	crypto_aead_set_flags(tfm, crypto_aead_get_flags(child) &
				   CRYPTO_TFM_RES_MASK);
	return err;
}
static int simd_aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	struct simd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *child = &ctx->cryptd_tfm->base;

	return crypto_aead_setauthsize(child, authsize);
}
static int simd_aead_encrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct simd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct aead_request *subreq;
	struct crypto_aead *child;

	subreq = aead_request_ctx(req);
	*subreq = *req;

	/* Same dispatch rule as simd_skcipher_encrypt(). */
	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_aead_queued(ctx->cryptd_tfm)))
		child = &ctx->cryptd_tfm->base;
	else
		child = cryptd_aead_child(ctx->cryptd_tfm);

	aead_request_set_tfm(subreq, child);

	return crypto_aead_encrypt(subreq);
}
static int simd_aead_decrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct simd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct aead_request *subreq;
	struct crypto_aead *child;

	subreq = aead_request_ctx(req);
	*subreq = *req;

	/* Same dispatch rule as simd_skcipher_encrypt(). */
	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_aead_queued(ctx->cryptd_tfm)))
		child = &ctx->cryptd_tfm->base;
	else
		child = cryptd_aead_child(ctx->cryptd_tfm);

	aead_request_set_tfm(subreq, child);

	return crypto_aead_decrypt(subreq);
}
static void simd_aead_exit(struct crypto_aead *tfm)
{
	struct simd_aead_ctx *ctx = crypto_aead_ctx(tfm);

	cryptd_free_aead(ctx->cryptd_tfm);
}
static int simd_aead_init(struct crypto_aead *tfm)
{
	struct simd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct cryptd_aead *cryptd_tfm;
	struct simd_aead_alg *salg;
	struct aead_alg *alg;
	unsigned reqsize;

	alg = crypto_aead_alg(tfm);
	salg = container_of(alg, struct simd_aead_alg, alg);

	cryptd_tfm = cryptd_alloc_aead(salg->ialg_name, CRYPTO_ALG_INTERNAL,
				       CRYPTO_ALG_INTERNAL);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);

	ctx->cryptd_tfm = cryptd_tfm;

	/* As in simd_skcipher_init(), size the subrequest for either path. */
	reqsize = crypto_aead_reqsize(cryptd_aead_child(cryptd_tfm));
	reqsize = max(reqsize, crypto_aead_reqsize(&cryptd_tfm->base));
	reqsize += sizeof(struct aead_request);

	crypto_aead_set_reqsize(tfm, reqsize);

	return 0;
}
struct simd_aead_alg *simd_aead_create_compat(const char *algname,
					      const char *drvname,
					      const char *basename)
{
	struct simd_aead_alg *salg;
	struct crypto_aead *tfm;
	struct aead_alg *ialg;
	struct aead_alg *alg;
	int err;

	/* Look up the internal algorithm so its properties can be copied. */
	tfm = crypto_alloc_aead(basename, CRYPTO_ALG_INTERNAL,
				CRYPTO_ALG_INTERNAL | CRYPTO_ALG_ASYNC);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);

	ialg = crypto_aead_alg(tfm);

	salg = kzalloc(sizeof(*salg), GFP_KERNEL);
	if (!salg) {
		salg = ERR_PTR(-ENOMEM);
		goto out_put_tfm;
	}

	salg->ialg_name = basename;
	alg = &salg->alg;

	err = -ENAMETOOLONG;
	if (snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", algname) >=
	    CRYPTO_MAX_ALG_NAME)
		goto out_free_salg;

	if (snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
		     drvname) >= CRYPTO_MAX_ALG_NAME)
		goto out_free_salg;

	alg->base.cra_flags = CRYPTO_ALG_ASYNC;
	alg->base.cra_priority = ialg->base.cra_priority;
	alg->base.cra_blocksize = ialg->base.cra_blocksize;
	alg->base.cra_alignmask = ialg->base.cra_alignmask;
	alg->base.cra_module = ialg->base.cra_module;
	alg->base.cra_ctxsize = sizeof(struct simd_aead_ctx);

	alg->ivsize = ialg->ivsize;
	alg->maxauthsize = ialg->maxauthsize;
	alg->chunksize = ialg->chunksize;

	alg->init = simd_aead_init;
	alg->exit = simd_aead_exit;

	alg->setkey = simd_aead_setkey;
	alg->setauthsize = simd_aead_setauthsize;
	alg->encrypt = simd_aead_encrypt;
	alg->decrypt = simd_aead_decrypt;

	err = crypto_register_aead(alg);
	if (err)
		goto out_free_salg;

out_put_tfm:
	crypto_free_aead(tfm);
	return salg;

out_free_salg:
	kfree(salg);
	salg = ERR_PTR(err);
	goto out_put_tfm;
}
EXPORT_SYMBOL_GPL(simd_aead_create_compat);
struct simd_aead_alg *simd_aead_create(const char *algname,
				       const char *basename)
{
	char drvname[CRYPTO_MAX_ALG_NAME];

	if (snprintf(drvname, CRYPTO_MAX_ALG_NAME, "simd-%s", basename) >=
	    CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-ENAMETOOLONG);

	return simd_aead_create_compat(algname, drvname, basename);
}
EXPORT_SYMBOL_GPL(simd_aead_create);
void simd_aead_free(struct simd_aead_alg *salg)
{
	crypto_unregister_aead(&salg->alg);
	kfree(salg);
}
EXPORT_SYMBOL_GPL(simd_aead_free);
int simd_register_aeads_compat(struct aead_alg *algs, int count,
			       struct simd_aead_alg **simd_algs)
{
	int err;
	int i;
	const char *algname;
	const char *drvname;
	const char *basename;
	struct simd_aead_alg *simd;

	err = crypto_register_aeads(algs, count);
	if (err)
		return err;

	for (i = 0; i < count; i++) {
		WARN_ON(strncmp(algs[i].base.cra_name, "__", 2));
		WARN_ON(strncmp(algs[i].base.cra_driver_name, "__", 2));
		algname = algs[i].base.cra_name + 2;
		drvname = algs[i].base.cra_driver_name + 2;
		basename = algs[i].base.cra_driver_name;
		simd = simd_aead_create_compat(algname, drvname, basename);
		err = PTR_ERR(simd);
		if (IS_ERR(simd))
			goto err_unregister;
		simd_algs[i] = simd;
	}

	return 0;

err_unregister:
	simd_unregister_aeads(algs, count, simd_algs);
	return err;
}
EXPORT_SYMBOL_GPL(simd_register_aeads_compat);
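
/*
 * The AEAD variant is used the same way (hypothetical identifiers again):
 *
 *	static struct aead_alg example_aeads[] = { {
 *		.base.cra_name		= "__gcm(aes)",
 *		.base.cra_driver_name	= "__gcm-aes-example",
 *		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
 *		.base.cra_blocksize	= 1,
 *		.ivsize			= GCM_AES_IV_SIZE,
 *		.maxauthsize		= AES_BLOCK_SIZE,
 *		.setkey			= example_gcm_setkey,
 *		.setauthsize		= example_gcm_setauthsize,
 *		.encrypt		= example_gcm_encrypt,
 *		.decrypt		= example_gcm_decrypt,
 *	} };
 *
 *	static struct simd_aead_alg *example_simds[ARRAY_SIZE(example_aeads)];
 *
 *	err = simd_register_aeads_compat(example_aeads,
 *					 ARRAY_SIZE(example_aeads),
 *					 example_simds);
 */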
void simd_unregister_aeads(struct aead_alg *algs, int count,
			   struct simd_aead_alg **simd_algs)
{
	int i;

	crypto_unregister_aeads(algs, count);

	for (i = 0; i < count; i++) {
		if (simd_algs[i]) {
			simd_aead_free(simd_algs[i]);
			simd_algs[i] = NULL;
		}
	}
}
EXPORT_SYMBOL_GPL(simd_unregister_aeads);
MODULE_LICENSE("GPL");