// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * AEAD: Authenticated Encryption with Associated Data
 *
 * This file provides API support for AEAD algorithms.
 *
 * Copyright (c) 2007-2015 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/internal/geniv.h>
#include <crypto/internal/rng.h>
#include <crypto/null.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/cryptouser.h>
#include <linux/compiler.h>
#include <net/netlink.h>

#include "internal.h"

static int setkey_unaligned(struct crypto_aead *tfm, const u8 *key,
			    unsigned int keylen)
{
	unsigned long alignmask = crypto_aead_alignmask(tfm);
	int ret;
	u8 *buffer, *alignbuffer;
	unsigned long absize;

	absize = keylen + alignmask;
	buffer = kmalloc(absize, GFP_ATOMIC);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	ret = crypto_aead_alg(tfm)->setkey(tfm, alignbuffer, keylen);
	memset(alignbuffer, 0, keylen);
	kfree(buffer);
	return ret;
}
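
/*
 * Worked numbers for the alignment dance above (illustrative only): with
 * an alignmask of 7 (8-byte alignment) and a 16-byte key, absize is 23,
 * so the bounce buffer always has room for the key at the next aligned
 * address.  If kmalloc() returned 0x1003, ALIGN(0x1003, 8) yields
 * alignbuffer == 0x1008, and 0x1008 + 16 <= 0x1003 + 23.
 */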

int crypto_aead_setkey(struct crypto_aead *tfm,
		       const u8 *key, unsigned int keylen)
{
	unsigned long alignmask = crypto_aead_alignmask(tfm);
	int err;

	if ((unsigned long)key & alignmask)
		err = setkey_unaligned(tfm, key, keylen);
	else
		err = crypto_aead_alg(tfm)->setkey(tfm, key, keylen);

	if (unlikely(err)) {
		crypto_aead_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
		return err;
	}

	crypto_aead_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_aead_setkey);
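
/*
 * Keying sketch for API users (illustrative; "tfm", "key" and "keylen"
 * are assumed to come from the caller).  Note that a failed setkey
 * leaves CRYPTO_TFM_NEED_KEY set, so subsequent encrypt/decrypt calls
 * fail with -ENOKEY until a later setkey succeeds:
 *
 *	err = crypto_aead_setkey(tfm, key, keylen);
 *	if (err)
 *		return err;
 */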

int crypto_aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	int err;

	if (authsize > crypto_aead_maxauthsize(tfm))
		return -EINVAL;

	if (crypto_aead_alg(tfm)->setauthsize) {
		err = crypto_aead_alg(tfm)->setauthsize(tfm, authsize);
		if (err)
			return err;
	}

	tfm->authsize = authsize;
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_aead_setauthsize);
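
/*
 * Sketch of truncating the authentication tag where the algorithm allows
 * it (values illustrative; e.g. a GCM transform with maxauthsize 16 can
 * typically be told to emit a 12-byte tag).  This must happen before any
 * requests are issued on the transform:
 *
 *	if (crypto_aead_maxauthsize(tfm) < 12)
 *		return -EINVAL;
 *	err = crypto_aead_setauthsize(tfm, 12);
 */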

int crypto_aead_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_alg *alg = aead->base.__crt_alg;
	unsigned int cryptlen = req->cryptlen;
	int ret;

	crypto_stats_get(alg);
	if (crypto_aead_get_flags(aead) & CRYPTO_TFM_NEED_KEY)
		ret = -ENOKEY;
	else
		ret = crypto_aead_alg(aead)->encrypt(req);
	crypto_stats_aead_encrypt(cryptlen, alg, ret);
	return ret;
}
EXPORT_SYMBOL_GPL(crypto_aead_encrypt);

int crypto_aead_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_alg *alg = aead->base.__crt_alg;
	unsigned int cryptlen = req->cryptlen;
	int ret;

	crypto_stats_get(alg);
	if (crypto_aead_get_flags(aead) & CRYPTO_TFM_NEED_KEY)
		ret = -ENOKEY;
	else if (req->cryptlen < crypto_aead_authsize(aead))
		ret = -EINVAL;
	else
		ret = crypto_aead_alg(aead)->decrypt(req);
	crypto_stats_aead_decrypt(cryptlen, alg, ret);
	return ret;
}
EXPORT_SYMBOL_GPL(crypto_aead_decrypt);
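
/*
 * End-to-end request sketch, assuming an already allocated and keyed
 * transform (see crypto_alloc_aead() and crypto_aead_setkey()) plus
 * <crypto/aead.h> and <linux/scatterlist.h>.  The scatterlist follows
 * the AEAD convention: associated data first, then the text, with the
 * destination leaving room for the tag on encryption.  All "example_"
 * names are hypothetical.
 *
 *	static int example_seal(struct crypto_aead *tfm, u8 *buf,
 *				unsigned int assoclen, unsigned int ptlen,
 *				u8 *iv)
 *	{
 *		DECLARE_CRYPTO_WAIT(wait);
 *		struct scatterlist sg;
 *		struct aead_request *req;
 *		int err;
 *
 *		// buf holds assoclen bytes of AD, ptlen bytes of plaintext
 *		// and crypto_aead_authsize(tfm) spare bytes for the tag.
 *		sg_init_one(&sg, buf,
 *			    assoclen + ptlen + crypto_aead_authsize(tfm));
 *
 *		req = aead_request_alloc(tfm, GFP_KERNEL);
 *		if (!req)
 *			return -ENOMEM;
 *
 *		aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *					  crypto_req_done, &wait);
 *		aead_request_set_ad(req, assoclen);
 *		aead_request_set_crypt(req, &sg, &sg, ptlen, iv);
 *
 *		err = crypto_wait_req(crypto_aead_encrypt(req), &wait);
 *		aead_request_free(req);
 *		return err;
 *	}
 */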

static void crypto_aead_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_aead *aead = __crypto_aead_cast(tfm);
	struct aead_alg *alg = crypto_aead_alg(aead);

	alg->exit(aead);
}

static int crypto_aead_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_aead *aead = __crypto_aead_cast(tfm);
	struct aead_alg *alg = crypto_aead_alg(aead);

	crypto_aead_set_flags(aead, CRYPTO_TFM_NEED_KEY);

	aead->authsize = alg->maxauthsize;

	if (alg->exit)
		aead->base.exit = crypto_aead_exit_tfm;

	if (alg->init)
		return alg->init(aead);

	return 0;
}

#ifdef CONFIG_NET
static int crypto_aead_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_aead raead;
	struct aead_alg *aead = container_of(alg, struct aead_alg, base);

	memset(&raead, 0, sizeof(raead));

	strscpy(raead.type, "aead", sizeof(raead.type));
	strscpy(raead.geniv, "<none>", sizeof(raead.geniv));

	raead.blocksize = alg->cra_blocksize;
	raead.maxauthsize = aead->maxauthsize;
	raead.ivsize = aead->ivsize;

	return nla_put(skb, CRYPTOCFGA_REPORT_AEAD, sizeof(raead), &raead);
}
#else
static int crypto_aead_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	return -ENOSYS;
}
#endif

static void crypto_aead_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_aead_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct aead_alg *aead = container_of(alg, struct aead_alg, base);

	seq_printf(m, "type         : aead\n");
	seq_printf(m, "async        : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
					     "yes" : "no");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "ivsize       : %u\n", aead->ivsize);
	seq_printf(m, "maxauthsize  : %u\n", aead->maxauthsize);
	seq_printf(m, "geniv        : <none>\n");
}

static void crypto_aead_free_instance(struct crypto_instance *inst)
{
	struct aead_instance *aead = aead_instance(inst);

	if (!aead->free) {
		inst->tmpl->free(inst);
		return;
	}

	aead->free(aead);
}

static const struct crypto_type crypto_aead_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_aead_init_tfm,
	.free = crypto_aead_free_instance,
#ifdef CONFIG_PROC_FS
	.show = crypto_aead_show,
#endif
	.report = crypto_aead_report,
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_AEAD,
	.tfmsize = offsetof(struct crypto_aead, base),
};

static int aead_geniv_setkey(struct crypto_aead *tfm,
			     const u8 *key, unsigned int keylen)
{
	struct aead_geniv_ctx *ctx = crypto_aead_ctx(tfm);

	return crypto_aead_setkey(ctx->child, key, keylen);
}

static int aead_geniv_setauthsize(struct crypto_aead *tfm,
				  unsigned int authsize)
{
	struct aead_geniv_ctx *ctx = crypto_aead_ctx(tfm);

	return crypto_aead_setauthsize(ctx->child, authsize);
}

struct aead_instance *aead_geniv_alloc(struct crypto_template *tmpl,
				       struct rtattr **tb, u32 type, u32 mask)
{
	const char *name;
	struct crypto_aead_spawn *spawn;
	struct crypto_attr_type *algt;
	struct aead_instance *inst;
	struct aead_alg *alg;
	unsigned int ivsize;
	unsigned int maxauthsize;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return ERR_CAST(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return ERR_PTR(-EINVAL);

	name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(name))
		return ERR_CAST(name);

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return ERR_PTR(-ENOMEM);

	spawn = aead_instance_ctx(inst);

	/* Ignore async algorithms if necessary. */
	mask |= crypto_requires_sync(algt->type, algt->mask);

	crypto_set_aead_spawn(spawn, aead_crypto_instance(inst));
	err = crypto_grab_aead(spawn, name, type, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_aead_alg(spawn);

	ivsize = crypto_aead_alg_ivsize(alg);
	maxauthsize = crypto_aead_alg_maxauthsize(alg);

	err = -EINVAL;
	if (ivsize < sizeof(u64))
		goto err_drop_alg;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "%s(%s)", tmpl->name, alg->base.cra_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto err_drop_alg;
	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "%s(%s)", tmpl->name, alg->base.cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto err_drop_alg;

	inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = alg->base.cra_blocksize;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;
	inst->alg.base.cra_ctxsize = sizeof(struct aead_geniv_ctx);

	inst->alg.setkey = aead_geniv_setkey;
	inst->alg.setauthsize = aead_geniv_setauthsize;

	inst->alg.ivsize = ivsize;
	inst->alg.maxauthsize = maxauthsize;

out:
	return inst;

err_drop_alg:
	crypto_drop_aead(spawn);
err_free_inst:
	kfree(inst);
	inst = ERR_PTR(err);
	goto out;
}
EXPORT_SYMBOL_GPL(aead_geniv_alloc);

void aead_geniv_free(struct aead_instance *inst)
{
	crypto_drop_aead(aead_instance_ctx(inst));
	kfree(inst);
}
EXPORT_SYMBOL_GPL(aead_geniv_free);

int aead_init_geniv(struct crypto_aead *aead)
{
	struct aead_geniv_ctx *ctx = crypto_aead_ctx(aead);
	struct aead_instance *inst = aead_alg_instance(aead);
	struct crypto_aead *child;
	int err;

	spin_lock_init(&ctx->lock);

	err = crypto_get_default_rng();
	if (err)
		goto out;

	err = crypto_rng_get_bytes(crypto_default_rng, ctx->salt,
				   crypto_aead_ivsize(aead));
	crypto_put_default_rng();
	if (err)
		goto out;

	ctx->sknull = crypto_get_default_null_skcipher();
	err = PTR_ERR(ctx->sknull);
	if (IS_ERR(ctx->sknull))
		goto out;

	child = crypto_spawn_aead(aead_instance_ctx(inst));
	err = PTR_ERR(child);
	if (IS_ERR(child))
		goto drop_null;

	ctx->child = child;
	crypto_aead_set_reqsize(aead, crypto_aead_reqsize(child) +
				      sizeof(struct aead_request));

	err = 0;

out:
	return err;

drop_null:
	crypto_put_default_null_skcipher();
	goto out;
}
EXPORT_SYMBOL_GPL(aead_init_geniv);

void aead_exit_geniv(struct crypto_aead *tfm)
{
	struct aead_geniv_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
	crypto_put_default_null_skcipher();
}
EXPORT_SYMBOL_GPL(aead_exit_geniv);
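
/*
 * Sketch of how an IV-generator template ties the helpers above together
 * (modelled loosely on a seqiv-style template; "example_geniv_create"
 * and the encrypt/decrypt callbacks are hypothetical):
 *
 *	static int example_geniv_create(struct crypto_template *tmpl,
 *					struct rtattr **tb)
 *	{
 *		struct aead_instance *inst;
 *		int err;
 *
 *		inst = aead_geniv_alloc(tmpl, tb, 0, 0);
 *		if (IS_ERR(inst))
 *			return PTR_ERR(inst);
 *
 *		inst->alg.encrypt = example_geniv_encrypt;
 *		inst->alg.decrypt = example_geniv_decrypt;
 *		inst->alg.init = aead_init_geniv;
 *		inst->alg.exit = aead_exit_geniv;
 *		// reserve room for the salt behind struct aead_geniv_ctx
 *		inst->alg.base.cra_ctxsize += inst->alg.ivsize;
 *
 *		err = aead_register_instance(tmpl, inst);
 *		if (err)
 *			aead_geniv_free(inst);
 *		return err;
 *	}
 */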

int crypto_grab_aead(struct crypto_aead_spawn *spawn, const char *name,
		     u32 type, u32 mask)
{
	spawn->base.frontend = &crypto_aead_type;
	return crypto_grab_spawn(&spawn->base, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_aead);

struct crypto_aead *crypto_alloc_aead(const char *alg_name, u32 type, u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_aead_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_aead);
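
/*
 * Allocation sketch for API users ("gcm(aes)" is just a common example
 * name; error handling abbreviated, "key"/"keylen" assumed):
 *
 *	struct crypto_aead *tfm;
 *	int err;
 *
 *	tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_aead_setkey(tfm, key, keylen);
 *	if (!err)
 *		err = crypto_aead_setauthsize(tfm, 16);
 *	...
 *	crypto_free_aead(tfm);
 */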

static int aead_prepare_alg(struct aead_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (max3(alg->maxauthsize, alg->ivsize, alg->chunksize) >
	    PAGE_SIZE / 8)
		return -EINVAL;

	if (!alg->chunksize)
		alg->chunksize = base->cra_blocksize;

	base->cra_type = &crypto_aead_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_AEAD;

	return 0;
}

int crypto_register_aead(struct aead_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = aead_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_aead);

void crypto_unregister_aead(struct aead_alg *alg)
{
	crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_aead);
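
/*
 * Registration sketch for an algorithm driver (all "example_" symbols
 * are hypothetical; the .base fields shown are roughly the minimum a
 * driver fills in, here with GCM-like parameters):
 *
 *	static struct aead_alg example_gcm_alg = {
 *		.base = {
 *			.cra_name		= "gcm(aes)",
 *			.cra_driver_name	= "gcm-aes-example",
 *			.cra_priority		= 100,
 *			.cra_blocksize		= 1,
 *			.cra_ctxsize		= sizeof(struct example_gcm_ctx),
 *			.cra_module		= THIS_MODULE,
 *		},
 *		.ivsize		= 12,
 *		.maxauthsize	= 16,
 *		.setkey		= example_gcm_setkey,
 *		.setauthsize	= example_gcm_setauthsize,
 *		.encrypt	= example_gcm_encrypt,
 *		.decrypt	= example_gcm_decrypt,
 *	};
 *
 *	static int __init example_mod_init(void)
 *	{
 *		return crypto_register_aead(&example_gcm_alg);
 *	}
 *
 *	static void __exit example_mod_exit(void)
 *	{
 *		crypto_unregister_aead(&example_gcm_alg);
 *	}
 */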

int crypto_register_aeads(struct aead_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_aead(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_aead(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_aeads);

void crypto_unregister_aeads(struct aead_alg *algs, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_aead(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_aeads);
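
/*
 * Drivers exposing several modes usually register them as an array so a
 * mid-array failure unwinds the earlier registrations automatically
 * ("example_aead_algs" is hypothetical):
 *
 *	static struct aead_alg example_aead_algs[] = { ... };
 *
 *	err = crypto_register_aeads(example_aead_algs,
 *				    ARRAY_SIZE(example_aead_algs));
 *	...
 *	crypto_unregister_aeads(example_aead_algs,
 *				ARRAY_SIZE(example_aead_algs));
 */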

int aead_register_instance(struct crypto_template *tmpl,
			   struct aead_instance *inst)
{
	int err;

	err = aead_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, aead_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(aead_register_instance);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Authenticated Encryption with Associated Data (AEAD)");