/*
 * AEAD: Authenticated Encryption with Associated Data
 *
 * This file provides API support for AEAD algorithms.
 *
 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/internal/aead.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/rtnetlink.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/cryptouser.h>
#include <net/netlink.h>

#include "internal.h"

static int aead_null_givencrypt(struct aead_givcrypt_request *req);
static int aead_null_givdecrypt(struct aead_givcrypt_request *req);

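/*
 * Helper for keys that violate the algorithm's alignment mask: the key is
 * copied into a temporary buffer aligned to alignmask + 1 before the
 * algorithm's setkey is invoked, and the copy is wiped before being freed.
 */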
static int setkey_unaligned(struct crypto_aead *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct old_aead_alg *aead = crypto_old_aead_alg(tfm);
	unsigned long alignmask = crypto_aead_alignmask(tfm);
	int ret;
	u8 *buffer, *alignbuffer;
	unsigned long absize;

	absize = keylen + alignmask;
	buffer = kmalloc(absize, GFP_ATOMIC);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	ret = aead->setkey(tfm, alignbuffer, keylen);
	memset(alignbuffer, 0, keylen);
	kfree(buffer);
	return ret;
}

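/*
 * Set the authentication key on the underlying old-style algorithm,
 * bouncing through setkey_unaligned() when the caller's key pointer does
 * not satisfy the algorithm's alignment mask.
 */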
int crypto_aead_setkey(struct crypto_aead *tfm,
		       const u8 *key, unsigned int keylen)
{
	struct old_aead_alg *aead = crypto_old_aead_alg(tfm);
	unsigned long alignmask = crypto_aead_alignmask(tfm);

	tfm = tfm->child;

	if ((unsigned long)key & alignmask)
		return setkey_unaligned(tfm, key, keylen);

	return aead->setkey(tfm, key, keylen);
}
EXPORT_SYMBOL_GPL(crypto_aead_setkey);

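/*
 * Change the ICV (tag) size used by this transform.  The request is
 * rejected if it exceeds the algorithm's maxauthsize; otherwise it is
 * passed to the algorithm's optional setauthsize hook and recorded on
 * both the wrapper and the child transform.
 */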
int crypto_aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	int err;

	if (authsize > crypto_old_aead_alg(tfm)->maxauthsize)
		return -EINVAL;

	if (crypto_old_aead_alg(tfm)->setauthsize) {
		err = crypto_old_aead_alg(tfm)->setauthsize(
			tfm->child, authsize);
		if (err)
			return err;
	}

	tfm->child->authsize = authsize;
	tfm->authsize = authsize;
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_aead_setauthsize);

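/*
 * Per-request scratch space for the old/new API compatibility path: room
 * for the fast-forwarded source and destination scatterlists plus the
 * subrequest that is handed to the old-style implementation.  It is
 * accounted for in crypto_aead_reqsize() below.
 */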
struct aead_old_request {
	struct scatterlist srcbuf[2];
	struct scatterlist dstbuf[2];
	struct aead_request subreq;
};

unsigned int crypto_aead_reqsize(struct crypto_aead *tfm)
{
	return tfm->reqsize + sizeof(struct aead_old_request);
}
EXPORT_SYMBOL_GPL(crypto_aead_reqsize);

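/*
 * Translate a new-style request (associated data placed in front of the
 * src/dst payload) into the old-style layout: both scatterlists are
 * fast-forwarded past assoclen + cryptoff bytes and the associated data
 * is supplied separately via aead_request_set_assoc() on a subrequest.
 * Requests already marked as old-style are passed through unchanged.
 */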
static int old_crypt(struct aead_request *req,
		     int (*crypt)(struct aead_request *req))
{
	struct aead_old_request *nreq = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct scatterlist *src, *dst;

	if (req->old)
		return crypt(req);

	src = scatterwalk_ffwd(nreq->srcbuf, req->src,
			       req->assoclen + req->cryptoff);
	dst = scatterwalk_ffwd(nreq->dstbuf, req->dst,
			       req->assoclen + req->cryptoff);

	aead_request_set_tfm(&nreq->subreq, aead);
	aead_request_set_callback(&nreq->subreq, aead_request_flags(req),
				  req->base.complete, req->base.data);
	aead_request_set_crypt(&nreq->subreq, src, dst, req->cryptlen,
			       req->iv);
	aead_request_set_assoc(&nreq->subreq, req->src, req->assoclen);

	return crypt(&nreq->subreq);
}

static int old_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct old_aead_alg *alg = crypto_old_aead_alg(aead);

	return old_crypt(req, alg->encrypt);
}

static int old_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct old_aead_alg *alg = crypto_old_aead_alg(aead);

	return old_crypt(req, alg->decrypt);
}

static int no_givcrypt(struct aead_givcrypt_request *req)
{
	return -ENOSYS;
}

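/*
 * Initialise a crypto_aead transform backed by an old-style AEAD
 * algorithm: sanity-check maxauthsize/ivsize, install the compatibility
 * encrypt/decrypt handlers and pick IV-generation callbacks depending on
 * whether the algorithm carries an IV at all.
 */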
static int crypto_aead_init_tfm(struct crypto_tfm *tfm)
{
	struct old_aead_alg *alg = &tfm->__crt_alg->cra_aead;
	struct crypto_aead *crt = __crypto_aead_cast(tfm);

	if (max(alg->maxauthsize, alg->ivsize) > PAGE_SIZE / 8)
		return -EINVAL;

	crt->encrypt = old_encrypt;
	crt->decrypt = old_decrypt;
	if (alg->ivsize) {
		crt->givencrypt = alg->givencrypt ?: no_givcrypt;
		crt->givdecrypt = alg->givdecrypt ?: no_givcrypt;
	} else {
		crt->givencrypt = aead_null_givencrypt;
		crt->givdecrypt = aead_null_givdecrypt;
	}
	crt->child = __crypto_aead_cast(tfm);
	crt->ivsize = alg->ivsize;
	crt->authsize = alg->maxauthsize;

	return 0;
}

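/*
 * Report algorithm parameters (block size, maximum tag size, IV size and
 * IV generator) over the crypto_user netlink interface when CONFIG_NET
 * is enabled; otherwise the operation is unsupported.
 */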
#ifdef CONFIG_NET
static int crypto_aead_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_aead raead;
	struct old_aead_alg *aead = &alg->cra_aead;

	strncpy(raead.type, "aead", sizeof(raead.type));
	strncpy(raead.geniv, aead->geniv ?: "<built-in>", sizeof(raead.geniv));

	raead.blocksize = alg->cra_blocksize;
	raead.maxauthsize = aead->maxauthsize;
	raead.ivsize = aead->ivsize;

	if (nla_put(skb, CRYPTOCFGA_REPORT_AEAD,
		    sizeof(struct crypto_report_aead), &raead))
		goto nla_put_failure;
	return 0;

nla_put_failure:
	return -EMSGSIZE;
}
#else
static int crypto_aead_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	return -ENOSYS;
}
#endif

static void crypto_aead_show(struct seq_file *m, struct crypto_alg *alg)
	__attribute__ ((unused));
static void crypto_aead_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct old_aead_alg *aead = &alg->cra_aead;

	seq_printf(m, "type         : aead\n");
	seq_printf(m, "async        : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
					     "yes" : "no");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "ivsize       : %u\n", aead->ivsize);
	seq_printf(m, "maxauthsize  : %u\n", aead->maxauthsize);
	seq_printf(m, "geniv        : %s\n", aead->geniv ?: "<built-in>");
}

const struct crypto_type crypto_aead_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_aead_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_aead_show,
#endif
	.report = crypto_aead_report,
	.lookup = crypto_lookup_aead,
	.maskclear = ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV),
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_AEAD,
	.tfmsize = offsetof(struct crypto_aead, base),
};
EXPORT_SYMBOL_GPL(crypto_aead_type);

static int aead_null_givencrypt(struct aead_givcrypt_request *req)
{
	return crypto_aead_encrypt(&req->areq);
}

static int aead_null_givdecrypt(struct aead_givcrypt_request *req)
{
	return crypto_aead_decrypt(&req->areq);
}

#ifdef CONFIG_NET
static int crypto_nivaead_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_aead raead;
	struct old_aead_alg *aead = &alg->cra_aead;

	strncpy(raead.type, "nivaead", sizeof(raead.type));
	strncpy(raead.geniv, aead->geniv, sizeof(raead.geniv));

	raead.blocksize = alg->cra_blocksize;
	raead.maxauthsize = aead->maxauthsize;
	raead.ivsize = aead->ivsize;

	if (nla_put(skb, CRYPTOCFGA_REPORT_AEAD,
		    sizeof(struct crypto_report_aead), &raead))
		goto nla_put_failure;
	return 0;

nla_put_failure:
	return -EMSGSIZE;
}
#else
static int crypto_nivaead_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	return -ENOSYS;
}
#endif

static void crypto_nivaead_show(struct seq_file *m, struct crypto_alg *alg)
	__attribute__ ((unused));
static void crypto_nivaead_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct old_aead_alg *aead = &alg->cra_aead;

	seq_printf(m, "type         : nivaead\n");
	seq_printf(m, "async        : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
					     "yes" : "no");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "ivsize       : %u\n", aead->ivsize);
	seq_printf(m, "maxauthsize  : %u\n", aead->maxauthsize);
	seq_printf(m, "geniv        : %s\n", aead->geniv);
}

const struct crypto_type crypto_nivaead_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_aead_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_nivaead_show,
#endif
	.report = crypto_nivaead_report,
	.maskclear = ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV),
	.maskset = CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV,
	.type = CRYPTO_ALG_TYPE_AEAD,
	.tfmsize = offsetof(struct crypto_aead, base),
};
EXPORT_SYMBOL_GPL(crypto_nivaead_type);

static int crypto_grab_nivaead(struct crypto_aead_spawn *spawn,
			       const char *name, u32 type, u32 mask)
{
	spawn->base.frontend = &crypto_nivaead_type;
	return crypto_grab_spawn(&spawn->base, name, type, mask);
}

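/*
 * Construct a template instance that wraps an IV-generator-requiring
 * (niv) AEAD algorithm, copying its parameters into the new instance.
 * When CRYPTO_ALG_GENIV is set we are building the algorithm's default
 * IV generator, so the template name is elided from the instance name.
 */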
struct crypto_instance *aead_geniv_alloc(struct crypto_template *tmpl,
					 struct rtattr **tb, u32 type,
					 u32 mask)
{
	const char *name;
	struct crypto_aead_spawn *spawn;
	struct crypto_attr_type *algt;
	struct crypto_instance *inst;
	struct crypto_alg *alg;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return ERR_CAST(algt);

	if ((algt->type ^ (CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_GENIV)) &
	    algt->mask)
		return ERR_PTR(-EINVAL);

	name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(name))
		return ERR_CAST(name);

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return ERR_PTR(-ENOMEM);

	spawn = crypto_instance_ctx(inst);

	/* Ignore async algorithms if necessary. */
	mask |= crypto_requires_sync(algt->type, algt->mask);

	crypto_set_aead_spawn(spawn, inst);
	err = crypto_grab_nivaead(spawn, name, type, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_aead_spawn_alg(spawn);

	err = -EINVAL;
	if (!alg->cra_aead.ivsize)
		goto err_drop_alg;

	/*
	 * This is only true if we're constructing an algorithm with its
	 * default IV generator.  For the default generator we elide the
	 * template name and double-check the IV generator.
	 */
	if (algt->mask & CRYPTO_ALG_GENIV) {
		if (strcmp(tmpl->name, alg->cra_aead.geniv))
			goto err_drop_alg;

		memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);
		memcpy(inst->alg.cra_driver_name, alg->cra_driver_name,
		       CRYPTO_MAX_ALG_NAME);
	} else {
		err = -ENAMETOOLONG;
		if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
			     "%s(%s)", tmpl->name, alg->cra_name) >=
		    CRYPTO_MAX_ALG_NAME)
			goto err_drop_alg;
		if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
			     "%s(%s)", tmpl->name, alg->cra_driver_name) >=
		    CRYPTO_MAX_ALG_NAME)
			goto err_drop_alg;
	}

	inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_GENIV;
	inst->alg.cra_flags |= alg->cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.cra_priority = alg->cra_priority;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;
	inst->alg.cra_type = &crypto_aead_type;

	inst->alg.cra_aead.ivsize = alg->cra_aead.ivsize;
	inst->alg.cra_aead.maxauthsize = alg->cra_aead.maxauthsize;
	inst->alg.cra_aead.geniv = alg->cra_aead.geniv;

	inst->alg.cra_aead.setkey = alg->cra_aead.setkey;
	inst->alg.cra_aead.setauthsize = alg->cra_aead.setauthsize;
	inst->alg.cra_aead.encrypt = alg->cra_aead.encrypt;
	inst->alg.cra_aead.decrypt = alg->cra_aead.decrypt;

out:
	return inst;

err_drop_alg:
	crypto_drop_aead(spawn);
err_free_inst:
	kfree(inst);
	inst = ERR_PTR(err);
	goto out;
}
EXPORT_SYMBOL_GPL(aead_geniv_alloc);

void aead_geniv_free(struct crypto_instance *inst)
{
	crypto_drop_aead(crypto_instance_ctx(inst));
	kfree(inst);
}
EXPORT_SYMBOL_GPL(aead_geniv_free);

int aead_geniv_init(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct crypto_aead *child;
	struct crypto_aead *aead;

	aead = __crypto_aead_cast(tfm);

	child = crypto_spawn_aead(crypto_instance_ctx(inst));
	if (IS_ERR(child))
		return PTR_ERR(child);

	aead->child = child;
	aead->reqsize += crypto_aead_reqsize(child);

	return 0;
}
EXPORT_SYMBOL_GPL(aead_geniv_init);

void aead_geniv_exit(struct crypto_tfm *tfm)
{
	crypto_free_aead(__crypto_aead_cast(tfm)->child);
}
EXPORT_SYMBOL_GPL(aead_geniv_exit);

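/*
 * Build the default IV generator for an niv AEAD algorithm by
 * instantiating the template named in cra_aead.geniv around it.  On
 * success -EAGAIN is returned so that the lookup is redone and finds
 * the freshly registered geniv instance.
 */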
static int crypto_nivaead_default(struct crypto_alg *alg, u32 type, u32 mask)
{
	struct rtattr *tb[3];
	struct {
		struct rtattr attr;
		struct crypto_attr_type data;
	} ptype;
	struct {
		struct rtattr attr;
		struct crypto_attr_alg data;
	} palg;
	struct crypto_template *tmpl;
	struct crypto_instance *inst;
	struct crypto_alg *larval;
	const char *geniv;
	int err;

	larval = crypto_larval_lookup(alg->cra_driver_name,
				      CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_GENIV,
				      CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
	err = PTR_ERR(larval);
	if (IS_ERR(larval))
		goto out;

	err = -EAGAIN;
	if (!crypto_is_larval(larval))
		goto drop_larval;

	ptype.attr.rta_len = sizeof(ptype);
	ptype.attr.rta_type = CRYPTOA_TYPE;
	ptype.data.type = type | CRYPTO_ALG_GENIV;
	/* GENIV tells the template that we're making a default geniv. */
	ptype.data.mask = mask | CRYPTO_ALG_GENIV;
	tb[0] = &ptype.attr;

	palg.attr.rta_len = sizeof(palg);
	palg.attr.rta_type = CRYPTOA_ALG;
	/* Must use the exact name to locate ourselves. */
	memcpy(palg.data.name, alg->cra_driver_name, CRYPTO_MAX_ALG_NAME);
	tb[1] = &palg.attr;

	tb[2] = NULL;

	geniv = alg->cra_aead.geniv;

	tmpl = crypto_lookup_template(geniv);
	err = -ENOENT;
	if (!tmpl)
		goto kill_larval;

	inst = tmpl->alloc(tb);
	err = PTR_ERR(inst);
	if (IS_ERR(inst))
		goto put_tmpl;

	err = crypto_register_instance(tmpl, inst);
	if (err) {
		tmpl->free(inst);
		goto put_tmpl;
	}

	/* Redo the lookup to use the instance we just registered. */
	err = -EAGAIN;

put_tmpl:
	crypto_tmpl_put(tmpl);
kill_larval:
	crypto_larval_kill(larval);
drop_larval:
	crypto_mod_put(larval);
out:
	crypto_mod_put(alg);
	return err;
}

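/*
 * Look up an AEAD algorithm by name.  New-style AEADs and algorithms
 * without an IV are returned directly; algorithms that still require an
 * external IV generator trigger construction of their default geniv
 * wrapper via crypto_nivaead_default().
 */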
struct crypto_alg *crypto_lookup_aead(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	alg = crypto_alg_mod_lookup(name, type, mask);
	if (IS_ERR(alg))
		return alg;

	if (alg->cra_type == &crypto_aead_type)
		return alg;

	if (!alg->cra_aead.ivsize)
		return alg;

	crypto_mod_put(alg);
	alg = crypto_alg_mod_lookup(name, type | CRYPTO_ALG_TESTED,
				    mask & ~CRYPTO_ALG_TESTED);
	if (IS_ERR(alg))
		return alg;

	if (alg->cra_type == &crypto_aead_type) {
		if (~alg->cra_flags & (type ^ ~mask) & CRYPTO_ALG_TESTED) {
			crypto_mod_put(alg);
			alg = ERR_PTR(-ENOENT);
		}
		return alg;
	}

	BUG_ON(!alg->cra_aead.ivsize);

	return ERR_PTR(crypto_nivaead_default(alg, type, mask));
}
EXPORT_SYMBOL_GPL(crypto_lookup_aead);

int crypto_grab_aead(struct crypto_aead_spawn *spawn, const char *name,
		     u32 type, u32 mask)
{
	spawn->base.frontend = &crypto_aead_type;
	return crypto_grab_spawn(&spawn->base, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_aead);

struct crypto_aead *crypto_alloc_aead(const char *alg_name, u32 type, u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_aead_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_aead);

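/*
 * Minimal usage sketch for the allocation and setup entry points above.
 * It assumes a "gcm(aes)" implementation is registered and a 16-byte key
 * in @key; request setup, submission and error handling are omitted.
 *
 *	struct crypto_aead *tfm;
 *
 *	tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	crypto_aead_setkey(tfm, key, 16);
 *	crypto_aead_setauthsize(tfm, 16);
 *	...
 *	crypto_free_aead(tfm);
 */
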
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Authenticated Encryption with Associated Data (AEAD)");