/*
 * seqiv: Sequence Number IV Generator
 *
 * This generator generates an IV based on a sequence number by xoring it
 * with a salt. This algorithm is mainly useful for CTR and similar modes.
 *
 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/internal/geniv.h>
#include <crypto/internal/skcipher.h>
#include <crypto/null.h>
#include <crypto/rng.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/spinlock.h>
#include <linux/string.h>

struct seqniv_request_ctx {
	struct scatterlist dst[2];
	struct aead_request subreq;
};

struct seqiv_ctx {
	spinlock_t lock;
	u8 salt[] __attribute__ ((aligned(__alignof__(u32))));
};

struct seqiv_aead_ctx {
	/* aead_geniv_ctx must be the first element */
	struct aead_geniv_ctx geniv;
	struct crypto_blkcipher *null;
	u8 salt[] __attribute__ ((aligned(__alignof__(u32))));
};

static void seqiv_free(struct crypto_instance *inst);

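/*
 * Completion callbacks for requests whose IV was bounced through a
 * temporary, aligned buffer: copy the generated IV back into the
 * original request and free the temporary copy once the child
 * transform has finished.
 */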
static void seqiv_complete2(struct skcipher_givcrypt_request *req, int err)
{
	struct ablkcipher_request *subreq = skcipher_givcrypt_reqctx(req);
	struct crypto_ablkcipher *geniv;

	if (err == -EINPROGRESS)
		return;

	if (err)
		goto out;

	geniv = skcipher_givcrypt_reqtfm(req);
	memcpy(req->creq.info, subreq->info, crypto_ablkcipher_ivsize(geniv));

out:
	kfree(subreq->info);
}

static void seqiv_complete(struct crypto_async_request *base, int err)
{
	struct skcipher_givcrypt_request *req = base->data;

	seqiv_complete2(req, err);
	skcipher_givcrypt_complete(req, err);
}

static void seqiv_aead_complete2(struct aead_givcrypt_request *req, int err)
{
	struct aead_request *subreq = aead_givcrypt_reqctx(req);
	struct crypto_aead *geniv;

	if (err == -EINPROGRESS)
		return;

	if (err)
		goto out;

	geniv = aead_givcrypt_reqtfm(req);
	memcpy(req->areq.iv, subreq->iv, crypto_aead_ivsize(geniv));

out:
	kfree(subreq->iv);
}

static void seqiv_aead_complete(struct crypto_async_request *base, int err)
{
	struct aead_givcrypt_request *req = base->data;

	seqiv_aead_complete2(req, err);
	aead_givcrypt_complete(req, err);
}

static void seqiv_aead_encrypt_complete2(struct aead_request *req, int err)
{
	struct aead_request *subreq = aead_request_ctx(req);
	struct crypto_aead *geniv;

	if (err == -EINPROGRESS)
		return;

	if (err)
		goto out;

	geniv = crypto_aead_reqtfm(req);
	memcpy(req->iv, subreq->iv, crypto_aead_ivsize(geniv));

out:
	kzfree(subreq->iv);
}

static void seqiv_aead_encrypt_complete(struct crypto_async_request *base,
					int err)
{
	struct aead_request *req = base->data;

	seqiv_aead_encrypt_complete2(req, err);
	aead_request_complete(req, err);
}

static void seqniv_aead_encrypt_complete2(struct aead_request *req, int err)
{
	unsigned int ivsize = 8;
	u8 data[20];

	if (err == -EINPROGRESS)
		return;

	/* Swap IV and ESP header back to correct order. */
	scatterwalk_map_and_copy(data, req->dst, 0, req->assoclen + ivsize, 0);
	scatterwalk_map_and_copy(data + ivsize, req->dst, 0, req->assoclen, 1);
	scatterwalk_map_and_copy(data, req->dst, req->assoclen, ivsize, 1);
}

static void seqniv_aead_encrypt_complete(struct crypto_async_request *base,
					 int err)
{
	struct aead_request *req = base->data;

	seqniv_aead_encrypt_complete2(req, err);
	aead_request_complete(req, err);
}

static void seqniv_aead_decrypt_complete2(struct aead_request *req, int err)
{
	u8 data[4];

	if (err == -EINPROGRESS)
		return;

	/* Move ESP header back to correct location. */
	scatterwalk_map_and_copy(data, req->dst, 16, req->assoclen - 8, 0);
	scatterwalk_map_and_copy(data, req->dst, 8, req->assoclen - 8, 1);
}

static void seqniv_aead_decrypt_complete(struct crypto_async_request *base,
					 int err)
{
	struct aead_request *req = base->data;

	seqniv_aead_decrypt_complete2(req, err);
	aead_request_complete(req, err);
}

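/*
 * Generate the IV: store the 64-bit sequence number big endian in the
 * low-order bytes of @info, zero any leading bytes, then XOR the whole
 * IV with the per-tfm salt.
 */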
static void seqiv_geniv(struct seqiv_ctx *ctx, u8 *info, u64 seq,
			unsigned int ivsize)
{
	unsigned int len = ivsize;

	if (ivsize > sizeof(u64)) {
		memset(info, 0, ivsize - sizeof(u64));
		len = sizeof(u64);
	}
	seq = cpu_to_be64(seq);
	memcpy(info + ivsize - len, &seq, len);
	crypto_xor(info, ctx->salt, ivsize);
}

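/*
 * givencrypt for the skcipher case: derive the IV from req->seq, copy
 * it to req->giv, and run the underlying cipher.  If the caller's IV
 * buffer is not suitably aligned, generate into a temporary buffer and
 * copy back on completion.
 */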
static int seqiv_givencrypt(struct skcipher_givcrypt_request *req)
{
	struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
	struct seqiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
	struct ablkcipher_request *subreq = skcipher_givcrypt_reqctx(req);
	crypto_completion_t compl;
	void *data;
	u8 *info;
	unsigned int ivsize;
	int err;

	ablkcipher_request_set_tfm(subreq, skcipher_geniv_cipher(geniv));

	compl = req->creq.base.complete;
	data = req->creq.base.data;
	info = req->creq.info;

	ivsize = crypto_ablkcipher_ivsize(geniv);

	if (unlikely(!IS_ALIGNED((unsigned long)info,
				 crypto_ablkcipher_alignmask(geniv) + 1))) {
		info = kmalloc(ivsize, req->creq.base.flags &
				       CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL:
								  GFP_ATOMIC);
		if (!info)
			return -ENOMEM;

		compl = seqiv_complete;
		data = req;
	}

	ablkcipher_request_set_callback(subreq, req->creq.base.flags, compl,
					data);
	ablkcipher_request_set_crypt(subreq, req->creq.src, req->creq.dst,
				     req->creq.nbytes, info);

	seqiv_geniv(ctx, info, req->seq, ivsize);
	memcpy(req->giv, info, ivsize);

	err = crypto_ablkcipher_encrypt(subreq);
	if (unlikely(info != req->creq.info))
		seqiv_complete2(req, err);
	return err;
}

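/*
 * givencrypt for the old AEAD interface: same IV handling as
 * seqiv_givencrypt(), but the associated data is passed through to the
 * child AEAD unchanged.
 */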
static int seqiv_aead_givencrypt(struct aead_givcrypt_request *req)
{
	struct crypto_aead *geniv = aead_givcrypt_reqtfm(req);
	struct seqiv_ctx *ctx = crypto_aead_ctx(geniv);
	struct aead_request *areq = &req->areq;
	struct aead_request *subreq = aead_givcrypt_reqctx(req);
	crypto_completion_t compl;
	void *data;
	u8 *info;
	unsigned int ivsize;
	int err;

	aead_request_set_tfm(subreq, aead_geniv_base(geniv));

	compl = areq->base.complete;
	data = areq->base.data;
	info = areq->iv;

	ivsize = crypto_aead_ivsize(geniv);

	if (unlikely(!IS_ALIGNED((unsigned long)info,
				 crypto_aead_alignmask(geniv) + 1))) {
		info = kmalloc(ivsize, areq->base.flags &
				       CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL:
								  GFP_ATOMIC);
		if (!info)
			return -ENOMEM;

		compl = seqiv_aead_complete;
		data = req;
	}

	aead_request_set_callback(subreq, areq->base.flags, compl, data);
	aead_request_set_crypt(subreq, areq->src, areq->dst, areq->cryptlen,
			       info);
	aead_request_set_assoc(subreq, areq->assoc, areq->assoclen);

	seqiv_geniv(ctx, info, req->seq, ivsize);
	memcpy(req->giv, info, ivsize);

	err = crypto_aead_encrypt(subreq);
	if (unlikely(info != areq->iv))
		seqiv_aead_complete2(req, err);
	return err;
}

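/*
 * seqniv encryption: the sequence-number IV is XORed with the salt in
 * place, and the IV and ESP associated data are temporarily swapped in
 * the destination buffer so the child AEAD sees them in the order it
 * expects; the completion handler swaps them back.
 */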
static int seqniv_aead_encrypt(struct aead_request *req)
{
	struct crypto_aead *geniv = crypto_aead_reqtfm(req);
	struct seqiv_aead_ctx *ctx = crypto_aead_ctx(geniv);
	struct seqniv_request_ctx *rctx = aead_request_ctx(req);
	struct aead_request *subreq = &rctx->subreq;
	struct scatterlist *dst;
	crypto_completion_t compl;
	void *data;
	unsigned int ivsize = 8;
	u8 buf[20] __attribute__ ((aligned(__alignof__(u32))));
	int err;

	if (req->cryptlen < ivsize)
		return -EINVAL;

	/* ESP AD is at most 12 bytes (ESN). */
	if (req->assoclen > 12)
		return -EINVAL;

	aead_request_set_tfm(subreq, ctx->geniv.child);

	compl = seqniv_aead_encrypt_complete;
	data = req;

	if (req->src != req->dst) {
		struct blkcipher_desc desc = {
			.tfm = ctx->null,
		};

		err = crypto_blkcipher_encrypt(&desc, req->dst, req->src,
					       req->assoclen + req->cryptlen);
		if (err)
			return err;
	}

	dst = scatterwalk_ffwd(rctx->dst, req->dst, ivsize);

	aead_request_set_callback(subreq, req->base.flags, compl, data);
	aead_request_set_crypt(subreq, dst, dst,
			       req->cryptlen - ivsize, req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	memcpy(buf, req->iv, ivsize);
	crypto_xor(buf, ctx->salt, ivsize);
	memcpy(req->iv, buf, ivsize);

	/* Swap order of IV and ESP AD for ICV generation. */
	scatterwalk_map_and_copy(buf + ivsize, req->dst, 0, req->assoclen, 0);
	scatterwalk_map_and_copy(buf, req->dst, 0, req->assoclen + ivsize, 1);

	err = crypto_aead_encrypt(subreq);
	seqniv_aead_encrypt_complete2(req, err);
	return err;
}

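/*
 * seqiv encryption: XOR the sequence-number IV with the salt and write
 * it into the destination buffer just before the payload, then encrypt
 * in place with the associated data extended to cover the IV.
 */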
static int seqiv_aead_encrypt(struct aead_request *req)
{
	struct crypto_aead *geniv = crypto_aead_reqtfm(req);
	struct seqiv_aead_ctx *ctx = crypto_aead_ctx(geniv);
	struct aead_request *subreq = aead_request_ctx(req);
	crypto_completion_t compl;
	void *data;
	u8 *info;
	unsigned int ivsize = 8;
	int err;

	if (req->cryptlen < ivsize)
		return -EINVAL;

	aead_request_set_tfm(subreq, ctx->geniv.child);

	compl = req->base.complete;
	data = req->base.data;
	info = req->iv;

	if (req->src != req->dst) {
		struct blkcipher_desc desc = {
			.tfm = ctx->null,
		};

		err = crypto_blkcipher_encrypt(&desc, req->dst, req->src,
					       req->assoclen + req->cryptlen);
		if (err)
			return err;
	}

	if (unlikely(!IS_ALIGNED((unsigned long)info,
				 crypto_aead_alignmask(geniv) + 1))) {
		info = kmalloc(ivsize, req->base.flags &
				       CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL:
								  GFP_ATOMIC);
		if (!info)
			return -ENOMEM;

		memcpy(info, req->iv, ivsize);
		compl = seqiv_aead_encrypt_complete;
		data = req;
	}

	aead_request_set_callback(subreq, req->base.flags, compl, data);
	aead_request_set_crypt(subreq, req->dst, req->dst,
			       req->cryptlen - ivsize, info);
	aead_request_set_ad(subreq, req->assoclen + ivsize);

	crypto_xor(info, ctx->salt, ivsize);
	scatterwalk_map_and_copy(info, req->dst, req->assoclen, ivsize, 1);

	err = crypto_aead_encrypt(subreq);
	if (unlikely(info != req->iv))
		seqiv_aead_encrypt_complete2(req, err);
	return err;
}

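/*
 * seqniv decryption: extract the IV from the buffer, move the ESP
 * associated data forward over it for ICV verification, and restore
 * the layout on completion when the AD is larger than 8 bytes.
 */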
static int seqniv_aead_decrypt(struct aead_request *req)
{
	struct crypto_aead *geniv = crypto_aead_reqtfm(req);
	struct seqiv_aead_ctx *ctx = crypto_aead_ctx(geniv);
	struct seqniv_request_ctx *rctx = aead_request_ctx(req);
	struct aead_request *subreq = &rctx->subreq;
	struct scatterlist *dst;
	crypto_completion_t compl;
	void *data;
	unsigned int ivsize = 8;
	u8 buf[20];
	int err;

	if (req->cryptlen < ivsize + crypto_aead_authsize(geniv))
		return -EINVAL;

	aead_request_set_tfm(subreq, ctx->geniv.child);

	compl = req->base.complete;
	data = req->base.data;

	if (req->assoclen > 12)
		return -EINVAL;
	else if (req->assoclen > 8) {
		compl = seqniv_aead_decrypt_complete;
		data = req;
	}

	if (req->src != req->dst) {
		struct blkcipher_desc desc = {
			.tfm = ctx->null,
		};

		err = crypto_blkcipher_encrypt(&desc, req->dst, req->src,
					       req->assoclen + req->cryptlen);
		if (err)
			return err;
	}

	/* Move ESP AD forward for ICV generation. */
	scatterwalk_map_and_copy(buf, req->dst, 0, req->assoclen + ivsize, 0);
	memcpy(req->iv, buf + req->assoclen, ivsize);
	scatterwalk_map_and_copy(buf, req->dst, ivsize, req->assoclen, 1);

	dst = scatterwalk_ffwd(rctx->dst, req->dst, ivsize);

	aead_request_set_callback(subreq, req->base.flags, compl, data);
	aead_request_set_crypt(subreq, dst, dst,
			       req->cryptlen - ivsize, req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	err = crypto_aead_decrypt(subreq);
	if (req->assoclen > 8)
		seqniv_aead_decrypt_complete2(req, err);
	return err;
}

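/*
 * seqiv decryption: read the IV that precedes the payload, include it
 * in the associated data passed to the child AEAD, and decrypt the
 * remainder.
 */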
static int seqiv_aead_decrypt(struct aead_request *req)
{
	struct crypto_aead *geniv = crypto_aead_reqtfm(req);
	struct seqiv_aead_ctx *ctx = crypto_aead_ctx(geniv);
	struct aead_request *subreq = aead_request_ctx(req);
	crypto_completion_t compl;
	void *data;
	unsigned int ivsize = 8;

	if (req->cryptlen < ivsize + crypto_aead_authsize(geniv))
		return -EINVAL;

	aead_request_set_tfm(subreq, ctx->geniv.child);

	compl = req->base.complete;
	data = req->base.data;

	aead_request_set_callback(subreq, req->base.flags, compl, data);
	aead_request_set_crypt(subreq, req->src, req->dst,
			       req->cryptlen - ivsize, req->iv);
	aead_request_set_ad(subreq, req->assoclen + ivsize);

	scatterwalk_map_and_copy(req->iv, req->src, req->assoclen, ivsize, 0);
	if (req->src != req->dst)
		scatterwalk_map_and_copy(req->iv, req->dst,
					 req->assoclen, ivsize, 1);

	return crypto_aead_decrypt(subreq);
}

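/*
 * The *_first handlers run on the first request for a given transform:
 * they seed the salt from the system RNG under the context lock,
 * rewrite the entry point to the regular handler, and then process the
 * request.
 */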
static int seqiv_givencrypt_first(struct skcipher_givcrypt_request *req)
{
	struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
	struct seqiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
	int err = 0;

	spin_lock_bh(&ctx->lock);
	if (crypto_ablkcipher_crt(geniv)->givencrypt != seqiv_givencrypt_first)
		goto unlock;

	crypto_ablkcipher_crt(geniv)->givencrypt = seqiv_givencrypt;
	err = crypto_rng_get_bytes(crypto_default_rng, ctx->salt,
				   crypto_ablkcipher_ivsize(geniv));

unlock:
	spin_unlock_bh(&ctx->lock);

	if (err)
		return err;

	return seqiv_givencrypt(req);
}

static int seqiv_aead_givencrypt_first(struct aead_givcrypt_request *req)
{
	struct crypto_aead *geniv = aead_givcrypt_reqtfm(req);
	struct seqiv_ctx *ctx = crypto_aead_ctx(geniv);
	int err = 0;

	spin_lock_bh(&ctx->lock);
	if (crypto_aead_crt(geniv)->givencrypt != seqiv_aead_givencrypt_first)
		goto unlock;

	crypto_aead_crt(geniv)->givencrypt = seqiv_aead_givencrypt;
	err = crypto_rng_get_bytes(crypto_default_rng, ctx->salt,
				   crypto_aead_ivsize(geniv));

unlock:
	spin_unlock_bh(&ctx->lock);

	if (err)
		return err;

	return seqiv_aead_givencrypt(req);
}

static int seqniv_aead_encrypt_first(struct aead_request *req)
{
	struct crypto_aead *geniv = crypto_aead_reqtfm(req);
	struct seqiv_aead_ctx *ctx = crypto_aead_ctx(geniv);
	int err = 0;

	spin_lock_bh(&ctx->geniv.lock);
	if (geniv->encrypt != seqniv_aead_encrypt_first)
		goto unlock;

	geniv->encrypt = seqniv_aead_encrypt;
	err = crypto_rng_get_bytes(crypto_default_rng, ctx->salt,
				   crypto_aead_ivsize(geniv));

unlock:
	spin_unlock_bh(&ctx->geniv.lock);

	if (err)
		return err;

	return seqniv_aead_encrypt(req);
}

static int seqiv_aead_encrypt_first(struct aead_request *req)
{
	struct crypto_aead *geniv = crypto_aead_reqtfm(req);
	struct seqiv_aead_ctx *ctx = crypto_aead_ctx(geniv);
	int err = 0;

	spin_lock_bh(&ctx->geniv.lock);
	if (geniv->encrypt != seqiv_aead_encrypt_first)
		goto unlock;

	geniv->encrypt = seqiv_aead_encrypt;
	err = crypto_rng_get_bytes(crypto_default_rng, ctx->salt,
				   crypto_aead_ivsize(geniv));

unlock:
	spin_unlock_bh(&ctx->geniv.lock);

	if (err)
		return err;

	return seqiv_aead_encrypt(req);
}

static int seqiv_init(struct crypto_tfm *tfm)
{
	struct crypto_ablkcipher *geniv = __crypto_ablkcipher_cast(tfm);
	struct seqiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);

	spin_lock_init(&ctx->lock);

	tfm->crt_ablkcipher.reqsize = sizeof(struct ablkcipher_request);

	return skcipher_geniv_init(tfm);
}

static int seqiv_old_aead_init(struct crypto_tfm *tfm)
{
	struct crypto_aead *geniv = __crypto_aead_cast(tfm);
	struct seqiv_ctx *ctx = crypto_aead_ctx(geniv);

	spin_lock_init(&ctx->lock);

	crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
				sizeof(struct aead_request));

	return aead_geniv_init(tfm);
}

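/*
 * Common tfm init for the new AEAD interface: set the request context
 * size, grab the default null skcipher used for copying src to dst,
 * and stash the real child transform in ctx->geniv.child.
 */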
static int seqiv_aead_init_common(struct crypto_tfm *tfm, unsigned int reqsize)
{
	struct crypto_aead *geniv = __crypto_aead_cast(tfm);
	struct seqiv_aead_ctx *ctx = crypto_aead_ctx(geniv);
	int err;

	spin_lock_init(&ctx->geniv.lock);

	crypto_aead_set_reqsize(geniv, reqsize);

	ctx->null = crypto_get_default_null_skcipher();
	err = PTR_ERR(ctx->null);
	if (IS_ERR(ctx->null))
		goto out;

	err = aead_geniv_init(tfm);
	if (err)
		goto drop_null;

	ctx->geniv.child = geniv->child;
	geniv->child = geniv;

out:
	return err;

drop_null:
	crypto_put_default_null_skcipher();
	goto out;
}

static int seqiv_aead_init(struct crypto_tfm *tfm)
{
	return seqiv_aead_init_common(tfm, sizeof(struct aead_request));
}

static int seqniv_aead_init(struct crypto_tfm *tfm)
{
	return seqiv_aead_init_common(tfm, sizeof(struct seqniv_request_ctx));
}

static void seqiv_aead_exit(struct crypto_tfm *tfm)
{
	struct seqiv_aead_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_aead(ctx->geniv.child);
	crypto_put_default_null_skcipher();
}

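/*
 * Template instantiation.  "seqiv" wraps either an ablkcipher or an
 * AEAD; the legacy paths keep the old givencrypt entry points while
 * the new AEAD path installs the encrypt/decrypt handlers above.
 */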
static int seqiv_ablkcipher_create(struct crypto_template *tmpl,
				   struct rtattr **tb)
{
	struct crypto_instance *inst;
	int err;

	inst = skcipher_geniv_alloc(tmpl, tb, 0, 0);

	if (IS_ERR(inst))
		return PTR_ERR(inst);

	err = -EINVAL;
	if (inst->alg.cra_ablkcipher.ivsize < sizeof(u64))
		goto free_inst;

	inst->alg.cra_ablkcipher.givencrypt = seqiv_givencrypt_first;

	inst->alg.cra_init = seqiv_init;
	inst->alg.cra_exit = skcipher_geniv_exit;

	inst->alg.cra_ctxsize += inst->alg.cra_ablkcipher.ivsize;
	inst->alg.cra_ctxsize += sizeof(struct seqiv_ctx);

	inst->alg.cra_alignmask |= __alignof__(u32) - 1;

	err = crypto_register_instance(tmpl, inst);
	if (err)
		goto free_inst;

out:
	return err;

free_inst:
	skcipher_geniv_free(inst);
	goto out;
}

static int seqiv_old_aead_create(struct crypto_template *tmpl,
				 struct aead_instance *aead)
{
	struct crypto_instance *inst = aead_crypto_instance(aead);
	int err = -EINVAL;

	if (inst->alg.cra_aead.ivsize < sizeof(u64))
		goto free_inst;

	inst->alg.cra_aead.givencrypt = seqiv_aead_givencrypt_first;

	inst->alg.cra_init = seqiv_old_aead_init;
	inst->alg.cra_exit = aead_geniv_exit;

	inst->alg.cra_ctxsize = inst->alg.cra_aead.ivsize;
	inst->alg.cra_ctxsize += sizeof(struct seqiv_ctx);

	err = crypto_register_instance(tmpl, inst);
	if (err)
		goto free_inst;

out:
	return err;

free_inst:
	aead_geniv_free(aead);
	goto out;
}

static int seqiv_aead_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct aead_instance *inst;
	struct crypto_aead_spawn *spawn;
	struct aead_alg *alg;
	int err;

	inst = aead_geniv_alloc(tmpl, tb, 0, 0);

	if (IS_ERR(inst))
		return PTR_ERR(inst);

	inst->alg.base.cra_alignmask |= __alignof__(u32) - 1;

	if (inst->alg.base.cra_aead.encrypt)
		return seqiv_old_aead_create(tmpl, inst);

	spawn = aead_instance_ctx(inst);
	alg = crypto_spawn_aead_alg(spawn);

	if (alg->base.cra_aead.encrypt)
		goto done;

	err = -EINVAL;
	if (inst->alg.ivsize != sizeof(u64))
		goto free_inst;

	inst->alg.encrypt = seqiv_aead_encrypt_first;
	inst->alg.decrypt = seqiv_aead_decrypt;

	inst->alg.base.cra_init = seqiv_aead_init;
	inst->alg.base.cra_exit = seqiv_aead_exit;

	inst->alg.base.cra_ctxsize = sizeof(struct seqiv_aead_ctx);
	inst->alg.base.cra_ctxsize += inst->alg.base.cra_aead.ivsize;

done:
	err = aead_register_instance(tmpl, inst);
	if (err)
		goto free_inst;

out:
	return err;

free_inst:
	aead_geniv_free(inst);
	goto out;
}

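/*
 * seqiv_create() decides, from the requested algorithm type, whether to
 * instantiate the ablkcipher or the AEAD flavour; the default RNG is
 * taken here and dropped again if instantiation fails.
 */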
static int seqiv_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	err = crypto_get_default_rng();
	if (err)
		return err;

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & CRYPTO_ALG_TYPE_MASK)
		err = seqiv_ablkcipher_create(tmpl, tb);
	else
		err = seqiv_aead_create(tmpl, tb);

	if (err)
		crypto_put_default_rng();

	return err;
}

static int seqniv_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct aead_instance *inst;
	struct crypto_aead_spawn *spawn;
	struct aead_alg *alg;
	int err;

	err = crypto_get_default_rng();
	if (err)
		return err;

	inst = aead_geniv_alloc(tmpl, tb, 0, 0);
	err = PTR_ERR(inst);
	if (IS_ERR(inst))
		goto put_rng;

	spawn = aead_instance_ctx(inst);
	alg = crypto_spawn_aead_alg(spawn);

	if (alg->base.cra_aead.encrypt)
		goto done;

	err = -EINVAL;
	if (inst->alg.ivsize != sizeof(u64))
		goto free_inst;

	inst->alg.encrypt = seqniv_aead_encrypt_first;
	inst->alg.decrypt = seqniv_aead_decrypt;

	inst->alg.base.cra_init = seqniv_aead_init;
	inst->alg.base.cra_exit = seqiv_aead_exit;

	inst->alg.base.cra_alignmask |= __alignof__(u32) - 1;
	inst->alg.base.cra_ctxsize = sizeof(struct seqiv_aead_ctx);
	inst->alg.base.cra_ctxsize += inst->alg.ivsize;

done:
	err = aead_register_instance(tmpl, inst);
	if (err)
		goto free_inst;

out:
	return err;

free_inst:
	aead_geniv_free(inst);
put_rng:
	crypto_put_default_rng();
	goto out;
}

static void seqiv_free(struct crypto_instance *inst)
{
	if ((inst->alg.cra_flags ^ CRYPTO_ALG_TYPE_AEAD) & CRYPTO_ALG_TYPE_MASK)
		skcipher_geniv_free(inst);
	else
		aead_geniv_free(aead_instance(inst));
	crypto_put_default_rng();
}

static struct crypto_template seqiv_tmpl = {
	.name = "seqiv",
	.create = seqiv_create,
	.free = seqiv_free,
	.module = THIS_MODULE,
};

static struct crypto_template seqniv_tmpl = {
	.name = "seqniv",
	.create = seqniv_create,
	.free = seqiv_free,
	.module = THIS_MODULE,
};

static int __init seqiv_module_init(void)
{
	int err;

	err = crypto_register_template(&seqiv_tmpl);
	if (err)
		goto out;

	err = crypto_register_template(&seqniv_tmpl);
	if (err)
		goto out_undo_niv;

out:
	return err;

out_undo_niv:
	crypto_unregister_template(&seqiv_tmpl);
	goto out;
}

static void __exit seqiv_module_exit(void)
{
	crypto_unregister_template(&seqniv_tmpl);
	crypto_unregister_template(&seqiv_tmpl);
}

module_init(seqiv_module_init);
module_exit(seqiv_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Sequence Number IV Generator");
MODULE_ALIAS_CRYPTO("seqiv");
MODULE_ALIAS_CRYPTO("seqniv");