// SPDX-License-Identifier: GPL-2.0-only
/*
 * GCM: Galois/Counter Mode.
 *
 * Copyright (c) 2007 Nokia Siemens Networks - Mikko Herranen <mh1@iki.fi>
 */
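
/*
 * This file implements GCM as a template: "gcm" and "gcm_base" compose
 * a CTR mode skcipher with a GHASH ahash into an AEAD, while "rfc4106"
 * and "rfc4543" wrap the resulting AEAD for IPsec ESP (AES-GCM and
 * AES-GMAC respectively).
 */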

#include <crypto/gf128mul.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/internal/hash.h>
#include <crypto/null.h>
#include <crypto/scatterwalk.h>
#include <crypto/gcm.h>
#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>

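/*
 * Context layout used throughout this file: the *_instance_ctx
 * structures hold the spawns recorded when a template instance is
 * created, the crypto_*_ctx structures hold the per-tfm child
 * transforms, and the *_req_ctx/crypto_gcm_req_priv_ctx structures
 * carry per-request scratch state (counter block, tags, scatterlists
 * and the child request). gcm_zeroes below supplies the shared block
 * of zero padding hashed in for partial GHASH blocks.
 */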
struct gcm_instance_ctx {
	struct crypto_skcipher_spawn ctr;
	struct crypto_ahash_spawn ghash;
};

struct crypto_gcm_ctx {
	struct crypto_skcipher *ctr;
	struct crypto_ahash *ghash;
};

struct crypto_rfc4106_ctx {
	struct crypto_aead *child;
	u8 nonce[4];
};

struct crypto_rfc4106_req_ctx {
	struct scatterlist src[3];
	struct scatterlist dst[3];
	struct aead_request subreq;
};

struct crypto_rfc4543_instance_ctx {
	struct crypto_aead_spawn aead;
};

struct crypto_rfc4543_ctx {
	struct crypto_aead *child;
	struct crypto_sync_skcipher *null;
	u8 nonce[4];
};

struct crypto_rfc4543_req_ctx {
	struct aead_request subreq;
};

struct crypto_gcm_ghash_ctx {
	unsigned int cryptlen;
	struct scatterlist *src;
	int (*complete)(struct aead_request *req, u32 flags);
};

struct crypto_gcm_req_priv_ctx {
	u8 iv[16];
	u8 auth_tag[16];
	u8 iauth_tag[16];
	struct scatterlist src[3];
	struct scatterlist dst[3];
	struct scatterlist sg;
	struct crypto_gcm_ghash_ctx ghash_ctx;
	union {
		struct ahash_request ahreq;
		struct skcipher_request skreq;
	} u;
};

static struct {
	u8 buf[16];
	struct scatterlist sg;
} *gcm_zeroes;

static int crypto_rfc4543_copy_src_to_dst(struct aead_request *req, bool enc);

static inline struct crypto_gcm_req_priv_ctx *crypto_gcm_reqctx(
	struct aead_request *req)
{
	unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req));

	return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
}

static int crypto_gcm_setkey(struct crypto_aead *aead, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_ahash *ghash = ctx->ghash;
	struct crypto_skcipher *ctr = ctx->ctr;
	struct {
		be128 hash;
		u8 iv[16];

		struct crypto_wait wait;

		struct scatterlist sg[1];
		struct skcipher_request req;
	} *data;
	int err;

	crypto_skcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(ctr, key, keylen);
	if (err)
		return err;

	data = kzalloc(sizeof(*data) + crypto_skcipher_reqsize(ctr),
		       GFP_KERNEL);
	if (!data)
		return -ENOMEM;

	crypto_init_wait(&data->wait);
	sg_init_one(data->sg, &data->hash, sizeof(data->hash));
	skcipher_request_set_tfm(&data->req, ctr);
	skcipher_request_set_callback(&data->req, CRYPTO_TFM_REQ_MAY_SLEEP |
						  CRYPTO_TFM_REQ_MAY_BACKLOG,
				      crypto_req_done,
				      &data->wait);
	skcipher_request_set_crypt(&data->req, data->sg, data->sg,
				   sizeof(data->hash), data->iv);

	err = crypto_wait_req(crypto_skcipher_encrypt(&data->req),
			      &data->wait);

	if (err)
		goto out;

	crypto_ahash_clear_flags(ghash, CRYPTO_TFM_REQ_MASK);
	crypto_ahash_set_flags(ghash, crypto_aead_get_flags(aead) &
				      CRYPTO_TFM_REQ_MASK);
	err = crypto_ahash_setkey(ghash, (u8 *)&data->hash, sizeof(be128));
out:
	kzfree(data);
	return err;
}
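
/*
 * Note on crypto_gcm_setkey() above: the GHASH subkey H = E_K(0^128)
 * is derived by running the CTR child over a zeroed block with a
 * zeroed counter, so the first keystream block, E_K of the all-zero
 * counter block, lands in data->hash and then keys the GHASH
 * transform.
 */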

static int crypto_gcm_setauthsize(struct crypto_aead *tfm,
				  unsigned int authsize)
{
	return crypto_gcm_check_authsize(authsize);
}

static void crypto_gcm_init_common(struct aead_request *req)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	__be32 counter = cpu_to_be32(1);
	struct scatterlist *sg;

	memset(pctx->auth_tag, 0, sizeof(pctx->auth_tag));
	memcpy(pctx->iv, req->iv, GCM_AES_IV_SIZE);
	memcpy(pctx->iv + GCM_AES_IV_SIZE, &counter, 4);

	sg_init_table(pctx->src, 3);
	sg_set_buf(pctx->src, pctx->auth_tag, sizeof(pctx->auth_tag));
	sg = scatterwalk_ffwd(pctx->src + 1, req->src, req->assoclen);
	if (sg != pctx->src + 1)
		sg_chain(pctx->src, 2, sg);

	if (req->src != req->dst) {
		sg_init_table(pctx->dst, 3);
		sg_set_buf(pctx->dst, pctx->auth_tag, sizeof(pctx->auth_tag));
		sg = scatterwalk_ffwd(pctx->dst + 1, req->dst, req->assoclen);
		if (sg != pctx->dst + 1)
			sg_chain(pctx->dst, 2, sg);
	}
}

static void crypto_gcm_init_crypt(struct aead_request *req,
				  unsigned int cryptlen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct skcipher_request *skreq = &pctx->u.skreq;
	struct scatterlist *dst;

	dst = req->src == req->dst ? pctx->src : pctx->dst;

	skcipher_request_set_tfm(skreq, ctx->ctr);
	skcipher_request_set_crypt(skreq, pctx->src, dst,
				   cryptlen + sizeof(pctx->auth_tag),
				   pctx->iv);
}

static inline unsigned int gcm_remain(unsigned int len)
{
	len &= 0xfU;
	return len ? 16 - len : 0;
}
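
/*
 * gcm_remain() returns the number of zero bytes needed to pad @len to
 * a full 16-byte GHASH block: for example, a 20-byte length leaves a
 * 4-byte remainder and thus needs 12 bytes of padding.
 */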

static void gcm_hash_len_done(struct crypto_async_request *areq, int err);

static int gcm_hash_update(struct aead_request *req,
			   crypto_completion_t compl,
			   struct scatterlist *src,
			   unsigned int len, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct ahash_request *ahreq = &pctx->u.ahreq;

	ahash_request_set_callback(ahreq, flags, compl, req);
	ahash_request_set_crypt(ahreq, src, NULL, len);

	return crypto_ahash_update(ahreq);
}

static int gcm_hash_remain(struct aead_request *req,
			   unsigned int remain,
			   crypto_completion_t compl, u32 flags)
{
	return gcm_hash_update(req, compl, &gcm_zeroes->sg, remain, flags);
}

static int gcm_hash_len(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct ahash_request *ahreq = &pctx->u.ahreq;
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
	be128 lengths;

	lengths.a = cpu_to_be64(req->assoclen * 8);
	lengths.b = cpu_to_be64(gctx->cryptlen * 8);
	memcpy(pctx->iauth_tag, &lengths, 16);
	sg_init_one(&pctx->sg, pctx->iauth_tag, 16);
	ahash_request_set_callback(ahreq, flags, gcm_hash_len_done, req);
	ahash_request_set_crypt(ahreq, &pctx->sg,
				pctx->iauth_tag, sizeof(lengths));

	return crypto_ahash_finup(ahreq);
}
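
/*
 * The block hashed by gcm_hash_len() is the final GHASH input defined
 * by the GCM specification: the lengths of the associated data and of
 * the ciphertext, in bits, each as a 64-bit big-endian value.
 */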

static int gcm_hash_len_continue(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;

	return gctx->complete(req, flags);
}

static void gcm_hash_len_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	if (err)
		goto out;

	err = gcm_hash_len_continue(req, 0);
	if (err == -EINPROGRESS)
		return;

out:
	aead_request_complete(req, err);
}

static int gcm_hash_crypt_remain_continue(struct aead_request *req, u32 flags)
{
	return gcm_hash_len(req, flags) ?:
	       gcm_hash_len_continue(req, flags);
}

static void gcm_hash_crypt_remain_done(struct crypto_async_request *areq,
				       int err)
{
	struct aead_request *req = areq->data;

	if (err)
		goto out;

	err = gcm_hash_crypt_remain_continue(req, 0);
	if (err == -EINPROGRESS)
		return;

out:
	aead_request_complete(req, err);
}

static int gcm_hash_crypt_continue(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
	unsigned int remain;

	remain = gcm_remain(gctx->cryptlen);
	if (remain)
		return gcm_hash_remain(req, remain,
				       gcm_hash_crypt_remain_done, flags) ?:
		       gcm_hash_crypt_remain_continue(req, flags);

	return gcm_hash_crypt_remain_continue(req, flags);
}

static void gcm_hash_crypt_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	if (err)
		goto out;

	err = gcm_hash_crypt_continue(req, 0);
	if (err == -EINPROGRESS)
		return;

out:
	aead_request_complete(req, err);
}

static int gcm_hash_assoc_remain_continue(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;

	if (gctx->cryptlen)
		return gcm_hash_update(req, gcm_hash_crypt_done,
				       gctx->src, gctx->cryptlen, flags) ?:
		       gcm_hash_crypt_continue(req, flags);

	return gcm_hash_crypt_remain_continue(req, flags);
}

static void gcm_hash_assoc_remain_done(struct crypto_async_request *areq,
				       int err)
{
	struct aead_request *req = areq->data;

	if (err)
		goto out;

	err = gcm_hash_assoc_remain_continue(req, 0);
	if (err == -EINPROGRESS)
		return;

out:
	aead_request_complete(req, err);
}

static int gcm_hash_assoc_continue(struct aead_request *req, u32 flags)
{
	unsigned int remain;

	remain = gcm_remain(req->assoclen);
	if (remain)
		return gcm_hash_remain(req, remain,
				       gcm_hash_assoc_remain_done, flags) ?:
		       gcm_hash_assoc_remain_continue(req, flags);

	return gcm_hash_assoc_remain_continue(req, flags);
}

static void gcm_hash_assoc_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	if (err)
		goto out;

	err = gcm_hash_assoc_continue(req, 0);
	if (err == -EINPROGRESS)
		return;

out:
	aead_request_complete(req, err);
}

static int gcm_hash_init_continue(struct aead_request *req, u32 flags)
{
	if (req->assoclen)
		return gcm_hash_update(req, gcm_hash_assoc_done,
				       req->src, req->assoclen, flags) ?:
		       gcm_hash_assoc_continue(req, flags);

	return gcm_hash_assoc_remain_continue(req, flags);
}

static void gcm_hash_init_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	if (err)
		goto out;

	err = gcm_hash_init_continue(req, 0);
	if (err == -EINPROGRESS)
		return;

out:
	aead_request_complete(req, err);
}

static int gcm_hash(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct ahash_request *ahreq = &pctx->u.ahreq;
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));

	ahash_request_set_tfm(ahreq, ctx->ghash);

	ahash_request_set_callback(ahreq, flags, gcm_hash_init_done, req);
	return crypto_ahash_init(ahreq) ?:
	       gcm_hash_init_continue(req, flags);
}
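
/*
 * gcm_hash() starts a GHASH chain in which every step may complete
 * either synchronously or asynchronously:
 *
 *   init -> assoc data -> zero-pad assoc -> ciphertext ->
 *   zero-pad ciphertext -> length block -> gctx->complete()
 *
 * Each *_continue() helper handles the synchronous return path, while
 * the matching *_done() callback resumes the chain after an
 * asynchronous step.
 */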

static int gcm_enc_copy_hash(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	u8 *auth_tag = pctx->auth_tag;

	crypto_xor(auth_tag, pctx->iauth_tag, 16);
	scatterwalk_map_and_copy(auth_tag, req->dst,
				 req->assoclen + req->cryptlen,
				 crypto_aead_authsize(aead), 1);
	return 0;
}

static int gcm_encrypt_continue(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;

	gctx->src = sg_next(req->src == req->dst ? pctx->src : pctx->dst);
	gctx->cryptlen = req->cryptlen;
	gctx->complete = gcm_enc_copy_hash;

	return gcm_hash(req, flags);
}

static void gcm_encrypt_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	if (err)
		goto out;

	err = gcm_encrypt_continue(req, 0);
	if (err == -EINPROGRESS)
		return;

out:
	aead_request_complete(req, err);
}

static int crypto_gcm_encrypt(struct aead_request *req)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct skcipher_request *skreq = &pctx->u.skreq;
	u32 flags = aead_request_flags(req);

	crypto_gcm_init_common(req);
	crypto_gcm_init_crypt(req, req->cryptlen);
	skcipher_request_set_callback(skreq, flags, gcm_encrypt_done, req);

	return crypto_skcipher_encrypt(skreq) ?:
	       gcm_encrypt_continue(req, flags);
}
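
/*
 * A minimal usage sketch (not part of this file) for the "gcm(aes)"
 * instance this template provides, driven through the generic AEAD
 * API. The identifiers key, iv, buf, assoclen and ptlen are
 * illustrative assumptions; the scatterlist must cover the AAD
 * followed by the plaintext, and on encryption must leave room for
 * the tag after the ciphertext:
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
 *	DECLARE_CRYPTO_WAIT(wait);
 *	struct aead_request *req;
 *	struct scatterlist sg;
 *	int err;
 *
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	err = crypto_aead_setkey(tfm, key, 16);		// AES-128 key
 *	err = err ?: crypto_aead_setauthsize(tfm, 16);	// 16-byte tag
 *
 *	req = aead_request_alloc(tfm, GFP_KERNEL);
 *	// buf: assoclen bytes of AAD, then ptlen bytes of plaintext,
 *	// then 16 spare bytes for the tag written by encryption.
 *	sg_init_one(&sg, buf, assoclen + ptlen + 16);
 *	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				  crypto_req_done, &wait);
 *	aead_request_set_ad(req, assoclen);
 *	aead_request_set_crypt(req, &sg, &sg, ptlen, iv); // 12-byte IV
 *	err = crypto_wait_req(crypto_aead_encrypt(req), &wait);
 */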

static int crypto_gcm_verify(struct aead_request *req)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	u8 *auth_tag = pctx->auth_tag;
	u8 *iauth_tag = pctx->iauth_tag;
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen - authsize;

	crypto_xor(auth_tag, iauth_tag, 16);
	scatterwalk_map_and_copy(iauth_tag, req->src,
				 req->assoclen + cryptlen, authsize, 0);
	return crypto_memneq(iauth_tag, auth_tag, authsize) ? -EBADMSG : 0;
}

static void gcm_decrypt_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	if (!err)
		err = crypto_gcm_verify(req);

	aead_request_complete(req, err);
}

static int gcm_dec_hash_continue(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct skcipher_request *skreq = &pctx->u.skreq;
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;

	crypto_gcm_init_crypt(req, gctx->cryptlen);
	skcipher_request_set_callback(skreq, flags, gcm_decrypt_done, req);
	return crypto_skcipher_decrypt(skreq) ?: crypto_gcm_verify(req);
}

static int crypto_gcm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen;
	u32 flags = aead_request_flags(req);

	cryptlen -= authsize;

	crypto_gcm_init_common(req);

	gctx->src = sg_next(pctx->src);
	gctx->cryptlen = cryptlen;
	gctx->complete = gcm_dec_hash_continue;

	return gcm_hash(req, flags);
}
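
/*
 * Decryption runs the dataflow in the opposite order from encryption:
 * GHASH is computed over the ciphertext first, the CTR decryption
 * follows in gcm_dec_hash_continue(), and crypto_gcm_verify() then
 * compares the computed tag against the transmitted one using
 * crypto_memneq() so that the comparison leaks no timing information.
 */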

static int crypto_gcm_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct gcm_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_skcipher *ctr;
	struct crypto_ahash *ghash;
	unsigned long align;
	int err;

	ghash = crypto_spawn_ahash(&ictx->ghash);
	if (IS_ERR(ghash))
		return PTR_ERR(ghash);

	ctr = crypto_spawn_skcipher(&ictx->ctr);
	err = PTR_ERR(ctr);
	if (IS_ERR(ctr))
		goto err_free_hash;

	ctx->ctr = ctr;
	ctx->ghash = ghash;

	align = crypto_aead_alignmask(tfm);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	crypto_aead_set_reqsize(tfm,
		align + offsetof(struct crypto_gcm_req_priv_ctx, u) +
		max(sizeof(struct skcipher_request) +
		    crypto_skcipher_reqsize(ctr),
		    sizeof(struct ahash_request) +
		    crypto_ahash_reqsize(ghash)));

	return 0;

err_free_hash:
	crypto_free_ahash(ghash);
	return err;
}

static void crypto_gcm_exit_tfm(struct crypto_aead *tfm)
{
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_ahash(ctx->ghash);
	crypto_free_skcipher(ctx->ctr);
}

static void crypto_gcm_free(struct aead_instance *inst)
{
	struct gcm_instance_ctx *ctx = aead_instance_ctx(inst);

	crypto_drop_skcipher(&ctx->ctr);
	crypto_drop_ahash(&ctx->ghash);
	kfree(inst);
}

static int crypto_gcm_create_common(struct crypto_template *tmpl,
				    struct rtattr **tb,
				    const char *ctr_name,
				    const char *ghash_name)
{
	struct crypto_attr_type *algt;
	u32 mask;
	struct aead_instance *inst;
	struct gcm_instance_ctx *ctx;
	struct skcipher_alg *ctr;
	struct hash_alg_common *ghash;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return -EINVAL;

	mask = crypto_requires_sync(algt->type, algt->mask);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;
	ctx = aead_instance_ctx(inst);

	err = crypto_grab_ahash(&ctx->ghash, aead_crypto_instance(inst),
				ghash_name, 0, mask);
	if (err)
		goto err_free_inst;
	ghash = crypto_spawn_ahash_alg(&ctx->ghash);

	err = -EINVAL;
	if (strcmp(ghash->base.cra_name, "ghash") != 0 ||
	    ghash->digestsize != 16)
		goto err_free_inst;

	err = crypto_grab_skcipher(&ctx->ctr, aead_crypto_instance(inst),
				   ctr_name, 0, mask);
	if (err)
		goto err_free_inst;
	ctr = crypto_spawn_skcipher_alg(&ctx->ctr);

	/* The skcipher algorithm must be CTR mode, using 16-byte blocks. */
	err = -EINVAL;
	if (strncmp(ctr->base.cra_name, "ctr(", 4) != 0 ||
	    crypto_skcipher_alg_ivsize(ctr) != 16 ||
	    ctr->base.cra_blocksize != 1)
		goto err_free_inst;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "gcm(%s", ctr->base.cra_name + 4) >= CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "gcm_base(%s,%s)", ctr->base.cra_driver_name,
		     ghash->base.cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	inst->alg.base.cra_flags = (ghash->base.cra_flags |
				    ctr->base.cra_flags) & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = (ghash->base.cra_priority +
				       ctr->base.cra_priority) / 2;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = ghash->base.cra_alignmask |
				       ctr->base.cra_alignmask;
	inst->alg.base.cra_ctxsize = sizeof(struct crypto_gcm_ctx);
	inst->alg.ivsize = GCM_AES_IV_SIZE;
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(ctr);
	inst->alg.maxauthsize = 16;
	inst->alg.init = crypto_gcm_init_tfm;
	inst->alg.exit = crypto_gcm_exit_tfm;
	inst->alg.setkey = crypto_gcm_setkey;
	inst->alg.setauthsize = crypto_gcm_setauthsize;
	inst->alg.encrypt = crypto_gcm_encrypt;
	inst->alg.decrypt = crypto_gcm_decrypt;

	inst->free = crypto_gcm_free;

	err = aead_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		crypto_gcm_free(inst);
	}
	return err;
}

static int crypto_gcm_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	const char *cipher_name;
	char ctr_name[CRYPTO_MAX_ALG_NAME];

	cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(cipher_name))
		return PTR_ERR(cipher_name);

	if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)", cipher_name) >=
	    CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	return crypto_gcm_create_common(tmpl, tb, ctr_name, "ghash");
}

static int crypto_gcm_base_create(struct crypto_template *tmpl,
				  struct rtattr **tb)
{
	const char *ctr_name;
	const char *ghash_name;

	ctr_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(ctr_name))
		return PTR_ERR(ctr_name);

	ghash_name = crypto_attr_alg_name(tb[2]);
	if (IS_ERR(ghash_name))
		return PTR_ERR(ghash_name);

	return crypto_gcm_create_common(tmpl, tb, ctr_name, ghash_name);
}

static int crypto_rfc4106_setkey(struct crypto_aead *parent, const u8 *key,
				 unsigned int keylen)
{
	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	if (keylen < 4)
		return -EINVAL;

	keylen -= 4;
	memcpy(ctx->nonce, key + keylen, 4);

	crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
				     CRYPTO_TFM_REQ_MASK);
	return crypto_aead_setkey(child, key, keylen);
}

static int crypto_rfc4106_setauthsize(struct crypto_aead *parent,
				      unsigned int authsize)
{
	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(parent);
	int err;

	err = crypto_rfc4106_check_authsize(authsize);
	if (err)
		return err;

	return crypto_aead_setauthsize(ctx->child, authsize);
}

static struct aead_request *crypto_rfc4106_crypt(struct aead_request *req)
{
	struct crypto_rfc4106_req_ctx *rctx = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(aead);
	struct aead_request *subreq = &rctx->subreq;
	struct crypto_aead *child = ctx->child;
	struct scatterlist *sg;
	u8 *iv = PTR_ALIGN((u8 *)(subreq + 1) + crypto_aead_reqsize(child),
			   crypto_aead_alignmask(child) + 1);

	scatterwalk_map_and_copy(iv + GCM_AES_IV_SIZE, req->src, 0, req->assoclen - 8, 0);

	memcpy(iv, ctx->nonce, 4);
	memcpy(iv + 4, req->iv, 8);

	sg_init_table(rctx->src, 3);
	sg_set_buf(rctx->src, iv + GCM_AES_IV_SIZE, req->assoclen - 8);
	sg = scatterwalk_ffwd(rctx->src + 1, req->src, req->assoclen);
	if (sg != rctx->src + 1)
		sg_chain(rctx->src, 2, sg);

	if (req->src != req->dst) {
		sg_init_table(rctx->dst, 3);
		sg_set_buf(rctx->dst, iv + GCM_AES_IV_SIZE, req->assoclen - 8);
		sg = scatterwalk_ffwd(rctx->dst + 1, req->dst, req->assoclen);
		if (sg != rctx->dst + 1)
			sg_chain(rctx->dst, 2, sg);
	}

	aead_request_set_tfm(subreq, child);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, rctx->src,
			       req->src == req->dst ? rctx->src : rctx->dst,
			       req->cryptlen, iv);
	aead_request_set_ad(subreq, req->assoclen - 8);

	return subreq;
}
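
/*
 * RFC 4106 nonce construction as performed above: the 12-byte GCM IV
 * is the 4-byte salt saved from the tail of the key followed by the
 * 8-byte explicit IV carried in the request. The inner request
 * authenticates only the first assoclen - 8 bytes of associated data
 * (the trailing 8 bytes duplicate the explicit IV), which are bounced
 * through the buffer after the IV so they stay contiguous.
 */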

static int crypto_rfc4106_encrypt(struct aead_request *req)
{
	int err;

	err = crypto_ipsec_check_assoclen(req->assoclen);
	if (err)
		return err;

	req = crypto_rfc4106_crypt(req);

	return crypto_aead_encrypt(req);
}

static int crypto_rfc4106_decrypt(struct aead_request *req)
{
	int err;

	err = crypto_ipsec_check_assoclen(req->assoclen);
	if (err)
		return err;

	req = crypto_rfc4106_crypt(req);

	return crypto_aead_decrypt(req);
}

static int crypto_rfc4106_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct crypto_aead_spawn *spawn = aead_instance_ctx(inst);
	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *aead;
	unsigned long align;

	aead = crypto_spawn_aead(spawn);
	if (IS_ERR(aead))
		return PTR_ERR(aead);

	ctx->child = aead;

	align = crypto_aead_alignmask(aead);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	crypto_aead_set_reqsize(
		tfm,
		sizeof(struct crypto_rfc4106_req_ctx) +
		ALIGN(crypto_aead_reqsize(aead), crypto_tfm_ctx_alignment()) +
		align + 24);

	return 0;
}

static void crypto_rfc4106_exit_tfm(struct crypto_aead *tfm)
{
	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
}

static void crypto_rfc4106_free(struct aead_instance *inst)
{
	crypto_drop_aead(aead_instance_ctx(inst));
	kfree(inst);
}

static int crypto_rfc4106_create(struct crypto_template *tmpl,
				 struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	u32 mask;
	struct aead_instance *inst;
	struct crypto_aead_spawn *spawn;
	struct aead_alg *alg;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return -EINVAL;

	mask = crypto_requires_sync(algt->type, algt->mask);

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	spawn = aead_instance_ctx(inst);
	err = crypto_grab_aead(spawn, aead_crypto_instance(inst),
			       crypto_attr_alg_name(tb[1]), 0, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_aead_alg(spawn);

	err = -EINVAL;

	/* Underlying IV size must be 12. */
	if (crypto_aead_alg_ivsize(alg) != GCM_AES_IV_SIZE)
		goto err_free_inst;

	/* Not a stream cipher? */
	if (alg->base.cra_blocksize != 1)
		goto err_free_inst;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4106(%s)", alg->base.cra_name) >=
	    CRYPTO_MAX_ALG_NAME ||
	    snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4106(%s)", alg->base.cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc4106_ctx);

	inst->alg.ivsize = GCM_RFC4106_IV_SIZE;
	inst->alg.chunksize = crypto_aead_alg_chunksize(alg);
	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

	inst->alg.init = crypto_rfc4106_init_tfm;
	inst->alg.exit = crypto_rfc4106_exit_tfm;

	inst->alg.setkey = crypto_rfc4106_setkey;
	inst->alg.setauthsize = crypto_rfc4106_setauthsize;
	inst->alg.encrypt = crypto_rfc4106_encrypt;
	inst->alg.decrypt = crypto_rfc4106_decrypt;

	inst->free = crypto_rfc4106_free;

	err = aead_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		crypto_rfc4106_free(inst);
	}
	return err;
}

static int crypto_rfc4543_setkey(struct crypto_aead *parent, const u8 *key,
				 unsigned int keylen)
{
	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	if (keylen < 4)
		return -EINVAL;

	keylen -= 4;
	memcpy(ctx->nonce, key + keylen, 4);

	crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
				     CRYPTO_TFM_REQ_MASK);
	return crypto_aead_setkey(child, key, keylen);
}

static int crypto_rfc4543_setauthsize(struct crypto_aead *parent,
				      unsigned int authsize)
{
	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(parent);

	if (authsize != 16)
		return -EINVAL;

	return crypto_aead_setauthsize(ctx->child, authsize);
}

static int crypto_rfc4543_crypt(struct aead_request *req, bool enc)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_rfc4543_req_ctx *rctx = aead_request_ctx(req);
	struct aead_request *subreq = &rctx->subreq;
	unsigned int authsize = crypto_aead_authsize(aead);
	u8 *iv = PTR_ALIGN((u8 *)(rctx + 1) + crypto_aead_reqsize(ctx->child),
			   crypto_aead_alignmask(ctx->child) + 1);
	int err;

	if (req->src != req->dst) {
		err = crypto_rfc4543_copy_src_to_dst(req, enc);
		if (err)
			return err;
	}

	memcpy(iv, ctx->nonce, 4);
	memcpy(iv + 4, req->iv, 8);

	aead_request_set_tfm(subreq, ctx->child);
	aead_request_set_callback(subreq, req->base.flags,
				  req->base.complete, req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst,
			       enc ? 0 : authsize, iv);
	aead_request_set_ad(subreq, req->assoclen + req->cryptlen -
				    subreq->cryptlen);

	return enc ? crypto_aead_encrypt(subreq) : crypto_aead_decrypt(subreq);
}
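
/*
 * RFC 4543 (GMAC) is authentication-only: crypto_rfc4543_crypt()
 * above feeds everything, associated data and payload alike, to the
 * inner GCM as AAD (the subrequest's cryptlen is 0 on encryption and
 * just the tag on decryption), so the payload is authenticated but
 * never encrypted.
 */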

static int crypto_rfc4543_copy_src_to_dst(struct aead_request *req, bool enc)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int nbytes = req->assoclen + req->cryptlen -
			      (enc ? 0 : authsize);
	SYNC_SKCIPHER_REQUEST_ON_STACK(nreq, ctx->null);

	skcipher_request_set_sync_tfm(nreq, ctx->null);
	skcipher_request_set_callback(nreq, req->base.flags, NULL, NULL);
	skcipher_request_set_crypt(nreq, req->src, req->dst, nbytes, NULL);

	return crypto_skcipher_encrypt(nreq);
}

static int crypto_rfc4543_encrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       crypto_rfc4543_crypt(req, true);
}

static int crypto_rfc4543_decrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       crypto_rfc4543_crypt(req, false);
}

static int crypto_rfc4543_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct crypto_rfc4543_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_aead_spawn *spawn = &ictx->aead;
	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *aead;
	struct crypto_sync_skcipher *null;
	unsigned long align;
	int err = 0;

	aead = crypto_spawn_aead(spawn);
	if (IS_ERR(aead))
		return PTR_ERR(aead);

	null = crypto_get_default_null_skcipher();
	err = PTR_ERR(null);
	if (IS_ERR(null))
		goto err_free_aead;

	ctx->child = aead;
	ctx->null = null;

	align = crypto_aead_alignmask(aead);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	crypto_aead_set_reqsize(
		tfm,
		sizeof(struct crypto_rfc4543_req_ctx) +
		ALIGN(crypto_aead_reqsize(aead), crypto_tfm_ctx_alignment()) +
		align + GCM_AES_IV_SIZE);

	return 0;

err_free_aead:
	crypto_free_aead(aead);
	return err;
}

static void crypto_rfc4543_exit_tfm(struct crypto_aead *tfm)
{
	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
	crypto_put_default_null_skcipher();
}

static void crypto_rfc4543_free(struct aead_instance *inst)
{
	struct crypto_rfc4543_instance_ctx *ctx = aead_instance_ctx(inst);

	crypto_drop_aead(&ctx->aead);

	kfree(inst);
}

static int crypto_rfc4543_create(struct crypto_template *tmpl,
				 struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	u32 mask;
	struct aead_instance *inst;
	struct aead_alg *alg;
	struct crypto_rfc4543_instance_ctx *ctx;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return -EINVAL;

	mask = crypto_requires_sync(algt->type, algt->mask);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = aead_instance_ctx(inst);
	err = crypto_grab_aead(&ctx->aead, aead_crypto_instance(inst),
			       crypto_attr_alg_name(tb[1]), 0, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_aead_alg(&ctx->aead);

	err = -EINVAL;

	/* Underlying IV size must be 12. */
	if (crypto_aead_alg_ivsize(alg) != GCM_AES_IV_SIZE)
		goto err_free_inst;

	/* Not a stream cipher? */
	if (alg->base.cra_blocksize != 1)
		goto err_free_inst;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4543(%s)", alg->base.cra_name) >=
	    CRYPTO_MAX_ALG_NAME ||
	    snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4543(%s)", alg->base.cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc4543_ctx);

	inst->alg.ivsize = GCM_RFC4543_IV_SIZE;
	inst->alg.chunksize = crypto_aead_alg_chunksize(alg);
	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

	inst->alg.init = crypto_rfc4543_init_tfm;
	inst->alg.exit = crypto_rfc4543_exit_tfm;

	inst->alg.setkey = crypto_rfc4543_setkey;
	inst->alg.setauthsize = crypto_rfc4543_setauthsize;
	inst->alg.encrypt = crypto_rfc4543_encrypt;
	inst->alg.decrypt = crypto_rfc4543_decrypt;

	inst->free = crypto_rfc4543_free;

	err = aead_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		crypto_rfc4543_free(inst);
	}
	return err;
}

static struct crypto_template crypto_gcm_tmpls[] = {
	{
		.name = "gcm_base",
		.create = crypto_gcm_base_create,
		.module = THIS_MODULE,
	}, {
		.name = "gcm",
		.create = crypto_gcm_create,
		.module = THIS_MODULE,
	}, {
		.name = "rfc4106",
		.create = crypto_rfc4106_create,
		.module = THIS_MODULE,
	}, {
		.name = "rfc4543",
		.create = crypto_rfc4543_create,
		.module = THIS_MODULE,
	},
};
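
/*
 * Typical instantiations of the templates above (names are examples):
 * "gcm(aes)", "gcm_base(ctr(aes),ghash)", "rfc4106(gcm(aes))" and
 * "rfc4543(gcm(aes))".
 */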

static int __init crypto_gcm_module_init(void)
{
	int err;

	gcm_zeroes = kzalloc(sizeof(*gcm_zeroes), GFP_KERNEL);
	if (!gcm_zeroes)
		return -ENOMEM;

	sg_init_one(&gcm_zeroes->sg, gcm_zeroes->buf, sizeof(gcm_zeroes->buf));

	err = crypto_register_templates(crypto_gcm_tmpls,
					ARRAY_SIZE(crypto_gcm_tmpls));
	if (err)
		kfree(gcm_zeroes);

	return err;
}

static void __exit crypto_gcm_module_exit(void)
{
	kfree(gcm_zeroes);
	crypto_unregister_templates(crypto_gcm_tmpls,
				    ARRAY_SIZE(crypto_gcm_tmpls));
}

subsys_initcall(crypto_gcm_module_init);
module_exit(crypto_gcm_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Galois/Counter Mode");
MODULE_AUTHOR("Mikko Herranen <mh1@iki.fi>");
MODULE_ALIAS_CRYPTO("gcm_base");
MODULE_ALIAS_CRYPTO("rfc4106");
MODULE_ALIAS_CRYPTO("rfc4543");
MODULE_ALIAS_CRYPTO("gcm");