// SPDX-License-Identifier: GPL-2.0-only
/*
 * GCM: Galois/Counter Mode.
 *
 * Copyright (c) 2007 Nokia Siemens Networks - Mikko Herranen <mh1@iki.fi>
 */

#include <crypto/gf128mul.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/internal/hash.h>
#include <crypto/null.h>
#include <crypto/scatterwalk.h>
#include <crypto/gcm.h>
#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>

struct gcm_instance_ctx {
	struct crypto_skcipher_spawn ctr;
	struct crypto_ahash_spawn ghash;
};

struct crypto_gcm_ctx {
	struct crypto_skcipher *ctr;
	struct crypto_ahash *ghash;
};

struct crypto_rfc4106_ctx {
	struct crypto_aead *child;
	u8 nonce[4];
};

struct crypto_rfc4106_req_ctx {
	struct scatterlist src[3];
	struct scatterlist dst[3];
	struct aead_request subreq;
};

struct crypto_rfc4543_instance_ctx {
	struct crypto_aead_spawn aead;
};

struct crypto_rfc4543_ctx {
	struct crypto_aead *child;
	struct crypto_sync_skcipher *null;
	u8 nonce[4];
};

struct crypto_rfc4543_req_ctx {
	struct aead_request subreq;
};

struct crypto_gcm_ghash_ctx {
	unsigned int cryptlen;
	struct scatterlist *src;
	int (*complete)(struct aead_request *req, u32 flags);
};

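/*
 * Per-request context.  auth_tag doubles as the first 16 bytes of the
 * CTR input/output: it starts out zeroed, so the first counter block
 * leaves the keystream block E_K(J0) behind in auth_tag, ready to be
 * XORed with the GHASH result to form the final tag.
 */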
struct crypto_gcm_req_priv_ctx {
	u8 iv[16];
	u8 auth_tag[16];
	u8 iauth_tag[16];
	struct scatterlist src[3];
	struct scatterlist dst[3];
	struct scatterlist sg;
	struct crypto_gcm_ghash_ctx ghash_ctx;
	union {
		struct ahash_request ahreq;
		struct skcipher_request skreq;
	} u;
};

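/* A shared 16-byte block of zeroes, used to pad GHASH input to a full block. */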
static struct {
	u8 buf[16];
	struct scatterlist sg;
} *gcm_zeroes;

static int crypto_rfc4543_copy_src_to_dst(struct aead_request *req, bool enc);

static inline struct crypto_gcm_req_priv_ctx *crypto_gcm_reqctx(
	struct aead_request *req)
{
	unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req));

	return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
}

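/*
 * Derive the GHASH subkey H = E_K(0^128): run the CTR cipher over a
 * zeroed block with a zeroed counter (the keystream XORed with zeroes
 * is E_K(0)), then key the ghash transform with the result.
 */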
static int crypto_gcm_setkey(struct crypto_aead *aead, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_ahash *ghash = ctx->ghash;
	struct crypto_skcipher *ctr = ctx->ctr;
	struct {
		be128 hash;
		u8 iv[16];

		struct crypto_wait wait;

		struct scatterlist sg[1];
		struct skcipher_request req;
	} *data;
	int err;

	crypto_skcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(ctr, key, keylen);
	if (err)
		return err;

	data = kzalloc(sizeof(*data) + crypto_skcipher_reqsize(ctr),
		       GFP_KERNEL);
	if (!data)
		return -ENOMEM;

	crypto_init_wait(&data->wait);
	sg_init_one(data->sg, &data->hash, sizeof(data->hash));
	skcipher_request_set_tfm(&data->req, ctr);
	skcipher_request_set_callback(&data->req, CRYPTO_TFM_REQ_MAY_SLEEP |
						  CRYPTO_TFM_REQ_MAY_BACKLOG,
				      crypto_req_done,
				      &data->wait);
	skcipher_request_set_crypt(&data->req, data->sg, data->sg,
				   sizeof(data->hash), data->iv);

	err = crypto_wait_req(crypto_skcipher_encrypt(&data->req),
			      &data->wait);

	if (err)
		goto out;

	crypto_ahash_clear_flags(ghash, CRYPTO_TFM_REQ_MASK);
	crypto_ahash_set_flags(ghash, crypto_aead_get_flags(aead) &
			       CRYPTO_TFM_REQ_MASK);
	err = crypto_ahash_setkey(ghash, (u8 *)&data->hash, sizeof(be128));
out:
	kfree_sensitive(data);
	return err;
}

static int crypto_gcm_setauthsize(struct crypto_aead *tfm,
				  unsigned int authsize)
{
	return crypto_gcm_check_authsize(authsize);
}

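/*
 * Set up the initial counter block J0 = IV || 0^31 || 1 and build
 * scatterlists that prepend the 16-byte auth_tag slot to src/dst, so
 * the CTR pass emits E_K(J0) into auth_tag ahead of the payload.
 */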
static void crypto_gcm_init_common(struct aead_request *req)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	__be32 counter = cpu_to_be32(1);
	struct scatterlist *sg;

	memset(pctx->auth_tag, 0, sizeof(pctx->auth_tag));
	memcpy(pctx->iv, req->iv, GCM_AES_IV_SIZE);
	memcpy(pctx->iv + GCM_AES_IV_SIZE, &counter, 4);

	sg_init_table(pctx->src, 3);
	sg_set_buf(pctx->src, pctx->auth_tag, sizeof(pctx->auth_tag));
	sg = scatterwalk_ffwd(pctx->src + 1, req->src, req->assoclen);
	if (sg != pctx->src + 1)
		sg_chain(pctx->src, 2, sg);

	if (req->src != req->dst) {
		sg_init_table(pctx->dst, 3);
		sg_set_buf(pctx->dst, pctx->auth_tag, sizeof(pctx->auth_tag));
		sg = scatterwalk_ffwd(pctx->dst + 1, req->dst, req->assoclen);
		if (sg != pctx->dst + 1)
			sg_chain(pctx->dst, 2, sg);
	}
}

static void crypto_gcm_init_crypt(struct aead_request *req,
				  unsigned int cryptlen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct skcipher_request *skreq = &pctx->u.skreq;
	struct scatterlist *dst;

	dst = req->src == req->dst ? pctx->src : pctx->dst;

	skcipher_request_set_tfm(skreq, ctx->ctr);
	skcipher_request_set_crypt(skreq, pctx->src, dst,
				   cryptlen + sizeof(pctx->auth_tag),
				   pctx->iv);
}

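/* Bytes of zero padding needed to bring len up to a 16-byte GHASH block. */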
static inline unsigned int gcm_remain(unsigned int len)
{
	len &= 0xfU;
	return len ? 16 - len : 0;
}

static void gcm_hash_len_done(struct crypto_async_request *areq, int err);

static int gcm_hash_update(struct aead_request *req,
			   crypto_completion_t compl,
			   struct scatterlist *src,
			   unsigned int len, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct ahash_request *ahreq = &pctx->u.ahreq;

	ahash_request_set_callback(ahreq, flags, compl, req);
	ahash_request_set_crypt(ahreq, src, NULL, len);

	return crypto_ahash_update(ahreq);
}

static int gcm_hash_remain(struct aead_request *req,
			   unsigned int remain,
			   crypto_completion_t compl, u32 flags)
{
	return gcm_hash_update(req, compl, &gcm_zeroes->sg, remain, flags);
}

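/*
 * Feed GHASH its final block: the bit lengths of the AAD and the
 * ciphertext as two 64-bit big-endian values.
 */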
static int gcm_hash_len(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct ahash_request *ahreq = &pctx->u.ahreq;
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
	be128 lengths;

	lengths.a = cpu_to_be64(req->assoclen * 8);
	lengths.b = cpu_to_be64(gctx->cryptlen * 8);
	memcpy(pctx->iauth_tag, &lengths, 16);
	sg_init_one(&pctx->sg, pctx->iauth_tag, 16);
	ahash_request_set_callback(ahreq, flags, gcm_hash_len_done, req);
	ahash_request_set_crypt(ahreq, &pctx->sg,
				pctx->iauth_tag, sizeof(lengths));

	return crypto_ahash_finup(ahreq);
}

static int gcm_hash_len_continue(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;

	return gctx->complete(req, flags);
}

static void gcm_hash_len_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	if (err)
		goto out;

	err = gcm_hash_len_continue(req, 0);
	if (err == -EINPROGRESS)
		return;

out:
	aead_request_complete(req, err);
}

static int gcm_hash_crypt_remain_continue(struct aead_request *req, u32 flags)
{
	return gcm_hash_len(req, flags) ?:
	       gcm_hash_len_continue(req, flags);
}

static void gcm_hash_crypt_remain_done(struct crypto_async_request *areq,
				       int err)
{
	struct aead_request *req = areq->data;

	if (err)
		goto out;

	err = gcm_hash_crypt_remain_continue(req, 0);
	if (err == -EINPROGRESS)
		return;

out:
	aead_request_complete(req, err);
}

static int gcm_hash_crypt_continue(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
	unsigned int remain;

	remain = gcm_remain(gctx->cryptlen);
	if (remain)
		return gcm_hash_remain(req, remain,
				       gcm_hash_crypt_remain_done, flags) ?:
		       gcm_hash_crypt_remain_continue(req, flags);

	return gcm_hash_crypt_remain_continue(req, flags);
}

static void gcm_hash_crypt_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	if (err)
		goto out;

	err = gcm_hash_crypt_continue(req, 0);
	if (err == -EINPROGRESS)
		return;

out:
	aead_request_complete(req, err);
}

static int gcm_hash_assoc_remain_continue(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;

	if (gctx->cryptlen)
		return gcm_hash_update(req, gcm_hash_crypt_done,
				       gctx->src, gctx->cryptlen, flags) ?:
		       gcm_hash_crypt_continue(req, flags);

	return gcm_hash_crypt_remain_continue(req, flags);
}

static void gcm_hash_assoc_remain_done(struct crypto_async_request *areq,
				       int err)
{
	struct aead_request *req = areq->data;

	if (err)
		goto out;

	err = gcm_hash_assoc_remain_continue(req, 0);
	if (err == -EINPROGRESS)
		return;

out:
	aead_request_complete(req, err);
}

static int gcm_hash_assoc_continue(struct aead_request *req, u32 flags)
{
	unsigned int remain;

	remain = gcm_remain(req->assoclen);
	if (remain)
		return gcm_hash_remain(req, remain,
				       gcm_hash_assoc_remain_done, flags) ?:
		       gcm_hash_assoc_remain_continue(req, flags);

	return gcm_hash_assoc_remain_continue(req, flags);
}

static void gcm_hash_assoc_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	if (err)
		goto out;

	err = gcm_hash_assoc_continue(req, 0);
	if (err == -EINPROGRESS)
		return;

out:
	aead_request_complete(req, err);
}

static int gcm_hash_init_continue(struct aead_request *req, u32 flags)
{
	if (req->assoclen)
		return gcm_hash_update(req, gcm_hash_assoc_done,
				       req->src, req->assoclen, flags) ?:
		       gcm_hash_assoc_continue(req, flags);

	return gcm_hash_assoc_remain_continue(req, flags);
}

static void gcm_hash_init_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	if (err)
		goto out;

	err = gcm_hash_init_continue(req, 0);
	if (err == -EINPROGRESS)
		return;

out:
	aead_request_complete(req, err);
}

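/*
 * Drive the GHASH computation in continuation-passing style: each
 * stage (AAD, AAD padding, ciphertext, ciphertext padding, length
 * block) either completes synchronously and falls through to the next
 * *_continue helper, or resumes from its *_done callback once the
 * async hash finishes.
 */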
static int gcm_hash(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct ahash_request *ahreq = &pctx->u.ahreq;
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));

	ahash_request_set_tfm(ahreq, ctx->ghash);

	ahash_request_set_callback(ahreq, flags, gcm_hash_init_done, req);
	return crypto_ahash_init(ahreq) ?:
	       gcm_hash_init_continue(req, flags);
}

static int gcm_enc_copy_hash(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	u8 *auth_tag = pctx->auth_tag;

	crypto_xor(auth_tag, pctx->iauth_tag, 16);
	scatterwalk_map_and_copy(auth_tag, req->dst,
				 req->assoclen + req->cryptlen,
				 crypto_aead_authsize(aead), 1);
	return 0;
}

static int gcm_encrypt_continue(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;

	gctx->src = sg_next(req->src == req->dst ? pctx->src : pctx->dst);
	gctx->cryptlen = req->cryptlen;
	gctx->complete = gcm_enc_copy_hash;

	return gcm_hash(req, flags);
}

static void gcm_encrypt_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	if (err)
		goto out;

	err = gcm_encrypt_continue(req, 0);
	if (err == -EINPROGRESS)
		return;

out:
	aead_request_complete(req, err);
}

static int crypto_gcm_encrypt(struct aead_request *req)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct skcipher_request *skreq = &pctx->u.skreq;
	u32 flags = aead_request_flags(req);

	crypto_gcm_init_common(req);
	crypto_gcm_init_crypt(req, req->cryptlen);
	skcipher_request_set_callback(skreq, flags, gcm_encrypt_done, req);

	return crypto_skcipher_encrypt(skreq) ?:
	       gcm_encrypt_continue(req, flags);
}

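/*
 * Compare the computed tag against the one in the request with
 * crypto_memneq() so the check runs in constant time.
 */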
static int crypto_gcm_verify(struct aead_request *req)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	u8 *auth_tag = pctx->auth_tag;
	u8 *iauth_tag = pctx->iauth_tag;
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen - authsize;

	crypto_xor(auth_tag, iauth_tag, 16);
	scatterwalk_map_and_copy(iauth_tag, req->src,
				 req->assoclen + cryptlen, authsize, 0);
	return crypto_memneq(iauth_tag, auth_tag, authsize) ? -EBADMSG : 0;
}

static void gcm_decrypt_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	if (!err)
		err = crypto_gcm_verify(req);

	aead_request_complete(req, err);
}

static int gcm_dec_hash_continue(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct skcipher_request *skreq = &pctx->u.skreq;
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;

	crypto_gcm_init_crypt(req, gctx->cryptlen);
	skcipher_request_set_callback(skreq, flags, gcm_decrypt_done, req);
	return crypto_skcipher_decrypt(skreq) ?: crypto_gcm_verify(req);
}

static int crypto_gcm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen;
	u32 flags = aead_request_flags(req);

	cryptlen -= authsize;

	crypto_gcm_init_common(req);

	gctx->src = sg_next(pctx->src);
	gctx->cryptlen = cryptlen;
	gctx->complete = gcm_dec_hash_continue;

	return gcm_hash(req, flags);
}

static int crypto_gcm_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct gcm_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_skcipher *ctr;
	struct crypto_ahash *ghash;
	unsigned long align;
	int err;

	ghash = crypto_spawn_ahash(&ictx->ghash);
	if (IS_ERR(ghash))
		return PTR_ERR(ghash);

	ctr = crypto_spawn_skcipher(&ictx->ctr);
	err = PTR_ERR(ctr);
	if (IS_ERR(ctr))
		goto err_free_hash;

	ctx->ctr = ctr;
	ctx->ghash = ghash;

	align = crypto_aead_alignmask(tfm);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	crypto_aead_set_reqsize(tfm,
		align + offsetof(struct crypto_gcm_req_priv_ctx, u) +
		max(sizeof(struct skcipher_request) +
		    crypto_skcipher_reqsize(ctr),
		    sizeof(struct ahash_request) +
		    crypto_ahash_reqsize(ghash)));

	return 0;

err_free_hash:
	crypto_free_ahash(ghash);
	return err;
}

static void crypto_gcm_exit_tfm(struct crypto_aead *tfm)
{
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_ahash(ctx->ghash);
	crypto_free_skcipher(ctx->ctr);
}

static void crypto_gcm_free(struct aead_instance *inst)
{
	struct gcm_instance_ctx *ctx = aead_instance_ctx(inst);

	crypto_drop_skcipher(&ctx->ctr);
	crypto_drop_ahash(&ctx->ghash);
	kfree(inst);
}

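/*
 * Shared constructor for the "gcm" and "gcm_base" templates: grab the
 * ghash and ctr spawns, sanity-check them, and register the composed
 * AEAD instance.
 */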
static int crypto_gcm_create_common(struct crypto_template *tmpl,
				    struct rtattr **tb,
				    const char *ctr_name,
				    const char *ghash_name)
{
	u32 mask;
	struct aead_instance *inst;
	struct gcm_instance_ctx *ctx;
	struct skcipher_alg *ctr;
	struct hash_alg_common *ghash;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_AEAD, &mask);
	if (err)
		return err;

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;
	ctx = aead_instance_ctx(inst);

	err = crypto_grab_ahash(&ctx->ghash, aead_crypto_instance(inst),
				ghash_name, 0, mask);
	if (err)
		goto err_free_inst;
	ghash = crypto_spawn_ahash_alg(&ctx->ghash);

	err = -EINVAL;
	if (strcmp(ghash->base.cra_name, "ghash") != 0 ||
	    ghash->digestsize != 16)
		goto err_free_inst;

	err = crypto_grab_skcipher(&ctx->ctr, aead_crypto_instance(inst),
				   ctr_name, 0, mask);
	if (err)
		goto err_free_inst;
	ctr = crypto_spawn_skcipher_alg(&ctx->ctr);

	/* The skcipher algorithm must be CTR mode, using 16-byte blocks. */
	err = -EINVAL;
	if (strncmp(ctr->base.cra_name, "ctr(", 4) != 0 ||
	    crypto_skcipher_alg_ivsize(ctr) != 16 ||
	    ctr->base.cra_blocksize != 1)
		goto err_free_inst;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "gcm(%s", ctr->base.cra_name + 4) >= CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "gcm_base(%s,%s)", ctr->base.cra_driver_name,
		     ghash->base.cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	inst->alg.base.cra_priority = (ghash->base.cra_priority +
				       ctr->base.cra_priority) / 2;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = ghash->base.cra_alignmask |
				       ctr->base.cra_alignmask;
	inst->alg.base.cra_ctxsize = sizeof(struct crypto_gcm_ctx);
	inst->alg.ivsize = GCM_AES_IV_SIZE;
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(ctr);
	inst->alg.maxauthsize = 16;
	inst->alg.init = crypto_gcm_init_tfm;
	inst->alg.exit = crypto_gcm_exit_tfm;
	inst->alg.setkey = crypto_gcm_setkey;
	inst->alg.setauthsize = crypto_gcm_setauthsize;
	inst->alg.encrypt = crypto_gcm_encrypt;
	inst->alg.decrypt = crypto_gcm_decrypt;

	inst->free = crypto_gcm_free;

	err = aead_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		crypto_gcm_free(inst);
	}
	return err;
}

static int crypto_gcm_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	const char *cipher_name;
	char ctr_name[CRYPTO_MAX_ALG_NAME];

	cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(cipher_name))
		return PTR_ERR(cipher_name);

	if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)", cipher_name) >=
	    CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	return crypto_gcm_create_common(tmpl, tb, ctr_name, "ghash");
}

static int crypto_gcm_base_create(struct crypto_template *tmpl,
				  struct rtattr **tb)
{
	const char *ctr_name;
	const char *ghash_name;

	ctr_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(ctr_name))
		return PTR_ERR(ctr_name);

	ghash_name = crypto_attr_alg_name(tb[2]);
	if (IS_ERR(ghash_name))
		return PTR_ERR(ghash_name);

	return crypto_gcm_create_common(tmpl, tb, ctr_name, ghash_name);
}

static int crypto_rfc4106_setkey(struct crypto_aead *parent, const u8 *key,
				 unsigned int keylen)
{
	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	if (keylen < 4)
		return -EINVAL;

	keylen -= 4;
	memcpy(ctx->nonce, key + keylen, 4);

	crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
				     CRYPTO_TFM_REQ_MASK);
	return crypto_aead_setkey(child, key, keylen);
}

static int crypto_rfc4106_setauthsize(struct crypto_aead *parent,
				      unsigned int authsize)
{
	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(parent);
	int err;

	err = crypto_rfc4106_check_authsize(authsize);
	if (err)
		return err;

	return crypto_aead_setauthsize(ctx->child, authsize);
}

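/*
 * Build the inner gcm request for RFC 4106: the 12-byte GCM nonce is
 * the 4-byte salt taken from the key followed by the 8-byte explicit
 * IV, and the last 8 bytes of the request's AAD area are not passed
 * down as authenticated data (the subrequest uses assoclen - 8).
 */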
static struct aead_request *crypto_rfc4106_crypt(struct aead_request *req)
{
	struct crypto_rfc4106_req_ctx *rctx = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(aead);
	struct aead_request *subreq = &rctx->subreq;
	struct crypto_aead *child = ctx->child;
	struct scatterlist *sg;
	u8 *iv = PTR_ALIGN((u8 *)(subreq + 1) + crypto_aead_reqsize(child),
			   crypto_aead_alignmask(child) + 1);

	scatterwalk_map_and_copy(iv + GCM_AES_IV_SIZE, req->src, 0, req->assoclen - 8, 0);

	memcpy(iv, ctx->nonce, 4);
	memcpy(iv + 4, req->iv, 8);

	sg_init_table(rctx->src, 3);
	sg_set_buf(rctx->src, iv + GCM_AES_IV_SIZE, req->assoclen - 8);
	sg = scatterwalk_ffwd(rctx->src + 1, req->src, req->assoclen);
	if (sg != rctx->src + 1)
		sg_chain(rctx->src, 2, sg);

	if (req->src != req->dst) {
		sg_init_table(rctx->dst, 3);
		sg_set_buf(rctx->dst, iv + GCM_AES_IV_SIZE, req->assoclen - 8);
		sg = scatterwalk_ffwd(rctx->dst + 1, req->dst, req->assoclen);
		if (sg != rctx->dst + 1)
			sg_chain(rctx->dst, 2, sg);
	}

	aead_request_set_tfm(subreq, child);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, rctx->src,
			       req->src == req->dst ? rctx->src : rctx->dst,
			       req->cryptlen, iv);
	aead_request_set_ad(subreq, req->assoclen - 8);

	return subreq;
}

static int crypto_rfc4106_encrypt(struct aead_request *req)
{
	int err;

	err = crypto_ipsec_check_assoclen(req->assoclen);
	if (err)
		return err;

	req = crypto_rfc4106_crypt(req);

	return crypto_aead_encrypt(req);
}

static int crypto_rfc4106_decrypt(struct aead_request *req)
{
	int err;

	err = crypto_ipsec_check_assoclen(req->assoclen);
	if (err)
		return err;

	req = crypto_rfc4106_crypt(req);

	return crypto_aead_decrypt(req);
}

static int crypto_rfc4106_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct crypto_aead_spawn *spawn = aead_instance_ctx(inst);
	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *aead;
	unsigned long align;

	aead = crypto_spawn_aead(spawn);
	if (IS_ERR(aead))
		return PTR_ERR(aead);

	ctx->child = aead;

	align = crypto_aead_alignmask(aead);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	crypto_aead_set_reqsize(
		tfm,
		sizeof(struct crypto_rfc4106_req_ctx) +
		ALIGN(crypto_aead_reqsize(aead), crypto_tfm_ctx_alignment()) +
		align + 24);

	return 0;
}

static void crypto_rfc4106_exit_tfm(struct crypto_aead *tfm)
{
	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
}

static void crypto_rfc4106_free(struct aead_instance *inst)
{
	crypto_drop_aead(aead_instance_ctx(inst));
	kfree(inst);
}

static int crypto_rfc4106_create(struct crypto_template *tmpl,
				 struct rtattr **tb)
{
	u32 mask;
	struct aead_instance *inst;
	struct crypto_aead_spawn *spawn;
	struct aead_alg *alg;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_AEAD, &mask);
	if (err)
		return err;

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	spawn = aead_instance_ctx(inst);
	err = crypto_grab_aead(spawn, aead_crypto_instance(inst),
			       crypto_attr_alg_name(tb[1]), 0, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_aead_alg(spawn);

	err = -EINVAL;

	/* Underlying IV size must be 12. */
	if (crypto_aead_alg_ivsize(alg) != GCM_AES_IV_SIZE)
		goto err_free_inst;

	/* Not a stream cipher? */
	if (alg->base.cra_blocksize != 1)
		goto err_free_inst;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4106(%s)", alg->base.cra_name) >=
	    CRYPTO_MAX_ALG_NAME ||
	    snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4106(%s)", alg->base.cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc4106_ctx);

	inst->alg.ivsize = GCM_RFC4106_IV_SIZE;
	inst->alg.chunksize = crypto_aead_alg_chunksize(alg);
	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

	inst->alg.init = crypto_rfc4106_init_tfm;
	inst->alg.exit = crypto_rfc4106_exit_tfm;

	inst->alg.setkey = crypto_rfc4106_setkey;
	inst->alg.setauthsize = crypto_rfc4106_setauthsize;
	inst->alg.encrypt = crypto_rfc4106_encrypt;
	inst->alg.decrypt = crypto_rfc4106_decrypt;

	inst->free = crypto_rfc4106_free;

	err = aead_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		crypto_rfc4106_free(inst);
	}
	return err;
}

static int crypto_rfc4543_setkey(struct crypto_aead *parent, const u8 *key,
				 unsigned int keylen)
{
	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	if (keylen < 4)
		return -EINVAL;

	keylen -= 4;
	memcpy(ctx->nonce, key + keylen, 4);

	crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
				     CRYPTO_TFM_REQ_MASK);
	return crypto_aead_setkey(child, key, keylen);
}

static int crypto_rfc4543_setauthsize(struct crypto_aead *parent,
				      unsigned int authsize)
{
	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(parent);

	if (authsize != 16)
		return -EINVAL;

	return crypto_aead_setauthsize(ctx->child, authsize);
}

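/*
 * RFC 4543 (GMAC): everything is authenticated and nothing is
 * encrypted, so the whole payload is handed to the child gcm as AAD
 * and the child's cryptlen covers only the tag (0 on encrypt, the
 * authsize on decrypt).
 */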
static int crypto_rfc4543_crypt(struct aead_request *req, bool enc)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_rfc4543_req_ctx *rctx = aead_request_ctx(req);
	struct aead_request *subreq = &rctx->subreq;
	unsigned int authsize = crypto_aead_authsize(aead);
	u8 *iv = PTR_ALIGN((u8 *)(rctx + 1) + crypto_aead_reqsize(ctx->child),
			   crypto_aead_alignmask(ctx->child) + 1);
	int err;

	if (req->src != req->dst) {
		err = crypto_rfc4543_copy_src_to_dst(req, enc);
		if (err)
			return err;
	}

	memcpy(iv, ctx->nonce, 4);
	memcpy(iv + 4, req->iv, 8);

	aead_request_set_tfm(subreq, ctx->child);
	aead_request_set_callback(subreq, req->base.flags,
				  req->base.complete, req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst,
			       enc ? 0 : authsize, iv);
	aead_request_set_ad(subreq, req->assoclen + req->cryptlen -
				    subreq->cryptlen);

	return enc ? crypto_aead_encrypt(subreq) : crypto_aead_decrypt(subreq);
}

static int crypto_rfc4543_copy_src_to_dst(struct aead_request *req, bool enc)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int nbytes = req->assoclen + req->cryptlen -
			      (enc ? 0 : authsize);
	SYNC_SKCIPHER_REQUEST_ON_STACK(nreq, ctx->null);

	skcipher_request_set_sync_tfm(nreq, ctx->null);
	skcipher_request_set_callback(nreq, req->base.flags, NULL, NULL);
	skcipher_request_set_crypt(nreq, req->src, req->dst, nbytes, NULL);

	return crypto_skcipher_encrypt(nreq);
}

static int crypto_rfc4543_encrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       crypto_rfc4543_crypt(req, true);
}

static int crypto_rfc4543_decrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       crypto_rfc4543_crypt(req, false);
}

static int crypto_rfc4543_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct crypto_rfc4543_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_aead_spawn *spawn = &ictx->aead;
	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *aead;
	struct crypto_sync_skcipher *null;
	unsigned long align;
	int err = 0;

	aead = crypto_spawn_aead(spawn);
	if (IS_ERR(aead))
		return PTR_ERR(aead);

	null = crypto_get_default_null_skcipher();
	err = PTR_ERR(null);
	if (IS_ERR(null))
		goto err_free_aead;

	ctx->child = aead;
	ctx->null = null;

	align = crypto_aead_alignmask(aead);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	crypto_aead_set_reqsize(
		tfm,
		sizeof(struct crypto_rfc4543_req_ctx) +
		ALIGN(crypto_aead_reqsize(aead), crypto_tfm_ctx_alignment()) +
		align + GCM_AES_IV_SIZE);

	return 0;

err_free_aead:
	crypto_free_aead(aead);
	return err;
}

static void crypto_rfc4543_exit_tfm(struct crypto_aead *tfm)
{
	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
	crypto_put_default_null_skcipher();
}

static void crypto_rfc4543_free(struct aead_instance *inst)
{
	struct crypto_rfc4543_instance_ctx *ctx = aead_instance_ctx(inst);

	crypto_drop_aead(&ctx->aead);

	kfree(inst);
}

static int crypto_rfc4543_create(struct crypto_template *tmpl,
				 struct rtattr **tb)
{
	u32 mask;
	struct aead_instance *inst;
	struct aead_alg *alg;
	struct crypto_rfc4543_instance_ctx *ctx;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_AEAD, &mask);
	if (err)
		return err;

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = aead_instance_ctx(inst);
	err = crypto_grab_aead(&ctx->aead, aead_crypto_instance(inst),
			       crypto_attr_alg_name(tb[1]), 0, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_aead_alg(&ctx->aead);

	err = -EINVAL;

	/* Underlying IV size must be 12. */
	if (crypto_aead_alg_ivsize(alg) != GCM_AES_IV_SIZE)
		goto err_free_inst;

	/* Not a stream cipher? */
	if (alg->base.cra_blocksize != 1)
		goto err_free_inst;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4543(%s)", alg->base.cra_name) >=
	    CRYPTO_MAX_ALG_NAME ||
	    snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4543(%s)", alg->base.cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc4543_ctx);

	inst->alg.ivsize = GCM_RFC4543_IV_SIZE;
	inst->alg.chunksize = crypto_aead_alg_chunksize(alg);
	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

	inst->alg.init = crypto_rfc4543_init_tfm;
	inst->alg.exit = crypto_rfc4543_exit_tfm;

	inst->alg.setkey = crypto_rfc4543_setkey;
	inst->alg.setauthsize = crypto_rfc4543_setauthsize;
	inst->alg.encrypt = crypto_rfc4543_encrypt;
	inst->alg.decrypt = crypto_rfc4543_decrypt;

	inst->free = crypto_rfc4543_free;

	err = aead_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		crypto_rfc4543_free(inst);
	}
	return err;
}

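/*
 * Usage sketch (not part of this file): encrypting a buffer with the
 * "gcm(aes)" instance through the AEAD API.  The buffer names, lengths
 * and the in-place layout (assoc data, then plaintext, then room for
 * the tag) are illustrative assumptions; error handling is omitted.
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
 *	struct aead_request *req;
 *	struct scatterlist sg;
 *	DECLARE_CRYPTO_WAIT(wait);
 *	int err;
 *
 *	crypto_aead_setkey(tfm, key, 16);		// AES-128 key
 *	crypto_aead_setauthsize(tfm, 16);		// full 16-byte tag
 *	req = aead_request_alloc(tfm, GFP_KERNEL);
 *	// buf holds assoclen AAD bytes, then ptlen plaintext bytes,
 *	// then 16 spare bytes that will receive the tag
 *	sg_init_one(&sg, buf, assoclen + ptlen + 16);
 *	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
 *				  crypto_req_done, &wait);
 *	aead_request_set_ad(req, assoclen);
 *	aead_request_set_crypt(req, &sg, &sg, ptlen, iv);  // 12-byte iv
 *	err = crypto_wait_req(crypto_aead_encrypt(req), &wait);
 */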
static struct crypto_template crypto_gcm_tmpls[] = {
	{
		.name = "gcm_base",
		.create = crypto_gcm_base_create,
		.module = THIS_MODULE,
	}, {
		.name = "gcm",
		.create = crypto_gcm_create,
		.module = THIS_MODULE,
	}, {
		.name = "rfc4106",
		.create = crypto_rfc4106_create,
		.module = THIS_MODULE,
	}, {
		.name = "rfc4543",
		.create = crypto_rfc4543_create,
		.module = THIS_MODULE,
	},
};

static int __init crypto_gcm_module_init(void)
{
	int err;

	gcm_zeroes = kzalloc(sizeof(*gcm_zeroes), GFP_KERNEL);
	if (!gcm_zeroes)
		return -ENOMEM;

	sg_init_one(&gcm_zeroes->sg, gcm_zeroes->buf, sizeof(gcm_zeroes->buf));

	err = crypto_register_templates(crypto_gcm_tmpls,
					ARRAY_SIZE(crypto_gcm_tmpls));
	if (err)
		kfree(gcm_zeroes);

	return err;
}

static void __exit crypto_gcm_module_exit(void)
{
	kfree(gcm_zeroes);
	crypto_unregister_templates(crypto_gcm_tmpls,
				    ARRAY_SIZE(crypto_gcm_tmpls));
}

subsys_initcall(crypto_gcm_module_init);
module_exit(crypto_gcm_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Galois/Counter Mode");
MODULE_AUTHOR("Mikko Herranen <mh1@iki.fi>");
MODULE_ALIAS_CRYPTO("gcm_base");
MODULE_ALIAS_CRYPTO("rfc4106");
MODULE_ALIAS_CRYPTO("rfc4543");
MODULE_ALIAS_CRYPTO("gcm");