]> git.proxmox.com Git - mirror_ubuntu-zesty-kernel.git/blame - crypto/gcm.c
[CRYPTO] gcm: Allow block cipher parameter
[mirror_ubuntu-zesty-kernel.git] / crypto / gcm.c
CommitLineData
28db8e3e
MH
1/*
2 * GCM: Galois/Counter Mode.
3 *
4 * Copyright (c) 2007 Nokia Siemens Networks - Mikko Herranen <mh1@iki.fi>
5 *
6 * This program is free software; you can redistribute it and/or modify it
7 * under the terms of the GNU General Public License version 2 as published
8 * by the Free Software Foundation.
9 */
10
11#include <crypto/algapi.h>
12#include <crypto/gf128mul.h>
42c271c6 13#include <crypto/scatterwalk.h>
84c91152 14#include <linux/completion.h>
28db8e3e
MH
15#include <linux/err.h>
16#include <linux/init.h>
17#include <linux/kernel.h>
18#include <linux/module.h>
19#include <linux/slab.h>
20
42c271c6 21#include "internal.h"
28db8e3e
MH
22
/* Per-instance data: the spawn for the underlying CTR-mode cipher. */
struct gcm_instance_ctx {
	struct crypto_spawn ctr;
};
26
/* Per-transform (per-key) context. */
struct crypto_gcm_ctx {
	struct crypto_ablkcipher *ctr;	/* underlying CTR cipher instance */
	struct gf128mul_4k *gf128;	/* GHASH key table; NULL until setkey */
};
31
/* Incremental GHASH state. */
struct crypto_gcm_ghash_ctx {
	u32 bytes;		/* bytes still missing from a partial block */
	u32 flags;		/* request flags, used for crypto_yield() */
	struct gf128mul_4k *gf128;	/* multiplication table for H */
	u8 buffer[16];		/* running 128-bit accumulator */
};
38
/* Per-request private context (see crypto_gcm_reqctx for alignment). */
struct crypto_gcm_req_priv_ctx {
	u8 auth_tag[16];	/* computed authentication tag */
	u8 iauth_tag[16];	/* tag copied from input, for verification */
	/* Two-entry sg tables: slot 0 is the tag buffer, slot 1 chains to
	 * the caller's src/dst scatterlists. */
	struct scatterlist src[2];
	struct scatterlist dst[2];
	struct crypto_gcm_ghash_ctx ghash;
	struct ablkcipher_request abreq;	/* inner CTR request (must be last:
						 * variable-size ctx follows) */
};
47
84c91152
HX
/* Completion bridge for the synchronous wait in crypto_gcm_setkey(). */
struct crypto_gcm_setkey_result {
	int err;			/* final status of the async encrypt */
	struct completion completion;
};
52
2589469d
HX
/*
 * Return the per-request private context, aligned up to the AEAD's
 * alignment mask.  The raw request context may be under-aligned; the
 * reqsize set in crypto_gcm_init_tfm() reserves slack for this.
 */
static inline struct crypto_gcm_req_priv_ctx *crypto_gcm_reqctx(
	struct aead_request *req)
{
	unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req));

	return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
}
60
28db8e3e
MH
61static void crypto_gcm_ghash_init(struct crypto_gcm_ghash_ctx *ctx, u32 flags,
62 struct gf128mul_4k *gf128)
63{
64 ctx->bytes = 0;
65 ctx->flags = flags;
66 ctx->gf128 = gf128;
67 memset(ctx->buffer, 0, 16);
68}
69
/*
 * Absorb srclen bytes of src into the GHASH state.  Bytes are XORed
 * into the 16-byte accumulator; each completed block is multiplied by
 * H in GF(2^128) (lle bit order).
 */
static void crypto_gcm_ghash_update(struct crypto_gcm_ghash_ctx *ctx,
				    const u8 *src, unsigned int srclen)
{
	u8 *dst = ctx->buffer;

	if (ctx->bytes) {
		/* Top up the partial block left by a previous call;
		 * ctx->bytes counts the bytes still missing from it. */
		int n = min(srclen, ctx->bytes);
		u8 *pos = dst + (16 - ctx->bytes);

		ctx->bytes -= n;
		srclen -= n;

		while (n--)
			*pos++ ^= *src++;

		/* Multiply only once the block is complete. */
		if (!ctx->bytes)
			gf128mul_4k_lle((be128 *)dst, ctx->gf128);
	}

	/* Full 16-byte blocks. */
	while (srclen >= 16) {
		crypto_xor(dst, src, 16);
		gf128mul_4k_lle((be128 *)dst, ctx->gf128);
		src += 16;
		srclen -= 16;
	}

	/* Stash any trailing partial block for the next update/flush. */
	if (srclen) {
		ctx->bytes = 16 - srclen;
		while (srclen--)
			*dst++ ^= *src++;
	}
}
102
/*
 * Feed len bytes from a scatterlist into the GHASH state, mapping one
 * contiguous chunk at a time and yielding between chunks if the
 * request flags permit.
 */
static void crypto_gcm_ghash_update_sg(struct crypto_gcm_ghash_ctx *ctx,
				       struct scatterlist *sg, int len)
{
	struct scatter_walk walk;
	u8 *src;
	int n;

	if (!len)
		return;

	scatterwalk_start(&walk, sg);

	while (len) {
		n = scatterwalk_clamp(&walk, len);

		if (!n) {
			/* Current sg entry exhausted; step to the next. */
			scatterwalk_start(&walk, scatterwalk_sg_next(walk.sg));
			n = scatterwalk_clamp(&walk, len);
		}

		src = scatterwalk_map(&walk, 0);

		crypto_gcm_ghash_update(ctx, src, n);
		len -= n;

		scatterwalk_unmap(src, 0);
		scatterwalk_advance(&walk, n);
		scatterwalk_done(&walk, 0, len);
		if (len)
			crypto_yield(ctx->flags);
	}
}
135
136static void crypto_gcm_ghash_flush(struct crypto_gcm_ghash_ctx *ctx)
137{
138 u8 *dst = ctx->buffer;
139
140 if (ctx->bytes) {
141 u8 *tmp = dst + (16 - ctx->bytes);
142
143 while (ctx->bytes--)
144 *tmp++ ^= 0;
145
146 gf128mul_4k_lle((be128 *)dst, ctx->gf128);
147 }
148
149 ctx->bytes = 0;
150}
151
/*
 * Finish GHASH: flush any partial block, absorb the GCM length block
 * (bit lengths of the AAD and of the ciphertext), then XOR the result
 * into dst (the running authentication tag).
 */
static void crypto_gcm_ghash_final_xor(struct crypto_gcm_ghash_ctx *ctx,
				       unsigned int authlen,
				       unsigned int cryptlen, u8 *dst)
{
	u8 *buf = ctx->buffer;
	u128 lengths;

	/* NOTE(review): big-endian values are stored into u128, whose
	 * members are not __be64-annotated; runtime layout is the same
	 * but be128 would be the sparse-clean type — confirm upstream. */
	lengths.a = cpu_to_be64(authlen * 8);
	lengths.b = cpu_to_be64(cryptlen * 8);

	crypto_gcm_ghash_flush(ctx);
	crypto_xor(buf, (u8 *)&lengths, 16);
	gf128mul_4k_lle((be128 *)buf, ctx->gf128);
	crypto_xor(dst, buf, 16);
}
167
84c91152 168static void crypto_gcm_setkey_done(struct crypto_async_request *req, int err)
28db8e3e 169{
84c91152 170 struct crypto_gcm_setkey_result *result = req->data;
28db8e3e 171
84c91152
HX
172 if (err == -EINPROGRESS)
173 return;
174
175 result->err = err;
176 complete(&result->completion);
28db8e3e
MH
177}
178
179static int crypto_gcm_setkey(struct crypto_aead *aead, const u8 *key,
180 unsigned int keylen)
181{
182 struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
183 struct crypto_ablkcipher *ctr = ctx->ctr;
84c91152
HX
184 struct {
185 be128 hash;
186 u8 iv[8];
187
188 struct crypto_gcm_setkey_result result;
189
190 struct scatterlist sg[1];
191 struct ablkcipher_request req;
192 } *data;
193 int err;
28db8e3e
MH
194
195 crypto_ablkcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK);
196 crypto_ablkcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
197 CRYPTO_TFM_REQ_MASK);
198
199 err = crypto_ablkcipher_setkey(ctr, key, keylen);
200 if (err)
84c91152 201 return err;
28db8e3e
MH
202
203 crypto_aead_set_flags(aead, crypto_ablkcipher_get_flags(ctr) &
204 CRYPTO_TFM_RES_MASK);
205
84c91152
HX
206 data = kzalloc(sizeof(*data) + crypto_ablkcipher_reqsize(ctr),
207 GFP_KERNEL);
208 if (!data)
209 return -ENOMEM;
210
211 init_completion(&data->result.completion);
212 sg_init_one(data->sg, &data->hash, sizeof(data->hash));
213 ablkcipher_request_set_tfm(&data->req, ctr);
214 ablkcipher_request_set_callback(&data->req, CRYPTO_TFM_REQ_MAY_SLEEP |
215 CRYPTO_TFM_REQ_MAY_BACKLOG,
216 crypto_gcm_setkey_done,
217 &data->result);
218 ablkcipher_request_set_crypt(&data->req, data->sg, data->sg,
219 sizeof(data->hash), data->iv);
220
221 err = crypto_ablkcipher_encrypt(&data->req);
222 if (err == -EINPROGRESS || err == -EBUSY) {
223 err = wait_for_completion_interruptible(
224 &data->result.completion);
225 if (!err)
226 err = data->result.err;
227 }
228
28db8e3e
MH
229 if (err)
230 goto out;
231
232 if (ctx->gf128 != NULL)
233 gf128mul_free_4k(ctx->gf128);
234
84c91152 235 ctx->gf128 = gf128mul_init_4k_lle(&data->hash);
28db8e3e
MH
236
237 if (ctx->gf128 == NULL)
238 err = -ENOMEM;
239
84c91152
HX
240out:
241 kfree(data);
28db8e3e
MH
242 return err;
243}
244
84c91152
HX
/*
 * Common encrypt/decrypt setup: build the initial counter block Y0,
 * chain a 16-byte tag slot in front of the caller's src/dst
 * scatterlists, point the inner CTR request at cryptlen + 16 bytes
 * (the extra leading block encrypts the zeroed tag buffer, producing
 * E(K, Y0) in auth_tag), and hash the associated data.
 */
static void crypto_gcm_init_crypt(struct ablkcipher_request *ablk_req,
				  struct aead_request *req,
				  unsigned int cryptlen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	u32 flags = req->base.tfm->crt_flags;
	struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
	struct scatterlist *dst;
	__be32 counter = cpu_to_be32(1);

	memset(pctx->auth_tag, 0, sizeof(pctx->auth_tag));
	/* Y0 = IV || 0^31 || 1: 96-bit IV with a 32-bit counter of one. */
	memcpy(req->iv + 12, &counter, 4);

	sg_init_table(pctx->src, 2);
	sg_set_buf(pctx->src, pctx->auth_tag, sizeof(pctx->auth_tag));
	scatterwalk_sg_chain(pctx->src, 2, req->src);

	/* In-place operation unless the caller gave distinct dst. */
	dst = pctx->src;
	if (req->src != req->dst) {
		sg_init_table(pctx->dst, 2);
		sg_set_buf(pctx->dst, pctx->auth_tag, sizeof(pctx->auth_tag));
		scatterwalk_sg_chain(pctx->dst, 2, req->dst);
		dst = pctx->dst;
	}

	ablkcipher_request_set_tfm(ablk_req, ctx->ctr);
	ablkcipher_request_set_crypt(ablk_req, pctx->src, dst,
				     cryptlen + sizeof(pctx->auth_tag),
				     req->iv);

	crypto_gcm_ghash_init(ghash, flags, ctx->gf128);

	/* GHASH the AAD, zero-padded to a block boundary. */
	crypto_gcm_ghash_update_sg(ghash, req->assoc, req->assoclen);
	crypto_gcm_ghash_flush(ghash);
}
282
/*
 * Post-encryption tag generation: GHASH the ciphertext, fold in the
 * length block, and append the final tag after the ciphertext in
 * req->dst.  Always returns 0.
 */
static int crypto_gcm_hash(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	u8 *auth_tag = pctx->auth_tag;
	struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;

	crypto_gcm_ghash_update_sg(ghash, req->dst, req->cryptlen);
	crypto_gcm_ghash_final_xor(ghash, req->assoclen, req->cryptlen,
				   auth_tag);

	/* Copy the (possibly truncated) tag out to the destination. */
	scatterwalk_map_and_copy(auth_tag, req->dst, req->cryptlen,
				 crypto_aead_authsize(aead), 1);
	return 0;
}
298
299static void crypto_gcm_encrypt_done(struct crypto_async_request *areq, int err)
300{
301 struct aead_request *req = areq->data;
302
303 if (!err)
304 err = crypto_gcm_hash(req);
305
28db8e3e
MH
306 aead_request_complete(req, err);
307}
308
309static int crypto_gcm_encrypt(struct aead_request *req)
310{
2589469d 311 struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
7f681378 312 struct ablkcipher_request *abreq = &pctx->abreq;
84c91152
HX
313 int err;
314
315 crypto_gcm_init_crypt(abreq, req, req->cryptlen);
316 ablkcipher_request_set_callback(abreq, aead_request_flags(req),
317 crypto_gcm_encrypt_done, req);
28db8e3e 318
84c91152 319 err = crypto_ablkcipher_encrypt(abreq);
28db8e3e
MH
320 if (err)
321 return err;
322
6160b289 323 return crypto_gcm_hash(req);
28db8e3e
MH
324}
325
84c91152
HX
326static int crypto_gcm_verify(struct aead_request *req)
327{
328 struct crypto_aead *aead = crypto_aead_reqtfm(req);
329 struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
330 struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
331 u8 *auth_tag = pctx->auth_tag;
332 u8 *iauth_tag = pctx->iauth_tag;
333 unsigned int authsize = crypto_aead_authsize(aead);
334 unsigned int cryptlen = req->cryptlen - authsize;
335
336 crypto_gcm_ghash_final_xor(ghash, req->assoclen, cryptlen, auth_tag);
337
338 authsize = crypto_aead_authsize(aead);
339 scatterwalk_map_and_copy(iauth_tag, req->src, cryptlen, authsize, 0);
340 return memcmp(iauth_tag, auth_tag, authsize) ? -EBADMSG : 0;
341}
342
28db8e3e
MH
343static void crypto_gcm_decrypt_done(struct crypto_async_request *areq, int err)
344{
84c91152
HX
345 struct aead_request *req = areq->data;
346
347 if (!err)
348 err = crypto_gcm_verify(req);
349
350 aead_request_complete(req, err);
28db8e3e
MH
351}
352
/*
 * GCM decryption entry point.  On decryption req->cryptlen covers
 * ciphertext plus the trailing tag, so the tag length is peeled off
 * first.  The ciphertext is hashed *before* the in-place CTR pass
 * destroys it; tag verification happens after decryption completes.
 */
static int crypto_gcm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct ablkcipher_request *abreq = &pctx->abreq;
	struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
	unsigned int cryptlen = req->cryptlen;
	unsigned int authsize = crypto_aead_authsize(aead);
	int err;

	if (cryptlen < authsize)
		return -EINVAL;
	cryptlen -= authsize;

	crypto_gcm_init_crypt(abreq, req, cryptlen);
	ablkcipher_request_set_callback(abreq, aead_request_flags(req),
					crypto_gcm_decrypt_done, req);

	/* Hash the ciphertext before decrypting it in place. */
	crypto_gcm_ghash_update_sg(ghash, req->src, cryptlen);

	err = crypto_ablkcipher_decrypt(abreq);
	if (err)
		return err;

	return crypto_gcm_verify(req);
}
379
/*
 * Transform constructor: instantiate the spawned CTR cipher and size
 * the per-request context.  Returns 0 or a negative errno.
 */
static int crypto_gcm_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct gcm_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_gcm_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_ablkcipher *ctr;
	unsigned long align;
	int err;

	ctr = crypto_spawn_ablkcipher(&ictx->ctr);
	err = PTR_ERR(ctr);
	if (IS_ERR(ctr))
		return err;

	ctx->ctr = ctr;
	ctx->gf128 = NULL;	/* allocated lazily by setkey */

	/* Room for the aligned private ctx (see crypto_gcm_reqctx) plus
	 * the inner CTR request that trails it. */
	align = crypto_tfm_alg_alignmask(tfm);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	tfm->crt_aead.reqsize = align +
				sizeof(struct crypto_gcm_req_priv_ctx) +
				crypto_ablkcipher_reqsize(ctr);

	return 0;
}
405
406static void crypto_gcm_exit_tfm(struct crypto_tfm *tfm)
407{
408 struct crypto_gcm_ctx *ctx = crypto_tfm_ctx(tfm);
409
410 if (ctx->gf128 != NULL)
411 gf128mul_free_4k(ctx->gf128);
412
413 crypto_free_ablkcipher(ctx->ctr);
414}
415
d00aa19b
HX
416static struct crypto_instance *crypto_gcm_alloc_common(struct rtattr **tb,
417 const char *full_name,
418 const char *ctr_name)
28db8e3e 419{
d00aa19b 420 struct crypto_attr_type *algt;
28db8e3e
MH
421 struct crypto_instance *inst;
422 struct crypto_alg *ctr;
28db8e3e
MH
423 struct gcm_instance_ctx *ctx;
424 int err;
28db8e3e 425
d00aa19b
HX
426 algt = crypto_get_attr_type(tb);
427 err = PTR_ERR(algt);
428 if (IS_ERR(algt))
28db8e3e
MH
429 return ERR_PTR(err);
430
d00aa19b
HX
431 if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
432 return ERR_PTR(-EINVAL);
28db8e3e
MH
433
434 ctr = crypto_alg_mod_lookup(ctr_name, CRYPTO_ALG_TYPE_BLKCIPHER,
435 CRYPTO_ALG_TYPE_MASK);
436
437 if (IS_ERR(ctr))
438 return ERR_PTR(PTR_ERR(ctr));
439
d00aa19b
HX
440 /* We only support 16-byte blocks. */
441 if ((ctr->cra_type == &crypto_blkcipher_type ?
442 ctr->cra_blkcipher.ivsize : ctr->cra_ablkcipher.ivsize) != 16)
443 goto out_put_ctr;
444
445 /* Not a stream cipher? */
446 err = -EINVAL;
447 if (ctr->cra_blocksize != 1)
28db8e3e
MH
448 goto out_put_ctr;
449
450 inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
451 err = -ENOMEM;
452 if (!inst)
453 goto out_put_ctr;
454
455 err = -ENAMETOOLONG;
d00aa19b
HX
456 if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
457 "gcm_base(%s)", ctr->cra_driver_name) >=
458 CRYPTO_MAX_ALG_NAME)
28db8e3e
MH
459 goto err_free_inst;
460
28db8e3e
MH
461 ctx = crypto_instance_ctx(inst);
462 err = crypto_init_spawn(&ctx->ctr, ctr, inst, CRYPTO_ALG_TYPE_MASK);
463 if (err)
464 goto err_free_inst;
465
d00aa19b
HX
466 memcpy(inst->alg.cra_name, full_name, CRYPTO_MAX_ALG_NAME);
467
28db8e3e
MH
468 inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC;
469 inst->alg.cra_priority = ctr->cra_priority;
d00aa19b 470 inst->alg.cra_blocksize = 1;
2589469d 471 inst->alg.cra_alignmask = ctr->cra_alignmask | (__alignof__(u64) - 1);
28db8e3e 472 inst->alg.cra_type = &crypto_aead_type;
84c91152 473 inst->alg.cra_aead.ivsize = 16;
7ba683a6 474 inst->alg.cra_aead.maxauthsize = 16;
28db8e3e
MH
475 inst->alg.cra_ctxsize = sizeof(struct crypto_gcm_ctx);
476 inst->alg.cra_init = crypto_gcm_init_tfm;
477 inst->alg.cra_exit = crypto_gcm_exit_tfm;
478 inst->alg.cra_aead.setkey = crypto_gcm_setkey;
479 inst->alg.cra_aead.encrypt = crypto_gcm_encrypt;
480 inst->alg.cra_aead.decrypt = crypto_gcm_decrypt;
481
482out:
483 crypto_mod_put(ctr);
484 return inst;
485err_free_inst:
486 kfree(inst);
487out_put_ctr:
488 inst = ERR_PTR(err);
489 goto out;
490}
491
d00aa19b
HX
492static struct crypto_instance *crypto_gcm_alloc(struct rtattr **tb)
493{
494 int err;
495 const char *cipher_name;
496 char ctr_name[CRYPTO_MAX_ALG_NAME];
497 char full_name[CRYPTO_MAX_ALG_NAME];
498
499 cipher_name = crypto_attr_alg_name(tb[1]);
500 err = PTR_ERR(cipher_name);
501 if (IS_ERR(cipher_name))
502 return ERR_PTR(err);
503
504 if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)", cipher_name) >=
505 CRYPTO_MAX_ALG_NAME)
506 return ERR_PTR(-ENAMETOOLONG);
507
508 if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "gcm(%s)", cipher_name) >=
509 CRYPTO_MAX_ALG_NAME)
510 return ERR_PTR(-ENAMETOOLONG);
511
512 return crypto_gcm_alloc_common(tb, full_name, ctr_name);
513}
514
28db8e3e
MH
/* Instance destructor shared by both templates. */
static void crypto_gcm_free(struct crypto_instance *inst)
{
	struct gcm_instance_ctx *ctx = crypto_instance_ctx(inst);

	crypto_drop_spawn(&ctx->ctr);
	kfree(inst);
}
522
/* "gcm(cipher)" — takes a plain block cipher name. */
static struct crypto_template crypto_gcm_tmpl = {
	.name = "gcm",
	.alloc = crypto_gcm_alloc,
	.free = crypto_gcm_free,
	.module = THIS_MODULE,
};
529
d00aa19b
HX
530static struct crypto_instance *crypto_gcm_base_alloc(struct rtattr **tb)
531{
532 int err;
533 const char *ctr_name;
534 char full_name[CRYPTO_MAX_ALG_NAME];
535
536 ctr_name = crypto_attr_alg_name(tb[1]);
537 err = PTR_ERR(ctr_name);
538 if (IS_ERR(ctr_name))
539 return ERR_PTR(err);
540
541 if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "gcm_base(%s)",
542 ctr_name) >= CRYPTO_MAX_ALG_NAME)
543 return ERR_PTR(-ENAMETOOLONG);
544
545 return crypto_gcm_alloc_common(tb, full_name, ctr_name);
546}
547
/* "gcm_base(ctr_driver)" — takes a fully-specified CTR transform. */
static struct crypto_template crypto_gcm_base_tmpl = {
	.name = "gcm_base",
	.alloc = crypto_gcm_base_alloc,
	.free = crypto_gcm_free,
	.module = THIS_MODULE,
};
554
28db8e3e
MH
555static int __init crypto_gcm_module_init(void)
556{
d00aa19b
HX
557 int err;
558
559 err = crypto_register_template(&crypto_gcm_base_tmpl);
560 if (err)
561 goto out;
562
563 err = crypto_register_template(&crypto_gcm_tmpl);
564 if (err)
565 goto out_undo_base;
566
567out:
568 return err;
569
570out_undo_base:
571 crypto_unregister_template(&crypto_gcm_base_tmpl);
572 goto out;
28db8e3e
MH
573}
574
static void __exit crypto_gcm_module_exit(void)
{
	/* Unregister in reverse order of registration. */
	crypto_unregister_template(&crypto_gcm_tmpl);
	crypto_unregister_template(&crypto_gcm_base_tmpl);
}
580
module_init(crypto_gcm_module_init);
module_exit(crypto_gcm_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Galois/Counter Mode");
MODULE_AUTHOR("Mikko Herranen <mh1@iki.fi>");
/* Allow requests for "gcm_base" to autoload this module as well. */
MODULE_ALIAS("gcm_base");