]>
Commit | Line | Data |
---|---|---|
1 | /* | |
2 | * GCM: Galois/Counter Mode. | |
3 | * | |
4 | * Copyright (c) 2007 Nokia Siemens Networks - Mikko Herranen <mh1@iki.fi> | |
5 | * | |
6 | * This program is free software; you can redistribute it and/or modify it | |
7 | * under the terms of the GNU General Public License version 2 as published | |
8 | * by the Free Software Foundation. | |
9 | */ | |
10 | ||
11 | #include <crypto/gf128mul.h> | |
12 | #include <crypto/internal/aead.h> | |
13 | #include <crypto/internal/skcipher.h> | |
14 | #include <crypto/internal/hash.h> | |
15 | #include <crypto/null.h> | |
16 | #include <crypto/scatterwalk.h> | |
17 | #include <crypto/hash.h> | |
18 | #include "internal.h" | |
19 | #include <linux/completion.h> | |
20 | #include <linux/err.h> | |
21 | #include <linux/init.h> | |
22 | #include <linux/kernel.h> | |
23 | #include <linux/module.h> | |
24 | #include <linux/slab.h> | |
25 | ||
/* Instance context for the gcm template: the two algorithms it spawns. */
struct gcm_instance_ctx {
	struct crypto_skcipher_spawn ctr;
	struct crypto_ahash_spawn ghash;
};

/* Per-tfm context for gcm: handles to the child CTR and GHASH tfms. */
struct crypto_gcm_ctx {
	struct crypto_skcipher *ctr;
	struct crypto_ahash *ghash;
};

/* Per-tfm context for rfc4106: child AEAD plus the 4-byte key nonce. */
struct crypto_rfc4106_ctx {
	struct crypto_aead *child;
	u8 nonce[4];
};

/* Per-request context for rfc4106: rebuilt src/dst lists and subrequest. */
struct crypto_rfc4106_req_ctx {
	struct scatterlist src[3];
	struct scatterlist dst[3];
	struct aead_request subreq;
};

/* Instance context for rfc4543: the spawned inner AEAD. */
struct crypto_rfc4543_instance_ctx {
	struct crypto_aead_spawn aead;
};

/* Per-tfm context for rfc4543: child AEAD, null cipher, 4-byte key nonce. */
struct crypto_rfc4543_ctx {
	struct crypto_aead *child;
	struct crypto_skcipher *null;
	u8 nonce[4];
};

/* Per-request context for rfc4543. */
struct crypto_rfc4543_req_ctx {
	struct aead_request subreq;
};

/*
 * State threaded through the asynchronous GHASH steps: how many bytes of
 * ciphertext to hash, where they live, and what to do once hashing ends.
 */
struct crypto_gcm_ghash_ctx {
	unsigned int cryptlen;
	struct scatterlist *src;
	int (*complete)(struct aead_request *req, u32 flags);
};

/* Per-request private context for gcm itself. */
struct crypto_gcm_req_priv_ctx {
	u8 iv[16];		/* 12-byte IV || 32-bit big-endian counter */
	u8 auth_tag[16];	/* zeroed, CTR-encrypted as block 0, then
				 * XORed with the GHASH to form the tag */
	u8 iauth_tag[16];	/* lengths block in, GHASH digest out */
	struct scatterlist src[3];
	struct scatterlist dst[3];
	struct scatterlist sg;
	struct crypto_gcm_ghash_ctx ghash_ctx;
	/* The CTR and GHASH phases never overlap, so share the space. */
	union {
		struct ahash_request ahreq;
		struct skcipher_request skreq;
	} u;
};

/* Completion glue for the synchronous wait in crypto_gcm_setkey(). */
struct crypto_gcm_setkey_result {
	int err;
	struct completion completion;
};

/* A 16-byte block of zeroes used to pad partial blocks fed to GHASH. */
static struct {
	u8 buf[16];
	struct scatterlist sg;
} *gcm_zeroes;

static int crypto_rfc4543_copy_src_to_dst(struct aead_request *req, bool enc);
92 | ||
93 | static inline struct crypto_gcm_req_priv_ctx *crypto_gcm_reqctx( | |
94 | struct aead_request *req) | |
95 | { | |
96 | unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req)); | |
97 | ||
98 | return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1); | |
99 | } | |
100 | ||
101 | static void crypto_gcm_setkey_done(struct crypto_async_request *req, int err) | |
102 | { | |
103 | struct crypto_gcm_setkey_result *result = req->data; | |
104 | ||
105 | if (err == -EINPROGRESS) | |
106 | return; | |
107 | ||
108 | result->err = err; | |
109 | complete(&result->completion); | |
110 | } | |
111 | ||
112 | static int crypto_gcm_setkey(struct crypto_aead *aead, const u8 *key, | |
113 | unsigned int keylen) | |
114 | { | |
115 | struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead); | |
116 | struct crypto_ahash *ghash = ctx->ghash; | |
117 | struct crypto_skcipher *ctr = ctx->ctr; | |
118 | struct { | |
119 | be128 hash; | |
120 | u8 iv[16]; | |
121 | ||
122 | struct crypto_gcm_setkey_result result; | |
123 | ||
124 | struct scatterlist sg[1]; | |
125 | struct skcipher_request req; | |
126 | } *data; | |
127 | int err; | |
128 | ||
129 | crypto_skcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK); | |
130 | crypto_skcipher_set_flags(ctr, crypto_aead_get_flags(aead) & | |
131 | CRYPTO_TFM_REQ_MASK); | |
132 | err = crypto_skcipher_setkey(ctr, key, keylen); | |
133 | crypto_aead_set_flags(aead, crypto_skcipher_get_flags(ctr) & | |
134 | CRYPTO_TFM_RES_MASK); | |
135 | if (err) | |
136 | return err; | |
137 | ||
138 | data = kzalloc(sizeof(*data) + crypto_skcipher_reqsize(ctr), | |
139 | GFP_KERNEL); | |
140 | if (!data) | |
141 | return -ENOMEM; | |
142 | ||
143 | init_completion(&data->result.completion); | |
144 | sg_init_one(data->sg, &data->hash, sizeof(data->hash)); | |
145 | skcipher_request_set_tfm(&data->req, ctr); | |
146 | skcipher_request_set_callback(&data->req, CRYPTO_TFM_REQ_MAY_SLEEP | | |
147 | CRYPTO_TFM_REQ_MAY_BACKLOG, | |
148 | crypto_gcm_setkey_done, | |
149 | &data->result); | |
150 | skcipher_request_set_crypt(&data->req, data->sg, data->sg, | |
151 | sizeof(data->hash), data->iv); | |
152 | ||
153 | err = crypto_skcipher_encrypt(&data->req); | |
154 | if (err == -EINPROGRESS || err == -EBUSY) { | |
155 | err = wait_for_completion_interruptible( | |
156 | &data->result.completion); | |
157 | if (!err) | |
158 | err = data->result.err; | |
159 | } | |
160 | ||
161 | if (err) | |
162 | goto out; | |
163 | ||
164 | crypto_ahash_clear_flags(ghash, CRYPTO_TFM_REQ_MASK); | |
165 | crypto_ahash_set_flags(ghash, crypto_aead_get_flags(aead) & | |
166 | CRYPTO_TFM_REQ_MASK); | |
167 | err = crypto_ahash_setkey(ghash, (u8 *)&data->hash, sizeof(be128)); | |
168 | crypto_aead_set_flags(aead, crypto_ahash_get_flags(ghash) & | |
169 | CRYPTO_TFM_RES_MASK); | |
170 | ||
171 | out: | |
172 | kzfree(data); | |
173 | return err; | |
174 | } | |
175 | ||
/* GCM permits tag lengths of 4, 8 and 12..16 bytes. */
static int crypto_gcm_setauthsize(struct crypto_aead *tfm,
				  unsigned int authsize)
{
	if (authsize == 4 || authsize == 8 ||
	    (authsize >= 12 && authsize <= 16))
		return 0;

	return -EINVAL;
}
194 | ||
/*
 * Build the per-request IV and scatterlists.  The private source list is
 * prefixed with the 16-byte auth_tag buffer so CTR encrypts it as block 0
 * (its ciphertext is later XORed with the GHASH), followed by the payload
 * at offset req->assoclen of the caller's list.
 */
static void crypto_gcm_init_common(struct aead_request *req)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	__be32 counter = cpu_to_be32(1);
	struct scatterlist *sg;

	memset(pctx->auth_tag, 0, sizeof(pctx->auth_tag));
	/* Counter block: 96-bit IV followed by a big-endian 1. */
	memcpy(pctx->iv, req->iv, 12);
	memcpy(pctx->iv + 12, &counter, 4);

	sg_init_table(pctx->src, 3);
	sg_set_buf(pctx->src, pctx->auth_tag, sizeof(pctx->auth_tag));
	sg = scatterwalk_ffwd(pctx->src + 1, req->src, req->assoclen);
	/* ffwd may return an entry inside req->src; chain to it if so. */
	if (sg != pctx->src + 1)
		sg_chain(pctx->src, 2, sg);

	if (req->src != req->dst) {
		sg_init_table(pctx->dst, 3);
		sg_set_buf(pctx->dst, pctx->auth_tag, sizeof(pctx->auth_tag));
		sg = scatterwalk_ffwd(pctx->dst + 1, req->dst, req->assoclen);
		if (sg != pctx->dst + 1)
			sg_chain(pctx->dst, 2, sg);
	}
}
219 | ||
220 | static void crypto_gcm_init_crypt(struct aead_request *req, | |
221 | unsigned int cryptlen) | |
222 | { | |
223 | struct crypto_aead *aead = crypto_aead_reqtfm(req); | |
224 | struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead); | |
225 | struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req); | |
226 | struct skcipher_request *skreq = &pctx->u.skreq; | |
227 | struct scatterlist *dst; | |
228 | ||
229 | dst = req->src == req->dst ? pctx->src : pctx->dst; | |
230 | ||
231 | skcipher_request_set_tfm(skreq, ctx->ctr); | |
232 | skcipher_request_set_crypt(skreq, pctx->src, dst, | |
233 | cryptlen + sizeof(pctx->auth_tag), | |
234 | pctx->iv); | |
235 | } | |
236 | ||
/* Bytes of zero padding needed to round len up to a 16-byte block. */
static inline unsigned int gcm_remain(unsigned int len)
{
	unsigned int partial = len & 0xfU;

	return partial ? 16 - partial : 0;
}
242 | ||
243 | static void gcm_hash_len_done(struct crypto_async_request *areq, int err); | |
244 | ||
245 | static int gcm_hash_update(struct aead_request *req, | |
246 | crypto_completion_t compl, | |
247 | struct scatterlist *src, | |
248 | unsigned int len, u32 flags) | |
249 | { | |
250 | struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req); | |
251 | struct ahash_request *ahreq = &pctx->u.ahreq; | |
252 | ||
253 | ahash_request_set_callback(ahreq, flags, compl, req); | |
254 | ahash_request_set_crypt(ahreq, src, NULL, len); | |
255 | ||
256 | return crypto_ahash_update(ahreq); | |
257 | } | |
258 | ||
/* Hash 'remain' zero bytes to pad the previous data to a full block. */
static int gcm_hash_remain(struct aead_request *req,
			   unsigned int remain,
			   crypto_completion_t compl, u32 flags)
{
	return gcm_hash_update(req, compl, &gcm_zeroes->sg, remain, flags);
}
265 | ||
/*
 * Final GHASH step: hash the 128-bit lengths block (bit lengths of the
 * associated data and of the ciphertext) and finalize the digest.
 */
static int gcm_hash_len(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct ahash_request *ahreq = &pctx->u.ahreq;
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
	u128 lengths;

	/* GCM hashes the two lengths in bits, big-endian. */
	lengths.a = cpu_to_be64(req->assoclen * 8);
	lengths.b = cpu_to_be64(gctx->cryptlen * 8);
	memcpy(pctx->iauth_tag, &lengths, 16);
	sg_init_one(&pctx->sg, pctx->iauth_tag, 16);
	ahash_request_set_callback(ahreq, flags, gcm_hash_len_done, req);
	/* finup: hash the block, then write the digest over iauth_tag. */
	ahash_request_set_crypt(ahreq, &pctx->sg,
				pctx->iauth_tag, sizeof(lengths));

	return crypto_ahash_finup(ahreq);
}
283 | ||
284 | static int gcm_hash_len_continue(struct aead_request *req, u32 flags) | |
285 | { | |
286 | struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req); | |
287 | struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx; | |
288 | ||
289 | return gctx->complete(req, flags); | |
290 | } | |
291 | ||
292 | static void gcm_hash_len_done(struct crypto_async_request *areq, int err) | |
293 | { | |
294 | struct aead_request *req = areq->data; | |
295 | ||
296 | if (err) | |
297 | goto out; | |
298 | ||
299 | err = gcm_hash_len_continue(req, 0); | |
300 | if (err == -EINPROGRESS) | |
301 | return; | |
302 | ||
303 | out: | |
304 | aead_request_complete(req, err); | |
305 | } | |
306 | ||
307 | static int gcm_hash_crypt_remain_continue(struct aead_request *req, u32 flags) | |
308 | { | |
309 | return gcm_hash_len(req, flags) ?: | |
310 | gcm_hash_len_continue(req, flags); | |
311 | } | |
312 | ||
313 | static void gcm_hash_crypt_remain_done(struct crypto_async_request *areq, | |
314 | int err) | |
315 | { | |
316 | struct aead_request *req = areq->data; | |
317 | ||
318 | if (err) | |
319 | goto out; | |
320 | ||
321 | err = gcm_hash_crypt_remain_continue(req, 0); | |
322 | if (err == -EINPROGRESS) | |
323 | return; | |
324 | ||
325 | out: | |
326 | aead_request_complete(req, err); | |
327 | } | |
328 | ||
329 | static int gcm_hash_crypt_continue(struct aead_request *req, u32 flags) | |
330 | { | |
331 | struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req); | |
332 | struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx; | |
333 | unsigned int remain; | |
334 | ||
335 | remain = gcm_remain(gctx->cryptlen); | |
336 | if (remain) | |
337 | return gcm_hash_remain(req, remain, | |
338 | gcm_hash_crypt_remain_done, flags) ?: | |
339 | gcm_hash_crypt_remain_continue(req, flags); | |
340 | ||
341 | return gcm_hash_crypt_remain_continue(req, flags); | |
342 | } | |
343 | ||
344 | static void gcm_hash_crypt_done(struct crypto_async_request *areq, int err) | |
345 | { | |
346 | struct aead_request *req = areq->data; | |
347 | ||
348 | if (err) | |
349 | goto out; | |
350 | ||
351 | err = gcm_hash_crypt_continue(req, 0); | |
352 | if (err == -EINPROGRESS) | |
353 | return; | |
354 | ||
355 | out: | |
356 | aead_request_complete(req, err); | |
357 | } | |
358 | ||
359 | static int gcm_hash_assoc_remain_continue(struct aead_request *req, u32 flags) | |
360 | { | |
361 | struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req); | |
362 | struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx; | |
363 | ||
364 | if (gctx->cryptlen) | |
365 | return gcm_hash_update(req, gcm_hash_crypt_done, | |
366 | gctx->src, gctx->cryptlen, flags) ?: | |
367 | gcm_hash_crypt_continue(req, flags); | |
368 | ||
369 | return gcm_hash_crypt_remain_continue(req, flags); | |
370 | } | |
371 | ||
372 | static void gcm_hash_assoc_remain_done(struct crypto_async_request *areq, | |
373 | int err) | |
374 | { | |
375 | struct aead_request *req = areq->data; | |
376 | ||
377 | if (err) | |
378 | goto out; | |
379 | ||
380 | err = gcm_hash_assoc_remain_continue(req, 0); | |
381 | if (err == -EINPROGRESS) | |
382 | return; | |
383 | ||
384 | out: | |
385 | aead_request_complete(req, err); | |
386 | } | |
387 | ||
388 | static int gcm_hash_assoc_continue(struct aead_request *req, u32 flags) | |
389 | { | |
390 | unsigned int remain; | |
391 | ||
392 | remain = gcm_remain(req->assoclen); | |
393 | if (remain) | |
394 | return gcm_hash_remain(req, remain, | |
395 | gcm_hash_assoc_remain_done, flags) ?: | |
396 | gcm_hash_assoc_remain_continue(req, flags); | |
397 | ||
398 | return gcm_hash_assoc_remain_continue(req, flags); | |
399 | } | |
400 | ||
401 | static void gcm_hash_assoc_done(struct crypto_async_request *areq, int err) | |
402 | { | |
403 | struct aead_request *req = areq->data; | |
404 | ||
405 | if (err) | |
406 | goto out; | |
407 | ||
408 | err = gcm_hash_assoc_continue(req, 0); | |
409 | if (err == -EINPROGRESS) | |
410 | return; | |
411 | ||
412 | out: | |
413 | aead_request_complete(req, err); | |
414 | } | |
415 | ||
416 | static int gcm_hash_init_continue(struct aead_request *req, u32 flags) | |
417 | { | |
418 | if (req->assoclen) | |
419 | return gcm_hash_update(req, gcm_hash_assoc_done, | |
420 | req->src, req->assoclen, flags) ?: | |
421 | gcm_hash_assoc_continue(req, flags); | |
422 | ||
423 | return gcm_hash_assoc_remain_continue(req, flags); | |
424 | } | |
425 | ||
426 | static void gcm_hash_init_done(struct crypto_async_request *areq, int err) | |
427 | { | |
428 | struct aead_request *req = areq->data; | |
429 | ||
430 | if (err) | |
431 | goto out; | |
432 | ||
433 | err = gcm_hash_init_continue(req, 0); | |
434 | if (err == -EINPROGRESS) | |
435 | return; | |
436 | ||
437 | out: | |
438 | aead_request_complete(req, err); | |
439 | } | |
440 | ||
/*
 * Kick off the GHASH pipeline: init the hash, then (via the *_continue
 * helpers) hash AAD, padding, ciphertext, padding, and the lengths
 * block.  Each step may finish synchronously (fall through to the next
 * *_continue) or asynchronously (resume from the matching *_done).
 */
static int gcm_hash(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct ahash_request *ahreq = &pctx->u.ahreq;
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));

	ahash_request_set_tfm(ahreq, ctx->ghash);

	ahash_request_set_callback(ahreq, flags, gcm_hash_init_done, req);
	return crypto_ahash_init(ahreq) ?:
	       gcm_hash_init_continue(req, flags);
}
453 | ||
/* XOR the GHASH into the encrypted counter block and append the tag. */
static int gcm_enc_copy_hash(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	u8 *auth_tag = pctx->auth_tag;

	crypto_xor(auth_tag, pctx->iauth_tag, 16);
	/* The (possibly truncated) tag follows the ciphertext in dst. */
	scatterwalk_map_and_copy(auth_tag, req->dst,
				 req->assoclen + req->cryptlen,
				 crypto_aead_authsize(aead), 1);
	return 0;
}
466 | ||
/* After CTR encryption: hash the freshly produced ciphertext. */
static int gcm_encrypt_continue(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;

	/* Ciphertext lives in the output list, past the auth_tag entry. */
	gctx->src = sg_next(req->src == req->dst ? pctx->src : pctx->dst);
	gctx->cryptlen = req->cryptlen;
	gctx->complete = gcm_enc_copy_hash;

	return gcm_hash(req, flags);
}
478 | ||
479 | static void gcm_encrypt_done(struct crypto_async_request *areq, int err) | |
480 | { | |
481 | struct aead_request *req = areq->data; | |
482 | ||
483 | if (err) | |
484 | goto out; | |
485 | ||
486 | err = gcm_encrypt_continue(req, 0); | |
487 | if (err == -EINPROGRESS) | |
488 | return; | |
489 | ||
490 | out: | |
491 | aead_request_complete(req, err); | |
492 | } | |
493 | ||
/* AEAD encrypt: CTR-encrypt the plaintext, then GHASH and emit the tag. */
static int crypto_gcm_encrypt(struct aead_request *req)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct skcipher_request *skreq = &pctx->u.skreq;
	u32 flags = aead_request_flags(req);

	crypto_gcm_init_common(req);
	crypto_gcm_init_crypt(req, req->cryptlen);
	skcipher_request_set_callback(skreq, flags, gcm_encrypt_done, req);

	return crypto_skcipher_encrypt(skreq) ?:
	       gcm_encrypt_continue(req, flags);
}
507 | ||
/*
 * Compare the computed tag against the one at the end of the source
 * buffer.  crypto_memneq() is constant-time so the comparison does not
 * leak how many tag bytes matched.
 */
static int crypto_gcm_verify(struct aead_request *req)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	u8 *auth_tag = pctx->auth_tag;
	u8 *iauth_tag = pctx->iauth_tag;
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen - authsize;

	/* Fold the GHASH into the encrypted counter block: the real tag. */
	crypto_xor(auth_tag, iauth_tag, 16);
	/* Reuse iauth_tag to hold the transmitted tag read from src. */
	scatterwalk_map_and_copy(iauth_tag, req->src,
				 req->assoclen + cryptlen, authsize, 0);
	return crypto_memneq(iauth_tag, auth_tag, authsize) ? -EBADMSG : 0;
}
522 | ||
523 | static void gcm_decrypt_done(struct crypto_async_request *areq, int err) | |
524 | { | |
525 | struct aead_request *req = areq->data; | |
526 | ||
527 | if (!err) | |
528 | err = crypto_gcm_verify(req); | |
529 | ||
530 | aead_request_complete(req, err); | |
531 | } | |
532 | ||
/* After hashing the ciphertext: CTR-decrypt it, then verify the tag. */
static int gcm_dec_hash_continue(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct skcipher_request *skreq = &pctx->u.skreq;
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;

	crypto_gcm_init_crypt(req, gctx->cryptlen);
	skcipher_request_set_callback(skreq, flags, gcm_decrypt_done, req);
	return crypto_skcipher_decrypt(skreq) ?: crypto_gcm_verify(req);
}
543 | ||
/* AEAD decrypt: GHASH the ciphertext first, then decrypt and verify. */
static int crypto_gcm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen;
	u32 flags = aead_request_flags(req);

	/* On decryption req->cryptlen includes the trailing tag. */
	cryptlen -= authsize;

	crypto_gcm_init_common(req);

	/* Hash the ciphertext straight out of the source list. */
	gctx->src = sg_next(pctx->src);
	gctx->cryptlen = cryptlen;
	gctx->complete = gcm_dec_hash_continue;

	return gcm_hash(req, flags);
}
563 | ||
/*
 * Instantiate the child GHASH and CTR transforms and size the request
 * context large enough for either child request (they never coexist).
 */
static int crypto_gcm_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct gcm_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_skcipher *ctr;
	struct crypto_ahash *ghash;
	unsigned long align;
	int err;

	ghash = crypto_spawn_ahash(&ictx->ghash);
	if (IS_ERR(ghash))
		return PTR_ERR(ghash);

	ctr = crypto_spawn_skcipher(&ictx->ctr);
	err = PTR_ERR(ctr);
	if (IS_ERR(ctr))
		goto err_free_hash;

	ctx->ctr = ctr;
	ctx->ghash = ghash;

	/* Reserve room so the context can be aligned to the alignmask. */
	align = crypto_aead_alignmask(tfm);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	crypto_aead_set_reqsize(tfm,
		align + offsetof(struct crypto_gcm_req_priv_ctx, u) +
		max(sizeof(struct skcipher_request) +
		    crypto_skcipher_reqsize(ctr),
		    sizeof(struct ahash_request) +
		    crypto_ahash_reqsize(ghash)));

	return 0;

err_free_hash:
	crypto_free_ahash(ghash);
	return err;
}
601 | ||
602 | static void crypto_gcm_exit_tfm(struct crypto_aead *tfm) | |
603 | { | |
604 | struct crypto_gcm_ctx *ctx = crypto_aead_ctx(tfm); | |
605 | ||
606 | crypto_free_ahash(ctx->ghash); | |
607 | crypto_free_skcipher(ctx->ctr); | |
608 | } | |
609 | ||
610 | static void crypto_gcm_free(struct aead_instance *inst) | |
611 | { | |
612 | struct gcm_instance_ctx *ctx = aead_instance_ctx(inst); | |
613 | ||
614 | crypto_drop_skcipher(&ctx->ctr); | |
615 | crypto_drop_ahash(&ctx->ghash); | |
616 | kfree(inst); | |
617 | } | |
618 | ||
/*
 * Shared instance constructor for the "gcm" and "gcm_base" templates.
 * Grabs the named GHASH-style ahash and CTR-style skcipher, validates
 * them (16-byte digest, 16-byte IV, stream-cipher block size), and
 * registers the resulting AEAD instance.
 */
static int crypto_gcm_create_common(struct crypto_template *tmpl,
				    struct rtattr **tb,
				    const char *full_name,
				    const char *ctr_name,
				    const char *ghash_name)
{
	struct crypto_attr_type *algt;
	struct aead_instance *inst;
	struct skcipher_alg *ctr;
	struct crypto_alg *ghash_alg;
	struct hash_alg_common *ghash;
	struct gcm_instance_ctx *ctx;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return -EINVAL;

	ghash_alg = crypto_find_alg(ghash_name, &crypto_ahash_type,
				    CRYPTO_ALG_TYPE_HASH,
				    CRYPTO_ALG_TYPE_AHASH_MASK |
				    crypto_requires_sync(algt->type,
							 algt->mask));
	if (IS_ERR(ghash_alg))
		return PTR_ERR(ghash_alg);

	ghash = __crypto_hash_alg_common(ghash_alg);

	err = -ENOMEM;
	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		goto out_put_ghash;

	ctx = aead_instance_ctx(inst);
	err = crypto_init_ahash_spawn(&ctx->ghash, ghash,
				      aead_crypto_instance(inst));
	if (err)
		goto err_free_inst;

	err = -EINVAL;
	/* GHASH must produce a full 128-bit digest. */
	if (ghash->digestsize != 16)
		goto err_drop_ghash;

	crypto_set_skcipher_spawn(&ctx->ctr, aead_crypto_instance(inst));
	err = crypto_grab_skcipher(&ctx->ctr, ctr_name, 0,
				   crypto_requires_sync(algt->type,
							algt->mask));
	if (err)
		goto err_drop_ghash;

	ctr = crypto_spawn_skcipher_alg(&ctx->ctr);

	/* We only support 16-byte blocks. */
	err = -EINVAL;
	if (crypto_skcipher_alg_ivsize(ctr) != 16)
		goto out_put_ctr;

	/* Not a stream cipher? */
	if (ctr->base.cra_blocksize != 1)
		goto out_put_ctr;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "gcm_base(%s,%s)", ctr->base.cra_driver_name,
		     ghash_alg->cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto out_put_ctr;

	memcpy(inst->alg.base.cra_name, full_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.base.cra_flags = (ghash->base.cra_flags |
				    ctr->base.cra_flags) & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = (ghash->base.cra_priority +
				       ctr->base.cra_priority) / 2;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = ghash->base.cra_alignmask |
				       ctr->base.cra_alignmask;
	inst->alg.base.cra_ctxsize = sizeof(struct crypto_gcm_ctx);
	inst->alg.ivsize = 12;
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(ctr);
	inst->alg.maxauthsize = 16;
	inst->alg.init = crypto_gcm_init_tfm;
	inst->alg.exit = crypto_gcm_exit_tfm;
	inst->alg.setkey = crypto_gcm_setkey;
	inst->alg.setauthsize = crypto_gcm_setauthsize;
	inst->alg.encrypt = crypto_gcm_encrypt;
	inst->alg.decrypt = crypto_gcm_decrypt;

	inst->free = crypto_gcm_free;

	err = aead_register_instance(tmpl, inst);
	if (err)
		goto out_put_ctr;

	/* Success path also drops the ghash_alg reference taken above. */
out_put_ghash:
	crypto_mod_put(ghash_alg);
	return err;

out_put_ctr:
	crypto_drop_skcipher(&ctx->ctr);
err_drop_ghash:
	crypto_drop_ahash(&ctx->ghash);
err_free_inst:
	kfree(inst);
	goto out_put_ghash;
}
728 | ||
729 | static int crypto_gcm_create(struct crypto_template *tmpl, struct rtattr **tb) | |
730 | { | |
731 | const char *cipher_name; | |
732 | char ctr_name[CRYPTO_MAX_ALG_NAME]; | |
733 | char full_name[CRYPTO_MAX_ALG_NAME]; | |
734 | ||
735 | cipher_name = crypto_attr_alg_name(tb[1]); | |
736 | if (IS_ERR(cipher_name)) | |
737 | return PTR_ERR(cipher_name); | |
738 | ||
739 | if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)", cipher_name) >= | |
740 | CRYPTO_MAX_ALG_NAME) | |
741 | return -ENAMETOOLONG; | |
742 | ||
743 | if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "gcm(%s)", cipher_name) >= | |
744 | CRYPTO_MAX_ALG_NAME) | |
745 | return -ENAMETOOLONG; | |
746 | ||
747 | return crypto_gcm_create_common(tmpl, tb, full_name, | |
748 | ctr_name, "ghash"); | |
749 | } | |
750 | ||
/* "gcm(cipher)" template. */
static struct crypto_template crypto_gcm_tmpl = {
	.name = "gcm",
	.create = crypto_gcm_create,
	.module = THIS_MODULE,
};
756 | ||
757 | static int crypto_gcm_base_create(struct crypto_template *tmpl, | |
758 | struct rtattr **tb) | |
759 | { | |
760 | const char *ctr_name; | |
761 | const char *ghash_name; | |
762 | char full_name[CRYPTO_MAX_ALG_NAME]; | |
763 | ||
764 | ctr_name = crypto_attr_alg_name(tb[1]); | |
765 | if (IS_ERR(ctr_name)) | |
766 | return PTR_ERR(ctr_name); | |
767 | ||
768 | ghash_name = crypto_attr_alg_name(tb[2]); | |
769 | if (IS_ERR(ghash_name)) | |
770 | return PTR_ERR(ghash_name); | |
771 | ||
772 | if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "gcm_base(%s,%s)", | |
773 | ctr_name, ghash_name) >= CRYPTO_MAX_ALG_NAME) | |
774 | return -ENAMETOOLONG; | |
775 | ||
776 | return crypto_gcm_create_common(tmpl, tb, full_name, | |
777 | ctr_name, ghash_name); | |
778 | } | |
779 | ||
/* "gcm_base(ctr,ghash)" template. */
static struct crypto_template crypto_gcm_base_tmpl = {
	.name = "gcm_base",
	.create = crypto_gcm_base_create,
	.module = THIS_MODULE,
};
785 | ||
/*
 * The last four key bytes are the rfc4106 nonce (salt); the remainder
 * is passed down to the child GCM transform as its key.
 */
static int crypto_rfc4106_setkey(struct crypto_aead *parent, const u8 *key,
				 unsigned int keylen)
{
	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;
	int err;

	if (keylen < 4)
		return -EINVAL;

	keylen -= 4;
	memcpy(ctx->nonce, key + keylen, 4);

	crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
				     CRYPTO_TFM_REQ_MASK);
	err = crypto_aead_setkey(child, key, keylen);
	crypto_aead_set_flags(parent, crypto_aead_get_flags(child) &
				      CRYPTO_TFM_RES_MASK);

	return err;
}
808 | ||
809 | static int crypto_rfc4106_setauthsize(struct crypto_aead *parent, | |
810 | unsigned int authsize) | |
811 | { | |
812 | struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(parent); | |
813 | ||
814 | switch (authsize) { | |
815 | case 8: | |
816 | case 12: | |
817 | case 16: | |
818 | break; | |
819 | default: | |
820 | return -EINVAL; | |
821 | } | |
822 | ||
823 | return crypto_aead_setauthsize(ctx->child, authsize); | |
824 | } | |
825 | ||
/*
 * Build the subrequest for the child gcm: the full 12-byte IV is the
 * 4-byte key nonce followed by the 8-byte per-request IV, and the last
 * 8 AAD bytes (which duplicate the IV) are stripped.  The remaining AAD
 * is copied into the request context so the child sees it contiguously
 * ahead of the payload.
 */
static struct aead_request *crypto_rfc4106_crypt(struct aead_request *req)
{
	struct crypto_rfc4106_req_ctx *rctx = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(aead);
	struct aead_request *subreq = &rctx->subreq;
	struct crypto_aead *child = ctx->child;
	struct scatterlist *sg;
	/* iv lives after the subrequest, aligned for the child tfm;
	 * iv + 12 is scratch space for the copied AAD. */
	u8 *iv = PTR_ALIGN((u8 *)(subreq + 1) + crypto_aead_reqsize(child),
			   crypto_aead_alignmask(child) + 1);

	scatterwalk_map_and_copy(iv + 12, req->src, 0, req->assoclen - 8, 0);

	memcpy(iv, ctx->nonce, 4);
	memcpy(iv + 4, req->iv, 8);

	sg_init_table(rctx->src, 3);
	sg_set_buf(rctx->src, iv + 12, req->assoclen - 8);
	sg = scatterwalk_ffwd(rctx->src + 1, req->src, req->assoclen);
	if (sg != rctx->src + 1)
		sg_chain(rctx->src, 2, sg);

	if (req->src != req->dst) {
		sg_init_table(rctx->dst, 3);
		sg_set_buf(rctx->dst, iv + 12, req->assoclen - 8);
		sg = scatterwalk_ffwd(rctx->dst + 1, req->dst, req->assoclen);
		if (sg != rctx->dst + 1)
			sg_chain(rctx->dst, 2, sg);
	}

	aead_request_set_tfm(subreq, child);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, rctx->src,
			       req->src == req->dst ? rctx->src : rctx->dst,
			       req->cryptlen, iv);
	aead_request_set_ad(subreq, req->assoclen - 8);

	return subreq;
}
866 | ||
867 | static int crypto_rfc4106_encrypt(struct aead_request *req) | |
868 | { | |
869 | if (req->assoclen != 16 && req->assoclen != 20) | |
870 | return -EINVAL; | |
871 | ||
872 | req = crypto_rfc4106_crypt(req); | |
873 | ||
874 | return crypto_aead_encrypt(req); | |
875 | } | |
876 | ||
877 | static int crypto_rfc4106_decrypt(struct aead_request *req) | |
878 | { | |
879 | if (req->assoclen != 16 && req->assoclen != 20) | |
880 | return -EINVAL; | |
881 | ||
882 | req = crypto_rfc4106_crypt(req); | |
883 | ||
884 | return crypto_aead_decrypt(req); | |
885 | } | |
886 | ||
/*
 * Instantiate the child AEAD and size the request context for the
 * subrequest plus an aligned 12-byte IV and AAD scratch area.
 */
static int crypto_rfc4106_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct crypto_aead_spawn *spawn = aead_instance_ctx(inst);
	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *aead;
	unsigned long align;

	aead = crypto_spawn_aead(spawn);
	if (IS_ERR(aead))
		return PTR_ERR(aead);

	ctx->child = aead;

	align = crypto_aead_alignmask(aead);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	/* + 24: aligned space for the 12-byte IV and copied AAD tail. */
	crypto_aead_set_reqsize(
		tfm,
		sizeof(struct crypto_rfc4106_req_ctx) +
		ALIGN(crypto_aead_reqsize(aead), crypto_tfm_ctx_alignment()) +
		align + 24);

	return 0;
}
911 | ||
/* Release the child AEAD acquired in crypto_rfc4106_init_tfm(). */
static void crypto_rfc4106_exit_tfm(struct crypto_aead *tfm)
{
	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
}
918 | ||
/* Instance destructor: drop the AEAD spawn, then free the instance. */
static void crypto_rfc4106_free(struct aead_instance *inst)
{
	struct crypto_aead_spawn *spawn = aead_instance_ctx(inst);

	crypto_drop_aead(spawn);
	kfree(inst);
}
924 | ||
925 | static int crypto_rfc4106_create(struct crypto_template *tmpl, | |
926 | struct rtattr **tb) | |
927 | { | |
928 | struct crypto_attr_type *algt; | |
929 | struct aead_instance *inst; | |
930 | struct crypto_aead_spawn *spawn; | |
931 | struct aead_alg *alg; | |
932 | const char *ccm_name; | |
933 | int err; | |
934 | ||
935 | algt = crypto_get_attr_type(tb); | |
936 | if (IS_ERR(algt)) | |
937 | return PTR_ERR(algt); | |
938 | ||
939 | if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask) | |
940 | return -EINVAL; | |
941 | ||
942 | ccm_name = crypto_attr_alg_name(tb[1]); | |
943 | if (IS_ERR(ccm_name)) | |
944 | return PTR_ERR(ccm_name); | |
945 | ||
946 | inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL); | |
947 | if (!inst) | |
948 | return -ENOMEM; | |
949 | ||
950 | spawn = aead_instance_ctx(inst); | |
951 | crypto_set_aead_spawn(spawn, aead_crypto_instance(inst)); | |
952 | err = crypto_grab_aead(spawn, ccm_name, 0, | |
953 | crypto_requires_sync(algt->type, algt->mask)); | |
954 | if (err) | |
955 | goto out_free_inst; | |
956 | ||
957 | alg = crypto_spawn_aead_alg(spawn); | |
958 | ||
959 | err = -EINVAL; | |
960 | ||
961 | /* Underlying IV size must be 12. */ | |
962 | if (crypto_aead_alg_ivsize(alg) != 12) | |
963 | goto out_drop_alg; | |
964 | ||
965 | /* Not a stream cipher? */ | |
966 | if (alg->base.cra_blocksize != 1) | |
967 | goto out_drop_alg; | |
968 | ||
969 | err = -ENAMETOOLONG; | |
970 | if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME, | |
971 | "rfc4106(%s)", alg->base.cra_name) >= | |
972 | CRYPTO_MAX_ALG_NAME || | |
973 | snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME, | |
974 | "rfc4106(%s)", alg->base.cra_driver_name) >= | |
975 | CRYPTO_MAX_ALG_NAME) | |
976 | goto out_drop_alg; | |
977 | ||
978 | inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC; | |
979 | inst->alg.base.cra_priority = alg->base.cra_priority; | |
980 | inst->alg.base.cra_blocksize = 1; | |
981 | inst->alg.base.cra_alignmask = alg->base.cra_alignmask; | |
982 | ||
983 | inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc4106_ctx); | |
984 | ||
985 | inst->alg.ivsize = 8; | |
986 | inst->alg.chunksize = crypto_aead_alg_chunksize(alg); | |
987 | inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg); | |
988 | ||
989 | inst->alg.init = crypto_rfc4106_init_tfm; | |
990 | inst->alg.exit = crypto_rfc4106_exit_tfm; | |
991 | ||
992 | inst->alg.setkey = crypto_rfc4106_setkey; | |
993 | inst->alg.setauthsize = crypto_rfc4106_setauthsize; | |
994 | inst->alg.encrypt = crypto_rfc4106_encrypt; | |
995 | inst->alg.decrypt = crypto_rfc4106_decrypt; | |
996 | ||
997 | inst->free = crypto_rfc4106_free; | |
998 | ||
999 | err = aead_register_instance(tmpl, inst); | |
1000 | if (err) | |
1001 | goto out_drop_alg; | |
1002 | ||
1003 | out: | |
1004 | return err; | |
1005 | ||
1006 | out_drop_alg: | |
1007 | crypto_drop_aead(spawn); | |
1008 | out_free_inst: | |
1009 | kfree(inst); | |
1010 | goto out; | |
1011 | } | |
1012 | ||
/* Template registration record for "rfc4106(...)" instances. */
static struct crypto_template crypto_rfc4106_tmpl = {
	.name = "rfc4106",
	.create = crypto_rfc4106_create,
	.module = THIS_MODULE,
};
1018 | ||
1019 | static int crypto_rfc4543_setkey(struct crypto_aead *parent, const u8 *key, | |
1020 | unsigned int keylen) | |
1021 | { | |
1022 | struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(parent); | |
1023 | struct crypto_aead *child = ctx->child; | |
1024 | int err; | |
1025 | ||
1026 | if (keylen < 4) | |
1027 | return -EINVAL; | |
1028 | ||
1029 | keylen -= 4; | |
1030 | memcpy(ctx->nonce, key + keylen, 4); | |
1031 | ||
1032 | crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK); | |
1033 | crypto_aead_set_flags(child, crypto_aead_get_flags(parent) & | |
1034 | CRYPTO_TFM_REQ_MASK); | |
1035 | err = crypto_aead_setkey(child, key, keylen); | |
1036 | crypto_aead_set_flags(parent, crypto_aead_get_flags(child) & | |
1037 | CRYPTO_TFM_RES_MASK); | |
1038 | ||
1039 | return err; | |
1040 | } | |
1041 | ||
1042 | static int crypto_rfc4543_setauthsize(struct crypto_aead *parent, | |
1043 | unsigned int authsize) | |
1044 | { | |
1045 | struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(parent); | |
1046 | ||
1047 | if (authsize != 16) | |
1048 | return -EINVAL; | |
1049 | ||
1050 | return crypto_aead_setauthsize(ctx->child, authsize); | |
1051 | } | |
1052 | ||
1053 | static int crypto_rfc4543_crypt(struct aead_request *req, bool enc) | |
1054 | { | |
1055 | struct crypto_aead *aead = crypto_aead_reqtfm(req); | |
1056 | struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(aead); | |
1057 | struct crypto_rfc4543_req_ctx *rctx = aead_request_ctx(req); | |
1058 | struct aead_request *subreq = &rctx->subreq; | |
1059 | unsigned int authsize = crypto_aead_authsize(aead); | |
1060 | u8 *iv = PTR_ALIGN((u8 *)(rctx + 1) + crypto_aead_reqsize(ctx->child), | |
1061 | crypto_aead_alignmask(ctx->child) + 1); | |
1062 | int err; | |
1063 | ||
1064 | if (req->src != req->dst) { | |
1065 | err = crypto_rfc4543_copy_src_to_dst(req, enc); | |
1066 | if (err) | |
1067 | return err; | |
1068 | } | |
1069 | ||
1070 | memcpy(iv, ctx->nonce, 4); | |
1071 | memcpy(iv + 4, req->iv, 8); | |
1072 | ||
1073 | aead_request_set_tfm(subreq, ctx->child); | |
1074 | aead_request_set_callback(subreq, req->base.flags, | |
1075 | req->base.complete, req->base.data); | |
1076 | aead_request_set_crypt(subreq, req->src, req->dst, | |
1077 | enc ? 0 : authsize, iv); | |
1078 | aead_request_set_ad(subreq, req->assoclen + req->cryptlen - | |
1079 | subreq->cryptlen); | |
1080 | ||
1081 | return enc ? crypto_aead_encrypt(subreq) : crypto_aead_decrypt(subreq); | |
1082 | } | |
1083 | ||
/*
 * Copy the request payload from src to dst for out-of-place rfc4543
 * requests, using the shared "null" skcipher (an identity cipher) so
 * the copy honours the scatterlist layout.  On decrypt the trailing
 * authentication tag is excluded from the copy.
 */
static int crypto_rfc4543_copy_src_to_dst(struct aead_request *req, bool enc)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int nbytes = req->assoclen + req->cryptlen -
			      (enc ? 0 : authsize);
	/* On-stack request sized for ctx->null; no allocation, no sleep. */
	SKCIPHER_REQUEST_ON_STACK(nreq, ctx->null);

	skcipher_request_set_tfm(nreq, ctx->null);
	/* Synchronous use: no completion callback is needed. */
	skcipher_request_set_callback(nreq, req->base.flags, NULL, NULL);
	skcipher_request_set_crypt(nreq, req->src, req->dst, nbytes, NULL);

	return crypto_skcipher_encrypt(nreq);
}
1099 | ||
1100 | static int crypto_rfc4543_encrypt(struct aead_request *req) | |
1101 | { | |
1102 | return crypto_rfc4543_crypt(req, true); | |
1103 | } | |
1104 | ||
1105 | static int crypto_rfc4543_decrypt(struct aead_request *req) | |
1106 | { | |
1107 | return crypto_rfc4543_crypt(req, false); | |
1108 | } | |
1109 | ||
1110 | static int crypto_rfc4543_init_tfm(struct crypto_aead *tfm) | |
1111 | { | |
1112 | struct aead_instance *inst = aead_alg_instance(tfm); | |
1113 | struct crypto_rfc4543_instance_ctx *ictx = aead_instance_ctx(inst); | |
1114 | struct crypto_aead_spawn *spawn = &ictx->aead; | |
1115 | struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(tfm); | |
1116 | struct crypto_aead *aead; | |
1117 | struct crypto_skcipher *null; | |
1118 | unsigned long align; | |
1119 | int err = 0; | |
1120 | ||
1121 | aead = crypto_spawn_aead(spawn); | |
1122 | if (IS_ERR(aead)) | |
1123 | return PTR_ERR(aead); | |
1124 | ||
1125 | null = crypto_get_default_null_skcipher2(); | |
1126 | err = PTR_ERR(null); | |
1127 | if (IS_ERR(null)) | |
1128 | goto err_free_aead; | |
1129 | ||
1130 | ctx->child = aead; | |
1131 | ctx->null = null; | |
1132 | ||
1133 | align = crypto_aead_alignmask(aead); | |
1134 | align &= ~(crypto_tfm_ctx_alignment() - 1); | |
1135 | crypto_aead_set_reqsize( | |
1136 | tfm, | |
1137 | sizeof(struct crypto_rfc4543_req_ctx) + | |
1138 | ALIGN(crypto_aead_reqsize(aead), crypto_tfm_ctx_alignment()) + | |
1139 | align + 12); | |
1140 | ||
1141 | return 0; | |
1142 | ||
1143 | err_free_aead: | |
1144 | crypto_free_aead(aead); | |
1145 | return err; | |
1146 | } | |
1147 | ||
1148 | static void crypto_rfc4543_exit_tfm(struct crypto_aead *tfm) | |
1149 | { | |
1150 | struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(tfm); | |
1151 | ||
1152 | crypto_free_aead(ctx->child); | |
1153 | crypto_put_default_null_skcipher2(); | |
1154 | } | |
1155 | ||
1156 | static void crypto_rfc4543_free(struct aead_instance *inst) | |
1157 | { | |
1158 | struct crypto_rfc4543_instance_ctx *ctx = aead_instance_ctx(inst); | |
1159 | ||
1160 | crypto_drop_aead(&ctx->aead); | |
1161 | ||
1162 | kfree(inst); | |
1163 | } | |
1164 | ||
1165 | static int crypto_rfc4543_create(struct crypto_template *tmpl, | |
1166 | struct rtattr **tb) | |
1167 | { | |
1168 | struct crypto_attr_type *algt; | |
1169 | struct aead_instance *inst; | |
1170 | struct crypto_aead_spawn *spawn; | |
1171 | struct aead_alg *alg; | |
1172 | struct crypto_rfc4543_instance_ctx *ctx; | |
1173 | const char *ccm_name; | |
1174 | int err; | |
1175 | ||
1176 | algt = crypto_get_attr_type(tb); | |
1177 | if (IS_ERR(algt)) | |
1178 | return PTR_ERR(algt); | |
1179 | ||
1180 | if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask) | |
1181 | return -EINVAL; | |
1182 | ||
1183 | ccm_name = crypto_attr_alg_name(tb[1]); | |
1184 | if (IS_ERR(ccm_name)) | |
1185 | return PTR_ERR(ccm_name); | |
1186 | ||
1187 | inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL); | |
1188 | if (!inst) | |
1189 | return -ENOMEM; | |
1190 | ||
1191 | ctx = aead_instance_ctx(inst); | |
1192 | spawn = &ctx->aead; | |
1193 | crypto_set_aead_spawn(spawn, aead_crypto_instance(inst)); | |
1194 | err = crypto_grab_aead(spawn, ccm_name, 0, | |
1195 | crypto_requires_sync(algt->type, algt->mask)); | |
1196 | if (err) | |
1197 | goto out_free_inst; | |
1198 | ||
1199 | alg = crypto_spawn_aead_alg(spawn); | |
1200 | ||
1201 | err = -EINVAL; | |
1202 | ||
1203 | /* Underlying IV size must be 12. */ | |
1204 | if (crypto_aead_alg_ivsize(alg) != 12) | |
1205 | goto out_drop_alg; | |
1206 | ||
1207 | /* Not a stream cipher? */ | |
1208 | if (alg->base.cra_blocksize != 1) | |
1209 | goto out_drop_alg; | |
1210 | ||
1211 | err = -ENAMETOOLONG; | |
1212 | if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME, | |
1213 | "rfc4543(%s)", alg->base.cra_name) >= | |
1214 | CRYPTO_MAX_ALG_NAME || | |
1215 | snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME, | |
1216 | "rfc4543(%s)", alg->base.cra_driver_name) >= | |
1217 | CRYPTO_MAX_ALG_NAME) | |
1218 | goto out_drop_alg; | |
1219 | ||
1220 | inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC; | |
1221 | inst->alg.base.cra_priority = alg->base.cra_priority; | |
1222 | inst->alg.base.cra_blocksize = 1; | |
1223 | inst->alg.base.cra_alignmask = alg->base.cra_alignmask; | |
1224 | ||
1225 | inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc4543_ctx); | |
1226 | ||
1227 | inst->alg.ivsize = 8; | |
1228 | inst->alg.chunksize = crypto_aead_alg_chunksize(alg); | |
1229 | inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg); | |
1230 | ||
1231 | inst->alg.init = crypto_rfc4543_init_tfm; | |
1232 | inst->alg.exit = crypto_rfc4543_exit_tfm; | |
1233 | ||
1234 | inst->alg.setkey = crypto_rfc4543_setkey; | |
1235 | inst->alg.setauthsize = crypto_rfc4543_setauthsize; | |
1236 | inst->alg.encrypt = crypto_rfc4543_encrypt; | |
1237 | inst->alg.decrypt = crypto_rfc4543_decrypt; | |
1238 | ||
1239 | inst->free = crypto_rfc4543_free, | |
1240 | ||
1241 | err = aead_register_instance(tmpl, inst); | |
1242 | if (err) | |
1243 | goto out_drop_alg; | |
1244 | ||
1245 | out: | |
1246 | return err; | |
1247 | ||
1248 | out_drop_alg: | |
1249 | crypto_drop_aead(spawn); | |
1250 | out_free_inst: | |
1251 | kfree(inst); | |
1252 | goto out; | |
1253 | } | |
1254 | ||
/* Template registration record for "rfc4543(...)" instances. */
static struct crypto_template crypto_rfc4543_tmpl = {
	.name = "rfc4543",
	.create = crypto_rfc4543_create,
	.module = THIS_MODULE,
};
1260 | ||
/*
 * Module init: allocate the shared zero-filled scratch buffer used by
 * the GCM code, then register the four templates.  Registration is
 * unwound in reverse order on failure, and the scratch buffer is freed
 * on any error path.
 */
static int __init crypto_gcm_module_init(void)
{
	int err;

	gcm_zeroes = kzalloc(sizeof(*gcm_zeroes), GFP_KERNEL);
	if (!gcm_zeroes)
		return -ENOMEM;

	/* Wrap the zero buffer in a single-entry scatterlist. */
	sg_init_one(&gcm_zeroes->sg, gcm_zeroes->buf, sizeof(gcm_zeroes->buf));

	err = crypto_register_template(&crypto_gcm_base_tmpl);
	if (err)
		goto out;

	err = crypto_register_template(&crypto_gcm_tmpl);
	if (err)
		goto out_undo_base;

	err = crypto_register_template(&crypto_rfc4106_tmpl);
	if (err)
		goto out_undo_gcm;

	err = crypto_register_template(&crypto_rfc4543_tmpl);
	if (err)
		goto out_undo_rfc4106;

	return 0;

out_undo_rfc4106:
	crypto_unregister_template(&crypto_rfc4106_tmpl);
out_undo_gcm:
	crypto_unregister_template(&crypto_gcm_tmpl);
out_undo_base:
	crypto_unregister_template(&crypto_gcm_base_tmpl);
out:
	kfree(gcm_zeroes);
	return err;
}
1299 | ||
/*
 * Module exit: free the shared zero buffer and unregister the templates
 * in reverse registration order.
 *
 * NOTE(review): gcm_zeroes is freed before the templates are
 * unregistered; presumably no requests can still reference it once
 * module unload is allowed to proceed — confirm against the crypto
 * core's unload guarantees.
 */
static void __exit crypto_gcm_module_exit(void)
{
	kfree(gcm_zeroes);
	crypto_unregister_template(&crypto_rfc4543_tmpl);
	crypto_unregister_template(&crypto_rfc4106_tmpl);
	crypto_unregister_template(&crypto_gcm_tmpl);
	crypto_unregister_template(&crypto_gcm_base_tmpl);
}
1308 | ||
module_init(crypto_gcm_module_init);
module_exit(crypto_gcm_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Galois/Counter Mode");
MODULE_AUTHOR("Mikko Herranen <mh1@iki.fi>");
/* Aliases let the crypto core auto-load this module for each template. */
MODULE_ALIAS_CRYPTO("gcm_base");
MODULE_ALIAS_CRYPTO("rfc4106");
MODULE_ALIAS_CRYPTO("rfc4543");
MODULE_ALIAS_CRYPTO("gcm");