drivers/crypto/cavium/nitrox/nitrox_aead.c
// SPDX-License-Identifier: GPL-2.0
#include <linux/kernel.h>
#include <linux/printk.h>
#include <linux/crypto.h>
#include <linux/rtnetlink.h>

#include <crypto/aead.h>
#include <crypto/authenc.h>
#include <crypto/des.h>
#include <crypto/sha.h>
#include <crypto/internal/aead.h>
#include <crypto/scatterwalk.h>
#include <crypto/gcm.h>

#include "nitrox_dev.h"
#include "nitrox_common.h"
#include "nitrox_req.h"

#define GCM_AES_SALT_SIZE	4

union gph_p3 {
	struct {
#ifdef __BIG_ENDIAN_BITFIELD
		u16 iv_offset : 8;
		u16 auth_offset : 8;
#else
		u16 auth_offset : 8;
		u16 iv_offset : 8;
#endif
	};
	u16 param;
};

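/* program the AES key into the device's flexi crypto context */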
static int nitrox_aes_gcm_setkey(struct crypto_aead *aead, const u8 *key,
				 unsigned int keylen)
{
	int aes_keylen;
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
	struct flexi_crypto_context *fctx;
	union fc_ctx_flags flags;

	aes_keylen = flexi_aes_keylen(keylen);
	if (aes_keylen < 0)
		return -EINVAL;

	/* fill crypto context */
	fctx = nctx->u.fctx;
	flags.f = be64_to_cpu(fctx->flags.f);
	flags.w0.aes_keylen = aes_keylen;
	fctx->flags.f = cpu_to_be64(flags.f);

	/* copy enc key to context */
	memset(&fctx->crypto, 0, sizeof(fctx->crypto));
	memcpy(fctx->crypto.u.key, key, keylen);

	return 0;
}

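/* record the requested MAC length in the flexi crypto context flags */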
static int nitrox_aead_setauthsize(struct crypto_aead *aead,
				   unsigned int authsize)
{
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
	struct flexi_crypto_context *fctx = nctx->u.fctx;
	union fc_ctx_flags flags;

	flags.f = be64_to_cpu(fctx->flags.f);
	flags.w0.mac_len = authsize;
	fctx->flags.f = cpu_to_be64(flags.f);

	aead->authsize = authsize;

	return 0;
}

static int nitrox_aes_gcm_setauthsize(struct crypto_aead *aead,
				      unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 8:
	case 12:
	case 13:
	case 14:
	case 15:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return nitrox_aead_setauthsize(aead, authsize);
}

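/* allocate the input buffer: an IV slot followed by the source scatterlist */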
static int alloc_src_sglist(struct nitrox_kcrypt_request *nkreq,
			    struct scatterlist *src, char *iv, int ivsize,
			    int buflen)
{
	int nents = sg_nents_for_len(src, buflen);
	int ret;

	if (nents < 0)
		return nents;

	/* IV entry */
	nents += 1;
	/* Allocate buffer to hold IV and input scatterlist array */
	ret = alloc_src_req_buf(nkreq, nents, ivsize);
	if (ret)
		return ret;

	nitrox_creq_copy_iv(nkreq->src, iv, ivsize);
	nitrox_creq_set_src_sg(nkreq, nents, ivsize, src, buflen);

	return 0;
}

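/*
 * allocate the output buffer and set up the ORH, COMPLETION and
 * destination SG entries
 */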
static int alloc_dst_sglist(struct nitrox_kcrypt_request *nkreq,
			    struct scatterlist *dst, int ivsize, int buflen)
{
	int nents = sg_nents_for_len(dst, buflen);
	int ret;

	if (nents < 0)
		return nents;

	/* IV, ORH, COMPLETION entries */
	nents += 3;
	/* Allocate buffer to hold ORH, COMPLETION and output scatterlist
	 * array
	 */
	ret = alloc_dst_req_buf(nkreq, nents);
	if (ret)
		return ret;

	nitrox_creq_set_orh(nkreq);
	nitrox_creq_set_comp(nkreq);
	nitrox_creq_set_dst_sg(nkreq, nents, ivsize, dst, buflen);

	return 0;
}

static void free_src_sglist(struct nitrox_kcrypt_request *nkreq)
{
	kfree(nkreq->src);
}

static void free_dst_sglist(struct nitrox_kcrypt_request *nkreq)
{
	kfree(nkreq->dst);
}

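/*
 * fill the SE crypto request: general purpose header parameters
 * (lengths and offsets) and the source/destination SG lists
 */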
static int nitrox_set_creq(struct nitrox_aead_rctx *rctx)
{
	struct se_crypto_request *creq = &rctx->nkreq.creq;
	union gph_p3 param3;
	int ret;

	creq->flags = rctx->flags;
	creq->gfp = (rctx->flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? GFP_KERNEL :
							       GFP_ATOMIC;

	creq->ctrl.value = 0;
	creq->opcode = FLEXI_CRYPTO_ENCRYPT_HMAC;
	creq->ctrl.s.arg = rctx->ctrl_arg;

	creq->gph.param0 = cpu_to_be16(rctx->cryptlen);
	creq->gph.param1 = cpu_to_be16(rctx->cryptlen + rctx->assoclen);
	creq->gph.param2 = cpu_to_be16(rctx->ivsize + rctx->assoclen);
	param3.iv_offset = 0;
	param3.auth_offset = rctx->ivsize;
	creq->gph.param3 = cpu_to_be16(param3.param);

	creq->ctx_handle = rctx->ctx_handle;
	creq->ctrl.s.ctxl = sizeof(struct flexi_crypto_context);

	ret = alloc_src_sglist(&rctx->nkreq, rctx->src, rctx->iv, rctx->ivsize,
			       rctx->srclen);
	if (ret)
		return ret;

	ret = alloc_dst_sglist(&rctx->nkreq, rctx->dst, rctx->ivsize,
			       rctx->dstlen);
	if (ret) {
		free_src_sglist(&rctx->nkreq);
		return ret;
	}

	return 0;
}

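/* completion handler: free the SG buffers and complete the AEAD request */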
static void nitrox_aead_callback(void *arg, int err)
{
	struct aead_request *areq = arg;
	struct nitrox_aead_rctx *rctx = aead_request_ctx(areq);

	free_src_sglist(&rctx->nkreq);
	free_dst_sglist(&rctx->nkreq);
	if (err) {
		pr_err_ratelimited("request failed status 0x%0x\n", err);
		err = -EINVAL;
	}

	areq->base.complete(&areq->base, err);
}

static inline bool nitrox_aes_gcm_assoclen_supported(unsigned int assoclen)
{
	if (assoclen <= 512)
		return true;

	return false;
}

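/*
 * GCM encryption: the 4-byte salt is programmed into the context IV,
 * the remaining IV bytes are passed along with the request
 */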
static int nitrox_aes_gcm_enc(struct aead_request *areq)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(areq);
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
	struct nitrox_aead_rctx *rctx = aead_request_ctx(areq);
	struct se_crypto_request *creq = &rctx->nkreq.creq;
	struct flexi_crypto_context *fctx = nctx->u.fctx;
	int ret;

	if (!nitrox_aes_gcm_assoclen_supported(areq->assoclen))
		return -EINVAL;

	memcpy(fctx->crypto.iv, areq->iv, GCM_AES_SALT_SIZE);

	rctx->cryptlen = areq->cryptlen;
	rctx->assoclen = areq->assoclen;
	rctx->srclen = areq->assoclen + areq->cryptlen;
	rctx->dstlen = rctx->srclen + aead->authsize;
	rctx->iv = &areq->iv[GCM_AES_SALT_SIZE];
	rctx->ivsize = GCM_AES_IV_SIZE - GCM_AES_SALT_SIZE;
	rctx->flags = areq->base.flags;
	rctx->ctx_handle = nctx->u.ctx_handle;
	rctx->src = areq->src;
	rctx->dst = areq->dst;
	rctx->ctrl_arg = ENCRYPT;
	ret = nitrox_set_creq(rctx);
	if (ret)
		return ret;

	/* send the crypto request */
	return nitrox_process_se_request(nctx->ndev, creq, nitrox_aead_callback,
					 areq);
}

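/* GCM decryption: cryptlen excludes the authentication tag appended to src */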
static int nitrox_aes_gcm_dec(struct aead_request *areq)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(areq);
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
	struct nitrox_aead_rctx *rctx = aead_request_ctx(areq);
	struct se_crypto_request *creq = &rctx->nkreq.creq;
	struct flexi_crypto_context *fctx = nctx->u.fctx;
	int ret;

	if (!nitrox_aes_gcm_assoclen_supported(areq->assoclen))
		return -EINVAL;

	memcpy(fctx->crypto.iv, areq->iv, GCM_AES_SALT_SIZE);

	rctx->cryptlen = areq->cryptlen - aead->authsize;
	rctx->assoclen = areq->assoclen;
	rctx->srclen = areq->cryptlen + areq->assoclen;
	rctx->dstlen = rctx->srclen - aead->authsize;
	rctx->iv = &areq->iv[GCM_AES_SALT_SIZE];
	rctx->ivsize = GCM_AES_IV_SIZE - GCM_AES_SALT_SIZE;
	rctx->flags = areq->base.flags;
	rctx->ctx_handle = nctx->u.ctx_handle;
	rctx->src = areq->src;
	rctx->dst = areq->dst;
	rctx->ctrl_arg = DECRYPT;
	ret = nitrox_set_creq(rctx);
	if (ret)
		return ret;

	/* send the crypto request */
	return nitrox_process_se_request(nctx->ndev, creq, nitrox_aead_callback,
					 areq);
}

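/* bind the transform to a NITROX device and allocate its crypto context */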
static int nitrox_aead_init(struct crypto_aead *aead)
{
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
	struct crypto_ctx_hdr *chdr;

	/* get the first device */
	nctx->ndev = nitrox_get_first_device();
	if (!nctx->ndev)
		return -ENODEV;

	/* allocate nitrox crypto context */
	chdr = crypto_alloc_context(nctx->ndev);
	if (!chdr) {
		nitrox_put_device(nctx->ndev);
		return -ENOMEM;
	}
	nctx->chdr = chdr;
	nctx->u.ctx_handle = (uintptr_t)((u8 *)chdr->vaddr +
					 sizeof(struct ctx_hdr));
	nctx->u.fctx->flags.f = 0;

	return 0;
}

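/* program the GCM cipher parameters into the flexi context flags */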
static int nitrox_gcm_common_init(struct crypto_aead *aead)
{
	int ret;
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
	union fc_ctx_flags *flags;

	ret = nitrox_aead_init(aead);
	if (ret)
		return ret;

	flags = &nctx->u.fctx->flags;
	flags->w0.cipher_type = CIPHER_AES_GCM;
	flags->w0.hash_type = AUTH_NULL;
	flags->w0.iv_source = IV_FROM_DPTR;
	/* ask microcode to calculate ipad/opad */
	flags->w0.auth_input_type = 1;
	flags->f = be64_to_cpu(flags->f);

	return 0;
}

static int nitrox_aes_gcm_init(struct crypto_aead *aead)
{
	int ret;

	ret = nitrox_gcm_common_init(aead);
	if (ret)
		return ret;

	crypto_aead_set_reqsize(aead,
				sizeof(struct aead_request) +
				sizeof(struct nitrox_aead_rctx));

	return 0;
}

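/* zero the keys and release the crypto context and device reference */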
static void nitrox_aead_exit(struct crypto_aead *aead)
{
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);

	/* free the nitrox crypto context */
	if (nctx->u.ctx_handle) {
		struct flexi_crypto_context *fctx = nctx->u.fctx;

		memzero_explicit(&fctx->crypto, sizeof(struct crypto_keys));
		memzero_explicit(&fctx->auth, sizeof(struct auth_keys));
		crypto_free_context((void *)nctx->chdr);
	}
	nitrox_put_device(nctx->ndev);

	nctx->u.ctx_handle = 0;
	nctx->ndev = NULL;
}

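/* RFC 4106: the last four key bytes are the salt, the rest is the AES key */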
static int nitrox_rfc4106_setkey(struct crypto_aead *aead, const u8 *key,
				 unsigned int keylen)
{
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
	struct flexi_crypto_context *fctx = nctx->u.fctx;
	int ret;

	if (keylen < GCM_AES_SALT_SIZE)
		return -EINVAL;

	keylen -= GCM_AES_SALT_SIZE;
	ret = nitrox_aes_gcm_setkey(aead, key, keylen);
	if (ret)
		return ret;

	memcpy(fctx->crypto.iv, key + keylen, GCM_AES_SALT_SIZE);
	return 0;
}

static int nitrox_rfc4106_setauthsize(struct crypto_aead *aead,
				      unsigned int authsize)
{
	switch (authsize) {
	case 8:
	case 12:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return nitrox_aead_setauthsize(aead, authsize);
}

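/*
 * copy the associated data into a local buffer and build src/dst
 * scatterlists that drop the 8-byte IV carried at the end of the AAD
 */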
static int nitrox_rfc4106_set_aead_rctx_sglist(struct aead_request *areq)
{
	struct nitrox_rfc4106_rctx *rctx = aead_request_ctx(areq);
	struct nitrox_aead_rctx *aead_rctx = &rctx->base;
	unsigned int assoclen = areq->assoclen - GCM_RFC4106_IV_SIZE;
	struct scatterlist *sg;

	if (areq->assoclen != 16 && areq->assoclen != 20)
		return -EINVAL;

	scatterwalk_map_and_copy(rctx->assoc, areq->src, 0, assoclen, 0);
	sg_init_table(rctx->src, 3);
	sg_set_buf(rctx->src, rctx->assoc, assoclen);
	sg = scatterwalk_ffwd(rctx->src + 1, areq->src, areq->assoclen);
	if (sg != rctx->src + 1)
		sg_chain(rctx->src, 2, sg);

	if (areq->src != areq->dst) {
		sg_init_table(rctx->dst, 3);
		sg_set_buf(rctx->dst, rctx->assoc, assoclen);
		sg = scatterwalk_ffwd(rctx->dst + 1, areq->dst, areq->assoclen);
		if (sg != rctx->dst + 1)
			sg_chain(rctx->dst, 2, sg);
	}

	aead_rctx->src = rctx->src;
	aead_rctx->dst = (areq->src == areq->dst) ? rctx->src : rctx->dst;

	return 0;
}

static void nitrox_rfc4106_callback(void *arg, int err)
{
	struct aead_request *areq = arg;
	struct nitrox_rfc4106_rctx *rctx = aead_request_ctx(areq);
	struct nitrox_kcrypt_request *nkreq = &rctx->base.nkreq;

	free_src_sglist(nkreq);
	free_dst_sglist(nkreq);
	if (err) {
		pr_err_ratelimited("request failed status 0x%0x\n", err);
		err = -EINVAL;
	}

	areq->base.complete(&areq->base, err);
}

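/* RFC 4106 encryption: assoclen excludes the 8-byte IV carried in the AAD */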
static int nitrox_rfc4106_enc(struct aead_request *areq)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(areq);
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
	struct nitrox_rfc4106_rctx *rctx = aead_request_ctx(areq);
	struct nitrox_aead_rctx *aead_rctx = &rctx->base;
	struct se_crypto_request *creq = &aead_rctx->nkreq.creq;
	int ret;

	aead_rctx->cryptlen = areq->cryptlen;
	aead_rctx->assoclen = areq->assoclen - GCM_RFC4106_IV_SIZE;
	aead_rctx->srclen = aead_rctx->assoclen + aead_rctx->cryptlen;
	aead_rctx->dstlen = aead_rctx->srclen + aead->authsize;
	aead_rctx->iv = areq->iv;
	aead_rctx->ivsize = GCM_RFC4106_IV_SIZE;
	aead_rctx->flags = areq->base.flags;
	aead_rctx->ctx_handle = nctx->u.ctx_handle;
	aead_rctx->ctrl_arg = ENCRYPT;

	ret = nitrox_rfc4106_set_aead_rctx_sglist(areq);
	if (ret)
		return ret;

	ret = nitrox_set_creq(aead_rctx);
	if (ret)
		return ret;

	/* send the crypto request */
	return nitrox_process_se_request(nctx->ndev, creq,
					 nitrox_rfc4106_callback, areq);
}

static int nitrox_rfc4106_dec(struct aead_request *areq)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(areq);
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
	struct nitrox_rfc4106_rctx *rctx = aead_request_ctx(areq);
	struct nitrox_aead_rctx *aead_rctx = &rctx->base;
	struct se_crypto_request *creq = &aead_rctx->nkreq.creq;
	int ret;

	aead_rctx->cryptlen = areq->cryptlen - aead->authsize;
	aead_rctx->assoclen = areq->assoclen - GCM_RFC4106_IV_SIZE;
	aead_rctx->srclen =
		areq->cryptlen - GCM_RFC4106_IV_SIZE + areq->assoclen;
	aead_rctx->dstlen = aead_rctx->srclen - aead->authsize;
	aead_rctx->iv = areq->iv;
	aead_rctx->ivsize = GCM_RFC4106_IV_SIZE;
	aead_rctx->flags = areq->base.flags;
	aead_rctx->ctx_handle = nctx->u.ctx_handle;
	aead_rctx->ctrl_arg = DECRYPT;

	ret = nitrox_rfc4106_set_aead_rctx_sglist(areq);
	if (ret)
		return ret;

	ret = nitrox_set_creq(aead_rctx);
	if (ret)
		return ret;

	/* send the crypto request */
	return nitrox_process_se_request(nctx->ndev, creq,
					 nitrox_rfc4106_callback, areq);
}

static int nitrox_rfc4106_init(struct crypto_aead *aead)
{
	int ret;

	ret = nitrox_gcm_common_init(aead);
	if (ret)
		return ret;

	crypto_aead_set_reqsize(aead, sizeof(struct aead_request) +
				sizeof(struct nitrox_rfc4106_rctx));

	return 0;
}

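/* AEAD algorithms exported by the driver: gcm(aes) and rfc4106(gcm(aes)) */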
static struct aead_alg nitrox_aeads[] = { {
	.base = {
		.cra_name = "gcm(aes)",
		.cra_driver_name = "n5_aes_gcm",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = 1,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.setkey = nitrox_aes_gcm_setkey,
	.setauthsize = nitrox_aes_gcm_setauthsize,
	.encrypt = nitrox_aes_gcm_enc,
	.decrypt = nitrox_aes_gcm_dec,
	.init = nitrox_aes_gcm_init,
	.exit = nitrox_aead_exit,
	.ivsize = GCM_AES_IV_SIZE,
	.maxauthsize = AES_BLOCK_SIZE,
}, {
	.base = {
		.cra_name = "rfc4106(gcm(aes))",
		.cra_driver_name = "n5_rfc4106",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = 1,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.setkey = nitrox_rfc4106_setkey,
	.setauthsize = nitrox_rfc4106_setauthsize,
	.encrypt = nitrox_rfc4106_enc,
	.decrypt = nitrox_rfc4106_dec,
	.init = nitrox_rfc4106_init,
	.exit = nitrox_aead_exit,
	.ivsize = GCM_RFC4106_IV_SIZE,
	.maxauthsize = AES_BLOCK_SIZE,
} };

int nitrox_register_aeads(void)
{
	return crypto_register_aeads(nitrox_aeads, ARRAY_SIZE(nitrox_aeads));
}

void nitrox_unregister_aeads(void)
{
	crypto_unregister_aeads(nitrox_aeads, ARRAY_SIZE(nitrox_aeads));
}