// SPDX-License-Identifier: GPL-2.0
#include <linux/kernel.h>
#include <linux/printk.h>
#include <linux/crypto.h>
#include <linux/rtnetlink.h>

#include <crypto/aead.h>
#include <crypto/authenc.h>
#include <crypto/des.h>
#include <crypto/sha.h>
#include <crypto/internal/aead.h>
#include <crypto/scatterwalk.h>
#include <crypto/gcm.h>

#include "nitrox_dev.h"
#include "nitrox_common.h"
#include "nitrox_req.h"

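/*
 * The leading 4 bytes of the GCM nonce act as a salt: they are programmed
 * once into the hardware context (fctx->crypto.iv), while the remaining
 * nonce bytes travel with each request.
 */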
#define GCM_AES_SALT_SIZE	4

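/*
 * gph_p3 packs the IV and auth-data byte offsets into the 16-bit param3
 * word of the general packet header; the bitfield order follows the CPU
 * endianness, and the result is converted to big-endian before being
 * handed to the device.
 */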
union gph_p3 {
	struct {
#ifdef __BIG_ENDIAN_BITFIELD
		u16 iv_offset : 8;
		u16 auth_offset : 8;
#else
		u16 auth_offset : 8;
		u16 iv_offset : 8;
#endif
	};
	u16 param;
};

static int nitrox_aes_gcm_setkey(struct crypto_aead *aead, const u8 *key,
				 unsigned int keylen)
{
	int aes_keylen;
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
	struct flexi_crypto_context *fctx;
	union fc_ctx_flags flags;

	aes_keylen = flexi_aes_keylen(keylen);
	if (aes_keylen < 0) {
		crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	/* fill crypto context */
	fctx = nctx->u.fctx;
	flags.f = be64_to_cpu(fctx->flags.f);
	flags.w0.aes_keylen = aes_keylen;
	fctx->flags.f = cpu_to_be64(flags.f);

	/* copy enc key to context */
	memset(&fctx->crypto, 0, sizeof(fctx->crypto));
	memcpy(fctx->crypto.u.key, key, keylen);

	return 0;
}

static int nitrox_aead_setauthsize(struct crypto_aead *aead,
				   unsigned int authsize)
{
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
	struct flexi_crypto_context *fctx = nctx->u.fctx;
	union fc_ctx_flags flags;

	flags.f = be64_to_cpu(fctx->flags.f);
	flags.w0.mac_len = authsize;
	fctx->flags.f = cpu_to_be64(flags.f);

	aead->authsize = authsize;

	return 0;
}

static int alloc_src_sglist(struct nitrox_kcrypt_request *nkreq,
			    struct scatterlist *src, char *iv, int ivsize,
			    int buflen)
{
	int nents = sg_nents_for_len(src, buflen);
	int ret;

	if (nents < 0)
		return nents;

	/* IV entry */
	nents += 1;
	/* Allocate buffer to hold IV and input scatterlist array */
	ret = alloc_src_req_buf(nkreq, nents, ivsize);
	if (ret)
		return ret;

	nitrox_creq_copy_iv(nkreq->src, iv, ivsize);
	nitrox_creq_set_src_sg(nkreq, nents, ivsize, src, buflen);

	return 0;
}

static int alloc_dst_sglist(struct nitrox_kcrypt_request *nkreq,
			    struct scatterlist *dst, int ivsize, int buflen)
{
	int nents = sg_nents_for_len(dst, buflen);
	int ret;

	if (nents < 0)
		return nents;

	/* IV, ORH, COMPLETION entries */
	nents += 3;
	/* Allocate buffer to hold ORH, COMPLETION and output scatterlist
	 * array
	 */
	ret = alloc_dst_req_buf(nkreq, nents);
	if (ret)
		return ret;

	nitrox_creq_set_orh(nkreq);
	nitrox_creq_set_comp(nkreq);
	nitrox_creq_set_dst_sg(nkreq, nents, ivsize, dst, buflen);

	return 0;
}

static void free_src_sglist(struct nitrox_kcrypt_request *nkreq)
{
	kfree(nkreq->src);
}

static void free_dst_sglist(struct nitrox_kcrypt_request *nkreq)
{
	kfree(nkreq->dst);
}

static int nitrox_set_creq(struct nitrox_aead_rctx *rctx)
{
	struct se_crypto_request *creq = &rctx->nkreq.creq;
	union gph_p3 param3;
	int ret;

	creq->flags = rctx->flags;
	creq->gfp = (rctx->flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? GFP_KERNEL :
							       GFP_ATOMIC;

	creq->ctrl.value = 0;
	creq->opcode = FLEXI_CRYPTO_ENCRYPT_HMAC;
	creq->ctrl.s.arg = rctx->ctrl_arg;

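	/*
	 * General packet header words consumed by the SE microcode. Their
	 * exact semantics are firmware-defined; as used here, param0 holds
	 * the length to encrypt, param1 the length to authenticate
	 * (AAD + payload), param2 the byte offset of the payload (the IV
	 * and AAD precede it), and param3 the packed IV/auth offsets.
	 */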
	creq->gph.param0 = cpu_to_be16(rctx->cryptlen);
	creq->gph.param1 = cpu_to_be16(rctx->cryptlen + rctx->assoclen);
	creq->gph.param2 = cpu_to_be16(rctx->ivsize + rctx->assoclen);
	param3.iv_offset = 0;
	param3.auth_offset = rctx->ivsize;
	creq->gph.param3 = cpu_to_be16(param3.param);

	creq->ctx_handle = rctx->ctx_handle;
	creq->ctrl.s.ctxl = sizeof(struct flexi_crypto_context);

	ret = alloc_src_sglist(&rctx->nkreq, rctx->src, rctx->iv, rctx->ivsize,
			       rctx->srclen);
	if (ret)
		return ret;

	ret = alloc_dst_sglist(&rctx->nkreq, rctx->dst, rctx->ivsize,
			       rctx->dstlen);
	if (ret) {
		free_src_sglist(&rctx->nkreq);
		return ret;
	}

	return 0;
}

static void nitrox_aead_callback(void *arg, int err)
{
	struct aead_request *areq = arg;
	struct nitrox_aead_rctx *rctx = aead_request_ctx(areq);

	free_src_sglist(&rctx->nkreq);
	free_dst_sglist(&rctx->nkreq);
	if (err) {
		pr_err_ratelimited("request failed status 0x%x\n", err);
		err = -EINVAL;
	}

	areq->base.complete(&areq->base, err);
}

static int nitrox_aes_gcm_enc(struct aead_request *areq)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(areq);
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
	struct nitrox_aead_rctx *rctx = aead_request_ctx(areq);
	struct se_crypto_request *creq = &rctx->nkreq.creq;
	struct flexi_crypto_context *fctx = nctx->u.fctx;
	int ret;

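	/*
	 * Split the 12-byte GCM nonce: the first 4 bytes are programmed
	 * into the context as the salt, the remaining 8 bytes are passed
	 * with the request as the per-request IV.
	 */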
	memcpy(fctx->crypto.iv, areq->iv, GCM_AES_SALT_SIZE);

	rctx->cryptlen = areq->cryptlen;
	rctx->assoclen = areq->assoclen;
	rctx->srclen = areq->assoclen + areq->cryptlen;
	rctx->dstlen = rctx->srclen + aead->authsize;
	rctx->iv = &areq->iv[GCM_AES_SALT_SIZE];
	rctx->ivsize = GCM_AES_IV_SIZE - GCM_AES_SALT_SIZE;
	rctx->flags = areq->base.flags;
	rctx->ctx_handle = nctx->u.ctx_handle;
	rctx->src = areq->src;
	rctx->dst = areq->dst;
	rctx->ctrl_arg = ENCRYPT;
	ret = nitrox_set_creq(rctx);
	if (ret)
		return ret;

	/* send the crypto request */
	return nitrox_process_se_request(nctx->ndev, creq, nitrox_aead_callback,
					 areq);
}

static int nitrox_aes_gcm_dec(struct aead_request *areq)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(areq);
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
	struct nitrox_aead_rctx *rctx = aead_request_ctx(areq);
	struct se_crypto_request *creq = &rctx->nkreq.creq;
	struct flexi_crypto_context *fctx = nctx->u.fctx;
	int ret;

	memcpy(fctx->crypto.iv, areq->iv, GCM_AES_SALT_SIZE);

	rctx->cryptlen = areq->cryptlen - aead->authsize;
	rctx->assoclen = areq->assoclen;
	rctx->srclen = areq->cryptlen + areq->assoclen;
	rctx->dstlen = rctx->srclen - aead->authsize;
	rctx->iv = &areq->iv[GCM_AES_SALT_SIZE];
	rctx->ivsize = GCM_AES_IV_SIZE - GCM_AES_SALT_SIZE;
	rctx->flags = areq->base.flags;
	rctx->ctx_handle = nctx->u.ctx_handle;
	rctx->src = areq->src;
	rctx->dst = areq->dst;
	rctx->ctrl_arg = DECRYPT;
	ret = nitrox_set_creq(rctx);
	if (ret)
		return ret;

	/* send the crypto request */
	return nitrox_process_se_request(nctx->ndev, creq, nitrox_aead_callback,
					 areq);
}

static int nitrox_aead_init(struct crypto_aead *aead)
{
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
	struct crypto_ctx_hdr *chdr;

	/* get the first device */
	nctx->ndev = nitrox_get_first_device();
	if (!nctx->ndev)
		return -ENODEV;

	/* allocate nitrox crypto context */
	chdr = crypto_alloc_context(nctx->ndev);
	if (!chdr) {
		nitrox_put_device(nctx->ndev);
		return -ENOMEM;
	}
	nctx->chdr = chdr;
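	/*
	 * The device-visible flexi crypto context sits immediately after
	 * the ctx_hdr bookkeeping header in the allocated buffer.
	 */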
	nctx->u.ctx_handle = (uintptr_t)((u8 *)chdr->vaddr +
					 sizeof(struct ctx_hdr));
	nctx->u.fctx->flags.f = 0;

	return 0;
}

static int nitrox_gcm_common_init(struct crypto_aead *aead)
{
	int ret;
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
	union fc_ctx_flags *flags;

	ret = nitrox_aead_init(aead);
	if (ret)
		return ret;

	flags = &nctx->u.fctx->flags;
	flags->w0.cipher_type = CIPHER_AES_GCM;
	flags->w0.hash_type = AUTH_NULL;
	flags->w0.iv_source = IV_FROM_DPTR;
	/* ask microcode to calculate ipad/opad */
	flags->w0.auth_input_type = 1;
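	/* the context keeps the flags word big-endian; convert once after
	 * the CPU-order bitfields have been filled in
	 */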
	flags->f = cpu_to_be64(flags->f);

	return 0;
}

295
296 static int nitrox_aes_gcm_init(struct crypto_aead *aead)
297 {
298 int ret;
299
300 ret = nitrox_gcm_common_init(aead);
301 if (ret)
302 return ret;
303
304 crypto_aead_set_reqsize(aead,
305 sizeof(struct aead_request) +
306 sizeof(struct nitrox_aead_rctx));
307
308 return 0;
309 }
310
static void nitrox_aead_exit(struct crypto_aead *aead)
{
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);

	/* free the nitrox crypto context */
	if (nctx->u.ctx_handle) {
		struct flexi_crypto_context *fctx = nctx->u.fctx;

		memzero_explicit(&fctx->crypto, sizeof(struct crypto_keys));
		memzero_explicit(&fctx->auth, sizeof(struct auth_keys));
		crypto_free_context((void *)nctx->chdr);
	}
	nitrox_put_device(nctx->ndev);

	nctx->u.ctx_handle = 0;
	nctx->ndev = NULL;
}

static int nitrox_rfc4106_setkey(struct crypto_aead *aead, const u8 *key,
				 unsigned int keylen)
{
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
	struct flexi_crypto_context *fctx = nctx->u.fctx;
	int ret;

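	/* RFC 4106 keys carry the 4-byte nonce salt after the AES key */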
	if (keylen < GCM_AES_SALT_SIZE)
		return -EINVAL;

	keylen -= GCM_AES_SALT_SIZE;
	ret = nitrox_aes_gcm_setkey(aead, key, keylen);
	if (ret)
		return ret;

	memcpy(fctx->crypto.iv, key + keylen, GCM_AES_SALT_SIZE);
	return 0;
}

static int nitrox_rfc4106_setauthsize(struct crypto_aead *aead,
				      unsigned int authsize)
{
	switch (authsize) {
	case 8:
	case 12:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return nitrox_aead_setauthsize(aead, authsize);
}

static int nitrox_rfc4106_set_aead_rctx_sglist(struct aead_request *areq)
{
	struct nitrox_rfc4106_rctx *rctx = aead_request_ctx(areq);
	struct nitrox_aead_rctx *aead_rctx = &rctx->base;
	unsigned int assoclen = areq->assoclen - GCM_RFC4106_IV_SIZE;
	struct scatterlist *sg;

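	/*
	 * For RFC 4106, areq->assoclen includes the 8-byte IV, so only 16
	 * and 20 are valid (8 or 12 bytes of real AAD). Rebuild the src/dst
	 * scatterlists with the IV stripped out of the AAD, since the IV is
	 * prepended to the request separately.
	 */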
	if (areq->assoclen != 16 && areq->assoclen != 20)
		return -EINVAL;

	scatterwalk_map_and_copy(rctx->assoc, areq->src, 0, assoclen, 0);
	sg_init_table(rctx->src, 3);
	sg_set_buf(rctx->src, rctx->assoc, assoclen);
	sg = scatterwalk_ffwd(rctx->src + 1, areq->src, areq->assoclen);
	if (sg != rctx->src + 1)
		sg_chain(rctx->src, 2, sg);

	if (areq->src != areq->dst) {
		sg_init_table(rctx->dst, 3);
		sg_set_buf(rctx->dst, rctx->assoc, assoclen);
		sg = scatterwalk_ffwd(rctx->dst + 1, areq->dst, areq->assoclen);
		if (sg != rctx->dst + 1)
			sg_chain(rctx->dst, 2, sg);
	}

	aead_rctx->src = rctx->src;
	aead_rctx->dst = (areq->src == areq->dst) ? rctx->src : rctx->dst;

	return 0;
}

static void nitrox_rfc4106_callback(void *arg, int err)
{
	struct aead_request *areq = arg;
	struct nitrox_rfc4106_rctx *rctx = aead_request_ctx(areq);
	struct nitrox_kcrypt_request *nkreq = &rctx->base.nkreq;

	free_src_sglist(nkreq);
	free_dst_sglist(nkreq);
	if (err) {
		pr_err_ratelimited("request failed status 0x%x\n", err);
		err = -EINVAL;
	}

	areq->base.complete(&areq->base, err);
}

static int nitrox_rfc4106_enc(struct aead_request *areq)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(areq);
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
	struct nitrox_rfc4106_rctx *rctx = aead_request_ctx(areq);
	struct nitrox_aead_rctx *aead_rctx = &rctx->base;
	struct se_crypto_request *creq = &aead_rctx->nkreq.creq;
	int ret;

	aead_rctx->cryptlen = areq->cryptlen;
	aead_rctx->assoclen = areq->assoclen - GCM_RFC4106_IV_SIZE;
	aead_rctx->srclen = aead_rctx->assoclen + aead_rctx->cryptlen;
	aead_rctx->dstlen = aead_rctx->srclen + aead->authsize;
	aead_rctx->iv = areq->iv;
	aead_rctx->ivsize = GCM_RFC4106_IV_SIZE;
	aead_rctx->flags = areq->base.flags;
	aead_rctx->ctx_handle = nctx->u.ctx_handle;
	aead_rctx->ctrl_arg = ENCRYPT;

	ret = nitrox_rfc4106_set_aead_rctx_sglist(areq);
	if (ret)
		return ret;

	ret = nitrox_set_creq(aead_rctx);
	if (ret)
		return ret;

	/* send the crypto request */
	return nitrox_process_se_request(nctx->ndev, creq,
					 nitrox_rfc4106_callback, areq);
}

static int nitrox_rfc4106_dec(struct aead_request *areq)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(areq);
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
	struct nitrox_rfc4106_rctx *rctx = aead_request_ctx(areq);
	struct nitrox_aead_rctx *aead_rctx = &rctx->base;
	struct se_crypto_request *creq = &aead_rctx->nkreq.creq;
	int ret;

	aead_rctx->cryptlen = areq->cryptlen - aead->authsize;
	aead_rctx->assoclen = areq->assoclen - GCM_RFC4106_IV_SIZE;
	aead_rctx->srclen = areq->cryptlen - GCM_RFC4106_IV_SIZE + areq->assoclen;
	aead_rctx->dstlen = aead_rctx->srclen - aead->authsize;
	aead_rctx->iv = areq->iv;
	aead_rctx->ivsize = GCM_RFC4106_IV_SIZE;
	aead_rctx->flags = areq->base.flags;
	aead_rctx->ctx_handle = nctx->u.ctx_handle;
	aead_rctx->ctrl_arg = DECRYPT;

	ret = nitrox_rfc4106_set_aead_rctx_sglist(areq);
	if (ret)
		return ret;

	ret = nitrox_set_creq(aead_rctx);
	if (ret)
		return ret;

	/* send the crypto request */
	return nitrox_process_se_request(nctx->ndev, creq,
					 nitrox_rfc4106_callback, areq);
}

static int nitrox_rfc4106_init(struct crypto_aead *aead)
{
	int ret;

	ret = nitrox_gcm_common_init(aead);
	if (ret)
		return ret;

	crypto_aead_set_reqsize(aead, sizeof(struct aead_request) +
				sizeof(struct nitrox_rfc4106_rctx));

	return 0;
}

static struct aead_alg nitrox_aeads[] = { {
	.base = {
		.cra_name = "gcm(aes)",
		.cra_driver_name = "n5_aes_gcm",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.setkey = nitrox_aes_gcm_setkey,
	.setauthsize = nitrox_aead_setauthsize,
	.encrypt = nitrox_aes_gcm_enc,
	.decrypt = nitrox_aes_gcm_dec,
	.init = nitrox_aes_gcm_init,
	.exit = nitrox_aead_exit,
	.ivsize = GCM_AES_IV_SIZE,
	.maxauthsize = AES_BLOCK_SIZE,
}, {
	.base = {
		.cra_name = "rfc4106(gcm(aes))",
		.cra_driver_name = "n5_rfc4106",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.setkey = nitrox_rfc4106_setkey,
	.setauthsize = nitrox_rfc4106_setauthsize,
	.encrypt = nitrox_rfc4106_enc,
	.decrypt = nitrox_rfc4106_dec,
	.init = nitrox_rfc4106_init,
	.exit = nitrox_aead_exit,
	.ivsize = GCM_RFC4106_IV_SIZE,
	.maxauthsize = AES_BLOCK_SIZE,
} };

int nitrox_register_aeads(void)
{
	return crypto_register_aeads(nitrox_aeads, ARRAY_SIZE(nitrox_aeads));
}

void nitrox_unregister_aeads(void)
{
	crypto_unregister_aeads(nitrox_aeads, ARRAY_SIZE(nitrox_aeads));
}