1 // SPDX-License-Identifier: GPL-2.0
2 #include <linux/kernel.h>
3 #include <linux/printk.h>
4 #include <linux/crypto.h>
5 #include <linux/rtnetlink.h>
7 #include <crypto/aead.h>
8 #include <crypto/authenc.h>
9 #include <crypto/des.h>
10 #include <crypto/sha.h>
11 #include <crypto/internal/aead.h>
12 #include <crypto/scatterwalk.h>
13 #include <crypto/gcm.h>
15 #include "nitrox_dev.h"
16 #include "nitrox_common.h"
17 #include "nitrox_req.h"
19 #define GCM_AES_SALT_SIZE 4
23 #ifdef __BIG_ENDIAN_BITFIELD
34 static int nitrox_aes_gcm_setkey(struct crypto_aead
*aead
, const u8
*key
,
38 struct nitrox_crypto_ctx
*nctx
= crypto_aead_ctx(aead
);
39 struct flexi_crypto_context
*fctx
;
40 union fc_ctx_flags flags
;
42 aes_keylen
= flexi_aes_keylen(keylen
);
44 crypto_aead_set_flags(aead
, CRYPTO_TFM_RES_BAD_KEY_LEN
);
48 /* fill crypto context */
50 flags
.f
= be64_to_cpu(fctx
->flags
.f
);
51 flags
.w0
.aes_keylen
= aes_keylen
;
52 fctx
->flags
.f
= cpu_to_be64(flags
.f
);
54 /* copy enc key to context */
55 memset(&fctx
->crypto
, 0, sizeof(fctx
->crypto
));
56 memcpy(fctx
->crypto
.u
.key
, key
, keylen
);
61 static int nitrox_aead_setauthsize(struct crypto_aead
*aead
,
62 unsigned int authsize
)
64 struct nitrox_crypto_ctx
*nctx
= crypto_aead_ctx(aead
);
65 struct flexi_crypto_context
*fctx
= nctx
->u
.fctx
;
66 union fc_ctx_flags flags
;
68 flags
.f
= be64_to_cpu(fctx
->flags
.f
);
69 flags
.w0
.mac_len
= authsize
;
70 fctx
->flags
.f
= cpu_to_be64(flags
.f
);
72 aead
->authsize
= authsize
;
77 static int nitrox_aes_gcm_setauthsize(struct crypto_aead
*aead
,
78 unsigned int authsize
)
93 return nitrox_aead_setauthsize(aead
, authsize
);
96 static int alloc_src_sglist(struct nitrox_kcrypt_request
*nkreq
,
97 struct scatterlist
*src
, char *iv
, int ivsize
,
100 int nents
= sg_nents_for_len(src
, buflen
);
108 /* Allocate buffer to hold IV and input scatterlist array */
109 ret
= alloc_src_req_buf(nkreq
, nents
, ivsize
);
113 nitrox_creq_copy_iv(nkreq
->src
, iv
, ivsize
);
114 nitrox_creq_set_src_sg(nkreq
, nents
, ivsize
, src
, buflen
);
/*
 * alloc_dst_sglist - build the output (RPTR) scatterlist for a request.
 * @nkreq: nitrox request state
 * @dst: caller's destination scatterlist
 * @ivsize: IV length in bytes
 * @buflen: total destination data length
 *
 * Counts the destination entries, allocates one buffer holding the ORH,
 * COMPLETION word and the hardware scatterlist array, then wires them up.
 * Returns 0 on success or a negative errno.
 */
static int alloc_dst_sglist(struct nitrox_kcrypt_request *nkreq,
			    struct scatterlist *dst, int ivsize, int buflen)
{
	int nents = sg_nents_for_len(dst, buflen);
	int ret;

	if (nents < 0)
		return nents;

	/* IV, ORH, COMPLETION entries */
	nents += 3;
	/* Allocate buffer to hold ORH, COMPLETION and output scatterlist
	 * array
	 */
	ret = alloc_dst_req_buf(nkreq, nents);
	if (ret)
		return ret;

	nitrox_creq_set_orh(nkreq);
	nitrox_creq_set_comp(nkreq);
	nitrox_creq_set_dst_sg(nkreq, nents, ivsize, dst, buflen);

	return 0;
}
144 static void free_src_sglist(struct nitrox_kcrypt_request
*nkreq
)
149 static void free_dst_sglist(struct nitrox_kcrypt_request
*nkreq
)
154 static int nitrox_set_creq(struct nitrox_aead_rctx
*rctx
)
156 struct se_crypto_request
*creq
= &rctx
->nkreq
.creq
;
160 creq
->flags
= rctx
->flags
;
161 creq
->gfp
= (rctx
->flags
& CRYPTO_TFM_REQ_MAY_SLEEP
) ? GFP_KERNEL
:
164 creq
->ctrl
.value
= 0;
165 creq
->opcode
= FLEXI_CRYPTO_ENCRYPT_HMAC
;
166 creq
->ctrl
.s
.arg
= rctx
->ctrl_arg
;
168 creq
->gph
.param0
= cpu_to_be16(rctx
->cryptlen
);
169 creq
->gph
.param1
= cpu_to_be16(rctx
->cryptlen
+ rctx
->assoclen
);
170 creq
->gph
.param2
= cpu_to_be16(rctx
->ivsize
+ rctx
->assoclen
);
171 param3
.iv_offset
= 0;
172 param3
.auth_offset
= rctx
->ivsize
;
173 creq
->gph
.param3
= cpu_to_be16(param3
.param
);
175 creq
->ctx_handle
= rctx
->ctx_handle
;
176 creq
->ctrl
.s
.ctxl
= sizeof(struct flexi_crypto_context
);
178 ret
= alloc_src_sglist(&rctx
->nkreq
, rctx
->src
, rctx
->iv
, rctx
->ivsize
,
183 ret
= alloc_dst_sglist(&rctx
->nkreq
, rctx
->dst
, rctx
->ivsize
,
186 free_src_sglist(&rctx
->nkreq
);
193 static void nitrox_aead_callback(void *arg
, int err
)
195 struct aead_request
*areq
= arg
;
196 struct nitrox_aead_rctx
*rctx
= aead_request_ctx(areq
);
198 free_src_sglist(&rctx
->nkreq
);
199 free_dst_sglist(&rctx
->nkreq
);
201 pr_err_ratelimited("request failed status 0x%0x\n", err
);
205 areq
->base
.complete(&areq
->base
, err
);
208 static inline bool nitrox_aes_gcm_assoclen_supported(unsigned int assoclen
)
216 static int nitrox_aes_gcm_enc(struct aead_request
*areq
)
218 struct crypto_aead
*aead
= crypto_aead_reqtfm(areq
);
219 struct nitrox_crypto_ctx
*nctx
= crypto_aead_ctx(aead
);
220 struct nitrox_aead_rctx
*rctx
= aead_request_ctx(areq
);
221 struct se_crypto_request
*creq
= &rctx
->nkreq
.creq
;
222 struct flexi_crypto_context
*fctx
= nctx
->u
.fctx
;
225 if (!nitrox_aes_gcm_assoclen_supported(areq
->assoclen
))
228 memcpy(fctx
->crypto
.iv
, areq
->iv
, GCM_AES_SALT_SIZE
);
230 rctx
->cryptlen
= areq
->cryptlen
;
231 rctx
->assoclen
= areq
->assoclen
;
232 rctx
->srclen
= areq
->assoclen
+ areq
->cryptlen
;
233 rctx
->dstlen
= rctx
->srclen
+ aead
->authsize
;
234 rctx
->iv
= &areq
->iv
[GCM_AES_SALT_SIZE
];
235 rctx
->ivsize
= GCM_AES_IV_SIZE
- GCM_AES_SALT_SIZE
;
236 rctx
->flags
= areq
->base
.flags
;
237 rctx
->ctx_handle
= nctx
->u
.ctx_handle
;
238 rctx
->src
= areq
->src
;
239 rctx
->dst
= areq
->dst
;
240 rctx
->ctrl_arg
= ENCRYPT
;
241 ret
= nitrox_set_creq(rctx
);
245 /* send the crypto request */
246 return nitrox_process_se_request(nctx
->ndev
, creq
, nitrox_aead_callback
,
250 static int nitrox_aes_gcm_dec(struct aead_request
*areq
)
252 struct crypto_aead
*aead
= crypto_aead_reqtfm(areq
);
253 struct nitrox_crypto_ctx
*nctx
= crypto_aead_ctx(aead
);
254 struct nitrox_aead_rctx
*rctx
= aead_request_ctx(areq
);
255 struct se_crypto_request
*creq
= &rctx
->nkreq
.creq
;
256 struct flexi_crypto_context
*fctx
= nctx
->u
.fctx
;
259 if (!nitrox_aes_gcm_assoclen_supported(areq
->assoclen
))
262 memcpy(fctx
->crypto
.iv
, areq
->iv
, GCM_AES_SALT_SIZE
);
264 rctx
->cryptlen
= areq
->cryptlen
- aead
->authsize
;
265 rctx
->assoclen
= areq
->assoclen
;
266 rctx
->srclen
= areq
->cryptlen
+ areq
->assoclen
;
267 rctx
->dstlen
= rctx
->srclen
- aead
->authsize
;
268 rctx
->iv
= &areq
->iv
[GCM_AES_SALT_SIZE
];
269 rctx
->ivsize
= GCM_AES_IV_SIZE
- GCM_AES_SALT_SIZE
;
270 rctx
->flags
= areq
->base
.flags
;
271 rctx
->ctx_handle
= nctx
->u
.ctx_handle
;
272 rctx
->src
= areq
->src
;
273 rctx
->dst
= areq
->dst
;
274 rctx
->ctrl_arg
= DECRYPT
;
275 ret
= nitrox_set_creq(rctx
);
279 /* send the crypto request */
280 return nitrox_process_se_request(nctx
->ndev
, creq
, nitrox_aead_callback
,
284 static int nitrox_aead_init(struct crypto_aead
*aead
)
286 struct nitrox_crypto_ctx
*nctx
= crypto_aead_ctx(aead
);
287 struct crypto_ctx_hdr
*chdr
;
289 /* get the first device */
290 nctx
->ndev
= nitrox_get_first_device();
294 /* allocate nitrox crypto context */
295 chdr
= crypto_alloc_context(nctx
->ndev
);
297 nitrox_put_device(nctx
->ndev
);
301 nctx
->u
.ctx_handle
= (uintptr_t)((u8
*)chdr
->vaddr
+
302 sizeof(struct ctx_hdr
));
303 nctx
->u
.fctx
->flags
.f
= 0;
308 static int nitrox_gcm_common_init(struct crypto_aead
*aead
)
311 struct nitrox_crypto_ctx
*nctx
= crypto_aead_ctx(aead
);
312 union fc_ctx_flags
*flags
;
314 ret
= nitrox_aead_init(aead
);
318 flags
= &nctx
->u
.fctx
->flags
;
319 flags
->w0
.cipher_type
= CIPHER_AES_GCM
;
320 flags
->w0
.hash_type
= AUTH_NULL
;
321 flags
->w0
.iv_source
= IV_FROM_DPTR
;
322 /* ask microcode to calculate ipad/opad */
323 flags
->w0
.auth_input_type
= 1;
324 flags
->f
= be64_to_cpu(flags
->f
);
329 static int nitrox_aes_gcm_init(struct crypto_aead
*aead
)
333 ret
= nitrox_gcm_common_init(aead
);
337 crypto_aead_set_reqsize(aead
,
338 sizeof(struct aead_request
) +
339 sizeof(struct nitrox_aead_rctx
));
344 static void nitrox_aead_exit(struct crypto_aead
*aead
)
346 struct nitrox_crypto_ctx
*nctx
= crypto_aead_ctx(aead
);
348 /* free the nitrox crypto context */
349 if (nctx
->u
.ctx_handle
) {
350 struct flexi_crypto_context
*fctx
= nctx
->u
.fctx
;
352 memzero_explicit(&fctx
->crypto
, sizeof(struct crypto_keys
));
353 memzero_explicit(&fctx
->auth
, sizeof(struct auth_keys
));
354 crypto_free_context((void *)nctx
->chdr
);
356 nitrox_put_device(nctx
->ndev
);
358 nctx
->u
.ctx_handle
= 0;
362 static int nitrox_rfc4106_setkey(struct crypto_aead
*aead
, const u8
*key
,
365 struct nitrox_crypto_ctx
*nctx
= crypto_aead_ctx(aead
);
366 struct flexi_crypto_context
*fctx
= nctx
->u
.fctx
;
369 if (keylen
< GCM_AES_SALT_SIZE
)
372 keylen
-= GCM_AES_SALT_SIZE
;
373 ret
= nitrox_aes_gcm_setkey(aead
, key
, keylen
);
377 memcpy(fctx
->crypto
.iv
, key
+ keylen
, GCM_AES_SALT_SIZE
);
381 static int nitrox_rfc4106_setauthsize(struct crypto_aead
*aead
,
382 unsigned int authsize
)
393 return nitrox_aead_setauthsize(aead
, authsize
);
396 static int nitrox_rfc4106_set_aead_rctx_sglist(struct aead_request
*areq
)
398 struct nitrox_rfc4106_rctx
*rctx
= aead_request_ctx(areq
);
399 struct nitrox_aead_rctx
*aead_rctx
= &rctx
->base
;
400 unsigned int assoclen
= areq
->assoclen
- GCM_RFC4106_IV_SIZE
;
401 struct scatterlist
*sg
;
403 if (areq
->assoclen
!= 16 && areq
->assoclen
!= 20)
406 scatterwalk_map_and_copy(rctx
->assoc
, areq
->src
, 0, assoclen
, 0);
407 sg_init_table(rctx
->src
, 3);
408 sg_set_buf(rctx
->src
, rctx
->assoc
, assoclen
);
409 sg
= scatterwalk_ffwd(rctx
->src
+ 1, areq
->src
, areq
->assoclen
);
410 if (sg
!= rctx
->src
+ 1)
411 sg_chain(rctx
->src
, 2, sg
);
413 if (areq
->src
!= areq
->dst
) {
414 sg_init_table(rctx
->dst
, 3);
415 sg_set_buf(rctx
->dst
, rctx
->assoc
, assoclen
);
416 sg
= scatterwalk_ffwd(rctx
->dst
+ 1, areq
->dst
, areq
->assoclen
);
417 if (sg
!= rctx
->dst
+ 1)
418 sg_chain(rctx
->dst
, 2, sg
);
421 aead_rctx
->src
= rctx
->src
;
422 aead_rctx
->dst
= (areq
->src
== areq
->dst
) ? rctx
->src
: rctx
->dst
;
427 static void nitrox_rfc4106_callback(void *arg
, int err
)
429 struct aead_request
*areq
= arg
;
430 struct nitrox_rfc4106_rctx
*rctx
= aead_request_ctx(areq
);
431 struct nitrox_kcrypt_request
*nkreq
= &rctx
->base
.nkreq
;
433 free_src_sglist(nkreq
);
434 free_dst_sglist(nkreq
);
436 pr_err_ratelimited("request failed status 0x%0x\n", err
);
440 areq
->base
.complete(&areq
->base
, err
);
443 static int nitrox_rfc4106_enc(struct aead_request
*areq
)
445 struct crypto_aead
*aead
= crypto_aead_reqtfm(areq
);
446 struct nitrox_crypto_ctx
*nctx
= crypto_aead_ctx(aead
);
447 struct nitrox_rfc4106_rctx
*rctx
= aead_request_ctx(areq
);
448 struct nitrox_aead_rctx
*aead_rctx
= &rctx
->base
;
449 struct se_crypto_request
*creq
= &aead_rctx
->nkreq
.creq
;
452 aead_rctx
->cryptlen
= areq
->cryptlen
;
453 aead_rctx
->assoclen
= areq
->assoclen
- GCM_RFC4106_IV_SIZE
;
454 aead_rctx
->srclen
= aead_rctx
->assoclen
+ aead_rctx
->cryptlen
;
455 aead_rctx
->dstlen
= aead_rctx
->srclen
+ aead
->authsize
;
456 aead_rctx
->iv
= areq
->iv
;
457 aead_rctx
->ivsize
= GCM_RFC4106_IV_SIZE
;
458 aead_rctx
->flags
= areq
->base
.flags
;
459 aead_rctx
->ctx_handle
= nctx
->u
.ctx_handle
;
460 aead_rctx
->ctrl_arg
= ENCRYPT
;
462 ret
= nitrox_rfc4106_set_aead_rctx_sglist(areq
);
466 ret
= nitrox_set_creq(aead_rctx
);
470 /* send the crypto request */
471 return nitrox_process_se_request(nctx
->ndev
, creq
,
472 nitrox_rfc4106_callback
, areq
);
475 static int nitrox_rfc4106_dec(struct aead_request
*areq
)
477 struct crypto_aead
*aead
= crypto_aead_reqtfm(areq
);
478 struct nitrox_crypto_ctx
*nctx
= crypto_aead_ctx(aead
);
479 struct nitrox_rfc4106_rctx
*rctx
= aead_request_ctx(areq
);
480 struct nitrox_aead_rctx
*aead_rctx
= &rctx
->base
;
481 struct se_crypto_request
*creq
= &aead_rctx
->nkreq
.creq
;
484 aead_rctx
->cryptlen
= areq
->cryptlen
- aead
->authsize
;
485 aead_rctx
->assoclen
= areq
->assoclen
- GCM_RFC4106_IV_SIZE
;
487 areq
->cryptlen
- GCM_RFC4106_IV_SIZE
+ areq
->assoclen
;
488 aead_rctx
->dstlen
= aead_rctx
->srclen
- aead
->authsize
;
489 aead_rctx
->iv
= areq
->iv
;
490 aead_rctx
->ivsize
= GCM_RFC4106_IV_SIZE
;
491 aead_rctx
->flags
= areq
->base
.flags
;
492 aead_rctx
->ctx_handle
= nctx
->u
.ctx_handle
;
493 aead_rctx
->ctrl_arg
= DECRYPT
;
495 ret
= nitrox_rfc4106_set_aead_rctx_sglist(areq
);
499 ret
= nitrox_set_creq(aead_rctx
);
503 /* send the crypto request */
504 return nitrox_process_se_request(nctx
->ndev
, creq
,
505 nitrox_rfc4106_callback
, areq
);
508 static int nitrox_rfc4106_init(struct crypto_aead
*aead
)
512 ret
= nitrox_gcm_common_init(aead
);
516 crypto_aead_set_reqsize(aead
, sizeof(struct aead_request
) +
517 sizeof(struct nitrox_rfc4106_rctx
));
522 static struct aead_alg nitrox_aeads
[] = { {
524 .cra_name
= "gcm(aes)",
525 .cra_driver_name
= "n5_aes_gcm",
526 .cra_priority
= PRIO
,
527 .cra_flags
= CRYPTO_ALG_ASYNC
,
529 .cra_ctxsize
= sizeof(struct nitrox_crypto_ctx
),
531 .cra_module
= THIS_MODULE
,
533 .setkey
= nitrox_aes_gcm_setkey
,
534 .setauthsize
= nitrox_aes_gcm_setauthsize
,
535 .encrypt
= nitrox_aes_gcm_enc
,
536 .decrypt
= nitrox_aes_gcm_dec
,
537 .init
= nitrox_aes_gcm_init
,
538 .exit
= nitrox_aead_exit
,
539 .ivsize
= GCM_AES_IV_SIZE
,
540 .maxauthsize
= AES_BLOCK_SIZE
,
543 .cra_name
= "rfc4106(gcm(aes))",
544 .cra_driver_name
= "n5_rfc4106",
545 .cra_priority
= PRIO
,
546 .cra_flags
= CRYPTO_ALG_ASYNC
,
548 .cra_ctxsize
= sizeof(struct nitrox_crypto_ctx
),
550 .cra_module
= THIS_MODULE
,
552 .setkey
= nitrox_rfc4106_setkey
,
553 .setauthsize
= nitrox_rfc4106_setauthsize
,
554 .encrypt
= nitrox_rfc4106_enc
,
555 .decrypt
= nitrox_rfc4106_dec
,
556 .init
= nitrox_rfc4106_init
,
557 .exit
= nitrox_aead_exit
,
558 .ivsize
= GCM_RFC4106_IV_SIZE
,
559 .maxauthsize
= AES_BLOCK_SIZE
,
562 int nitrox_register_aeads(void)
564 return crypto_register_aeads(nitrox_aeads
, ARRAY_SIZE(nitrox_aeads
));
567 void nitrox_unregister_aeads(void)
569 crypto_unregister_aeads(nitrox_aeads
, ARRAY_SIZE(nitrox_aeads
));