2 * caam - Freescale FSL CAAM support for crypto API
4 * Copyright 2008-2011 Freescale Semiconductor, Inc.
7 * Based on talitos crypto API driver.
9 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
11 * --------------- ---------------
12 * | JobDesc #1 |-------------------->| ShareDesc |
13 * | *(packet 1) | | (PDB) |
14 * --------------- |------------->| (hashKey) |
16 * . | |-------->| (operation) |
17 * --------------- | | ---------------
18 * | JobDesc #2 |------| |
24 * | JobDesc #3 |------------
28 * The SharedDesc never changes for a connection unless rekeyed, but
29 * each packet will likely be in a different place. So all we need
30 * to know to process the packet is where the input is, where the
31 * output goes, and what context we want to process with. Context is
32 * in the SharedDesc, packet references in the JobDesc.
34 * So, a job desc looks like:
36 * ---------------------
38 * | ShareDesc Pointer |
45 * ---------------------
52 #include "desc_constr.h"
55 #include "sg_sw_sec4.h"
57 #include "caamalg_desc.h"
62 #define CAAM_CRA_PRIORITY 3000
63 /* max key is sum of AES_MAX_KEY_SIZE, max split key size */
64 #define CAAM_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + \
65 CTR_RFC3686_NONCE_SIZE + \
66 SHA512_DIGEST_SIZE * 2)
68 #define AEAD_DESC_JOB_IO_LEN (DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
69 #define GCM_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \
71 #define AUTHENC_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \
74 #define DESC_MAX_USED_BYTES (CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN)
75 #define DESC_MAX_USED_LEN (DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
78 /* for print_hex_dumps with line references */
79 #define debug(format, arg...) printk(format, arg)
81 #define debug(format, arg...)
/* Global list of algorithms this driver has registered with the crypto API;
 * presumably populated/torn down in module init/exit — not visible here.
 */
84 static struct list_head alg_list
;
86 struct caam_alg_entry
{
93 struct caam_aead_alg
{
95 struct caam_alg_entry caam
;
100 * per-session context
103 u32 sh_desc_enc
[DESC_MAX_USED_LEN
];
104 u32 sh_desc_dec
[DESC_MAX_USED_LEN
];
105 u32 sh_desc_givenc
[DESC_MAX_USED_LEN
];
106 u8 key
[CAAM_MAX_KEY_SIZE
];
107 dma_addr_t sh_desc_enc_dma
;
108 dma_addr_t sh_desc_dec_dma
;
109 dma_addr_t sh_desc_givenc_dma
;
111 struct device
*jrdev
;
112 struct alginfo adata
;
113 struct alginfo cdata
;
114 unsigned int authsize
;
117 static int aead_null_set_sh_desc(struct crypto_aead
*aead
)
119 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
120 struct device
*jrdev
= ctx
->jrdev
;
122 int rem_bytes
= CAAM_DESC_BYTES_MAX
- AEAD_DESC_JOB_IO_LEN
-
123 ctx
->adata
.keylen_pad
;
126 * Job Descriptor and Shared Descriptors
127 * must all fit into the 64-word Descriptor h/w Buffer
129 if (rem_bytes
>= DESC_AEAD_NULL_ENC_LEN
) {
130 ctx
->adata
.key_inline
= true;
131 ctx
->adata
.key_virt
= ctx
->key
;
133 ctx
->adata
.key_inline
= false;
134 ctx
->adata
.key_dma
= ctx
->key_dma
;
137 /* aead_encrypt shared descriptor */
138 desc
= ctx
->sh_desc_enc
;
139 cnstr_shdsc_aead_null_encap(desc
, &ctx
->adata
, ctx
->authsize
);
140 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_enc_dma
,
141 desc_bytes(desc
), DMA_TO_DEVICE
);
144 * Job Descriptor and Shared Descriptors
145 * must all fit into the 64-word Descriptor h/w Buffer
147 if (rem_bytes
>= DESC_AEAD_NULL_DEC_LEN
) {
148 ctx
->adata
.key_inline
= true;
149 ctx
->adata
.key_virt
= ctx
->key
;
151 ctx
->adata
.key_inline
= false;
152 ctx
->adata
.key_dma
= ctx
->key_dma
;
155 /* aead_decrypt shared descriptor */
156 desc
= ctx
->sh_desc_dec
;
157 cnstr_shdsc_aead_null_decap(desc
, &ctx
->adata
, ctx
->authsize
);
158 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_dec_dma
,
159 desc_bytes(desc
), DMA_TO_DEVICE
);
164 static int aead_set_sh_desc(struct crypto_aead
*aead
)
166 struct caam_aead_alg
*alg
= container_of(crypto_aead_alg(aead
),
167 struct caam_aead_alg
, aead
);
168 unsigned int ivsize
= crypto_aead_ivsize(aead
);
169 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
170 struct device
*jrdev
= ctx
->jrdev
;
172 u32
*desc
, *nonce
= NULL
;
174 unsigned int data_len
[2];
175 const bool ctr_mode
= ((ctx
->cdata
.algtype
& OP_ALG_AAI_MASK
) ==
176 OP_ALG_AAI_CTR_MOD128
);
177 const bool is_rfc3686
= alg
->caam
.rfc3686
;
182 /* NULL encryption / decryption */
183 if (!ctx
->cdata
.keylen
)
184 return aead_null_set_sh_desc(aead
);
187 * AES-CTR needs to load IV in CONTEXT1 reg
188 * at an offset of 128bits (16bytes)
189 * CONTEXT1[255:128] = IV
196 * CONTEXT1[255:128] = {NONCE, IV, COUNTER}
199 ctx1_iv_off
= 16 + CTR_RFC3686_NONCE_SIZE
;
200 nonce
= (u32
*)((void *)ctx
->key
+ ctx
->adata
.keylen_pad
+
201 ctx
->cdata
.keylen
- CTR_RFC3686_NONCE_SIZE
);
204 data_len
[0] = ctx
->adata
.keylen_pad
;
205 data_len
[1] = ctx
->cdata
.keylen
;
211 * Job Descriptor and Shared Descriptors
212 * must all fit into the 64-word Descriptor h/w Buffer
214 if (desc_inline_query(DESC_AEAD_ENC_LEN
+
215 (is_rfc3686
? DESC_AEAD_CTR_RFC3686_LEN
: 0),
216 AUTHENC_DESC_JOB_IO_LEN
, data_len
, &inl_mask
,
217 ARRAY_SIZE(data_len
)) < 0)
221 ctx
->adata
.key_virt
= ctx
->key
;
223 ctx
->adata
.key_dma
= ctx
->key_dma
;
226 ctx
->cdata
.key_virt
= ctx
->key
+ ctx
->adata
.keylen_pad
;
228 ctx
->cdata
.key_dma
= ctx
->key_dma
+ ctx
->adata
.keylen_pad
;
230 ctx
->adata
.key_inline
= !!(inl_mask
& 1);
231 ctx
->cdata
.key_inline
= !!(inl_mask
& 2);
233 /* aead_encrypt shared descriptor */
234 desc
= ctx
->sh_desc_enc
;
235 cnstr_shdsc_aead_encap(desc
, &ctx
->cdata
, &ctx
->adata
, ivsize
,
236 ctx
->authsize
, is_rfc3686
, nonce
, ctx1_iv_off
,
238 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_enc_dma
,
239 desc_bytes(desc
), DMA_TO_DEVICE
);
243 * Job Descriptor and Shared Descriptors
244 * must all fit into the 64-word Descriptor h/w Buffer
246 if (desc_inline_query(DESC_AEAD_DEC_LEN
+
247 (is_rfc3686
? DESC_AEAD_CTR_RFC3686_LEN
: 0),
248 AUTHENC_DESC_JOB_IO_LEN
, data_len
, &inl_mask
,
249 ARRAY_SIZE(data_len
)) < 0)
253 ctx
->adata
.key_virt
= ctx
->key
;
255 ctx
->adata
.key_dma
= ctx
->key_dma
;
258 ctx
->cdata
.key_virt
= ctx
->key
+ ctx
->adata
.keylen_pad
;
260 ctx
->cdata
.key_dma
= ctx
->key_dma
+ ctx
->adata
.keylen_pad
;
262 ctx
->adata
.key_inline
= !!(inl_mask
& 1);
263 ctx
->cdata
.key_inline
= !!(inl_mask
& 2);
265 /* aead_decrypt shared descriptor */
266 desc
= ctx
->sh_desc_dec
;
267 cnstr_shdsc_aead_decap(desc
, &ctx
->cdata
, &ctx
->adata
, ivsize
,
268 ctx
->authsize
, alg
->caam
.geniv
, is_rfc3686
,
269 nonce
, ctx1_iv_off
, false);
270 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_dec_dma
,
271 desc_bytes(desc
), DMA_TO_DEVICE
);
273 if (!alg
->caam
.geniv
)
277 * Job Descriptor and Shared Descriptors
278 * must all fit into the 64-word Descriptor h/w Buffer
280 if (desc_inline_query(DESC_AEAD_GIVENC_LEN
+
281 (is_rfc3686
? DESC_AEAD_CTR_RFC3686_LEN
: 0),
282 AUTHENC_DESC_JOB_IO_LEN
, data_len
, &inl_mask
,
283 ARRAY_SIZE(data_len
)) < 0)
287 ctx
->adata
.key_virt
= ctx
->key
;
289 ctx
->adata
.key_dma
= ctx
->key_dma
;
292 ctx
->cdata
.key_virt
= ctx
->key
+ ctx
->adata
.keylen_pad
;
294 ctx
->cdata
.key_dma
= ctx
->key_dma
+ ctx
->adata
.keylen_pad
;
296 ctx
->adata
.key_inline
= !!(inl_mask
& 1);
297 ctx
->cdata
.key_inline
= !!(inl_mask
& 2);
299 /* aead_givencrypt shared descriptor */
300 desc
= ctx
->sh_desc_enc
;
301 cnstr_shdsc_aead_givencap(desc
, &ctx
->cdata
, &ctx
->adata
, ivsize
,
302 ctx
->authsize
, is_rfc3686
, nonce
,
304 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_enc_dma
,
305 desc_bytes(desc
), DMA_TO_DEVICE
);
311 static int aead_setauthsize(struct crypto_aead
*authenc
,
312 unsigned int authsize
)
314 struct caam_ctx
*ctx
= crypto_aead_ctx(authenc
);
316 ctx
->authsize
= authsize
;
317 aead_set_sh_desc(authenc
);
322 static int gcm_set_sh_desc(struct crypto_aead
*aead
)
324 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
325 struct device
*jrdev
= ctx
->jrdev
;
327 int rem_bytes
= CAAM_DESC_BYTES_MAX
- GCM_DESC_JOB_IO_LEN
-
330 if (!ctx
->cdata
.keylen
|| !ctx
->authsize
)
334 * AES GCM encrypt shared descriptor
335 * Job Descriptor and Shared Descriptor
336 * must fit into the 64-word Descriptor h/w Buffer
338 if (rem_bytes
>= DESC_GCM_ENC_LEN
) {
339 ctx
->cdata
.key_inline
= true;
340 ctx
->cdata
.key_virt
= ctx
->key
;
342 ctx
->cdata
.key_inline
= false;
343 ctx
->cdata
.key_dma
= ctx
->key_dma
;
346 desc
= ctx
->sh_desc_enc
;
347 cnstr_shdsc_gcm_encap(desc
, &ctx
->cdata
, ctx
->authsize
);
348 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_enc_dma
,
349 desc_bytes(desc
), DMA_TO_DEVICE
);
352 * Job Descriptor and Shared Descriptors
353 * must all fit into the 64-word Descriptor h/w Buffer
355 if (rem_bytes
>= DESC_GCM_DEC_LEN
) {
356 ctx
->cdata
.key_inline
= true;
357 ctx
->cdata
.key_virt
= ctx
->key
;
359 ctx
->cdata
.key_inline
= false;
360 ctx
->cdata
.key_dma
= ctx
->key_dma
;
363 desc
= ctx
->sh_desc_dec
;
364 cnstr_shdsc_gcm_decap(desc
, &ctx
->cdata
, ctx
->authsize
);
365 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_dec_dma
,
366 desc_bytes(desc
), DMA_TO_DEVICE
);
371 static int gcm_setauthsize(struct crypto_aead
*authenc
, unsigned int authsize
)
373 struct caam_ctx
*ctx
= crypto_aead_ctx(authenc
);
375 ctx
->authsize
= authsize
;
376 gcm_set_sh_desc(authenc
);
381 static int rfc4106_set_sh_desc(struct crypto_aead
*aead
)
383 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
384 struct device
*jrdev
= ctx
->jrdev
;
386 int rem_bytes
= CAAM_DESC_BYTES_MAX
- GCM_DESC_JOB_IO_LEN
-
389 if (!ctx
->cdata
.keylen
|| !ctx
->authsize
)
393 * RFC4106 encrypt shared descriptor
394 * Job Descriptor and Shared Descriptor
395 * must fit into the 64-word Descriptor h/w Buffer
397 if (rem_bytes
>= DESC_RFC4106_ENC_LEN
) {
398 ctx
->cdata
.key_inline
= true;
399 ctx
->cdata
.key_virt
= ctx
->key
;
401 ctx
->cdata
.key_inline
= false;
402 ctx
->cdata
.key_dma
= ctx
->key_dma
;
405 desc
= ctx
->sh_desc_enc
;
406 cnstr_shdsc_rfc4106_encap(desc
, &ctx
->cdata
, ctx
->authsize
);
407 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_enc_dma
,
408 desc_bytes(desc
), DMA_TO_DEVICE
);
411 * Job Descriptor and Shared Descriptors
412 * must all fit into the 64-word Descriptor h/w Buffer
414 if (rem_bytes
>= DESC_RFC4106_DEC_LEN
) {
415 ctx
->cdata
.key_inline
= true;
416 ctx
->cdata
.key_virt
= ctx
->key
;
418 ctx
->cdata
.key_inline
= false;
419 ctx
->cdata
.key_dma
= ctx
->key_dma
;
422 desc
= ctx
->sh_desc_dec
;
423 cnstr_shdsc_rfc4106_decap(desc
, &ctx
->cdata
, ctx
->authsize
);
424 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_dec_dma
,
425 desc_bytes(desc
), DMA_TO_DEVICE
);
430 static int rfc4106_setauthsize(struct crypto_aead
*authenc
,
431 unsigned int authsize
)
433 struct caam_ctx
*ctx
= crypto_aead_ctx(authenc
);
435 ctx
->authsize
= authsize
;
436 rfc4106_set_sh_desc(authenc
);
441 static int rfc4543_set_sh_desc(struct crypto_aead
*aead
)
443 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
444 struct device
*jrdev
= ctx
->jrdev
;
446 int rem_bytes
= CAAM_DESC_BYTES_MAX
- GCM_DESC_JOB_IO_LEN
-
449 if (!ctx
->cdata
.keylen
|| !ctx
->authsize
)
453 * RFC4543 encrypt shared descriptor
454 * Job Descriptor and Shared Descriptor
455 * must fit into the 64-word Descriptor h/w Buffer
457 if (rem_bytes
>= DESC_RFC4543_ENC_LEN
) {
458 ctx
->cdata
.key_inline
= true;
459 ctx
->cdata
.key_virt
= ctx
->key
;
461 ctx
->cdata
.key_inline
= false;
462 ctx
->cdata
.key_dma
= ctx
->key_dma
;
465 desc
= ctx
->sh_desc_enc
;
466 cnstr_shdsc_rfc4543_encap(desc
, &ctx
->cdata
, ctx
->authsize
);
467 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_enc_dma
,
468 desc_bytes(desc
), DMA_TO_DEVICE
);
471 * Job Descriptor and Shared Descriptors
472 * must all fit into the 64-word Descriptor h/w Buffer
474 if (rem_bytes
>= DESC_RFC4543_DEC_LEN
) {
475 ctx
->cdata
.key_inline
= true;
476 ctx
->cdata
.key_virt
= ctx
->key
;
478 ctx
->cdata
.key_inline
= false;
479 ctx
->cdata
.key_dma
= ctx
->key_dma
;
482 desc
= ctx
->sh_desc_dec
;
483 cnstr_shdsc_rfc4543_decap(desc
, &ctx
->cdata
, ctx
->authsize
);
484 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_dec_dma
,
485 desc_bytes(desc
), DMA_TO_DEVICE
);
490 static int rfc4543_setauthsize(struct crypto_aead
*authenc
,
491 unsigned int authsize
)
493 struct caam_ctx
*ctx
= crypto_aead_ctx(authenc
);
495 ctx
->authsize
= authsize
;
496 rfc4543_set_sh_desc(authenc
);
501 static int aead_setkey(struct crypto_aead
*aead
,
502 const u8
*key
, unsigned int keylen
)
504 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
505 struct device
*jrdev
= ctx
->jrdev
;
506 struct crypto_authenc_keys keys
;
509 if (crypto_authenc_extractkeys(&keys
, key
, keylen
) != 0)
513 printk(KERN_ERR
"keylen %d enckeylen %d authkeylen %d\n",
514 keys
.authkeylen
+ keys
.enckeylen
, keys
.enckeylen
,
516 print_hex_dump(KERN_ERR
, "key in @"__stringify(__LINE__
)": ",
517 DUMP_PREFIX_ADDRESS
, 16, 4, key
, keylen
, 1);
520 ret
= gen_split_key(ctx
->jrdev
, ctx
->key
, &ctx
->adata
, keys
.authkey
,
521 keys
.authkeylen
, CAAM_MAX_KEY_SIZE
-
527 /* postpend encryption key to auth split key */
528 memcpy(ctx
->key
+ ctx
->adata
.keylen_pad
, keys
.enckey
, keys
.enckeylen
);
529 dma_sync_single_for_device(jrdev
, ctx
->key_dma
, ctx
->adata
.keylen_pad
+
530 keys
.enckeylen
, DMA_TO_DEVICE
);
532 print_hex_dump(KERN_ERR
, "ctx.key@"__stringify(__LINE__
)": ",
533 DUMP_PREFIX_ADDRESS
, 16, 4, ctx
->key
,
534 ctx
->adata
.keylen_pad
+ keys
.enckeylen
, 1);
536 ctx
->cdata
.keylen
= keys
.enckeylen
;
537 return aead_set_sh_desc(aead
);
539 crypto_aead_set_flags(aead
, CRYPTO_TFM_RES_BAD_KEY_LEN
);
543 static int gcm_setkey(struct crypto_aead
*aead
,
544 const u8
*key
, unsigned int keylen
)
546 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
547 struct device
*jrdev
= ctx
->jrdev
;
550 print_hex_dump(KERN_ERR
, "key in @"__stringify(__LINE__
)": ",
551 DUMP_PREFIX_ADDRESS
, 16, 4, key
, keylen
, 1);
554 memcpy(ctx
->key
, key
, keylen
);
555 dma_sync_single_for_device(jrdev
, ctx
->key_dma
, keylen
, DMA_TO_DEVICE
);
556 ctx
->cdata
.keylen
= keylen
;
558 return gcm_set_sh_desc(aead
);
561 static int rfc4106_setkey(struct crypto_aead
*aead
,
562 const u8
*key
, unsigned int keylen
)
564 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
565 struct device
*jrdev
= ctx
->jrdev
;
571 print_hex_dump(KERN_ERR
, "key in @"__stringify(__LINE__
)": ",
572 DUMP_PREFIX_ADDRESS
, 16, 4, key
, keylen
, 1);
575 memcpy(ctx
->key
, key
, keylen
);
578 * The last four bytes of the key material are used as the salt value
579 * in the nonce. Update the AES key length.
581 ctx
->cdata
.keylen
= keylen
- 4;
582 dma_sync_single_for_device(jrdev
, ctx
->key_dma
, ctx
->cdata
.keylen
,
584 return rfc4106_set_sh_desc(aead
);
587 static int rfc4543_setkey(struct crypto_aead
*aead
,
588 const u8
*key
, unsigned int keylen
)
590 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
591 struct device
*jrdev
= ctx
->jrdev
;
597 print_hex_dump(KERN_ERR
, "key in @"__stringify(__LINE__
)": ",
598 DUMP_PREFIX_ADDRESS
, 16, 4, key
, keylen
, 1);
601 memcpy(ctx
->key
, key
, keylen
);
604 * The last four bytes of the key material are used as the salt value
605 * in the nonce. Update the AES key length.
607 ctx
->cdata
.keylen
= keylen
- 4;
608 dma_sync_single_for_device(jrdev
, ctx
->key_dma
, ctx
->cdata
.keylen
,
610 return rfc4543_set_sh_desc(aead
);
613 static int ablkcipher_setkey(struct crypto_ablkcipher
*ablkcipher
,
614 const u8
*key
, unsigned int keylen
)
616 struct caam_ctx
*ctx
= crypto_ablkcipher_ctx(ablkcipher
);
617 struct crypto_tfm
*tfm
= crypto_ablkcipher_tfm(ablkcipher
);
618 const char *alg_name
= crypto_tfm_alg_name(tfm
);
619 struct device
*jrdev
= ctx
->jrdev
;
620 unsigned int ivsize
= crypto_ablkcipher_ivsize(ablkcipher
);
623 const bool ctr_mode
= ((ctx
->cdata
.algtype
& OP_ALG_AAI_MASK
) ==
624 OP_ALG_AAI_CTR_MOD128
);
625 const bool is_rfc3686
= (ctr_mode
&&
626 (strstr(alg_name
, "rfc3686") != NULL
));
628 memcpy(ctx
->key
, key
, keylen
);
630 print_hex_dump(KERN_ERR
, "key in @"__stringify(__LINE__
)": ",
631 DUMP_PREFIX_ADDRESS
, 16, 4, key
, keylen
, 1);
634 * AES-CTR needs to load IV in CONTEXT1 reg
635 * at an offset of 128bits (16bytes)
636 * CONTEXT1[255:128] = IV
643 * | CONTEXT1[255:128] = {NONCE, IV, COUNTER}
644 * | *key = {KEY, NONCE}
647 ctx1_iv_off
= 16 + CTR_RFC3686_NONCE_SIZE
;
648 keylen
-= CTR_RFC3686_NONCE_SIZE
;
651 dma_sync_single_for_device(jrdev
, ctx
->key_dma
, keylen
, DMA_TO_DEVICE
);
652 ctx
->cdata
.keylen
= keylen
;
653 ctx
->cdata
.key_virt
= ctx
->key
;
654 ctx
->cdata
.key_inline
= true;
656 /* ablkcipher_encrypt shared descriptor */
657 desc
= ctx
->sh_desc_enc
;
658 cnstr_shdsc_ablkcipher_encap(desc
, &ctx
->cdata
, ivsize
, is_rfc3686
,
660 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_enc_dma
,
661 desc_bytes(desc
), DMA_TO_DEVICE
);
663 /* ablkcipher_decrypt shared descriptor */
664 desc
= ctx
->sh_desc_dec
;
665 cnstr_shdsc_ablkcipher_decap(desc
, &ctx
->cdata
, ivsize
, is_rfc3686
,
667 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_dec_dma
,
668 desc_bytes(desc
), DMA_TO_DEVICE
);
670 /* ablkcipher_givencrypt shared descriptor */
671 desc
= ctx
->sh_desc_givenc
;
672 cnstr_shdsc_ablkcipher_givencap(desc
, &ctx
->cdata
, ivsize
, is_rfc3686
,
674 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_givenc_dma
,
675 desc_bytes(desc
), DMA_TO_DEVICE
);
680 static int xts_ablkcipher_setkey(struct crypto_ablkcipher
*ablkcipher
,
681 const u8
*key
, unsigned int keylen
)
683 struct caam_ctx
*ctx
= crypto_ablkcipher_ctx(ablkcipher
);
684 struct device
*jrdev
= ctx
->jrdev
;
687 if (keylen
!= 2 * AES_MIN_KEY_SIZE
&& keylen
!= 2 * AES_MAX_KEY_SIZE
) {
688 crypto_ablkcipher_set_flags(ablkcipher
,
689 CRYPTO_TFM_RES_BAD_KEY_LEN
);
690 dev_err(jrdev
, "key size mismatch\n");
694 memcpy(ctx
->key
, key
, keylen
);
695 dma_sync_single_for_device(jrdev
, ctx
->key_dma
, keylen
, DMA_TO_DEVICE
);
696 ctx
->cdata
.keylen
= keylen
;
697 ctx
->cdata
.key_virt
= ctx
->key
;
698 ctx
->cdata
.key_inline
= true;
700 /* xts_ablkcipher_encrypt shared descriptor */
701 desc
= ctx
->sh_desc_enc
;
702 cnstr_shdsc_xts_ablkcipher_encap(desc
, &ctx
->cdata
);
703 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_enc_dma
,
704 desc_bytes(desc
), DMA_TO_DEVICE
);
706 /* xts_ablkcipher_decrypt shared descriptor */
707 desc
= ctx
->sh_desc_dec
;
708 cnstr_shdsc_xts_ablkcipher_decap(desc
, &ctx
->cdata
);
709 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_dec_dma
,
710 desc_bytes(desc
), DMA_TO_DEVICE
);
716 * aead_edesc - s/w-extended aead descriptor
717 * @src_nents: number of segments in input s/w scatterlist
718 * @dst_nents: number of segments in output s/w scatterlist
719 * @sec4_sg_bytes: length of dma mapped sec4_sg space
720 * @sec4_sg_dma: bus physical mapped address of h/w link table
721 * @sec4_sg: pointer to h/w link table
722 * @hw_desc: the h/w job descriptor followed by any referenced link tables
728 dma_addr_t sec4_sg_dma
;
729 struct sec4_sg_entry
*sec4_sg
;
734 * ablkcipher_edesc - s/w-extended ablkcipher descriptor
735 * @src_nents: number of segments in input s/w scatterlist
736 * @dst_nents: number of segments in output s/w scatterlist
737 * @iv_dma: dma address of iv for checking continuity and link table
738 * @sec4_sg_bytes: length of dma mapped sec4_sg space
739 * @sec4_sg_dma: bus physical mapped address of h/w link table
740 * @sec4_sg: pointer to h/w link table
741 * @hw_desc: the h/w job descriptor followed by any referenced link tables
743 struct ablkcipher_edesc
{
748 dma_addr_t sec4_sg_dma
;
749 struct sec4_sg_entry
*sec4_sg
;
753 static void caam_unmap(struct device
*dev
, struct scatterlist
*src
,
754 struct scatterlist
*dst
, int src_nents
,
756 dma_addr_t iv_dma
, int ivsize
, dma_addr_t sec4_sg_dma
,
761 dma_unmap_sg(dev
, src
, src_nents
, DMA_TO_DEVICE
);
762 dma_unmap_sg(dev
, dst
, dst_nents
, DMA_FROM_DEVICE
);
764 dma_unmap_sg(dev
, src
, src_nents
, DMA_BIDIRECTIONAL
);
768 dma_unmap_single(dev
, iv_dma
, ivsize
, DMA_TO_DEVICE
);
770 dma_unmap_single(dev
, sec4_sg_dma
, sec4_sg_bytes
,
774 static void aead_unmap(struct device
*dev
,
775 struct aead_edesc
*edesc
,
776 struct aead_request
*req
)
778 caam_unmap(dev
, req
->src
, req
->dst
,
779 edesc
->src_nents
, edesc
->dst_nents
, 0, 0,
780 edesc
->sec4_sg_dma
, edesc
->sec4_sg_bytes
);
783 static void ablkcipher_unmap(struct device
*dev
,
784 struct ablkcipher_edesc
*edesc
,
785 struct ablkcipher_request
*req
)
787 struct crypto_ablkcipher
*ablkcipher
= crypto_ablkcipher_reqtfm(req
);
788 int ivsize
= crypto_ablkcipher_ivsize(ablkcipher
);
790 caam_unmap(dev
, req
->src
, req
->dst
,
791 edesc
->src_nents
, edesc
->dst_nents
,
792 edesc
->iv_dma
, ivsize
,
793 edesc
->sec4_sg_dma
, edesc
->sec4_sg_bytes
);
796 static void aead_encrypt_done(struct device
*jrdev
, u32
*desc
, u32 err
,
799 struct aead_request
*req
= context
;
800 struct aead_edesc
*edesc
;
803 dev_err(jrdev
, "%s %d: err 0x%x\n", __func__
, __LINE__
, err
);
806 edesc
= container_of(desc
, struct aead_edesc
, hw_desc
[0]);
809 caam_jr_strstatus(jrdev
, err
);
811 aead_unmap(jrdev
, edesc
, req
);
815 aead_request_complete(req
, err
);
818 static void aead_decrypt_done(struct device
*jrdev
, u32
*desc
, u32 err
,
821 struct aead_request
*req
= context
;
822 struct aead_edesc
*edesc
;
825 dev_err(jrdev
, "%s %d: err 0x%x\n", __func__
, __LINE__
, err
);
828 edesc
= container_of(desc
, struct aead_edesc
, hw_desc
[0]);
831 caam_jr_strstatus(jrdev
, err
);
833 aead_unmap(jrdev
, edesc
, req
);
836 * verify hw auth check passed else return -EBADMSG
838 if ((err
& JRSTA_CCBERR_ERRID_MASK
) == JRSTA_CCBERR_ERRID_ICVCHK
)
843 aead_request_complete(req
, err
);
846 static void ablkcipher_encrypt_done(struct device
*jrdev
, u32
*desc
, u32 err
,
849 struct ablkcipher_request
*req
= context
;
850 struct ablkcipher_edesc
*edesc
;
851 struct crypto_ablkcipher
*ablkcipher
= crypto_ablkcipher_reqtfm(req
);
852 int ivsize
= crypto_ablkcipher_ivsize(ablkcipher
);
855 dev_err(jrdev
, "%s %d: err 0x%x\n", __func__
, __LINE__
, err
);
858 edesc
= container_of(desc
, struct ablkcipher_edesc
, hw_desc
[0]);
861 caam_jr_strstatus(jrdev
, err
);
864 print_hex_dump(KERN_ERR
, "dstiv @"__stringify(__LINE__
)": ",
865 DUMP_PREFIX_ADDRESS
, 16, 4, req
->info
,
866 edesc
->src_nents
> 1 ? 100 : ivsize
, 1);
868 caam_dump_sg(KERN_ERR
, "dst @" __stringify(__LINE__
)": ",
869 DUMP_PREFIX_ADDRESS
, 16, 4, req
->dst
,
870 edesc
->dst_nents
> 1 ? 100 : req
->nbytes
, 1);
872 ablkcipher_unmap(jrdev
, edesc
, req
);
875 * The crypto API expects us to set the IV (req->info) to the last
876 * ciphertext block. This is used e.g. by the CTS mode.
878 scatterwalk_map_and_copy(req
->info
, req
->dst
, req
->nbytes
- ivsize
,
883 ablkcipher_request_complete(req
, err
);
886 static void ablkcipher_decrypt_done(struct device
*jrdev
, u32
*desc
, u32 err
,
889 struct ablkcipher_request
*req
= context
;
890 struct ablkcipher_edesc
*edesc
;
891 struct crypto_ablkcipher
*ablkcipher
= crypto_ablkcipher_reqtfm(req
);
892 int ivsize
= crypto_ablkcipher_ivsize(ablkcipher
);
895 dev_err(jrdev
, "%s %d: err 0x%x\n", __func__
, __LINE__
, err
);
898 edesc
= container_of(desc
, struct ablkcipher_edesc
, hw_desc
[0]);
900 caam_jr_strstatus(jrdev
, err
);
903 print_hex_dump(KERN_ERR
, "dstiv @"__stringify(__LINE__
)": ",
904 DUMP_PREFIX_ADDRESS
, 16, 4, req
->info
,
907 caam_dump_sg(KERN_ERR
, "dst @" __stringify(__LINE__
)": ",
908 DUMP_PREFIX_ADDRESS
, 16, 4, req
->dst
,
909 edesc
->dst_nents
> 1 ? 100 : req
->nbytes
, 1);
911 ablkcipher_unmap(jrdev
, edesc
, req
);
914 * The crypto API expects us to set the IV (req->info) to the last
917 scatterwalk_map_and_copy(req
->info
, req
->src
, req
->nbytes
- ivsize
,
922 ablkcipher_request_complete(req
, err
);
926 * Fill in aead job descriptor
928 static void init_aead_job(struct aead_request
*req
,
929 struct aead_edesc
*edesc
,
930 bool all_contig
, bool encrypt
)
932 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
933 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
934 int authsize
= ctx
->authsize
;
935 u32
*desc
= edesc
->hw_desc
;
936 u32 out_options
, in_options
;
937 dma_addr_t dst_dma
, src_dma
;
938 int len
, sec4_sg_index
= 0;
942 sh_desc
= encrypt
? ctx
->sh_desc_enc
: ctx
->sh_desc_dec
;
943 ptr
= encrypt
? ctx
->sh_desc_enc_dma
: ctx
->sh_desc_dec_dma
;
945 len
= desc_len(sh_desc
);
946 init_job_desc_shared(desc
, ptr
, len
, HDR_SHARE_DEFER
| HDR_REVERSE
);
949 src_dma
= edesc
->src_nents
? sg_dma_address(req
->src
) : 0;
952 src_dma
= edesc
->sec4_sg_dma
;
953 sec4_sg_index
+= edesc
->src_nents
;
954 in_options
= LDST_SGF
;
957 append_seq_in_ptr(desc
, src_dma
, req
->assoclen
+ req
->cryptlen
,
961 out_options
= in_options
;
963 if (unlikely(req
->src
!= req
->dst
)) {
964 if (edesc
->dst_nents
== 1) {
965 dst_dma
= sg_dma_address(req
->dst
);
967 dst_dma
= edesc
->sec4_sg_dma
+
969 sizeof(struct sec4_sg_entry
);
970 out_options
= LDST_SGF
;
975 append_seq_out_ptr(desc
, dst_dma
,
976 req
->assoclen
+ req
->cryptlen
+ authsize
,
979 append_seq_out_ptr(desc
, dst_dma
,
980 req
->assoclen
+ req
->cryptlen
- authsize
,
983 /* REG3 = assoclen */
984 append_math_add_imm_u32(desc
, REG3
, ZERO
, IMM
, req
->assoclen
);
987 static void init_gcm_job(struct aead_request
*req
,
988 struct aead_edesc
*edesc
,
989 bool all_contig
, bool encrypt
)
991 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
992 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
993 unsigned int ivsize
= crypto_aead_ivsize(aead
);
994 u32
*desc
= edesc
->hw_desc
;
995 bool generic_gcm
= (ivsize
== GCM_AES_IV_SIZE
);
998 init_aead_job(req
, edesc
, all_contig
, encrypt
);
1000 /* BUG This should not be specific to generic GCM. */
1002 if (encrypt
&& generic_gcm
&& !(req
->assoclen
+ req
->cryptlen
))
1003 last
= FIFOLD_TYPE_LAST1
;
1006 append_cmd(desc
, CMD_FIFO_LOAD
| FIFOLD_CLASS_CLASS1
| IMMEDIATE
|
1007 FIFOLD_TYPE_IV
| FIFOLD_TYPE_FLUSH1
| GCM_AES_IV_SIZE
| last
);
1010 append_data(desc
, ctx
->key
+ ctx
->cdata
.keylen
, 4);
1012 append_data(desc
, req
->iv
, ivsize
);
1013 /* End of blank commands */
1016 static void init_authenc_job(struct aead_request
*req
,
1017 struct aead_edesc
*edesc
,
1018 bool all_contig
, bool encrypt
)
1020 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
1021 struct caam_aead_alg
*alg
= container_of(crypto_aead_alg(aead
),
1022 struct caam_aead_alg
, aead
);
1023 unsigned int ivsize
= crypto_aead_ivsize(aead
);
1024 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
1025 const bool ctr_mode
= ((ctx
->cdata
.algtype
& OP_ALG_AAI_MASK
) ==
1026 OP_ALG_AAI_CTR_MOD128
);
1027 const bool is_rfc3686
= alg
->caam
.rfc3686
;
1028 u32
*desc
= edesc
->hw_desc
;
1032 * AES-CTR needs to load IV in CONTEXT1 reg
1033 * at an offset of 128bits (16bytes)
1034 * CONTEXT1[255:128] = IV
1041 * CONTEXT1[255:128] = {NONCE, IV, COUNTER}
1044 ivoffset
= 16 + CTR_RFC3686_NONCE_SIZE
;
1046 init_aead_job(req
, edesc
, all_contig
, encrypt
);
1048 if (ivsize
&& ((is_rfc3686
&& encrypt
) || !alg
->caam
.geniv
))
1049 append_load_as_imm(desc
, req
->iv
, ivsize
,
1051 LDST_SRCDST_BYTE_CONTEXT
|
1052 (ivoffset
<< LDST_OFFSET_SHIFT
));
1056 * Fill in ablkcipher job descriptor
1058 static void init_ablkcipher_job(u32
*sh_desc
, dma_addr_t ptr
,
1059 struct ablkcipher_edesc
*edesc
,
1060 struct ablkcipher_request
*req
,
1063 struct crypto_ablkcipher
*ablkcipher
= crypto_ablkcipher_reqtfm(req
);
1064 int ivsize
= crypto_ablkcipher_ivsize(ablkcipher
);
1065 u32
*desc
= edesc
->hw_desc
;
1066 u32 out_options
= 0, in_options
;
1067 dma_addr_t dst_dma
, src_dma
;
1068 int len
, sec4_sg_index
= 0;
1071 print_hex_dump(KERN_ERR
, "presciv@"__stringify(__LINE__
)": ",
1072 DUMP_PREFIX_ADDRESS
, 16, 4, req
->info
,
1074 pr_err("asked=%d, nbytes%d\n",
1075 (int)edesc
->src_nents
> 1 ? 100 : req
->nbytes
, req
->nbytes
);
1077 caam_dump_sg(KERN_ERR
, "src @" __stringify(__LINE__
)": ",
1078 DUMP_PREFIX_ADDRESS
, 16, 4, req
->src
,
1079 edesc
->src_nents
> 1 ? 100 : req
->nbytes
, 1);
1081 len
= desc_len(sh_desc
);
1082 init_job_desc_shared(desc
, ptr
, len
, HDR_SHARE_DEFER
| HDR_REVERSE
);
1085 src_dma
= edesc
->iv_dma
;
1088 src_dma
= edesc
->sec4_sg_dma
;
1089 sec4_sg_index
+= edesc
->src_nents
+ 1;
1090 in_options
= LDST_SGF
;
1092 append_seq_in_ptr(desc
, src_dma
, req
->nbytes
+ ivsize
, in_options
);
1094 if (likely(req
->src
== req
->dst
)) {
1095 if (edesc
->src_nents
== 1 && iv_contig
) {
1096 dst_dma
= sg_dma_address(req
->src
);
1098 dst_dma
= edesc
->sec4_sg_dma
+
1099 sizeof(struct sec4_sg_entry
);
1100 out_options
= LDST_SGF
;
1103 if (edesc
->dst_nents
== 1) {
1104 dst_dma
= sg_dma_address(req
->dst
);
1106 dst_dma
= edesc
->sec4_sg_dma
+
1107 sec4_sg_index
* sizeof(struct sec4_sg_entry
);
1108 out_options
= LDST_SGF
;
1111 append_seq_out_ptr(desc
, dst_dma
, req
->nbytes
, out_options
);
1115 * Fill in ablkcipher givencrypt job descriptor
1117 static void init_ablkcipher_giv_job(u32
*sh_desc
, dma_addr_t ptr
,
1118 struct ablkcipher_edesc
*edesc
,
1119 struct ablkcipher_request
*req
,
1122 struct crypto_ablkcipher
*ablkcipher
= crypto_ablkcipher_reqtfm(req
);
1123 int ivsize
= crypto_ablkcipher_ivsize(ablkcipher
);
1124 u32
*desc
= edesc
->hw_desc
;
1125 u32 out_options
, in_options
;
1126 dma_addr_t dst_dma
, src_dma
;
1127 int len
, sec4_sg_index
= 0;
1130 print_hex_dump(KERN_ERR
, "presciv@" __stringify(__LINE__
) ": ",
1131 DUMP_PREFIX_ADDRESS
, 16, 4, req
->info
,
1134 caam_dump_sg(KERN_ERR
, "src @" __stringify(__LINE__
) ": ",
1135 DUMP_PREFIX_ADDRESS
, 16, 4, req
->src
,
1136 edesc
->src_nents
> 1 ? 100 : req
->nbytes
, 1);
1138 len
= desc_len(sh_desc
);
1139 init_job_desc_shared(desc
, ptr
, len
, HDR_SHARE_DEFER
| HDR_REVERSE
);
1141 if (edesc
->src_nents
== 1) {
1142 src_dma
= sg_dma_address(req
->src
);
1145 src_dma
= edesc
->sec4_sg_dma
;
1146 sec4_sg_index
+= edesc
->src_nents
;
1147 in_options
= LDST_SGF
;
1149 append_seq_in_ptr(desc
, src_dma
, req
->nbytes
, in_options
);
1152 dst_dma
= edesc
->iv_dma
;
1155 dst_dma
= edesc
->sec4_sg_dma
+
1156 sec4_sg_index
* sizeof(struct sec4_sg_entry
);
1157 out_options
= LDST_SGF
;
1159 append_seq_out_ptr(desc
, dst_dma
, req
->nbytes
+ ivsize
, out_options
);
1163 * allocate and map the aead extended descriptor
1165 static struct aead_edesc
*aead_edesc_alloc(struct aead_request
*req
,
1166 int desc_bytes
, bool *all_contig_ptr
,
1169 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
1170 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
1171 struct device
*jrdev
= ctx
->jrdev
;
1172 gfp_t flags
= (req
->base
.flags
& CRYPTO_TFM_REQ_MAY_SLEEP
) ?
1173 GFP_KERNEL
: GFP_ATOMIC
;
1174 int src_nents
, mapped_src_nents
, dst_nents
= 0, mapped_dst_nents
= 0;
1175 struct aead_edesc
*edesc
;
1176 int sec4_sg_index
, sec4_sg_len
, sec4_sg_bytes
;
1177 unsigned int authsize
= ctx
->authsize
;
1179 if (unlikely(req
->dst
!= req
->src
)) {
1180 src_nents
= sg_nents_for_len(req
->src
, req
->assoclen
+
1182 if (unlikely(src_nents
< 0)) {
1183 dev_err(jrdev
, "Insufficient bytes (%d) in src S/G\n",
1184 req
->assoclen
+ req
->cryptlen
);
1185 return ERR_PTR(src_nents
);
1188 dst_nents
= sg_nents_for_len(req
->dst
, req
->assoclen
+
1190 (encrypt
? authsize
:
1192 if (unlikely(dst_nents
< 0)) {
1193 dev_err(jrdev
, "Insufficient bytes (%d) in dst S/G\n",
1194 req
->assoclen
+ req
->cryptlen
+
1195 (encrypt
? authsize
: (-authsize
)));
1196 return ERR_PTR(dst_nents
);
1199 src_nents
= sg_nents_for_len(req
->src
, req
->assoclen
+
1201 (encrypt
? authsize
: 0));
1202 if (unlikely(src_nents
< 0)) {
1203 dev_err(jrdev
, "Insufficient bytes (%d) in src S/G\n",
1204 req
->assoclen
+ req
->cryptlen
+
1205 (encrypt
? authsize
: 0));
1206 return ERR_PTR(src_nents
);
1210 if (likely(req
->src
== req
->dst
)) {
1211 mapped_src_nents
= dma_map_sg(jrdev
, req
->src
, src_nents
,
1213 if (unlikely(!mapped_src_nents
)) {
1214 dev_err(jrdev
, "unable to map source\n");
1215 return ERR_PTR(-ENOMEM
);
1218 /* Cover also the case of null (zero length) input data */
1220 mapped_src_nents
= dma_map_sg(jrdev
, req
->src
,
1221 src_nents
, DMA_TO_DEVICE
);
1222 if (unlikely(!mapped_src_nents
)) {
1223 dev_err(jrdev
, "unable to map source\n");
1224 return ERR_PTR(-ENOMEM
);
1227 mapped_src_nents
= 0;
1230 mapped_dst_nents
= dma_map_sg(jrdev
, req
->dst
, dst_nents
,
1232 if (unlikely(!mapped_dst_nents
)) {
1233 dev_err(jrdev
, "unable to map destination\n");
1234 dma_unmap_sg(jrdev
, req
->src
, src_nents
, DMA_TO_DEVICE
);
1235 return ERR_PTR(-ENOMEM
);
1239 sec4_sg_len
= mapped_src_nents
> 1 ? mapped_src_nents
: 0;
1240 sec4_sg_len
+= mapped_dst_nents
> 1 ? mapped_dst_nents
: 0;
1241 sec4_sg_bytes
= sec4_sg_len
* sizeof(struct sec4_sg_entry
);
1243 /* allocate space for base edesc and hw desc commands, link tables */
1244 edesc
= kzalloc(sizeof(*edesc
) + desc_bytes
+ sec4_sg_bytes
,
1247 caam_unmap(jrdev
, req
->src
, req
->dst
, src_nents
, dst_nents
, 0,
1249 return ERR_PTR(-ENOMEM
);
1252 edesc
->src_nents
= src_nents
;
1253 edesc
->dst_nents
= dst_nents
;
1254 edesc
->sec4_sg
= (void *)edesc
+ sizeof(struct aead_edesc
) +
1256 *all_contig_ptr
= !(mapped_src_nents
> 1);
1259 if (mapped_src_nents
> 1) {
1260 sg_to_sec4_sg_last(req
->src
, mapped_src_nents
,
1261 edesc
->sec4_sg
+ sec4_sg_index
, 0);
1262 sec4_sg_index
+= mapped_src_nents
;
1264 if (mapped_dst_nents
> 1) {
1265 sg_to_sec4_sg_last(req
->dst
, mapped_dst_nents
,
1266 edesc
->sec4_sg
+ sec4_sg_index
, 0);
1272 edesc
->sec4_sg_dma
= dma_map_single(jrdev
, edesc
->sec4_sg
,
1273 sec4_sg_bytes
, DMA_TO_DEVICE
);
1274 if (dma_mapping_error(jrdev
, edesc
->sec4_sg_dma
)) {
1275 dev_err(jrdev
, "unable to map S/G table\n");
1276 aead_unmap(jrdev
, edesc
, req
);
1278 return ERR_PTR(-ENOMEM
);
1281 edesc
->sec4_sg_bytes
= sec4_sg_bytes
;
1286 static int gcm_encrypt(struct aead_request
*req
)
1288 struct aead_edesc
*edesc
;
1289 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
1290 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
1291 struct device
*jrdev
= ctx
->jrdev
;
1296 /* allocate extended descriptor */
1297 edesc
= aead_edesc_alloc(req
, GCM_DESC_JOB_IO_LEN
, &all_contig
, true);
1299 return PTR_ERR(edesc
);
1301 /* Create and submit job descriptor */
1302 init_gcm_job(req
, edesc
, all_contig
, true);
1304 print_hex_dump(KERN_ERR
, "aead jobdesc@"__stringify(__LINE__
)": ",
1305 DUMP_PREFIX_ADDRESS
, 16, 4, edesc
->hw_desc
,
1306 desc_bytes(edesc
->hw_desc
), 1);
1309 desc
= edesc
->hw_desc
;
1310 ret
= caam_jr_enqueue(jrdev
, desc
, aead_encrypt_done
, req
);
1314 aead_unmap(jrdev
, edesc
, req
);
1321 static int ipsec_gcm_encrypt(struct aead_request
*req
)
1323 if (req
->assoclen
< 8)
1326 return gcm_encrypt(req
);
1329 static int aead_encrypt(struct aead_request
*req
)
1331 struct aead_edesc
*edesc
;
1332 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
1333 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
1334 struct device
*jrdev
= ctx
->jrdev
;
1339 /* allocate extended descriptor */
1340 edesc
= aead_edesc_alloc(req
, AUTHENC_DESC_JOB_IO_LEN
,
1343 return PTR_ERR(edesc
);
1345 /* Create and submit job descriptor */
1346 init_authenc_job(req
, edesc
, all_contig
, true);
1348 print_hex_dump(KERN_ERR
, "aead jobdesc@"__stringify(__LINE__
)": ",
1349 DUMP_PREFIX_ADDRESS
, 16, 4, edesc
->hw_desc
,
1350 desc_bytes(edesc
->hw_desc
), 1);
1353 desc
= edesc
->hw_desc
;
1354 ret
= caam_jr_enqueue(jrdev
, desc
, aead_encrypt_done
, req
);
1358 aead_unmap(jrdev
, edesc
, req
);
1365 static int gcm_decrypt(struct aead_request
*req
)
1367 struct aead_edesc
*edesc
;
1368 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
1369 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
1370 struct device
*jrdev
= ctx
->jrdev
;
1375 /* allocate extended descriptor */
1376 edesc
= aead_edesc_alloc(req
, GCM_DESC_JOB_IO_LEN
, &all_contig
, false);
1378 return PTR_ERR(edesc
);
1380 /* Create and submit job descriptor*/
1381 init_gcm_job(req
, edesc
, all_contig
, false);
1383 print_hex_dump(KERN_ERR
, "aead jobdesc@"__stringify(__LINE__
)": ",
1384 DUMP_PREFIX_ADDRESS
, 16, 4, edesc
->hw_desc
,
1385 desc_bytes(edesc
->hw_desc
), 1);
1388 desc
= edesc
->hw_desc
;
1389 ret
= caam_jr_enqueue(jrdev
, desc
, aead_decrypt_done
, req
);
1393 aead_unmap(jrdev
, edesc
, req
);
1400 static int ipsec_gcm_decrypt(struct aead_request
*req
)
1402 if (req
->assoclen
< 8)
1405 return gcm_decrypt(req
);
1408 static int aead_decrypt(struct aead_request
*req
)
1410 struct aead_edesc
*edesc
;
1411 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
1412 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
1413 struct device
*jrdev
= ctx
->jrdev
;
1418 caam_dump_sg(KERN_ERR
, "dec src@" __stringify(__LINE__
)": ",
1419 DUMP_PREFIX_ADDRESS
, 16, 4, req
->src
,
1420 req
->assoclen
+ req
->cryptlen
, 1);
1422 /* allocate extended descriptor */
1423 edesc
= aead_edesc_alloc(req
, AUTHENC_DESC_JOB_IO_LEN
,
1424 &all_contig
, false);
1426 return PTR_ERR(edesc
);
1428 /* Create and submit job descriptor*/
1429 init_authenc_job(req
, edesc
, all_contig
, false);
1431 print_hex_dump(KERN_ERR
, "aead jobdesc@"__stringify(__LINE__
)": ",
1432 DUMP_PREFIX_ADDRESS
, 16, 4, edesc
->hw_desc
,
1433 desc_bytes(edesc
->hw_desc
), 1);
1436 desc
= edesc
->hw_desc
;
1437 ret
= caam_jr_enqueue(jrdev
, desc
, aead_decrypt_done
, req
);
1441 aead_unmap(jrdev
, edesc
, req
);
1449 * allocate and map the ablkcipher extended descriptor for ablkcipher
1451 static struct ablkcipher_edesc
*ablkcipher_edesc_alloc(struct ablkcipher_request
1452 *req
, int desc_bytes
,
1453 bool *iv_contig_out
)
1455 struct crypto_ablkcipher
*ablkcipher
= crypto_ablkcipher_reqtfm(req
);
1456 struct caam_ctx
*ctx
= crypto_ablkcipher_ctx(ablkcipher
);
1457 struct device
*jrdev
= ctx
->jrdev
;
1458 gfp_t flags
= (req
->base
.flags
& CRYPTO_TFM_REQ_MAY_SLEEP
) ?
1459 GFP_KERNEL
: GFP_ATOMIC
;
1460 int src_nents
, mapped_src_nents
, dst_nents
= 0, mapped_dst_nents
= 0;
1461 struct ablkcipher_edesc
*edesc
;
1462 dma_addr_t iv_dma
= 0;
1464 int ivsize
= crypto_ablkcipher_ivsize(ablkcipher
);
1465 int dst_sg_idx
, sec4_sg_ents
, sec4_sg_bytes
;
1467 src_nents
= sg_nents_for_len(req
->src
, req
->nbytes
);
1468 if (unlikely(src_nents
< 0)) {
1469 dev_err(jrdev
, "Insufficient bytes (%d) in src S/G\n",
1471 return ERR_PTR(src_nents
);
1474 if (req
->dst
!= req
->src
) {
1475 dst_nents
= sg_nents_for_len(req
->dst
, req
->nbytes
);
1476 if (unlikely(dst_nents
< 0)) {
1477 dev_err(jrdev
, "Insufficient bytes (%d) in dst S/G\n",
1479 return ERR_PTR(dst_nents
);
1483 if (likely(req
->src
== req
->dst
)) {
1484 mapped_src_nents
= dma_map_sg(jrdev
, req
->src
, src_nents
,
1486 if (unlikely(!mapped_src_nents
)) {
1487 dev_err(jrdev
, "unable to map source\n");
1488 return ERR_PTR(-ENOMEM
);
1491 mapped_src_nents
= dma_map_sg(jrdev
, req
->src
, src_nents
,
1493 if (unlikely(!mapped_src_nents
)) {
1494 dev_err(jrdev
, "unable to map source\n");
1495 return ERR_PTR(-ENOMEM
);
1498 mapped_dst_nents
= dma_map_sg(jrdev
, req
->dst
, dst_nents
,
1500 if (unlikely(!mapped_dst_nents
)) {
1501 dev_err(jrdev
, "unable to map destination\n");
1502 dma_unmap_sg(jrdev
, req
->src
, src_nents
, DMA_TO_DEVICE
);
1503 return ERR_PTR(-ENOMEM
);
1507 iv_dma
= dma_map_single(jrdev
, req
->info
, ivsize
, DMA_TO_DEVICE
);
1508 if (dma_mapping_error(jrdev
, iv_dma
)) {
1509 dev_err(jrdev
, "unable to map IV\n");
1510 caam_unmap(jrdev
, req
->src
, req
->dst
, src_nents
, dst_nents
, 0,
1512 return ERR_PTR(-ENOMEM
);
1515 if (mapped_src_nents
== 1 &&
1516 iv_dma
+ ivsize
== sg_dma_address(req
->src
)) {
1521 sec4_sg_ents
= 1 + mapped_src_nents
;
1523 dst_sg_idx
= sec4_sg_ents
;
1524 sec4_sg_ents
+= mapped_dst_nents
> 1 ? mapped_dst_nents
: 0;
1525 sec4_sg_bytes
= sec4_sg_ents
* sizeof(struct sec4_sg_entry
);
1527 /* allocate space for base edesc and hw desc commands, link tables */
1528 edesc
= kzalloc(sizeof(*edesc
) + desc_bytes
+ sec4_sg_bytes
,
1531 dev_err(jrdev
, "could not allocate extended descriptor\n");
1532 caam_unmap(jrdev
, req
->src
, req
->dst
, src_nents
, dst_nents
,
1533 iv_dma
, ivsize
, 0, 0);
1534 return ERR_PTR(-ENOMEM
);
1537 edesc
->src_nents
= src_nents
;
1538 edesc
->dst_nents
= dst_nents
;
1539 edesc
->sec4_sg_bytes
= sec4_sg_bytes
;
1540 edesc
->sec4_sg
= (void *)edesc
+ sizeof(struct ablkcipher_edesc
) +
1544 dma_to_sec4_sg_one(edesc
->sec4_sg
, iv_dma
, ivsize
, 0);
1545 sg_to_sec4_sg_last(req
->src
, mapped_src_nents
,
1546 edesc
->sec4_sg
+ 1, 0);
1549 if (mapped_dst_nents
> 1) {
1550 sg_to_sec4_sg_last(req
->dst
, mapped_dst_nents
,
1551 edesc
->sec4_sg
+ dst_sg_idx
, 0);
1554 edesc
->sec4_sg_dma
= dma_map_single(jrdev
, edesc
->sec4_sg
,
1555 sec4_sg_bytes
, DMA_TO_DEVICE
);
1556 if (dma_mapping_error(jrdev
, edesc
->sec4_sg_dma
)) {
1557 dev_err(jrdev
, "unable to map S/G table\n");
1558 caam_unmap(jrdev
, req
->src
, req
->dst
, src_nents
, dst_nents
,
1559 iv_dma
, ivsize
, 0, 0);
1561 return ERR_PTR(-ENOMEM
);
1564 edesc
->iv_dma
= iv_dma
;
1567 print_hex_dump(KERN_ERR
, "ablkcipher sec4_sg@"__stringify(__LINE__
)": ",
1568 DUMP_PREFIX_ADDRESS
, 16, 4, edesc
->sec4_sg
,
1572 *iv_contig_out
= in_contig
;
1576 static int ablkcipher_encrypt(struct ablkcipher_request
*req
)
1578 struct ablkcipher_edesc
*edesc
;
1579 struct crypto_ablkcipher
*ablkcipher
= crypto_ablkcipher_reqtfm(req
);
1580 struct caam_ctx
*ctx
= crypto_ablkcipher_ctx(ablkcipher
);
1581 struct device
*jrdev
= ctx
->jrdev
;
1586 /* allocate extended descriptor */
1587 edesc
= ablkcipher_edesc_alloc(req
, DESC_JOB_IO_LEN
*
1588 CAAM_CMD_SZ
, &iv_contig
);
1590 return PTR_ERR(edesc
);
1592 /* Create and submit job descriptor*/
1593 init_ablkcipher_job(ctx
->sh_desc_enc
,
1594 ctx
->sh_desc_enc_dma
, edesc
, req
, iv_contig
);
1596 print_hex_dump(KERN_ERR
, "ablkcipher jobdesc@"__stringify(__LINE__
)": ",
1597 DUMP_PREFIX_ADDRESS
, 16, 4, edesc
->hw_desc
,
1598 desc_bytes(edesc
->hw_desc
), 1);
1600 desc
= edesc
->hw_desc
;
1601 ret
= caam_jr_enqueue(jrdev
, desc
, ablkcipher_encrypt_done
, req
);
1606 ablkcipher_unmap(jrdev
, edesc
, req
);
1613 static int ablkcipher_decrypt(struct ablkcipher_request
*req
)
1615 struct ablkcipher_edesc
*edesc
;
1616 struct crypto_ablkcipher
*ablkcipher
= crypto_ablkcipher_reqtfm(req
);
1617 struct caam_ctx
*ctx
= crypto_ablkcipher_ctx(ablkcipher
);
1618 struct device
*jrdev
= ctx
->jrdev
;
1623 /* allocate extended descriptor */
1624 edesc
= ablkcipher_edesc_alloc(req
, DESC_JOB_IO_LEN
*
1625 CAAM_CMD_SZ
, &iv_contig
);
1627 return PTR_ERR(edesc
);
1629 /* Create and submit job descriptor*/
1630 init_ablkcipher_job(ctx
->sh_desc_dec
,
1631 ctx
->sh_desc_dec_dma
, edesc
, req
, iv_contig
);
1632 desc
= edesc
->hw_desc
;
1634 print_hex_dump(KERN_ERR
, "ablkcipher jobdesc@"__stringify(__LINE__
)": ",
1635 DUMP_PREFIX_ADDRESS
, 16, 4, edesc
->hw_desc
,
1636 desc_bytes(edesc
->hw_desc
), 1);
1639 ret
= caam_jr_enqueue(jrdev
, desc
, ablkcipher_decrypt_done
, req
);
1643 ablkcipher_unmap(jrdev
, edesc
, req
);
1651 * allocate and map the ablkcipher extended descriptor
1652 * for ablkcipher givencrypt
1654 static struct ablkcipher_edesc
*ablkcipher_giv_edesc_alloc(
1655 struct skcipher_givcrypt_request
*greq
,
1657 bool *iv_contig_out
)
1659 struct ablkcipher_request
*req
= &greq
->creq
;
1660 struct crypto_ablkcipher
*ablkcipher
= crypto_ablkcipher_reqtfm(req
);
1661 struct caam_ctx
*ctx
= crypto_ablkcipher_ctx(ablkcipher
);
1662 struct device
*jrdev
= ctx
->jrdev
;
1663 gfp_t flags
= (req
->base
.flags
& CRYPTO_TFM_REQ_MAY_SLEEP
) ?
1664 GFP_KERNEL
: GFP_ATOMIC
;
1665 int src_nents
, mapped_src_nents
, dst_nents
, mapped_dst_nents
;
1666 struct ablkcipher_edesc
*edesc
;
1667 dma_addr_t iv_dma
= 0;
1669 int ivsize
= crypto_ablkcipher_ivsize(ablkcipher
);
1670 int dst_sg_idx
, sec4_sg_ents
, sec4_sg_bytes
;
1672 src_nents
= sg_nents_for_len(req
->src
, req
->nbytes
);
1673 if (unlikely(src_nents
< 0)) {
1674 dev_err(jrdev
, "Insufficient bytes (%d) in src S/G\n",
1676 return ERR_PTR(src_nents
);
1679 if (likely(req
->src
== req
->dst
)) {
1680 mapped_src_nents
= dma_map_sg(jrdev
, req
->src
, src_nents
,
1682 if (unlikely(!mapped_src_nents
)) {
1683 dev_err(jrdev
, "unable to map source\n");
1684 return ERR_PTR(-ENOMEM
);
1687 dst_nents
= src_nents
;
1688 mapped_dst_nents
= src_nents
;
1690 mapped_src_nents
= dma_map_sg(jrdev
, req
->src
, src_nents
,
1692 if (unlikely(!mapped_src_nents
)) {
1693 dev_err(jrdev
, "unable to map source\n");
1694 return ERR_PTR(-ENOMEM
);
1697 dst_nents
= sg_nents_for_len(req
->dst
, req
->nbytes
);
1698 if (unlikely(dst_nents
< 0)) {
1699 dev_err(jrdev
, "Insufficient bytes (%d) in dst S/G\n",
1701 return ERR_PTR(dst_nents
);
1704 mapped_dst_nents
= dma_map_sg(jrdev
, req
->dst
, dst_nents
,
1706 if (unlikely(!mapped_dst_nents
)) {
1707 dev_err(jrdev
, "unable to map destination\n");
1708 dma_unmap_sg(jrdev
, req
->src
, src_nents
, DMA_TO_DEVICE
);
1709 return ERR_PTR(-ENOMEM
);
1714 * Check if iv can be contiguous with source and destination.
1715 * If so, include it. If not, create scatterlist.
1717 iv_dma
= dma_map_single(jrdev
, greq
->giv
, ivsize
, DMA_TO_DEVICE
);
1718 if (dma_mapping_error(jrdev
, iv_dma
)) {
1719 dev_err(jrdev
, "unable to map IV\n");
1720 caam_unmap(jrdev
, req
->src
, req
->dst
, src_nents
, dst_nents
, 0,
1722 return ERR_PTR(-ENOMEM
);
1725 sec4_sg_ents
= mapped_src_nents
> 1 ? mapped_src_nents
: 0;
1726 dst_sg_idx
= sec4_sg_ents
;
1727 if (mapped_dst_nents
== 1 &&
1728 iv_dma
+ ivsize
== sg_dma_address(req
->dst
)) {
1732 sec4_sg_ents
+= 1 + mapped_dst_nents
;
1735 /* allocate space for base edesc and hw desc commands, link tables */
1736 sec4_sg_bytes
= sec4_sg_ents
* sizeof(struct sec4_sg_entry
);
1737 edesc
= kzalloc(sizeof(*edesc
) + desc_bytes
+ sec4_sg_bytes
,
1740 dev_err(jrdev
, "could not allocate extended descriptor\n");
1741 caam_unmap(jrdev
, req
->src
, req
->dst
, src_nents
, dst_nents
,
1742 iv_dma
, ivsize
, 0, 0);
1743 return ERR_PTR(-ENOMEM
);
1746 edesc
->src_nents
= src_nents
;
1747 edesc
->dst_nents
= dst_nents
;
1748 edesc
->sec4_sg_bytes
= sec4_sg_bytes
;
1749 edesc
->sec4_sg
= (void *)edesc
+ sizeof(struct ablkcipher_edesc
) +
1752 if (mapped_src_nents
> 1)
1753 sg_to_sec4_sg_last(req
->src
, mapped_src_nents
, edesc
->sec4_sg
,
1757 dma_to_sec4_sg_one(edesc
->sec4_sg
+ dst_sg_idx
,
1759 sg_to_sec4_sg_last(req
->dst
, mapped_dst_nents
,
1760 edesc
->sec4_sg
+ dst_sg_idx
+ 1, 0);
1763 edesc
->sec4_sg_dma
= dma_map_single(jrdev
, edesc
->sec4_sg
,
1764 sec4_sg_bytes
, DMA_TO_DEVICE
);
1765 if (dma_mapping_error(jrdev
, edesc
->sec4_sg_dma
)) {
1766 dev_err(jrdev
, "unable to map S/G table\n");
1767 caam_unmap(jrdev
, req
->src
, req
->dst
, src_nents
, dst_nents
,
1768 iv_dma
, ivsize
, 0, 0);
1770 return ERR_PTR(-ENOMEM
);
1772 edesc
->iv_dma
= iv_dma
;
1775 print_hex_dump(KERN_ERR
,
1776 "ablkcipher sec4_sg@" __stringify(__LINE__
) ": ",
1777 DUMP_PREFIX_ADDRESS
, 16, 4, edesc
->sec4_sg
,
1781 *iv_contig_out
= out_contig
;
1785 static int ablkcipher_givencrypt(struct skcipher_givcrypt_request
*creq
)
1787 struct ablkcipher_request
*req
= &creq
->creq
;
1788 struct ablkcipher_edesc
*edesc
;
1789 struct crypto_ablkcipher
*ablkcipher
= crypto_ablkcipher_reqtfm(req
);
1790 struct caam_ctx
*ctx
= crypto_ablkcipher_ctx(ablkcipher
);
1791 struct device
*jrdev
= ctx
->jrdev
;
1792 bool iv_contig
= false;
1796 /* allocate extended descriptor */
1797 edesc
= ablkcipher_giv_edesc_alloc(creq
, DESC_JOB_IO_LEN
*
1798 CAAM_CMD_SZ
, &iv_contig
);
1800 return PTR_ERR(edesc
);
1802 /* Create and submit job descriptor*/
1803 init_ablkcipher_giv_job(ctx
->sh_desc_givenc
, ctx
->sh_desc_givenc_dma
,
1804 edesc
, req
, iv_contig
);
1806 print_hex_dump(KERN_ERR
,
1807 "ablkcipher jobdesc@" __stringify(__LINE__
) ": ",
1808 DUMP_PREFIX_ADDRESS
, 16, 4, edesc
->hw_desc
,
1809 desc_bytes(edesc
->hw_desc
), 1);
1811 desc
= edesc
->hw_desc
;
1812 ret
= caam_jr_enqueue(jrdev
, desc
, ablkcipher_encrypt_done
, req
);
1817 ablkcipher_unmap(jrdev
, edesc
, req
);
1824 #define template_aead template_u.aead
1825 #define template_ablkcipher template_u.ablkcipher
1826 struct caam_alg_template
{
1827 char name
[CRYPTO_MAX_ALG_NAME
];
1828 char driver_name
[CRYPTO_MAX_ALG_NAME
];
1829 unsigned int blocksize
;
1832 struct ablkcipher_alg ablkcipher
;
1834 u32 class1_alg_type
;
1835 u32 class2_alg_type
;
1838 static struct caam_alg_template driver_algs
[] = {
1839 /* ablkcipher descriptor */
1842 .driver_name
= "cbc-aes-caam",
1843 .blocksize
= AES_BLOCK_SIZE
,
1844 .type
= CRYPTO_ALG_TYPE_GIVCIPHER
,
1845 .template_ablkcipher
= {
1846 .setkey
= ablkcipher_setkey
,
1847 .encrypt
= ablkcipher_encrypt
,
1848 .decrypt
= ablkcipher_decrypt
,
1849 .givencrypt
= ablkcipher_givencrypt
,
1850 .geniv
= "<built-in>",
1851 .min_keysize
= AES_MIN_KEY_SIZE
,
1852 .max_keysize
= AES_MAX_KEY_SIZE
,
1853 .ivsize
= AES_BLOCK_SIZE
,
1855 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CBC
,
1858 .name
= "cbc(des3_ede)",
1859 .driver_name
= "cbc-3des-caam",
1860 .blocksize
= DES3_EDE_BLOCK_SIZE
,
1861 .type
= CRYPTO_ALG_TYPE_GIVCIPHER
,
1862 .template_ablkcipher
= {
1863 .setkey
= ablkcipher_setkey
,
1864 .encrypt
= ablkcipher_encrypt
,
1865 .decrypt
= ablkcipher_decrypt
,
1866 .givencrypt
= ablkcipher_givencrypt
,
1867 .geniv
= "<built-in>",
1868 .min_keysize
= DES3_EDE_KEY_SIZE
,
1869 .max_keysize
= DES3_EDE_KEY_SIZE
,
1870 .ivsize
= DES3_EDE_BLOCK_SIZE
,
1872 .class1_alg_type
= OP_ALG_ALGSEL_3DES
| OP_ALG_AAI_CBC
,
1876 .driver_name
= "cbc-des-caam",
1877 .blocksize
= DES_BLOCK_SIZE
,
1878 .type
= CRYPTO_ALG_TYPE_GIVCIPHER
,
1879 .template_ablkcipher
= {
1880 .setkey
= ablkcipher_setkey
,
1881 .encrypt
= ablkcipher_encrypt
,
1882 .decrypt
= ablkcipher_decrypt
,
1883 .givencrypt
= ablkcipher_givencrypt
,
1884 .geniv
= "<built-in>",
1885 .min_keysize
= DES_KEY_SIZE
,
1886 .max_keysize
= DES_KEY_SIZE
,
1887 .ivsize
= DES_BLOCK_SIZE
,
1889 .class1_alg_type
= OP_ALG_ALGSEL_DES
| OP_ALG_AAI_CBC
,
1893 .driver_name
= "ctr-aes-caam",
1895 .type
= CRYPTO_ALG_TYPE_ABLKCIPHER
,
1896 .template_ablkcipher
= {
1897 .setkey
= ablkcipher_setkey
,
1898 .encrypt
= ablkcipher_encrypt
,
1899 .decrypt
= ablkcipher_decrypt
,
1901 .min_keysize
= AES_MIN_KEY_SIZE
,
1902 .max_keysize
= AES_MAX_KEY_SIZE
,
1903 .ivsize
= AES_BLOCK_SIZE
,
1905 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CTR_MOD128
,
1908 .name
= "rfc3686(ctr(aes))",
1909 .driver_name
= "rfc3686-ctr-aes-caam",
1911 .type
= CRYPTO_ALG_TYPE_GIVCIPHER
,
1912 .template_ablkcipher
= {
1913 .setkey
= ablkcipher_setkey
,
1914 .encrypt
= ablkcipher_encrypt
,
1915 .decrypt
= ablkcipher_decrypt
,
1916 .givencrypt
= ablkcipher_givencrypt
,
1917 .geniv
= "<built-in>",
1918 .min_keysize
= AES_MIN_KEY_SIZE
+
1919 CTR_RFC3686_NONCE_SIZE
,
1920 .max_keysize
= AES_MAX_KEY_SIZE
+
1921 CTR_RFC3686_NONCE_SIZE
,
1922 .ivsize
= CTR_RFC3686_IV_SIZE
,
1924 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CTR_MOD128
,
1928 .driver_name
= "xts-aes-caam",
1929 .blocksize
= AES_BLOCK_SIZE
,
1930 .type
= CRYPTO_ALG_TYPE_ABLKCIPHER
,
1931 .template_ablkcipher
= {
1932 .setkey
= xts_ablkcipher_setkey
,
1933 .encrypt
= ablkcipher_encrypt
,
1934 .decrypt
= ablkcipher_decrypt
,
1936 .min_keysize
= 2 * AES_MIN_KEY_SIZE
,
1937 .max_keysize
= 2 * AES_MAX_KEY_SIZE
,
1938 .ivsize
= AES_BLOCK_SIZE
,
1940 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_XTS
,
1944 static struct caam_aead_alg driver_aeads
[] = {
1948 .cra_name
= "rfc4106(gcm(aes))",
1949 .cra_driver_name
= "rfc4106-gcm-aes-caam",
1952 .setkey
= rfc4106_setkey
,
1953 .setauthsize
= rfc4106_setauthsize
,
1954 .encrypt
= ipsec_gcm_encrypt
,
1955 .decrypt
= ipsec_gcm_decrypt
,
1956 .ivsize
= GCM_RFC4106_IV_SIZE
,
1957 .maxauthsize
= AES_BLOCK_SIZE
,
1960 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_GCM
,
1966 .cra_name
= "rfc4543(gcm(aes))",
1967 .cra_driver_name
= "rfc4543-gcm-aes-caam",
1970 .setkey
= rfc4543_setkey
,
1971 .setauthsize
= rfc4543_setauthsize
,
1972 .encrypt
= ipsec_gcm_encrypt
,
1973 .decrypt
= ipsec_gcm_decrypt
,
1974 .ivsize
= GCM_RFC4543_IV_SIZE
,
1975 .maxauthsize
= AES_BLOCK_SIZE
,
1978 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_GCM
,
1981 /* Galois Counter Mode */
1985 .cra_name
= "gcm(aes)",
1986 .cra_driver_name
= "gcm-aes-caam",
1989 .setkey
= gcm_setkey
,
1990 .setauthsize
= gcm_setauthsize
,
1991 .encrypt
= gcm_encrypt
,
1992 .decrypt
= gcm_decrypt
,
1993 .ivsize
= GCM_AES_IV_SIZE
,
1994 .maxauthsize
= AES_BLOCK_SIZE
,
1997 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_GCM
,
2000 /* single-pass ipsec_esp descriptor */
2004 .cra_name
= "authenc(hmac(md5),"
2005 "ecb(cipher_null))",
2006 .cra_driver_name
= "authenc-hmac-md5-"
2007 "ecb-cipher_null-caam",
2008 .cra_blocksize
= NULL_BLOCK_SIZE
,
2010 .setkey
= aead_setkey
,
2011 .setauthsize
= aead_setauthsize
,
2012 .encrypt
= aead_encrypt
,
2013 .decrypt
= aead_decrypt
,
2014 .ivsize
= NULL_IV_SIZE
,
2015 .maxauthsize
= MD5_DIGEST_SIZE
,
2018 .class2_alg_type
= OP_ALG_ALGSEL_MD5
|
2019 OP_ALG_AAI_HMAC_PRECOMP
,
2025 .cra_name
= "authenc(hmac(sha1),"
2026 "ecb(cipher_null))",
2027 .cra_driver_name
= "authenc-hmac-sha1-"
2028 "ecb-cipher_null-caam",
2029 .cra_blocksize
= NULL_BLOCK_SIZE
,
2031 .setkey
= aead_setkey
,
2032 .setauthsize
= aead_setauthsize
,
2033 .encrypt
= aead_encrypt
,
2034 .decrypt
= aead_decrypt
,
2035 .ivsize
= NULL_IV_SIZE
,
2036 .maxauthsize
= SHA1_DIGEST_SIZE
,
2039 .class2_alg_type
= OP_ALG_ALGSEL_SHA1
|
2040 OP_ALG_AAI_HMAC_PRECOMP
,
2046 .cra_name
= "authenc(hmac(sha224),"
2047 "ecb(cipher_null))",
2048 .cra_driver_name
= "authenc-hmac-sha224-"
2049 "ecb-cipher_null-caam",
2050 .cra_blocksize
= NULL_BLOCK_SIZE
,
2052 .setkey
= aead_setkey
,
2053 .setauthsize
= aead_setauthsize
,
2054 .encrypt
= aead_encrypt
,
2055 .decrypt
= aead_decrypt
,
2056 .ivsize
= NULL_IV_SIZE
,
2057 .maxauthsize
= SHA224_DIGEST_SIZE
,
2060 .class2_alg_type
= OP_ALG_ALGSEL_SHA224
|
2061 OP_ALG_AAI_HMAC_PRECOMP
,
2067 .cra_name
= "authenc(hmac(sha256),"
2068 "ecb(cipher_null))",
2069 .cra_driver_name
= "authenc-hmac-sha256-"
2070 "ecb-cipher_null-caam",
2071 .cra_blocksize
= NULL_BLOCK_SIZE
,
2073 .setkey
= aead_setkey
,
2074 .setauthsize
= aead_setauthsize
,
2075 .encrypt
= aead_encrypt
,
2076 .decrypt
= aead_decrypt
,
2077 .ivsize
= NULL_IV_SIZE
,
2078 .maxauthsize
= SHA256_DIGEST_SIZE
,
2081 .class2_alg_type
= OP_ALG_ALGSEL_SHA256
|
2082 OP_ALG_AAI_HMAC_PRECOMP
,
2088 .cra_name
= "authenc(hmac(sha384),"
2089 "ecb(cipher_null))",
2090 .cra_driver_name
= "authenc-hmac-sha384-"
2091 "ecb-cipher_null-caam",
2092 .cra_blocksize
= NULL_BLOCK_SIZE
,
2094 .setkey
= aead_setkey
,
2095 .setauthsize
= aead_setauthsize
,
2096 .encrypt
= aead_encrypt
,
2097 .decrypt
= aead_decrypt
,
2098 .ivsize
= NULL_IV_SIZE
,
2099 .maxauthsize
= SHA384_DIGEST_SIZE
,
2102 .class2_alg_type
= OP_ALG_ALGSEL_SHA384
|
2103 OP_ALG_AAI_HMAC_PRECOMP
,
2109 .cra_name
= "authenc(hmac(sha512),"
2110 "ecb(cipher_null))",
2111 .cra_driver_name
= "authenc-hmac-sha512-"
2112 "ecb-cipher_null-caam",
2113 .cra_blocksize
= NULL_BLOCK_SIZE
,
2115 .setkey
= aead_setkey
,
2116 .setauthsize
= aead_setauthsize
,
2117 .encrypt
= aead_encrypt
,
2118 .decrypt
= aead_decrypt
,
2119 .ivsize
= NULL_IV_SIZE
,
2120 .maxauthsize
= SHA512_DIGEST_SIZE
,
2123 .class2_alg_type
= OP_ALG_ALGSEL_SHA512
|
2124 OP_ALG_AAI_HMAC_PRECOMP
,
2130 .cra_name
= "authenc(hmac(md5),cbc(aes))",
2131 .cra_driver_name
= "authenc-hmac-md5-"
2133 .cra_blocksize
= AES_BLOCK_SIZE
,
2135 .setkey
= aead_setkey
,
2136 .setauthsize
= aead_setauthsize
,
2137 .encrypt
= aead_encrypt
,
2138 .decrypt
= aead_decrypt
,
2139 .ivsize
= AES_BLOCK_SIZE
,
2140 .maxauthsize
= MD5_DIGEST_SIZE
,
2143 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CBC
,
2144 .class2_alg_type
= OP_ALG_ALGSEL_MD5
|
2145 OP_ALG_AAI_HMAC_PRECOMP
,
2151 .cra_name
= "echainiv(authenc(hmac(md5),"
2153 .cra_driver_name
= "echainiv-authenc-hmac-md5-"
2155 .cra_blocksize
= AES_BLOCK_SIZE
,
2157 .setkey
= aead_setkey
,
2158 .setauthsize
= aead_setauthsize
,
2159 .encrypt
= aead_encrypt
,
2160 .decrypt
= aead_decrypt
,
2161 .ivsize
= AES_BLOCK_SIZE
,
2162 .maxauthsize
= MD5_DIGEST_SIZE
,
2165 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CBC
,
2166 .class2_alg_type
= OP_ALG_ALGSEL_MD5
|
2167 OP_ALG_AAI_HMAC_PRECOMP
,
2174 .cra_name
= "authenc(hmac(sha1),cbc(aes))",
2175 .cra_driver_name
= "authenc-hmac-sha1-"
2177 .cra_blocksize
= AES_BLOCK_SIZE
,
2179 .setkey
= aead_setkey
,
2180 .setauthsize
= aead_setauthsize
,
2181 .encrypt
= aead_encrypt
,
2182 .decrypt
= aead_decrypt
,
2183 .ivsize
= AES_BLOCK_SIZE
,
2184 .maxauthsize
= SHA1_DIGEST_SIZE
,
2187 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CBC
,
2188 .class2_alg_type
= OP_ALG_ALGSEL_SHA1
|
2189 OP_ALG_AAI_HMAC_PRECOMP
,
2195 .cra_name
= "echainiv(authenc(hmac(sha1),"
2197 .cra_driver_name
= "echainiv-authenc-"
2198 "hmac-sha1-cbc-aes-caam",
2199 .cra_blocksize
= AES_BLOCK_SIZE
,
2201 .setkey
= aead_setkey
,
2202 .setauthsize
= aead_setauthsize
,
2203 .encrypt
= aead_encrypt
,
2204 .decrypt
= aead_decrypt
,
2205 .ivsize
= AES_BLOCK_SIZE
,
2206 .maxauthsize
= SHA1_DIGEST_SIZE
,
2209 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CBC
,
2210 .class2_alg_type
= OP_ALG_ALGSEL_SHA1
|
2211 OP_ALG_AAI_HMAC_PRECOMP
,
2218 .cra_name
= "authenc(hmac(sha224),cbc(aes))",
2219 .cra_driver_name
= "authenc-hmac-sha224-"
2221 .cra_blocksize
= AES_BLOCK_SIZE
,
2223 .setkey
= aead_setkey
,
2224 .setauthsize
= aead_setauthsize
,
2225 .encrypt
= aead_encrypt
,
2226 .decrypt
= aead_decrypt
,
2227 .ivsize
= AES_BLOCK_SIZE
,
2228 .maxauthsize
= SHA224_DIGEST_SIZE
,
2231 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CBC
,
2232 .class2_alg_type
= OP_ALG_ALGSEL_SHA224
|
2233 OP_ALG_AAI_HMAC_PRECOMP
,
2239 .cra_name
= "echainiv(authenc(hmac(sha224),"
2241 .cra_driver_name
= "echainiv-authenc-"
2242 "hmac-sha224-cbc-aes-caam",
2243 .cra_blocksize
= AES_BLOCK_SIZE
,
2245 .setkey
= aead_setkey
,
2246 .setauthsize
= aead_setauthsize
,
2247 .encrypt
= aead_encrypt
,
2248 .decrypt
= aead_decrypt
,
2249 .ivsize
= AES_BLOCK_SIZE
,
2250 .maxauthsize
= SHA224_DIGEST_SIZE
,
2253 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CBC
,
2254 .class2_alg_type
= OP_ALG_ALGSEL_SHA224
|
2255 OP_ALG_AAI_HMAC_PRECOMP
,
2262 .cra_name
= "authenc(hmac(sha256),cbc(aes))",
2263 .cra_driver_name
= "authenc-hmac-sha256-"
2265 .cra_blocksize
= AES_BLOCK_SIZE
,
2267 .setkey
= aead_setkey
,
2268 .setauthsize
= aead_setauthsize
,
2269 .encrypt
= aead_encrypt
,
2270 .decrypt
= aead_decrypt
,
2271 .ivsize
= AES_BLOCK_SIZE
,
2272 .maxauthsize
= SHA256_DIGEST_SIZE
,
2275 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CBC
,
2276 .class2_alg_type
= OP_ALG_ALGSEL_SHA256
|
2277 OP_ALG_AAI_HMAC_PRECOMP
,
2283 .cra_name
= "echainiv(authenc(hmac(sha256),"
2285 .cra_driver_name
= "echainiv-authenc-"
2286 "hmac-sha256-cbc-aes-caam",
2287 .cra_blocksize
= AES_BLOCK_SIZE
,
2289 .setkey
= aead_setkey
,
2290 .setauthsize
= aead_setauthsize
,
2291 .encrypt
= aead_encrypt
,
2292 .decrypt
= aead_decrypt
,
2293 .ivsize
= AES_BLOCK_SIZE
,
2294 .maxauthsize
= SHA256_DIGEST_SIZE
,
2297 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CBC
,
2298 .class2_alg_type
= OP_ALG_ALGSEL_SHA256
|
2299 OP_ALG_AAI_HMAC_PRECOMP
,
2306 .cra_name
= "authenc(hmac(sha384),cbc(aes))",
2307 .cra_driver_name
= "authenc-hmac-sha384-"
2309 .cra_blocksize
= AES_BLOCK_SIZE
,
2311 .setkey
= aead_setkey
,
2312 .setauthsize
= aead_setauthsize
,
2313 .encrypt
= aead_encrypt
,
2314 .decrypt
= aead_decrypt
,
2315 .ivsize
= AES_BLOCK_SIZE
,
2316 .maxauthsize
= SHA384_DIGEST_SIZE
,
2319 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CBC
,
2320 .class2_alg_type
= OP_ALG_ALGSEL_SHA384
|
2321 OP_ALG_AAI_HMAC_PRECOMP
,
2327 .cra_name
= "echainiv(authenc(hmac(sha384),"
2329 .cra_driver_name
= "echainiv-authenc-"
2330 "hmac-sha384-cbc-aes-caam",
2331 .cra_blocksize
= AES_BLOCK_SIZE
,
2333 .setkey
= aead_setkey
,
2334 .setauthsize
= aead_setauthsize
,
2335 .encrypt
= aead_encrypt
,
2336 .decrypt
= aead_decrypt
,
2337 .ivsize
= AES_BLOCK_SIZE
,
2338 .maxauthsize
= SHA384_DIGEST_SIZE
,
2341 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CBC
,
2342 .class2_alg_type
= OP_ALG_ALGSEL_SHA384
|
2343 OP_ALG_AAI_HMAC_PRECOMP
,
2350 .cra_name
= "authenc(hmac(sha512),cbc(aes))",
2351 .cra_driver_name
= "authenc-hmac-sha512-"
2353 .cra_blocksize
= AES_BLOCK_SIZE
,
2355 .setkey
= aead_setkey
,
2356 .setauthsize
= aead_setauthsize
,
2357 .encrypt
= aead_encrypt
,
2358 .decrypt
= aead_decrypt
,
2359 .ivsize
= AES_BLOCK_SIZE
,
2360 .maxauthsize
= SHA512_DIGEST_SIZE
,
2363 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CBC
,
2364 .class2_alg_type
= OP_ALG_ALGSEL_SHA512
|
2365 OP_ALG_AAI_HMAC_PRECOMP
,
2371 .cra_name
= "echainiv(authenc(hmac(sha512),"
2373 .cra_driver_name
= "echainiv-authenc-"
2374 "hmac-sha512-cbc-aes-caam",
2375 .cra_blocksize
= AES_BLOCK_SIZE
,
2377 .setkey
= aead_setkey
,
2378 .setauthsize
= aead_setauthsize
,
2379 .encrypt
= aead_encrypt
,
2380 .decrypt
= aead_decrypt
,
2381 .ivsize
= AES_BLOCK_SIZE
,
2382 .maxauthsize
= SHA512_DIGEST_SIZE
,
2385 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CBC
,
2386 .class2_alg_type
= OP_ALG_ALGSEL_SHA512
|
2387 OP_ALG_AAI_HMAC_PRECOMP
,
2394 .cra_name
= "authenc(hmac(md5),cbc(des3_ede))",
2395 .cra_driver_name
= "authenc-hmac-md5-"
2396 "cbc-des3_ede-caam",
2397 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2399 .setkey
= aead_setkey
,
2400 .setauthsize
= aead_setauthsize
,
2401 .encrypt
= aead_encrypt
,
2402 .decrypt
= aead_decrypt
,
2403 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2404 .maxauthsize
= MD5_DIGEST_SIZE
,
2407 .class1_alg_type
= OP_ALG_ALGSEL_3DES
| OP_ALG_AAI_CBC
,
2408 .class2_alg_type
= OP_ALG_ALGSEL_MD5
|
2409 OP_ALG_AAI_HMAC_PRECOMP
,
2415 .cra_name
= "echainiv(authenc(hmac(md5),"
2417 .cra_driver_name
= "echainiv-authenc-hmac-md5-"
2418 "cbc-des3_ede-caam",
2419 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2421 .setkey
= aead_setkey
,
2422 .setauthsize
= aead_setauthsize
,
2423 .encrypt
= aead_encrypt
,
2424 .decrypt
= aead_decrypt
,
2425 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2426 .maxauthsize
= MD5_DIGEST_SIZE
,
2429 .class1_alg_type
= OP_ALG_ALGSEL_3DES
| OP_ALG_AAI_CBC
,
2430 .class2_alg_type
= OP_ALG_ALGSEL_MD5
|
2431 OP_ALG_AAI_HMAC_PRECOMP
,
2438 .cra_name
= "authenc(hmac(sha1),"
2440 .cra_driver_name
= "authenc-hmac-sha1-"
2441 "cbc-des3_ede-caam",
2442 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2444 .setkey
= aead_setkey
,
2445 .setauthsize
= aead_setauthsize
,
2446 .encrypt
= aead_encrypt
,
2447 .decrypt
= aead_decrypt
,
2448 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2449 .maxauthsize
= SHA1_DIGEST_SIZE
,
2452 .class1_alg_type
= OP_ALG_ALGSEL_3DES
| OP_ALG_AAI_CBC
,
2453 .class2_alg_type
= OP_ALG_ALGSEL_SHA1
|
2454 OP_ALG_AAI_HMAC_PRECOMP
,
2460 .cra_name
= "echainiv(authenc(hmac(sha1),"
2462 .cra_driver_name
= "echainiv-authenc-"
2464 "cbc-des3_ede-caam",
2465 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2467 .setkey
= aead_setkey
,
2468 .setauthsize
= aead_setauthsize
,
2469 .encrypt
= aead_encrypt
,
2470 .decrypt
= aead_decrypt
,
2471 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2472 .maxauthsize
= SHA1_DIGEST_SIZE
,
2475 .class1_alg_type
= OP_ALG_ALGSEL_3DES
| OP_ALG_AAI_CBC
,
2476 .class2_alg_type
= OP_ALG_ALGSEL_SHA1
|
2477 OP_ALG_AAI_HMAC_PRECOMP
,
2484 .cra_name
= "authenc(hmac(sha224),"
2486 .cra_driver_name
= "authenc-hmac-sha224-"
2487 "cbc-des3_ede-caam",
2488 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2490 .setkey
= aead_setkey
,
2491 .setauthsize
= aead_setauthsize
,
2492 .encrypt
= aead_encrypt
,
2493 .decrypt
= aead_decrypt
,
2494 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2495 .maxauthsize
= SHA224_DIGEST_SIZE
,
2498 .class1_alg_type
= OP_ALG_ALGSEL_3DES
| OP_ALG_AAI_CBC
,
2499 .class2_alg_type
= OP_ALG_ALGSEL_SHA224
|
2500 OP_ALG_AAI_HMAC_PRECOMP
,
2506 .cra_name
= "echainiv(authenc(hmac(sha224),"
2508 .cra_driver_name
= "echainiv-authenc-"
2510 "cbc-des3_ede-caam",
2511 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2513 .setkey
= aead_setkey
,
2514 .setauthsize
= aead_setauthsize
,
2515 .encrypt
= aead_encrypt
,
2516 .decrypt
= aead_decrypt
,
2517 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2518 .maxauthsize
= SHA224_DIGEST_SIZE
,
2521 .class1_alg_type
= OP_ALG_ALGSEL_3DES
| OP_ALG_AAI_CBC
,
2522 .class2_alg_type
= OP_ALG_ALGSEL_SHA224
|
2523 OP_ALG_AAI_HMAC_PRECOMP
,
2530 .cra_name
= "authenc(hmac(sha256),"
2532 .cra_driver_name
= "authenc-hmac-sha256-"
2533 "cbc-des3_ede-caam",
2534 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2536 .setkey
= aead_setkey
,
2537 .setauthsize
= aead_setauthsize
,
2538 .encrypt
= aead_encrypt
,
2539 .decrypt
= aead_decrypt
,
2540 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2541 .maxauthsize
= SHA256_DIGEST_SIZE
,
2544 .class1_alg_type
= OP_ALG_ALGSEL_3DES
| OP_ALG_AAI_CBC
,
2545 .class2_alg_type
= OP_ALG_ALGSEL_SHA256
|
2546 OP_ALG_AAI_HMAC_PRECOMP
,
2552 .cra_name
= "echainiv(authenc(hmac(sha256),"
2554 .cra_driver_name
= "echainiv-authenc-"
2556 "cbc-des3_ede-caam",
2557 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2559 .setkey
= aead_setkey
,
2560 .setauthsize
= aead_setauthsize
,
2561 .encrypt
= aead_encrypt
,
2562 .decrypt
= aead_decrypt
,
2563 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2564 .maxauthsize
= SHA256_DIGEST_SIZE
,
2567 .class1_alg_type
= OP_ALG_ALGSEL_3DES
| OP_ALG_AAI_CBC
,
2568 .class2_alg_type
= OP_ALG_ALGSEL_SHA256
|
2569 OP_ALG_AAI_HMAC_PRECOMP
,
2576 .cra_name
= "authenc(hmac(sha384),"
2578 .cra_driver_name
= "authenc-hmac-sha384-"
2579 "cbc-des3_ede-caam",
2580 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2582 .setkey
= aead_setkey
,
2583 .setauthsize
= aead_setauthsize
,
2584 .encrypt
= aead_encrypt
,
2585 .decrypt
= aead_decrypt
,
2586 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2587 .maxauthsize
= SHA384_DIGEST_SIZE
,
2590 .class1_alg_type
= OP_ALG_ALGSEL_3DES
| OP_ALG_AAI_CBC
,
2591 .class2_alg_type
= OP_ALG_ALGSEL_SHA384
|
2592 OP_ALG_AAI_HMAC_PRECOMP
,
2598 .cra_name
= "echainiv(authenc(hmac(sha384),"
2600 .cra_driver_name
= "echainiv-authenc-"
2602 "cbc-des3_ede-caam",
2603 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2605 .setkey
= aead_setkey
,
2606 .setauthsize
= aead_setauthsize
,
2607 .encrypt
= aead_encrypt
,
2608 .decrypt
= aead_decrypt
,
2609 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2610 .maxauthsize
= SHA384_DIGEST_SIZE
,
2613 .class1_alg_type
= OP_ALG_ALGSEL_3DES
| OP_ALG_AAI_CBC
,
2614 .class2_alg_type
= OP_ALG_ALGSEL_SHA384
|
2615 OP_ALG_AAI_HMAC_PRECOMP
,
2622 .cra_name
= "authenc(hmac(sha512),"
2624 .cra_driver_name
= "authenc-hmac-sha512-"
2625 "cbc-des3_ede-caam",
2626 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2628 .setkey
= aead_setkey
,
2629 .setauthsize
= aead_setauthsize
,
2630 .encrypt
= aead_encrypt
,
2631 .decrypt
= aead_decrypt
,
2632 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2633 .maxauthsize
= SHA512_DIGEST_SIZE
,
2636 .class1_alg_type
= OP_ALG_ALGSEL_3DES
| OP_ALG_AAI_CBC
,
2637 .class2_alg_type
= OP_ALG_ALGSEL_SHA512
|
2638 OP_ALG_AAI_HMAC_PRECOMP
,
2644 .cra_name
= "echainiv(authenc(hmac(sha512),"
2646 .cra_driver_name
= "echainiv-authenc-"
2648 "cbc-des3_ede-caam",
2649 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2651 .setkey
= aead_setkey
,
2652 .setauthsize
= aead_setauthsize
,
2653 .encrypt
= aead_encrypt
,
2654 .decrypt
= aead_decrypt
,
2655 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2656 .maxauthsize
= SHA512_DIGEST_SIZE
,
2659 .class1_alg_type
= OP_ALG_ALGSEL_3DES
| OP_ALG_AAI_CBC
,
2660 .class2_alg_type
= OP_ALG_ALGSEL_SHA512
|
2661 OP_ALG_AAI_HMAC_PRECOMP
,
2668 .cra_name
= "authenc(hmac(md5),cbc(des))",
2669 .cra_driver_name
= "authenc-hmac-md5-"
2671 .cra_blocksize
= DES_BLOCK_SIZE
,
2673 .setkey
= aead_setkey
,
2674 .setauthsize
= aead_setauthsize
,
2675 .encrypt
= aead_encrypt
,
2676 .decrypt
= aead_decrypt
,
2677 .ivsize
= DES_BLOCK_SIZE
,
2678 .maxauthsize
= MD5_DIGEST_SIZE
,
2681 .class1_alg_type
= OP_ALG_ALGSEL_DES
| OP_ALG_AAI_CBC
,
2682 .class2_alg_type
= OP_ALG_ALGSEL_MD5
|
2683 OP_ALG_AAI_HMAC_PRECOMP
,
2689 .cra_name
= "echainiv(authenc(hmac(md5),"
2691 .cra_driver_name
= "echainiv-authenc-hmac-md5-"
2693 .cra_blocksize
= DES_BLOCK_SIZE
,
2695 .setkey
= aead_setkey
,
2696 .setauthsize
= aead_setauthsize
,
2697 .encrypt
= aead_encrypt
,
2698 .decrypt
= aead_decrypt
,
2699 .ivsize
= DES_BLOCK_SIZE
,
2700 .maxauthsize
= MD5_DIGEST_SIZE
,
2703 .class1_alg_type
= OP_ALG_ALGSEL_DES
| OP_ALG_AAI_CBC
,
2704 .class2_alg_type
= OP_ALG_ALGSEL_MD5
|
2705 OP_ALG_AAI_HMAC_PRECOMP
,
2712 .cra_name
= "authenc(hmac(sha1),cbc(des))",
2713 .cra_driver_name
= "authenc-hmac-sha1-"
2715 .cra_blocksize
= DES_BLOCK_SIZE
,
2717 .setkey
= aead_setkey
,
2718 .setauthsize
= aead_setauthsize
,
2719 .encrypt
= aead_encrypt
,
2720 .decrypt
= aead_decrypt
,
2721 .ivsize
= DES_BLOCK_SIZE
,
2722 .maxauthsize
= SHA1_DIGEST_SIZE
,
2725 .class1_alg_type
= OP_ALG_ALGSEL_DES
| OP_ALG_AAI_CBC
,
2726 .class2_alg_type
= OP_ALG_ALGSEL_SHA1
|
2727 OP_ALG_AAI_HMAC_PRECOMP
,
2733 .cra_name
= "echainiv(authenc(hmac(sha1),"
2735 .cra_driver_name
= "echainiv-authenc-"
2736 "hmac-sha1-cbc-des-caam",
2737 .cra_blocksize
= DES_BLOCK_SIZE
,
2739 .setkey
= aead_setkey
,
2740 .setauthsize
= aead_setauthsize
,
2741 .encrypt
= aead_encrypt
,
2742 .decrypt
= aead_decrypt
,
2743 .ivsize
= DES_BLOCK_SIZE
,
2744 .maxauthsize
= SHA1_DIGEST_SIZE
,
2747 .class1_alg_type
= OP_ALG_ALGSEL_DES
| OP_ALG_AAI_CBC
,
2748 .class2_alg_type
= OP_ALG_ALGSEL_SHA1
|
2749 OP_ALG_AAI_HMAC_PRECOMP
,
2756 .cra_name
= "authenc(hmac(sha224),cbc(des))",
2757 .cra_driver_name
= "authenc-hmac-sha224-"
2759 .cra_blocksize
= DES_BLOCK_SIZE
,
2761 .setkey
= aead_setkey
,
2762 .setauthsize
= aead_setauthsize
,
2763 .encrypt
= aead_encrypt
,
2764 .decrypt
= aead_decrypt
,
2765 .ivsize
= DES_BLOCK_SIZE
,
2766 .maxauthsize
= SHA224_DIGEST_SIZE
,
2769 .class1_alg_type
= OP_ALG_ALGSEL_DES
| OP_ALG_AAI_CBC
,
2770 .class2_alg_type
= OP_ALG_ALGSEL_SHA224
|
2771 OP_ALG_AAI_HMAC_PRECOMP
,
2777 .cra_name
= "echainiv(authenc(hmac(sha224),"
2779 .cra_driver_name
= "echainiv-authenc-"
2780 "hmac-sha224-cbc-des-caam",
2781 .cra_blocksize
= DES_BLOCK_SIZE
,
2783 .setkey
= aead_setkey
,
2784 .setauthsize
= aead_setauthsize
,
2785 .encrypt
= aead_encrypt
,
2786 .decrypt
= aead_decrypt
,
2787 .ivsize
= DES_BLOCK_SIZE
,
2788 .maxauthsize
= SHA224_DIGEST_SIZE
,
2791 .class1_alg_type
= OP_ALG_ALGSEL_DES
| OP_ALG_AAI_CBC
,
2792 .class2_alg_type
= OP_ALG_ALGSEL_SHA224
|
2793 OP_ALG_AAI_HMAC_PRECOMP
,
2800 .cra_name
= "authenc(hmac(sha256),cbc(des))",
2801 .cra_driver_name
= "authenc-hmac-sha256-"
2803 .cra_blocksize
= DES_BLOCK_SIZE
,
2805 .setkey
= aead_setkey
,
2806 .setauthsize
= aead_setauthsize
,
2807 .encrypt
= aead_encrypt
,
2808 .decrypt
= aead_decrypt
,
2809 .ivsize
= DES_BLOCK_SIZE
,
2810 .maxauthsize
= SHA256_DIGEST_SIZE
,
2813 .class1_alg_type
= OP_ALG_ALGSEL_DES
| OP_ALG_AAI_CBC
,
2814 .class2_alg_type
= OP_ALG_ALGSEL_SHA256
|
2815 OP_ALG_AAI_HMAC_PRECOMP
,
2821 .cra_name
= "echainiv(authenc(hmac(sha256),"
2823 .cra_driver_name
= "echainiv-authenc-"
2824 "hmac-sha256-cbc-des-caam",
2825 .cra_blocksize
= DES_BLOCK_SIZE
,
2827 .setkey
= aead_setkey
,
2828 .setauthsize
= aead_setauthsize
,
2829 .encrypt
= aead_encrypt
,
2830 .decrypt
= aead_decrypt
,
2831 .ivsize
= DES_BLOCK_SIZE
,
2832 .maxauthsize
= SHA256_DIGEST_SIZE
,
2835 .class1_alg_type
= OP_ALG_ALGSEL_DES
| OP_ALG_AAI_CBC
,
2836 .class2_alg_type
= OP_ALG_ALGSEL_SHA256
|
2837 OP_ALG_AAI_HMAC_PRECOMP
,
2844 .cra_name
= "authenc(hmac(sha384),cbc(des))",
2845 .cra_driver_name
= "authenc-hmac-sha384-"
2847 .cra_blocksize
= DES_BLOCK_SIZE
,
2849 .setkey
= aead_setkey
,
2850 .setauthsize
= aead_setauthsize
,
2851 .encrypt
= aead_encrypt
,
2852 .decrypt
= aead_decrypt
,
2853 .ivsize
= DES_BLOCK_SIZE
,
2854 .maxauthsize
= SHA384_DIGEST_SIZE
,
2857 .class1_alg_type
= OP_ALG_ALGSEL_DES
| OP_ALG_AAI_CBC
,
2858 .class2_alg_type
= OP_ALG_ALGSEL_SHA384
|
2859 OP_ALG_AAI_HMAC_PRECOMP
,
2865 .cra_name
= "echainiv(authenc(hmac(sha384),"
2867 .cra_driver_name
= "echainiv-authenc-"
2868 "hmac-sha384-cbc-des-caam",
2869 .cra_blocksize
= DES_BLOCK_SIZE
,
2871 .setkey
= aead_setkey
,
2872 .setauthsize
= aead_setauthsize
,
2873 .encrypt
= aead_encrypt
,
2874 .decrypt
= aead_decrypt
,
2875 .ivsize
= DES_BLOCK_SIZE
,
2876 .maxauthsize
= SHA384_DIGEST_SIZE
,
2879 .class1_alg_type
= OP_ALG_ALGSEL_DES
| OP_ALG_AAI_CBC
,
2880 .class2_alg_type
= OP_ALG_ALGSEL_SHA384
|
2881 OP_ALG_AAI_HMAC_PRECOMP
,
2888 .cra_name
= "authenc(hmac(sha512),cbc(des))",
2889 .cra_driver_name
= "authenc-hmac-sha512-"
2891 .cra_blocksize
= DES_BLOCK_SIZE
,
2893 .setkey
= aead_setkey
,
2894 .setauthsize
= aead_setauthsize
,
2895 .encrypt
= aead_encrypt
,
2896 .decrypt
= aead_decrypt
,
2897 .ivsize
= DES_BLOCK_SIZE
,
2898 .maxauthsize
= SHA512_DIGEST_SIZE
,
2901 .class1_alg_type
= OP_ALG_ALGSEL_DES
| OP_ALG_AAI_CBC
,
2902 .class2_alg_type
= OP_ALG_ALGSEL_SHA512
|
2903 OP_ALG_AAI_HMAC_PRECOMP
,
2909 .cra_name
= "echainiv(authenc(hmac(sha512),"
2911 .cra_driver_name
= "echainiv-authenc-"
2912 "hmac-sha512-cbc-des-caam",
2913 .cra_blocksize
= DES_BLOCK_SIZE
,
2915 .setkey
= aead_setkey
,
2916 .setauthsize
= aead_setauthsize
,
2917 .encrypt
= aead_encrypt
,
2918 .decrypt
= aead_decrypt
,
2919 .ivsize
= DES_BLOCK_SIZE
,
2920 .maxauthsize
= SHA512_DIGEST_SIZE
,
2923 .class1_alg_type
= OP_ALG_ALGSEL_DES
| OP_ALG_AAI_CBC
,
2924 .class2_alg_type
= OP_ALG_ALGSEL_SHA512
|
2925 OP_ALG_AAI_HMAC_PRECOMP
,
2932 .cra_name
= "authenc(hmac(md5),"
2933 "rfc3686(ctr(aes)))",
2934 .cra_driver_name
= "authenc-hmac-md5-"
2935 "rfc3686-ctr-aes-caam",
2938 .setkey
= aead_setkey
,
2939 .setauthsize
= aead_setauthsize
,
2940 .encrypt
= aead_encrypt
,
2941 .decrypt
= aead_decrypt
,
2942 .ivsize
= CTR_RFC3686_IV_SIZE
,
2943 .maxauthsize
= MD5_DIGEST_SIZE
,
2946 .class1_alg_type
= OP_ALG_ALGSEL_AES
|
2947 OP_ALG_AAI_CTR_MOD128
,
2948 .class2_alg_type
= OP_ALG_ALGSEL_MD5
|
2949 OP_ALG_AAI_HMAC_PRECOMP
,
2956 .cra_name
= "seqiv(authenc("
2957 "hmac(md5),rfc3686(ctr(aes))))",
2958 .cra_driver_name
= "seqiv-authenc-hmac-md5-"
2959 "rfc3686-ctr-aes-caam",
2962 .setkey
= aead_setkey
,
2963 .setauthsize
= aead_setauthsize
,
2964 .encrypt
= aead_encrypt
,
2965 .decrypt
= aead_decrypt
,
2966 .ivsize
= CTR_RFC3686_IV_SIZE
,
2967 .maxauthsize
= MD5_DIGEST_SIZE
,
2970 .class1_alg_type
= OP_ALG_ALGSEL_AES
|
2971 OP_ALG_AAI_CTR_MOD128
,
2972 .class2_alg_type
= OP_ALG_ALGSEL_MD5
|
2973 OP_ALG_AAI_HMAC_PRECOMP
,
2981 .cra_name
= "authenc(hmac(sha1),"
2982 "rfc3686(ctr(aes)))",
2983 .cra_driver_name
= "authenc-hmac-sha1-"
2984 "rfc3686-ctr-aes-caam",
2987 .setkey
= aead_setkey
,
2988 .setauthsize
= aead_setauthsize
,
2989 .encrypt
= aead_encrypt
,
2990 .decrypt
= aead_decrypt
,
2991 .ivsize
= CTR_RFC3686_IV_SIZE
,
2992 .maxauthsize
= SHA1_DIGEST_SIZE
,
2995 .class1_alg_type
= OP_ALG_ALGSEL_AES
|
2996 OP_ALG_AAI_CTR_MOD128
,
2997 .class2_alg_type
= OP_ALG_ALGSEL_SHA1
|
2998 OP_ALG_AAI_HMAC_PRECOMP
,
3005 .cra_name
= "seqiv(authenc("
3006 "hmac(sha1),rfc3686(ctr(aes))))",
3007 .cra_driver_name
= "seqiv-authenc-hmac-sha1-"
3008 "rfc3686-ctr-aes-caam",
3011 .setkey
= aead_setkey
,
3012 .setauthsize
= aead_setauthsize
,
3013 .encrypt
= aead_encrypt
,
3014 .decrypt
= aead_decrypt
,
3015 .ivsize
= CTR_RFC3686_IV_SIZE
,
3016 .maxauthsize
= SHA1_DIGEST_SIZE
,
3019 .class1_alg_type
= OP_ALG_ALGSEL_AES
|
3020 OP_ALG_AAI_CTR_MOD128
,
3021 .class2_alg_type
= OP_ALG_ALGSEL_SHA1
|
3022 OP_ALG_AAI_HMAC_PRECOMP
,
3030 .cra_name
= "authenc(hmac(sha224),"
3031 "rfc3686(ctr(aes)))",
3032 .cra_driver_name
= "authenc-hmac-sha224-"
3033 "rfc3686-ctr-aes-caam",
3036 .setkey
= aead_setkey
,
3037 .setauthsize
= aead_setauthsize
,
3038 .encrypt
= aead_encrypt
,
3039 .decrypt
= aead_decrypt
,
3040 .ivsize
= CTR_RFC3686_IV_SIZE
,
3041 .maxauthsize
= SHA224_DIGEST_SIZE
,
3044 .class1_alg_type
= OP_ALG_ALGSEL_AES
|
3045 OP_ALG_AAI_CTR_MOD128
,
3046 .class2_alg_type
= OP_ALG_ALGSEL_SHA224
|
3047 OP_ALG_AAI_HMAC_PRECOMP
,
3054 .cra_name
= "seqiv(authenc("
3055 "hmac(sha224),rfc3686(ctr(aes))))",
3056 .cra_driver_name
= "seqiv-authenc-hmac-sha224-"
3057 "rfc3686-ctr-aes-caam",
3060 .setkey
= aead_setkey
,
3061 .setauthsize
= aead_setauthsize
,
3062 .encrypt
= aead_encrypt
,
3063 .decrypt
= aead_decrypt
,
3064 .ivsize
= CTR_RFC3686_IV_SIZE
,
3065 .maxauthsize
= SHA224_DIGEST_SIZE
,
3068 .class1_alg_type
= OP_ALG_ALGSEL_AES
|
3069 OP_ALG_AAI_CTR_MOD128
,
3070 .class2_alg_type
= OP_ALG_ALGSEL_SHA224
|
3071 OP_ALG_AAI_HMAC_PRECOMP
,
3079 .cra_name
= "authenc(hmac(sha256),"
3080 "rfc3686(ctr(aes)))",
3081 .cra_driver_name
= "authenc-hmac-sha256-"
3082 "rfc3686-ctr-aes-caam",
3085 .setkey
= aead_setkey
,
3086 .setauthsize
= aead_setauthsize
,
3087 .encrypt
= aead_encrypt
,
3088 .decrypt
= aead_decrypt
,
3089 .ivsize
= CTR_RFC3686_IV_SIZE
,
3090 .maxauthsize
= SHA256_DIGEST_SIZE
,
3093 .class1_alg_type
= OP_ALG_ALGSEL_AES
|
3094 OP_ALG_AAI_CTR_MOD128
,
3095 .class2_alg_type
= OP_ALG_ALGSEL_SHA256
|
3096 OP_ALG_AAI_HMAC_PRECOMP
,
3103 .cra_name
= "seqiv(authenc(hmac(sha256),"
3104 "rfc3686(ctr(aes))))",
3105 .cra_driver_name
= "seqiv-authenc-hmac-sha256-"
3106 "rfc3686-ctr-aes-caam",
3109 .setkey
= aead_setkey
,
3110 .setauthsize
= aead_setauthsize
,
3111 .encrypt
= aead_encrypt
,
3112 .decrypt
= aead_decrypt
,
3113 .ivsize
= CTR_RFC3686_IV_SIZE
,
3114 .maxauthsize
= SHA256_DIGEST_SIZE
,
3117 .class1_alg_type
= OP_ALG_ALGSEL_AES
|
3118 OP_ALG_AAI_CTR_MOD128
,
3119 .class2_alg_type
= OP_ALG_ALGSEL_SHA256
|
3120 OP_ALG_AAI_HMAC_PRECOMP
,
3128 .cra_name
= "authenc(hmac(sha384),"
3129 "rfc3686(ctr(aes)))",
3130 .cra_driver_name
= "authenc-hmac-sha384-"
3131 "rfc3686-ctr-aes-caam",
3134 .setkey
= aead_setkey
,
3135 .setauthsize
= aead_setauthsize
,
3136 .encrypt
= aead_encrypt
,
3137 .decrypt
= aead_decrypt
,
3138 .ivsize
= CTR_RFC3686_IV_SIZE
,
3139 .maxauthsize
= SHA384_DIGEST_SIZE
,
3142 .class1_alg_type
= OP_ALG_ALGSEL_AES
|
3143 OP_ALG_AAI_CTR_MOD128
,
3144 .class2_alg_type
= OP_ALG_ALGSEL_SHA384
|
3145 OP_ALG_AAI_HMAC_PRECOMP
,
3152 .cra_name
= "seqiv(authenc(hmac(sha384),"
3153 "rfc3686(ctr(aes))))",
3154 .cra_driver_name
= "seqiv-authenc-hmac-sha384-"
3155 "rfc3686-ctr-aes-caam",
3158 .setkey
= aead_setkey
,
3159 .setauthsize
= aead_setauthsize
,
3160 .encrypt
= aead_encrypt
,
3161 .decrypt
= aead_decrypt
,
3162 .ivsize
= CTR_RFC3686_IV_SIZE
,
3163 .maxauthsize
= SHA384_DIGEST_SIZE
,
3166 .class1_alg_type
= OP_ALG_ALGSEL_AES
|
3167 OP_ALG_AAI_CTR_MOD128
,
3168 .class2_alg_type
= OP_ALG_ALGSEL_SHA384
|
3169 OP_ALG_AAI_HMAC_PRECOMP
,
3177 .cra_name
= "authenc(hmac(sha512),"
3178 "rfc3686(ctr(aes)))",
3179 .cra_driver_name
= "authenc-hmac-sha512-"
3180 "rfc3686-ctr-aes-caam",
3183 .setkey
= aead_setkey
,
3184 .setauthsize
= aead_setauthsize
,
3185 .encrypt
= aead_encrypt
,
3186 .decrypt
= aead_decrypt
,
3187 .ivsize
= CTR_RFC3686_IV_SIZE
,
3188 .maxauthsize
= SHA512_DIGEST_SIZE
,
3191 .class1_alg_type
= OP_ALG_ALGSEL_AES
|
3192 OP_ALG_AAI_CTR_MOD128
,
3193 .class2_alg_type
= OP_ALG_ALGSEL_SHA512
|
3194 OP_ALG_AAI_HMAC_PRECOMP
,
3201 .cra_name
= "seqiv(authenc(hmac(sha512),"
3202 "rfc3686(ctr(aes))))",
3203 .cra_driver_name
= "seqiv-authenc-hmac-sha512-"
3204 "rfc3686-ctr-aes-caam",
3207 .setkey
= aead_setkey
,
3208 .setauthsize
= aead_setauthsize
,
3209 .encrypt
= aead_encrypt
,
3210 .decrypt
= aead_decrypt
,
3211 .ivsize
= CTR_RFC3686_IV_SIZE
,
3212 .maxauthsize
= SHA512_DIGEST_SIZE
,
3215 .class1_alg_type
= OP_ALG_ALGSEL_AES
|
3216 OP_ALG_AAI_CTR_MOD128
,
3217 .class2_alg_type
= OP_ALG_ALGSEL_SHA512
|
3218 OP_ALG_AAI_HMAC_PRECOMP
,
/*
 * Wrapper tying a generic crypto_alg to its CAAM-specific algorithm
 * parameters. Instances are allocated by caam_alg_alloc() and kept on
 * the module-global alg_list so they can be unregistered and freed at
 * module exit.
 */
struct caam_crypto_alg {
	struct crypto_alg crypto_alg;	/* algorithm as registered with the crypto API */
	struct list_head entry;		/* linkage into alg_list */
	struct caam_alg_entry caam;	/* CAAM class1/class2 algorithm type words */
};
/*
 * Common per-transform (tfm) initialization: acquire a job ring and
 * DMA-map the context's shared descriptors and key as one contiguous
 * region, then record the algorithm type words used when building
 * descriptors.
 *
 * Returns 0 on success, or a negative errno if job ring allocation or
 * DMA mapping fails.
 */
static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam)
{
	dma_addr_t dma_addr;

	ctx->jrdev = caam_jr_alloc();
	if (IS_ERR(ctx->jrdev)) {
		pr_err("Job Ring Device allocation for transform failed\n");
		return PTR_ERR(ctx->jrdev);
	}

	/*
	 * Map sh_desc_enc, sh_desc_dec, sh_desc_givenc and the key with a
	 * single mapping: they are laid out contiguously in struct caam_ctx,
	 * so the mapped length is the offset of the first non-descriptor
	 * member. CPU sync is skipped here; descriptors are synced when
	 * they are actually written.
	 */
	dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
					offsetof(struct caam_ctx,
						 sh_desc_enc_dma),
					DMA_TO_DEVICE, DMA_ATTR_SKIP_CPU_SYNC);
	if (dma_mapping_error(ctx->jrdev, dma_addr)) {
		dev_err(ctx->jrdev, "unable to map key, shared descriptors\n");
		caam_jr_free(ctx->jrdev);
		return -ENOMEM;
	}

	/* Derive the per-member bus addresses from the single mapping. */
	ctx->sh_desc_enc_dma = dma_addr;
	ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx,
						   sh_desc_dec);
	ctx->sh_desc_givenc_dma = dma_addr + offsetof(struct caam_ctx,
						      sh_desc_givenc);
	ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key);

	/* copy descriptor header template value */
	ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
	ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;

	return 0;
}
3265 static int caam_cra_init(struct crypto_tfm
*tfm
)
3267 struct crypto_alg
*alg
= tfm
->__crt_alg
;
3268 struct caam_crypto_alg
*caam_alg
=
3269 container_of(alg
, struct caam_crypto_alg
, crypto_alg
);
3270 struct caam_ctx
*ctx
= crypto_tfm_ctx(tfm
);
3272 return caam_init_common(ctx
, &caam_alg
->caam
);
3275 static int caam_aead_init(struct crypto_aead
*tfm
)
3277 struct aead_alg
*alg
= crypto_aead_alg(tfm
);
3278 struct caam_aead_alg
*caam_alg
=
3279 container_of(alg
, struct caam_aead_alg
, aead
);
3280 struct caam_ctx
*ctx
= crypto_aead_ctx(tfm
);
3282 return caam_init_common(ctx
, &caam_alg
->caam
);
/*
 * Common per-transform teardown: undo the single DMA mapping created by
 * caam_init_common() (same length — up to sh_desc_enc_dma — and the same
 * DMA_ATTR_SKIP_CPU_SYNC attribute), then release the job ring.
 */
static void caam_exit_common(struct caam_ctx *ctx)
{
	dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
			       offsetof(struct caam_ctx, sh_desc_enc_dma),
			       DMA_TO_DEVICE, DMA_ATTR_SKIP_CPU_SYNC);
	caam_jr_free(ctx->jrdev);
}
/* crypto_alg ->cra_exit hook: tear down the per-tfm CAAM context. */
static void caam_cra_exit(struct crypto_tfm *tfm)
{
	struct caam_ctx *ctx = crypto_tfm_ctx(tfm);

	caam_exit_common(ctx);
}
/* aead_alg ->exit hook: tear down the per-tfm CAAM context. */
static void caam_aead_exit(struct crypto_aead *tfm)
{
	struct caam_ctx *ctx = crypto_aead_ctx(tfm);

	caam_exit_common(ctx);
}
/*
 * Module exit: unregister every AEAD algorithm that was successfully
 * registered (tracked via ->registered), then unregister and free all
 * dynamically allocated caam_crypto_alg entries on alg_list.
 */
static void __exit caam_algapi_exit(void)
{
	struct caam_crypto_alg *t_alg, *n;
	int i;

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;

		/* only unregister what caam_algapi_init() registered */
		if (t_alg->registered)
			crypto_unregister_aead(&t_alg->aead);
	}

	/* alg_list is never initialized if init bailed out early */
	if (!alg_list.next)
		return;

	list_for_each_entry_safe(t_alg, n, &alg_list, entry) {
		crypto_unregister_alg(&t_alg->crypto_alg);
		list_del(&t_alg->entry);
		kfree(t_alg);
	}
}
/*
 * Allocate and populate a caam_crypto_alg from a driver template:
 * copy the names, install the common init/exit hooks and priority,
 * and wire up the type-specific (givcipher/ablkcipher) union member.
 *
 * Returns the new wrapper, or ERR_PTR(-ENOMEM) on allocation failure.
 * Caller owns the allocation (freed at module exit via alg_list).
 */
static struct caam_crypto_alg *caam_alg_alloc(struct caam_alg_template
					      *template)
{
	struct caam_crypto_alg *t_alg;
	struct crypto_alg *alg;

	t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL);
	if (!t_alg) {
		pr_err("failed to allocate t_alg\n");
		return ERR_PTR(-ENOMEM);
	}

	alg = &t_alg->crypto_alg;

	snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", template->name);
	snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
		 template->driver_name);
	alg->cra_module = THIS_MODULE;
	alg->cra_init = caam_cra_init;
	alg->cra_exit = caam_cra_exit;
	alg->cra_priority = CAAM_CRA_PRIORITY;
	alg->cra_blocksize = template->blocksize;
	alg->cra_alignmask = 0;
	alg->cra_ctxsize = sizeof(struct caam_ctx);
	alg->cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY |
			 template->type;
	switch (template->type) {
	case CRYPTO_ALG_TYPE_GIVCIPHER:
		alg->cra_type = &crypto_givcipher_type;
		alg->cra_ablkcipher = template->template_ablkcipher;
		break;
	case CRYPTO_ALG_TYPE_ABLKCIPHER:
		alg->cra_type = &crypto_ablkcipher_type;
		alg->cra_ablkcipher = template->template_ablkcipher;
		break;
	}

	/* stash the CAAM algorithm type words for caam_init_common() */
	t_alg->caam.class1_alg_type = template->class1_alg_type;
	t_alg->caam.class2_alg_type = template->class2_alg_type;

	return t_alg;
}
/*
 * Fill in the driver-common fields of a statically defined AEAD
 * algorithm (module, priority, context size, flags, init/exit hooks)
 * before it is registered with the crypto API.
 */
static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
{
	struct aead_alg *alg = &t_alg->aead;

	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CAAM_CRA_PRIORITY;
	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;

	alg->init = caam_aead_init;
	alg->exit = caam_aead_exit;
}
/*
 * Module init: locate the CAAM controller via the device tree, read
 * its capability registers, and register every algorithm in
 * driver_algs[] / driver_aeads[] that the hardware actually supports.
 *
 * Returns 0 (or the last registration error) once probing completes,
 * -ENODEV if no CAAM controller node/device/private data is found.
 */
static int __init caam_algapi_init(void)
{
	struct device_node *dev_node;
	struct platform_device *pdev;
	struct device *ctrldev;
	struct caam_drv_private *priv;
	int i = 0, err = 0;
	u32 cha_vid, cha_inst, des_inst, aes_inst, md_inst;
	unsigned int md_limit = SHA512_DIGEST_SIZE;
	bool registered = false;

	/* try the newer binding first, then the legacy one */
	dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0");
	if (!dev_node) {
		dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0");
		if (!dev_node)
			return -ENODEV;
	}

	pdev = of_find_device_by_node(dev_node);
	if (!pdev) {
		of_node_put(dev_node);
		return -ENODEV;
	}

	ctrldev = &pdev->dev;
	priv = dev_get_drvdata(ctrldev);
	of_node_put(dev_node);

	/*
	 * If priv is NULL, it's probably because the caam driver wasn't
	 * properly initialized (e.g. RNG4 init failed). Thus, bail out here.
	 */
	if (!priv)
		return -ENODEV;

	INIT_LIST_HEAD(&alg_list);

	/*
	 * Register crypto algorithms the device supports.
	 * First, detect presence and attributes of DES, AES, and MD blocks.
	 */
	cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
	cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
	des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >> CHA_ID_LS_DES_SHIFT;
	aes_inst = (cha_inst & CHA_ID_LS_AES_MASK) >> CHA_ID_LS_AES_SHIFT;
	md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;

	/* If MD is present, limit digest size based on LP256 */
	if (md_inst && ((cha_vid & CHA_ID_LS_MD_MASK) == CHA_ID_LS_MD_LP256))
		md_limit = SHA256_DIGEST_SIZE;

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		struct caam_crypto_alg *t_alg;
		struct caam_alg_template *alg = driver_algs + i;
		u32 alg_sel = alg->class1_alg_type & OP_ALG_ALGSEL_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/*
		 * Check support for AES modes not available
		 * on LP devices.
		 */
		if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
			if ((alg->class1_alg_type & OP_ALG_AAI_MASK) ==
			     OP_ALG_AAI_XTS)
				continue;

		t_alg = caam_alg_alloc(alg);
		if (IS_ERR(t_alg)) {
			err = PTR_ERR(t_alg);
			pr_warn("%s alg allocation failed\n", alg->driver_name);
			continue;
		}

		err = crypto_register_alg(&t_alg->crypto_alg);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->crypto_alg.cra_driver_name);
			kfree(t_alg);
			continue;
		}

		/* keep registered algs for cleanup in caam_algapi_exit() */
		list_add_tail(&t_alg->entry, &alg_list);
		registered = true;
	}

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;
		u32 c1_alg_sel = t_alg->caam.class1_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 c2_alg_sel = t_alg->caam.class2_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (c1_alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/*
		 * Check support for AES algorithms not available
		 * on LP devices.
		 */
		if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
			if (alg_aai == OP_ALG_AAI_GCM)
				continue;

		/*
		 * Skip algorithms requiring message digests
		 * if MD or MD size is not supported by device.
		 */
		if (c2_alg_sel &&
		    (!md_inst || (t_alg->aead.maxauthsize > md_limit)))
			continue;

		caam_aead_alg_init(t_alg);

		err = crypto_register_aead(&t_alg->aead);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->aead.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

	if (registered)
		pr_info("caam algorithms registered in /proc/crypto\n");

	return err;
}
/* Module entry/exit points and metadata. */
module_init(caam_algapi_init);
module_exit(caam_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("FSL CAAM support for crypto API");
MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");