2 * caam - Freescale FSL CAAM support for crypto API
4 * Copyright 2008-2011 Freescale Semiconductor, Inc.
7 * Based on talitos crypto API driver.
9 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
11 * --------------- ---------------
12 * | JobDesc #1 |-------------------->| ShareDesc |
13 * | *(packet 1) | | (PDB) |
14 * --------------- |------------->| (hashKey) |
16 * . | |-------->| (operation) |
17 * --------------- | | ---------------
18 * | JobDesc #2 |------| |
24 * | JobDesc #3 |------------
28 * The SharedDesc never changes for a connection unless rekeyed, but
29 * each packet will likely be in a different place. So all we need
30 * to know to process the packet is where the input is, where the
31 * output goes, and what context we want to process with. Context is
32 * in the SharedDesc, packet references in the JobDesc.
34 * So, a job desc looks like:
36 * ---------------------
38 * | ShareDesc Pointer |
45 * ---------------------
52 #include "desc_constr.h"
55 #include "sg_sw_sec4.h"
57 #include "caamalg_desc.h"
62 #define CAAM_CRA_PRIORITY 3000
63 /* max key is sum of AES_MAX_KEY_SIZE, max split key size */
64 #define CAAM_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + \
65 CTR_RFC3686_NONCE_SIZE + \
66 SHA512_DIGEST_SIZE * 2)
68 #define AEAD_DESC_JOB_IO_LEN (DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
69 #define GCM_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \
71 #define AUTHENC_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \
74 #define DESC_MAX_USED_BYTES (CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN)
75 #define DESC_MAX_USED_LEN (DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
78 /* for print_hex_dumps with line references */
79 #define debug(format, arg...) printk(format, arg)
81 #define debug(format, arg...)
85 #include <linux/highmem.h>
87 static void dbg_dump_sg(const char *level
, const char *prefix_str
,
88 int prefix_type
, int rowsize
, int groupsize
,
89 struct scatterlist
*sg
, size_t tlen
, bool ascii
)
91 struct scatterlist
*it
;
96 for (it
= sg
; it
!= NULL
&& tlen
> 0 ; it
= sg_next(sg
)) {
98 * make sure the scatterlist's page
99 * has a valid virtual memory mapping
101 it_page
= kmap_atomic(sg_page(it
));
102 if (unlikely(!it_page
)) {
103 printk(KERN_ERR
"dbg_dump_sg: kmap failed\n");
107 buf
= it_page
+ it
->offset
;
108 len
= min_t(size_t, tlen
, it
->length
);
109 print_hex_dump(level
, prefix_str
, prefix_type
, rowsize
,
110 groupsize
, buf
, len
, ascii
);
113 kunmap_atomic(it_page
);
118 static struct list_head alg_list
;
120 struct caam_alg_entry
{
127 struct caam_aead_alg
{
128 struct aead_alg aead
;
129 struct caam_alg_entry caam
;
134 * per-session context
137 u32 sh_desc_enc
[DESC_MAX_USED_LEN
];
138 u32 sh_desc_dec
[DESC_MAX_USED_LEN
];
139 u32 sh_desc_givenc
[DESC_MAX_USED_LEN
];
140 u8 key
[CAAM_MAX_KEY_SIZE
];
141 dma_addr_t sh_desc_enc_dma
;
142 dma_addr_t sh_desc_dec_dma
;
143 dma_addr_t sh_desc_givenc_dma
;
145 struct device
*jrdev
;
146 struct alginfo adata
;
147 struct alginfo cdata
;
148 unsigned int authsize
;
151 static int aead_null_set_sh_desc(struct crypto_aead
*aead
)
153 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
154 struct device
*jrdev
= ctx
->jrdev
;
156 int rem_bytes
= CAAM_DESC_BYTES_MAX
- AEAD_DESC_JOB_IO_LEN
-
157 ctx
->adata
.keylen_pad
;
160 * Job Descriptor and Shared Descriptors
161 * must all fit into the 64-word Descriptor h/w Buffer
163 if (rem_bytes
>= DESC_AEAD_NULL_ENC_LEN
) {
164 ctx
->adata
.key_inline
= true;
165 ctx
->adata
.key_virt
= ctx
->key
;
167 ctx
->adata
.key_inline
= false;
168 ctx
->adata
.key_dma
= ctx
->key_dma
;
171 /* aead_encrypt shared descriptor */
172 desc
= ctx
->sh_desc_enc
;
173 cnstr_shdsc_aead_null_encap(desc
, &ctx
->adata
, ctx
->authsize
);
174 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_enc_dma
,
175 desc_bytes(desc
), DMA_TO_DEVICE
);
178 * Job Descriptor and Shared Descriptors
179 * must all fit into the 64-word Descriptor h/w Buffer
181 if (rem_bytes
>= DESC_AEAD_NULL_DEC_LEN
) {
182 ctx
->adata
.key_inline
= true;
183 ctx
->adata
.key_virt
= ctx
->key
;
185 ctx
->adata
.key_inline
= false;
186 ctx
->adata
.key_dma
= ctx
->key_dma
;
189 /* aead_decrypt shared descriptor */
190 desc
= ctx
->sh_desc_dec
;
191 cnstr_shdsc_aead_null_decap(desc
, &ctx
->adata
, ctx
->authsize
);
192 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_dec_dma
,
193 desc_bytes(desc
), DMA_TO_DEVICE
);
198 static int aead_set_sh_desc(struct crypto_aead
*aead
)
200 struct caam_aead_alg
*alg
= container_of(crypto_aead_alg(aead
),
201 struct caam_aead_alg
, aead
);
202 unsigned int ivsize
= crypto_aead_ivsize(aead
);
203 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
204 struct device
*jrdev
= ctx
->jrdev
;
206 u32
*desc
, *nonce
= NULL
;
208 unsigned int data_len
[2];
209 const bool ctr_mode
= ((ctx
->cdata
.algtype
& OP_ALG_AAI_MASK
) ==
210 OP_ALG_AAI_CTR_MOD128
);
211 const bool is_rfc3686
= alg
->caam
.rfc3686
;
216 /* NULL encryption / decryption */
217 if (!ctx
->cdata
.keylen
)
218 return aead_null_set_sh_desc(aead
);
221 * AES-CTR needs to load IV in CONTEXT1 reg
222 * at an offset of 128bits (16bytes)
223 * CONTEXT1[255:128] = IV
230 * CONTEXT1[255:128] = {NONCE, IV, COUNTER}
233 ctx1_iv_off
= 16 + CTR_RFC3686_NONCE_SIZE
;
234 nonce
= (u32
*)((void *)ctx
->key
+ ctx
->adata
.keylen_pad
+
235 ctx
->cdata
.keylen
- CTR_RFC3686_NONCE_SIZE
);
238 data_len
[0] = ctx
->adata
.keylen_pad
;
239 data_len
[1] = ctx
->cdata
.keylen
;
245 * Job Descriptor and Shared Descriptors
246 * must all fit into the 64-word Descriptor h/w Buffer
248 if (desc_inline_query(DESC_AEAD_ENC_LEN
+
249 (is_rfc3686
? DESC_AEAD_CTR_RFC3686_LEN
: 0),
250 AUTHENC_DESC_JOB_IO_LEN
, data_len
, &inl_mask
,
251 ARRAY_SIZE(data_len
)) < 0)
255 ctx
->adata
.key_virt
= ctx
->key
;
257 ctx
->adata
.key_dma
= ctx
->key_dma
;
260 ctx
->cdata
.key_virt
= ctx
->key
+ ctx
->adata
.keylen_pad
;
262 ctx
->cdata
.key_dma
= ctx
->key_dma
+ ctx
->adata
.keylen_pad
;
264 ctx
->adata
.key_inline
= !!(inl_mask
& 1);
265 ctx
->cdata
.key_inline
= !!(inl_mask
& 2);
267 /* aead_encrypt shared descriptor */
268 desc
= ctx
->sh_desc_enc
;
269 cnstr_shdsc_aead_encap(desc
, &ctx
->cdata
, &ctx
->adata
, ctx
->authsize
,
270 is_rfc3686
, nonce
, ctx1_iv_off
);
271 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_enc_dma
,
272 desc_bytes(desc
), DMA_TO_DEVICE
);
276 * Job Descriptor and Shared Descriptors
277 * must all fit into the 64-word Descriptor h/w Buffer
279 if (desc_inline_query(DESC_AEAD_DEC_LEN
+
280 (is_rfc3686
? DESC_AEAD_CTR_RFC3686_LEN
: 0),
281 AUTHENC_DESC_JOB_IO_LEN
, data_len
, &inl_mask
,
282 ARRAY_SIZE(data_len
)) < 0)
286 ctx
->adata
.key_virt
= ctx
->key
;
288 ctx
->adata
.key_dma
= ctx
->key_dma
;
291 ctx
->cdata
.key_virt
= ctx
->key
+ ctx
->adata
.keylen_pad
;
293 ctx
->cdata
.key_dma
= ctx
->key_dma
+ ctx
->adata
.keylen_pad
;
295 ctx
->adata
.key_inline
= !!(inl_mask
& 1);
296 ctx
->cdata
.key_inline
= !!(inl_mask
& 2);
298 /* aead_decrypt shared descriptor */
299 desc
= ctx
->sh_desc_dec
;
300 cnstr_shdsc_aead_decap(desc
, &ctx
->cdata
, &ctx
->adata
, ivsize
,
301 ctx
->authsize
, alg
->caam
.geniv
, is_rfc3686
,
303 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_dec_dma
,
304 desc_bytes(desc
), DMA_TO_DEVICE
);
306 if (!alg
->caam
.geniv
)
310 * Job Descriptor and Shared Descriptors
311 * must all fit into the 64-word Descriptor h/w Buffer
313 if (desc_inline_query(DESC_AEAD_GIVENC_LEN
+
314 (is_rfc3686
? DESC_AEAD_CTR_RFC3686_LEN
: 0),
315 AUTHENC_DESC_JOB_IO_LEN
, data_len
, &inl_mask
,
316 ARRAY_SIZE(data_len
)) < 0)
320 ctx
->adata
.key_virt
= ctx
->key
;
322 ctx
->adata
.key_dma
= ctx
->key_dma
;
325 ctx
->cdata
.key_virt
= ctx
->key
+ ctx
->adata
.keylen_pad
;
327 ctx
->cdata
.key_dma
= ctx
->key_dma
+ ctx
->adata
.keylen_pad
;
329 ctx
->adata
.key_inline
= !!(inl_mask
& 1);
330 ctx
->cdata
.key_inline
= !!(inl_mask
& 2);
332 /* aead_givencrypt shared descriptor */
333 desc
= ctx
->sh_desc_enc
;
334 cnstr_shdsc_aead_givencap(desc
, &ctx
->cdata
, &ctx
->adata
, ivsize
,
335 ctx
->authsize
, is_rfc3686
, nonce
,
337 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_enc_dma
,
338 desc_bytes(desc
), DMA_TO_DEVICE
);
344 static int aead_setauthsize(struct crypto_aead
*authenc
,
345 unsigned int authsize
)
347 struct caam_ctx
*ctx
= crypto_aead_ctx(authenc
);
349 ctx
->authsize
= authsize
;
350 aead_set_sh_desc(authenc
);
355 static int gcm_set_sh_desc(struct crypto_aead
*aead
)
357 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
358 struct device
*jrdev
= ctx
->jrdev
;
360 int rem_bytes
= CAAM_DESC_BYTES_MAX
- GCM_DESC_JOB_IO_LEN
-
363 if (!ctx
->cdata
.keylen
|| !ctx
->authsize
)
367 * AES GCM encrypt shared descriptor
368 * Job Descriptor and Shared Descriptor
369 * must fit into the 64-word Descriptor h/w Buffer
371 if (rem_bytes
>= DESC_GCM_ENC_LEN
) {
372 ctx
->cdata
.key_inline
= true;
373 ctx
->cdata
.key_virt
= ctx
->key
;
375 ctx
->cdata
.key_inline
= false;
376 ctx
->cdata
.key_dma
= ctx
->key_dma
;
379 desc
= ctx
->sh_desc_enc
;
380 cnstr_shdsc_gcm_encap(desc
, &ctx
->cdata
, ctx
->authsize
);
381 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_enc_dma
,
382 desc_bytes(desc
), DMA_TO_DEVICE
);
385 * Job Descriptor and Shared Descriptors
386 * must all fit into the 64-word Descriptor h/w Buffer
388 if (rem_bytes
>= DESC_GCM_DEC_LEN
) {
389 ctx
->cdata
.key_inline
= true;
390 ctx
->cdata
.key_virt
= ctx
->key
;
392 ctx
->cdata
.key_inline
= false;
393 ctx
->cdata
.key_dma
= ctx
->key_dma
;
396 desc
= ctx
->sh_desc_dec
;
397 cnstr_shdsc_gcm_decap(desc
, &ctx
->cdata
, ctx
->authsize
);
398 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_dec_dma
,
399 desc_bytes(desc
), DMA_TO_DEVICE
);
404 static int gcm_setauthsize(struct crypto_aead
*authenc
, unsigned int authsize
)
406 struct caam_ctx
*ctx
= crypto_aead_ctx(authenc
);
408 ctx
->authsize
= authsize
;
409 gcm_set_sh_desc(authenc
);
414 static int rfc4106_set_sh_desc(struct crypto_aead
*aead
)
416 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
417 struct device
*jrdev
= ctx
->jrdev
;
419 int rem_bytes
= CAAM_DESC_BYTES_MAX
- GCM_DESC_JOB_IO_LEN
-
422 if (!ctx
->cdata
.keylen
|| !ctx
->authsize
)
426 * RFC4106 encrypt shared descriptor
427 * Job Descriptor and Shared Descriptor
428 * must fit into the 64-word Descriptor h/w Buffer
430 if (rem_bytes
>= DESC_RFC4106_ENC_LEN
) {
431 ctx
->cdata
.key_inline
= true;
432 ctx
->cdata
.key_virt
= ctx
->key
;
434 ctx
->cdata
.key_inline
= false;
435 ctx
->cdata
.key_dma
= ctx
->key_dma
;
438 desc
= ctx
->sh_desc_enc
;
439 cnstr_shdsc_rfc4106_encap(desc
, &ctx
->cdata
, ctx
->authsize
);
440 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_enc_dma
,
441 desc_bytes(desc
), DMA_TO_DEVICE
);
444 * Job Descriptor and Shared Descriptors
445 * must all fit into the 64-word Descriptor h/w Buffer
447 if (rem_bytes
>= DESC_RFC4106_DEC_LEN
) {
448 ctx
->cdata
.key_inline
= true;
449 ctx
->cdata
.key_virt
= ctx
->key
;
451 ctx
->cdata
.key_inline
= false;
452 ctx
->cdata
.key_dma
= ctx
->key_dma
;
455 desc
= ctx
->sh_desc_dec
;
456 cnstr_shdsc_rfc4106_decap(desc
, &ctx
->cdata
, ctx
->authsize
);
457 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_dec_dma
,
458 desc_bytes(desc
), DMA_TO_DEVICE
);
463 static int rfc4106_setauthsize(struct crypto_aead
*authenc
,
464 unsigned int authsize
)
466 struct caam_ctx
*ctx
= crypto_aead_ctx(authenc
);
468 ctx
->authsize
= authsize
;
469 rfc4106_set_sh_desc(authenc
);
474 static int rfc4543_set_sh_desc(struct crypto_aead
*aead
)
476 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
477 struct device
*jrdev
= ctx
->jrdev
;
479 int rem_bytes
= CAAM_DESC_BYTES_MAX
- GCM_DESC_JOB_IO_LEN
-
482 if (!ctx
->cdata
.keylen
|| !ctx
->authsize
)
486 * RFC4543 encrypt shared descriptor
487 * Job Descriptor and Shared Descriptor
488 * must fit into the 64-word Descriptor h/w Buffer
490 if (rem_bytes
>= DESC_RFC4543_ENC_LEN
) {
491 ctx
->cdata
.key_inline
= true;
492 ctx
->cdata
.key_virt
= ctx
->key
;
494 ctx
->cdata
.key_inline
= false;
495 ctx
->cdata
.key_dma
= ctx
->key_dma
;
498 desc
= ctx
->sh_desc_enc
;
499 cnstr_shdsc_rfc4543_encap(desc
, &ctx
->cdata
, ctx
->authsize
);
500 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_enc_dma
,
501 desc_bytes(desc
), DMA_TO_DEVICE
);
504 * Job Descriptor and Shared Descriptors
505 * must all fit into the 64-word Descriptor h/w Buffer
507 if (rem_bytes
>= DESC_RFC4543_DEC_LEN
) {
508 ctx
->cdata
.key_inline
= true;
509 ctx
->cdata
.key_virt
= ctx
->key
;
511 ctx
->cdata
.key_inline
= false;
512 ctx
->cdata
.key_dma
= ctx
->key_dma
;
515 desc
= ctx
->sh_desc_dec
;
516 cnstr_shdsc_rfc4543_decap(desc
, &ctx
->cdata
, ctx
->authsize
);
517 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_dec_dma
,
518 desc_bytes(desc
), DMA_TO_DEVICE
);
523 static int rfc4543_setauthsize(struct crypto_aead
*authenc
,
524 unsigned int authsize
)
526 struct caam_ctx
*ctx
= crypto_aead_ctx(authenc
);
528 ctx
->authsize
= authsize
;
529 rfc4543_set_sh_desc(authenc
);
534 static int aead_setkey(struct crypto_aead
*aead
,
535 const u8
*key
, unsigned int keylen
)
537 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
538 struct device
*jrdev
= ctx
->jrdev
;
539 struct crypto_authenc_keys keys
;
542 if (crypto_authenc_extractkeys(&keys
, key
, keylen
) != 0)
546 printk(KERN_ERR
"keylen %d enckeylen %d authkeylen %d\n",
547 keys
.authkeylen
+ keys
.enckeylen
, keys
.enckeylen
,
549 print_hex_dump(KERN_ERR
, "key in @"__stringify(__LINE__
)": ",
550 DUMP_PREFIX_ADDRESS
, 16, 4, key
, keylen
, 1);
553 ret
= gen_split_key(ctx
->jrdev
, ctx
->key
, &ctx
->adata
, keys
.authkey
,
554 keys
.authkeylen
, CAAM_MAX_KEY_SIZE
-
560 /* postpend encryption key to auth split key */
561 memcpy(ctx
->key
+ ctx
->adata
.keylen_pad
, keys
.enckey
, keys
.enckeylen
);
562 dma_sync_single_for_device(jrdev
, ctx
->key_dma
, ctx
->adata
.keylen_pad
+
563 keys
.enckeylen
, DMA_TO_DEVICE
);
565 print_hex_dump(KERN_ERR
, "ctx.key@"__stringify(__LINE__
)": ",
566 DUMP_PREFIX_ADDRESS
, 16, 4, ctx
->key
,
567 ctx
->adata
.keylen_pad
+ keys
.enckeylen
, 1);
569 ctx
->cdata
.keylen
= keys
.enckeylen
;
570 return aead_set_sh_desc(aead
);
572 crypto_aead_set_flags(aead
, CRYPTO_TFM_RES_BAD_KEY_LEN
);
576 static int gcm_setkey(struct crypto_aead
*aead
,
577 const u8
*key
, unsigned int keylen
)
579 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
580 struct device
*jrdev
= ctx
->jrdev
;
583 print_hex_dump(KERN_ERR
, "key in @"__stringify(__LINE__
)": ",
584 DUMP_PREFIX_ADDRESS
, 16, 4, key
, keylen
, 1);
587 memcpy(ctx
->key
, key
, keylen
);
588 dma_sync_single_for_device(jrdev
, ctx
->key_dma
, keylen
, DMA_TO_DEVICE
);
589 ctx
->cdata
.keylen
= keylen
;
591 return gcm_set_sh_desc(aead
);
594 static int rfc4106_setkey(struct crypto_aead
*aead
,
595 const u8
*key
, unsigned int keylen
)
597 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
598 struct device
*jrdev
= ctx
->jrdev
;
604 print_hex_dump(KERN_ERR
, "key in @"__stringify(__LINE__
)": ",
605 DUMP_PREFIX_ADDRESS
, 16, 4, key
, keylen
, 1);
608 memcpy(ctx
->key
, key
, keylen
);
611 * The last four bytes of the key material are used as the salt value
612 * in the nonce. Update the AES key length.
614 ctx
->cdata
.keylen
= keylen
- 4;
615 dma_sync_single_for_device(jrdev
, ctx
->key_dma
, ctx
->cdata
.keylen
,
617 return rfc4106_set_sh_desc(aead
);
620 static int rfc4543_setkey(struct crypto_aead
*aead
,
621 const u8
*key
, unsigned int keylen
)
623 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
624 struct device
*jrdev
= ctx
->jrdev
;
630 print_hex_dump(KERN_ERR
, "key in @"__stringify(__LINE__
)": ",
631 DUMP_PREFIX_ADDRESS
, 16, 4, key
, keylen
, 1);
634 memcpy(ctx
->key
, key
, keylen
);
637 * The last four bytes of the key material are used as the salt value
638 * in the nonce. Update the AES key length.
640 ctx
->cdata
.keylen
= keylen
- 4;
641 dma_sync_single_for_device(jrdev
, ctx
->key_dma
, ctx
->cdata
.keylen
,
643 return rfc4543_set_sh_desc(aead
);
646 static int ablkcipher_setkey(struct crypto_ablkcipher
*ablkcipher
,
647 const u8
*key
, unsigned int keylen
)
649 struct caam_ctx
*ctx
= crypto_ablkcipher_ctx(ablkcipher
);
650 struct crypto_tfm
*tfm
= crypto_ablkcipher_tfm(ablkcipher
);
651 const char *alg_name
= crypto_tfm_alg_name(tfm
);
652 struct device
*jrdev
= ctx
->jrdev
;
653 unsigned int ivsize
= crypto_ablkcipher_ivsize(ablkcipher
);
656 const bool ctr_mode
= ((ctx
->cdata
.algtype
& OP_ALG_AAI_MASK
) ==
657 OP_ALG_AAI_CTR_MOD128
);
658 const bool is_rfc3686
= (ctr_mode
&&
659 (strstr(alg_name
, "rfc3686") != NULL
));
661 memcpy(ctx
->key
, key
, keylen
);
663 print_hex_dump(KERN_ERR
, "key in @"__stringify(__LINE__
)": ",
664 DUMP_PREFIX_ADDRESS
, 16, 4, key
, keylen
, 1);
667 * AES-CTR needs to load IV in CONTEXT1 reg
668 * at an offset of 128bits (16bytes)
669 * CONTEXT1[255:128] = IV
676 * | CONTEXT1[255:128] = {NONCE, IV, COUNTER}
677 * | *key = {KEY, NONCE}
680 ctx1_iv_off
= 16 + CTR_RFC3686_NONCE_SIZE
;
681 keylen
-= CTR_RFC3686_NONCE_SIZE
;
684 dma_sync_single_for_device(jrdev
, ctx
->key_dma
, keylen
, DMA_TO_DEVICE
);
685 ctx
->cdata
.keylen
= keylen
;
686 ctx
->cdata
.key_virt
= ctx
->key
;
687 ctx
->cdata
.key_inline
= true;
689 /* ablkcipher_encrypt shared descriptor */
690 desc
= ctx
->sh_desc_enc
;
691 cnstr_shdsc_ablkcipher_encap(desc
, &ctx
->cdata
, ivsize
, is_rfc3686
,
693 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_enc_dma
,
694 desc_bytes(desc
), DMA_TO_DEVICE
);
696 /* ablkcipher_decrypt shared descriptor */
697 desc
= ctx
->sh_desc_dec
;
698 cnstr_shdsc_ablkcipher_decap(desc
, &ctx
->cdata
, ivsize
, is_rfc3686
,
700 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_dec_dma
,
701 desc_bytes(desc
), DMA_TO_DEVICE
);
703 /* ablkcipher_givencrypt shared descriptor */
704 desc
= ctx
->sh_desc_givenc
;
705 cnstr_shdsc_ablkcipher_givencap(desc
, &ctx
->cdata
, ivsize
, is_rfc3686
,
707 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_givenc_dma
,
708 desc_bytes(desc
), DMA_TO_DEVICE
);
713 static int xts_ablkcipher_setkey(struct crypto_ablkcipher
*ablkcipher
,
714 const u8
*key
, unsigned int keylen
)
716 struct caam_ctx
*ctx
= crypto_ablkcipher_ctx(ablkcipher
);
717 struct device
*jrdev
= ctx
->jrdev
;
720 if (keylen
!= 2 * AES_MIN_KEY_SIZE
&& keylen
!= 2 * AES_MAX_KEY_SIZE
) {
721 crypto_ablkcipher_set_flags(ablkcipher
,
722 CRYPTO_TFM_RES_BAD_KEY_LEN
);
723 dev_err(jrdev
, "key size mismatch\n");
727 memcpy(ctx
->key
, key
, keylen
);
728 dma_sync_single_for_device(jrdev
, ctx
->key_dma
, keylen
, DMA_TO_DEVICE
);
729 ctx
->cdata
.keylen
= keylen
;
730 ctx
->cdata
.key_virt
= ctx
->key
;
731 ctx
->cdata
.key_inline
= true;
733 /* xts_ablkcipher_encrypt shared descriptor */
734 desc
= ctx
->sh_desc_enc
;
735 cnstr_shdsc_xts_ablkcipher_encap(desc
, &ctx
->cdata
);
736 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_enc_dma
,
737 desc_bytes(desc
), DMA_TO_DEVICE
);
739 /* xts_ablkcipher_decrypt shared descriptor */
740 desc
= ctx
->sh_desc_dec
;
741 cnstr_shdsc_xts_ablkcipher_decap(desc
, &ctx
->cdata
);
742 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_dec_dma
,
743 desc_bytes(desc
), DMA_TO_DEVICE
);
749 * aead_edesc - s/w-extended aead descriptor
750 * @src_nents: number of segments in input s/w scatterlist
751 * @dst_nents: number of segments in output s/w scatterlist
752 * @sec4_sg_bytes: length of dma mapped sec4_sg space
753 * @sec4_sg_dma: bus physical mapped address of h/w link table
754 * @sec4_sg: pointer to h/w link table
755 * @hw_desc: the h/w job descriptor followed by any referenced link tables
761 dma_addr_t sec4_sg_dma
;
762 struct sec4_sg_entry
*sec4_sg
;
767 * ablkcipher_edesc - s/w-extended ablkcipher descriptor
768 * @src_nents: number of segments in input s/w scatterlist
769 * @dst_nents: number of segments in output s/w scatterlist
770 * @iv_dma: dma address of iv for checking continuity and link table
771 * @sec4_sg_bytes: length of dma mapped sec4_sg space
772 * @sec4_sg_dma: bus physical mapped address of h/w link table
773 * @sec4_sg: pointer to h/w link table
774 * @hw_desc: the h/w job descriptor followed by any referenced link tables
776 struct ablkcipher_edesc
{
781 dma_addr_t sec4_sg_dma
;
782 struct sec4_sg_entry
*sec4_sg
;
786 static void caam_unmap(struct device
*dev
, struct scatterlist
*src
,
787 struct scatterlist
*dst
, int src_nents
,
789 dma_addr_t iv_dma
, int ivsize
, dma_addr_t sec4_sg_dma
,
794 dma_unmap_sg(dev
, src
, src_nents
, DMA_TO_DEVICE
);
795 dma_unmap_sg(dev
, dst
, dst_nents
, DMA_FROM_DEVICE
);
797 dma_unmap_sg(dev
, src
, src_nents
, DMA_BIDIRECTIONAL
);
801 dma_unmap_single(dev
, iv_dma
, ivsize
, DMA_TO_DEVICE
);
803 dma_unmap_single(dev
, sec4_sg_dma
, sec4_sg_bytes
,
807 static void aead_unmap(struct device
*dev
,
808 struct aead_edesc
*edesc
,
809 struct aead_request
*req
)
811 caam_unmap(dev
, req
->src
, req
->dst
,
812 edesc
->src_nents
, edesc
->dst_nents
, 0, 0,
813 edesc
->sec4_sg_dma
, edesc
->sec4_sg_bytes
);
816 static void ablkcipher_unmap(struct device
*dev
,
817 struct ablkcipher_edesc
*edesc
,
818 struct ablkcipher_request
*req
)
820 struct crypto_ablkcipher
*ablkcipher
= crypto_ablkcipher_reqtfm(req
);
821 int ivsize
= crypto_ablkcipher_ivsize(ablkcipher
);
823 caam_unmap(dev
, req
->src
, req
->dst
,
824 edesc
->src_nents
, edesc
->dst_nents
,
825 edesc
->iv_dma
, ivsize
,
826 edesc
->sec4_sg_dma
, edesc
->sec4_sg_bytes
);
829 static void aead_encrypt_done(struct device
*jrdev
, u32
*desc
, u32 err
,
832 struct aead_request
*req
= context
;
833 struct aead_edesc
*edesc
;
836 dev_err(jrdev
, "%s %d: err 0x%x\n", __func__
, __LINE__
, err
);
839 edesc
= container_of(desc
, struct aead_edesc
, hw_desc
[0]);
842 caam_jr_strstatus(jrdev
, err
);
844 aead_unmap(jrdev
, edesc
, req
);
848 aead_request_complete(req
, err
);
851 static void aead_decrypt_done(struct device
*jrdev
, u32
*desc
, u32 err
,
854 struct aead_request
*req
= context
;
855 struct aead_edesc
*edesc
;
858 dev_err(jrdev
, "%s %d: err 0x%x\n", __func__
, __LINE__
, err
);
861 edesc
= container_of(desc
, struct aead_edesc
, hw_desc
[0]);
864 caam_jr_strstatus(jrdev
, err
);
866 aead_unmap(jrdev
, edesc
, req
);
869 * verify hw auth check passed else return -EBADMSG
871 if ((err
& JRSTA_CCBERR_ERRID_MASK
) == JRSTA_CCBERR_ERRID_ICVCHK
)
876 aead_request_complete(req
, err
);
879 static void ablkcipher_encrypt_done(struct device
*jrdev
, u32
*desc
, u32 err
,
882 struct ablkcipher_request
*req
= context
;
883 struct ablkcipher_edesc
*edesc
;
885 struct crypto_ablkcipher
*ablkcipher
= crypto_ablkcipher_reqtfm(req
);
886 int ivsize
= crypto_ablkcipher_ivsize(ablkcipher
);
888 dev_err(jrdev
, "%s %d: err 0x%x\n", __func__
, __LINE__
, err
);
891 edesc
= container_of(desc
, struct ablkcipher_edesc
, hw_desc
[0]);
894 caam_jr_strstatus(jrdev
, err
);
897 print_hex_dump(KERN_ERR
, "dstiv @"__stringify(__LINE__
)": ",
898 DUMP_PREFIX_ADDRESS
, 16, 4, req
->info
,
899 edesc
->src_nents
> 1 ? 100 : ivsize
, 1);
900 dbg_dump_sg(KERN_ERR
, "dst @"__stringify(__LINE__
)": ",
901 DUMP_PREFIX_ADDRESS
, 16, 4, req
->dst
,
902 edesc
->dst_nents
> 1 ? 100 : req
->nbytes
, 1);
905 ablkcipher_unmap(jrdev
, edesc
, req
);
908 ablkcipher_request_complete(req
, err
);
911 static void ablkcipher_decrypt_done(struct device
*jrdev
, u32
*desc
, u32 err
,
914 struct ablkcipher_request
*req
= context
;
915 struct ablkcipher_edesc
*edesc
;
917 struct crypto_ablkcipher
*ablkcipher
= crypto_ablkcipher_reqtfm(req
);
918 int ivsize
= crypto_ablkcipher_ivsize(ablkcipher
);
920 dev_err(jrdev
, "%s %d: err 0x%x\n", __func__
, __LINE__
, err
);
923 edesc
= container_of(desc
, struct ablkcipher_edesc
, hw_desc
[0]);
925 caam_jr_strstatus(jrdev
, err
);
928 print_hex_dump(KERN_ERR
, "dstiv @"__stringify(__LINE__
)": ",
929 DUMP_PREFIX_ADDRESS
, 16, 4, req
->info
,
931 dbg_dump_sg(KERN_ERR
, "dst @"__stringify(__LINE__
)": ",
932 DUMP_PREFIX_ADDRESS
, 16, 4, req
->dst
,
933 edesc
->dst_nents
> 1 ? 100 : req
->nbytes
, 1);
936 ablkcipher_unmap(jrdev
, edesc
, req
);
939 ablkcipher_request_complete(req
, err
);
943 * Fill in aead job descriptor
945 static void init_aead_job(struct aead_request
*req
,
946 struct aead_edesc
*edesc
,
947 bool all_contig
, bool encrypt
)
949 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
950 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
951 int authsize
= ctx
->authsize
;
952 u32
*desc
= edesc
->hw_desc
;
953 u32 out_options
, in_options
;
954 dma_addr_t dst_dma
, src_dma
;
955 int len
, sec4_sg_index
= 0;
959 sh_desc
= encrypt
? ctx
->sh_desc_enc
: ctx
->sh_desc_dec
;
960 ptr
= encrypt
? ctx
->sh_desc_enc_dma
: ctx
->sh_desc_dec_dma
;
962 len
= desc_len(sh_desc
);
963 init_job_desc_shared(desc
, ptr
, len
, HDR_SHARE_DEFER
| HDR_REVERSE
);
966 src_dma
= edesc
->src_nents
? sg_dma_address(req
->src
) : 0;
969 src_dma
= edesc
->sec4_sg_dma
;
970 sec4_sg_index
+= edesc
->src_nents
;
971 in_options
= LDST_SGF
;
974 append_seq_in_ptr(desc
, src_dma
, req
->assoclen
+ req
->cryptlen
,
978 out_options
= in_options
;
980 if (unlikely(req
->src
!= req
->dst
)) {
981 if (edesc
->dst_nents
== 1) {
982 dst_dma
= sg_dma_address(req
->dst
);
984 dst_dma
= edesc
->sec4_sg_dma
+
986 sizeof(struct sec4_sg_entry
);
987 out_options
= LDST_SGF
;
992 append_seq_out_ptr(desc
, dst_dma
,
993 req
->assoclen
+ req
->cryptlen
+ authsize
,
996 append_seq_out_ptr(desc
, dst_dma
,
997 req
->assoclen
+ req
->cryptlen
- authsize
,
1000 /* REG3 = assoclen */
1001 append_math_add_imm_u32(desc
, REG3
, ZERO
, IMM
, req
->assoclen
);
1004 static void init_gcm_job(struct aead_request
*req
,
1005 struct aead_edesc
*edesc
,
1006 bool all_contig
, bool encrypt
)
1008 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
1009 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
1010 unsigned int ivsize
= crypto_aead_ivsize(aead
);
1011 u32
*desc
= edesc
->hw_desc
;
1012 bool generic_gcm
= (ivsize
== 12);
1015 init_aead_job(req
, edesc
, all_contig
, encrypt
);
1017 /* BUG This should not be specific to generic GCM. */
1019 if (encrypt
&& generic_gcm
&& !(req
->assoclen
+ req
->cryptlen
))
1020 last
= FIFOLD_TYPE_LAST1
;
1023 append_cmd(desc
, CMD_FIFO_LOAD
| FIFOLD_CLASS_CLASS1
| IMMEDIATE
|
1024 FIFOLD_TYPE_IV
| FIFOLD_TYPE_FLUSH1
| 12 | last
);
1027 append_data(desc
, ctx
->key
+ ctx
->cdata
.keylen
, 4);
1029 append_data(desc
, req
->iv
, ivsize
);
1030 /* End of blank commands */
1033 static void init_authenc_job(struct aead_request
*req
,
1034 struct aead_edesc
*edesc
,
1035 bool all_contig
, bool encrypt
)
1037 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
1038 struct caam_aead_alg
*alg
= container_of(crypto_aead_alg(aead
),
1039 struct caam_aead_alg
, aead
);
1040 unsigned int ivsize
= crypto_aead_ivsize(aead
);
1041 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
1042 const bool ctr_mode
= ((ctx
->cdata
.algtype
& OP_ALG_AAI_MASK
) ==
1043 OP_ALG_AAI_CTR_MOD128
);
1044 const bool is_rfc3686
= alg
->caam
.rfc3686
;
1045 u32
*desc
= edesc
->hw_desc
;
1049 * AES-CTR needs to load IV in CONTEXT1 reg
1050 * at an offset of 128bits (16bytes)
1051 * CONTEXT1[255:128] = IV
1058 * CONTEXT1[255:128] = {NONCE, IV, COUNTER}
1061 ivoffset
= 16 + CTR_RFC3686_NONCE_SIZE
;
1063 init_aead_job(req
, edesc
, all_contig
, encrypt
);
1065 if (ivsize
&& ((is_rfc3686
&& encrypt
) || !alg
->caam
.geniv
))
1066 append_load_as_imm(desc
, req
->iv
, ivsize
,
1068 LDST_SRCDST_BYTE_CONTEXT
|
1069 (ivoffset
<< LDST_OFFSET_SHIFT
));
1073 * Fill in ablkcipher job descriptor
1075 static void init_ablkcipher_job(u32
*sh_desc
, dma_addr_t ptr
,
1076 struct ablkcipher_edesc
*edesc
,
1077 struct ablkcipher_request
*req
,
1080 struct crypto_ablkcipher
*ablkcipher
= crypto_ablkcipher_reqtfm(req
);
1081 int ivsize
= crypto_ablkcipher_ivsize(ablkcipher
);
1082 u32
*desc
= edesc
->hw_desc
;
1083 u32 out_options
= 0, in_options
;
1084 dma_addr_t dst_dma
, src_dma
;
1085 int len
, sec4_sg_index
= 0;
1088 print_hex_dump(KERN_ERR
, "presciv@"__stringify(__LINE__
)": ",
1089 DUMP_PREFIX_ADDRESS
, 16, 4, req
->info
,
1091 pr_err("asked=%d, nbytes%d\n",
1092 (int)edesc
->src_nents
> 1 ? 100 : req
->nbytes
, req
->nbytes
);
1093 dbg_dump_sg(KERN_ERR
, "src @"__stringify(__LINE__
)": ",
1094 DUMP_PREFIX_ADDRESS
, 16, 4, req
->src
,
1095 edesc
->src_nents
> 1 ? 100 : req
->nbytes
, 1);
1098 len
= desc_len(sh_desc
);
1099 init_job_desc_shared(desc
, ptr
, len
, HDR_SHARE_DEFER
| HDR_REVERSE
);
1102 src_dma
= edesc
->iv_dma
;
1105 src_dma
= edesc
->sec4_sg_dma
;
1106 sec4_sg_index
+= edesc
->src_nents
+ 1;
1107 in_options
= LDST_SGF
;
1109 append_seq_in_ptr(desc
, src_dma
, req
->nbytes
+ ivsize
, in_options
);
1111 if (likely(req
->src
== req
->dst
)) {
1112 if (edesc
->src_nents
== 1 && iv_contig
) {
1113 dst_dma
= sg_dma_address(req
->src
);
1115 dst_dma
= edesc
->sec4_sg_dma
+
1116 sizeof(struct sec4_sg_entry
);
1117 out_options
= LDST_SGF
;
1120 if (edesc
->dst_nents
== 1) {
1121 dst_dma
= sg_dma_address(req
->dst
);
1123 dst_dma
= edesc
->sec4_sg_dma
+
1124 sec4_sg_index
* sizeof(struct sec4_sg_entry
);
1125 out_options
= LDST_SGF
;
1128 append_seq_out_ptr(desc
, dst_dma
, req
->nbytes
, out_options
);
1132 * Fill in ablkcipher givencrypt job descriptor
1134 static void init_ablkcipher_giv_job(u32
*sh_desc
, dma_addr_t ptr
,
1135 struct ablkcipher_edesc
*edesc
,
1136 struct ablkcipher_request
*req
,
1139 struct crypto_ablkcipher
*ablkcipher
= crypto_ablkcipher_reqtfm(req
);
1140 int ivsize
= crypto_ablkcipher_ivsize(ablkcipher
);
1141 u32
*desc
= edesc
->hw_desc
;
1142 u32 out_options
, in_options
;
1143 dma_addr_t dst_dma
, src_dma
;
1144 int len
, sec4_sg_index
= 0;
1147 print_hex_dump(KERN_ERR
, "presciv@" __stringify(__LINE__
) ": ",
1148 DUMP_PREFIX_ADDRESS
, 16, 4, req
->info
,
1150 dbg_dump_sg(KERN_ERR
, "src @" __stringify(__LINE__
) ": ",
1151 DUMP_PREFIX_ADDRESS
, 16, 4, req
->src
,
1152 edesc
->src_nents
> 1 ? 100 : req
->nbytes
, 1);
1155 len
= desc_len(sh_desc
);
1156 init_job_desc_shared(desc
, ptr
, len
, HDR_SHARE_DEFER
| HDR_REVERSE
);
1158 if (edesc
->src_nents
== 1) {
1159 src_dma
= sg_dma_address(req
->src
);
1162 src_dma
= edesc
->sec4_sg_dma
;
1163 sec4_sg_index
+= edesc
->src_nents
;
1164 in_options
= LDST_SGF
;
1166 append_seq_in_ptr(desc
, src_dma
, req
->nbytes
, in_options
);
1169 dst_dma
= edesc
->iv_dma
;
1172 dst_dma
= edesc
->sec4_sg_dma
+
1173 sec4_sg_index
* sizeof(struct sec4_sg_entry
);
1174 out_options
= LDST_SGF
;
1176 append_seq_out_ptr(desc
, dst_dma
, req
->nbytes
+ ivsize
, out_options
);
1180 * allocate and map the aead extended descriptor
1182 static struct aead_edesc
*aead_edesc_alloc(struct aead_request
*req
,
1183 int desc_bytes
, bool *all_contig_ptr
,
1186 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
1187 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
1188 struct device
*jrdev
= ctx
->jrdev
;
1189 gfp_t flags
= (req
->base
.flags
& (CRYPTO_TFM_REQ_MAY_BACKLOG
|
1190 CRYPTO_TFM_REQ_MAY_SLEEP
)) ? GFP_KERNEL
: GFP_ATOMIC
;
1191 int src_nents
, mapped_src_nents
, dst_nents
= 0, mapped_dst_nents
= 0;
1192 struct aead_edesc
*edesc
;
1193 int sec4_sg_index
, sec4_sg_len
, sec4_sg_bytes
;
1194 unsigned int authsize
= ctx
->authsize
;
1196 if (unlikely(req
->dst
!= req
->src
)) {
1197 src_nents
= sg_nents_for_len(req
->src
, req
->assoclen
+
1199 if (unlikely(src_nents
< 0)) {
1200 dev_err(jrdev
, "Insufficient bytes (%d) in src S/G\n",
1201 req
->assoclen
+ req
->cryptlen
);
1202 return ERR_PTR(src_nents
);
1205 dst_nents
= sg_nents_for_len(req
->dst
, req
->assoclen
+
1207 (encrypt
? authsize
:
1209 if (unlikely(dst_nents
< 0)) {
1210 dev_err(jrdev
, "Insufficient bytes (%d) in dst S/G\n",
1211 req
->assoclen
+ req
->cryptlen
+
1212 (encrypt
? authsize
: (-authsize
)));
1213 return ERR_PTR(dst_nents
);
1216 src_nents
= sg_nents_for_len(req
->src
, req
->assoclen
+
1218 (encrypt
? authsize
: 0));
1219 if (unlikely(src_nents
< 0)) {
1220 dev_err(jrdev
, "Insufficient bytes (%d) in src S/G\n",
1221 req
->assoclen
+ req
->cryptlen
+
1222 (encrypt
? authsize
: 0));
1223 return ERR_PTR(src_nents
);
1227 if (likely(req
->src
== req
->dst
)) {
1228 mapped_src_nents
= dma_map_sg(jrdev
, req
->src
, src_nents
,
1230 if (unlikely(!mapped_src_nents
)) {
1231 dev_err(jrdev
, "unable to map source\n");
1232 return ERR_PTR(-ENOMEM
);
1235 /* Cover also the case of null (zero length) input data */
1237 mapped_src_nents
= dma_map_sg(jrdev
, req
->src
,
1238 src_nents
, DMA_TO_DEVICE
);
1239 if (unlikely(!mapped_src_nents
)) {
1240 dev_err(jrdev
, "unable to map source\n");
1241 return ERR_PTR(-ENOMEM
);
1244 mapped_src_nents
= 0;
1247 mapped_dst_nents
= dma_map_sg(jrdev
, req
->dst
, dst_nents
,
1249 if (unlikely(!mapped_dst_nents
)) {
1250 dev_err(jrdev
, "unable to map destination\n");
1251 dma_unmap_sg(jrdev
, req
->src
, src_nents
, DMA_TO_DEVICE
);
1252 return ERR_PTR(-ENOMEM
);
1256 sec4_sg_len
= mapped_src_nents
> 1 ? mapped_src_nents
: 0;
1257 sec4_sg_len
+= mapped_dst_nents
> 1 ? mapped_dst_nents
: 0;
1258 sec4_sg_bytes
= sec4_sg_len
* sizeof(struct sec4_sg_entry
);
1260 /* allocate space for base edesc and hw desc commands, link tables */
1261 edesc
= kzalloc(sizeof(*edesc
) + desc_bytes
+ sec4_sg_bytes
,
1264 caam_unmap(jrdev
, req
->src
, req
->dst
, src_nents
, dst_nents
, 0,
1266 return ERR_PTR(-ENOMEM
);
1269 edesc
->src_nents
= src_nents
;
1270 edesc
->dst_nents
= dst_nents
;
1271 edesc
->sec4_sg
= (void *)edesc
+ sizeof(struct aead_edesc
) +
1273 *all_contig_ptr
= !(mapped_src_nents
> 1);
1276 if (mapped_src_nents
> 1) {
1277 sg_to_sec4_sg_last(req
->src
, mapped_src_nents
,
1278 edesc
->sec4_sg
+ sec4_sg_index
, 0);
1279 sec4_sg_index
+= mapped_src_nents
;
1281 if (mapped_dst_nents
> 1) {
1282 sg_to_sec4_sg_last(req
->dst
, mapped_dst_nents
,
1283 edesc
->sec4_sg
+ sec4_sg_index
, 0);
1289 edesc
->sec4_sg_dma
= dma_map_single(jrdev
, edesc
->sec4_sg
,
1290 sec4_sg_bytes
, DMA_TO_DEVICE
);
1291 if (dma_mapping_error(jrdev
, edesc
->sec4_sg_dma
)) {
1292 dev_err(jrdev
, "unable to map S/G table\n");
1293 aead_unmap(jrdev
, edesc
, req
);
1295 return ERR_PTR(-ENOMEM
);
1298 edesc
->sec4_sg_bytes
= sec4_sg_bytes
;
1303 static int gcm_encrypt(struct aead_request
*req
)
1305 struct aead_edesc
*edesc
;
1306 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
1307 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
1308 struct device
*jrdev
= ctx
->jrdev
;
1313 /* allocate extended descriptor */
1314 edesc
= aead_edesc_alloc(req
, GCM_DESC_JOB_IO_LEN
, &all_contig
, true);
1316 return PTR_ERR(edesc
);
1318 /* Create and submit job descriptor */
1319 init_gcm_job(req
, edesc
, all_contig
, true);
1321 print_hex_dump(KERN_ERR
, "aead jobdesc@"__stringify(__LINE__
)": ",
1322 DUMP_PREFIX_ADDRESS
, 16, 4, edesc
->hw_desc
,
1323 desc_bytes(edesc
->hw_desc
), 1);
1326 desc
= edesc
->hw_desc
;
1327 ret
= caam_jr_enqueue(jrdev
, desc
, aead_encrypt_done
, req
);
1331 aead_unmap(jrdev
, edesc
, req
);
1338 static int ipsec_gcm_encrypt(struct aead_request
*req
)
1340 if (req
->assoclen
< 8)
1343 return gcm_encrypt(req
);
1346 static int aead_encrypt(struct aead_request
*req
)
1348 struct aead_edesc
*edesc
;
1349 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
1350 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
1351 struct device
*jrdev
= ctx
->jrdev
;
1356 /* allocate extended descriptor */
1357 edesc
= aead_edesc_alloc(req
, AUTHENC_DESC_JOB_IO_LEN
,
1360 return PTR_ERR(edesc
);
1362 /* Create and submit job descriptor */
1363 init_authenc_job(req
, edesc
, all_contig
, true);
1365 print_hex_dump(KERN_ERR
, "aead jobdesc@"__stringify(__LINE__
)": ",
1366 DUMP_PREFIX_ADDRESS
, 16, 4, edesc
->hw_desc
,
1367 desc_bytes(edesc
->hw_desc
), 1);
1370 desc
= edesc
->hw_desc
;
1371 ret
= caam_jr_enqueue(jrdev
, desc
, aead_encrypt_done
, req
);
1375 aead_unmap(jrdev
, edesc
, req
);
1382 static int gcm_decrypt(struct aead_request
*req
)
1384 struct aead_edesc
*edesc
;
1385 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
1386 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
1387 struct device
*jrdev
= ctx
->jrdev
;
1392 /* allocate extended descriptor */
1393 edesc
= aead_edesc_alloc(req
, GCM_DESC_JOB_IO_LEN
, &all_contig
, false);
1395 return PTR_ERR(edesc
);
1397 /* Create and submit job descriptor*/
1398 init_gcm_job(req
, edesc
, all_contig
, false);
1400 print_hex_dump(KERN_ERR
, "aead jobdesc@"__stringify(__LINE__
)": ",
1401 DUMP_PREFIX_ADDRESS
, 16, 4, edesc
->hw_desc
,
1402 desc_bytes(edesc
->hw_desc
), 1);
1405 desc
= edesc
->hw_desc
;
1406 ret
= caam_jr_enqueue(jrdev
, desc
, aead_decrypt_done
, req
);
1410 aead_unmap(jrdev
, edesc
, req
);
1417 static int ipsec_gcm_decrypt(struct aead_request
*req
)
1419 if (req
->assoclen
< 8)
1422 return gcm_decrypt(req
);
1425 static int aead_decrypt(struct aead_request
*req
)
1427 struct aead_edesc
*edesc
;
1428 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
1429 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
1430 struct device
*jrdev
= ctx
->jrdev
;
1436 dbg_dump_sg(KERN_ERR
, "dec src@"__stringify(__LINE__
)": ",
1437 DUMP_PREFIX_ADDRESS
, 16, 4, req
->src
,
1438 req
->assoclen
+ req
->cryptlen
, 1);
1441 /* allocate extended descriptor */
1442 edesc
= aead_edesc_alloc(req
, AUTHENC_DESC_JOB_IO_LEN
,
1443 &all_contig
, false);
1445 return PTR_ERR(edesc
);
1447 /* Create and submit job descriptor*/
1448 init_authenc_job(req
, edesc
, all_contig
, false);
1450 print_hex_dump(KERN_ERR
, "aead jobdesc@"__stringify(__LINE__
)": ",
1451 DUMP_PREFIX_ADDRESS
, 16, 4, edesc
->hw_desc
,
1452 desc_bytes(edesc
->hw_desc
), 1);
1455 desc
= edesc
->hw_desc
;
1456 ret
= caam_jr_enqueue(jrdev
, desc
, aead_decrypt_done
, req
);
1460 aead_unmap(jrdev
, edesc
, req
);
1468 * allocate and map the ablkcipher extended descriptor for ablkcipher
1470 static struct ablkcipher_edesc
*ablkcipher_edesc_alloc(struct ablkcipher_request
1471 *req
, int desc_bytes
,
1472 bool *iv_contig_out
)
1474 struct crypto_ablkcipher
*ablkcipher
= crypto_ablkcipher_reqtfm(req
);
1475 struct caam_ctx
*ctx
= crypto_ablkcipher_ctx(ablkcipher
);
1476 struct device
*jrdev
= ctx
->jrdev
;
1477 gfp_t flags
= (req
->base
.flags
& (CRYPTO_TFM_REQ_MAY_BACKLOG
|
1478 CRYPTO_TFM_REQ_MAY_SLEEP
)) ?
1479 GFP_KERNEL
: GFP_ATOMIC
;
1480 int src_nents
, mapped_src_nents
, dst_nents
= 0, mapped_dst_nents
= 0;
1481 struct ablkcipher_edesc
*edesc
;
1482 dma_addr_t iv_dma
= 0;
1484 int ivsize
= crypto_ablkcipher_ivsize(ablkcipher
);
1485 int dst_sg_idx
, sec4_sg_ents
, sec4_sg_bytes
;
1487 src_nents
= sg_nents_for_len(req
->src
, req
->nbytes
);
1488 if (unlikely(src_nents
< 0)) {
1489 dev_err(jrdev
, "Insufficient bytes (%d) in src S/G\n",
1491 return ERR_PTR(src_nents
);
1494 if (req
->dst
!= req
->src
) {
1495 dst_nents
= sg_nents_for_len(req
->dst
, req
->nbytes
);
1496 if (unlikely(dst_nents
< 0)) {
1497 dev_err(jrdev
, "Insufficient bytes (%d) in dst S/G\n",
1499 return ERR_PTR(dst_nents
);
1503 if (likely(req
->src
== req
->dst
)) {
1504 mapped_src_nents
= dma_map_sg(jrdev
, req
->src
, src_nents
,
1506 if (unlikely(!mapped_src_nents
)) {
1507 dev_err(jrdev
, "unable to map source\n");
1508 return ERR_PTR(-ENOMEM
);
1511 mapped_src_nents
= dma_map_sg(jrdev
, req
->src
, src_nents
,
1513 if (unlikely(!mapped_src_nents
)) {
1514 dev_err(jrdev
, "unable to map source\n");
1515 return ERR_PTR(-ENOMEM
);
1518 mapped_dst_nents
= dma_map_sg(jrdev
, req
->dst
, dst_nents
,
1520 if (unlikely(!mapped_dst_nents
)) {
1521 dev_err(jrdev
, "unable to map destination\n");
1522 dma_unmap_sg(jrdev
, req
->src
, src_nents
, DMA_TO_DEVICE
);
1523 return ERR_PTR(-ENOMEM
);
1527 iv_dma
= dma_map_single(jrdev
, req
->info
, ivsize
, DMA_TO_DEVICE
);
1528 if (dma_mapping_error(jrdev
, iv_dma
)) {
1529 dev_err(jrdev
, "unable to map IV\n");
1530 caam_unmap(jrdev
, req
->src
, req
->dst
, src_nents
, dst_nents
, 0,
1532 return ERR_PTR(-ENOMEM
);
1535 if (mapped_src_nents
== 1 &&
1536 iv_dma
+ ivsize
== sg_dma_address(req
->src
)) {
1541 sec4_sg_ents
= 1 + mapped_src_nents
;
1543 dst_sg_idx
= sec4_sg_ents
;
1544 sec4_sg_ents
+= mapped_dst_nents
> 1 ? mapped_dst_nents
: 0;
1545 sec4_sg_bytes
= sec4_sg_ents
* sizeof(struct sec4_sg_entry
);
1547 /* allocate space for base edesc and hw desc commands, link tables */
1548 edesc
= kzalloc(sizeof(*edesc
) + desc_bytes
+ sec4_sg_bytes
,
1551 dev_err(jrdev
, "could not allocate extended descriptor\n");
1552 caam_unmap(jrdev
, req
->src
, req
->dst
, src_nents
, dst_nents
,
1553 iv_dma
, ivsize
, 0, 0);
1554 return ERR_PTR(-ENOMEM
);
1557 edesc
->src_nents
= src_nents
;
1558 edesc
->dst_nents
= dst_nents
;
1559 edesc
->sec4_sg_bytes
= sec4_sg_bytes
;
1560 edesc
->sec4_sg
= (void *)edesc
+ sizeof(struct ablkcipher_edesc
) +
1564 dma_to_sec4_sg_one(edesc
->sec4_sg
, iv_dma
, ivsize
, 0);
1565 sg_to_sec4_sg_last(req
->src
, mapped_src_nents
,
1566 edesc
->sec4_sg
+ 1, 0);
1569 if (mapped_dst_nents
> 1) {
1570 sg_to_sec4_sg_last(req
->dst
, mapped_dst_nents
,
1571 edesc
->sec4_sg
+ dst_sg_idx
, 0);
1574 edesc
->sec4_sg_dma
= dma_map_single(jrdev
, edesc
->sec4_sg
,
1575 sec4_sg_bytes
, DMA_TO_DEVICE
);
1576 if (dma_mapping_error(jrdev
, edesc
->sec4_sg_dma
)) {
1577 dev_err(jrdev
, "unable to map S/G table\n");
1578 caam_unmap(jrdev
, req
->src
, req
->dst
, src_nents
, dst_nents
,
1579 iv_dma
, ivsize
, 0, 0);
1581 return ERR_PTR(-ENOMEM
);
1584 edesc
->iv_dma
= iv_dma
;
1587 print_hex_dump(KERN_ERR
, "ablkcipher sec4_sg@"__stringify(__LINE__
)": ",
1588 DUMP_PREFIX_ADDRESS
, 16, 4, edesc
->sec4_sg
,
1592 *iv_contig_out
= in_contig
;
1596 static int ablkcipher_encrypt(struct ablkcipher_request
*req
)
1598 struct ablkcipher_edesc
*edesc
;
1599 struct crypto_ablkcipher
*ablkcipher
= crypto_ablkcipher_reqtfm(req
);
1600 struct caam_ctx
*ctx
= crypto_ablkcipher_ctx(ablkcipher
);
1601 struct device
*jrdev
= ctx
->jrdev
;
1606 /* allocate extended descriptor */
1607 edesc
= ablkcipher_edesc_alloc(req
, DESC_JOB_IO_LEN
*
1608 CAAM_CMD_SZ
, &iv_contig
);
1610 return PTR_ERR(edesc
);
1612 /* Create and submit job descriptor*/
1613 init_ablkcipher_job(ctx
->sh_desc_enc
,
1614 ctx
->sh_desc_enc_dma
, edesc
, req
, iv_contig
);
1616 print_hex_dump(KERN_ERR
, "ablkcipher jobdesc@"__stringify(__LINE__
)": ",
1617 DUMP_PREFIX_ADDRESS
, 16, 4, edesc
->hw_desc
,
1618 desc_bytes(edesc
->hw_desc
), 1);
1620 desc
= edesc
->hw_desc
;
1621 ret
= caam_jr_enqueue(jrdev
, desc
, ablkcipher_encrypt_done
, req
);
1626 ablkcipher_unmap(jrdev
, edesc
, req
);
1633 static int ablkcipher_decrypt(struct ablkcipher_request
*req
)
1635 struct ablkcipher_edesc
*edesc
;
1636 struct crypto_ablkcipher
*ablkcipher
= crypto_ablkcipher_reqtfm(req
);
1637 struct caam_ctx
*ctx
= crypto_ablkcipher_ctx(ablkcipher
);
1638 struct device
*jrdev
= ctx
->jrdev
;
1643 /* allocate extended descriptor */
1644 edesc
= ablkcipher_edesc_alloc(req
, DESC_JOB_IO_LEN
*
1645 CAAM_CMD_SZ
, &iv_contig
);
1647 return PTR_ERR(edesc
);
1649 /* Create and submit job descriptor*/
1650 init_ablkcipher_job(ctx
->sh_desc_dec
,
1651 ctx
->sh_desc_dec_dma
, edesc
, req
, iv_contig
);
1652 desc
= edesc
->hw_desc
;
1654 print_hex_dump(KERN_ERR
, "ablkcipher jobdesc@"__stringify(__LINE__
)": ",
1655 DUMP_PREFIX_ADDRESS
, 16, 4, edesc
->hw_desc
,
1656 desc_bytes(edesc
->hw_desc
), 1);
1659 ret
= caam_jr_enqueue(jrdev
, desc
, ablkcipher_decrypt_done
, req
);
1663 ablkcipher_unmap(jrdev
, edesc
, req
);
1671 * allocate and map the ablkcipher extended descriptor
1672 * for ablkcipher givencrypt
1674 static struct ablkcipher_edesc
*ablkcipher_giv_edesc_alloc(
1675 struct skcipher_givcrypt_request
*greq
,
1677 bool *iv_contig_out
)
1679 struct ablkcipher_request
*req
= &greq
->creq
;
1680 struct crypto_ablkcipher
*ablkcipher
= crypto_ablkcipher_reqtfm(req
);
1681 struct caam_ctx
*ctx
= crypto_ablkcipher_ctx(ablkcipher
);
1682 struct device
*jrdev
= ctx
->jrdev
;
1683 gfp_t flags
= (req
->base
.flags
& (CRYPTO_TFM_REQ_MAY_BACKLOG
|
1684 CRYPTO_TFM_REQ_MAY_SLEEP
)) ?
1685 GFP_KERNEL
: GFP_ATOMIC
;
1686 int src_nents
, mapped_src_nents
, dst_nents
, mapped_dst_nents
;
1687 struct ablkcipher_edesc
*edesc
;
1688 dma_addr_t iv_dma
= 0;
1690 int ivsize
= crypto_ablkcipher_ivsize(ablkcipher
);
1691 int dst_sg_idx
, sec4_sg_ents
, sec4_sg_bytes
;
1693 src_nents
= sg_nents_for_len(req
->src
, req
->nbytes
);
1694 if (unlikely(src_nents
< 0)) {
1695 dev_err(jrdev
, "Insufficient bytes (%d) in src S/G\n",
1697 return ERR_PTR(src_nents
);
1700 if (likely(req
->src
== req
->dst
)) {
1701 mapped_src_nents
= dma_map_sg(jrdev
, req
->src
, src_nents
,
1703 if (unlikely(!mapped_src_nents
)) {
1704 dev_err(jrdev
, "unable to map source\n");
1705 return ERR_PTR(-ENOMEM
);
1708 dst_nents
= src_nents
;
1709 mapped_dst_nents
= src_nents
;
1711 mapped_src_nents
= dma_map_sg(jrdev
, req
->src
, src_nents
,
1713 if (unlikely(!mapped_src_nents
)) {
1714 dev_err(jrdev
, "unable to map source\n");
1715 return ERR_PTR(-ENOMEM
);
1718 dst_nents
= sg_nents_for_len(req
->dst
, req
->nbytes
);
1719 if (unlikely(dst_nents
< 0)) {
1720 dev_err(jrdev
, "Insufficient bytes (%d) in dst S/G\n",
1722 return ERR_PTR(dst_nents
);
1725 mapped_dst_nents
= dma_map_sg(jrdev
, req
->dst
, dst_nents
,
1727 if (unlikely(!mapped_dst_nents
)) {
1728 dev_err(jrdev
, "unable to map destination\n");
1729 dma_unmap_sg(jrdev
, req
->src
, src_nents
, DMA_TO_DEVICE
);
1730 return ERR_PTR(-ENOMEM
);
1735 * Check if iv can be contiguous with source and destination.
1736 * If so, include it. If not, create scatterlist.
1738 iv_dma
= dma_map_single(jrdev
, greq
->giv
, ivsize
, DMA_TO_DEVICE
);
1739 if (dma_mapping_error(jrdev
, iv_dma
)) {
1740 dev_err(jrdev
, "unable to map IV\n");
1741 caam_unmap(jrdev
, req
->src
, req
->dst
, src_nents
, dst_nents
, 0,
1743 return ERR_PTR(-ENOMEM
);
1746 sec4_sg_ents
= mapped_src_nents
> 1 ? mapped_src_nents
: 0;
1747 dst_sg_idx
= sec4_sg_ents
;
1748 if (mapped_dst_nents
== 1 &&
1749 iv_dma
+ ivsize
== sg_dma_address(req
->dst
)) {
1753 sec4_sg_ents
+= 1 + mapped_dst_nents
;
1756 /* allocate space for base edesc and hw desc commands, link tables */
1757 sec4_sg_bytes
= sec4_sg_ents
* sizeof(struct sec4_sg_entry
);
1758 edesc
= kzalloc(sizeof(*edesc
) + desc_bytes
+ sec4_sg_bytes
,
1761 dev_err(jrdev
, "could not allocate extended descriptor\n");
1762 caam_unmap(jrdev
, req
->src
, req
->dst
, src_nents
, dst_nents
,
1763 iv_dma
, ivsize
, 0, 0);
1764 return ERR_PTR(-ENOMEM
);
1767 edesc
->src_nents
= src_nents
;
1768 edesc
->dst_nents
= dst_nents
;
1769 edesc
->sec4_sg_bytes
= sec4_sg_bytes
;
1770 edesc
->sec4_sg
= (void *)edesc
+ sizeof(struct ablkcipher_edesc
) +
1773 if (mapped_src_nents
> 1)
1774 sg_to_sec4_sg_last(req
->src
, mapped_src_nents
, edesc
->sec4_sg
,
1778 dma_to_sec4_sg_one(edesc
->sec4_sg
+ dst_sg_idx
,
1780 sg_to_sec4_sg_last(req
->dst
, mapped_dst_nents
,
1781 edesc
->sec4_sg
+ dst_sg_idx
+ 1, 0);
1784 edesc
->sec4_sg_dma
= dma_map_single(jrdev
, edesc
->sec4_sg
,
1785 sec4_sg_bytes
, DMA_TO_DEVICE
);
1786 if (dma_mapping_error(jrdev
, edesc
->sec4_sg_dma
)) {
1787 dev_err(jrdev
, "unable to map S/G table\n");
1788 caam_unmap(jrdev
, req
->src
, req
->dst
, src_nents
, dst_nents
,
1789 iv_dma
, ivsize
, 0, 0);
1791 return ERR_PTR(-ENOMEM
);
1793 edesc
->iv_dma
= iv_dma
;
1796 print_hex_dump(KERN_ERR
,
1797 "ablkcipher sec4_sg@" __stringify(__LINE__
) ": ",
1798 DUMP_PREFIX_ADDRESS
, 16, 4, edesc
->sec4_sg
,
1802 *iv_contig_out
= out_contig
;
1806 static int ablkcipher_givencrypt(struct skcipher_givcrypt_request
*creq
)
1808 struct ablkcipher_request
*req
= &creq
->creq
;
1809 struct ablkcipher_edesc
*edesc
;
1810 struct crypto_ablkcipher
*ablkcipher
= crypto_ablkcipher_reqtfm(req
);
1811 struct caam_ctx
*ctx
= crypto_ablkcipher_ctx(ablkcipher
);
1812 struct device
*jrdev
= ctx
->jrdev
;
1813 bool iv_contig
= false;
1817 /* allocate extended descriptor */
1818 edesc
= ablkcipher_giv_edesc_alloc(creq
, DESC_JOB_IO_LEN
*
1819 CAAM_CMD_SZ
, &iv_contig
);
1821 return PTR_ERR(edesc
);
1823 /* Create and submit job descriptor*/
1824 init_ablkcipher_giv_job(ctx
->sh_desc_givenc
, ctx
->sh_desc_givenc_dma
,
1825 edesc
, req
, iv_contig
);
1827 print_hex_dump(KERN_ERR
,
1828 "ablkcipher jobdesc@" __stringify(__LINE__
) ": ",
1829 DUMP_PREFIX_ADDRESS
, 16, 4, edesc
->hw_desc
,
1830 desc_bytes(edesc
->hw_desc
), 1);
1832 desc
= edesc
->hw_desc
;
1833 ret
= caam_jr_enqueue(jrdev
, desc
, ablkcipher_encrypt_done
, req
);
1838 ablkcipher_unmap(jrdev
, edesc
, req
);
1845 #define template_aead template_u.aead
1846 #define template_ablkcipher template_u.ablkcipher
1847 struct caam_alg_template
{
1848 char name
[CRYPTO_MAX_ALG_NAME
];
1849 char driver_name
[CRYPTO_MAX_ALG_NAME
];
1850 unsigned int blocksize
;
1853 struct ablkcipher_alg ablkcipher
;
1855 u32 class1_alg_type
;
1856 u32 class2_alg_type
;
1859 static struct caam_alg_template driver_algs
[] = {
1860 /* ablkcipher descriptor */
1863 .driver_name
= "cbc-aes-caam",
1864 .blocksize
= AES_BLOCK_SIZE
,
1865 .type
= CRYPTO_ALG_TYPE_GIVCIPHER
,
1866 .template_ablkcipher
= {
1867 .setkey
= ablkcipher_setkey
,
1868 .encrypt
= ablkcipher_encrypt
,
1869 .decrypt
= ablkcipher_decrypt
,
1870 .givencrypt
= ablkcipher_givencrypt
,
1871 .geniv
= "<built-in>",
1872 .min_keysize
= AES_MIN_KEY_SIZE
,
1873 .max_keysize
= AES_MAX_KEY_SIZE
,
1874 .ivsize
= AES_BLOCK_SIZE
,
1876 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CBC
,
1879 .name
= "cbc(des3_ede)",
1880 .driver_name
= "cbc-3des-caam",
1881 .blocksize
= DES3_EDE_BLOCK_SIZE
,
1882 .type
= CRYPTO_ALG_TYPE_GIVCIPHER
,
1883 .template_ablkcipher
= {
1884 .setkey
= ablkcipher_setkey
,
1885 .encrypt
= ablkcipher_encrypt
,
1886 .decrypt
= ablkcipher_decrypt
,
1887 .givencrypt
= ablkcipher_givencrypt
,
1888 .geniv
= "<built-in>",
1889 .min_keysize
= DES3_EDE_KEY_SIZE
,
1890 .max_keysize
= DES3_EDE_KEY_SIZE
,
1891 .ivsize
= DES3_EDE_BLOCK_SIZE
,
1893 .class1_alg_type
= OP_ALG_ALGSEL_3DES
| OP_ALG_AAI_CBC
,
1897 .driver_name
= "cbc-des-caam",
1898 .blocksize
= DES_BLOCK_SIZE
,
1899 .type
= CRYPTO_ALG_TYPE_GIVCIPHER
,
1900 .template_ablkcipher
= {
1901 .setkey
= ablkcipher_setkey
,
1902 .encrypt
= ablkcipher_encrypt
,
1903 .decrypt
= ablkcipher_decrypt
,
1904 .givencrypt
= ablkcipher_givencrypt
,
1905 .geniv
= "<built-in>",
1906 .min_keysize
= DES_KEY_SIZE
,
1907 .max_keysize
= DES_KEY_SIZE
,
1908 .ivsize
= DES_BLOCK_SIZE
,
1910 .class1_alg_type
= OP_ALG_ALGSEL_DES
| OP_ALG_AAI_CBC
,
1914 .driver_name
= "ctr-aes-caam",
1916 .type
= CRYPTO_ALG_TYPE_ABLKCIPHER
,
1917 .template_ablkcipher
= {
1918 .setkey
= ablkcipher_setkey
,
1919 .encrypt
= ablkcipher_encrypt
,
1920 .decrypt
= ablkcipher_decrypt
,
1922 .min_keysize
= AES_MIN_KEY_SIZE
,
1923 .max_keysize
= AES_MAX_KEY_SIZE
,
1924 .ivsize
= AES_BLOCK_SIZE
,
1926 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CTR_MOD128
,
1929 .name
= "rfc3686(ctr(aes))",
1930 .driver_name
= "rfc3686-ctr-aes-caam",
1932 .type
= CRYPTO_ALG_TYPE_GIVCIPHER
,
1933 .template_ablkcipher
= {
1934 .setkey
= ablkcipher_setkey
,
1935 .encrypt
= ablkcipher_encrypt
,
1936 .decrypt
= ablkcipher_decrypt
,
1937 .givencrypt
= ablkcipher_givencrypt
,
1938 .geniv
= "<built-in>",
1939 .min_keysize
= AES_MIN_KEY_SIZE
+
1940 CTR_RFC3686_NONCE_SIZE
,
1941 .max_keysize
= AES_MAX_KEY_SIZE
+
1942 CTR_RFC3686_NONCE_SIZE
,
1943 .ivsize
= CTR_RFC3686_IV_SIZE
,
1945 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CTR_MOD128
,
1949 .driver_name
= "xts-aes-caam",
1950 .blocksize
= AES_BLOCK_SIZE
,
1951 .type
= CRYPTO_ALG_TYPE_ABLKCIPHER
,
1952 .template_ablkcipher
= {
1953 .setkey
= xts_ablkcipher_setkey
,
1954 .encrypt
= ablkcipher_encrypt
,
1955 .decrypt
= ablkcipher_decrypt
,
1957 .min_keysize
= 2 * AES_MIN_KEY_SIZE
,
1958 .max_keysize
= 2 * AES_MAX_KEY_SIZE
,
1959 .ivsize
= AES_BLOCK_SIZE
,
1961 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_XTS
,
1965 static struct caam_aead_alg driver_aeads
[] = {
1969 .cra_name
= "rfc4106(gcm(aes))",
1970 .cra_driver_name
= "rfc4106-gcm-aes-caam",
1973 .setkey
= rfc4106_setkey
,
1974 .setauthsize
= rfc4106_setauthsize
,
1975 .encrypt
= ipsec_gcm_encrypt
,
1976 .decrypt
= ipsec_gcm_decrypt
,
1978 .maxauthsize
= AES_BLOCK_SIZE
,
1981 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_GCM
,
1987 .cra_name
= "rfc4543(gcm(aes))",
1988 .cra_driver_name
= "rfc4543-gcm-aes-caam",
1991 .setkey
= rfc4543_setkey
,
1992 .setauthsize
= rfc4543_setauthsize
,
1993 .encrypt
= ipsec_gcm_encrypt
,
1994 .decrypt
= ipsec_gcm_decrypt
,
1996 .maxauthsize
= AES_BLOCK_SIZE
,
1999 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_GCM
,
2002 /* Galois Counter Mode */
2006 .cra_name
= "gcm(aes)",
2007 .cra_driver_name
= "gcm-aes-caam",
2010 .setkey
= gcm_setkey
,
2011 .setauthsize
= gcm_setauthsize
,
2012 .encrypt
= gcm_encrypt
,
2013 .decrypt
= gcm_decrypt
,
2015 .maxauthsize
= AES_BLOCK_SIZE
,
2018 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_GCM
,
2021 /* single-pass ipsec_esp descriptor */
2025 .cra_name
= "authenc(hmac(md5),"
2026 "ecb(cipher_null))",
2027 .cra_driver_name
= "authenc-hmac-md5-"
2028 "ecb-cipher_null-caam",
2029 .cra_blocksize
= NULL_BLOCK_SIZE
,
2031 .setkey
= aead_setkey
,
2032 .setauthsize
= aead_setauthsize
,
2033 .encrypt
= aead_encrypt
,
2034 .decrypt
= aead_decrypt
,
2035 .ivsize
= NULL_IV_SIZE
,
2036 .maxauthsize
= MD5_DIGEST_SIZE
,
2039 .class2_alg_type
= OP_ALG_ALGSEL_MD5
|
2040 OP_ALG_AAI_HMAC_PRECOMP
,
2046 .cra_name
= "authenc(hmac(sha1),"
2047 "ecb(cipher_null))",
2048 .cra_driver_name
= "authenc-hmac-sha1-"
2049 "ecb-cipher_null-caam",
2050 .cra_blocksize
= NULL_BLOCK_SIZE
,
2052 .setkey
= aead_setkey
,
2053 .setauthsize
= aead_setauthsize
,
2054 .encrypt
= aead_encrypt
,
2055 .decrypt
= aead_decrypt
,
2056 .ivsize
= NULL_IV_SIZE
,
2057 .maxauthsize
= SHA1_DIGEST_SIZE
,
2060 .class2_alg_type
= OP_ALG_ALGSEL_SHA1
|
2061 OP_ALG_AAI_HMAC_PRECOMP
,
2067 .cra_name
= "authenc(hmac(sha224),"
2068 "ecb(cipher_null))",
2069 .cra_driver_name
= "authenc-hmac-sha224-"
2070 "ecb-cipher_null-caam",
2071 .cra_blocksize
= NULL_BLOCK_SIZE
,
2073 .setkey
= aead_setkey
,
2074 .setauthsize
= aead_setauthsize
,
2075 .encrypt
= aead_encrypt
,
2076 .decrypt
= aead_decrypt
,
2077 .ivsize
= NULL_IV_SIZE
,
2078 .maxauthsize
= SHA224_DIGEST_SIZE
,
2081 .class2_alg_type
= OP_ALG_ALGSEL_SHA224
|
2082 OP_ALG_AAI_HMAC_PRECOMP
,
2088 .cra_name
= "authenc(hmac(sha256),"
2089 "ecb(cipher_null))",
2090 .cra_driver_name
= "authenc-hmac-sha256-"
2091 "ecb-cipher_null-caam",
2092 .cra_blocksize
= NULL_BLOCK_SIZE
,
2094 .setkey
= aead_setkey
,
2095 .setauthsize
= aead_setauthsize
,
2096 .encrypt
= aead_encrypt
,
2097 .decrypt
= aead_decrypt
,
2098 .ivsize
= NULL_IV_SIZE
,
2099 .maxauthsize
= SHA256_DIGEST_SIZE
,
2102 .class2_alg_type
= OP_ALG_ALGSEL_SHA256
|
2103 OP_ALG_AAI_HMAC_PRECOMP
,
2109 .cra_name
= "authenc(hmac(sha384),"
2110 "ecb(cipher_null))",
2111 .cra_driver_name
= "authenc-hmac-sha384-"
2112 "ecb-cipher_null-caam",
2113 .cra_blocksize
= NULL_BLOCK_SIZE
,
2115 .setkey
= aead_setkey
,
2116 .setauthsize
= aead_setauthsize
,
2117 .encrypt
= aead_encrypt
,
2118 .decrypt
= aead_decrypt
,
2119 .ivsize
= NULL_IV_SIZE
,
2120 .maxauthsize
= SHA384_DIGEST_SIZE
,
2123 .class2_alg_type
= OP_ALG_ALGSEL_SHA384
|
2124 OP_ALG_AAI_HMAC_PRECOMP
,
2130 .cra_name
= "authenc(hmac(sha512),"
2131 "ecb(cipher_null))",
2132 .cra_driver_name
= "authenc-hmac-sha512-"
2133 "ecb-cipher_null-caam",
2134 .cra_blocksize
= NULL_BLOCK_SIZE
,
2136 .setkey
= aead_setkey
,
2137 .setauthsize
= aead_setauthsize
,
2138 .encrypt
= aead_encrypt
,
2139 .decrypt
= aead_decrypt
,
2140 .ivsize
= NULL_IV_SIZE
,
2141 .maxauthsize
= SHA512_DIGEST_SIZE
,
2144 .class2_alg_type
= OP_ALG_ALGSEL_SHA512
|
2145 OP_ALG_AAI_HMAC_PRECOMP
,
2151 .cra_name
= "authenc(hmac(md5),cbc(aes))",
2152 .cra_driver_name
= "authenc-hmac-md5-"
2154 .cra_blocksize
= AES_BLOCK_SIZE
,
2156 .setkey
= aead_setkey
,
2157 .setauthsize
= aead_setauthsize
,
2158 .encrypt
= aead_encrypt
,
2159 .decrypt
= aead_decrypt
,
2160 .ivsize
= AES_BLOCK_SIZE
,
2161 .maxauthsize
= MD5_DIGEST_SIZE
,
2164 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CBC
,
2165 .class2_alg_type
= OP_ALG_ALGSEL_MD5
|
2166 OP_ALG_AAI_HMAC_PRECOMP
,
2172 .cra_name
= "echainiv(authenc(hmac(md5),"
2174 .cra_driver_name
= "echainiv-authenc-hmac-md5-"
2176 .cra_blocksize
= AES_BLOCK_SIZE
,
2178 .setkey
= aead_setkey
,
2179 .setauthsize
= aead_setauthsize
,
2180 .encrypt
= aead_encrypt
,
2181 .decrypt
= aead_decrypt
,
2182 .ivsize
= AES_BLOCK_SIZE
,
2183 .maxauthsize
= MD5_DIGEST_SIZE
,
2186 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CBC
,
2187 .class2_alg_type
= OP_ALG_ALGSEL_MD5
|
2188 OP_ALG_AAI_HMAC_PRECOMP
,
2195 .cra_name
= "authenc(hmac(sha1),cbc(aes))",
2196 .cra_driver_name
= "authenc-hmac-sha1-"
2198 .cra_blocksize
= AES_BLOCK_SIZE
,
2200 .setkey
= aead_setkey
,
2201 .setauthsize
= aead_setauthsize
,
2202 .encrypt
= aead_encrypt
,
2203 .decrypt
= aead_decrypt
,
2204 .ivsize
= AES_BLOCK_SIZE
,
2205 .maxauthsize
= SHA1_DIGEST_SIZE
,
2208 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CBC
,
2209 .class2_alg_type
= OP_ALG_ALGSEL_SHA1
|
2210 OP_ALG_AAI_HMAC_PRECOMP
,
2216 .cra_name
= "echainiv(authenc(hmac(sha1),"
2218 .cra_driver_name
= "echainiv-authenc-"
2219 "hmac-sha1-cbc-aes-caam",
2220 .cra_blocksize
= AES_BLOCK_SIZE
,
2222 .setkey
= aead_setkey
,
2223 .setauthsize
= aead_setauthsize
,
2224 .encrypt
= aead_encrypt
,
2225 .decrypt
= aead_decrypt
,
2226 .ivsize
= AES_BLOCK_SIZE
,
2227 .maxauthsize
= SHA1_DIGEST_SIZE
,
2230 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CBC
,
2231 .class2_alg_type
= OP_ALG_ALGSEL_SHA1
|
2232 OP_ALG_AAI_HMAC_PRECOMP
,
2239 .cra_name
= "authenc(hmac(sha224),cbc(aes))",
2240 .cra_driver_name
= "authenc-hmac-sha224-"
2242 .cra_blocksize
= AES_BLOCK_SIZE
,
2244 .setkey
= aead_setkey
,
2245 .setauthsize
= aead_setauthsize
,
2246 .encrypt
= aead_encrypt
,
2247 .decrypt
= aead_decrypt
,
2248 .ivsize
= AES_BLOCK_SIZE
,
2249 .maxauthsize
= SHA224_DIGEST_SIZE
,
2252 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CBC
,
2253 .class2_alg_type
= OP_ALG_ALGSEL_SHA224
|
2254 OP_ALG_AAI_HMAC_PRECOMP
,
2260 .cra_name
= "echainiv(authenc(hmac(sha224),"
2262 .cra_driver_name
= "echainiv-authenc-"
2263 "hmac-sha224-cbc-aes-caam",
2264 .cra_blocksize
= AES_BLOCK_SIZE
,
2266 .setkey
= aead_setkey
,
2267 .setauthsize
= aead_setauthsize
,
2268 .encrypt
= aead_encrypt
,
2269 .decrypt
= aead_decrypt
,
2270 .ivsize
= AES_BLOCK_SIZE
,
2271 .maxauthsize
= SHA224_DIGEST_SIZE
,
2274 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CBC
,
2275 .class2_alg_type
= OP_ALG_ALGSEL_SHA224
|
2276 OP_ALG_AAI_HMAC_PRECOMP
,
2283 .cra_name
= "authenc(hmac(sha256),cbc(aes))",
2284 .cra_driver_name
= "authenc-hmac-sha256-"
2286 .cra_blocksize
= AES_BLOCK_SIZE
,
2288 .setkey
= aead_setkey
,
2289 .setauthsize
= aead_setauthsize
,
2290 .encrypt
= aead_encrypt
,
2291 .decrypt
= aead_decrypt
,
2292 .ivsize
= AES_BLOCK_SIZE
,
2293 .maxauthsize
= SHA256_DIGEST_SIZE
,
2296 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CBC
,
2297 .class2_alg_type
= OP_ALG_ALGSEL_SHA256
|
2298 OP_ALG_AAI_HMAC_PRECOMP
,
2304 .cra_name
= "echainiv(authenc(hmac(sha256),"
2306 .cra_driver_name
= "echainiv-authenc-"
2307 "hmac-sha256-cbc-aes-caam",
2308 .cra_blocksize
= AES_BLOCK_SIZE
,
2310 .setkey
= aead_setkey
,
2311 .setauthsize
= aead_setauthsize
,
2312 .encrypt
= aead_encrypt
,
2313 .decrypt
= aead_decrypt
,
2314 .ivsize
= AES_BLOCK_SIZE
,
2315 .maxauthsize
= SHA256_DIGEST_SIZE
,
2318 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CBC
,
2319 .class2_alg_type
= OP_ALG_ALGSEL_SHA256
|
2320 OP_ALG_AAI_HMAC_PRECOMP
,
2327 .cra_name
= "authenc(hmac(sha384),cbc(aes))",
2328 .cra_driver_name
= "authenc-hmac-sha384-"
2330 .cra_blocksize
= AES_BLOCK_SIZE
,
2332 .setkey
= aead_setkey
,
2333 .setauthsize
= aead_setauthsize
,
2334 .encrypt
= aead_encrypt
,
2335 .decrypt
= aead_decrypt
,
2336 .ivsize
= AES_BLOCK_SIZE
,
2337 .maxauthsize
= SHA384_DIGEST_SIZE
,
2340 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CBC
,
2341 .class2_alg_type
= OP_ALG_ALGSEL_SHA384
|
2342 OP_ALG_AAI_HMAC_PRECOMP
,
2348 .cra_name
= "echainiv(authenc(hmac(sha384),"
2350 .cra_driver_name
= "echainiv-authenc-"
2351 "hmac-sha384-cbc-aes-caam",
2352 .cra_blocksize
= AES_BLOCK_SIZE
,
2354 .setkey
= aead_setkey
,
2355 .setauthsize
= aead_setauthsize
,
2356 .encrypt
= aead_encrypt
,
2357 .decrypt
= aead_decrypt
,
2358 .ivsize
= AES_BLOCK_SIZE
,
2359 .maxauthsize
= SHA384_DIGEST_SIZE
,
2362 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CBC
,
2363 .class2_alg_type
= OP_ALG_ALGSEL_SHA384
|
2364 OP_ALG_AAI_HMAC_PRECOMP
,
2371 .cra_name
= "authenc(hmac(sha512),cbc(aes))",
2372 .cra_driver_name
= "authenc-hmac-sha512-"
2374 .cra_blocksize
= AES_BLOCK_SIZE
,
2376 .setkey
= aead_setkey
,
2377 .setauthsize
= aead_setauthsize
,
2378 .encrypt
= aead_encrypt
,
2379 .decrypt
= aead_decrypt
,
2380 .ivsize
= AES_BLOCK_SIZE
,
2381 .maxauthsize
= SHA512_DIGEST_SIZE
,
2384 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CBC
,
2385 .class2_alg_type
= OP_ALG_ALGSEL_SHA512
|
2386 OP_ALG_AAI_HMAC_PRECOMP
,
2392 .cra_name
= "echainiv(authenc(hmac(sha512),"
2394 .cra_driver_name
= "echainiv-authenc-"
2395 "hmac-sha512-cbc-aes-caam",
2396 .cra_blocksize
= AES_BLOCK_SIZE
,
2398 .setkey
= aead_setkey
,
2399 .setauthsize
= aead_setauthsize
,
2400 .encrypt
= aead_encrypt
,
2401 .decrypt
= aead_decrypt
,
2402 .ivsize
= AES_BLOCK_SIZE
,
2403 .maxauthsize
= SHA512_DIGEST_SIZE
,
2406 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CBC
,
2407 .class2_alg_type
= OP_ALG_ALGSEL_SHA512
|
2408 OP_ALG_AAI_HMAC_PRECOMP
,
2415 .cra_name
= "authenc(hmac(md5),cbc(des3_ede))",
2416 .cra_driver_name
= "authenc-hmac-md5-"
2417 "cbc-des3_ede-caam",
2418 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2420 .setkey
= aead_setkey
,
2421 .setauthsize
= aead_setauthsize
,
2422 .encrypt
= aead_encrypt
,
2423 .decrypt
= aead_decrypt
,
2424 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2425 .maxauthsize
= MD5_DIGEST_SIZE
,
2428 .class1_alg_type
= OP_ALG_ALGSEL_3DES
| OP_ALG_AAI_CBC
,
2429 .class2_alg_type
= OP_ALG_ALGSEL_MD5
|
2430 OP_ALG_AAI_HMAC_PRECOMP
,
2436 .cra_name
= "echainiv(authenc(hmac(md5),"
2438 .cra_driver_name
= "echainiv-authenc-hmac-md5-"
2439 "cbc-des3_ede-caam",
2440 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2442 .setkey
= aead_setkey
,
2443 .setauthsize
= aead_setauthsize
,
2444 .encrypt
= aead_encrypt
,
2445 .decrypt
= aead_decrypt
,
2446 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2447 .maxauthsize
= MD5_DIGEST_SIZE
,
2450 .class1_alg_type
= OP_ALG_ALGSEL_3DES
| OP_ALG_AAI_CBC
,
2451 .class2_alg_type
= OP_ALG_ALGSEL_MD5
|
2452 OP_ALG_AAI_HMAC_PRECOMP
,
2459 .cra_name
= "authenc(hmac(sha1),"
2461 .cra_driver_name
= "authenc-hmac-sha1-"
2462 "cbc-des3_ede-caam",
2463 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2465 .setkey
= aead_setkey
,
2466 .setauthsize
= aead_setauthsize
,
2467 .encrypt
= aead_encrypt
,
2468 .decrypt
= aead_decrypt
,
2469 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2470 .maxauthsize
= SHA1_DIGEST_SIZE
,
2473 .class1_alg_type
= OP_ALG_ALGSEL_3DES
| OP_ALG_AAI_CBC
,
2474 .class2_alg_type
= OP_ALG_ALGSEL_SHA1
|
2475 OP_ALG_AAI_HMAC_PRECOMP
,
2481 .cra_name
= "echainiv(authenc(hmac(sha1),"
2483 .cra_driver_name
= "echainiv-authenc-"
2485 "cbc-des3_ede-caam",
2486 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2488 .setkey
= aead_setkey
,
2489 .setauthsize
= aead_setauthsize
,
2490 .encrypt
= aead_encrypt
,
2491 .decrypt
= aead_decrypt
,
2492 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2493 .maxauthsize
= SHA1_DIGEST_SIZE
,
2496 .class1_alg_type
= OP_ALG_ALGSEL_3DES
| OP_ALG_AAI_CBC
,
2497 .class2_alg_type
= OP_ALG_ALGSEL_SHA1
|
2498 OP_ALG_AAI_HMAC_PRECOMP
,
2505 .cra_name
= "authenc(hmac(sha224),"
2507 .cra_driver_name
= "authenc-hmac-sha224-"
2508 "cbc-des3_ede-caam",
2509 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2511 .setkey
= aead_setkey
,
2512 .setauthsize
= aead_setauthsize
,
2513 .encrypt
= aead_encrypt
,
2514 .decrypt
= aead_decrypt
,
2515 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2516 .maxauthsize
= SHA224_DIGEST_SIZE
,
2519 .class1_alg_type
= OP_ALG_ALGSEL_3DES
| OP_ALG_AAI_CBC
,
2520 .class2_alg_type
= OP_ALG_ALGSEL_SHA224
|
2521 OP_ALG_AAI_HMAC_PRECOMP
,
2527 .cra_name
= "echainiv(authenc(hmac(sha224),"
2529 .cra_driver_name
= "echainiv-authenc-"
2531 "cbc-des3_ede-caam",
2532 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2534 .setkey
= aead_setkey
,
2535 .setauthsize
= aead_setauthsize
,
2536 .encrypt
= aead_encrypt
,
2537 .decrypt
= aead_decrypt
,
2538 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2539 .maxauthsize
= SHA224_DIGEST_SIZE
,
2542 .class1_alg_type
= OP_ALG_ALGSEL_3DES
| OP_ALG_AAI_CBC
,
2543 .class2_alg_type
= OP_ALG_ALGSEL_SHA224
|
2544 OP_ALG_AAI_HMAC_PRECOMP
,
2551 .cra_name
= "authenc(hmac(sha256),"
2553 .cra_driver_name
= "authenc-hmac-sha256-"
2554 "cbc-des3_ede-caam",
2555 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2557 .setkey
= aead_setkey
,
2558 .setauthsize
= aead_setauthsize
,
2559 .encrypt
= aead_encrypt
,
2560 .decrypt
= aead_decrypt
,
2561 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2562 .maxauthsize
= SHA256_DIGEST_SIZE
,
2565 .class1_alg_type
= OP_ALG_ALGSEL_3DES
| OP_ALG_AAI_CBC
,
2566 .class2_alg_type
= OP_ALG_ALGSEL_SHA256
|
2567 OP_ALG_AAI_HMAC_PRECOMP
,
2573 .cra_name
= "echainiv(authenc(hmac(sha256),"
2575 .cra_driver_name
= "echainiv-authenc-"
2577 "cbc-des3_ede-caam",
2578 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2580 .setkey
= aead_setkey
,
2581 .setauthsize
= aead_setauthsize
,
2582 .encrypt
= aead_encrypt
,
2583 .decrypt
= aead_decrypt
,
2584 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2585 .maxauthsize
= SHA256_DIGEST_SIZE
,
2588 .class1_alg_type
= OP_ALG_ALGSEL_3DES
| OP_ALG_AAI_CBC
,
2589 .class2_alg_type
= OP_ALG_ALGSEL_SHA256
|
2590 OP_ALG_AAI_HMAC_PRECOMP
,
2597 .cra_name
= "authenc(hmac(sha384),"
2599 .cra_driver_name
= "authenc-hmac-sha384-"
2600 "cbc-des3_ede-caam",
2601 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2603 .setkey
= aead_setkey
,
2604 .setauthsize
= aead_setauthsize
,
2605 .encrypt
= aead_encrypt
,
2606 .decrypt
= aead_decrypt
,
2607 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2608 .maxauthsize
= SHA384_DIGEST_SIZE
,
2611 .class1_alg_type
= OP_ALG_ALGSEL_3DES
| OP_ALG_AAI_CBC
,
2612 .class2_alg_type
= OP_ALG_ALGSEL_SHA384
|
2613 OP_ALG_AAI_HMAC_PRECOMP
,
2619 .cra_name
= "echainiv(authenc(hmac(sha384),"
2621 .cra_driver_name
= "echainiv-authenc-"
2623 "cbc-des3_ede-caam",
2624 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2626 .setkey
= aead_setkey
,
2627 .setauthsize
= aead_setauthsize
,
2628 .encrypt
= aead_encrypt
,
2629 .decrypt
= aead_decrypt
,
2630 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2631 .maxauthsize
= SHA384_DIGEST_SIZE
,
2634 .class1_alg_type
= OP_ALG_ALGSEL_3DES
| OP_ALG_AAI_CBC
,
2635 .class2_alg_type
= OP_ALG_ALGSEL_SHA384
|
2636 OP_ALG_AAI_HMAC_PRECOMP
,
2643 .cra_name
= "authenc(hmac(sha512),"
2645 .cra_driver_name
= "authenc-hmac-sha512-"
2646 "cbc-des3_ede-caam",
2647 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2649 .setkey
= aead_setkey
,
2650 .setauthsize
= aead_setauthsize
,
2651 .encrypt
= aead_encrypt
,
2652 .decrypt
= aead_decrypt
,
2653 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2654 .maxauthsize
= SHA512_DIGEST_SIZE
,
2657 .class1_alg_type
= OP_ALG_ALGSEL_3DES
| OP_ALG_AAI_CBC
,
2658 .class2_alg_type
= OP_ALG_ALGSEL_SHA512
|
2659 OP_ALG_AAI_HMAC_PRECOMP
,
2665 .cra_name
= "echainiv(authenc(hmac(sha512),"
2667 .cra_driver_name
= "echainiv-authenc-"
2669 "cbc-des3_ede-caam",
2670 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2672 .setkey
= aead_setkey
,
2673 .setauthsize
= aead_setauthsize
,
2674 .encrypt
= aead_encrypt
,
2675 .decrypt
= aead_decrypt
,
2676 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2677 .maxauthsize
= SHA512_DIGEST_SIZE
,
2680 .class1_alg_type
= OP_ALG_ALGSEL_3DES
| OP_ALG_AAI_CBC
,
2681 .class2_alg_type
= OP_ALG_ALGSEL_SHA512
|
2682 OP_ALG_AAI_HMAC_PRECOMP
,
2689 .cra_name
= "authenc(hmac(md5),cbc(des))",
2690 .cra_driver_name
= "authenc-hmac-md5-"
2692 .cra_blocksize
= DES_BLOCK_SIZE
,
2694 .setkey
= aead_setkey
,
2695 .setauthsize
= aead_setauthsize
,
2696 .encrypt
= aead_encrypt
,
2697 .decrypt
= aead_decrypt
,
2698 .ivsize
= DES_BLOCK_SIZE
,
2699 .maxauthsize
= MD5_DIGEST_SIZE
,
2702 .class1_alg_type
= OP_ALG_ALGSEL_DES
| OP_ALG_AAI_CBC
,
2703 .class2_alg_type
= OP_ALG_ALGSEL_MD5
|
2704 OP_ALG_AAI_HMAC_PRECOMP
,
2710 .cra_name
= "echainiv(authenc(hmac(md5),"
2712 .cra_driver_name
= "echainiv-authenc-hmac-md5-"
2714 .cra_blocksize
= DES_BLOCK_SIZE
,
2716 .setkey
= aead_setkey
,
2717 .setauthsize
= aead_setauthsize
,
2718 .encrypt
= aead_encrypt
,
2719 .decrypt
= aead_decrypt
,
2720 .ivsize
= DES_BLOCK_SIZE
,
2721 .maxauthsize
= MD5_DIGEST_SIZE
,
2724 .class1_alg_type
= OP_ALG_ALGSEL_DES
| OP_ALG_AAI_CBC
,
2725 .class2_alg_type
= OP_ALG_ALGSEL_MD5
|
2726 OP_ALG_AAI_HMAC_PRECOMP
,
2733 .cra_name
= "authenc(hmac(sha1),cbc(des))",
2734 .cra_driver_name
= "authenc-hmac-sha1-"
2736 .cra_blocksize
= DES_BLOCK_SIZE
,
2738 .setkey
= aead_setkey
,
2739 .setauthsize
= aead_setauthsize
,
2740 .encrypt
= aead_encrypt
,
2741 .decrypt
= aead_decrypt
,
2742 .ivsize
= DES_BLOCK_SIZE
,
2743 .maxauthsize
= SHA1_DIGEST_SIZE
,
2746 .class1_alg_type
= OP_ALG_ALGSEL_DES
| OP_ALG_AAI_CBC
,
2747 .class2_alg_type
= OP_ALG_ALGSEL_SHA1
|
2748 OP_ALG_AAI_HMAC_PRECOMP
,
2754 .cra_name
= "echainiv(authenc(hmac(sha1),"
2756 .cra_driver_name
= "echainiv-authenc-"
2757 "hmac-sha1-cbc-des-caam",
2758 .cra_blocksize
= DES_BLOCK_SIZE
,
2760 .setkey
= aead_setkey
,
2761 .setauthsize
= aead_setauthsize
,
2762 .encrypt
= aead_encrypt
,
2763 .decrypt
= aead_decrypt
,
2764 .ivsize
= DES_BLOCK_SIZE
,
2765 .maxauthsize
= SHA1_DIGEST_SIZE
,
2768 .class1_alg_type
= OP_ALG_ALGSEL_DES
| OP_ALG_AAI_CBC
,
2769 .class2_alg_type
= OP_ALG_ALGSEL_SHA1
|
2770 OP_ALG_AAI_HMAC_PRECOMP
,
2777 .cra_name
= "authenc(hmac(sha224),cbc(des))",
2778 .cra_driver_name
= "authenc-hmac-sha224-"
2780 .cra_blocksize
= DES_BLOCK_SIZE
,
2782 .setkey
= aead_setkey
,
2783 .setauthsize
= aead_setauthsize
,
2784 .encrypt
= aead_encrypt
,
2785 .decrypt
= aead_decrypt
,
2786 .ivsize
= DES_BLOCK_SIZE
,
2787 .maxauthsize
= SHA224_DIGEST_SIZE
,
2790 .class1_alg_type
= OP_ALG_ALGSEL_DES
| OP_ALG_AAI_CBC
,
2791 .class2_alg_type
= OP_ALG_ALGSEL_SHA224
|
2792 OP_ALG_AAI_HMAC_PRECOMP
,
2798 .cra_name
= "echainiv(authenc(hmac(sha224),"
2800 .cra_driver_name
= "echainiv-authenc-"
2801 "hmac-sha224-cbc-des-caam",
2802 .cra_blocksize
= DES_BLOCK_SIZE
,
2804 .setkey
= aead_setkey
,
2805 .setauthsize
= aead_setauthsize
,
2806 .encrypt
= aead_encrypt
,
2807 .decrypt
= aead_decrypt
,
2808 .ivsize
= DES_BLOCK_SIZE
,
2809 .maxauthsize
= SHA224_DIGEST_SIZE
,
2812 .class1_alg_type
= OP_ALG_ALGSEL_DES
| OP_ALG_AAI_CBC
,
2813 .class2_alg_type
= OP_ALG_ALGSEL_SHA224
|
2814 OP_ALG_AAI_HMAC_PRECOMP
,
2821 .cra_name
= "authenc(hmac(sha256),cbc(des))",
2822 .cra_driver_name
= "authenc-hmac-sha256-"
2824 .cra_blocksize
= DES_BLOCK_SIZE
,
2826 .setkey
= aead_setkey
,
2827 .setauthsize
= aead_setauthsize
,
2828 .encrypt
= aead_encrypt
,
2829 .decrypt
= aead_decrypt
,
2830 .ivsize
= DES_BLOCK_SIZE
,
2831 .maxauthsize
= SHA256_DIGEST_SIZE
,
2834 .class1_alg_type
= OP_ALG_ALGSEL_DES
| OP_ALG_AAI_CBC
,
2835 .class2_alg_type
= OP_ALG_ALGSEL_SHA256
|
2836 OP_ALG_AAI_HMAC_PRECOMP
,
2842 .cra_name
= "echainiv(authenc(hmac(sha256),"
2844 .cra_driver_name
= "echainiv-authenc-"
2845 "hmac-sha256-cbc-des-caam",
2846 .cra_blocksize
= DES_BLOCK_SIZE
,
2848 .setkey
= aead_setkey
,
2849 .setauthsize
= aead_setauthsize
,
2850 .encrypt
= aead_encrypt
,
2851 .decrypt
= aead_decrypt
,
2852 .ivsize
= DES_BLOCK_SIZE
,
2853 .maxauthsize
= SHA256_DIGEST_SIZE
,
2856 .class1_alg_type
= OP_ALG_ALGSEL_DES
| OP_ALG_AAI_CBC
,
2857 .class2_alg_type
= OP_ALG_ALGSEL_SHA256
|
2858 OP_ALG_AAI_HMAC_PRECOMP
,
2865 .cra_name
= "authenc(hmac(sha384),cbc(des))",
2866 .cra_driver_name
= "authenc-hmac-sha384-"
2868 .cra_blocksize
= DES_BLOCK_SIZE
,
2870 .setkey
= aead_setkey
,
2871 .setauthsize
= aead_setauthsize
,
2872 .encrypt
= aead_encrypt
,
2873 .decrypt
= aead_decrypt
,
2874 .ivsize
= DES_BLOCK_SIZE
,
2875 .maxauthsize
= SHA384_DIGEST_SIZE
,
2878 .class1_alg_type
= OP_ALG_ALGSEL_DES
| OP_ALG_AAI_CBC
,
2879 .class2_alg_type
= OP_ALG_ALGSEL_SHA384
|
2880 OP_ALG_AAI_HMAC_PRECOMP
,
2886 .cra_name
= "echainiv(authenc(hmac(sha384),"
2888 .cra_driver_name
= "echainiv-authenc-"
2889 "hmac-sha384-cbc-des-caam",
2890 .cra_blocksize
= DES_BLOCK_SIZE
,
2892 .setkey
= aead_setkey
,
2893 .setauthsize
= aead_setauthsize
,
2894 .encrypt
= aead_encrypt
,
2895 .decrypt
= aead_decrypt
,
2896 .ivsize
= DES_BLOCK_SIZE
,
2897 .maxauthsize
= SHA384_DIGEST_SIZE
,
2900 .class1_alg_type
= OP_ALG_ALGSEL_DES
| OP_ALG_AAI_CBC
,
2901 .class2_alg_type
= OP_ALG_ALGSEL_SHA384
|
2902 OP_ALG_AAI_HMAC_PRECOMP
,
2909 .cra_name
= "authenc(hmac(sha512),cbc(des))",
2910 .cra_driver_name
= "authenc-hmac-sha512-"
2912 .cra_blocksize
= DES_BLOCK_SIZE
,
2914 .setkey
= aead_setkey
,
2915 .setauthsize
= aead_setauthsize
,
2916 .encrypt
= aead_encrypt
,
2917 .decrypt
= aead_decrypt
,
2918 .ivsize
= DES_BLOCK_SIZE
,
2919 .maxauthsize
= SHA512_DIGEST_SIZE
,
2922 .class1_alg_type
= OP_ALG_ALGSEL_DES
| OP_ALG_AAI_CBC
,
2923 .class2_alg_type
= OP_ALG_ALGSEL_SHA512
|
2924 OP_ALG_AAI_HMAC_PRECOMP
,
2930 .cra_name
= "echainiv(authenc(hmac(sha512),"
2932 .cra_driver_name
= "echainiv-authenc-"
2933 "hmac-sha512-cbc-des-caam",
2934 .cra_blocksize
= DES_BLOCK_SIZE
,
2936 .setkey
= aead_setkey
,
2937 .setauthsize
= aead_setauthsize
,
2938 .encrypt
= aead_encrypt
,
2939 .decrypt
= aead_decrypt
,
2940 .ivsize
= DES_BLOCK_SIZE
,
2941 .maxauthsize
= SHA512_DIGEST_SIZE
,
2944 .class1_alg_type
= OP_ALG_ALGSEL_DES
| OP_ALG_AAI_CBC
,
2945 .class2_alg_type
= OP_ALG_ALGSEL_SHA512
|
2946 OP_ALG_AAI_HMAC_PRECOMP
,
2953 .cra_name
= "authenc(hmac(md5),"
2954 "rfc3686(ctr(aes)))",
2955 .cra_driver_name
= "authenc-hmac-md5-"
2956 "rfc3686-ctr-aes-caam",
2959 .setkey
= aead_setkey
,
2960 .setauthsize
= aead_setauthsize
,
2961 .encrypt
= aead_encrypt
,
2962 .decrypt
= aead_decrypt
,
2963 .ivsize
= CTR_RFC3686_IV_SIZE
,
2964 .maxauthsize
= MD5_DIGEST_SIZE
,
2967 .class1_alg_type
= OP_ALG_ALGSEL_AES
|
2968 OP_ALG_AAI_CTR_MOD128
,
2969 .class2_alg_type
= OP_ALG_ALGSEL_MD5
|
2970 OP_ALG_AAI_HMAC_PRECOMP
,
2977 .cra_name
= "seqiv(authenc("
2978 "hmac(md5),rfc3686(ctr(aes))))",
2979 .cra_driver_name
= "seqiv-authenc-hmac-md5-"
2980 "rfc3686-ctr-aes-caam",
2983 .setkey
= aead_setkey
,
2984 .setauthsize
= aead_setauthsize
,
2985 .encrypt
= aead_encrypt
,
2986 .decrypt
= aead_decrypt
,
2987 .ivsize
= CTR_RFC3686_IV_SIZE
,
2988 .maxauthsize
= MD5_DIGEST_SIZE
,
2991 .class1_alg_type
= OP_ALG_ALGSEL_AES
|
2992 OP_ALG_AAI_CTR_MOD128
,
2993 .class2_alg_type
= OP_ALG_ALGSEL_MD5
|
2994 OP_ALG_AAI_HMAC_PRECOMP
,
3002 .cra_name
= "authenc(hmac(sha1),"
3003 "rfc3686(ctr(aes)))",
3004 .cra_driver_name
= "authenc-hmac-sha1-"
3005 "rfc3686-ctr-aes-caam",
3008 .setkey
= aead_setkey
,
3009 .setauthsize
= aead_setauthsize
,
3010 .encrypt
= aead_encrypt
,
3011 .decrypt
= aead_decrypt
,
3012 .ivsize
= CTR_RFC3686_IV_SIZE
,
3013 .maxauthsize
= SHA1_DIGEST_SIZE
,
3016 .class1_alg_type
= OP_ALG_ALGSEL_AES
|
3017 OP_ALG_AAI_CTR_MOD128
,
3018 .class2_alg_type
= OP_ALG_ALGSEL_SHA1
|
3019 OP_ALG_AAI_HMAC_PRECOMP
,
3026 .cra_name
= "seqiv(authenc("
3027 "hmac(sha1),rfc3686(ctr(aes))))",
3028 .cra_driver_name
= "seqiv-authenc-hmac-sha1-"
3029 "rfc3686-ctr-aes-caam",
3032 .setkey
= aead_setkey
,
3033 .setauthsize
= aead_setauthsize
,
3034 .encrypt
= aead_encrypt
,
3035 .decrypt
= aead_decrypt
,
3036 .ivsize
= CTR_RFC3686_IV_SIZE
,
3037 .maxauthsize
= SHA1_DIGEST_SIZE
,
3040 .class1_alg_type
= OP_ALG_ALGSEL_AES
|
3041 OP_ALG_AAI_CTR_MOD128
,
3042 .class2_alg_type
= OP_ALG_ALGSEL_SHA1
|
3043 OP_ALG_AAI_HMAC_PRECOMP
,
3051 .cra_name
= "authenc(hmac(sha224),"
3052 "rfc3686(ctr(aes)))",
3053 .cra_driver_name
= "authenc-hmac-sha224-"
3054 "rfc3686-ctr-aes-caam",
3057 .setkey
= aead_setkey
,
3058 .setauthsize
= aead_setauthsize
,
3059 .encrypt
= aead_encrypt
,
3060 .decrypt
= aead_decrypt
,
3061 .ivsize
= CTR_RFC3686_IV_SIZE
,
3062 .maxauthsize
= SHA224_DIGEST_SIZE
,
3065 .class1_alg_type
= OP_ALG_ALGSEL_AES
|
3066 OP_ALG_AAI_CTR_MOD128
,
3067 .class2_alg_type
= OP_ALG_ALGSEL_SHA224
|
3068 OP_ALG_AAI_HMAC_PRECOMP
,
3075 .cra_name
= "seqiv(authenc("
3076 "hmac(sha224),rfc3686(ctr(aes))))",
3077 .cra_driver_name
= "seqiv-authenc-hmac-sha224-"
3078 "rfc3686-ctr-aes-caam",
3081 .setkey
= aead_setkey
,
3082 .setauthsize
= aead_setauthsize
,
3083 .encrypt
= aead_encrypt
,
3084 .decrypt
= aead_decrypt
,
3085 .ivsize
= CTR_RFC3686_IV_SIZE
,
3086 .maxauthsize
= SHA224_DIGEST_SIZE
,
3089 .class1_alg_type
= OP_ALG_ALGSEL_AES
|
3090 OP_ALG_AAI_CTR_MOD128
,
3091 .class2_alg_type
= OP_ALG_ALGSEL_SHA224
|
3092 OP_ALG_AAI_HMAC_PRECOMP
,
3100 .cra_name
= "authenc(hmac(sha256),"
3101 "rfc3686(ctr(aes)))",
3102 .cra_driver_name
= "authenc-hmac-sha256-"
3103 "rfc3686-ctr-aes-caam",
3106 .setkey
= aead_setkey
,
3107 .setauthsize
= aead_setauthsize
,
3108 .encrypt
= aead_encrypt
,
3109 .decrypt
= aead_decrypt
,
3110 .ivsize
= CTR_RFC3686_IV_SIZE
,
3111 .maxauthsize
= SHA256_DIGEST_SIZE
,
3114 .class1_alg_type
= OP_ALG_ALGSEL_AES
|
3115 OP_ALG_AAI_CTR_MOD128
,
3116 .class2_alg_type
= OP_ALG_ALGSEL_SHA256
|
3117 OP_ALG_AAI_HMAC_PRECOMP
,
3124 .cra_name
= "seqiv(authenc(hmac(sha256),"
3125 "rfc3686(ctr(aes))))",
3126 .cra_driver_name
= "seqiv-authenc-hmac-sha256-"
3127 "rfc3686-ctr-aes-caam",
3130 .setkey
= aead_setkey
,
3131 .setauthsize
= aead_setauthsize
,
3132 .encrypt
= aead_encrypt
,
3133 .decrypt
= aead_decrypt
,
3134 .ivsize
= CTR_RFC3686_IV_SIZE
,
3135 .maxauthsize
= SHA256_DIGEST_SIZE
,
3138 .class1_alg_type
= OP_ALG_ALGSEL_AES
|
3139 OP_ALG_AAI_CTR_MOD128
,
3140 .class2_alg_type
= OP_ALG_ALGSEL_SHA256
|
3141 OP_ALG_AAI_HMAC_PRECOMP
,
3149 .cra_name
= "authenc(hmac(sha384),"
3150 "rfc3686(ctr(aes)))",
3151 .cra_driver_name
= "authenc-hmac-sha384-"
3152 "rfc3686-ctr-aes-caam",
3155 .setkey
= aead_setkey
,
3156 .setauthsize
= aead_setauthsize
,
3157 .encrypt
= aead_encrypt
,
3158 .decrypt
= aead_decrypt
,
3159 .ivsize
= CTR_RFC3686_IV_SIZE
,
3160 .maxauthsize
= SHA384_DIGEST_SIZE
,
3163 .class1_alg_type
= OP_ALG_ALGSEL_AES
|
3164 OP_ALG_AAI_CTR_MOD128
,
3165 .class2_alg_type
= OP_ALG_ALGSEL_SHA384
|
3166 OP_ALG_AAI_HMAC_PRECOMP
,
3173 .cra_name
= "seqiv(authenc(hmac(sha384),"
3174 "rfc3686(ctr(aes))))",
3175 .cra_driver_name
= "seqiv-authenc-hmac-sha384-"
3176 "rfc3686-ctr-aes-caam",
3179 .setkey
= aead_setkey
,
3180 .setauthsize
= aead_setauthsize
,
3181 .encrypt
= aead_encrypt
,
3182 .decrypt
= aead_decrypt
,
3183 .ivsize
= CTR_RFC3686_IV_SIZE
,
3184 .maxauthsize
= SHA384_DIGEST_SIZE
,
3187 .class1_alg_type
= OP_ALG_ALGSEL_AES
|
3188 OP_ALG_AAI_CTR_MOD128
,
3189 .class2_alg_type
= OP_ALG_ALGSEL_SHA384
|
3190 OP_ALG_AAI_HMAC_PRECOMP
,
3198 .cra_name
= "authenc(hmac(sha512),"
3199 "rfc3686(ctr(aes)))",
3200 .cra_driver_name
= "authenc-hmac-sha512-"
3201 "rfc3686-ctr-aes-caam",
3204 .setkey
= aead_setkey
,
3205 .setauthsize
= aead_setauthsize
,
3206 .encrypt
= aead_encrypt
,
3207 .decrypt
= aead_decrypt
,
3208 .ivsize
= CTR_RFC3686_IV_SIZE
,
3209 .maxauthsize
= SHA512_DIGEST_SIZE
,
3212 .class1_alg_type
= OP_ALG_ALGSEL_AES
|
3213 OP_ALG_AAI_CTR_MOD128
,
3214 .class2_alg_type
= OP_ALG_ALGSEL_SHA512
|
3215 OP_ALG_AAI_HMAC_PRECOMP
,
3222 .cra_name
= "seqiv(authenc(hmac(sha512),"
3223 "rfc3686(ctr(aes))))",
3224 .cra_driver_name
= "seqiv-authenc-hmac-sha512-"
3225 "rfc3686-ctr-aes-caam",
3228 .setkey
= aead_setkey
,
3229 .setauthsize
= aead_setauthsize
,
3230 .encrypt
= aead_encrypt
,
3231 .decrypt
= aead_decrypt
,
3232 .ivsize
= CTR_RFC3686_IV_SIZE
,
3233 .maxauthsize
= SHA512_DIGEST_SIZE
,
3236 .class1_alg_type
= OP_ALG_ALGSEL_AES
|
3237 OP_ALG_AAI_CTR_MOD128
,
3238 .class2_alg_type
= OP_ALG_ALGSEL_SHA512
|
3239 OP_ALG_AAI_HMAC_PRECOMP
,
3246 struct caam_crypto_alg
{
3247 struct crypto_alg crypto_alg
;
3248 struct list_head entry
;
3249 struct caam_alg_entry caam
;
3252 static int caam_init_common(struct caam_ctx
*ctx
, struct caam_alg_entry
*caam
)
3254 dma_addr_t dma_addr
;
3256 ctx
->jrdev
= caam_jr_alloc();
3257 if (IS_ERR(ctx
->jrdev
)) {
3258 pr_err("Job Ring Device allocation for transform failed\n");
3259 return PTR_ERR(ctx
->jrdev
);
3262 dma_addr
= dma_map_single_attrs(ctx
->jrdev
, ctx
->sh_desc_enc
,
3263 offsetof(struct caam_ctx
,
3265 DMA_TO_DEVICE
, DMA_ATTR_SKIP_CPU_SYNC
);
3266 if (dma_mapping_error(ctx
->jrdev
, dma_addr
)) {
3267 dev_err(ctx
->jrdev
, "unable to map key, shared descriptors\n");
3268 caam_jr_free(ctx
->jrdev
);
3272 ctx
->sh_desc_enc_dma
= dma_addr
;
3273 ctx
->sh_desc_dec_dma
= dma_addr
+ offsetof(struct caam_ctx
,
3275 ctx
->sh_desc_givenc_dma
= dma_addr
+ offsetof(struct caam_ctx
,
3277 ctx
->key_dma
= dma_addr
+ offsetof(struct caam_ctx
, key
);
3279 /* copy descriptor header template value */
3280 ctx
->cdata
.algtype
= OP_TYPE_CLASS1_ALG
| caam
->class1_alg_type
;
3281 ctx
->adata
.algtype
= OP_TYPE_CLASS2_ALG
| caam
->class2_alg_type
;
3286 static int caam_cra_init(struct crypto_tfm
*tfm
)
3288 struct crypto_alg
*alg
= tfm
->__crt_alg
;
3289 struct caam_crypto_alg
*caam_alg
=
3290 container_of(alg
, struct caam_crypto_alg
, crypto_alg
);
3291 struct caam_ctx
*ctx
= crypto_tfm_ctx(tfm
);
3293 return caam_init_common(ctx
, &caam_alg
->caam
);
3296 static int caam_aead_init(struct crypto_aead
*tfm
)
3298 struct aead_alg
*alg
= crypto_aead_alg(tfm
);
3299 struct caam_aead_alg
*caam_alg
=
3300 container_of(alg
, struct caam_aead_alg
, aead
);
3301 struct caam_ctx
*ctx
= crypto_aead_ctx(tfm
);
3303 return caam_init_common(ctx
, &caam_alg
->caam
);
3306 static void caam_exit_common(struct caam_ctx
*ctx
)
3308 dma_unmap_single_attrs(ctx
->jrdev
, ctx
->sh_desc_enc_dma
,
3309 offsetof(struct caam_ctx
, sh_desc_enc_dma
),
3310 DMA_TO_DEVICE
, DMA_ATTR_SKIP_CPU_SYNC
);
3311 caam_jr_free(ctx
->jrdev
);
/* crypto_alg ->cra_exit hook: common context teardown. */
static void caam_cra_exit(struct crypto_tfm *tfm)
{
	caam_exit_common(crypto_tfm_ctx(tfm));
}
/* aead_alg ->exit hook: common context teardown. */
static void caam_aead_exit(struct crypto_aead *tfm)
{
	caam_exit_common(crypto_aead_ctx(tfm));
}
3324 static void __exit
caam_algapi_exit(void)
3327 struct caam_crypto_alg
*t_alg
, *n
;
3330 for (i
= 0; i
< ARRAY_SIZE(driver_aeads
); i
++) {
3331 struct caam_aead_alg
*t_alg
= driver_aeads
+ i
;
3333 if (t_alg
->registered
)
3334 crypto_unregister_aead(&t_alg
->aead
);
3340 list_for_each_entry_safe(t_alg
, n
, &alg_list
, entry
) {
3341 crypto_unregister_alg(&t_alg
->crypto_alg
);
3342 list_del(&t_alg
->entry
);
3347 static struct caam_crypto_alg
*caam_alg_alloc(struct caam_alg_template
3350 struct caam_crypto_alg
*t_alg
;
3351 struct crypto_alg
*alg
;
3353 t_alg
= kzalloc(sizeof(*t_alg
), GFP_KERNEL
);
3355 pr_err("failed to allocate t_alg\n");
3356 return ERR_PTR(-ENOMEM
);
3359 alg
= &t_alg
->crypto_alg
;
3361 snprintf(alg
->cra_name
, CRYPTO_MAX_ALG_NAME
, "%s", template->name
);
3362 snprintf(alg
->cra_driver_name
, CRYPTO_MAX_ALG_NAME
, "%s",
3363 template->driver_name
);
3364 alg
->cra_module
= THIS_MODULE
;
3365 alg
->cra_init
= caam_cra_init
;
3366 alg
->cra_exit
= caam_cra_exit
;
3367 alg
->cra_priority
= CAAM_CRA_PRIORITY
;
3368 alg
->cra_blocksize
= template->blocksize
;
3369 alg
->cra_alignmask
= 0;
3370 alg
->cra_ctxsize
= sizeof(struct caam_ctx
);
3371 alg
->cra_flags
= CRYPTO_ALG_ASYNC
| CRYPTO_ALG_KERN_DRIVER_ONLY
|
3373 switch (template->type
) {
3374 case CRYPTO_ALG_TYPE_GIVCIPHER
:
3375 alg
->cra_type
= &crypto_givcipher_type
;
3376 alg
->cra_ablkcipher
= template->template_ablkcipher
;
3378 case CRYPTO_ALG_TYPE_ABLKCIPHER
:
3379 alg
->cra_type
= &crypto_ablkcipher_type
;
3380 alg
->cra_ablkcipher
= template->template_ablkcipher
;
3384 t_alg
->caam
.class1_alg_type
= template->class1_alg_type
;
3385 t_alg
->caam
.class2_alg_type
= template->class2_alg_type
;
3390 static void caam_aead_alg_init(struct caam_aead_alg
*t_alg
)
3392 struct aead_alg
*alg
= &t_alg
->aead
;
3394 alg
->base
.cra_module
= THIS_MODULE
;
3395 alg
->base
.cra_priority
= CAAM_CRA_PRIORITY
;
3396 alg
->base
.cra_ctxsize
= sizeof(struct caam_ctx
);
3397 alg
->base
.cra_flags
= CRYPTO_ALG_ASYNC
| CRYPTO_ALG_KERN_DRIVER_ONLY
;
3399 alg
->init
= caam_aead_init
;
3400 alg
->exit
= caam_aead_exit
;
3403 static int __init
caam_algapi_init(void)
3405 struct device_node
*dev_node
;
3406 struct platform_device
*pdev
;
3407 struct device
*ctrldev
;
3408 struct caam_drv_private
*priv
;
3410 u32 cha_vid
, cha_inst
, des_inst
, aes_inst
, md_inst
;
3411 unsigned int md_limit
= SHA512_DIGEST_SIZE
;
3412 bool registered
= false;
3414 dev_node
= of_find_compatible_node(NULL
, NULL
, "fsl,sec-v4.0");
3416 dev_node
= of_find_compatible_node(NULL
, NULL
, "fsl,sec4.0");
3421 pdev
= of_find_device_by_node(dev_node
);
3423 of_node_put(dev_node
);
3427 ctrldev
= &pdev
->dev
;
3428 priv
= dev_get_drvdata(ctrldev
);
3429 of_node_put(dev_node
);
3432 * If priv is NULL, it's probably because the caam driver wasn't
3433 * properly initialized (e.g. RNG4 init failed). Thus, bail out here.
3439 INIT_LIST_HEAD(&alg_list
);
3442 * Register crypto algorithms the device supports.
3443 * First, detect presence and attributes of DES, AES, and MD blocks.
3445 cha_vid
= rd_reg32(&priv
->ctrl
->perfmon
.cha_id_ls
);
3446 cha_inst
= rd_reg32(&priv
->ctrl
->perfmon
.cha_num_ls
);
3447 des_inst
= (cha_inst
& CHA_ID_LS_DES_MASK
) >> CHA_ID_LS_DES_SHIFT
;
3448 aes_inst
= (cha_inst
& CHA_ID_LS_AES_MASK
) >> CHA_ID_LS_AES_SHIFT
;
3449 md_inst
= (cha_inst
& CHA_ID_LS_MD_MASK
) >> CHA_ID_LS_MD_SHIFT
;
3451 /* If MD is present, limit digest size based on LP256 */
3452 if (md_inst
&& ((cha_vid
& CHA_ID_LS_MD_MASK
) == CHA_ID_LS_MD_LP256
))
3453 md_limit
= SHA256_DIGEST_SIZE
;
3455 for (i
= 0; i
< ARRAY_SIZE(driver_algs
); i
++) {
3456 struct caam_crypto_alg
*t_alg
;
3457 struct caam_alg_template
*alg
= driver_algs
+ i
;
3458 u32 alg_sel
= alg
->class1_alg_type
& OP_ALG_ALGSEL_MASK
;
3460 /* Skip DES algorithms if not supported by device */
3462 ((alg_sel
== OP_ALG_ALGSEL_3DES
) ||
3463 (alg_sel
== OP_ALG_ALGSEL_DES
)))
3466 /* Skip AES algorithms if not supported by device */
3467 if (!aes_inst
&& (alg_sel
== OP_ALG_ALGSEL_AES
))
3471 * Check support for AES modes not available
3474 if ((cha_vid
& CHA_ID_LS_AES_MASK
) == CHA_ID_LS_AES_LP
)
3475 if ((alg
->class1_alg_type
& OP_ALG_AAI_MASK
) ==
3479 t_alg
= caam_alg_alloc(alg
);
3480 if (IS_ERR(t_alg
)) {
3481 err
= PTR_ERR(t_alg
);
3482 pr_warn("%s alg allocation failed\n", alg
->driver_name
);
3486 err
= crypto_register_alg(&t_alg
->crypto_alg
);
3488 pr_warn("%s alg registration failed\n",
3489 t_alg
->crypto_alg
.cra_driver_name
);
3494 list_add_tail(&t_alg
->entry
, &alg_list
);
3498 for (i
= 0; i
< ARRAY_SIZE(driver_aeads
); i
++) {
3499 struct caam_aead_alg
*t_alg
= driver_aeads
+ i
;
3500 u32 c1_alg_sel
= t_alg
->caam
.class1_alg_type
&
3502 u32 c2_alg_sel
= t_alg
->caam
.class2_alg_type
&
3504 u32 alg_aai
= t_alg
->caam
.class1_alg_type
& OP_ALG_AAI_MASK
;
3506 /* Skip DES algorithms if not supported by device */
3508 ((c1_alg_sel
== OP_ALG_ALGSEL_3DES
) ||
3509 (c1_alg_sel
== OP_ALG_ALGSEL_DES
)))
3512 /* Skip AES algorithms if not supported by device */
3513 if (!aes_inst
&& (c1_alg_sel
== OP_ALG_ALGSEL_AES
))
3517 * Check support for AES algorithms not available
3520 if ((cha_vid
& CHA_ID_LS_AES_MASK
) == CHA_ID_LS_AES_LP
)
3521 if (alg_aai
== OP_ALG_AAI_GCM
)
3525 * Skip algorithms requiring message digests
3526 * if MD or MD size is not supported by device.
3529 (!md_inst
|| (t_alg
->aead
.maxauthsize
> md_limit
)))
3532 caam_aead_alg_init(t_alg
);
3534 err
= crypto_register_aead(&t_alg
->aead
);
3536 pr_warn("%s alg registration failed\n",
3537 t_alg
->aead
.base
.cra_driver_name
);
3541 t_alg
->registered
= true;
3546 pr_info("caam algorithms registered in /proc/crypto\n");
3551 module_init(caam_algapi_init
);
3552 module_exit(caam_algapi_exit
);
3554 MODULE_LICENSE("GPL");
3555 MODULE_DESCRIPTION("FSL CAAM support for crypto API");
3556 MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");