2 * caam - Freescale FSL CAAM support for crypto API
4 * Copyright 2008-2011 Freescale Semiconductor, Inc.
7 * Based on talitos crypto API driver.
9 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
11 * --------------- ---------------
12 * | JobDesc #1 |-------------------->| ShareDesc |
13 * | *(packet 1) | | (PDB) |
14 * --------------- |------------->| (hashKey) |
16 * . | |-------->| (operation) |
17 * --------------- | | ---------------
18 * | JobDesc #2 |------| |
24 * | JobDesc #3 |------------
28 * The SharedDesc never changes for a connection unless rekeyed, but
29 * each packet will likely be in a different place. So all we need
30 * to know to process the packet is where the input is, where the
31 * output goes, and what context we want to process with. Context is
32 * in the SharedDesc, packet references in the JobDesc.
34 * So, a job desc looks like:
36 * ---------------------
38 * | ShareDesc Pointer |
45 * ---------------------
52 #include "desc_constr.h"
55 #include "sg_sw_sec4.h"
57 #include "caamalg_desc.h"
62 #define CAAM_CRA_PRIORITY 3000
63 /* max key is sum of AES_MAX_KEY_SIZE, max split key size */
64 #define CAAM_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + \
65 CTR_RFC3686_NONCE_SIZE + \
66 SHA512_DIGEST_SIZE * 2)
68 #define AEAD_DESC_JOB_IO_LEN (DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
69 #define GCM_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \
71 #define AUTHENC_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \
74 #define DESC_MAX_USED_BYTES (CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN)
75 #define DESC_MAX_USED_LEN (DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
78 /* for print_hex_dumps with line references */
79 #define debug(format, arg...) printk(format, arg)
81 #define debug(format, arg...)
85 #include <linux/highmem.h>
87 static void dbg_dump_sg(const char *level
, const char *prefix_str
,
88 int prefix_type
, int rowsize
, int groupsize
,
89 struct scatterlist
*sg
, size_t tlen
, bool ascii
)
91 struct scatterlist
*it
;
96 for (it
= sg
; it
!= NULL
&& tlen
> 0 ; it
= sg_next(sg
)) {
98 * make sure the scatterlist's page
99 * has a valid virtual memory mapping
101 it_page
= kmap_atomic(sg_page(it
));
102 if (unlikely(!it_page
)) {
103 printk(KERN_ERR
"dbg_dump_sg: kmap failed\n");
107 buf
= it_page
+ it
->offset
;
108 len
= min_t(size_t, tlen
, it
->length
);
109 print_hex_dump(level
, prefix_str
, prefix_type
, rowsize
,
110 groupsize
, buf
, len
, ascii
);
113 kunmap_atomic(it_page
);
118 static struct list_head alg_list
;
120 struct caam_alg_entry
{
127 struct caam_aead_alg
{
128 struct aead_alg aead
;
129 struct caam_alg_entry caam
;
134 * per-session context
137 u32 sh_desc_enc
[DESC_MAX_USED_LEN
];
138 u32 sh_desc_dec
[DESC_MAX_USED_LEN
];
139 u32 sh_desc_givenc
[DESC_MAX_USED_LEN
];
140 u8 key
[CAAM_MAX_KEY_SIZE
];
141 dma_addr_t sh_desc_enc_dma
;
142 dma_addr_t sh_desc_dec_dma
;
143 dma_addr_t sh_desc_givenc_dma
;
145 struct device
*jrdev
;
146 struct alginfo adata
;
147 struct alginfo cdata
;
148 unsigned int authsize
;
151 static int aead_null_set_sh_desc(struct crypto_aead
*aead
)
153 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
154 struct device
*jrdev
= ctx
->jrdev
;
156 int rem_bytes
= CAAM_DESC_BYTES_MAX
- AEAD_DESC_JOB_IO_LEN
-
157 ctx
->adata
.keylen_pad
;
160 * Job Descriptor and Shared Descriptors
161 * must all fit into the 64-word Descriptor h/w Buffer
163 if (rem_bytes
>= DESC_AEAD_NULL_ENC_LEN
) {
164 ctx
->adata
.key_inline
= true;
165 ctx
->adata
.key_virt
= ctx
->key
;
167 ctx
->adata
.key_inline
= false;
168 ctx
->adata
.key_dma
= ctx
->key_dma
;
171 /* aead_encrypt shared descriptor */
172 desc
= ctx
->sh_desc_enc
;
173 cnstr_shdsc_aead_null_encap(desc
, &ctx
->adata
, ctx
->authsize
);
174 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_enc_dma
,
175 desc_bytes(desc
), DMA_TO_DEVICE
);
178 * Job Descriptor and Shared Descriptors
179 * must all fit into the 64-word Descriptor h/w Buffer
181 if (rem_bytes
>= DESC_AEAD_NULL_DEC_LEN
) {
182 ctx
->adata
.key_inline
= true;
183 ctx
->adata
.key_virt
= ctx
->key
;
185 ctx
->adata
.key_inline
= false;
186 ctx
->adata
.key_dma
= ctx
->key_dma
;
189 /* aead_decrypt shared descriptor */
190 desc
= ctx
->sh_desc_dec
;
191 cnstr_shdsc_aead_null_decap(desc
, &ctx
->adata
, ctx
->authsize
);
192 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_dec_dma
,
193 desc_bytes(desc
), DMA_TO_DEVICE
);
198 static int aead_set_sh_desc(struct crypto_aead
*aead
)
200 struct caam_aead_alg
*alg
= container_of(crypto_aead_alg(aead
),
201 struct caam_aead_alg
, aead
);
202 unsigned int ivsize
= crypto_aead_ivsize(aead
);
203 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
204 struct device
*jrdev
= ctx
->jrdev
;
206 u32
*desc
, *nonce
= NULL
;
208 unsigned int data_len
[2];
209 const bool ctr_mode
= ((ctx
->cdata
.algtype
& OP_ALG_AAI_MASK
) ==
210 OP_ALG_AAI_CTR_MOD128
);
211 const bool is_rfc3686
= alg
->caam
.rfc3686
;
216 /* NULL encryption / decryption */
217 if (!ctx
->cdata
.keylen
)
218 return aead_null_set_sh_desc(aead
);
221 * AES-CTR needs to load IV in CONTEXT1 reg
222 * at an offset of 128bits (16bytes)
223 * CONTEXT1[255:128] = IV
230 * CONTEXT1[255:128] = {NONCE, IV, COUNTER}
233 ctx1_iv_off
= 16 + CTR_RFC3686_NONCE_SIZE
;
234 nonce
= (u32
*)((void *)ctx
->key
+ ctx
->adata
.keylen_pad
+
235 ctx
->cdata
.keylen
- CTR_RFC3686_NONCE_SIZE
);
238 data_len
[0] = ctx
->adata
.keylen_pad
;
239 data_len
[1] = ctx
->cdata
.keylen
;
245 * Job Descriptor and Shared Descriptors
246 * must all fit into the 64-word Descriptor h/w Buffer
248 if (desc_inline_query(DESC_AEAD_ENC_LEN
+
249 (is_rfc3686
? DESC_AEAD_CTR_RFC3686_LEN
: 0),
250 AUTHENC_DESC_JOB_IO_LEN
, data_len
, &inl_mask
,
251 ARRAY_SIZE(data_len
)) < 0)
255 ctx
->adata
.key_virt
= ctx
->key
;
257 ctx
->adata
.key_dma
= ctx
->key_dma
;
260 ctx
->cdata
.key_virt
= ctx
->key
+ ctx
->adata
.keylen_pad
;
262 ctx
->cdata
.key_dma
= ctx
->key_dma
+ ctx
->adata
.keylen_pad
;
264 ctx
->adata
.key_inline
= !!(inl_mask
& 1);
265 ctx
->cdata
.key_inline
= !!(inl_mask
& 2);
267 /* aead_encrypt shared descriptor */
268 desc
= ctx
->sh_desc_enc
;
269 cnstr_shdsc_aead_encap(desc
, &ctx
->cdata
, &ctx
->adata
, ivsize
,
270 ctx
->authsize
, is_rfc3686
, nonce
, ctx1_iv_off
,
272 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_enc_dma
,
273 desc_bytes(desc
), DMA_TO_DEVICE
);
277 * Job Descriptor and Shared Descriptors
278 * must all fit into the 64-word Descriptor h/w Buffer
280 if (desc_inline_query(DESC_AEAD_DEC_LEN
+
281 (is_rfc3686
? DESC_AEAD_CTR_RFC3686_LEN
: 0),
282 AUTHENC_DESC_JOB_IO_LEN
, data_len
, &inl_mask
,
283 ARRAY_SIZE(data_len
)) < 0)
287 ctx
->adata
.key_virt
= ctx
->key
;
289 ctx
->adata
.key_dma
= ctx
->key_dma
;
292 ctx
->cdata
.key_virt
= ctx
->key
+ ctx
->adata
.keylen_pad
;
294 ctx
->cdata
.key_dma
= ctx
->key_dma
+ ctx
->adata
.keylen_pad
;
296 ctx
->adata
.key_inline
= !!(inl_mask
& 1);
297 ctx
->cdata
.key_inline
= !!(inl_mask
& 2);
299 /* aead_decrypt shared descriptor */
300 desc
= ctx
->sh_desc_dec
;
301 cnstr_shdsc_aead_decap(desc
, &ctx
->cdata
, &ctx
->adata
, ivsize
,
302 ctx
->authsize
, alg
->caam
.geniv
, is_rfc3686
,
303 nonce
, ctx1_iv_off
, false);
304 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_dec_dma
,
305 desc_bytes(desc
), DMA_TO_DEVICE
);
307 if (!alg
->caam
.geniv
)
311 * Job Descriptor and Shared Descriptors
312 * must all fit into the 64-word Descriptor h/w Buffer
314 if (desc_inline_query(DESC_AEAD_GIVENC_LEN
+
315 (is_rfc3686
? DESC_AEAD_CTR_RFC3686_LEN
: 0),
316 AUTHENC_DESC_JOB_IO_LEN
, data_len
, &inl_mask
,
317 ARRAY_SIZE(data_len
)) < 0)
321 ctx
->adata
.key_virt
= ctx
->key
;
323 ctx
->adata
.key_dma
= ctx
->key_dma
;
326 ctx
->cdata
.key_virt
= ctx
->key
+ ctx
->adata
.keylen_pad
;
328 ctx
->cdata
.key_dma
= ctx
->key_dma
+ ctx
->adata
.keylen_pad
;
330 ctx
->adata
.key_inline
= !!(inl_mask
& 1);
331 ctx
->cdata
.key_inline
= !!(inl_mask
& 2);
333 /* aead_givencrypt shared descriptor */
334 desc
= ctx
->sh_desc_enc
;
335 cnstr_shdsc_aead_givencap(desc
, &ctx
->cdata
, &ctx
->adata
, ivsize
,
336 ctx
->authsize
, is_rfc3686
, nonce
,
338 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_enc_dma
,
339 desc_bytes(desc
), DMA_TO_DEVICE
);
345 static int aead_setauthsize(struct crypto_aead
*authenc
,
346 unsigned int authsize
)
348 struct caam_ctx
*ctx
= crypto_aead_ctx(authenc
);
350 ctx
->authsize
= authsize
;
351 aead_set_sh_desc(authenc
);
356 static int gcm_set_sh_desc(struct crypto_aead
*aead
)
358 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
359 struct device
*jrdev
= ctx
->jrdev
;
361 int rem_bytes
= CAAM_DESC_BYTES_MAX
- GCM_DESC_JOB_IO_LEN
-
364 if (!ctx
->cdata
.keylen
|| !ctx
->authsize
)
368 * AES GCM encrypt shared descriptor
369 * Job Descriptor and Shared Descriptor
370 * must fit into the 64-word Descriptor h/w Buffer
372 if (rem_bytes
>= DESC_GCM_ENC_LEN
) {
373 ctx
->cdata
.key_inline
= true;
374 ctx
->cdata
.key_virt
= ctx
->key
;
376 ctx
->cdata
.key_inline
= false;
377 ctx
->cdata
.key_dma
= ctx
->key_dma
;
380 desc
= ctx
->sh_desc_enc
;
381 cnstr_shdsc_gcm_encap(desc
, &ctx
->cdata
, ctx
->authsize
);
382 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_enc_dma
,
383 desc_bytes(desc
), DMA_TO_DEVICE
);
386 * Job Descriptor and Shared Descriptors
387 * must all fit into the 64-word Descriptor h/w Buffer
389 if (rem_bytes
>= DESC_GCM_DEC_LEN
) {
390 ctx
->cdata
.key_inline
= true;
391 ctx
->cdata
.key_virt
= ctx
->key
;
393 ctx
->cdata
.key_inline
= false;
394 ctx
->cdata
.key_dma
= ctx
->key_dma
;
397 desc
= ctx
->sh_desc_dec
;
398 cnstr_shdsc_gcm_decap(desc
, &ctx
->cdata
, ctx
->authsize
);
399 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_dec_dma
,
400 desc_bytes(desc
), DMA_TO_DEVICE
);
405 static int gcm_setauthsize(struct crypto_aead
*authenc
, unsigned int authsize
)
407 struct caam_ctx
*ctx
= crypto_aead_ctx(authenc
);
409 ctx
->authsize
= authsize
;
410 gcm_set_sh_desc(authenc
);
415 static int rfc4106_set_sh_desc(struct crypto_aead
*aead
)
417 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
418 struct device
*jrdev
= ctx
->jrdev
;
420 int rem_bytes
= CAAM_DESC_BYTES_MAX
- GCM_DESC_JOB_IO_LEN
-
423 if (!ctx
->cdata
.keylen
|| !ctx
->authsize
)
427 * RFC4106 encrypt shared descriptor
428 * Job Descriptor and Shared Descriptor
429 * must fit into the 64-word Descriptor h/w Buffer
431 if (rem_bytes
>= DESC_RFC4106_ENC_LEN
) {
432 ctx
->cdata
.key_inline
= true;
433 ctx
->cdata
.key_virt
= ctx
->key
;
435 ctx
->cdata
.key_inline
= false;
436 ctx
->cdata
.key_dma
= ctx
->key_dma
;
439 desc
= ctx
->sh_desc_enc
;
440 cnstr_shdsc_rfc4106_encap(desc
, &ctx
->cdata
, ctx
->authsize
);
441 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_enc_dma
,
442 desc_bytes(desc
), DMA_TO_DEVICE
);
445 * Job Descriptor and Shared Descriptors
446 * must all fit into the 64-word Descriptor h/w Buffer
448 if (rem_bytes
>= DESC_RFC4106_DEC_LEN
) {
449 ctx
->cdata
.key_inline
= true;
450 ctx
->cdata
.key_virt
= ctx
->key
;
452 ctx
->cdata
.key_inline
= false;
453 ctx
->cdata
.key_dma
= ctx
->key_dma
;
456 desc
= ctx
->sh_desc_dec
;
457 cnstr_shdsc_rfc4106_decap(desc
, &ctx
->cdata
, ctx
->authsize
);
458 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_dec_dma
,
459 desc_bytes(desc
), DMA_TO_DEVICE
);
464 static int rfc4106_setauthsize(struct crypto_aead
*authenc
,
465 unsigned int authsize
)
467 struct caam_ctx
*ctx
= crypto_aead_ctx(authenc
);
469 ctx
->authsize
= authsize
;
470 rfc4106_set_sh_desc(authenc
);
475 static int rfc4543_set_sh_desc(struct crypto_aead
*aead
)
477 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
478 struct device
*jrdev
= ctx
->jrdev
;
480 int rem_bytes
= CAAM_DESC_BYTES_MAX
- GCM_DESC_JOB_IO_LEN
-
483 if (!ctx
->cdata
.keylen
|| !ctx
->authsize
)
487 * RFC4543 encrypt shared descriptor
488 * Job Descriptor and Shared Descriptor
489 * must fit into the 64-word Descriptor h/w Buffer
491 if (rem_bytes
>= DESC_RFC4543_ENC_LEN
) {
492 ctx
->cdata
.key_inline
= true;
493 ctx
->cdata
.key_virt
= ctx
->key
;
495 ctx
->cdata
.key_inline
= false;
496 ctx
->cdata
.key_dma
= ctx
->key_dma
;
499 desc
= ctx
->sh_desc_enc
;
500 cnstr_shdsc_rfc4543_encap(desc
, &ctx
->cdata
, ctx
->authsize
);
501 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_enc_dma
,
502 desc_bytes(desc
), DMA_TO_DEVICE
);
505 * Job Descriptor and Shared Descriptors
506 * must all fit into the 64-word Descriptor h/w Buffer
508 if (rem_bytes
>= DESC_RFC4543_DEC_LEN
) {
509 ctx
->cdata
.key_inline
= true;
510 ctx
->cdata
.key_virt
= ctx
->key
;
512 ctx
->cdata
.key_inline
= false;
513 ctx
->cdata
.key_dma
= ctx
->key_dma
;
516 desc
= ctx
->sh_desc_dec
;
517 cnstr_shdsc_rfc4543_decap(desc
, &ctx
->cdata
, ctx
->authsize
);
518 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_dec_dma
,
519 desc_bytes(desc
), DMA_TO_DEVICE
);
524 static int rfc4543_setauthsize(struct crypto_aead
*authenc
,
525 unsigned int authsize
)
527 struct caam_ctx
*ctx
= crypto_aead_ctx(authenc
);
529 ctx
->authsize
= authsize
;
530 rfc4543_set_sh_desc(authenc
);
535 static int aead_setkey(struct crypto_aead
*aead
,
536 const u8
*key
, unsigned int keylen
)
538 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
539 struct device
*jrdev
= ctx
->jrdev
;
540 struct crypto_authenc_keys keys
;
543 if (crypto_authenc_extractkeys(&keys
, key
, keylen
) != 0)
547 printk(KERN_ERR
"keylen %d enckeylen %d authkeylen %d\n",
548 keys
.authkeylen
+ keys
.enckeylen
, keys
.enckeylen
,
550 print_hex_dump(KERN_ERR
, "key in @"__stringify(__LINE__
)": ",
551 DUMP_PREFIX_ADDRESS
, 16, 4, key
, keylen
, 1);
554 ret
= gen_split_key(ctx
->jrdev
, ctx
->key
, &ctx
->adata
, keys
.authkey
,
555 keys
.authkeylen
, CAAM_MAX_KEY_SIZE
-
561 /* postpend encryption key to auth split key */
562 memcpy(ctx
->key
+ ctx
->adata
.keylen_pad
, keys
.enckey
, keys
.enckeylen
);
563 dma_sync_single_for_device(jrdev
, ctx
->key_dma
, ctx
->adata
.keylen_pad
+
564 keys
.enckeylen
, DMA_TO_DEVICE
);
566 print_hex_dump(KERN_ERR
, "ctx.key@"__stringify(__LINE__
)": ",
567 DUMP_PREFIX_ADDRESS
, 16, 4, ctx
->key
,
568 ctx
->adata
.keylen_pad
+ keys
.enckeylen
, 1);
570 ctx
->cdata
.keylen
= keys
.enckeylen
;
571 return aead_set_sh_desc(aead
);
573 crypto_aead_set_flags(aead
, CRYPTO_TFM_RES_BAD_KEY_LEN
);
577 static int gcm_setkey(struct crypto_aead
*aead
,
578 const u8
*key
, unsigned int keylen
)
580 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
581 struct device
*jrdev
= ctx
->jrdev
;
584 print_hex_dump(KERN_ERR
, "key in @"__stringify(__LINE__
)": ",
585 DUMP_PREFIX_ADDRESS
, 16, 4, key
, keylen
, 1);
588 memcpy(ctx
->key
, key
, keylen
);
589 dma_sync_single_for_device(jrdev
, ctx
->key_dma
, keylen
, DMA_TO_DEVICE
);
590 ctx
->cdata
.keylen
= keylen
;
592 return gcm_set_sh_desc(aead
);
595 static int rfc4106_setkey(struct crypto_aead
*aead
,
596 const u8
*key
, unsigned int keylen
)
598 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
599 struct device
*jrdev
= ctx
->jrdev
;
605 print_hex_dump(KERN_ERR
, "key in @"__stringify(__LINE__
)": ",
606 DUMP_PREFIX_ADDRESS
, 16, 4, key
, keylen
, 1);
609 memcpy(ctx
->key
, key
, keylen
);
612 * The last four bytes of the key material are used as the salt value
613 * in the nonce. Update the AES key length.
615 ctx
->cdata
.keylen
= keylen
- 4;
616 dma_sync_single_for_device(jrdev
, ctx
->key_dma
, ctx
->cdata
.keylen
,
618 return rfc4106_set_sh_desc(aead
);
621 static int rfc4543_setkey(struct crypto_aead
*aead
,
622 const u8
*key
, unsigned int keylen
)
624 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
625 struct device
*jrdev
= ctx
->jrdev
;
631 print_hex_dump(KERN_ERR
, "key in @"__stringify(__LINE__
)": ",
632 DUMP_PREFIX_ADDRESS
, 16, 4, key
, keylen
, 1);
635 memcpy(ctx
->key
, key
, keylen
);
638 * The last four bytes of the key material are used as the salt value
639 * in the nonce. Update the AES key length.
641 ctx
->cdata
.keylen
= keylen
- 4;
642 dma_sync_single_for_device(jrdev
, ctx
->key_dma
, ctx
->cdata
.keylen
,
644 return rfc4543_set_sh_desc(aead
);
647 static int ablkcipher_setkey(struct crypto_ablkcipher
*ablkcipher
,
648 const u8
*key
, unsigned int keylen
)
650 struct caam_ctx
*ctx
= crypto_ablkcipher_ctx(ablkcipher
);
651 struct crypto_tfm
*tfm
= crypto_ablkcipher_tfm(ablkcipher
);
652 const char *alg_name
= crypto_tfm_alg_name(tfm
);
653 struct device
*jrdev
= ctx
->jrdev
;
654 unsigned int ivsize
= crypto_ablkcipher_ivsize(ablkcipher
);
657 const bool ctr_mode
= ((ctx
->cdata
.algtype
& OP_ALG_AAI_MASK
) ==
658 OP_ALG_AAI_CTR_MOD128
);
659 const bool is_rfc3686
= (ctr_mode
&&
660 (strstr(alg_name
, "rfc3686") != NULL
));
662 memcpy(ctx
->key
, key
, keylen
);
664 print_hex_dump(KERN_ERR
, "key in @"__stringify(__LINE__
)": ",
665 DUMP_PREFIX_ADDRESS
, 16, 4, key
, keylen
, 1);
668 * AES-CTR needs to load IV in CONTEXT1 reg
669 * at an offset of 128bits (16bytes)
670 * CONTEXT1[255:128] = IV
677 * | CONTEXT1[255:128] = {NONCE, IV, COUNTER}
678 * | *key = {KEY, NONCE}
681 ctx1_iv_off
= 16 + CTR_RFC3686_NONCE_SIZE
;
682 keylen
-= CTR_RFC3686_NONCE_SIZE
;
685 dma_sync_single_for_device(jrdev
, ctx
->key_dma
, keylen
, DMA_TO_DEVICE
);
686 ctx
->cdata
.keylen
= keylen
;
687 ctx
->cdata
.key_virt
= ctx
->key
;
688 ctx
->cdata
.key_inline
= true;
690 /* ablkcipher_encrypt shared descriptor */
691 desc
= ctx
->sh_desc_enc
;
692 cnstr_shdsc_ablkcipher_encap(desc
, &ctx
->cdata
, ivsize
, is_rfc3686
,
694 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_enc_dma
,
695 desc_bytes(desc
), DMA_TO_DEVICE
);
697 /* ablkcipher_decrypt shared descriptor */
698 desc
= ctx
->sh_desc_dec
;
699 cnstr_shdsc_ablkcipher_decap(desc
, &ctx
->cdata
, ivsize
, is_rfc3686
,
701 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_dec_dma
,
702 desc_bytes(desc
), DMA_TO_DEVICE
);
704 /* ablkcipher_givencrypt shared descriptor */
705 desc
= ctx
->sh_desc_givenc
;
706 cnstr_shdsc_ablkcipher_givencap(desc
, &ctx
->cdata
, ivsize
, is_rfc3686
,
708 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_givenc_dma
,
709 desc_bytes(desc
), DMA_TO_DEVICE
);
714 static int xts_ablkcipher_setkey(struct crypto_ablkcipher
*ablkcipher
,
715 const u8
*key
, unsigned int keylen
)
717 struct caam_ctx
*ctx
= crypto_ablkcipher_ctx(ablkcipher
);
718 struct device
*jrdev
= ctx
->jrdev
;
721 if (keylen
!= 2 * AES_MIN_KEY_SIZE
&& keylen
!= 2 * AES_MAX_KEY_SIZE
) {
722 crypto_ablkcipher_set_flags(ablkcipher
,
723 CRYPTO_TFM_RES_BAD_KEY_LEN
);
724 dev_err(jrdev
, "key size mismatch\n");
728 memcpy(ctx
->key
, key
, keylen
);
729 dma_sync_single_for_device(jrdev
, ctx
->key_dma
, keylen
, DMA_TO_DEVICE
);
730 ctx
->cdata
.keylen
= keylen
;
731 ctx
->cdata
.key_virt
= ctx
->key
;
732 ctx
->cdata
.key_inline
= true;
734 /* xts_ablkcipher_encrypt shared descriptor */
735 desc
= ctx
->sh_desc_enc
;
736 cnstr_shdsc_xts_ablkcipher_encap(desc
, &ctx
->cdata
);
737 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_enc_dma
,
738 desc_bytes(desc
), DMA_TO_DEVICE
);
740 /* xts_ablkcipher_decrypt shared descriptor */
741 desc
= ctx
->sh_desc_dec
;
742 cnstr_shdsc_xts_ablkcipher_decap(desc
, &ctx
->cdata
);
743 dma_sync_single_for_device(jrdev
, ctx
->sh_desc_dec_dma
,
744 desc_bytes(desc
), DMA_TO_DEVICE
);
750 * aead_edesc - s/w-extended aead descriptor
751 * @src_nents: number of segments in input s/w scatterlist
752 * @dst_nents: number of segments in output s/w scatterlist
753 * @sec4_sg_bytes: length of dma mapped sec4_sg space
754 * @sec4_sg_dma: bus physical mapped address of h/w link table
755 * @sec4_sg: pointer to h/w link table
756 * @hw_desc: the h/w job descriptor followed by any referenced link tables
762 dma_addr_t sec4_sg_dma
;
763 struct sec4_sg_entry
*sec4_sg
;
768 * ablkcipher_edesc - s/w-extended ablkcipher descriptor
769 * @src_nents: number of segments in input s/w scatterlist
770 * @dst_nents: number of segments in output s/w scatterlist
771 * @iv_dma: dma address of iv for checking continuity and link table
772 * @sec4_sg_bytes: length of dma mapped sec4_sg space
773 * @sec4_sg_dma: bus physical mapped address of h/w link table
774 * @sec4_sg: pointer to h/w link table
775 * @hw_desc: the h/w job descriptor followed by any referenced link tables
777 struct ablkcipher_edesc
{
782 dma_addr_t sec4_sg_dma
;
783 struct sec4_sg_entry
*sec4_sg
;
787 static void caam_unmap(struct device
*dev
, struct scatterlist
*src
,
788 struct scatterlist
*dst
, int src_nents
,
790 dma_addr_t iv_dma
, int ivsize
, dma_addr_t sec4_sg_dma
,
795 dma_unmap_sg(dev
, src
, src_nents
, DMA_TO_DEVICE
);
796 dma_unmap_sg(dev
, dst
, dst_nents
, DMA_FROM_DEVICE
);
798 dma_unmap_sg(dev
, src
, src_nents
, DMA_BIDIRECTIONAL
);
802 dma_unmap_single(dev
, iv_dma
, ivsize
, DMA_TO_DEVICE
);
804 dma_unmap_single(dev
, sec4_sg_dma
, sec4_sg_bytes
,
808 static void aead_unmap(struct device
*dev
,
809 struct aead_edesc
*edesc
,
810 struct aead_request
*req
)
812 caam_unmap(dev
, req
->src
, req
->dst
,
813 edesc
->src_nents
, edesc
->dst_nents
, 0, 0,
814 edesc
->sec4_sg_dma
, edesc
->sec4_sg_bytes
);
817 static void ablkcipher_unmap(struct device
*dev
,
818 struct ablkcipher_edesc
*edesc
,
819 struct ablkcipher_request
*req
)
821 struct crypto_ablkcipher
*ablkcipher
= crypto_ablkcipher_reqtfm(req
);
822 int ivsize
= crypto_ablkcipher_ivsize(ablkcipher
);
824 caam_unmap(dev
, req
->src
, req
->dst
,
825 edesc
->src_nents
, edesc
->dst_nents
,
826 edesc
->iv_dma
, ivsize
,
827 edesc
->sec4_sg_dma
, edesc
->sec4_sg_bytes
);
830 static void aead_encrypt_done(struct device
*jrdev
, u32
*desc
, u32 err
,
833 struct aead_request
*req
= context
;
834 struct aead_edesc
*edesc
;
837 dev_err(jrdev
, "%s %d: err 0x%x\n", __func__
, __LINE__
, err
);
840 edesc
= container_of(desc
, struct aead_edesc
, hw_desc
[0]);
843 caam_jr_strstatus(jrdev
, err
);
845 aead_unmap(jrdev
, edesc
, req
);
849 aead_request_complete(req
, err
);
852 static void aead_decrypt_done(struct device
*jrdev
, u32
*desc
, u32 err
,
855 struct aead_request
*req
= context
;
856 struct aead_edesc
*edesc
;
859 dev_err(jrdev
, "%s %d: err 0x%x\n", __func__
, __LINE__
, err
);
862 edesc
= container_of(desc
, struct aead_edesc
, hw_desc
[0]);
865 caam_jr_strstatus(jrdev
, err
);
867 aead_unmap(jrdev
, edesc
, req
);
870 * verify hw auth check passed else return -EBADMSG
872 if ((err
& JRSTA_CCBERR_ERRID_MASK
) == JRSTA_CCBERR_ERRID_ICVCHK
)
877 aead_request_complete(req
, err
);
880 static void ablkcipher_encrypt_done(struct device
*jrdev
, u32
*desc
, u32 err
,
883 struct ablkcipher_request
*req
= context
;
884 struct ablkcipher_edesc
*edesc
;
886 struct crypto_ablkcipher
*ablkcipher
= crypto_ablkcipher_reqtfm(req
);
887 int ivsize
= crypto_ablkcipher_ivsize(ablkcipher
);
889 dev_err(jrdev
, "%s %d: err 0x%x\n", __func__
, __LINE__
, err
);
892 edesc
= container_of(desc
, struct ablkcipher_edesc
, hw_desc
[0]);
895 caam_jr_strstatus(jrdev
, err
);
898 print_hex_dump(KERN_ERR
, "dstiv @"__stringify(__LINE__
)": ",
899 DUMP_PREFIX_ADDRESS
, 16, 4, req
->info
,
900 edesc
->src_nents
> 1 ? 100 : ivsize
, 1);
901 dbg_dump_sg(KERN_ERR
, "dst @"__stringify(__LINE__
)": ",
902 DUMP_PREFIX_ADDRESS
, 16, 4, req
->dst
,
903 edesc
->dst_nents
> 1 ? 100 : req
->nbytes
, 1);
906 ablkcipher_unmap(jrdev
, edesc
, req
);
909 ablkcipher_request_complete(req
, err
);
912 static void ablkcipher_decrypt_done(struct device
*jrdev
, u32
*desc
, u32 err
,
915 struct ablkcipher_request
*req
= context
;
916 struct ablkcipher_edesc
*edesc
;
918 struct crypto_ablkcipher
*ablkcipher
= crypto_ablkcipher_reqtfm(req
);
919 int ivsize
= crypto_ablkcipher_ivsize(ablkcipher
);
921 dev_err(jrdev
, "%s %d: err 0x%x\n", __func__
, __LINE__
, err
);
924 edesc
= container_of(desc
, struct ablkcipher_edesc
, hw_desc
[0]);
926 caam_jr_strstatus(jrdev
, err
);
929 print_hex_dump(KERN_ERR
, "dstiv @"__stringify(__LINE__
)": ",
930 DUMP_PREFIX_ADDRESS
, 16, 4, req
->info
,
932 dbg_dump_sg(KERN_ERR
, "dst @"__stringify(__LINE__
)": ",
933 DUMP_PREFIX_ADDRESS
, 16, 4, req
->dst
,
934 edesc
->dst_nents
> 1 ? 100 : req
->nbytes
, 1);
937 ablkcipher_unmap(jrdev
, edesc
, req
);
940 ablkcipher_request_complete(req
, err
);
944 * Fill in aead job descriptor
946 static void init_aead_job(struct aead_request
*req
,
947 struct aead_edesc
*edesc
,
948 bool all_contig
, bool encrypt
)
950 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
951 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
952 int authsize
= ctx
->authsize
;
953 u32
*desc
= edesc
->hw_desc
;
954 u32 out_options
, in_options
;
955 dma_addr_t dst_dma
, src_dma
;
956 int len
, sec4_sg_index
= 0;
960 sh_desc
= encrypt
? ctx
->sh_desc_enc
: ctx
->sh_desc_dec
;
961 ptr
= encrypt
? ctx
->sh_desc_enc_dma
: ctx
->sh_desc_dec_dma
;
963 len
= desc_len(sh_desc
);
964 init_job_desc_shared(desc
, ptr
, len
, HDR_SHARE_DEFER
| HDR_REVERSE
);
967 src_dma
= edesc
->src_nents
? sg_dma_address(req
->src
) : 0;
970 src_dma
= edesc
->sec4_sg_dma
;
971 sec4_sg_index
+= edesc
->src_nents
;
972 in_options
= LDST_SGF
;
975 append_seq_in_ptr(desc
, src_dma
, req
->assoclen
+ req
->cryptlen
,
979 out_options
= in_options
;
981 if (unlikely(req
->src
!= req
->dst
)) {
982 if (edesc
->dst_nents
== 1) {
983 dst_dma
= sg_dma_address(req
->dst
);
985 dst_dma
= edesc
->sec4_sg_dma
+
987 sizeof(struct sec4_sg_entry
);
988 out_options
= LDST_SGF
;
993 append_seq_out_ptr(desc
, dst_dma
,
994 req
->assoclen
+ req
->cryptlen
+ authsize
,
997 append_seq_out_ptr(desc
, dst_dma
,
998 req
->assoclen
+ req
->cryptlen
- authsize
,
1001 /* REG3 = assoclen */
1002 append_math_add_imm_u32(desc
, REG3
, ZERO
, IMM
, req
->assoclen
);
1005 static void init_gcm_job(struct aead_request
*req
,
1006 struct aead_edesc
*edesc
,
1007 bool all_contig
, bool encrypt
)
1009 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
1010 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
1011 unsigned int ivsize
= crypto_aead_ivsize(aead
);
1012 u32
*desc
= edesc
->hw_desc
;
1013 bool generic_gcm
= (ivsize
== 12);
1016 init_aead_job(req
, edesc
, all_contig
, encrypt
);
1018 /* BUG This should not be specific to generic GCM. */
1020 if (encrypt
&& generic_gcm
&& !(req
->assoclen
+ req
->cryptlen
))
1021 last
= FIFOLD_TYPE_LAST1
;
1024 append_cmd(desc
, CMD_FIFO_LOAD
| FIFOLD_CLASS_CLASS1
| IMMEDIATE
|
1025 FIFOLD_TYPE_IV
| FIFOLD_TYPE_FLUSH1
| 12 | last
);
1028 append_data(desc
, ctx
->key
+ ctx
->cdata
.keylen
, 4);
1030 append_data(desc
, req
->iv
, ivsize
);
1031 /* End of blank commands */
1034 static void init_authenc_job(struct aead_request
*req
,
1035 struct aead_edesc
*edesc
,
1036 bool all_contig
, bool encrypt
)
1038 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
1039 struct caam_aead_alg
*alg
= container_of(crypto_aead_alg(aead
),
1040 struct caam_aead_alg
, aead
);
1041 unsigned int ivsize
= crypto_aead_ivsize(aead
);
1042 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
1043 const bool ctr_mode
= ((ctx
->cdata
.algtype
& OP_ALG_AAI_MASK
) ==
1044 OP_ALG_AAI_CTR_MOD128
);
1045 const bool is_rfc3686
= alg
->caam
.rfc3686
;
1046 u32
*desc
= edesc
->hw_desc
;
1050 * AES-CTR needs to load IV in CONTEXT1 reg
1051 * at an offset of 128bits (16bytes)
1052 * CONTEXT1[255:128] = IV
1059 * CONTEXT1[255:128] = {NONCE, IV, COUNTER}
1062 ivoffset
= 16 + CTR_RFC3686_NONCE_SIZE
;
1064 init_aead_job(req
, edesc
, all_contig
, encrypt
);
1066 if (ivsize
&& ((is_rfc3686
&& encrypt
) || !alg
->caam
.geniv
))
1067 append_load_as_imm(desc
, req
->iv
, ivsize
,
1069 LDST_SRCDST_BYTE_CONTEXT
|
1070 (ivoffset
<< LDST_OFFSET_SHIFT
));
1074 * Fill in ablkcipher job descriptor
1076 static void init_ablkcipher_job(u32
*sh_desc
, dma_addr_t ptr
,
1077 struct ablkcipher_edesc
*edesc
,
1078 struct ablkcipher_request
*req
,
1081 struct crypto_ablkcipher
*ablkcipher
= crypto_ablkcipher_reqtfm(req
);
1082 int ivsize
= crypto_ablkcipher_ivsize(ablkcipher
);
1083 u32
*desc
= edesc
->hw_desc
;
1084 u32 out_options
= 0, in_options
;
1085 dma_addr_t dst_dma
, src_dma
;
1086 int len
, sec4_sg_index
= 0;
1089 print_hex_dump(KERN_ERR
, "presciv@"__stringify(__LINE__
)": ",
1090 DUMP_PREFIX_ADDRESS
, 16, 4, req
->info
,
1092 pr_err("asked=%d, nbytes%d\n",
1093 (int)edesc
->src_nents
> 1 ? 100 : req
->nbytes
, req
->nbytes
);
1094 dbg_dump_sg(KERN_ERR
, "src @"__stringify(__LINE__
)": ",
1095 DUMP_PREFIX_ADDRESS
, 16, 4, req
->src
,
1096 edesc
->src_nents
> 1 ? 100 : req
->nbytes
, 1);
1099 len
= desc_len(sh_desc
);
1100 init_job_desc_shared(desc
, ptr
, len
, HDR_SHARE_DEFER
| HDR_REVERSE
);
1103 src_dma
= edesc
->iv_dma
;
1106 src_dma
= edesc
->sec4_sg_dma
;
1107 sec4_sg_index
+= edesc
->src_nents
+ 1;
1108 in_options
= LDST_SGF
;
1110 append_seq_in_ptr(desc
, src_dma
, req
->nbytes
+ ivsize
, in_options
);
1112 if (likely(req
->src
== req
->dst
)) {
1113 if (edesc
->src_nents
== 1 && iv_contig
) {
1114 dst_dma
= sg_dma_address(req
->src
);
1116 dst_dma
= edesc
->sec4_sg_dma
+
1117 sizeof(struct sec4_sg_entry
);
1118 out_options
= LDST_SGF
;
1121 if (edesc
->dst_nents
== 1) {
1122 dst_dma
= sg_dma_address(req
->dst
);
1124 dst_dma
= edesc
->sec4_sg_dma
+
1125 sec4_sg_index
* sizeof(struct sec4_sg_entry
);
1126 out_options
= LDST_SGF
;
1129 append_seq_out_ptr(desc
, dst_dma
, req
->nbytes
, out_options
);
1133 * Fill in ablkcipher givencrypt job descriptor
1135 static void init_ablkcipher_giv_job(u32
*sh_desc
, dma_addr_t ptr
,
1136 struct ablkcipher_edesc
*edesc
,
1137 struct ablkcipher_request
*req
,
1140 struct crypto_ablkcipher
*ablkcipher
= crypto_ablkcipher_reqtfm(req
);
1141 int ivsize
= crypto_ablkcipher_ivsize(ablkcipher
);
1142 u32
*desc
= edesc
->hw_desc
;
1143 u32 out_options
, in_options
;
1144 dma_addr_t dst_dma
, src_dma
;
1145 int len
, sec4_sg_index
= 0;
1148 print_hex_dump(KERN_ERR
, "presciv@" __stringify(__LINE__
) ": ",
1149 DUMP_PREFIX_ADDRESS
, 16, 4, req
->info
,
1151 dbg_dump_sg(KERN_ERR
, "src @" __stringify(__LINE__
) ": ",
1152 DUMP_PREFIX_ADDRESS
, 16, 4, req
->src
,
1153 edesc
->src_nents
> 1 ? 100 : req
->nbytes
, 1);
1156 len
= desc_len(sh_desc
);
1157 init_job_desc_shared(desc
, ptr
, len
, HDR_SHARE_DEFER
| HDR_REVERSE
);
1159 if (edesc
->src_nents
== 1) {
1160 src_dma
= sg_dma_address(req
->src
);
1163 src_dma
= edesc
->sec4_sg_dma
;
1164 sec4_sg_index
+= edesc
->src_nents
;
1165 in_options
= LDST_SGF
;
1167 append_seq_in_ptr(desc
, src_dma
, req
->nbytes
, in_options
);
1170 dst_dma
= edesc
->iv_dma
;
1173 dst_dma
= edesc
->sec4_sg_dma
+
1174 sec4_sg_index
* sizeof(struct sec4_sg_entry
);
1175 out_options
= LDST_SGF
;
1177 append_seq_out_ptr(desc
, dst_dma
, req
->nbytes
+ ivsize
, out_options
);
1181 * allocate and map the aead extended descriptor
1183 static struct aead_edesc
*aead_edesc_alloc(struct aead_request
*req
,
1184 int desc_bytes
, bool *all_contig_ptr
,
1187 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
1188 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
1189 struct device
*jrdev
= ctx
->jrdev
;
1190 gfp_t flags
= (req
->base
.flags
& CRYPTO_TFM_REQ_MAY_SLEEP
) ?
1191 GFP_KERNEL
: GFP_ATOMIC
;
1192 int src_nents
, mapped_src_nents
, dst_nents
= 0, mapped_dst_nents
= 0;
1193 struct aead_edesc
*edesc
;
1194 int sec4_sg_index
, sec4_sg_len
, sec4_sg_bytes
;
1195 unsigned int authsize
= ctx
->authsize
;
1197 if (unlikely(req
->dst
!= req
->src
)) {
1198 src_nents
= sg_nents_for_len(req
->src
, req
->assoclen
+
1200 if (unlikely(src_nents
< 0)) {
1201 dev_err(jrdev
, "Insufficient bytes (%d) in src S/G\n",
1202 req
->assoclen
+ req
->cryptlen
);
1203 return ERR_PTR(src_nents
);
1206 dst_nents
= sg_nents_for_len(req
->dst
, req
->assoclen
+
1208 (encrypt
? authsize
:
1210 if (unlikely(dst_nents
< 0)) {
1211 dev_err(jrdev
, "Insufficient bytes (%d) in dst S/G\n",
1212 req
->assoclen
+ req
->cryptlen
+
1213 (encrypt
? authsize
: (-authsize
)));
1214 return ERR_PTR(dst_nents
);
1217 src_nents
= sg_nents_for_len(req
->src
, req
->assoclen
+
1219 (encrypt
? authsize
: 0));
1220 if (unlikely(src_nents
< 0)) {
1221 dev_err(jrdev
, "Insufficient bytes (%d) in src S/G\n",
1222 req
->assoclen
+ req
->cryptlen
+
1223 (encrypt
? authsize
: 0));
1224 return ERR_PTR(src_nents
);
1228 if (likely(req
->src
== req
->dst
)) {
1229 mapped_src_nents
= dma_map_sg(jrdev
, req
->src
, src_nents
,
1231 if (unlikely(!mapped_src_nents
)) {
1232 dev_err(jrdev
, "unable to map source\n");
1233 return ERR_PTR(-ENOMEM
);
1236 /* Cover also the case of null (zero length) input data */
1238 mapped_src_nents
= dma_map_sg(jrdev
, req
->src
,
1239 src_nents
, DMA_TO_DEVICE
);
1240 if (unlikely(!mapped_src_nents
)) {
1241 dev_err(jrdev
, "unable to map source\n");
1242 return ERR_PTR(-ENOMEM
);
1245 mapped_src_nents
= 0;
1248 mapped_dst_nents
= dma_map_sg(jrdev
, req
->dst
, dst_nents
,
1250 if (unlikely(!mapped_dst_nents
)) {
1251 dev_err(jrdev
, "unable to map destination\n");
1252 dma_unmap_sg(jrdev
, req
->src
, src_nents
, DMA_TO_DEVICE
);
1253 return ERR_PTR(-ENOMEM
);
1257 sec4_sg_len
= mapped_src_nents
> 1 ? mapped_src_nents
: 0;
1258 sec4_sg_len
+= mapped_dst_nents
> 1 ? mapped_dst_nents
: 0;
1259 sec4_sg_bytes
= sec4_sg_len
* sizeof(struct sec4_sg_entry
);
1261 /* allocate space for base edesc and hw desc commands, link tables */
1262 edesc
= kzalloc(sizeof(*edesc
) + desc_bytes
+ sec4_sg_bytes
,
1265 caam_unmap(jrdev
, req
->src
, req
->dst
, src_nents
, dst_nents
, 0,
1267 return ERR_PTR(-ENOMEM
);
1270 edesc
->src_nents
= src_nents
;
1271 edesc
->dst_nents
= dst_nents
;
1272 edesc
->sec4_sg
= (void *)edesc
+ sizeof(struct aead_edesc
) +
1274 *all_contig_ptr
= !(mapped_src_nents
> 1);
1277 if (mapped_src_nents
> 1) {
1278 sg_to_sec4_sg_last(req
->src
, mapped_src_nents
,
1279 edesc
->sec4_sg
+ sec4_sg_index
, 0);
1280 sec4_sg_index
+= mapped_src_nents
;
1282 if (mapped_dst_nents
> 1) {
1283 sg_to_sec4_sg_last(req
->dst
, mapped_dst_nents
,
1284 edesc
->sec4_sg
+ sec4_sg_index
, 0);
1290 edesc
->sec4_sg_dma
= dma_map_single(jrdev
, edesc
->sec4_sg
,
1291 sec4_sg_bytes
, DMA_TO_DEVICE
);
1292 if (dma_mapping_error(jrdev
, edesc
->sec4_sg_dma
)) {
1293 dev_err(jrdev
, "unable to map S/G table\n");
1294 aead_unmap(jrdev
, edesc
, req
);
1296 return ERR_PTR(-ENOMEM
);
1299 edesc
->sec4_sg_bytes
= sec4_sg_bytes
;
1304 static int gcm_encrypt(struct aead_request
*req
)
1306 struct aead_edesc
*edesc
;
1307 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
1308 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
1309 struct device
*jrdev
= ctx
->jrdev
;
1314 /* allocate extended descriptor */
1315 edesc
= aead_edesc_alloc(req
, GCM_DESC_JOB_IO_LEN
, &all_contig
, true);
1317 return PTR_ERR(edesc
);
1319 /* Create and submit job descriptor */
1320 init_gcm_job(req
, edesc
, all_contig
, true);
1322 print_hex_dump(KERN_ERR
, "aead jobdesc@"__stringify(__LINE__
)": ",
1323 DUMP_PREFIX_ADDRESS
, 16, 4, edesc
->hw_desc
,
1324 desc_bytes(edesc
->hw_desc
), 1);
1327 desc
= edesc
->hw_desc
;
1328 ret
= caam_jr_enqueue(jrdev
, desc
, aead_encrypt_done
, req
);
1332 aead_unmap(jrdev
, edesc
, req
);
1339 static int ipsec_gcm_encrypt(struct aead_request
*req
)
1341 if (req
->assoclen
< 8)
1344 return gcm_encrypt(req
);
1347 static int aead_encrypt(struct aead_request
*req
)
1349 struct aead_edesc
*edesc
;
1350 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
1351 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
1352 struct device
*jrdev
= ctx
->jrdev
;
1357 /* allocate extended descriptor */
1358 edesc
= aead_edesc_alloc(req
, AUTHENC_DESC_JOB_IO_LEN
,
1361 return PTR_ERR(edesc
);
1363 /* Create and submit job descriptor */
1364 init_authenc_job(req
, edesc
, all_contig
, true);
1366 print_hex_dump(KERN_ERR
, "aead jobdesc@"__stringify(__LINE__
)": ",
1367 DUMP_PREFIX_ADDRESS
, 16, 4, edesc
->hw_desc
,
1368 desc_bytes(edesc
->hw_desc
), 1);
1371 desc
= edesc
->hw_desc
;
1372 ret
= caam_jr_enqueue(jrdev
, desc
, aead_encrypt_done
, req
);
1376 aead_unmap(jrdev
, edesc
, req
);
1383 static int gcm_decrypt(struct aead_request
*req
)
1385 struct aead_edesc
*edesc
;
1386 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
1387 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
1388 struct device
*jrdev
= ctx
->jrdev
;
1393 /* allocate extended descriptor */
1394 edesc
= aead_edesc_alloc(req
, GCM_DESC_JOB_IO_LEN
, &all_contig
, false);
1396 return PTR_ERR(edesc
);
1398 /* Create and submit job descriptor*/
1399 init_gcm_job(req
, edesc
, all_contig
, false);
1401 print_hex_dump(KERN_ERR
, "aead jobdesc@"__stringify(__LINE__
)": ",
1402 DUMP_PREFIX_ADDRESS
, 16, 4, edesc
->hw_desc
,
1403 desc_bytes(edesc
->hw_desc
), 1);
1406 desc
= edesc
->hw_desc
;
1407 ret
= caam_jr_enqueue(jrdev
, desc
, aead_decrypt_done
, req
);
1411 aead_unmap(jrdev
, edesc
, req
);
1418 static int ipsec_gcm_decrypt(struct aead_request
*req
)
1420 if (req
->assoclen
< 8)
1423 return gcm_decrypt(req
);
1426 static int aead_decrypt(struct aead_request
*req
)
1428 struct aead_edesc
*edesc
;
1429 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
1430 struct caam_ctx
*ctx
= crypto_aead_ctx(aead
);
1431 struct device
*jrdev
= ctx
->jrdev
;
1437 dbg_dump_sg(KERN_ERR
, "dec src@"__stringify(__LINE__
)": ",
1438 DUMP_PREFIX_ADDRESS
, 16, 4, req
->src
,
1439 req
->assoclen
+ req
->cryptlen
, 1);
1442 /* allocate extended descriptor */
1443 edesc
= aead_edesc_alloc(req
, AUTHENC_DESC_JOB_IO_LEN
,
1444 &all_contig
, false);
1446 return PTR_ERR(edesc
);
1448 /* Create and submit job descriptor*/
1449 init_authenc_job(req
, edesc
, all_contig
, false);
1451 print_hex_dump(KERN_ERR
, "aead jobdesc@"__stringify(__LINE__
)": ",
1452 DUMP_PREFIX_ADDRESS
, 16, 4, edesc
->hw_desc
,
1453 desc_bytes(edesc
->hw_desc
), 1);
1456 desc
= edesc
->hw_desc
;
1457 ret
= caam_jr_enqueue(jrdev
, desc
, aead_decrypt_done
, req
);
1461 aead_unmap(jrdev
, edesc
, req
);
1469 * allocate and map the ablkcipher extended descriptor for ablkcipher
1471 static struct ablkcipher_edesc
*ablkcipher_edesc_alloc(struct ablkcipher_request
1472 *req
, int desc_bytes
,
1473 bool *iv_contig_out
)
1475 struct crypto_ablkcipher
*ablkcipher
= crypto_ablkcipher_reqtfm(req
);
1476 struct caam_ctx
*ctx
= crypto_ablkcipher_ctx(ablkcipher
);
1477 struct device
*jrdev
= ctx
->jrdev
;
1478 gfp_t flags
= (req
->base
.flags
& CRYPTO_TFM_REQ_MAY_SLEEP
) ?
1479 GFP_KERNEL
: GFP_ATOMIC
;
1480 int src_nents
, mapped_src_nents
, dst_nents
= 0, mapped_dst_nents
= 0;
1481 struct ablkcipher_edesc
*edesc
;
1482 dma_addr_t iv_dma
= 0;
1484 int ivsize
= crypto_ablkcipher_ivsize(ablkcipher
);
1485 int dst_sg_idx
, sec4_sg_ents
, sec4_sg_bytes
;
1487 src_nents
= sg_nents_for_len(req
->src
, req
->nbytes
);
1488 if (unlikely(src_nents
< 0)) {
1489 dev_err(jrdev
, "Insufficient bytes (%d) in src S/G\n",
1491 return ERR_PTR(src_nents
);
1494 if (req
->dst
!= req
->src
) {
1495 dst_nents
= sg_nents_for_len(req
->dst
, req
->nbytes
);
1496 if (unlikely(dst_nents
< 0)) {
1497 dev_err(jrdev
, "Insufficient bytes (%d) in dst S/G\n",
1499 return ERR_PTR(dst_nents
);
1503 if (likely(req
->src
== req
->dst
)) {
1504 mapped_src_nents
= dma_map_sg(jrdev
, req
->src
, src_nents
,
1506 if (unlikely(!mapped_src_nents
)) {
1507 dev_err(jrdev
, "unable to map source\n");
1508 return ERR_PTR(-ENOMEM
);
1511 mapped_src_nents
= dma_map_sg(jrdev
, req
->src
, src_nents
,
1513 if (unlikely(!mapped_src_nents
)) {
1514 dev_err(jrdev
, "unable to map source\n");
1515 return ERR_PTR(-ENOMEM
);
1518 mapped_dst_nents
= dma_map_sg(jrdev
, req
->dst
, dst_nents
,
1520 if (unlikely(!mapped_dst_nents
)) {
1521 dev_err(jrdev
, "unable to map destination\n");
1522 dma_unmap_sg(jrdev
, req
->src
, src_nents
, DMA_TO_DEVICE
);
1523 return ERR_PTR(-ENOMEM
);
1527 iv_dma
= dma_map_single(jrdev
, req
->info
, ivsize
, DMA_TO_DEVICE
);
1528 if (dma_mapping_error(jrdev
, iv_dma
)) {
1529 dev_err(jrdev
, "unable to map IV\n");
1530 caam_unmap(jrdev
, req
->src
, req
->dst
, src_nents
, dst_nents
, 0,
1532 return ERR_PTR(-ENOMEM
);
1535 if (mapped_src_nents
== 1 &&
1536 iv_dma
+ ivsize
== sg_dma_address(req
->src
)) {
1541 sec4_sg_ents
= 1 + mapped_src_nents
;
1543 dst_sg_idx
= sec4_sg_ents
;
1544 sec4_sg_ents
+= mapped_dst_nents
> 1 ? mapped_dst_nents
: 0;
1545 sec4_sg_bytes
= sec4_sg_ents
* sizeof(struct sec4_sg_entry
);
1547 /* allocate space for base edesc and hw desc commands, link tables */
1548 edesc
= kzalloc(sizeof(*edesc
) + desc_bytes
+ sec4_sg_bytes
,
1551 dev_err(jrdev
, "could not allocate extended descriptor\n");
1552 caam_unmap(jrdev
, req
->src
, req
->dst
, src_nents
, dst_nents
,
1553 iv_dma
, ivsize
, 0, 0);
1554 return ERR_PTR(-ENOMEM
);
1557 edesc
->src_nents
= src_nents
;
1558 edesc
->dst_nents
= dst_nents
;
1559 edesc
->sec4_sg_bytes
= sec4_sg_bytes
;
1560 edesc
->sec4_sg
= (void *)edesc
+ sizeof(struct ablkcipher_edesc
) +
1564 dma_to_sec4_sg_one(edesc
->sec4_sg
, iv_dma
, ivsize
, 0);
1565 sg_to_sec4_sg_last(req
->src
, mapped_src_nents
,
1566 edesc
->sec4_sg
+ 1, 0);
1569 if (mapped_dst_nents
> 1) {
1570 sg_to_sec4_sg_last(req
->dst
, mapped_dst_nents
,
1571 edesc
->sec4_sg
+ dst_sg_idx
, 0);
1574 edesc
->sec4_sg_dma
= dma_map_single(jrdev
, edesc
->sec4_sg
,
1575 sec4_sg_bytes
, DMA_TO_DEVICE
);
1576 if (dma_mapping_error(jrdev
, edesc
->sec4_sg_dma
)) {
1577 dev_err(jrdev
, "unable to map S/G table\n");
1578 caam_unmap(jrdev
, req
->src
, req
->dst
, src_nents
, dst_nents
,
1579 iv_dma
, ivsize
, 0, 0);
1581 return ERR_PTR(-ENOMEM
);
1584 edesc
->iv_dma
= iv_dma
;
1587 print_hex_dump(KERN_ERR
, "ablkcipher sec4_sg@"__stringify(__LINE__
)": ",
1588 DUMP_PREFIX_ADDRESS
, 16, 4, edesc
->sec4_sg
,
1592 *iv_contig_out
= in_contig
;
1596 static int ablkcipher_encrypt(struct ablkcipher_request
*req
)
1598 struct ablkcipher_edesc
*edesc
;
1599 struct crypto_ablkcipher
*ablkcipher
= crypto_ablkcipher_reqtfm(req
);
1600 struct caam_ctx
*ctx
= crypto_ablkcipher_ctx(ablkcipher
);
1601 struct device
*jrdev
= ctx
->jrdev
;
1606 /* allocate extended descriptor */
1607 edesc
= ablkcipher_edesc_alloc(req
, DESC_JOB_IO_LEN
*
1608 CAAM_CMD_SZ
, &iv_contig
);
1610 return PTR_ERR(edesc
);
1612 /* Create and submit job descriptor*/
1613 init_ablkcipher_job(ctx
->sh_desc_enc
,
1614 ctx
->sh_desc_enc_dma
, edesc
, req
, iv_contig
);
1616 print_hex_dump(KERN_ERR
, "ablkcipher jobdesc@"__stringify(__LINE__
)": ",
1617 DUMP_PREFIX_ADDRESS
, 16, 4, edesc
->hw_desc
,
1618 desc_bytes(edesc
->hw_desc
), 1);
1620 desc
= edesc
->hw_desc
;
1621 ret
= caam_jr_enqueue(jrdev
, desc
, ablkcipher_encrypt_done
, req
);
1626 ablkcipher_unmap(jrdev
, edesc
, req
);
1633 static int ablkcipher_decrypt(struct ablkcipher_request
*req
)
1635 struct ablkcipher_edesc
*edesc
;
1636 struct crypto_ablkcipher
*ablkcipher
= crypto_ablkcipher_reqtfm(req
);
1637 struct caam_ctx
*ctx
= crypto_ablkcipher_ctx(ablkcipher
);
1638 struct device
*jrdev
= ctx
->jrdev
;
1643 /* allocate extended descriptor */
1644 edesc
= ablkcipher_edesc_alloc(req
, DESC_JOB_IO_LEN
*
1645 CAAM_CMD_SZ
, &iv_contig
);
1647 return PTR_ERR(edesc
);
1649 /* Create and submit job descriptor*/
1650 init_ablkcipher_job(ctx
->sh_desc_dec
,
1651 ctx
->sh_desc_dec_dma
, edesc
, req
, iv_contig
);
1652 desc
= edesc
->hw_desc
;
1654 print_hex_dump(KERN_ERR
, "ablkcipher jobdesc@"__stringify(__LINE__
)": ",
1655 DUMP_PREFIX_ADDRESS
, 16, 4, edesc
->hw_desc
,
1656 desc_bytes(edesc
->hw_desc
), 1);
1659 ret
= caam_jr_enqueue(jrdev
, desc
, ablkcipher_decrypt_done
, req
);
1663 ablkcipher_unmap(jrdev
, edesc
, req
);
1671 * allocate and map the ablkcipher extended descriptor
1672 * for ablkcipher givencrypt
1674 static struct ablkcipher_edesc
*ablkcipher_giv_edesc_alloc(
1675 struct skcipher_givcrypt_request
*greq
,
1677 bool *iv_contig_out
)
1679 struct ablkcipher_request
*req
= &greq
->creq
;
1680 struct crypto_ablkcipher
*ablkcipher
= crypto_ablkcipher_reqtfm(req
);
1681 struct caam_ctx
*ctx
= crypto_ablkcipher_ctx(ablkcipher
);
1682 struct device
*jrdev
= ctx
->jrdev
;
1683 gfp_t flags
= (req
->base
.flags
& CRYPTO_TFM_REQ_MAY_SLEEP
) ?
1684 GFP_KERNEL
: GFP_ATOMIC
;
1685 int src_nents
, mapped_src_nents
, dst_nents
, mapped_dst_nents
;
1686 struct ablkcipher_edesc
*edesc
;
1687 dma_addr_t iv_dma
= 0;
1689 int ivsize
= crypto_ablkcipher_ivsize(ablkcipher
);
1690 int dst_sg_idx
, sec4_sg_ents
, sec4_sg_bytes
;
1692 src_nents
= sg_nents_for_len(req
->src
, req
->nbytes
);
1693 if (unlikely(src_nents
< 0)) {
1694 dev_err(jrdev
, "Insufficient bytes (%d) in src S/G\n",
1696 return ERR_PTR(src_nents
);
1699 if (likely(req
->src
== req
->dst
)) {
1700 mapped_src_nents
= dma_map_sg(jrdev
, req
->src
, src_nents
,
1702 if (unlikely(!mapped_src_nents
)) {
1703 dev_err(jrdev
, "unable to map source\n");
1704 return ERR_PTR(-ENOMEM
);
1707 dst_nents
= src_nents
;
1708 mapped_dst_nents
= src_nents
;
1710 mapped_src_nents
= dma_map_sg(jrdev
, req
->src
, src_nents
,
1712 if (unlikely(!mapped_src_nents
)) {
1713 dev_err(jrdev
, "unable to map source\n");
1714 return ERR_PTR(-ENOMEM
);
1717 dst_nents
= sg_nents_for_len(req
->dst
, req
->nbytes
);
1718 if (unlikely(dst_nents
< 0)) {
1719 dev_err(jrdev
, "Insufficient bytes (%d) in dst S/G\n",
1721 return ERR_PTR(dst_nents
);
1724 mapped_dst_nents
= dma_map_sg(jrdev
, req
->dst
, dst_nents
,
1726 if (unlikely(!mapped_dst_nents
)) {
1727 dev_err(jrdev
, "unable to map destination\n");
1728 dma_unmap_sg(jrdev
, req
->src
, src_nents
, DMA_TO_DEVICE
);
1729 return ERR_PTR(-ENOMEM
);
1734 * Check if iv can be contiguous with source and destination.
1735 * If so, include it. If not, create scatterlist.
1737 iv_dma
= dma_map_single(jrdev
, greq
->giv
, ivsize
, DMA_TO_DEVICE
);
1738 if (dma_mapping_error(jrdev
, iv_dma
)) {
1739 dev_err(jrdev
, "unable to map IV\n");
1740 caam_unmap(jrdev
, req
->src
, req
->dst
, src_nents
, dst_nents
, 0,
1742 return ERR_PTR(-ENOMEM
);
1745 sec4_sg_ents
= mapped_src_nents
> 1 ? mapped_src_nents
: 0;
1746 dst_sg_idx
= sec4_sg_ents
;
1747 if (mapped_dst_nents
== 1 &&
1748 iv_dma
+ ivsize
== sg_dma_address(req
->dst
)) {
1752 sec4_sg_ents
+= 1 + mapped_dst_nents
;
1755 /* allocate space for base edesc and hw desc commands, link tables */
1756 sec4_sg_bytes
= sec4_sg_ents
* sizeof(struct sec4_sg_entry
);
1757 edesc
= kzalloc(sizeof(*edesc
) + desc_bytes
+ sec4_sg_bytes
,
1760 dev_err(jrdev
, "could not allocate extended descriptor\n");
1761 caam_unmap(jrdev
, req
->src
, req
->dst
, src_nents
, dst_nents
,
1762 iv_dma
, ivsize
, 0, 0);
1763 return ERR_PTR(-ENOMEM
);
1766 edesc
->src_nents
= src_nents
;
1767 edesc
->dst_nents
= dst_nents
;
1768 edesc
->sec4_sg_bytes
= sec4_sg_bytes
;
1769 edesc
->sec4_sg
= (void *)edesc
+ sizeof(struct ablkcipher_edesc
) +
1772 if (mapped_src_nents
> 1)
1773 sg_to_sec4_sg_last(req
->src
, mapped_src_nents
, edesc
->sec4_sg
,
1777 dma_to_sec4_sg_one(edesc
->sec4_sg
+ dst_sg_idx
,
1779 sg_to_sec4_sg_last(req
->dst
, mapped_dst_nents
,
1780 edesc
->sec4_sg
+ dst_sg_idx
+ 1, 0);
1783 edesc
->sec4_sg_dma
= dma_map_single(jrdev
, edesc
->sec4_sg
,
1784 sec4_sg_bytes
, DMA_TO_DEVICE
);
1785 if (dma_mapping_error(jrdev
, edesc
->sec4_sg_dma
)) {
1786 dev_err(jrdev
, "unable to map S/G table\n");
1787 caam_unmap(jrdev
, req
->src
, req
->dst
, src_nents
, dst_nents
,
1788 iv_dma
, ivsize
, 0, 0);
1790 return ERR_PTR(-ENOMEM
);
1792 edesc
->iv_dma
= iv_dma
;
1795 print_hex_dump(KERN_ERR
,
1796 "ablkcipher sec4_sg@" __stringify(__LINE__
) ": ",
1797 DUMP_PREFIX_ADDRESS
, 16, 4, edesc
->sec4_sg
,
1801 *iv_contig_out
= out_contig
;
1805 static int ablkcipher_givencrypt(struct skcipher_givcrypt_request
*creq
)
1807 struct ablkcipher_request
*req
= &creq
->creq
;
1808 struct ablkcipher_edesc
*edesc
;
1809 struct crypto_ablkcipher
*ablkcipher
= crypto_ablkcipher_reqtfm(req
);
1810 struct caam_ctx
*ctx
= crypto_ablkcipher_ctx(ablkcipher
);
1811 struct device
*jrdev
= ctx
->jrdev
;
1812 bool iv_contig
= false;
1816 /* allocate extended descriptor */
1817 edesc
= ablkcipher_giv_edesc_alloc(creq
, DESC_JOB_IO_LEN
*
1818 CAAM_CMD_SZ
, &iv_contig
);
1820 return PTR_ERR(edesc
);
1822 /* Create and submit job descriptor*/
1823 init_ablkcipher_giv_job(ctx
->sh_desc_givenc
, ctx
->sh_desc_givenc_dma
,
1824 edesc
, req
, iv_contig
);
1826 print_hex_dump(KERN_ERR
,
1827 "ablkcipher jobdesc@" __stringify(__LINE__
) ": ",
1828 DUMP_PREFIX_ADDRESS
, 16, 4, edesc
->hw_desc
,
1829 desc_bytes(edesc
->hw_desc
), 1);
1831 desc
= edesc
->hw_desc
;
1832 ret
= caam_jr_enqueue(jrdev
, desc
, ablkcipher_encrypt_done
, req
);
1837 ablkcipher_unmap(jrdev
, edesc
, req
);
1844 #define template_aead template_u.aead
1845 #define template_ablkcipher template_u.ablkcipher
1846 struct caam_alg_template
{
1847 char name
[CRYPTO_MAX_ALG_NAME
];
1848 char driver_name
[CRYPTO_MAX_ALG_NAME
];
1849 unsigned int blocksize
;
1852 struct ablkcipher_alg ablkcipher
;
1854 u32 class1_alg_type
;
1855 u32 class2_alg_type
;
1858 static struct caam_alg_template driver_algs
[] = {
1859 /* ablkcipher descriptor */
1862 .driver_name
= "cbc-aes-caam",
1863 .blocksize
= AES_BLOCK_SIZE
,
1864 .type
= CRYPTO_ALG_TYPE_GIVCIPHER
,
1865 .template_ablkcipher
= {
1866 .setkey
= ablkcipher_setkey
,
1867 .encrypt
= ablkcipher_encrypt
,
1868 .decrypt
= ablkcipher_decrypt
,
1869 .givencrypt
= ablkcipher_givencrypt
,
1870 .geniv
= "<built-in>",
1871 .min_keysize
= AES_MIN_KEY_SIZE
,
1872 .max_keysize
= AES_MAX_KEY_SIZE
,
1873 .ivsize
= AES_BLOCK_SIZE
,
1875 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CBC
,
1878 .name
= "cbc(des3_ede)",
1879 .driver_name
= "cbc-3des-caam",
1880 .blocksize
= DES3_EDE_BLOCK_SIZE
,
1881 .type
= CRYPTO_ALG_TYPE_GIVCIPHER
,
1882 .template_ablkcipher
= {
1883 .setkey
= ablkcipher_setkey
,
1884 .encrypt
= ablkcipher_encrypt
,
1885 .decrypt
= ablkcipher_decrypt
,
1886 .givencrypt
= ablkcipher_givencrypt
,
1887 .geniv
= "<built-in>",
1888 .min_keysize
= DES3_EDE_KEY_SIZE
,
1889 .max_keysize
= DES3_EDE_KEY_SIZE
,
1890 .ivsize
= DES3_EDE_BLOCK_SIZE
,
1892 .class1_alg_type
= OP_ALG_ALGSEL_3DES
| OP_ALG_AAI_CBC
,
1896 .driver_name
= "cbc-des-caam",
1897 .blocksize
= DES_BLOCK_SIZE
,
1898 .type
= CRYPTO_ALG_TYPE_GIVCIPHER
,
1899 .template_ablkcipher
= {
1900 .setkey
= ablkcipher_setkey
,
1901 .encrypt
= ablkcipher_encrypt
,
1902 .decrypt
= ablkcipher_decrypt
,
1903 .givencrypt
= ablkcipher_givencrypt
,
1904 .geniv
= "<built-in>",
1905 .min_keysize
= DES_KEY_SIZE
,
1906 .max_keysize
= DES_KEY_SIZE
,
1907 .ivsize
= DES_BLOCK_SIZE
,
1909 .class1_alg_type
= OP_ALG_ALGSEL_DES
| OP_ALG_AAI_CBC
,
1913 .driver_name
= "ctr-aes-caam",
1915 .type
= CRYPTO_ALG_TYPE_ABLKCIPHER
,
1916 .template_ablkcipher
= {
1917 .setkey
= ablkcipher_setkey
,
1918 .encrypt
= ablkcipher_encrypt
,
1919 .decrypt
= ablkcipher_decrypt
,
1921 .min_keysize
= AES_MIN_KEY_SIZE
,
1922 .max_keysize
= AES_MAX_KEY_SIZE
,
1923 .ivsize
= AES_BLOCK_SIZE
,
1925 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CTR_MOD128
,
1928 .name
= "rfc3686(ctr(aes))",
1929 .driver_name
= "rfc3686-ctr-aes-caam",
1931 .type
= CRYPTO_ALG_TYPE_GIVCIPHER
,
1932 .template_ablkcipher
= {
1933 .setkey
= ablkcipher_setkey
,
1934 .encrypt
= ablkcipher_encrypt
,
1935 .decrypt
= ablkcipher_decrypt
,
1936 .givencrypt
= ablkcipher_givencrypt
,
1937 .geniv
= "<built-in>",
1938 .min_keysize
= AES_MIN_KEY_SIZE
+
1939 CTR_RFC3686_NONCE_SIZE
,
1940 .max_keysize
= AES_MAX_KEY_SIZE
+
1941 CTR_RFC3686_NONCE_SIZE
,
1942 .ivsize
= CTR_RFC3686_IV_SIZE
,
1944 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CTR_MOD128
,
1948 .driver_name
= "xts-aes-caam",
1949 .blocksize
= AES_BLOCK_SIZE
,
1950 .type
= CRYPTO_ALG_TYPE_ABLKCIPHER
,
1951 .template_ablkcipher
= {
1952 .setkey
= xts_ablkcipher_setkey
,
1953 .encrypt
= ablkcipher_encrypt
,
1954 .decrypt
= ablkcipher_decrypt
,
1956 .min_keysize
= 2 * AES_MIN_KEY_SIZE
,
1957 .max_keysize
= 2 * AES_MAX_KEY_SIZE
,
1958 .ivsize
= AES_BLOCK_SIZE
,
1960 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_XTS
,
1964 static struct caam_aead_alg driver_aeads
[] = {
1968 .cra_name
= "rfc4106(gcm(aes))",
1969 .cra_driver_name
= "rfc4106-gcm-aes-caam",
1972 .setkey
= rfc4106_setkey
,
1973 .setauthsize
= rfc4106_setauthsize
,
1974 .encrypt
= ipsec_gcm_encrypt
,
1975 .decrypt
= ipsec_gcm_decrypt
,
1977 .maxauthsize
= AES_BLOCK_SIZE
,
1980 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_GCM
,
1986 .cra_name
= "rfc4543(gcm(aes))",
1987 .cra_driver_name
= "rfc4543-gcm-aes-caam",
1990 .setkey
= rfc4543_setkey
,
1991 .setauthsize
= rfc4543_setauthsize
,
1992 .encrypt
= ipsec_gcm_encrypt
,
1993 .decrypt
= ipsec_gcm_decrypt
,
1995 .maxauthsize
= AES_BLOCK_SIZE
,
1998 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_GCM
,
2001 /* Galois Counter Mode */
2005 .cra_name
= "gcm(aes)",
2006 .cra_driver_name
= "gcm-aes-caam",
2009 .setkey
= gcm_setkey
,
2010 .setauthsize
= gcm_setauthsize
,
2011 .encrypt
= gcm_encrypt
,
2012 .decrypt
= gcm_decrypt
,
2014 .maxauthsize
= AES_BLOCK_SIZE
,
2017 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_GCM
,
2020 /* single-pass ipsec_esp descriptor */
2024 .cra_name
= "authenc(hmac(md5),"
2025 "ecb(cipher_null))",
2026 .cra_driver_name
= "authenc-hmac-md5-"
2027 "ecb-cipher_null-caam",
2028 .cra_blocksize
= NULL_BLOCK_SIZE
,
2030 .setkey
= aead_setkey
,
2031 .setauthsize
= aead_setauthsize
,
2032 .encrypt
= aead_encrypt
,
2033 .decrypt
= aead_decrypt
,
2034 .ivsize
= NULL_IV_SIZE
,
2035 .maxauthsize
= MD5_DIGEST_SIZE
,
2038 .class2_alg_type
= OP_ALG_ALGSEL_MD5
|
2039 OP_ALG_AAI_HMAC_PRECOMP
,
2045 .cra_name
= "authenc(hmac(sha1),"
2046 "ecb(cipher_null))",
2047 .cra_driver_name
= "authenc-hmac-sha1-"
2048 "ecb-cipher_null-caam",
2049 .cra_blocksize
= NULL_BLOCK_SIZE
,
2051 .setkey
= aead_setkey
,
2052 .setauthsize
= aead_setauthsize
,
2053 .encrypt
= aead_encrypt
,
2054 .decrypt
= aead_decrypt
,
2055 .ivsize
= NULL_IV_SIZE
,
2056 .maxauthsize
= SHA1_DIGEST_SIZE
,
2059 .class2_alg_type
= OP_ALG_ALGSEL_SHA1
|
2060 OP_ALG_AAI_HMAC_PRECOMP
,
2066 .cra_name
= "authenc(hmac(sha224),"
2067 "ecb(cipher_null))",
2068 .cra_driver_name
= "authenc-hmac-sha224-"
2069 "ecb-cipher_null-caam",
2070 .cra_blocksize
= NULL_BLOCK_SIZE
,
2072 .setkey
= aead_setkey
,
2073 .setauthsize
= aead_setauthsize
,
2074 .encrypt
= aead_encrypt
,
2075 .decrypt
= aead_decrypt
,
2076 .ivsize
= NULL_IV_SIZE
,
2077 .maxauthsize
= SHA224_DIGEST_SIZE
,
2080 .class2_alg_type
= OP_ALG_ALGSEL_SHA224
|
2081 OP_ALG_AAI_HMAC_PRECOMP
,
2087 .cra_name
= "authenc(hmac(sha256),"
2088 "ecb(cipher_null))",
2089 .cra_driver_name
= "authenc-hmac-sha256-"
2090 "ecb-cipher_null-caam",
2091 .cra_blocksize
= NULL_BLOCK_SIZE
,
2093 .setkey
= aead_setkey
,
2094 .setauthsize
= aead_setauthsize
,
2095 .encrypt
= aead_encrypt
,
2096 .decrypt
= aead_decrypt
,
2097 .ivsize
= NULL_IV_SIZE
,
2098 .maxauthsize
= SHA256_DIGEST_SIZE
,
2101 .class2_alg_type
= OP_ALG_ALGSEL_SHA256
|
2102 OP_ALG_AAI_HMAC_PRECOMP
,
2108 .cra_name
= "authenc(hmac(sha384),"
2109 "ecb(cipher_null))",
2110 .cra_driver_name
= "authenc-hmac-sha384-"
2111 "ecb-cipher_null-caam",
2112 .cra_blocksize
= NULL_BLOCK_SIZE
,
2114 .setkey
= aead_setkey
,
2115 .setauthsize
= aead_setauthsize
,
2116 .encrypt
= aead_encrypt
,
2117 .decrypt
= aead_decrypt
,
2118 .ivsize
= NULL_IV_SIZE
,
2119 .maxauthsize
= SHA384_DIGEST_SIZE
,
2122 .class2_alg_type
= OP_ALG_ALGSEL_SHA384
|
2123 OP_ALG_AAI_HMAC_PRECOMP
,
2129 .cra_name
= "authenc(hmac(sha512),"
2130 "ecb(cipher_null))",
2131 .cra_driver_name
= "authenc-hmac-sha512-"
2132 "ecb-cipher_null-caam",
2133 .cra_blocksize
= NULL_BLOCK_SIZE
,
2135 .setkey
= aead_setkey
,
2136 .setauthsize
= aead_setauthsize
,
2137 .encrypt
= aead_encrypt
,
2138 .decrypt
= aead_decrypt
,
2139 .ivsize
= NULL_IV_SIZE
,
2140 .maxauthsize
= SHA512_DIGEST_SIZE
,
2143 .class2_alg_type
= OP_ALG_ALGSEL_SHA512
|
2144 OP_ALG_AAI_HMAC_PRECOMP
,
2150 .cra_name
= "authenc(hmac(md5),cbc(aes))",
2151 .cra_driver_name
= "authenc-hmac-md5-"
2153 .cra_blocksize
= AES_BLOCK_SIZE
,
2155 .setkey
= aead_setkey
,
2156 .setauthsize
= aead_setauthsize
,
2157 .encrypt
= aead_encrypt
,
2158 .decrypt
= aead_decrypt
,
2159 .ivsize
= AES_BLOCK_SIZE
,
2160 .maxauthsize
= MD5_DIGEST_SIZE
,
2163 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CBC
,
2164 .class2_alg_type
= OP_ALG_ALGSEL_MD5
|
2165 OP_ALG_AAI_HMAC_PRECOMP
,
2171 .cra_name
= "echainiv(authenc(hmac(md5),"
2173 .cra_driver_name
= "echainiv-authenc-hmac-md5-"
2175 .cra_blocksize
= AES_BLOCK_SIZE
,
2177 .setkey
= aead_setkey
,
2178 .setauthsize
= aead_setauthsize
,
2179 .encrypt
= aead_encrypt
,
2180 .decrypt
= aead_decrypt
,
2181 .ivsize
= AES_BLOCK_SIZE
,
2182 .maxauthsize
= MD5_DIGEST_SIZE
,
2185 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CBC
,
2186 .class2_alg_type
= OP_ALG_ALGSEL_MD5
|
2187 OP_ALG_AAI_HMAC_PRECOMP
,
2194 .cra_name
= "authenc(hmac(sha1),cbc(aes))",
2195 .cra_driver_name
= "authenc-hmac-sha1-"
2197 .cra_blocksize
= AES_BLOCK_SIZE
,
2199 .setkey
= aead_setkey
,
2200 .setauthsize
= aead_setauthsize
,
2201 .encrypt
= aead_encrypt
,
2202 .decrypt
= aead_decrypt
,
2203 .ivsize
= AES_BLOCK_SIZE
,
2204 .maxauthsize
= SHA1_DIGEST_SIZE
,
2207 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CBC
,
2208 .class2_alg_type
= OP_ALG_ALGSEL_SHA1
|
2209 OP_ALG_AAI_HMAC_PRECOMP
,
2215 .cra_name
= "echainiv(authenc(hmac(sha1),"
2217 .cra_driver_name
= "echainiv-authenc-"
2218 "hmac-sha1-cbc-aes-caam",
2219 .cra_blocksize
= AES_BLOCK_SIZE
,
2221 .setkey
= aead_setkey
,
2222 .setauthsize
= aead_setauthsize
,
2223 .encrypt
= aead_encrypt
,
2224 .decrypt
= aead_decrypt
,
2225 .ivsize
= AES_BLOCK_SIZE
,
2226 .maxauthsize
= SHA1_DIGEST_SIZE
,
2229 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CBC
,
2230 .class2_alg_type
= OP_ALG_ALGSEL_SHA1
|
2231 OP_ALG_AAI_HMAC_PRECOMP
,
2238 .cra_name
= "authenc(hmac(sha224),cbc(aes))",
2239 .cra_driver_name
= "authenc-hmac-sha224-"
2241 .cra_blocksize
= AES_BLOCK_SIZE
,
2243 .setkey
= aead_setkey
,
2244 .setauthsize
= aead_setauthsize
,
2245 .encrypt
= aead_encrypt
,
2246 .decrypt
= aead_decrypt
,
2247 .ivsize
= AES_BLOCK_SIZE
,
2248 .maxauthsize
= SHA224_DIGEST_SIZE
,
2251 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CBC
,
2252 .class2_alg_type
= OP_ALG_ALGSEL_SHA224
|
2253 OP_ALG_AAI_HMAC_PRECOMP
,
2259 .cra_name
= "echainiv(authenc(hmac(sha224),"
2261 .cra_driver_name
= "echainiv-authenc-"
2262 "hmac-sha224-cbc-aes-caam",
2263 .cra_blocksize
= AES_BLOCK_SIZE
,
2265 .setkey
= aead_setkey
,
2266 .setauthsize
= aead_setauthsize
,
2267 .encrypt
= aead_encrypt
,
2268 .decrypt
= aead_decrypt
,
2269 .ivsize
= AES_BLOCK_SIZE
,
2270 .maxauthsize
= SHA224_DIGEST_SIZE
,
2273 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CBC
,
2274 .class2_alg_type
= OP_ALG_ALGSEL_SHA224
|
2275 OP_ALG_AAI_HMAC_PRECOMP
,
2282 .cra_name
= "authenc(hmac(sha256),cbc(aes))",
2283 .cra_driver_name
= "authenc-hmac-sha256-"
2285 .cra_blocksize
= AES_BLOCK_SIZE
,
2287 .setkey
= aead_setkey
,
2288 .setauthsize
= aead_setauthsize
,
2289 .encrypt
= aead_encrypt
,
2290 .decrypt
= aead_decrypt
,
2291 .ivsize
= AES_BLOCK_SIZE
,
2292 .maxauthsize
= SHA256_DIGEST_SIZE
,
2295 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CBC
,
2296 .class2_alg_type
= OP_ALG_ALGSEL_SHA256
|
2297 OP_ALG_AAI_HMAC_PRECOMP
,
2303 .cra_name
= "echainiv(authenc(hmac(sha256),"
2305 .cra_driver_name
= "echainiv-authenc-"
2306 "hmac-sha256-cbc-aes-caam",
2307 .cra_blocksize
= AES_BLOCK_SIZE
,
2309 .setkey
= aead_setkey
,
2310 .setauthsize
= aead_setauthsize
,
2311 .encrypt
= aead_encrypt
,
2312 .decrypt
= aead_decrypt
,
2313 .ivsize
= AES_BLOCK_SIZE
,
2314 .maxauthsize
= SHA256_DIGEST_SIZE
,
2317 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CBC
,
2318 .class2_alg_type
= OP_ALG_ALGSEL_SHA256
|
2319 OP_ALG_AAI_HMAC_PRECOMP
,
2326 .cra_name
= "authenc(hmac(sha384),cbc(aes))",
2327 .cra_driver_name
= "authenc-hmac-sha384-"
2329 .cra_blocksize
= AES_BLOCK_SIZE
,
2331 .setkey
= aead_setkey
,
2332 .setauthsize
= aead_setauthsize
,
2333 .encrypt
= aead_encrypt
,
2334 .decrypt
= aead_decrypt
,
2335 .ivsize
= AES_BLOCK_SIZE
,
2336 .maxauthsize
= SHA384_DIGEST_SIZE
,
2339 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CBC
,
2340 .class2_alg_type
= OP_ALG_ALGSEL_SHA384
|
2341 OP_ALG_AAI_HMAC_PRECOMP
,
2347 .cra_name
= "echainiv(authenc(hmac(sha384),"
2349 .cra_driver_name
= "echainiv-authenc-"
2350 "hmac-sha384-cbc-aes-caam",
2351 .cra_blocksize
= AES_BLOCK_SIZE
,
2353 .setkey
= aead_setkey
,
2354 .setauthsize
= aead_setauthsize
,
2355 .encrypt
= aead_encrypt
,
2356 .decrypt
= aead_decrypt
,
2357 .ivsize
= AES_BLOCK_SIZE
,
2358 .maxauthsize
= SHA384_DIGEST_SIZE
,
2361 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CBC
,
2362 .class2_alg_type
= OP_ALG_ALGSEL_SHA384
|
2363 OP_ALG_AAI_HMAC_PRECOMP
,
2370 .cra_name
= "authenc(hmac(sha512),cbc(aes))",
2371 .cra_driver_name
= "authenc-hmac-sha512-"
2373 .cra_blocksize
= AES_BLOCK_SIZE
,
2375 .setkey
= aead_setkey
,
2376 .setauthsize
= aead_setauthsize
,
2377 .encrypt
= aead_encrypt
,
2378 .decrypt
= aead_decrypt
,
2379 .ivsize
= AES_BLOCK_SIZE
,
2380 .maxauthsize
= SHA512_DIGEST_SIZE
,
2383 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CBC
,
2384 .class2_alg_type
= OP_ALG_ALGSEL_SHA512
|
2385 OP_ALG_AAI_HMAC_PRECOMP
,
2391 .cra_name
= "echainiv(authenc(hmac(sha512),"
2393 .cra_driver_name
= "echainiv-authenc-"
2394 "hmac-sha512-cbc-aes-caam",
2395 .cra_blocksize
= AES_BLOCK_SIZE
,
2397 .setkey
= aead_setkey
,
2398 .setauthsize
= aead_setauthsize
,
2399 .encrypt
= aead_encrypt
,
2400 .decrypt
= aead_decrypt
,
2401 .ivsize
= AES_BLOCK_SIZE
,
2402 .maxauthsize
= SHA512_DIGEST_SIZE
,
2405 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CBC
,
2406 .class2_alg_type
= OP_ALG_ALGSEL_SHA512
|
2407 OP_ALG_AAI_HMAC_PRECOMP
,
2414 .cra_name
= "authenc(hmac(md5),cbc(des3_ede))",
2415 .cra_driver_name
= "authenc-hmac-md5-"
2416 "cbc-des3_ede-caam",
2417 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2419 .setkey
= aead_setkey
,
2420 .setauthsize
= aead_setauthsize
,
2421 .encrypt
= aead_encrypt
,
2422 .decrypt
= aead_decrypt
,
2423 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2424 .maxauthsize
= MD5_DIGEST_SIZE
,
2427 .class1_alg_type
= OP_ALG_ALGSEL_3DES
| OP_ALG_AAI_CBC
,
2428 .class2_alg_type
= OP_ALG_ALGSEL_MD5
|
2429 OP_ALG_AAI_HMAC_PRECOMP
,
2435 .cra_name
= "echainiv(authenc(hmac(md5),"
2437 .cra_driver_name
= "echainiv-authenc-hmac-md5-"
2438 "cbc-des3_ede-caam",
2439 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2441 .setkey
= aead_setkey
,
2442 .setauthsize
= aead_setauthsize
,
2443 .encrypt
= aead_encrypt
,
2444 .decrypt
= aead_decrypt
,
2445 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2446 .maxauthsize
= MD5_DIGEST_SIZE
,
2449 .class1_alg_type
= OP_ALG_ALGSEL_3DES
| OP_ALG_AAI_CBC
,
2450 .class2_alg_type
= OP_ALG_ALGSEL_MD5
|
2451 OP_ALG_AAI_HMAC_PRECOMP
,
2458 .cra_name
= "authenc(hmac(sha1),"
2460 .cra_driver_name
= "authenc-hmac-sha1-"
2461 "cbc-des3_ede-caam",
2462 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2464 .setkey
= aead_setkey
,
2465 .setauthsize
= aead_setauthsize
,
2466 .encrypt
= aead_encrypt
,
2467 .decrypt
= aead_decrypt
,
2468 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2469 .maxauthsize
= SHA1_DIGEST_SIZE
,
2472 .class1_alg_type
= OP_ALG_ALGSEL_3DES
| OP_ALG_AAI_CBC
,
2473 .class2_alg_type
= OP_ALG_ALGSEL_SHA1
|
2474 OP_ALG_AAI_HMAC_PRECOMP
,
2480 .cra_name
= "echainiv(authenc(hmac(sha1),"
2482 .cra_driver_name
= "echainiv-authenc-"
2484 "cbc-des3_ede-caam",
2485 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2487 .setkey
= aead_setkey
,
2488 .setauthsize
= aead_setauthsize
,
2489 .encrypt
= aead_encrypt
,
2490 .decrypt
= aead_decrypt
,
2491 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2492 .maxauthsize
= SHA1_DIGEST_SIZE
,
2495 .class1_alg_type
= OP_ALG_ALGSEL_3DES
| OP_ALG_AAI_CBC
,
2496 .class2_alg_type
= OP_ALG_ALGSEL_SHA1
|
2497 OP_ALG_AAI_HMAC_PRECOMP
,
2504 .cra_name
= "authenc(hmac(sha224),"
2506 .cra_driver_name
= "authenc-hmac-sha224-"
2507 "cbc-des3_ede-caam",
2508 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2510 .setkey
= aead_setkey
,
2511 .setauthsize
= aead_setauthsize
,
2512 .encrypt
= aead_encrypt
,
2513 .decrypt
= aead_decrypt
,
2514 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2515 .maxauthsize
= SHA224_DIGEST_SIZE
,
2518 .class1_alg_type
= OP_ALG_ALGSEL_3DES
| OP_ALG_AAI_CBC
,
2519 .class2_alg_type
= OP_ALG_ALGSEL_SHA224
|
2520 OP_ALG_AAI_HMAC_PRECOMP
,
2526 .cra_name
= "echainiv(authenc(hmac(sha224),"
2528 .cra_driver_name
= "echainiv-authenc-"
2530 "cbc-des3_ede-caam",
2531 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2533 .setkey
= aead_setkey
,
2534 .setauthsize
= aead_setauthsize
,
2535 .encrypt
= aead_encrypt
,
2536 .decrypt
= aead_decrypt
,
2537 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2538 .maxauthsize
= SHA224_DIGEST_SIZE
,
2541 .class1_alg_type
= OP_ALG_ALGSEL_3DES
| OP_ALG_AAI_CBC
,
2542 .class2_alg_type
= OP_ALG_ALGSEL_SHA224
|
2543 OP_ALG_AAI_HMAC_PRECOMP
,
2550 .cra_name
= "authenc(hmac(sha256),"
2552 .cra_driver_name
= "authenc-hmac-sha256-"
2553 "cbc-des3_ede-caam",
2554 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2556 .setkey
= aead_setkey
,
2557 .setauthsize
= aead_setauthsize
,
2558 .encrypt
= aead_encrypt
,
2559 .decrypt
= aead_decrypt
,
2560 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2561 .maxauthsize
= SHA256_DIGEST_SIZE
,
2564 .class1_alg_type
= OP_ALG_ALGSEL_3DES
| OP_ALG_AAI_CBC
,
2565 .class2_alg_type
= OP_ALG_ALGSEL_SHA256
|
2566 OP_ALG_AAI_HMAC_PRECOMP
,
2572 .cra_name
= "echainiv(authenc(hmac(sha256),"
2574 .cra_driver_name
= "echainiv-authenc-"
2576 "cbc-des3_ede-caam",
2577 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2579 .setkey
= aead_setkey
,
2580 .setauthsize
= aead_setauthsize
,
2581 .encrypt
= aead_encrypt
,
2582 .decrypt
= aead_decrypt
,
2583 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2584 .maxauthsize
= SHA256_DIGEST_SIZE
,
2587 .class1_alg_type
= OP_ALG_ALGSEL_3DES
| OP_ALG_AAI_CBC
,
2588 .class2_alg_type
= OP_ALG_ALGSEL_SHA256
|
2589 OP_ALG_AAI_HMAC_PRECOMP
,
2596 .cra_name
= "authenc(hmac(sha384),"
2598 .cra_driver_name
= "authenc-hmac-sha384-"
2599 "cbc-des3_ede-caam",
2600 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2602 .setkey
= aead_setkey
,
2603 .setauthsize
= aead_setauthsize
,
2604 .encrypt
= aead_encrypt
,
2605 .decrypt
= aead_decrypt
,
2606 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2607 .maxauthsize
= SHA384_DIGEST_SIZE
,
2610 .class1_alg_type
= OP_ALG_ALGSEL_3DES
| OP_ALG_AAI_CBC
,
2611 .class2_alg_type
= OP_ALG_ALGSEL_SHA384
|
2612 OP_ALG_AAI_HMAC_PRECOMP
,
2618 .cra_name
= "echainiv(authenc(hmac(sha384),"
2620 .cra_driver_name
= "echainiv-authenc-"
2622 "cbc-des3_ede-caam",
2623 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2625 .setkey
= aead_setkey
,
2626 .setauthsize
= aead_setauthsize
,
2627 .encrypt
= aead_encrypt
,
2628 .decrypt
= aead_decrypt
,
2629 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2630 .maxauthsize
= SHA384_DIGEST_SIZE
,
2633 .class1_alg_type
= OP_ALG_ALGSEL_3DES
| OP_ALG_AAI_CBC
,
2634 .class2_alg_type
= OP_ALG_ALGSEL_SHA384
|
2635 OP_ALG_AAI_HMAC_PRECOMP
,
2642 .cra_name
= "authenc(hmac(sha512),"
2644 .cra_driver_name
= "authenc-hmac-sha512-"
2645 "cbc-des3_ede-caam",
2646 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2648 .setkey
= aead_setkey
,
2649 .setauthsize
= aead_setauthsize
,
2650 .encrypt
= aead_encrypt
,
2651 .decrypt
= aead_decrypt
,
2652 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2653 .maxauthsize
= SHA512_DIGEST_SIZE
,
2656 .class1_alg_type
= OP_ALG_ALGSEL_3DES
| OP_ALG_AAI_CBC
,
2657 .class2_alg_type
= OP_ALG_ALGSEL_SHA512
|
2658 OP_ALG_AAI_HMAC_PRECOMP
,
2664 .cra_name
= "echainiv(authenc(hmac(sha512),"
2666 .cra_driver_name
= "echainiv-authenc-"
2668 "cbc-des3_ede-caam",
2669 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2671 .setkey
= aead_setkey
,
2672 .setauthsize
= aead_setauthsize
,
2673 .encrypt
= aead_encrypt
,
2674 .decrypt
= aead_decrypt
,
2675 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2676 .maxauthsize
= SHA512_DIGEST_SIZE
,
2679 .class1_alg_type
= OP_ALG_ALGSEL_3DES
| OP_ALG_AAI_CBC
,
2680 .class2_alg_type
= OP_ALG_ALGSEL_SHA512
|
2681 OP_ALG_AAI_HMAC_PRECOMP
,
2688 .cra_name
= "authenc(hmac(md5),cbc(des))",
2689 .cra_driver_name
= "authenc-hmac-md5-"
2691 .cra_blocksize
= DES_BLOCK_SIZE
,
2693 .setkey
= aead_setkey
,
2694 .setauthsize
= aead_setauthsize
,
2695 .encrypt
= aead_encrypt
,
2696 .decrypt
= aead_decrypt
,
2697 .ivsize
= DES_BLOCK_SIZE
,
2698 .maxauthsize
= MD5_DIGEST_SIZE
,
2701 .class1_alg_type
= OP_ALG_ALGSEL_DES
| OP_ALG_AAI_CBC
,
2702 .class2_alg_type
= OP_ALG_ALGSEL_MD5
|
2703 OP_ALG_AAI_HMAC_PRECOMP
,
2709 .cra_name
= "echainiv(authenc(hmac(md5),"
2711 .cra_driver_name
= "echainiv-authenc-hmac-md5-"
2713 .cra_blocksize
= DES_BLOCK_SIZE
,
2715 .setkey
= aead_setkey
,
2716 .setauthsize
= aead_setauthsize
,
2717 .encrypt
= aead_encrypt
,
2718 .decrypt
= aead_decrypt
,
2719 .ivsize
= DES_BLOCK_SIZE
,
2720 .maxauthsize
= MD5_DIGEST_SIZE
,
2723 .class1_alg_type
= OP_ALG_ALGSEL_DES
| OP_ALG_AAI_CBC
,
2724 .class2_alg_type
= OP_ALG_ALGSEL_MD5
|
2725 OP_ALG_AAI_HMAC_PRECOMP
,
2732 .cra_name
= "authenc(hmac(sha1),cbc(des))",
2733 .cra_driver_name
= "authenc-hmac-sha1-"
2735 .cra_blocksize
= DES_BLOCK_SIZE
,
2737 .setkey
= aead_setkey
,
2738 .setauthsize
= aead_setauthsize
,
2739 .encrypt
= aead_encrypt
,
2740 .decrypt
= aead_decrypt
,
2741 .ivsize
= DES_BLOCK_SIZE
,
2742 .maxauthsize
= SHA1_DIGEST_SIZE
,
2745 .class1_alg_type
= OP_ALG_ALGSEL_DES
| OP_ALG_AAI_CBC
,
2746 .class2_alg_type
= OP_ALG_ALGSEL_SHA1
|
2747 OP_ALG_AAI_HMAC_PRECOMP
,
2753 .cra_name
= "echainiv(authenc(hmac(sha1),"
2755 .cra_driver_name
= "echainiv-authenc-"
2756 "hmac-sha1-cbc-des-caam",
2757 .cra_blocksize
= DES_BLOCK_SIZE
,
2759 .setkey
= aead_setkey
,
2760 .setauthsize
= aead_setauthsize
,
2761 .encrypt
= aead_encrypt
,
2762 .decrypt
= aead_decrypt
,
2763 .ivsize
= DES_BLOCK_SIZE
,
2764 .maxauthsize
= SHA1_DIGEST_SIZE
,
2767 .class1_alg_type
= OP_ALG_ALGSEL_DES
| OP_ALG_AAI_CBC
,
2768 .class2_alg_type
= OP_ALG_ALGSEL_SHA1
|
2769 OP_ALG_AAI_HMAC_PRECOMP
,
2776 .cra_name
= "authenc(hmac(sha224),cbc(des))",
2777 .cra_driver_name
= "authenc-hmac-sha224-"
2779 .cra_blocksize
= DES_BLOCK_SIZE
,
2781 .setkey
= aead_setkey
,
2782 .setauthsize
= aead_setauthsize
,
2783 .encrypt
= aead_encrypt
,
2784 .decrypt
= aead_decrypt
,
2785 .ivsize
= DES_BLOCK_SIZE
,
2786 .maxauthsize
= SHA224_DIGEST_SIZE
,
2789 .class1_alg_type
= OP_ALG_ALGSEL_DES
| OP_ALG_AAI_CBC
,
2790 .class2_alg_type
= OP_ALG_ALGSEL_SHA224
|
2791 OP_ALG_AAI_HMAC_PRECOMP
,
2797 .cra_name
= "echainiv(authenc(hmac(sha224),"
2799 .cra_driver_name
= "echainiv-authenc-"
2800 "hmac-sha224-cbc-des-caam",
2801 .cra_blocksize
= DES_BLOCK_SIZE
,
2803 .setkey
= aead_setkey
,
2804 .setauthsize
= aead_setauthsize
,
2805 .encrypt
= aead_encrypt
,
2806 .decrypt
= aead_decrypt
,
2807 .ivsize
= DES_BLOCK_SIZE
,
2808 .maxauthsize
= SHA224_DIGEST_SIZE
,
2811 .class1_alg_type
= OP_ALG_ALGSEL_DES
| OP_ALG_AAI_CBC
,
2812 .class2_alg_type
= OP_ALG_ALGSEL_SHA224
|
2813 OP_ALG_AAI_HMAC_PRECOMP
,
2820 .cra_name
= "authenc(hmac(sha256),cbc(des))",
2821 .cra_driver_name
= "authenc-hmac-sha256-"
2823 .cra_blocksize
= DES_BLOCK_SIZE
,
2825 .setkey
= aead_setkey
,
2826 .setauthsize
= aead_setauthsize
,
2827 .encrypt
= aead_encrypt
,
2828 .decrypt
= aead_decrypt
,
2829 .ivsize
= DES_BLOCK_SIZE
,
2830 .maxauthsize
= SHA256_DIGEST_SIZE
,
2833 .class1_alg_type
= OP_ALG_ALGSEL_DES
| OP_ALG_AAI_CBC
,
2834 .class2_alg_type
= OP_ALG_ALGSEL_SHA256
|
2835 OP_ALG_AAI_HMAC_PRECOMP
,
2841 .cra_name
= "echainiv(authenc(hmac(sha256),"
2843 .cra_driver_name
= "echainiv-authenc-"
2844 "hmac-sha256-cbc-des-caam",
2845 .cra_blocksize
= DES_BLOCK_SIZE
,
2847 .setkey
= aead_setkey
,
2848 .setauthsize
= aead_setauthsize
,
2849 .encrypt
= aead_encrypt
,
2850 .decrypt
= aead_decrypt
,
2851 .ivsize
= DES_BLOCK_SIZE
,
2852 .maxauthsize
= SHA256_DIGEST_SIZE
,
2855 .class1_alg_type
= OP_ALG_ALGSEL_DES
| OP_ALG_AAI_CBC
,
2856 .class2_alg_type
= OP_ALG_ALGSEL_SHA256
|
2857 OP_ALG_AAI_HMAC_PRECOMP
,
2864 .cra_name
= "authenc(hmac(sha384),cbc(des))",
2865 .cra_driver_name
= "authenc-hmac-sha384-"
2867 .cra_blocksize
= DES_BLOCK_SIZE
,
2869 .setkey
= aead_setkey
,
2870 .setauthsize
= aead_setauthsize
,
2871 .encrypt
= aead_encrypt
,
2872 .decrypt
= aead_decrypt
,
2873 .ivsize
= DES_BLOCK_SIZE
,
2874 .maxauthsize
= SHA384_DIGEST_SIZE
,
2877 .class1_alg_type
= OP_ALG_ALGSEL_DES
| OP_ALG_AAI_CBC
,
2878 .class2_alg_type
= OP_ALG_ALGSEL_SHA384
|
2879 OP_ALG_AAI_HMAC_PRECOMP
,
2885 .cra_name
= "echainiv(authenc(hmac(sha384),"
2887 .cra_driver_name
= "echainiv-authenc-"
2888 "hmac-sha384-cbc-des-caam",
2889 .cra_blocksize
= DES_BLOCK_SIZE
,
2891 .setkey
= aead_setkey
,
2892 .setauthsize
= aead_setauthsize
,
2893 .encrypt
= aead_encrypt
,
2894 .decrypt
= aead_decrypt
,
2895 .ivsize
= DES_BLOCK_SIZE
,
2896 .maxauthsize
= SHA384_DIGEST_SIZE
,
2899 .class1_alg_type
= OP_ALG_ALGSEL_DES
| OP_ALG_AAI_CBC
,
2900 .class2_alg_type
= OP_ALG_ALGSEL_SHA384
|
2901 OP_ALG_AAI_HMAC_PRECOMP
,
2908 .cra_name
= "authenc(hmac(sha512),cbc(des))",
2909 .cra_driver_name
= "authenc-hmac-sha512-"
2911 .cra_blocksize
= DES_BLOCK_SIZE
,
2913 .setkey
= aead_setkey
,
2914 .setauthsize
= aead_setauthsize
,
2915 .encrypt
= aead_encrypt
,
2916 .decrypt
= aead_decrypt
,
2917 .ivsize
= DES_BLOCK_SIZE
,
2918 .maxauthsize
= SHA512_DIGEST_SIZE
,
2921 .class1_alg_type
= OP_ALG_ALGSEL_DES
| OP_ALG_AAI_CBC
,
2922 .class2_alg_type
= OP_ALG_ALGSEL_SHA512
|
2923 OP_ALG_AAI_HMAC_PRECOMP
,
2929 .cra_name
= "echainiv(authenc(hmac(sha512),"
2931 .cra_driver_name
= "echainiv-authenc-"
2932 "hmac-sha512-cbc-des-caam",
2933 .cra_blocksize
= DES_BLOCK_SIZE
,
2935 .setkey
= aead_setkey
,
2936 .setauthsize
= aead_setauthsize
,
2937 .encrypt
= aead_encrypt
,
2938 .decrypt
= aead_decrypt
,
2939 .ivsize
= DES_BLOCK_SIZE
,
2940 .maxauthsize
= SHA512_DIGEST_SIZE
,
2943 .class1_alg_type
= OP_ALG_ALGSEL_DES
| OP_ALG_AAI_CBC
,
2944 .class2_alg_type
= OP_ALG_ALGSEL_SHA512
|
2945 OP_ALG_AAI_HMAC_PRECOMP
,
2952 .cra_name
= "authenc(hmac(md5),"
2953 "rfc3686(ctr(aes)))",
2954 .cra_driver_name
= "authenc-hmac-md5-"
2955 "rfc3686-ctr-aes-caam",
2958 .setkey
= aead_setkey
,
2959 .setauthsize
= aead_setauthsize
,
2960 .encrypt
= aead_encrypt
,
2961 .decrypt
= aead_decrypt
,
2962 .ivsize
= CTR_RFC3686_IV_SIZE
,
2963 .maxauthsize
= MD5_DIGEST_SIZE
,
2966 .class1_alg_type
= OP_ALG_ALGSEL_AES
|
2967 OP_ALG_AAI_CTR_MOD128
,
2968 .class2_alg_type
= OP_ALG_ALGSEL_MD5
|
2969 OP_ALG_AAI_HMAC_PRECOMP
,
2976 .cra_name
= "seqiv(authenc("
2977 "hmac(md5),rfc3686(ctr(aes))))",
2978 .cra_driver_name
= "seqiv-authenc-hmac-md5-"
2979 "rfc3686-ctr-aes-caam",
2982 .setkey
= aead_setkey
,
2983 .setauthsize
= aead_setauthsize
,
2984 .encrypt
= aead_encrypt
,
2985 .decrypt
= aead_decrypt
,
2986 .ivsize
= CTR_RFC3686_IV_SIZE
,
2987 .maxauthsize
= MD5_DIGEST_SIZE
,
2990 .class1_alg_type
= OP_ALG_ALGSEL_AES
|
2991 OP_ALG_AAI_CTR_MOD128
,
2992 .class2_alg_type
= OP_ALG_ALGSEL_MD5
|
2993 OP_ALG_AAI_HMAC_PRECOMP
,
3001 .cra_name
= "authenc(hmac(sha1),"
3002 "rfc3686(ctr(aes)))",
3003 .cra_driver_name
= "authenc-hmac-sha1-"
3004 "rfc3686-ctr-aes-caam",
3007 .setkey
= aead_setkey
,
3008 .setauthsize
= aead_setauthsize
,
3009 .encrypt
= aead_encrypt
,
3010 .decrypt
= aead_decrypt
,
3011 .ivsize
= CTR_RFC3686_IV_SIZE
,
3012 .maxauthsize
= SHA1_DIGEST_SIZE
,
3015 .class1_alg_type
= OP_ALG_ALGSEL_AES
|
3016 OP_ALG_AAI_CTR_MOD128
,
3017 .class2_alg_type
= OP_ALG_ALGSEL_SHA1
|
3018 OP_ALG_AAI_HMAC_PRECOMP
,
3025 .cra_name
= "seqiv(authenc("
3026 "hmac(sha1),rfc3686(ctr(aes))))",
3027 .cra_driver_name
= "seqiv-authenc-hmac-sha1-"
3028 "rfc3686-ctr-aes-caam",
3031 .setkey
= aead_setkey
,
3032 .setauthsize
= aead_setauthsize
,
3033 .encrypt
= aead_encrypt
,
3034 .decrypt
= aead_decrypt
,
3035 .ivsize
= CTR_RFC3686_IV_SIZE
,
3036 .maxauthsize
= SHA1_DIGEST_SIZE
,
3039 .class1_alg_type
= OP_ALG_ALGSEL_AES
|
3040 OP_ALG_AAI_CTR_MOD128
,
3041 .class2_alg_type
= OP_ALG_ALGSEL_SHA1
|
3042 OP_ALG_AAI_HMAC_PRECOMP
,
3050 .cra_name
= "authenc(hmac(sha224),"
3051 "rfc3686(ctr(aes)))",
3052 .cra_driver_name
= "authenc-hmac-sha224-"
3053 "rfc3686-ctr-aes-caam",
3056 .setkey
= aead_setkey
,
3057 .setauthsize
= aead_setauthsize
,
3058 .encrypt
= aead_encrypt
,
3059 .decrypt
= aead_decrypt
,
3060 .ivsize
= CTR_RFC3686_IV_SIZE
,
3061 .maxauthsize
= SHA224_DIGEST_SIZE
,
3064 .class1_alg_type
= OP_ALG_ALGSEL_AES
|
3065 OP_ALG_AAI_CTR_MOD128
,
3066 .class2_alg_type
= OP_ALG_ALGSEL_SHA224
|
3067 OP_ALG_AAI_HMAC_PRECOMP
,
3074 .cra_name
= "seqiv(authenc("
3075 "hmac(sha224),rfc3686(ctr(aes))))",
3076 .cra_driver_name
= "seqiv-authenc-hmac-sha224-"
3077 "rfc3686-ctr-aes-caam",
3080 .setkey
= aead_setkey
,
3081 .setauthsize
= aead_setauthsize
,
3082 .encrypt
= aead_encrypt
,
3083 .decrypt
= aead_decrypt
,
3084 .ivsize
= CTR_RFC3686_IV_SIZE
,
3085 .maxauthsize
= SHA224_DIGEST_SIZE
,
3088 .class1_alg_type
= OP_ALG_ALGSEL_AES
|
3089 OP_ALG_AAI_CTR_MOD128
,
3090 .class2_alg_type
= OP_ALG_ALGSEL_SHA224
|
3091 OP_ALG_AAI_HMAC_PRECOMP
,
3099 .cra_name
= "authenc(hmac(sha256),"
3100 "rfc3686(ctr(aes)))",
3101 .cra_driver_name
= "authenc-hmac-sha256-"
3102 "rfc3686-ctr-aes-caam",
3105 .setkey
= aead_setkey
,
3106 .setauthsize
= aead_setauthsize
,
3107 .encrypt
= aead_encrypt
,
3108 .decrypt
= aead_decrypt
,
3109 .ivsize
= CTR_RFC3686_IV_SIZE
,
3110 .maxauthsize
= SHA256_DIGEST_SIZE
,
3113 .class1_alg_type
= OP_ALG_ALGSEL_AES
|
3114 OP_ALG_AAI_CTR_MOD128
,
3115 .class2_alg_type
= OP_ALG_ALGSEL_SHA256
|
3116 OP_ALG_AAI_HMAC_PRECOMP
,
3123 .cra_name
= "seqiv(authenc(hmac(sha256),"
3124 "rfc3686(ctr(aes))))",
3125 .cra_driver_name
= "seqiv-authenc-hmac-sha256-"
3126 "rfc3686-ctr-aes-caam",
3129 .setkey
= aead_setkey
,
3130 .setauthsize
= aead_setauthsize
,
3131 .encrypt
= aead_encrypt
,
3132 .decrypt
= aead_decrypt
,
3133 .ivsize
= CTR_RFC3686_IV_SIZE
,
3134 .maxauthsize
= SHA256_DIGEST_SIZE
,
3137 .class1_alg_type
= OP_ALG_ALGSEL_AES
|
3138 OP_ALG_AAI_CTR_MOD128
,
3139 .class2_alg_type
= OP_ALG_ALGSEL_SHA256
|
3140 OP_ALG_AAI_HMAC_PRECOMP
,
3148 .cra_name
= "authenc(hmac(sha384),"
3149 "rfc3686(ctr(aes)))",
3150 .cra_driver_name
= "authenc-hmac-sha384-"
3151 "rfc3686-ctr-aes-caam",
3154 .setkey
= aead_setkey
,
3155 .setauthsize
= aead_setauthsize
,
3156 .encrypt
= aead_encrypt
,
3157 .decrypt
= aead_decrypt
,
3158 .ivsize
= CTR_RFC3686_IV_SIZE
,
3159 .maxauthsize
= SHA384_DIGEST_SIZE
,
3162 .class1_alg_type
= OP_ALG_ALGSEL_AES
|
3163 OP_ALG_AAI_CTR_MOD128
,
3164 .class2_alg_type
= OP_ALG_ALGSEL_SHA384
|
3165 OP_ALG_AAI_HMAC_PRECOMP
,
3172 .cra_name
= "seqiv(authenc(hmac(sha384),"
3173 "rfc3686(ctr(aes))))",
3174 .cra_driver_name
= "seqiv-authenc-hmac-sha384-"
3175 "rfc3686-ctr-aes-caam",
3178 .setkey
= aead_setkey
,
3179 .setauthsize
= aead_setauthsize
,
3180 .encrypt
= aead_encrypt
,
3181 .decrypt
= aead_decrypt
,
3182 .ivsize
= CTR_RFC3686_IV_SIZE
,
3183 .maxauthsize
= SHA384_DIGEST_SIZE
,
3186 .class1_alg_type
= OP_ALG_ALGSEL_AES
|
3187 OP_ALG_AAI_CTR_MOD128
,
3188 .class2_alg_type
= OP_ALG_ALGSEL_SHA384
|
3189 OP_ALG_AAI_HMAC_PRECOMP
,
3197 .cra_name
= "authenc(hmac(sha512),"
3198 "rfc3686(ctr(aes)))",
3199 .cra_driver_name
= "authenc-hmac-sha512-"
3200 "rfc3686-ctr-aes-caam",
3203 .setkey
= aead_setkey
,
3204 .setauthsize
= aead_setauthsize
,
3205 .encrypt
= aead_encrypt
,
3206 .decrypt
= aead_decrypt
,
3207 .ivsize
= CTR_RFC3686_IV_SIZE
,
3208 .maxauthsize
= SHA512_DIGEST_SIZE
,
3211 .class1_alg_type
= OP_ALG_ALGSEL_AES
|
3212 OP_ALG_AAI_CTR_MOD128
,
3213 .class2_alg_type
= OP_ALG_ALGSEL_SHA512
|
3214 OP_ALG_AAI_HMAC_PRECOMP
,
3221 .cra_name
= "seqiv(authenc(hmac(sha512),"
3222 "rfc3686(ctr(aes))))",
3223 .cra_driver_name
= "seqiv-authenc-hmac-sha512-"
3224 "rfc3686-ctr-aes-caam",
3227 .setkey
= aead_setkey
,
3228 .setauthsize
= aead_setauthsize
,
3229 .encrypt
= aead_encrypt
,
3230 .decrypt
= aead_decrypt
,
3231 .ivsize
= CTR_RFC3686_IV_SIZE
,
3232 .maxauthsize
= SHA512_DIGEST_SIZE
,
3235 .class1_alg_type
= OP_ALG_ALGSEL_AES
|
3236 OP_ALG_AAI_CTR_MOD128
,
3237 .class2_alg_type
= OP_ALG_ALGSEL_SHA512
|
3238 OP_ALG_AAI_HMAC_PRECOMP
,
/*
 * caam_crypto_alg - driver-private wrapper pairing a generic crypto_alg
 * with the CAAM class1/class2 algorithm-selection data (caam_alg_entry).
 * Instances are linked into the module-wide alg_list via @entry so they
 * can be unregistered and freed on module exit.
 * NOTE(review): this chunk is a lossy extraction -- the struct's closing
 * brace (original line ~3249) was dropped by the extractor.
 */
3245 struct caam_crypto_alg
{
/* embedded crypto API algorithm descriptor (registered with the core) */
3246 struct crypto_alg crypto_alg
;
/* linkage into the module-global alg_list of registered algorithms */
3247 struct list_head entry
;
/* CAAM-specific OP_ALG_* selection bits copied into the ctx at init */
3248 struct caam_alg_entry caam
;
/*
 * caam_init_common - shared transform-init path for both ablkcipher and
 * AEAD tfms: allocate a job ring, DMA-map the context's shared-descriptor
 * and key region in one mapping, record the per-field DMA addresses, and
 * seed the class1/class2 operation header templates from @caam.
 * Returns 0 on success or a negative errno (job ring alloc / DMA map
 * failure).  NOTE(review): several lines (braces, returns, the offsetof
 * field arguments) were elided by the extraction.
 */
3251 static int caam_init_common(struct caam_ctx
*ctx
, struct caam_alg_entry
*caam
)
3253 dma_addr_t dma_addr
;
/* grab a job ring for this transform; all descriptors go through it */
3255 ctx
->jrdev
= caam_jr_alloc();
3256 if (IS_ERR(ctx
->jrdev
)) {
3257 pr_err("Job Ring Device allocation for transform failed\n");
3258 return PTR_ERR(ctx
->jrdev
);
/*
 * Map sh_desc_enc through the key as a single DMA region; CPU sync is
 * skipped here because the descriptors are synced explicitly when built.
 */
3261 dma_addr
= dma_map_single_attrs(ctx
->jrdev
, ctx
->sh_desc_enc
,
3262 offsetof(struct caam_ctx
,
3264 DMA_TO_DEVICE
, DMA_ATTR_SKIP_CPU_SYNC
);
3265 if (dma_mapping_error(ctx
->jrdev
, dma_addr
)) {
3266 dev_err(ctx
->jrdev
, "unable to map key, shared descriptors\n");
/* undo the job ring allocation on mapping failure */
3267 caam_jr_free(ctx
->jrdev
);
/* derive each field's bus address from the single mapping's base */
3271 ctx
->sh_desc_enc_dma
= dma_addr
;
3272 ctx
->sh_desc_dec_dma
= dma_addr
+ offsetof(struct caam_ctx
,
3274 ctx
->sh_desc_givenc_dma
= dma_addr
+ offsetof(struct caam_ctx
,
3276 ctx
->key_dma
= dma_addr
+ offsetof(struct caam_ctx
, key
);
3278 /* copy descriptor header template value */
3279 ctx
->cdata
.algtype
= OP_TYPE_CLASS1_ALG
| caam
->class1_alg_type
;
3280 ctx
->adata
.algtype
= OP_TYPE_CLASS2_ALG
| caam
->class2_alg_type
;
/*
 * caam_cra_init - crypto_alg .cra_init hook for (abl)kcipher transforms:
 * recover the enclosing caam_crypto_alg from the tfm's crypto_alg and
 * delegate to caam_init_common() with its CAAM selection data.
 */
3285 static int caam_cra_init(struct crypto_tfm
*tfm
)
3287 struct crypto_alg
*alg
= tfm
->__crt_alg
;
3288 struct caam_crypto_alg
*caam_alg
=
3289 container_of(alg
, struct caam_crypto_alg
, crypto_alg
);
3290 struct caam_ctx
*ctx
= crypto_tfm_ctx(tfm
);
3292 return caam_init_common(ctx
, &caam_alg
->caam
);
/*
 * caam_aead_init - aead_alg .init hook: recover the enclosing
 * caam_aead_alg from the tfm's aead_alg and delegate to
 * caam_init_common() with its CAAM selection data (mirror of
 * caam_cra_init for the AEAD interface).
 */
3295 static int caam_aead_init(struct crypto_aead
*tfm
)
3297 struct aead_alg
*alg
= crypto_aead_alg(tfm
);
3298 struct caam_aead_alg
*caam_alg
=
3299 container_of(alg
, struct caam_aead_alg
, aead
);
3300 struct caam_ctx
*ctx
= crypto_aead_ctx(tfm
);
3302 return caam_init_common(ctx
, &caam_alg
->caam
);
/*
 * caam_exit_common - teardown mirror of caam_init_common(): unmap the
 * single shared-descriptor/key DMA mapping (size = offset of the first
 * field past the mapped region, sh_desc_enc_dma) and release the job
 * ring.  Same DMA_ATTR_SKIP_CPU_SYNC attribute as the map call.
 */
3305 static void caam_exit_common(struct caam_ctx
*ctx
)
3307 dma_unmap_single_attrs(ctx
->jrdev
, ctx
->sh_desc_enc_dma
,
3308 offsetof(struct caam_ctx
, sh_desc_enc_dma
),
3309 DMA_TO_DEVICE
, DMA_ATTR_SKIP_CPU_SYNC
);
3310 caam_jr_free(ctx
->jrdev
);
/* crypto_alg .cra_exit hook: forward the tfm context to caam_exit_common() */
3313 static void caam_cra_exit(struct crypto_tfm
*tfm
)
3315 caam_exit_common(crypto_tfm_ctx(tfm
));
/* aead_alg .exit hook: forward the aead context to caam_exit_common() */
3318 static void caam_aead_exit(struct crypto_aead
*tfm
)
3320 caam_exit_common(crypto_aead_ctx(tfm
));
/*
 * caam_algapi_exit - module exit: unregister every AEAD template that was
 * successfully registered (tracked via t_alg->registered), then walk
 * alg_list unregistering, unlinking, and (in elided lines) freeing each
 * dynamically allocated caam_crypto_alg.
 * NOTE(review): the loop-variable declaration for 'i' and the kfree()
 * call appear to have been dropped by the extraction.
 */
3323 static void __exit
caam_algapi_exit(void)
3326 struct caam_crypto_alg
*t_alg
, *n
;
/* AEADs are a static array, so only unregistration is needed */
3329 for (i
= 0; i
< ARRAY_SIZE(driver_aeads
); i
++) {
3330 struct caam_aead_alg
*t_alg
= driver_aeads
+ i
;
3332 if (t_alg
->registered
)
3333 crypto_unregister_aead(&t_alg
->aead
);
/* _safe variant: entries are removed from the list while iterating */
3339 list_for_each_entry_safe(t_alg
, n
, &alg_list
, entry
) {
3340 crypto_unregister_alg(&t_alg
->crypto_alg
);
3341 list_del(&t_alg
->entry
);
/*
 * caam_alg_alloc - build a caam_crypto_alg from a caam_alg_template:
 * kzalloc the wrapper, fill in the generic crypto_alg fields (names,
 * module, init/exit hooks, priority, blocksize, ctxsize, flags), select
 * the cra_type/cra_ablkcipher union member from template->type, and copy
 * the CAAM class1/class2 selection bits.  Returns the new object or
 * ERR_PTR(-ENOMEM).  NOTE(review): the allocation-failure 'if', the
 * 'break' statements between switch cases, and the trailing 'return
 * t_alg;' were elided by the extraction.
 */
3346 static struct caam_crypto_alg
*caam_alg_alloc(struct caam_alg_template
3349 struct caam_crypto_alg
*t_alg
;
3350 struct crypto_alg
*alg
;
/* zeroed allocation: all crypto_alg fields not set below stay 0/NULL */
3352 t_alg
= kzalloc(sizeof(*t_alg
), GFP_KERNEL
);
3354 pr_err("failed to allocate t_alg\n");
3355 return ERR_PTR(-ENOMEM
);
3358 alg
= &t_alg
->crypto_alg
;
/* copy algorithm and driver names from the template, bounded */
3360 snprintf(alg
->cra_name
, CRYPTO_MAX_ALG_NAME
, "%s", template->name
);
3361 snprintf(alg
->cra_driver_name
, CRYPTO_MAX_ALG_NAME
, "%s",
3362 template->driver_name
);
3363 alg
->cra_module
= THIS_MODULE
;
3364 alg
->cra_init
= caam_cra_init
;
3365 alg
->cra_exit
= caam_cra_exit
;
3366 alg
->cra_priority
= CAAM_CRA_PRIORITY
;
3367 alg
->cra_blocksize
= template->blocksize
;
3368 alg
->cra_alignmask
= 0;
3369 alg
->cra_ctxsize
= sizeof(struct caam_ctx
);
/* async, hardware-only; the template's type flag is OR'd in (elided) */
3370 alg
->cra_flags
= CRYPTO_ALG_ASYNC
| CRYPTO_ALG_KERN_DRIVER_ONLY
|
/* both GIVCIPHER and ABLKCIPHER reuse the template's ablkcipher ops */
3372 switch (template->type
) {
3373 case CRYPTO_ALG_TYPE_GIVCIPHER
:
3374 alg
->cra_type
= &crypto_givcipher_type
;
3375 alg
->cra_ablkcipher
= template->template_ablkcipher
;
3377 case CRYPTO_ALG_TYPE_ABLKCIPHER
:
3378 alg
->cra_type
= &crypto_ablkcipher_type
;
3379 alg
->cra_ablkcipher
= template->template_ablkcipher
;
/* stash the CAAM OP_ALG_* selections consumed later by caam_init_common */
3383 t_alg
->caam
.class1_alg_type
= template->class1_alg_type
;
3384 t_alg
->caam
.class2_alg_type
= template->class2_alg_type
;
/*
 * caam_aead_alg_init - fill in the generic (non-template) fields of a
 * static driver_aeads entry before registration: module owner, priority,
 * context size, async/hw-only flags, and the init/exit hooks.
 */
3389 static void caam_aead_alg_init(struct caam_aead_alg
*t_alg
)
3391 struct aead_alg
*alg
= &t_alg
->aead
;
3393 alg
->base
.cra_module
= THIS_MODULE
;
3394 alg
->base
.cra_priority
= CAAM_CRA_PRIORITY
;
3395 alg
->base
.cra_ctxsize
= sizeof(struct caam_ctx
);
3396 alg
->base
.cra_flags
= CRYPTO_ALG_ASYNC
| CRYPTO_ALG_KERN_DRIVER_ONLY
;
3398 alg
->init
= caam_aead_init
;
3399 alg
->exit
= caam_aead_exit
;
/*
 * caam_algapi_init - module init: locate the CAAM controller via the
 * device tree ("fsl,sec-v4.0" with "fsl,sec4.0" fallback), read the CHA
 * version/instantiation registers to detect DES, AES, and MD blocks,
 * then register every driver_algs and driver_aeads entry the hardware
 * can actually support.  Entries needing an absent engine, an AES mode
 * beyond a low-power AES block, or a digest larger than the MD block's
 * limit are skipped.
 * NOTE(review): multiple lines (NULL/-ENODEV checks, 'continue'
 * statements, err checks, loop-variable declarations, and the final
 * return) were elided by the extraction.
 */
3402 static int __init
caam_algapi_init(void)
3404 struct device_node
*dev_node
;
3405 struct platform_device
*pdev
;
3406 struct device
*ctrldev
;
3407 struct caam_drv_private
*priv
;
3409 u32 cha_vid
, cha_inst
, des_inst
, aes_inst
, md_inst
;
/* assume full-size digests until the MD block says otherwise */
3410 unsigned int md_limit
= SHA512_DIGEST_SIZE
;
3411 bool registered
= false;
/* probe current then legacy compatible strings for the SEC node */
3413 dev_node
= of_find_compatible_node(NULL
, NULL
, "fsl,sec-v4.0");
3415 dev_node
= of_find_compatible_node(NULL
, NULL
, "fsl,sec4.0");
3420 pdev
= of_find_device_by_node(dev_node
);
3422 of_node_put(dev_node
);
3426 ctrldev
= &pdev
->dev
;
3427 priv
= dev_get_drvdata(ctrldev
);
3428 of_node_put(dev_node
);
3431 * If priv is NULL, it's probably because the caam driver wasn't
3432 * properly initialized (e.g. RNG4 init failed). Thus, bail out here.
3438 INIT_LIST_HEAD(&alg_list
);
3441 * Register crypto algorithms the device supports.
3442 * First, detect presence and attributes of DES, AES, and MD blocks.
3444 cha_vid
= rd_reg32(&priv
->ctrl
->perfmon
.cha_id_ls
);
3445 cha_inst
= rd_reg32(&priv
->ctrl
->perfmon
.cha_num_ls
);
/* per-engine instantiation counts extracted from cha_num_ls */
3446 des_inst
= (cha_inst
& CHA_ID_LS_DES_MASK
) >> CHA_ID_LS_DES_SHIFT
;
3447 aes_inst
= (cha_inst
& CHA_ID_LS_AES_MASK
) >> CHA_ID_LS_AES_SHIFT
;
3448 md_inst
= (cha_inst
& CHA_ID_LS_MD_MASK
) >> CHA_ID_LS_MD_SHIFT
;
3450 /* If MD is present, limit digest size based on LP256 */
3451 if (md_inst
&& ((cha_vid
& CHA_ID_LS_MD_MASK
) == CHA_ID_LS_MD_LP256
))
3452 md_limit
= SHA256_DIGEST_SIZE
;
/* pass 1: the (abl)kcipher templates in driver_algs */
3454 for (i
= 0; i
< ARRAY_SIZE(driver_algs
); i
++) {
3455 struct caam_crypto_alg
*t_alg
;
3456 struct caam_alg_template
*alg
= driver_algs
+ i
;
3457 u32 alg_sel
= alg
->class1_alg_type
& OP_ALG_ALGSEL_MASK
;
3459 /* Skip DES algorithms if not supported by device */
3461 ((alg_sel
== OP_ALG_ALGSEL_3DES
) ||
3462 (alg_sel
== OP_ALG_ALGSEL_DES
)))
3465 /* Skip AES algorithms if not supported by device */
3466 if (!aes_inst
&& (alg_sel
== OP_ALG_ALGSEL_AES
))
3470 * Check support for AES modes not available
3473 if ((cha_vid
& CHA_ID_LS_AES_MASK
) == CHA_ID_LS_AES_LP
)
3474 if ((alg
->class1_alg_type
& OP_ALG_AAI_MASK
) ==
3478 t_alg
= caam_alg_alloc(alg
);
3479 if (IS_ERR(t_alg
)) {
3480 err
= PTR_ERR(t_alg
);
3481 pr_warn("%s alg allocation failed\n", alg
->driver_name
);
3485 err
= crypto_register_alg(&t_alg
->crypto_alg
);
3487 pr_warn("%s alg registration failed\n",
3488 t_alg
->crypto_alg
.cra_driver_name
);
/* track for unregistration/free at module exit */
3493 list_add_tail(&t_alg
->entry
, &alg_list
);
/* pass 2: the static AEAD array driver_aeads */
3497 for (i
= 0; i
< ARRAY_SIZE(driver_aeads
); i
++) {
3498 struct caam_aead_alg
*t_alg
= driver_aeads
+ i
;
3499 u32 c1_alg_sel
= t_alg
->caam
.class1_alg_type
&
3501 u32 c2_alg_sel
= t_alg
->caam
.class2_alg_type
&
3503 u32 alg_aai
= t_alg
->caam
.class1_alg_type
& OP_ALG_AAI_MASK
;
3505 /* Skip DES algorithms if not supported by device */
3507 ((c1_alg_sel
== OP_ALG_ALGSEL_3DES
) ||
3508 (c1_alg_sel
== OP_ALG_ALGSEL_DES
)))
3511 /* Skip AES algorithms if not supported by device */
3512 if (!aes_inst
&& (c1_alg_sel
== OP_ALG_ALGSEL_AES
))
3516 * Check support for AES algorithms not available
3519 if ((cha_vid
& CHA_ID_LS_AES_MASK
) == CHA_ID_LS_AES_LP
)
3520 if (alg_aai
== OP_ALG_AAI_GCM
)
3524 * Skip algorithms requiring message digests
3525 * if MD or MD size is not supported by device.
3528 (!md_inst
|| (t_alg
->aead
.maxauthsize
> md_limit
)))
3531 caam_aead_alg_init(t_alg
);
3533 err
= crypto_register_aead(&t_alg
->aead
);
3535 pr_warn("%s alg registration failed\n",
3536 t_alg
->aead
.base
.cra_driver_name
);
/* remember success so caam_algapi_exit() only unregisters these */
3540 t_alg
->registered
= true;
3545 pr_info("caam algorithms registered in /proc/crypto\n");
/* module entry/exit points and standard module metadata */
3550 module_init(caam_algapi_init
);
3551 module_exit(caam_algapi_exit
);
3553 MODULE_LICENSE("GPL");
3554 MODULE_DESCRIPTION("FSL CAAM support for crypto API");
3555 MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");