]> git.proxmox.com Git - mirror_ubuntu-kernels.git/blame - drivers/crypto/caam/caamalg.c
Merge branches 'for-5.1/upstream-fixes', 'for-5.2/core', 'for-5.2/ish', 'for-5.2...
[mirror_ubuntu-kernels.git] / drivers / crypto / caam / caamalg.c
CommitLineData
618b5dc4 1// SPDX-License-Identifier: GPL-2.0+
8e8ec596
KP
2/*
3 * caam - Freescale FSL CAAM support for crypto API
4 *
5 * Copyright 2008-2011 Freescale Semiconductor, Inc.
eaed71a4 6 * Copyright 2016-2019 NXP
8e8ec596
KP
7 *
8 * Based on talitos crypto API driver.
9 *
10 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
11 *
12 * --------------- ---------------
13 * | JobDesc #1 |-------------------->| ShareDesc |
14 * | *(packet 1) | | (PDB) |
15 * --------------- |------------->| (hashKey) |
16 * . | | (cipherKey) |
17 * . | |-------->| (operation) |
18 * --------------- | | ---------------
19 * | JobDesc #2 |------| |
20 * | *(packet 2) | |
21 * --------------- |
22 * . |
23 * . |
24 * --------------- |
25 * | JobDesc #3 |------------
26 * | *(packet 3) |
27 * ---------------
28 *
29 * The SharedDesc never changes for a connection unless rekeyed, but
30 * each packet will likely be in a different place. So all we need
31 * to know to process the packet is where the input is, where the
32 * output goes, and what context we want to process with. Context is
33 * in the SharedDesc, packet references in the JobDesc.
34 *
35 * So, a job desc looks like:
36 *
37 * ---------------------
38 * | Header |
39 * | ShareDesc Pointer |
40 * | SEQ_OUT_PTR |
41 * | (output buffer) |
6ec47334 42 * | (output length) |
8e8ec596
KP
43 * | SEQ_IN_PTR |
44 * | (input buffer) |
6ec47334 45 * | (input length) |
8e8ec596
KP
46 * ---------------------
47 */
48
49#include "compat.h"
50
51#include "regs.h"
52#include "intern.h"
53#include "desc_constr.h"
54#include "jr.h"
55#include "error.h"
a299c837 56#include "sg_sw_sec4.h"
4c1ec1f9 57#include "key_gen.h"
8cea7b66 58#include "caamalg_desc.h"
8e8ec596
KP
59
60/*
61 * crypto alg
62 */
63#define CAAM_CRA_PRIORITY 3000
64/* max key is sum of AES_MAX_KEY_SIZE, max split key size */
65#define CAAM_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + \
daebc465 66 CTR_RFC3686_NONCE_SIZE + \
8e8ec596 67 SHA512_DIGEST_SIZE * 2)
8e8ec596 68
f2147b88
HX
69#define AEAD_DESC_JOB_IO_LEN (DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
70#define GCM_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \
71 CAAM_CMD_SZ * 4)
479bcc7c
HX
72#define AUTHENC_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \
73 CAAM_CMD_SZ * 5)
f2147b88 74
d6bbd4ee
HG
75#define CHACHAPOLY_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + CAAM_CMD_SZ * 6)
76
87e51b07
HX
77#define DESC_MAX_USED_BYTES (CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN)
78#define DESC_MAX_USED_LEN (DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
4427b1b4 79
8e8ec596
KP
80#ifdef DEBUG
81/* for print_hex_dumps with line references */
8e8ec596
KP
82#define debug(format, arg...) printk(format, arg)
83#else
84#define debug(format, arg...)
85#endif
5ecf8ef9 86
479bcc7c
HX
87struct caam_alg_entry {
88 int class1_alg_type;
89 int class2_alg_type;
479bcc7c
HX
90 bool rfc3686;
91 bool geniv;
92};
93
94struct caam_aead_alg {
95 struct aead_alg aead;
96 struct caam_alg_entry caam;
97 bool registered;
98};
99
5ca7badb
HG
100struct caam_skcipher_alg {
101 struct skcipher_alg skcipher;
102 struct caam_alg_entry caam;
103 bool registered;
104};
105
8e8ec596
KP
106/*
107 * per-session context
108 */
109struct caam_ctx {
1acebad3
YK
110 u32 sh_desc_enc[DESC_MAX_USED_LEN];
111 u32 sh_desc_dec[DESC_MAX_USED_LEN];
bbf22344 112 u8 key[CAAM_MAX_KEY_SIZE];
1acebad3
YK
113 dma_addr_t sh_desc_enc_dma;
114 dma_addr_t sh_desc_dec_dma;
885e9e2f 115 dma_addr_t key_dma;
7e0880b9 116 enum dma_data_direction dir;
bbf22344 117 struct device *jrdev;
db57656b
HG
118 struct alginfo adata;
119 struct alginfo cdata;
8e8ec596
KP
120 unsigned int authsize;
121};
122
ae4a825f
HG
123static int aead_null_set_sh_desc(struct crypto_aead *aead)
124{
ae4a825f
HG
125 struct caam_ctx *ctx = crypto_aead_ctx(aead);
126 struct device *jrdev = ctx->jrdev;
7e0880b9 127 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
ae4a825f 128 u32 *desc;
4cbe79cc
HG
129 int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
130 ctx->adata.keylen_pad;
ae4a825f
HG
131
132 /*
133 * Job Descriptor and Shared Descriptors
134 * must all fit into the 64-word Descriptor h/w Buffer
135 */
4cbe79cc 136 if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) {
db57656b 137 ctx->adata.key_inline = true;
9c0bc511 138 ctx->adata.key_virt = ctx->key;
db57656b
HG
139 } else {
140 ctx->adata.key_inline = false;
9c0bc511 141 ctx->adata.key_dma = ctx->key_dma;
db57656b 142 }
ae4a825f 143
479bcc7c 144 /* aead_encrypt shared descriptor */
ae4a825f 145 desc = ctx->sh_desc_enc;
7e0880b9
HG
146 cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize,
147 ctrlpriv->era);
bbf22344 148 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
7e0880b9 149 desc_bytes(desc), ctx->dir);
ae4a825f
HG
150
151 /*
152 * Job Descriptor and Shared Descriptors
153 * must all fit into the 64-word Descriptor h/w Buffer
154 */
4cbe79cc 155 if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) {
db57656b 156 ctx->adata.key_inline = true;
9c0bc511 157 ctx->adata.key_virt = ctx->key;
db57656b
HG
158 } else {
159 ctx->adata.key_inline = false;
9c0bc511 160 ctx->adata.key_dma = ctx->key_dma;
db57656b 161 }
ae4a825f 162
479bcc7c 163 /* aead_decrypt shared descriptor */
8cea7b66 164 desc = ctx->sh_desc_dec;
7e0880b9
HG
165 cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize,
166 ctrlpriv->era);
bbf22344 167 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
7e0880b9 168 desc_bytes(desc), ctx->dir);
ae4a825f
HG
169
170 return 0;
171}
172
1acebad3
YK
173static int aead_set_sh_desc(struct crypto_aead *aead)
174{
479bcc7c
HX
175 struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
176 struct caam_aead_alg, aead);
add86d55 177 unsigned int ivsize = crypto_aead_ivsize(aead);
1acebad3
YK
178 struct caam_ctx *ctx = crypto_aead_ctx(aead);
179 struct device *jrdev = ctx->jrdev;
7e0880b9 180 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
daebc465 181 u32 ctx1_iv_off = 0;
8cea7b66 182 u32 *desc, *nonce = NULL;
4cbe79cc
HG
183 u32 inl_mask;
184 unsigned int data_len[2];
db57656b 185 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
daebc465 186 OP_ALG_AAI_CTR_MOD128);
479bcc7c 187 const bool is_rfc3686 = alg->caam.rfc3686;
1acebad3 188
2fdea258
HG
189 if (!ctx->authsize)
190 return 0;
191
ae4a825f 192 /* NULL encryption / decryption */
db57656b 193 if (!ctx->cdata.keylen)
ae4a825f
HG
194 return aead_null_set_sh_desc(aead);
195
daebc465
CV
196 /*
197 * AES-CTR needs to load IV in CONTEXT1 reg
198 * at an offset of 128bits (16bytes)
199 * CONTEXT1[255:128] = IV
200 */
201 if (ctr_mode)
202 ctx1_iv_off = 16;
203
204 /*
205 * RFC3686 specific:
206 * CONTEXT1[255:128] = {NONCE, IV, COUNTER}
207 */
8cea7b66 208 if (is_rfc3686) {
daebc465 209 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
8cea7b66
HG
210 nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
211 ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
212 }
daebc465 213
4cbe79cc
HG
214 data_len[0] = ctx->adata.keylen_pad;
215 data_len[1] = ctx->cdata.keylen;
216
479bcc7c
HX
217 if (alg->caam.geniv)
218 goto skip_enc;
219
1acebad3
YK
220 /*
221 * Job Descriptor and Shared Descriptors
222 * must all fit into the 64-word Descriptor h/w Buffer
223 */
4cbe79cc
HG
224 if (desc_inline_query(DESC_AEAD_ENC_LEN +
225 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
226 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
227 ARRAY_SIZE(data_len)) < 0)
228 return -EINVAL;
229
230 if (inl_mask & 1)
9c0bc511 231 ctx->adata.key_virt = ctx->key;
4cbe79cc 232 else
9c0bc511 233 ctx->adata.key_dma = ctx->key_dma;
4cbe79cc
HG
234
235 if (inl_mask & 2)
9c0bc511 236 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
4cbe79cc 237 else
9c0bc511 238 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;
4cbe79cc
HG
239
240 ctx->adata.key_inline = !!(inl_mask & 1);
241 ctx->cdata.key_inline = !!(inl_mask & 2);
1acebad3 242
479bcc7c 243 /* aead_encrypt shared descriptor */
1acebad3 244 desc = ctx->sh_desc_enc;
b189817c
HG
245 cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize,
246 ctx->authsize, is_rfc3686, nonce, ctx1_iv_off,
7e0880b9 247 false, ctrlpriv->era);
bbf22344 248 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
7e0880b9 249 desc_bytes(desc), ctx->dir);
1acebad3 250
479bcc7c 251skip_enc:
1acebad3
YK
252 /*
253 * Job Descriptor and Shared Descriptors
254 * must all fit into the 64-word Descriptor h/w Buffer
255 */
4cbe79cc
HG
256 if (desc_inline_query(DESC_AEAD_DEC_LEN +
257 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
258 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
259 ARRAY_SIZE(data_len)) < 0)
260 return -EINVAL;
261
262 if (inl_mask & 1)
9c0bc511 263 ctx->adata.key_virt = ctx->key;
4cbe79cc 264 else
9c0bc511 265 ctx->adata.key_dma = ctx->key_dma;
4cbe79cc
HG
266
267 if (inl_mask & 2)
9c0bc511 268 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
4cbe79cc 269 else
9c0bc511 270 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;
4cbe79cc
HG
271
272 ctx->adata.key_inline = !!(inl_mask & 1);
273 ctx->cdata.key_inline = !!(inl_mask & 2);
1acebad3 274
479bcc7c 275 /* aead_decrypt shared descriptor */
4464a7d4 276 desc = ctx->sh_desc_dec;
8cea7b66
HG
277 cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize,
278 ctx->authsize, alg->caam.geniv, is_rfc3686,
7e0880b9 279 nonce, ctx1_iv_off, false, ctrlpriv->era);
bbf22344 280 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
7e0880b9 281 desc_bytes(desc), ctx->dir);
1acebad3 282
479bcc7c
HX
283 if (!alg->caam.geniv)
284 goto skip_givenc;
285
1acebad3
YK
286 /*
287 * Job Descriptor and Shared Descriptors
288 * must all fit into the 64-word Descriptor h/w Buffer
289 */
4cbe79cc
HG
290 if (desc_inline_query(DESC_AEAD_GIVENC_LEN +
291 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
292 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
293 ARRAY_SIZE(data_len)) < 0)
294 return -EINVAL;
295
296 if (inl_mask & 1)
9c0bc511 297 ctx->adata.key_virt = ctx->key;
4cbe79cc 298 else
9c0bc511 299 ctx->adata.key_dma = ctx->key_dma;
4cbe79cc
HG
300
301 if (inl_mask & 2)
9c0bc511 302 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
4cbe79cc 303 else
9c0bc511 304 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;
4cbe79cc
HG
305
306 ctx->adata.key_inline = !!(inl_mask & 1);
307 ctx->cdata.key_inline = !!(inl_mask & 2);
1acebad3
YK
308
309 /* aead_givencrypt shared descriptor */
1d2d87e8 310 desc = ctx->sh_desc_enc;
8cea7b66
HG
311 cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize,
312 ctx->authsize, is_rfc3686, nonce,
7e0880b9 313 ctx1_iv_off, false, ctrlpriv->era);
bbf22344 314 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
7e0880b9 315 desc_bytes(desc), ctx->dir);
1acebad3 316
479bcc7c 317skip_givenc:
1acebad3
YK
318 return 0;
319}
320
0e479300 321static int aead_setauthsize(struct crypto_aead *authenc,
8e8ec596
KP
322 unsigned int authsize)
323{
324 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
325
326 ctx->authsize = authsize;
1acebad3 327 aead_set_sh_desc(authenc);
8e8ec596
KP
328
329 return 0;
330}
331
3ef8d945
TA
332static int gcm_set_sh_desc(struct crypto_aead *aead)
333{
3ef8d945
TA
334 struct caam_ctx *ctx = crypto_aead_ctx(aead);
335 struct device *jrdev = ctx->jrdev;
87ec3a0b 336 unsigned int ivsize = crypto_aead_ivsize(aead);
3ef8d945 337 u32 *desc;
4cbe79cc
HG
338 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
339 ctx->cdata.keylen;
3ef8d945 340
db57656b 341 if (!ctx->cdata.keylen || !ctx->authsize)
3ef8d945
TA
342 return 0;
343
344 /*
345 * AES GCM encrypt shared descriptor
346 * Job Descriptor and Shared Descriptor
347 * must fit into the 64-word Descriptor h/w Buffer
348 */
4cbe79cc 349 if (rem_bytes >= DESC_GCM_ENC_LEN) {
db57656b 350 ctx->cdata.key_inline = true;
9c0bc511 351 ctx->cdata.key_virt = ctx->key;
db57656b
HG
352 } else {
353 ctx->cdata.key_inline = false;
9c0bc511 354 ctx->cdata.key_dma = ctx->key_dma;
db57656b 355 }
3ef8d945
TA
356
357 desc = ctx->sh_desc_enc;
87ec3a0b 358 cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
bbf22344 359 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
7e0880b9 360 desc_bytes(desc), ctx->dir);
3ef8d945
TA
361
362 /*
363 * Job Descriptor and Shared Descriptors
364 * must all fit into the 64-word Descriptor h/w Buffer
365 */
4cbe79cc 366 if (rem_bytes >= DESC_GCM_DEC_LEN) {
db57656b 367 ctx->cdata.key_inline = true;
9c0bc511 368 ctx->cdata.key_virt = ctx->key;
db57656b
HG
369 } else {
370 ctx->cdata.key_inline = false;
9c0bc511 371 ctx->cdata.key_dma = ctx->key_dma;
db57656b 372 }
3ef8d945
TA
373
374 desc = ctx->sh_desc_dec;
87ec3a0b 375 cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
bbf22344 376 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
7e0880b9 377 desc_bytes(desc), ctx->dir);
3ef8d945
TA
378
379 return 0;
380}
381
382static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
383{
384 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
385
386 ctx->authsize = authsize;
387 gcm_set_sh_desc(authenc);
388
389 return 0;
390}
391
bac68f2c
TA
392static int rfc4106_set_sh_desc(struct crypto_aead *aead)
393{
bac68f2c
TA
394 struct caam_ctx *ctx = crypto_aead_ctx(aead);
395 struct device *jrdev = ctx->jrdev;
87ec3a0b 396 unsigned int ivsize = crypto_aead_ivsize(aead);
bac68f2c 397 u32 *desc;
4cbe79cc
HG
398 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
399 ctx->cdata.keylen;
bac68f2c 400
db57656b 401 if (!ctx->cdata.keylen || !ctx->authsize)
bac68f2c
TA
402 return 0;
403
404 /*
405 * RFC4106 encrypt shared descriptor
406 * Job Descriptor and Shared Descriptor
407 * must fit into the 64-word Descriptor h/w Buffer
408 */
4cbe79cc 409 if (rem_bytes >= DESC_RFC4106_ENC_LEN) {
db57656b 410 ctx->cdata.key_inline = true;
9c0bc511 411 ctx->cdata.key_virt = ctx->key;
db57656b
HG
412 } else {
413 ctx->cdata.key_inline = false;
9c0bc511 414 ctx->cdata.key_dma = ctx->key_dma;
db57656b 415 }
bac68f2c
TA
416
417 desc = ctx->sh_desc_enc;
87ec3a0b
HG
418 cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
419 false);
bbf22344 420 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
7e0880b9 421 desc_bytes(desc), ctx->dir);
bac68f2c
TA
422
423 /*
424 * Job Descriptor and Shared Descriptors
425 * must all fit into the 64-word Descriptor h/w Buffer
426 */
4cbe79cc 427 if (rem_bytes >= DESC_RFC4106_DEC_LEN) {
db57656b 428 ctx->cdata.key_inline = true;
9c0bc511 429 ctx->cdata.key_virt = ctx->key;
db57656b
HG
430 } else {
431 ctx->cdata.key_inline = false;
9c0bc511 432 ctx->cdata.key_dma = ctx->key_dma;
db57656b 433 }
bac68f2c
TA
434
435 desc = ctx->sh_desc_dec;
87ec3a0b
HG
436 cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
437 false);
bbf22344 438 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
7e0880b9 439 desc_bytes(desc), ctx->dir);
bac68f2c 440
bac68f2c
TA
441 return 0;
442}
443
444static int rfc4106_setauthsize(struct crypto_aead *authenc,
445 unsigned int authsize)
446{
447 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
448
449 ctx->authsize = authsize;
450 rfc4106_set_sh_desc(authenc);
451
452 return 0;
453}
454
5d0429a3
TA
455static int rfc4543_set_sh_desc(struct crypto_aead *aead)
456{
5d0429a3
TA
457 struct caam_ctx *ctx = crypto_aead_ctx(aead);
458 struct device *jrdev = ctx->jrdev;
87ec3a0b 459 unsigned int ivsize = crypto_aead_ivsize(aead);
5d0429a3 460 u32 *desc;
4cbe79cc
HG
461 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
462 ctx->cdata.keylen;
5d0429a3 463
db57656b 464 if (!ctx->cdata.keylen || !ctx->authsize)
5d0429a3
TA
465 return 0;
466
467 /*
468 * RFC4543 encrypt shared descriptor
469 * Job Descriptor and Shared Descriptor
470 * must fit into the 64-word Descriptor h/w Buffer
471 */
4cbe79cc 472 if (rem_bytes >= DESC_RFC4543_ENC_LEN) {
db57656b 473 ctx->cdata.key_inline = true;
9c0bc511 474 ctx->cdata.key_virt = ctx->key;
db57656b
HG
475 } else {
476 ctx->cdata.key_inline = false;
9c0bc511 477 ctx->cdata.key_dma = ctx->key_dma;
db57656b 478 }
5d0429a3
TA
479
480 desc = ctx->sh_desc_enc;
87ec3a0b
HG
481 cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
482 false);
bbf22344 483 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
7e0880b9 484 desc_bytes(desc), ctx->dir);
5d0429a3
TA
485
486 /*
487 * Job Descriptor and Shared Descriptors
488 * must all fit into the 64-word Descriptor h/w Buffer
489 */
4cbe79cc 490 if (rem_bytes >= DESC_RFC4543_DEC_LEN) {
db57656b 491 ctx->cdata.key_inline = true;
9c0bc511 492 ctx->cdata.key_virt = ctx->key;
db57656b
HG
493 } else {
494 ctx->cdata.key_inline = false;
9c0bc511 495 ctx->cdata.key_dma = ctx->key_dma;
db57656b 496 }
5d0429a3
TA
497
498 desc = ctx->sh_desc_dec;
87ec3a0b
HG
499 cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
500 false);
bbf22344 501 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
7e0880b9 502 desc_bytes(desc), ctx->dir);
5d0429a3 503
f2147b88
HX
504 return 0;
505}
5d0429a3 506
f2147b88
HX
507static int rfc4543_setauthsize(struct crypto_aead *authenc,
508 unsigned int authsize)
509{
510 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
5d0429a3 511
f2147b88
HX
512 ctx->authsize = authsize;
513 rfc4543_set_sh_desc(authenc);
5d0429a3 514
f2147b88
HX
515 return 0;
516}
5d0429a3 517
d6bbd4ee
HG
518static int chachapoly_set_sh_desc(struct crypto_aead *aead)
519{
520 struct caam_ctx *ctx = crypto_aead_ctx(aead);
521 struct device *jrdev = ctx->jrdev;
522 unsigned int ivsize = crypto_aead_ivsize(aead);
523 u32 *desc;
524
525 if (!ctx->cdata.keylen || !ctx->authsize)
526 return 0;
527
528 desc = ctx->sh_desc_enc;
529 cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize,
c10a5336 530 ctx->authsize, true, false);
d6bbd4ee
HG
531 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
532 desc_bytes(desc), ctx->dir);
533
534 desc = ctx->sh_desc_dec;
535 cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize,
c10a5336 536 ctx->authsize, false, false);
d6bbd4ee
HG
537 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
538 desc_bytes(desc), ctx->dir);
539
540 return 0;
541}
542
543static int chachapoly_setauthsize(struct crypto_aead *aead,
544 unsigned int authsize)
545{
546 struct caam_ctx *ctx = crypto_aead_ctx(aead);
547
548 if (authsize != POLY1305_DIGEST_SIZE)
549 return -EINVAL;
550
551 ctx->authsize = authsize;
552 return chachapoly_set_sh_desc(aead);
553}
554
555static int chachapoly_setkey(struct crypto_aead *aead, const u8 *key,
556 unsigned int keylen)
557{
558 struct caam_ctx *ctx = crypto_aead_ctx(aead);
559 unsigned int ivsize = crypto_aead_ivsize(aead);
560 unsigned int saltlen = CHACHAPOLY_IV_SIZE - ivsize;
561
1ca1b917 562 if (keylen != CHACHA_KEY_SIZE + saltlen) {
d6bbd4ee
HG
563 crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
564 return -EINVAL;
565 }
566
567 ctx->cdata.key_virt = key;
568 ctx->cdata.keylen = keylen - saltlen;
569
570 return chachapoly_set_sh_desc(aead);
571}
572
0e479300 573static int aead_setkey(struct crypto_aead *aead,
8e8ec596
KP
574 const u8 *key, unsigned int keylen)
575{
8e8ec596
KP
576 struct caam_ctx *ctx = crypto_aead_ctx(aead);
577 struct device *jrdev = ctx->jrdev;
7e0880b9 578 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
4e6e0b27 579 struct crypto_authenc_keys keys;
8e8ec596
KP
580 int ret = 0;
581
4e6e0b27 582 if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
8e8ec596
KP
583 goto badkey;
584
8e8ec596
KP
585#ifdef DEBUG
586 printk(KERN_ERR "keylen %d enckeylen %d authkeylen %d\n",
4e6e0b27
HG
587 keys.authkeylen + keys.enckeylen, keys.enckeylen,
588 keys.authkeylen);
514df281 589 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
8e8ec596
KP
590 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
591#endif
8e8ec596 592
7e0880b9
HG
593 /*
594 * If DKP is supported, use it in the shared descriptor to generate
595 * the split key.
596 */
597 if (ctrlpriv->era >= 6) {
598 ctx->adata.keylen = keys.authkeylen;
599 ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype &
600 OP_ALG_ALGSEL_MASK);
601
602 if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE)
603 goto badkey;
604
605 memcpy(ctx->key, keys.authkey, keys.authkeylen);
606 memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey,
607 keys.enckeylen);
608 dma_sync_single_for_device(jrdev, ctx->key_dma,
609 ctx->adata.keylen_pad +
610 keys.enckeylen, ctx->dir);
611 goto skip_split_key;
612 }
613
6655cb8e
HG
614 ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey,
615 keys.authkeylen, CAAM_MAX_KEY_SIZE -
616 keys.enckeylen);
8e8ec596 617 if (ret) {
8e8ec596
KP
618 goto badkey;
619 }
620
621 /* postpend encryption key to auth split key */
db57656b 622 memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
bbf22344 623 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
7e0880b9 624 keys.enckeylen, ctx->dir);
8e8ec596 625#ifdef DEBUG
514df281 626 print_hex_dump(KERN_ERR, "ctx.key@"__stringify(__LINE__)": ",
8e8ec596 627 DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
db57656b 628 ctx->adata.keylen_pad + keys.enckeylen, 1);
8e8ec596 629#endif
7e0880b9
HG
630
631skip_split_key:
db57656b 632 ctx->cdata.keylen = keys.enckeylen;
61dab972 633 memzero_explicit(&keys, sizeof(keys));
bbf22344 634 return aead_set_sh_desc(aead);
8e8ec596
KP
635badkey:
636 crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
61dab972 637 memzero_explicit(&keys, sizeof(keys));
8e8ec596
KP
638 return -EINVAL;
639}
640
3ef8d945
TA
641static int gcm_setkey(struct crypto_aead *aead,
642 const u8 *key, unsigned int keylen)
643{
644 struct caam_ctx *ctx = crypto_aead_ctx(aead);
645 struct device *jrdev = ctx->jrdev;
3ef8d945
TA
646
647#ifdef DEBUG
648 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
649 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
650#endif
651
652 memcpy(ctx->key, key, keylen);
7e0880b9 653 dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir);
db57656b 654 ctx->cdata.keylen = keylen;
3ef8d945 655
bbf22344 656 return gcm_set_sh_desc(aead);
3ef8d945
TA
657}
658
bac68f2c
TA
659static int rfc4106_setkey(struct crypto_aead *aead,
660 const u8 *key, unsigned int keylen)
661{
662 struct caam_ctx *ctx = crypto_aead_ctx(aead);
663 struct device *jrdev = ctx->jrdev;
bac68f2c
TA
664
665 if (keylen < 4)
666 return -EINVAL;
667
668#ifdef DEBUG
669 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
670 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
671#endif
672
673 memcpy(ctx->key, key, keylen);
674
675 /*
676 * The last four bytes of the key material are used as the salt value
677 * in the nonce. Update the AES key length.
678 */
db57656b 679 ctx->cdata.keylen = keylen - 4;
bbf22344 680 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
7e0880b9 681 ctx->dir);
bbf22344 682 return rfc4106_set_sh_desc(aead);
bac68f2c
TA
683}
684
5d0429a3
TA
685static int rfc4543_setkey(struct crypto_aead *aead,
686 const u8 *key, unsigned int keylen)
687{
688 struct caam_ctx *ctx = crypto_aead_ctx(aead);
689 struct device *jrdev = ctx->jrdev;
5d0429a3
TA
690
691 if (keylen < 4)
692 return -EINVAL;
693
694#ifdef DEBUG
695 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
696 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
697#endif
698
699 memcpy(ctx->key, key, keylen);
700
701 /*
702 * The last four bytes of the key material are used as the salt value
703 * in the nonce. Update the AES key length.
704 */
db57656b 705 ctx->cdata.keylen = keylen - 4;
bbf22344 706 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
7e0880b9 707 ctx->dir);
bbf22344 708 return rfc4543_set_sh_desc(aead);
5d0429a3
TA
709}
710
5ca7badb
HG
711static int skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
712 unsigned int keylen)
acdca31d 713{
5ca7badb
HG
714 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
715 struct caam_skcipher_alg *alg =
716 container_of(crypto_skcipher_alg(skcipher), typeof(*alg),
717 skcipher);
acdca31d 718 struct device *jrdev = ctx->jrdev;
5ca7badb 719 unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
acdca31d 720 u32 *desc;
2b22f6c5 721 u32 ctx1_iv_off = 0;
db57656b 722 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
2b22f6c5 723 OP_ALG_AAI_CTR_MOD128);
5ca7badb 724 const bool is_rfc3686 = alg->caam.rfc3686;
acdca31d
YK
725
726#ifdef DEBUG
514df281 727 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
acdca31d
YK
728 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
729#endif
2b22f6c5
CV
730 /*
731 * AES-CTR needs to load IV in CONTEXT1 reg
732 * at an offset of 128bits (16bytes)
733 * CONTEXT1[255:128] = IV
734 */
735 if (ctr_mode)
736 ctx1_iv_off = 16;
acdca31d 737
a5f57cff
CV
738 /*
739 * RFC3686 specific:
740 * | CONTEXT1[255:128] = {NONCE, IV, COUNTER}
741 * | *key = {KEY, NONCE}
742 */
743 if (is_rfc3686) {
744 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
745 keylen -= CTR_RFC3686_NONCE_SIZE;
746 }
747
db57656b 748 ctx->cdata.keylen = keylen;
662f70ed 749 ctx->cdata.key_virt = key;
db57656b 750 ctx->cdata.key_inline = true;
acdca31d 751
5ca7badb 752 /* skcipher_encrypt shared descriptor */
acdca31d 753 desc = ctx->sh_desc_enc;
9dbe3072
HG
754 cnstr_shdsc_skcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686,
755 ctx1_iv_off);
bbf22344 756 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
7e0880b9 757 desc_bytes(desc), ctx->dir);
8cea7b66 758
5ca7badb 759 /* skcipher_decrypt shared descriptor */
acdca31d 760 desc = ctx->sh_desc_dec;
9dbe3072
HG
761 cnstr_shdsc_skcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686,
762 ctx1_iv_off);
bbf22344 763 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
7e0880b9 764 desc_bytes(desc), ctx->dir);
acdca31d 765
8cea7b66 766 return 0;
acdca31d
YK
767}
768
eaed71a4
IP
769static int des_skcipher_setkey(struct crypto_skcipher *skcipher,
770 const u8 *key, unsigned int keylen)
771{
772 u32 tmp[DES3_EDE_EXPKEY_WORDS];
773 struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher);
774
775 if (keylen == DES3_EDE_KEY_SIZE &&
776 __des3_ede_setkey(tmp, &tfm->crt_flags, key, DES3_EDE_KEY_SIZE)) {
777 return -EINVAL;
778 }
779
780 if (!des_ekey(tmp, key) && (crypto_skcipher_get_flags(skcipher) &
781 CRYPTO_TFM_REQ_FORBID_WEAK_KEYS)) {
782 crypto_skcipher_set_flags(skcipher,
783 CRYPTO_TFM_RES_WEAK_KEY);
784 return -EINVAL;
785 }
786
787 return skcipher_setkey(skcipher, key, keylen);
788}
789
5ca7badb
HG
790static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
791 unsigned int keylen)
c6415a60 792{
5ca7badb 793 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
c6415a60 794 struct device *jrdev = ctx->jrdev;
8cea7b66 795 u32 *desc;
c6415a60
CV
796
797 if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
5ca7badb 798 crypto_skcipher_set_flags(skcipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
c6415a60
CV
799 dev_err(jrdev, "key size mismatch\n");
800 return -EINVAL;
801 }
802
db57656b 803 ctx->cdata.keylen = keylen;
662f70ed 804 ctx->cdata.key_virt = key;
db57656b 805 ctx->cdata.key_inline = true;
c6415a60 806
5ca7badb 807 /* xts_skcipher_encrypt shared descriptor */
c6415a60 808 desc = ctx->sh_desc_enc;
9dbe3072 809 cnstr_shdsc_xts_skcipher_encap(desc, &ctx->cdata);
bbf22344 810 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
7e0880b9 811 desc_bytes(desc), ctx->dir);
c6415a60 812
5ca7badb 813 /* xts_skcipher_decrypt shared descriptor */
c6415a60 814 desc = ctx->sh_desc_dec;
9dbe3072 815 cnstr_shdsc_xts_skcipher_decap(desc, &ctx->cdata);
bbf22344 816 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
7e0880b9 817 desc_bytes(desc), ctx->dir);
c6415a60
CV
818
819 return 0;
820}
821
8e8ec596 822/*
1acebad3 823 * aead_edesc - s/w-extended aead descriptor
fa0c92db
HG
824 * @src_nents: number of segments in input s/w scatterlist
825 * @dst_nents: number of segments in output s/w scatterlist
ba4cf71b
IP
826 * @mapped_src_nents: number of segments in input h/w link table
827 * @mapped_dst_nents: number of segments in output h/w link table
a299c837
YK
828 * @sec4_sg_bytes: length of dma mapped sec4_sg space
829 * @sec4_sg_dma: bus physical mapped address of h/w link table
4ca7c7d8 830 * @sec4_sg: pointer to h/w link table
8e8ec596
KP
831 * @hw_desc: the h/w job descriptor followed by any referenced link tables
832 */
0e479300 833struct aead_edesc {
8e8ec596
KP
834 int src_nents;
835 int dst_nents;
ba4cf71b
IP
836 int mapped_src_nents;
837 int mapped_dst_nents;
a299c837
YK
838 int sec4_sg_bytes;
839 dma_addr_t sec4_sg_dma;
840 struct sec4_sg_entry *sec4_sg;
f2147b88 841 u32 hw_desc[];
8e8ec596
KP
842};
843
acdca31d 844/*
5ca7badb 845 * skcipher_edesc - s/w-extended skcipher descriptor
fa0c92db
HG
846 * @src_nents: number of segments in input s/w scatterlist
847 * @dst_nents: number of segments in output s/w scatterlist
ba4cf71b
IP
848 * @mapped_src_nents: number of segments in input h/w link table
849 * @mapped_dst_nents: number of segments in output h/w link table
acdca31d 850 * @iv_dma: dma address of iv for checking continuity and link table
a299c837
YK
851 * @sec4_sg_bytes: length of dma mapped sec4_sg space
852 * @sec4_sg_dma: bus physical mapped address of h/w link table
4ca7c7d8 853 * @sec4_sg: pointer to h/w link table
acdca31d 854 * @hw_desc: the h/w job descriptor followed by any referenced link tables
115957bb 855 * and IV
acdca31d 856 */
5ca7badb 857struct skcipher_edesc {
acdca31d
YK
858 int src_nents;
859 int dst_nents;
ba4cf71b
IP
860 int mapped_src_nents;
861 int mapped_dst_nents;
acdca31d 862 dma_addr_t iv_dma;
a299c837
YK
863 int sec4_sg_bytes;
864 dma_addr_t sec4_sg_dma;
865 struct sec4_sg_entry *sec4_sg;
acdca31d
YK
866 u32 hw_desc[0];
867};
868
1acebad3 869static void caam_unmap(struct device *dev, struct scatterlist *src,
643b39b0 870 struct scatterlist *dst, int src_nents,
13fb8fd7 871 int dst_nents,
cf5448b5 872 dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
a299c837 873 int sec4_sg_bytes)
8e8ec596 874{
643b39b0 875 if (dst != src) {
fa0c92db
HG
876 if (src_nents)
877 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
763069ba
HG
878 if (dst_nents)
879 dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
8e8ec596 880 } else {
fa0c92db 881 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
8e8ec596
KP
882 }
883
1acebad3 884 if (iv_dma)
cf5448b5 885 dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE);
a299c837
YK
886 if (sec4_sg_bytes)
887 dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
8e8ec596
KP
888 DMA_TO_DEVICE);
889}
890
1acebad3
YK
891static void aead_unmap(struct device *dev,
892 struct aead_edesc *edesc,
893 struct aead_request *req)
f2147b88
HX
894{
895 caam_unmap(dev, req->src, req->dst,
cf5448b5 896 edesc->src_nents, edesc->dst_nents, 0, 0,
f2147b88
HX
897 edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
898}
899
5ca7badb
HG
900static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,
901 struct skcipher_request *req)
acdca31d 902{
5ca7badb
HG
903 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
904 int ivsize = crypto_skcipher_ivsize(skcipher);
acdca31d
YK
905
906 caam_unmap(dev, req->src, req->dst,
13fb8fd7 907 edesc->src_nents, edesc->dst_nents,
cf5448b5 908 edesc->iv_dma, ivsize,
643b39b0 909 edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
acdca31d
YK
910}
911
0e479300 912static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
8e8ec596
KP
913 void *context)
914{
0e479300
YK
915 struct aead_request *req = context;
916 struct aead_edesc *edesc;
f2147b88
HX
917
918#ifdef DEBUG
919 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
920#endif
921
922 edesc = container_of(desc, struct aead_edesc, hw_desc[0]);
923
924 if (err)
925 caam_jr_strstatus(jrdev, err);
926
927 aead_unmap(jrdev, edesc, req);
928
929 kfree(edesc);
930
931 aead_request_complete(req, err);
932}
933
0e479300 934static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
8e8ec596
KP
935 void *context)
936{
0e479300
YK
937 struct aead_request *req = context;
938 struct aead_edesc *edesc;
f2147b88
HX
939
940#ifdef DEBUG
941 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
942#endif
943
944 edesc = container_of(desc, struct aead_edesc, hw_desc[0]);
945
946 if (err)
947 caam_jr_strstatus(jrdev, err);
948
949 aead_unmap(jrdev, edesc, req);
950
951 /*
952 * verify hw auth check passed else return -EBADMSG
953 */
954 if ((err & JRSTA_CCBERR_ERRID_MASK) == JRSTA_CCBERR_ERRID_ICVCHK)
955 err = -EBADMSG;
956
957 kfree(edesc);
958
959 aead_request_complete(req, err);
960}
961
5ca7badb
HG
962static void skcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
963 void *context)
acdca31d 964{
5ca7badb
HG
965 struct skcipher_request *req = context;
966 struct skcipher_edesc *edesc;
967 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
968 int ivsize = crypto_skcipher_ivsize(skcipher);
acdca31d 969
854b06f7 970#ifdef DEBUG
acdca31d
YK
971 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
972#endif
973
5ca7badb 974 edesc = container_of(desc, struct skcipher_edesc, hw_desc[0]);
acdca31d 975
fa9659cd
MV
976 if (err)
977 caam_jr_strstatus(jrdev, err);
acdca31d
YK
978
979#ifdef DEBUG
514df281 980 print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ",
5ca7badb 981 DUMP_PREFIX_ADDRESS, 16, 4, req->iv,
acdca31d 982 edesc->src_nents > 1 ? 100 : ivsize, 1);
acdca31d 983#endif
972b812b
HG
984 caam_dump_sg(KERN_ERR, "dst @" __stringify(__LINE__)": ",
985 DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
5ca7badb 986 edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);
acdca31d 987
5ca7badb 988 skcipher_unmap(jrdev, edesc, req);
854b06f7
DG
989
990 /*
5ca7badb 991 * The crypto API expects us to set the IV (req->iv) to the last
854b06f7
DG
992 * ciphertext block. This is used e.g. by the CTS mode.
993 */
eaed71a4
IP
994 if (ivsize)
995 scatterwalk_map_and_copy(req->iv, req->dst, req->cryptlen -
996 ivsize, ivsize, 0);
854b06f7 997
acdca31d
YK
998 kfree(edesc);
999
5ca7badb 1000 skcipher_request_complete(req, err);
acdca31d
YK
1001}
1002
5ca7badb
HG
1003static void skcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
1004 void *context)
acdca31d 1005{
5ca7badb
HG
1006 struct skcipher_request *req = context;
1007 struct skcipher_edesc *edesc;
115957bb 1008#ifdef DEBUG
5ca7badb
HG
1009 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1010 int ivsize = crypto_skcipher_ivsize(skcipher);
acdca31d
YK
1011
1012 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
1013#endif
1014
5ca7badb 1015 edesc = container_of(desc, struct skcipher_edesc, hw_desc[0]);
fa9659cd
MV
1016 if (err)
1017 caam_jr_strstatus(jrdev, err);
acdca31d
YK
1018
1019#ifdef DEBUG
514df281 1020 print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ",
5ca7badb 1021 DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1);
acdca31d 1022#endif
972b812b
HG
1023 caam_dump_sg(KERN_ERR, "dst @" __stringify(__LINE__)": ",
1024 DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
5ca7badb 1025 edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);
acdca31d 1026
5ca7badb 1027 skcipher_unmap(jrdev, edesc, req);
acdca31d
YK
1028 kfree(edesc);
1029
5ca7badb 1030 skcipher_request_complete(req, err);
acdca31d
YK
1031}
1032
f2147b88
HX
1033/*
1034 * Fill in aead job descriptor
1035 */
1036static void init_aead_job(struct aead_request *req,
1037 struct aead_edesc *edesc,
1038 bool all_contig, bool encrypt)
1039{
1040 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1041 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1042 int authsize = ctx->authsize;
1043 u32 *desc = edesc->hw_desc;
1044 u32 out_options, in_options;
1045 dma_addr_t dst_dma, src_dma;
1046 int len, sec4_sg_index = 0;
1047 dma_addr_t ptr;
1048 u32 *sh_desc;
1049
1050 sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
1051 ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;
1052
1053 len = desc_len(sh_desc);
1054 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
1055
1056 if (all_contig) {
ba4cf71b
IP
1057 src_dma = edesc->mapped_src_nents ? sg_dma_address(req->src) :
1058 0;
f2147b88
HX
1059 in_options = 0;
1060 } else {
1061 src_dma = edesc->sec4_sg_dma;
ba4cf71b 1062 sec4_sg_index += edesc->mapped_src_nents;
f2147b88
HX
1063 in_options = LDST_SGF;
1064 }
1065
1066 append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
1067 in_options);
1068
1069 dst_dma = src_dma;
1070 out_options = in_options;
1071
1072 if (unlikely(req->src != req->dst)) {
ba4cf71b 1073 if (!edesc->mapped_dst_nents) {
763069ba 1074 dst_dma = 0;
ba4cf71b 1075 } else if (edesc->mapped_dst_nents == 1) {
f2147b88 1076 dst_dma = sg_dma_address(req->dst);
42e95d1f 1077 out_options = 0;
f2147b88
HX
1078 } else {
1079 dst_dma = edesc->sec4_sg_dma +
1080 sec4_sg_index *
1081 sizeof(struct sec4_sg_entry);
1082 out_options = LDST_SGF;
1083 }
1084 }
1085
1086 if (encrypt)
1087 append_seq_out_ptr(desc, dst_dma,
1088 req->assoclen + req->cryptlen + authsize,
1089 out_options);
1090 else
1091 append_seq_out_ptr(desc, dst_dma,
1092 req->assoclen + req->cryptlen - authsize,
1093 out_options);
f2147b88
HX
1094}
1095
1096static void init_gcm_job(struct aead_request *req,
1097 struct aead_edesc *edesc,
1098 bool all_contig, bool encrypt)
1099{
1100 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1101 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1102 unsigned int ivsize = crypto_aead_ivsize(aead);
1103 u32 *desc = edesc->hw_desc;
7545e166 1104 bool generic_gcm = (ivsize == GCM_AES_IV_SIZE);
f2147b88
HX
1105 unsigned int last;
1106
1107 init_aead_job(req, edesc, all_contig, encrypt);
7e0880b9 1108 append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
f2147b88
HX
1109
1110 /* BUG This should not be specific to generic GCM. */
1111 last = 0;
1112 if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
1113 last = FIFOLD_TYPE_LAST1;
1114
1115 /* Read GCM IV */
1116 append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
7545e166 1117 FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | GCM_AES_IV_SIZE | last);
f2147b88
HX
1118 /* Append Salt */
1119 if (!generic_gcm)
db57656b 1120 append_data(desc, ctx->key + ctx->cdata.keylen, 4);
f2147b88
HX
1121 /* Append IV */
1122 append_data(desc, req->iv, ivsize);
1123 /* End of blank commands */
1124}
1125
d6bbd4ee
HG
1126static void init_chachapoly_job(struct aead_request *req,
1127 struct aead_edesc *edesc, bool all_contig,
1128 bool encrypt)
1129{
1130 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1131 unsigned int ivsize = crypto_aead_ivsize(aead);
1132 unsigned int assoclen = req->assoclen;
1133 u32 *desc = edesc->hw_desc;
1134 u32 ctx_iv_off = 4;
1135
1136 init_aead_job(req, edesc, all_contig, encrypt);
1137
1138 if (ivsize != CHACHAPOLY_IV_SIZE) {
1139 /* IPsec specific: CONTEXT1[223:128] = {NONCE, IV} */
1140 ctx_iv_off += 4;
1141
1142 /*
1143 * The associated data comes already with the IV but we need
1144 * to skip it when we authenticate or encrypt...
1145 */
1146 assoclen -= ivsize;
1147 }
1148
1149 append_math_add_imm_u32(desc, REG3, ZERO, IMM, assoclen);
1150
1151 /*
1152 * For IPsec load the IV further in the same register.
1153 * For RFC7539 simply load the 12 bytes nonce in a single operation
1154 */
1155 append_load_as_imm(desc, req->iv, ivsize, LDST_CLASS_1_CCB |
1156 LDST_SRCDST_BYTE_CONTEXT |
1157 ctx_iv_off << LDST_OFFSET_SHIFT);
1158}
1159
479bcc7c
HX
1160static void init_authenc_job(struct aead_request *req,
1161 struct aead_edesc *edesc,
1162 bool all_contig, bool encrypt)
1acebad3
YK
1163{
1164 struct crypto_aead *aead = crypto_aead_reqtfm(req);
479bcc7c
HX
1165 struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
1166 struct caam_aead_alg, aead);
1167 unsigned int ivsize = crypto_aead_ivsize(aead);
1acebad3 1168 struct caam_ctx *ctx = crypto_aead_ctx(aead);
7e0880b9 1169 struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);
db57656b 1170 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
479bcc7c
HX
1171 OP_ALG_AAI_CTR_MOD128);
1172 const bool is_rfc3686 = alg->caam.rfc3686;
1acebad3 1173 u32 *desc = edesc->hw_desc;
479bcc7c 1174 u32 ivoffset = 0;
8e8ec596 1175
479bcc7c
HX
1176 /*
1177 * AES-CTR needs to load IV in CONTEXT1 reg
1178 * at an offset of 128bits (16bytes)
1179 * CONTEXT1[255:128] = IV
1180 */
1181 if (ctr_mode)
1182 ivoffset = 16;
1acebad3 1183
479bcc7c
HX
1184 /*
1185 * RFC3686 specific:
1186 * CONTEXT1[255:128] = {NONCE, IV, COUNTER}
1187 */
1188 if (is_rfc3686)
1189 ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;
8e8ec596 1190
479bcc7c 1191 init_aead_job(req, edesc, all_contig, encrypt);
1acebad3 1192
7e0880b9
HG
1193 /*
1194 * {REG3, DPOVRD} = assoclen, depending on whether MATH command supports
1195 * having DPOVRD as destination.
1196 */
1197 if (ctrlpriv->era < 3)
1198 append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
1199 else
1200 append_math_add_imm_u32(desc, DPOVRD, ZERO, IMM, req->assoclen);
1201
8b18e235 1202 if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
479bcc7c
HX
1203 append_load_as_imm(desc, req->iv, ivsize,
1204 LDST_CLASS_1_CCB |
1205 LDST_SRCDST_BYTE_CONTEXT |
1206 (ivoffset << LDST_OFFSET_SHIFT));
8e8ec596
KP
1207}
1208
acdca31d 1209/*
5ca7badb 1210 * Fill in skcipher job descriptor
acdca31d 1211 */
5ca7badb
HG
1212static void init_skcipher_job(struct skcipher_request *req,
1213 struct skcipher_edesc *edesc,
1214 const bool encrypt)
acdca31d 1215{
5ca7badb
HG
1216 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1217 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
1218 int ivsize = crypto_skcipher_ivsize(skcipher);
acdca31d 1219 u32 *desc = edesc->hw_desc;
5ca7badb 1220 u32 *sh_desc;
eaed71a4
IP
1221 u32 in_options = 0, out_options = 0;
1222 dma_addr_t src_dma, dst_dma, ptr;
1223 int len, sec4_sg_index = 0;
acdca31d
YK
1224
1225#ifdef DEBUG
514df281 1226 print_hex_dump(KERN_ERR, "presciv@"__stringify(__LINE__)": ",
5ca7badb
HG
1227 DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1);
1228 pr_err("asked=%d, cryptlen%d\n",
1229 (int)edesc->src_nents > 1 ? 100 : req->cryptlen, req->cryptlen);
acdca31d 1230#endif
972b812b
HG
1231 caam_dump_sg(KERN_ERR, "src @" __stringify(__LINE__)": ",
1232 DUMP_PREFIX_ADDRESS, 16, 4, req->src,
5ca7badb
HG
1233 edesc->src_nents > 1 ? 100 : req->cryptlen, 1);
1234
1235 sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
1236 ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;
acdca31d
YK
1237
1238 len = desc_len(sh_desc);
1239 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
1240
eaed71a4
IP
1241 if (ivsize || edesc->mapped_src_nents > 1) {
1242 src_dma = edesc->sec4_sg_dma;
1243 sec4_sg_index = edesc->mapped_src_nents + !!ivsize;
1244 in_options = LDST_SGF;
1245 } else {
1246 src_dma = sg_dma_address(req->src);
1247 }
1248
1249 append_seq_in_ptr(desc, src_dma, req->cryptlen + ivsize, in_options);
acdca31d
YK
1250
1251 if (likely(req->src == req->dst)) {
eaed71a4
IP
1252 dst_dma = src_dma + !!ivsize * sizeof(struct sec4_sg_entry);
1253 out_options = in_options;
1254 } else if (edesc->mapped_dst_nents == 1) {
1255 dst_dma = sg_dma_address(req->dst);
acdca31d 1256 } else {
eaed71a4
IP
1257 dst_dma = edesc->sec4_sg_dma + sec4_sg_index *
1258 sizeof(struct sec4_sg_entry);
1259 out_options = LDST_SGF;
acdca31d 1260 }
eaed71a4 1261
5ca7badb 1262 append_seq_out_ptr(desc, dst_dma, req->cryptlen, out_options);
acdca31d
YK
1263}
1264
8e8ec596 1265/*
1acebad3 1266 * allocate and map the aead extended descriptor
8e8ec596 1267 */
479bcc7c
HX
1268static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
1269 int desc_bytes, bool *all_contig_ptr,
1270 bool encrypt)
8e8ec596 1271{
0e479300 1272 struct crypto_aead *aead = crypto_aead_reqtfm(req);
8e8ec596
KP
1273 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1274 struct device *jrdev = ctx->jrdev;
019d62db
HG
1275 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
1276 GFP_KERNEL : GFP_ATOMIC;
838e0a89 1277 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
0e479300 1278 struct aead_edesc *edesc;
fa0c92db 1279 int sec4_sg_index, sec4_sg_len, sec4_sg_bytes;
bbf9c893 1280 unsigned int authsize = ctx->authsize;
1acebad3 1281
bbf9c893 1282 if (unlikely(req->dst != req->src)) {
fa0c92db
HG
1283 src_nents = sg_nents_for_len(req->src, req->assoclen +
1284 req->cryptlen);
fd144d83
HG
1285 if (unlikely(src_nents < 0)) {
1286 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1287 req->assoclen + req->cryptlen);
1288 return ERR_PTR(src_nents);
1289 }
1290
fa0c92db
HG
1291 dst_nents = sg_nents_for_len(req->dst, req->assoclen +
1292 req->cryptlen +
1293 (encrypt ? authsize :
1294 (-authsize)));
fd144d83
HG
1295 if (unlikely(dst_nents < 0)) {
1296 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
1297 req->assoclen + req->cryptlen +
1298 (encrypt ? authsize : (-authsize)));
1299 return ERR_PTR(dst_nents);
1300 }
bbf9c893 1301 } else {
fa0c92db
HG
1302 src_nents = sg_nents_for_len(req->src, req->assoclen +
1303 req->cryptlen +
1304 (encrypt ? authsize : 0));
fd144d83
HG
1305 if (unlikely(src_nents < 0)) {
1306 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1307 req->assoclen + req->cryptlen +
1308 (encrypt ? authsize : 0));
1309 return ERR_PTR(src_nents);
1310 }
f2147b88 1311 }
3ef8d945 1312
f2147b88 1313 if (likely(req->src == req->dst)) {
838e0a89
HG
1314 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1315 DMA_BIDIRECTIONAL);
1316 if (unlikely(!mapped_src_nents)) {
f2147b88 1317 dev_err(jrdev, "unable to map source\n");
f2147b88
HX
1318 return ERR_PTR(-ENOMEM);
1319 }
1320 } else {
fa0c92db
HG
1321 /* Cover also the case of null (zero length) input data */
1322 if (src_nents) {
838e0a89
HG
1323 mapped_src_nents = dma_map_sg(jrdev, req->src,
1324 src_nents, DMA_TO_DEVICE);
1325 if (unlikely(!mapped_src_nents)) {
fa0c92db 1326 dev_err(jrdev, "unable to map source\n");
fa0c92db
HG
1327 return ERR_PTR(-ENOMEM);
1328 }
838e0a89
HG
1329 } else {
1330 mapped_src_nents = 0;
f2147b88
HX
1331 }
1332
763069ba
HG
1333 /* Cover also the case of null (zero length) output data */
1334 if (dst_nents) {
1335 mapped_dst_nents = dma_map_sg(jrdev, req->dst,
1336 dst_nents,
1337 DMA_FROM_DEVICE);
1338 if (unlikely(!mapped_dst_nents)) {
1339 dev_err(jrdev, "unable to map destination\n");
1340 dma_unmap_sg(jrdev, req->src, src_nents,
1341 DMA_TO_DEVICE);
1342 return ERR_PTR(-ENOMEM);
1343 }
1344 } else {
1345 mapped_dst_nents = 0;
f2147b88
HX
1346 }
1347 }
1348
838e0a89
HG
1349 sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0;
1350 sec4_sg_len += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
1351 sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);
1352
1353 /* allocate space for base edesc and hw desc commands, link tables */
1354 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
1355 GFP_DMA | flags);
1356 if (!edesc) {
1357 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
cf5448b5 1358 0, 0, 0);
838e0a89
HG
1359 return ERR_PTR(-ENOMEM);
1360 }
1361
8e8ec596
KP
1362 edesc->src_nents = src_nents;
1363 edesc->dst_nents = dst_nents;
ba4cf71b
IP
1364 edesc->mapped_src_nents = mapped_src_nents;
1365 edesc->mapped_dst_nents = mapped_dst_nents;
a299c837
YK
1366 edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
1367 desc_bytes;
838e0a89 1368 *all_contig_ptr = !(mapped_src_nents > 1);
1acebad3 1369
a299c837 1370 sec4_sg_index = 0;
838e0a89
HG
1371 if (mapped_src_nents > 1) {
1372 sg_to_sec4_sg_last(req->src, mapped_src_nents,
1373 edesc->sec4_sg + sec4_sg_index, 0);
1374 sec4_sg_index += mapped_src_nents;
1acebad3 1375 }
838e0a89
HG
1376 if (mapped_dst_nents > 1) {
1377 sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
a299c837 1378 edesc->sec4_sg + sec4_sg_index, 0);
1acebad3 1379 }
f2147b88
HX
1380
1381 if (!sec4_sg_bytes)
1382 return edesc;
1383
1da2be33
RG
1384 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
1385 sec4_sg_bytes, DMA_TO_DEVICE);
ce572085
HG
1386 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
1387 dev_err(jrdev, "unable to map S/G table\n");
f2147b88
HX
1388 aead_unmap(jrdev, edesc, req);
1389 kfree(edesc);
ce572085
HG
1390 return ERR_PTR(-ENOMEM);
1391 }
8e8ec596 1392
f2147b88
HX
1393 edesc->sec4_sg_bytes = sec4_sg_bytes;
1394
8e8ec596
KP
1395 return edesc;
1396}
1397
f2147b88 1398static int gcm_encrypt(struct aead_request *req)
8e8ec596 1399{
0e479300
YK
1400 struct aead_edesc *edesc;
1401 struct crypto_aead *aead = crypto_aead_reqtfm(req);
8e8ec596
KP
1402 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1403 struct device *jrdev = ctx->jrdev;
1acebad3 1404 bool all_contig;
8e8ec596 1405 u32 *desc;
1acebad3
YK
1406 int ret = 0;
1407
8e8ec596 1408 /* allocate extended descriptor */
f2147b88 1409 edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, true);
8e8ec596
KP
1410 if (IS_ERR(edesc))
1411 return PTR_ERR(edesc);
1412
1acebad3 1413 /* Create and submit job descriptor */
f2147b88 1414 init_gcm_job(req, edesc, all_contig, true);
1acebad3 1415#ifdef DEBUG
514df281 1416 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
1acebad3
YK
1417 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1418 desc_bytes(edesc->hw_desc), 1);
1419#endif
8e8ec596 1420
1acebad3
YK
1421 desc = edesc->hw_desc;
1422 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
1423 if (!ret) {
1424 ret = -EINPROGRESS;
1425 } else {
1426 aead_unmap(jrdev, edesc, req);
1427 kfree(edesc);
1428 }
8e8ec596 1429
1acebad3 1430 return ret;
8e8ec596
KP
1431}
1432
d6bbd4ee
HG
1433static int chachapoly_encrypt(struct aead_request *req)
1434{
1435 struct aead_edesc *edesc;
1436 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1437 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1438 struct device *jrdev = ctx->jrdev;
1439 bool all_contig;
1440 u32 *desc;
1441 int ret;
1442
1443 edesc = aead_edesc_alloc(req, CHACHAPOLY_DESC_JOB_IO_LEN, &all_contig,
1444 true);
1445 if (IS_ERR(edesc))
1446 return PTR_ERR(edesc);
1447
1448 desc = edesc->hw_desc;
1449
1450 init_chachapoly_job(req, edesc, all_contig, true);
1451 print_hex_dump_debug("chachapoly jobdesc@" __stringify(__LINE__)": ",
1452 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
1453 1);
1454
1455 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
1456 if (!ret) {
1457 ret = -EINPROGRESS;
1458 } else {
1459 aead_unmap(jrdev, edesc, req);
1460 kfree(edesc);
1461 }
1462
1463 return ret;
1464}
1465
1466static int chachapoly_decrypt(struct aead_request *req)
1467{
1468 struct aead_edesc *edesc;
1469 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1470 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1471 struct device *jrdev = ctx->jrdev;
1472 bool all_contig;
1473 u32 *desc;
1474 int ret;
1475
1476 edesc = aead_edesc_alloc(req, CHACHAPOLY_DESC_JOB_IO_LEN, &all_contig,
1477 false);
1478 if (IS_ERR(edesc))
1479 return PTR_ERR(edesc);
1480
1481 desc = edesc->hw_desc;
1482
1483 init_chachapoly_job(req, edesc, all_contig, false);
1484 print_hex_dump_debug("chachapoly jobdesc@" __stringify(__LINE__)": ",
1485 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
1486 1);
1487
1488 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
1489 if (!ret) {
1490 ret = -EINPROGRESS;
1491 } else {
1492 aead_unmap(jrdev, edesc, req);
1493 kfree(edesc);
1494 }
1495
1496 return ret;
1497}
1498
46218750
HX
1499static int ipsec_gcm_encrypt(struct aead_request *req)
1500{
1501 if (req->assoclen < 8)
1502 return -EINVAL;
1503
1504 return gcm_encrypt(req);
1505}
1506
479bcc7c 1507static int aead_encrypt(struct aead_request *req)
f2147b88
HX
1508{
1509 struct aead_edesc *edesc;
1510 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1511 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1512 struct device *jrdev = ctx->jrdev;
1513 bool all_contig;
1514 u32 *desc;
1515 int ret = 0;
1516
1517 /* allocate extended descriptor */
479bcc7c
HX
1518 edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
1519 &all_contig, true);
f2147b88
HX
1520 if (IS_ERR(edesc))
1521 return PTR_ERR(edesc);
1522
1523 /* Create and submit job descriptor */
479bcc7c 1524 init_authenc_job(req, edesc, all_contig, true);
f2147b88
HX
1525#ifdef DEBUG
1526 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
1527 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1528 desc_bytes(edesc->hw_desc), 1);
1529#endif
1530
1531 desc = edesc->hw_desc;
479bcc7c 1532 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
f2147b88
HX
1533 if (!ret) {
1534 ret = -EINPROGRESS;
1535 } else {
479bcc7c 1536 aead_unmap(jrdev, edesc, req);
f2147b88
HX
1537 kfree(edesc);
1538 }
1539
1540 return ret;
1541}
1542
1543static int gcm_decrypt(struct aead_request *req)
1544{
1545 struct aead_edesc *edesc;
1546 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1547 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1548 struct device *jrdev = ctx->jrdev;
1549 bool all_contig;
1550 u32 *desc;
1551 int ret = 0;
1552
1553 /* allocate extended descriptor */
1554 edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, false);
1555 if (IS_ERR(edesc))
1556 return PTR_ERR(edesc);
1557
1558 /* Create and submit job descriptor*/
1559 init_gcm_job(req, edesc, all_contig, false);
1560#ifdef DEBUG
1561 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
1562 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1563 desc_bytes(edesc->hw_desc), 1);
1564#endif
1565
1566 desc = edesc->hw_desc;
1567 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
1568 if (!ret) {
1569 ret = -EINPROGRESS;
1570 } else {
1571 aead_unmap(jrdev, edesc, req);
1572 kfree(edesc);
1573 }
1574
1575 return ret;
1576}
1577
46218750
HX
1578static int ipsec_gcm_decrypt(struct aead_request *req)
1579{
1580 if (req->assoclen < 8)
1581 return -EINVAL;
1582
1583 return gcm_decrypt(req);
1584}
1585
479bcc7c 1586static int aead_decrypt(struct aead_request *req)
8e8ec596 1587{
1acebad3 1588 struct aead_edesc *edesc;
8e8ec596 1589 struct crypto_aead *aead = crypto_aead_reqtfm(req);
8e8ec596
KP
1590 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1591 struct device *jrdev = ctx->jrdev;
1acebad3 1592 bool all_contig;
8e8ec596 1593 u32 *desc;
1acebad3 1594 int ret = 0;
8e8ec596 1595
972b812b
HG
1596 caam_dump_sg(KERN_ERR, "dec src@" __stringify(__LINE__)": ",
1597 DUMP_PREFIX_ADDRESS, 16, 4, req->src,
1598 req->assoclen + req->cryptlen, 1);
5ecf8ef9 1599
8e8ec596 1600 /* allocate extended descriptor */
479bcc7c
HX
1601 edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
1602 &all_contig, false);
8e8ec596
KP
1603 if (IS_ERR(edesc))
1604 return PTR_ERR(edesc);
1605
1acebad3 1606 /* Create and submit job descriptor*/
479bcc7c 1607 init_authenc_job(req, edesc, all_contig, false);
1acebad3 1608#ifdef DEBUG
514df281 1609 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
1acebad3
YK
1610 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1611 desc_bytes(edesc->hw_desc), 1);
1612#endif
1613
8e8ec596 1614 desc = edesc->hw_desc;
479bcc7c 1615 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
1acebad3
YK
1616 if (!ret) {
1617 ret = -EINPROGRESS;
1618 } else {
479bcc7c 1619 aead_unmap(jrdev, edesc, req);
1acebad3
YK
1620 kfree(edesc);
1621 }
8e8ec596 1622
1acebad3
YK
1623 return ret;
1624}
8e8ec596 1625
acdca31d 1626/*
5ca7badb 1627 * allocate and map the skcipher extended descriptor for skcipher
acdca31d 1628 */
5ca7badb
HG
1629static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req,
1630 int desc_bytes)
acdca31d 1631{
5ca7badb
HG
1632 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1633 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
acdca31d 1634 struct device *jrdev = ctx->jrdev;
42cfcafb 1635 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
acdca31d 1636 GFP_KERNEL : GFP_ATOMIC;
838e0a89 1637 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
5ca7badb 1638 struct skcipher_edesc *edesc;
eaed71a4 1639 dma_addr_t iv_dma = 0;
115957bb 1640 u8 *iv;
5ca7badb 1641 int ivsize = crypto_skcipher_ivsize(skcipher);
838e0a89 1642 int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;
acdca31d 1643
5ca7badb 1644 src_nents = sg_nents_for_len(req->src, req->cryptlen);
fd144d83
HG
1645 if (unlikely(src_nents < 0)) {
1646 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
5ca7badb 1647 req->cryptlen);
fd144d83
HG
1648 return ERR_PTR(src_nents);
1649 }
acdca31d 1650
fd144d83 1651 if (req->dst != req->src) {
5ca7badb 1652 dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
fd144d83
HG
1653 if (unlikely(dst_nents < 0)) {
1654 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
5ca7badb 1655 req->cryptlen);
fd144d83
HG
1656 return ERR_PTR(dst_nents);
1657 }
1658 }
acdca31d
YK
1659
1660 if (likely(req->src == req->dst)) {
838e0a89
HG
1661 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1662 DMA_BIDIRECTIONAL);
1663 if (unlikely(!mapped_src_nents)) {
c73e36e8
HG
1664 dev_err(jrdev, "unable to map source\n");
1665 return ERR_PTR(-ENOMEM);
1666 }
acdca31d 1667 } else {
838e0a89
HG
1668 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1669 DMA_TO_DEVICE);
1670 if (unlikely(!mapped_src_nents)) {
c73e36e8
HG
1671 dev_err(jrdev, "unable to map source\n");
1672 return ERR_PTR(-ENOMEM);
1673 }
838e0a89
HG
1674 mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
1675 DMA_FROM_DEVICE);
1676 if (unlikely(!mapped_dst_nents)) {
c73e36e8 1677 dev_err(jrdev, "unable to map destination\n");
fa0c92db 1678 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
c73e36e8
HG
1679 return ERR_PTR(-ENOMEM);
1680 }
acdca31d
YK
1681 }
1682
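	/*
	 * Input HW S/G table layout: optional IV entry first, followed by the
	 * source segments; no input table is needed for a single contiguous
	 * source without an IV. Destination segments (when more than one)
	 * follow at dst_sg_idx.
	 */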
eaed71a4
IP
1683 if (!ivsize && mapped_src_nents == 1)
1684 sec4_sg_ents = 0; // no need for an input hw s/g table
1685 else
1686 sec4_sg_ents = mapped_src_nents + !!ivsize;
fa0c92db 1687 dst_sg_idx = sec4_sg_ents;
838e0a89 1688 sec4_sg_ents += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
fa0c92db 1689 sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);
acdca31d 1690
115957bb
HG
1691 /*
1692 * allocate space for base edesc and hw desc commands, link tables, IV
1693 */
1694 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes + ivsize,
dde20ae9 1695 GFP_DMA | flags);
acdca31d
YK
1696 if (!edesc) {
1697 dev_err(jrdev, "could not allocate extended descriptor\n");
115957bb 1698 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
cf5448b5 1699 0, 0, 0);
acdca31d
YK
1700 return ERR_PTR(-ENOMEM);
1701 }
1702
1703 edesc->src_nents = src_nents;
1704 edesc->dst_nents = dst_nents;
ba4cf71b
IP
1705 edesc->mapped_src_nents = mapped_src_nents;
1706 edesc->mapped_dst_nents = mapped_dst_nents;
a299c837 1707 edesc->sec4_sg_bytes = sec4_sg_bytes;
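	/* the HW S/G table follows the job descriptor within the same allocation */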
13cc6f48
HG
1708 edesc->sec4_sg = (struct sec4_sg_entry *)((u8 *)edesc->hw_desc +
1709 desc_bytes);
acdca31d 1710
115957bb 1711 /* Make sure IV is located in a DMAable area */
eaed71a4
IP
1712 if (ivsize) {
1713 iv = (u8 *)edesc->hw_desc + desc_bytes + sec4_sg_bytes;
1714 memcpy(iv, req->iv, ivsize);
1715
1716 iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_TO_DEVICE);
1717 if (dma_mapping_error(jrdev, iv_dma)) {
1718 dev_err(jrdev, "unable to map IV\n");
1719 caam_unmap(jrdev, req->src, req->dst, src_nents,
1720 dst_nents, 0, 0, 0, 0);
1721 kfree(edesc);
1722 return ERR_PTR(-ENOMEM);
1723 }
115957bb 1724
eaed71a4 1725 dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
acdca31d 1726 }
eaed71a4
IP
1727 if (dst_sg_idx)
1728 sg_to_sec4_sg_last(req->src, mapped_src_nents, edesc->sec4_sg +
1729 !!ivsize, 0);
115957bb 1730
838e0a89
HG
1731 if (mapped_dst_nents > 1) {
1732 sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
1733 edesc->sec4_sg + dst_sg_idx, 0);
acdca31d
YK
1734 }
1735
eaed71a4
IP
1736 if (sec4_sg_bytes) {
1737 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
1738 sec4_sg_bytes,
1739 DMA_TO_DEVICE);
1740 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
1741 dev_err(jrdev, "unable to map S/G table\n");
1742 caam_unmap(jrdev, req->src, req->dst, src_nents,
1743 dst_nents, iv_dma, ivsize, 0, 0);
1744 kfree(edesc);
1745 return ERR_PTR(-ENOMEM);
1746 }
ce572085
HG
1747 }
1748
acdca31d
YK
1749 edesc->iv_dma = iv_dma;
1750
1751#ifdef DEBUG
5ca7badb 1752 print_hex_dump(KERN_ERR, "skcipher sec4_sg@" __stringify(__LINE__)": ",
a299c837
YK
1753 DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
1754 sec4_sg_bytes, 1);
acdca31d
YK
1755#endif
1756
acdca31d
YK
1757 return edesc;
1758}
1759
5ca7badb 1760static int skcipher_encrypt(struct skcipher_request *req)
acdca31d 1761{
5ca7badb
HG
1762 struct skcipher_edesc *edesc;
1763 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1764 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
acdca31d 1765 struct device *jrdev = ctx->jrdev;
acdca31d
YK
1766 u32 *desc;
1767 int ret = 0;
1768
1769 /* allocate extended descriptor */
5ca7badb 1770 edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
acdca31d
YK
1771 if (IS_ERR(edesc))
1772 return PTR_ERR(edesc);
1773
1774 /* Create and submit job descriptor */
5ca7badb 1775 init_skcipher_job(req, edesc, true);
acdca31d 1776#ifdef DEBUG
5ca7badb 1777 print_hex_dump(KERN_ERR, "skcipher jobdesc@" __stringify(__LINE__)": ",
acdca31d
YK
1778 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1779 desc_bytes(edesc->hw_desc), 1);
1780#endif
1781 desc = edesc->hw_desc;
5ca7badb 1782 ret = caam_jr_enqueue(jrdev, desc, skcipher_encrypt_done, req);
acdca31d
YK
1783
1784 if (!ret) {
1785 ret = -EINPROGRESS;
1786 } else {
5ca7badb 1787 skcipher_unmap(jrdev, edesc, req);
acdca31d
YK
1788 kfree(edesc);
1789 }
1790
1791 return ret;
1792}
1793
5ca7badb 1794static int skcipher_decrypt(struct skcipher_request *req)
acdca31d 1795{
5ca7badb
HG
1796 struct skcipher_edesc *edesc;
1797 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1798 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
1799 int ivsize = crypto_skcipher_ivsize(skcipher);
acdca31d 1800 struct device *jrdev = ctx->jrdev;
acdca31d
YK
1801 u32 *desc;
1802 int ret = 0;
1803
1804 /* allocate extended descriptor */
5ca7badb 1805 edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
acdca31d
YK
1806 if (IS_ERR(edesc))
1807 return PTR_ERR(edesc);
1808
115957bb 1809 /*
5ca7badb 1810 * The crypto API expects us to set the IV (req->iv) to the last
115957bb
HG
1811 * ciphertext block.
1812 */
eaed71a4
IP
1813 if (ivsize)
1814 scatterwalk_map_and_copy(req->iv, req->src, req->cryptlen -
1815 ivsize, ivsize, 0);
115957bb 1816
acdca31d 1817 /* Create and submit job descriptor */
5ca7badb 1818 init_skcipher_job(req, edesc, false);
acdca31d
YK
1819 desc = edesc->hw_desc;
1820#ifdef DEBUG
5ca7badb 1821 print_hex_dump(KERN_ERR, "skcipher jobdesc@" __stringify(__LINE__)": ",
acdca31d
YK
1822 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1823 desc_bytes(edesc->hw_desc), 1);
1824#endif
1825
5ca7badb 1826 ret = caam_jr_enqueue(jrdev, desc, skcipher_decrypt_done, req);
acdca31d
YK
1827 if (!ret) {
1828 ret = -EINPROGRESS;
1829 } else {
5ca7badb 1830 skcipher_unmap(jrdev, edesc, req);
acdca31d
YK
1831 kfree(edesc);
1832 }
1833
1834 return ret;
1835}
1836
5ca7badb 1837static struct caam_skcipher_alg driver_algs[] = {
ae4a825f 1838 {
5ca7badb
HG
1839 .skcipher = {
1840 .base = {
1841 .cra_name = "cbc(aes)",
1842 .cra_driver_name = "cbc-aes-caam",
1843 .cra_blocksize = AES_BLOCK_SIZE,
1844 },
1845 .setkey = skcipher_setkey,
1846 .encrypt = skcipher_encrypt,
1847 .decrypt = skcipher_decrypt,
479bcc7c
HX
1848 .min_keysize = AES_MIN_KEY_SIZE,
1849 .max_keysize = AES_MAX_KEY_SIZE,
1850 .ivsize = AES_BLOCK_SIZE,
5ca7badb
HG
1851 },
1852 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
479bcc7c
HX
1853 },
1854 {
5ca7badb
HG
1855 .skcipher = {
1856 .base = {
1857 .cra_name = "cbc(des3_ede)",
1858 .cra_driver_name = "cbc-3des-caam",
1859 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1860 },
cf64e495 1861 .setkey = des_skcipher_setkey,
5ca7badb
HG
1862 .encrypt = skcipher_encrypt,
1863 .decrypt = skcipher_decrypt,
479bcc7c
HX
1864 .min_keysize = DES3_EDE_KEY_SIZE,
1865 .max_keysize = DES3_EDE_KEY_SIZE,
1866 .ivsize = DES3_EDE_BLOCK_SIZE,
5ca7badb
HG
1867 },
1868 .caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
479bcc7c
HX
1869 },
1870 {
5ca7badb
HG
1871 .skcipher = {
1872 .base = {
1873 .cra_name = "cbc(des)",
1874 .cra_driver_name = "cbc-des-caam",
1875 .cra_blocksize = DES_BLOCK_SIZE,
1876 },
cf64e495 1877 .setkey = des_skcipher_setkey,
5ca7badb
HG
1878 .encrypt = skcipher_encrypt,
1879 .decrypt = skcipher_decrypt,
479bcc7c
HX
1880 .min_keysize = DES_KEY_SIZE,
1881 .max_keysize = DES_KEY_SIZE,
1882 .ivsize = DES_BLOCK_SIZE,
5ca7badb
HG
1883 },
1884 .caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
479bcc7c
HX
1885 },
1886 {
5ca7badb
HG
1887 .skcipher = {
1888 .base = {
1889 .cra_name = "ctr(aes)",
1890 .cra_driver_name = "ctr-aes-caam",
1891 .cra_blocksize = 1,
1892 },
1893 .setkey = skcipher_setkey,
1894 .encrypt = skcipher_encrypt,
1895 .decrypt = skcipher_decrypt,
479bcc7c
HX
1896 .min_keysize = AES_MIN_KEY_SIZE,
1897 .max_keysize = AES_MAX_KEY_SIZE,
1898 .ivsize = AES_BLOCK_SIZE,
5ca7badb
HG
1899 .chunksize = AES_BLOCK_SIZE,
1900 },
1901 .caam.class1_alg_type = OP_ALG_ALGSEL_AES |
1902 OP_ALG_AAI_CTR_MOD128,
479bcc7c
HX
1903 },
1904 {
5ca7badb
HG
1905 .skcipher = {
1906 .base = {
1907 .cra_name = "rfc3686(ctr(aes))",
1908 .cra_driver_name = "rfc3686-ctr-aes-caam",
1909 .cra_blocksize = 1,
1910 },
1911 .setkey = skcipher_setkey,
1912 .encrypt = skcipher_encrypt,
1913 .decrypt = skcipher_decrypt,
479bcc7c
HX
1914 .min_keysize = AES_MIN_KEY_SIZE +
1915 CTR_RFC3686_NONCE_SIZE,
1916 .max_keysize = AES_MAX_KEY_SIZE +
1917 CTR_RFC3686_NONCE_SIZE,
1918 .ivsize = CTR_RFC3686_IV_SIZE,
5ca7badb
HG
1919 .chunksize = AES_BLOCK_SIZE,
1920 },
1921 .caam = {
1922 .class1_alg_type = OP_ALG_ALGSEL_AES |
1923 OP_ALG_AAI_CTR_MOD128,
1924 .rfc3686 = true,
1925 },
c6415a60
CV
1926 },
1927 {
5ca7badb
HG
1928 .skcipher = {
1929 .base = {
1930 .cra_name = "xts(aes)",
1931 .cra_driver_name = "xts-aes-caam",
1932 .cra_blocksize = AES_BLOCK_SIZE,
1933 },
1934 .setkey = xts_skcipher_setkey,
1935 .encrypt = skcipher_encrypt,
1936 .decrypt = skcipher_decrypt,
c6415a60
CV
1937 .min_keysize = 2 * AES_MIN_KEY_SIZE,
1938 .max_keysize = 2 * AES_MAX_KEY_SIZE,
1939 .ivsize = AES_BLOCK_SIZE,
5ca7badb
HG
1940 },
1941 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
c6415a60 1942 },
eaed71a4
IP
1943 {
1944 .skcipher = {
1945 .base = {
1946 .cra_name = "ecb(des)",
1947 .cra_driver_name = "ecb-des-caam",
1948 .cra_blocksize = DES_BLOCK_SIZE,
1949 },
1950 .setkey = des_skcipher_setkey,
1951 .encrypt = skcipher_encrypt,
1952 .decrypt = skcipher_decrypt,
1953 .min_keysize = DES_KEY_SIZE,
1954 .max_keysize = DES_KEY_SIZE,
1955 },
1956 .caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_ECB,
1957 },
1958 {
1959 .skcipher = {
1960 .base = {
1961 .cra_name = "ecb(aes)",
1962 .cra_driver_name = "ecb-aes-caam",
1963 .cra_blocksize = AES_BLOCK_SIZE,
1964 },
1965 .setkey = skcipher_setkey,
1966 .encrypt = skcipher_encrypt,
1967 .decrypt = skcipher_decrypt,
1968 .min_keysize = AES_MIN_KEY_SIZE,
1969 .max_keysize = AES_MAX_KEY_SIZE,
1970 },
1971 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_ECB,
1972 },
1973 {
1974 .skcipher = {
1975 .base = {
1976 .cra_name = "ecb(des3_ede)",
1977 .cra_driver_name = "ecb-des3-caam",
1978 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1979 },
1980 .setkey = des_skcipher_setkey,
1981 .encrypt = skcipher_encrypt,
1982 .decrypt = skcipher_decrypt,
1983 .min_keysize = DES3_EDE_KEY_SIZE,
1984 .max_keysize = DES3_EDE_KEY_SIZE,
1985 },
1986 .caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_ECB,
1987 },
1988 {
1989 .skcipher = {
1990 .base = {
1991 .cra_name = "ecb(arc4)",
1992 .cra_driver_name = "ecb-arc4-caam",
1993 .cra_blocksize = ARC4_BLOCK_SIZE,
1994 },
1995 .setkey = skcipher_setkey,
1996 .encrypt = skcipher_encrypt,
1997 .decrypt = skcipher_decrypt,
1998 .min_keysize = ARC4_MIN_KEY_SIZE,
1999 .max_keysize = ARC4_MAX_KEY_SIZE,
2000 },
2001 .caam.class1_alg_type = OP_ALG_ALGSEL_ARC4 | OP_ALG_AAI_ECB,
2002 },
479bcc7c
HX
2003};
2004
2005static struct caam_aead_alg driver_aeads[] = {
2006 {
2007 .aead = {
2008 .base = {
2009 .cra_name = "rfc4106(gcm(aes))",
2010 .cra_driver_name = "rfc4106-gcm-aes-caam",
2011 .cra_blocksize = 1,
2012 },
2013 .setkey = rfc4106_setkey,
2014 .setauthsize = rfc4106_setauthsize,
2015 .encrypt = ipsec_gcm_encrypt,
2016 .decrypt = ipsec_gcm_decrypt,
7545e166 2017 .ivsize = GCM_RFC4106_IV_SIZE,
479bcc7c
HX
2018 .maxauthsize = AES_BLOCK_SIZE,
2019 },
2020 .caam = {
2021 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2022 },
2023 },
2024 {
2025 .aead = {
2026 .base = {
2027 .cra_name = "rfc4543(gcm(aes))",
2028 .cra_driver_name = "rfc4543-gcm-aes-caam",
2029 .cra_blocksize = 1,
2030 },
2031 .setkey = rfc4543_setkey,
2032 .setauthsize = rfc4543_setauthsize,
2033 .encrypt = ipsec_gcm_encrypt,
2034 .decrypt = ipsec_gcm_decrypt,
7545e166 2035 .ivsize = GCM_RFC4543_IV_SIZE,
479bcc7c
HX
2036 .maxauthsize = AES_BLOCK_SIZE,
2037 },
2038 .caam = {
2039 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2040 },
2041 },
2042 /* Galois Counter Mode */
2043 {
2044 .aead = {
2045 .base = {
2046 .cra_name = "gcm(aes)",
2047 .cra_driver_name = "gcm-aes-caam",
2048 .cra_blocksize = 1,
2049 },
2050 .setkey = gcm_setkey,
2051 .setauthsize = gcm_setauthsize,
2052 .encrypt = gcm_encrypt,
2053 .decrypt = gcm_decrypt,
7545e166 2054 .ivsize = GCM_AES_IV_SIZE,
479bcc7c
HX
2055 .maxauthsize = AES_BLOCK_SIZE,
2056 },
2057 .caam = {
2058 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2059 },
2060 },
2061 /* single-pass ipsec_esp descriptor */
2062 {
2063 .aead = {
2064 .base = {
2065 .cra_name = "authenc(hmac(md5),"
2066 "ecb(cipher_null))",
2067 .cra_driver_name = "authenc-hmac-md5-"
2068 "ecb-cipher_null-caam",
2069 .cra_blocksize = NULL_BLOCK_SIZE,
2070 },
2071 .setkey = aead_setkey,
2072 .setauthsize = aead_setauthsize,
2073 .encrypt = aead_encrypt,
2074 .decrypt = aead_decrypt,
ae4a825f 2075 .ivsize = NULL_IV_SIZE,
479bcc7c
HX
2076 .maxauthsize = MD5_DIGEST_SIZE,
2077 },
2078 .caam = {
2079 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2080 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2081 },
2082 },
2083 {
2084 .aead = {
2085 .base = {
2086 .cra_name = "authenc(hmac(sha1),"
2087 "ecb(cipher_null))",
2088 .cra_driver_name = "authenc-hmac-sha1-"
2089 "ecb-cipher_null-caam",
2090 .cra_blocksize = NULL_BLOCK_SIZE,
ae4a825f 2091 },
479bcc7c
HX
2092 .setkey = aead_setkey,
2093 .setauthsize = aead_setauthsize,
2094 .encrypt = aead_encrypt,
2095 .decrypt = aead_decrypt,
2096 .ivsize = NULL_IV_SIZE,
2097 .maxauthsize = SHA1_DIGEST_SIZE,
2098 },
2099 .caam = {
2100 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2101 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2102 },
ae4a825f
HG
2103 },
2104 {
479bcc7c
HX
2105 .aead = {
2106 .base = {
2107 .cra_name = "authenc(hmac(sha224),"
2108 "ecb(cipher_null))",
2109 .cra_driver_name = "authenc-hmac-sha224-"
2110 "ecb-cipher_null-caam",
2111 .cra_blocksize = NULL_BLOCK_SIZE,
2112 },
ae4a825f
HG
2113 .setkey = aead_setkey,
2114 .setauthsize = aead_setauthsize,
479bcc7c
HX
2115 .encrypt = aead_encrypt,
2116 .decrypt = aead_decrypt,
ae4a825f
HG
2117 .ivsize = NULL_IV_SIZE,
2118 .maxauthsize = SHA224_DIGEST_SIZE,
479bcc7c
HX
2119 },
2120 .caam = {
2121 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2122 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2123 },
ae4a825f
HG
2124 },
2125 {
479bcc7c
HX
2126 .aead = {
2127 .base = {
2128 .cra_name = "authenc(hmac(sha256),"
2129 "ecb(cipher_null))",
2130 .cra_driver_name = "authenc-hmac-sha256-"
2131 "ecb-cipher_null-caam",
2132 .cra_blocksize = NULL_BLOCK_SIZE,
2133 },
ae4a825f
HG
2134 .setkey = aead_setkey,
2135 .setauthsize = aead_setauthsize,
479bcc7c
HX
2136 .encrypt = aead_encrypt,
2137 .decrypt = aead_decrypt,
ae4a825f
HG
2138 .ivsize = NULL_IV_SIZE,
2139 .maxauthsize = SHA256_DIGEST_SIZE,
479bcc7c
HX
2140 },
2141 .caam = {
2142 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2143 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2144 },
ae4a825f
HG
2145 },
2146 {
479bcc7c
HX
2147 .aead = {
2148 .base = {
2149 .cra_name = "authenc(hmac(sha384),"
2150 "ecb(cipher_null))",
2151 .cra_driver_name = "authenc-hmac-sha384-"
2152 "ecb-cipher_null-caam",
2153 .cra_blocksize = NULL_BLOCK_SIZE,
2154 },
ae4a825f
HG
2155 .setkey = aead_setkey,
2156 .setauthsize = aead_setauthsize,
479bcc7c
HX
2157 .encrypt = aead_encrypt,
2158 .decrypt = aead_decrypt,
ae4a825f
HG
2159 .ivsize = NULL_IV_SIZE,
2160 .maxauthsize = SHA384_DIGEST_SIZE,
479bcc7c
HX
2161 },
2162 .caam = {
2163 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2164 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2165 },
ae4a825f
HG
2166 },
2167 {
479bcc7c
HX
2168 .aead = {
2169 .base = {
2170 .cra_name = "authenc(hmac(sha512),"
2171 "ecb(cipher_null))",
2172 .cra_driver_name = "authenc-hmac-sha512-"
2173 "ecb-cipher_null-caam",
2174 .cra_blocksize = NULL_BLOCK_SIZE,
2175 },
ae4a825f
HG
2176 .setkey = aead_setkey,
2177 .setauthsize = aead_setauthsize,
479bcc7c
HX
2178 .encrypt = aead_encrypt,
2179 .decrypt = aead_decrypt,
ae4a825f
HG
2180 .ivsize = NULL_IV_SIZE,
2181 .maxauthsize = SHA512_DIGEST_SIZE,
479bcc7c
HX
2182 },
2183 .caam = {
2184 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2185 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2186 },
2187 },
2188 {
2189 .aead = {
2190 .base = {
2191 .cra_name = "authenc(hmac(md5),cbc(aes))",
2192 .cra_driver_name = "authenc-hmac-md5-"
2193 "cbc-aes-caam",
2194 .cra_blocksize = AES_BLOCK_SIZE,
ae4a825f 2195 },
479bcc7c
HX
2196 .setkey = aead_setkey,
2197 .setauthsize = aead_setauthsize,
2198 .encrypt = aead_encrypt,
2199 .decrypt = aead_decrypt,
2200 .ivsize = AES_BLOCK_SIZE,
2201 .maxauthsize = MD5_DIGEST_SIZE,
2202 },
2203 .caam = {
2204 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2205 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2206 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2207 },
ae4a825f 2208 },
8b4d43a4 2209 {
479bcc7c
HX
2210 .aead = {
2211 .base = {
2212 .cra_name = "echainiv(authenc(hmac(md5),"
2213 "cbc(aes)))",
2214 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2215 "cbc-aes-caam",
2216 .cra_blocksize = AES_BLOCK_SIZE,
2217 },
8b4d43a4
KP
2218 .setkey = aead_setkey,
2219 .setauthsize = aead_setauthsize,
479bcc7c 2220 .encrypt = aead_encrypt,
8b18e235 2221 .decrypt = aead_decrypt,
8b4d43a4
KP
2222 .ivsize = AES_BLOCK_SIZE,
2223 .maxauthsize = MD5_DIGEST_SIZE,
479bcc7c
HX
2224 },
2225 .caam = {
2226 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2227 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2228 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2229 .geniv = true,
2230 },
2231 },
2232 {
2233 .aead = {
2234 .base = {
2235 .cra_name = "authenc(hmac(sha1),cbc(aes))",
2236 .cra_driver_name = "authenc-hmac-sha1-"
2237 "cbc-aes-caam",
2238 .cra_blocksize = AES_BLOCK_SIZE,
8b4d43a4 2239 },
479bcc7c
HX
2240 .setkey = aead_setkey,
2241 .setauthsize = aead_setauthsize,
2242 .encrypt = aead_encrypt,
2243 .decrypt = aead_decrypt,
2244 .ivsize = AES_BLOCK_SIZE,
2245 .maxauthsize = SHA1_DIGEST_SIZE,
2246 },
2247 .caam = {
2248 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2249 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2250 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2251 },
8b4d43a4 2252 },
8e8ec596 2253 {
479bcc7c
HX
2254 .aead = {
2255 .base = {
2256 .cra_name = "echainiv(authenc(hmac(sha1),"
2257 "cbc(aes)))",
2258 .cra_driver_name = "echainiv-authenc-"
2259 "hmac-sha1-cbc-aes-caam",
2260 .cra_blocksize = AES_BLOCK_SIZE,
2261 },
0e479300
YK
2262 .setkey = aead_setkey,
2263 .setauthsize = aead_setauthsize,
479bcc7c 2264 .encrypt = aead_encrypt,
8b18e235 2265 .decrypt = aead_decrypt,
8e8ec596
KP
2266 .ivsize = AES_BLOCK_SIZE,
2267 .maxauthsize = SHA1_DIGEST_SIZE,
479bcc7c
HX
2268 },
2269 .caam = {
2270 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2271 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2272 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2273 .geniv = true,
2274 },
2275 },
2276 {
2277 .aead = {
2278 .base = {
2279 .cra_name = "authenc(hmac(sha224),cbc(aes))",
2280 .cra_driver_name = "authenc-hmac-sha224-"
2281 "cbc-aes-caam",
2282 .cra_blocksize = AES_BLOCK_SIZE,
8e8ec596 2283 },
479bcc7c
HX
2284 .setkey = aead_setkey,
2285 .setauthsize = aead_setauthsize,
2286 .encrypt = aead_encrypt,
2287 .decrypt = aead_decrypt,
2288 .ivsize = AES_BLOCK_SIZE,
2289 .maxauthsize = SHA224_DIGEST_SIZE,
2290 },
2291 .caam = {
2292 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2293 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2294 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2295 },
8e8ec596 2296 },
e863f9cc 2297 {
479bcc7c
HX
2298 .aead = {
2299 .base = {
2300 .cra_name = "echainiv(authenc(hmac(sha224),"
2301 "cbc(aes)))",
2302 .cra_driver_name = "echainiv-authenc-"
2303 "hmac-sha224-cbc-aes-caam",
2304 .cra_blocksize = AES_BLOCK_SIZE,
2305 },
e863f9cc
HA
2306 .setkey = aead_setkey,
2307 .setauthsize = aead_setauthsize,
479bcc7c 2308 .encrypt = aead_encrypt,
8b18e235 2309 .decrypt = aead_decrypt,
e863f9cc
HA
2310 .ivsize = AES_BLOCK_SIZE,
2311 .maxauthsize = SHA224_DIGEST_SIZE,
479bcc7c
HX
2312 },
2313 .caam = {
2314 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2315 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2316 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2317 .geniv = true,
2318 },
2319 },
2320 {
2321 .aead = {
2322 .base = {
2323 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2324 .cra_driver_name = "authenc-hmac-sha256-"
2325 "cbc-aes-caam",
2326 .cra_blocksize = AES_BLOCK_SIZE,
e863f9cc 2327 },
479bcc7c
HX
2328 .setkey = aead_setkey,
2329 .setauthsize = aead_setauthsize,
2330 .encrypt = aead_encrypt,
2331 .decrypt = aead_decrypt,
2332 .ivsize = AES_BLOCK_SIZE,
2333 .maxauthsize = SHA256_DIGEST_SIZE,
2334 },
2335 .caam = {
2336 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2337 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2338 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2339 },
e863f9cc 2340 },
8e8ec596 2341 {
479bcc7c
HX
2342 .aead = {
2343 .base = {
2344 .cra_name = "echainiv(authenc(hmac(sha256),"
2345 "cbc(aes)))",
2346 .cra_driver_name = "echainiv-authenc-"
2347 "hmac-sha256-cbc-aes-caam",
2348 .cra_blocksize = AES_BLOCK_SIZE,
2349 },
2350 .setkey = aead_setkey,
2351 .setauthsize = aead_setauthsize,
2352 .encrypt = aead_encrypt,
8b18e235 2353 .decrypt = aead_decrypt,
479bcc7c
HX
2354 .ivsize = AES_BLOCK_SIZE,
2355 .maxauthsize = SHA256_DIGEST_SIZE,
2356 },
2357 .caam = {
2358 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2359 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2360 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2361 .geniv = true,
2362 },
2363 },
2364 {
2365 .aead = {
2366 .base = {
2367 .cra_name = "authenc(hmac(sha384),cbc(aes))",
2368 .cra_driver_name = "authenc-hmac-sha384-"
2369 "cbc-aes-caam",
2370 .cra_blocksize = AES_BLOCK_SIZE,
2371 },
2372 .setkey = aead_setkey,
2373 .setauthsize = aead_setauthsize,
2374 .encrypt = aead_encrypt,
2375 .decrypt = aead_decrypt,
2376 .ivsize = AES_BLOCK_SIZE,
2377 .maxauthsize = SHA384_DIGEST_SIZE,
2378 },
2379 .caam = {
2380 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2381 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2382 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2383 },
2384 },
2385 {
2386 .aead = {
2387 .base = {
2388 .cra_name = "echainiv(authenc(hmac(sha384),"
2389 "cbc(aes)))",
2390 .cra_driver_name = "echainiv-authenc-"
2391 "hmac-sha384-cbc-aes-caam",
2392 .cra_blocksize = AES_BLOCK_SIZE,
2393 },
0e479300
YK
2394 .setkey = aead_setkey,
2395 .setauthsize = aead_setauthsize,
479bcc7c 2396 .encrypt = aead_encrypt,
8b18e235 2397 .decrypt = aead_decrypt,
8e8ec596 2398 .ivsize = AES_BLOCK_SIZE,
479bcc7c
HX
2399 .maxauthsize = SHA384_DIGEST_SIZE,
2400 },
2401 .caam = {
2402 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2403 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2404 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2405 .geniv = true,
2406 },
8e8ec596 2407 },
e863f9cc 2408 {
479bcc7c
HX
2409 .aead = {
2410 .base = {
2411 .cra_name = "authenc(hmac(sha512),cbc(aes))",
2412 .cra_driver_name = "authenc-hmac-sha512-"
2413 "cbc-aes-caam",
2414 .cra_blocksize = AES_BLOCK_SIZE,
2415 },
e863f9cc
HA
2416 .setkey = aead_setkey,
2417 .setauthsize = aead_setauthsize,
479bcc7c
HX
2418 .encrypt = aead_encrypt,
2419 .decrypt = aead_decrypt,
e863f9cc 2420 .ivsize = AES_BLOCK_SIZE,
479bcc7c
HX
2421 .maxauthsize = SHA512_DIGEST_SIZE,
2422 },
2423 .caam = {
2424 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2425 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2426 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2427 },
e863f9cc 2428 },
4427b1b4 2429 {
479bcc7c
HX
2430 .aead = {
2431 .base = {
2432 .cra_name = "echainiv(authenc(hmac(sha512),"
2433 "cbc(aes)))",
2434 .cra_driver_name = "echainiv-authenc-"
2435 "hmac-sha512-cbc-aes-caam",
2436 .cra_blocksize = AES_BLOCK_SIZE,
2437 },
0e479300
YK
2438 .setkey = aead_setkey,
2439 .setauthsize = aead_setauthsize,
479bcc7c 2440 .encrypt = aead_encrypt,
8b18e235 2441 .decrypt = aead_decrypt,
4427b1b4
KP
2442 .ivsize = AES_BLOCK_SIZE,
2443 .maxauthsize = SHA512_DIGEST_SIZE,
479bcc7c
HX
2444 },
2445 .caam = {
2446 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2447 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2448 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2449 .geniv = true,
2450 },
2451 },
2452 {
2453 .aead = {
2454 .base = {
2455 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2456 .cra_driver_name = "authenc-hmac-md5-"
2457 "cbc-des3_ede-caam",
2458 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
4427b1b4 2459 },
479bcc7c
HX
2460 .setkey = aead_setkey,
2461 .setauthsize = aead_setauthsize,
2462 .encrypt = aead_encrypt,
2463 .decrypt = aead_decrypt,
2464 .ivsize = DES3_EDE_BLOCK_SIZE,
2465 .maxauthsize = MD5_DIGEST_SIZE,
2466 },
2467 .caam = {
2468 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2469 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2470 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2471 }
4427b1b4 2472 },
8b4d43a4 2473 {
479bcc7c
HX
2474 .aead = {
2475 .base = {
2476 .cra_name = "echainiv(authenc(hmac(md5),"
2477 "cbc(des3_ede)))",
2478 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2479 "cbc-des3_ede-caam",
2480 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2481 },
8b4d43a4
KP
2482 .setkey = aead_setkey,
2483 .setauthsize = aead_setauthsize,
479bcc7c 2484 .encrypt = aead_encrypt,
8b18e235 2485 .decrypt = aead_decrypt,
8b4d43a4
KP
2486 .ivsize = DES3_EDE_BLOCK_SIZE,
2487 .maxauthsize = MD5_DIGEST_SIZE,
479bcc7c
HX
2488 },
2489 .caam = {
2490 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2491 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2492 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2493 .geniv = true,
2494 }
2495 },
2496 {
2497 .aead = {
2498 .base = {
2499 .cra_name = "authenc(hmac(sha1),"
2500 "cbc(des3_ede))",
2501 .cra_driver_name = "authenc-hmac-sha1-"
2502 "cbc-des3_ede-caam",
2503 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
8b4d43a4 2504 },
479bcc7c
HX
2505 .setkey = aead_setkey,
2506 .setauthsize = aead_setauthsize,
2507 .encrypt = aead_encrypt,
2508 .decrypt = aead_decrypt,
2509 .ivsize = DES3_EDE_BLOCK_SIZE,
2510 .maxauthsize = SHA1_DIGEST_SIZE,
2511 },
2512 .caam = {
2513 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2514 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2515 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2516 },
8b4d43a4 2517 },
8e8ec596 2518 {
479bcc7c
HX
2519 .aead = {
2520 .base = {
2521 .cra_name = "echainiv(authenc(hmac(sha1),"
2522 "cbc(des3_ede)))",
2523 .cra_driver_name = "echainiv-authenc-"
2524 "hmac-sha1-"
2525 "cbc-des3_ede-caam",
2526 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2527 },
0e479300
YK
2528 .setkey = aead_setkey,
2529 .setauthsize = aead_setauthsize,
479bcc7c 2530 .encrypt = aead_encrypt,
8b18e235 2531 .decrypt = aead_decrypt,
8e8ec596
KP
2532 .ivsize = DES3_EDE_BLOCK_SIZE,
2533 .maxauthsize = SHA1_DIGEST_SIZE,
479bcc7c
HX
2534 },
2535 .caam = {
2536 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2537 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2538 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2539 .geniv = true,
2540 },
2541 },
2542 {
2543 .aead = {
2544 .base = {
2545 .cra_name = "authenc(hmac(sha224),"
2546 "cbc(des3_ede))",
2547 .cra_driver_name = "authenc-hmac-sha224-"
2548 "cbc-des3_ede-caam",
2549 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
8e8ec596 2550 },
479bcc7c
HX
2551 .setkey = aead_setkey,
2552 .setauthsize = aead_setauthsize,
2553 .encrypt = aead_encrypt,
2554 .decrypt = aead_decrypt,
2555 .ivsize = DES3_EDE_BLOCK_SIZE,
2556 .maxauthsize = SHA224_DIGEST_SIZE,
2557 },
2558 .caam = {
2559 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2560 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2561 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2562 },
8e8ec596 2563 },
e863f9cc 2564 {
479bcc7c
HX
2565 .aead = {
2566 .base = {
2567 .cra_name = "echainiv(authenc(hmac(sha224),"
2568 "cbc(des3_ede)))",
2569 .cra_driver_name = "echainiv-authenc-"
2570 "hmac-sha224-"
2571 "cbc-des3_ede-caam",
2572 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2573 },
e863f9cc
HA
2574 .setkey = aead_setkey,
2575 .setauthsize = aead_setauthsize,
479bcc7c 2576 .encrypt = aead_encrypt,
8b18e235 2577 .decrypt = aead_decrypt,
e863f9cc
HA
2578 .ivsize = DES3_EDE_BLOCK_SIZE,
2579 .maxauthsize = SHA224_DIGEST_SIZE,
479bcc7c
HX
2580 },
2581 .caam = {
2582 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2583 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2584 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2585 .geniv = true,
2586 },
2587 },
2588 {
2589 .aead = {
2590 .base = {
2591 .cra_name = "authenc(hmac(sha256),"
2592 "cbc(des3_ede))",
2593 .cra_driver_name = "authenc-hmac-sha256-"
2594 "cbc-des3_ede-caam",
2595 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
e863f9cc 2596 },
479bcc7c
HX
2597 .setkey = aead_setkey,
2598 .setauthsize = aead_setauthsize,
2599 .encrypt = aead_encrypt,
2600 .decrypt = aead_decrypt,
2601 .ivsize = DES3_EDE_BLOCK_SIZE,
2602 .maxauthsize = SHA256_DIGEST_SIZE,
2603 },
2604 .caam = {
2605 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2606 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2607 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2608 },
e863f9cc 2609 },
8e8ec596 2610 {
479bcc7c
HX
2611 .aead = {
2612 .base = {
2613 .cra_name = "echainiv(authenc(hmac(sha256),"
2614 "cbc(des3_ede)))",
2615 .cra_driver_name = "echainiv-authenc-"
2616 "hmac-sha256-"
2617 "cbc-des3_ede-caam",
2618 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2619 },
0e479300
YK
2620 .setkey = aead_setkey,
2621 .setauthsize = aead_setauthsize,
479bcc7c 2622 .encrypt = aead_encrypt,
8b18e235 2623 .decrypt = aead_decrypt,
8e8ec596
KP
2624 .ivsize = DES3_EDE_BLOCK_SIZE,
2625 .maxauthsize = SHA256_DIGEST_SIZE,
479bcc7c
HX
2626 },
2627 .caam = {
2628 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2629 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2630 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2631 .geniv = true,
2632 },
2633 },
2634 {
2635 .aead = {
2636 .base = {
2637 .cra_name = "authenc(hmac(sha384),"
2638 "cbc(des3_ede))",
2639 .cra_driver_name = "authenc-hmac-sha384-"
2640 "cbc-des3_ede-caam",
2641 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
8e8ec596 2642 },
479bcc7c
HX
2643 .setkey = aead_setkey,
2644 .setauthsize = aead_setauthsize,
2645 .encrypt = aead_encrypt,
2646 .decrypt = aead_decrypt,
2647 .ivsize = DES3_EDE_BLOCK_SIZE,
2648 .maxauthsize = SHA384_DIGEST_SIZE,
2649 },
2650 .caam = {
2651 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2652 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2653 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2654 },
8e8ec596 2655 },
e863f9cc 2656 {
479bcc7c
HX
2657 .aead = {
2658 .base = {
2659 .cra_name = "echainiv(authenc(hmac(sha384),"
2660 "cbc(des3_ede)))",
2661 .cra_driver_name = "echainiv-authenc-"
2662 "hmac-sha384-"
2663 "cbc-des3_ede-caam",
2664 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2665 },
e863f9cc
HA
2666 .setkey = aead_setkey,
2667 .setauthsize = aead_setauthsize,
479bcc7c 2668 .encrypt = aead_encrypt,
8b18e235 2669 .decrypt = aead_decrypt,
e863f9cc
HA
2670 .ivsize = DES3_EDE_BLOCK_SIZE,
2671 .maxauthsize = SHA384_DIGEST_SIZE,
479bcc7c
HX
2672 },
2673 .caam = {
2674 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2675 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2676 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2677 .geniv = true,
2678 },
2679 },
2680 {
2681 .aead = {
2682 .base = {
2683 .cra_name = "authenc(hmac(sha512),"
2684 "cbc(des3_ede))",
2685 .cra_driver_name = "authenc-hmac-sha512-"
2686 "cbc-des3_ede-caam",
2687 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
e863f9cc 2688 },
479bcc7c
HX
2689 .setkey = aead_setkey,
2690 .setauthsize = aead_setauthsize,
2691 .encrypt = aead_encrypt,
2692 .decrypt = aead_decrypt,
2693 .ivsize = DES3_EDE_BLOCK_SIZE,
2694 .maxauthsize = SHA512_DIGEST_SIZE,
2695 },
2696 .caam = {
2697 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2698 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2699 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2700 },
e863f9cc 2701 },
4427b1b4 2702 {
479bcc7c
HX
2703 .aead = {
2704 .base = {
2705 .cra_name = "echainiv(authenc(hmac(sha512),"
2706 "cbc(des3_ede)))",
2707 .cra_driver_name = "echainiv-authenc-"
2708 "hmac-sha512-"
2709 "cbc-des3_ede-caam",
2710 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2711 },
0e479300
YK
2712 .setkey = aead_setkey,
2713 .setauthsize = aead_setauthsize,
479bcc7c 2714 .encrypt = aead_encrypt,
8b18e235 2715 .decrypt = aead_decrypt,
4427b1b4
KP
2716 .ivsize = DES3_EDE_BLOCK_SIZE,
2717 .maxauthsize = SHA512_DIGEST_SIZE,
479bcc7c
HX
2718 },
2719 .caam = {
2720 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2721 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2722 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2723 .geniv = true,
2724 },
2725 },
2726 {
2727 .aead = {
2728 .base = {
2729 .cra_name = "authenc(hmac(md5),cbc(des))",
2730 .cra_driver_name = "authenc-hmac-md5-"
2731 "cbc-des-caam",
2732 .cra_blocksize = DES_BLOCK_SIZE,
4427b1b4 2733 },
479bcc7c
HX
2734 .setkey = aead_setkey,
2735 .setauthsize = aead_setauthsize,
2736 .encrypt = aead_encrypt,
2737 .decrypt = aead_decrypt,
2738 .ivsize = DES_BLOCK_SIZE,
2739 .maxauthsize = MD5_DIGEST_SIZE,
2740 },
2741 .caam = {
2742 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2743 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2744 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2745 },
4427b1b4 2746 },
8b4d43a4 2747 {
479bcc7c
HX
2748 .aead = {
2749 .base = {
2750 .cra_name = "echainiv(authenc(hmac(md5),"
2751 "cbc(des)))",
2752 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2753 "cbc-des-caam",
2754 .cra_blocksize = DES_BLOCK_SIZE,
2755 },
8b4d43a4
KP
2756 .setkey = aead_setkey,
2757 .setauthsize = aead_setauthsize,
479bcc7c 2758 .encrypt = aead_encrypt,
8b18e235 2759 .decrypt = aead_decrypt,
8b4d43a4
KP
2760 .ivsize = DES_BLOCK_SIZE,
2761 .maxauthsize = MD5_DIGEST_SIZE,
479bcc7c
HX
2762 },
2763 .caam = {
2764 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2765 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2766 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2767 .geniv = true,
2768 },
2769 },
2770 {
2771 .aead = {
2772 .base = {
2773 .cra_name = "authenc(hmac(sha1),cbc(des))",
2774 .cra_driver_name = "authenc-hmac-sha1-"
2775 "cbc-des-caam",
2776 .cra_blocksize = DES_BLOCK_SIZE,
8b4d43a4 2777 },
479bcc7c
HX
2778 .setkey = aead_setkey,
2779 .setauthsize = aead_setauthsize,
2780 .encrypt = aead_encrypt,
2781 .decrypt = aead_decrypt,
2782 .ivsize = DES_BLOCK_SIZE,
2783 .maxauthsize = SHA1_DIGEST_SIZE,
2784 },
2785 .caam = {
2786 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2787 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2788 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2789 },
8b4d43a4 2790 },
8e8ec596 2791 {
479bcc7c
HX
2792 .aead = {
2793 .base = {
2794 .cra_name = "echainiv(authenc(hmac(sha1),"
2795 "cbc(des)))",
2796 .cra_driver_name = "echainiv-authenc-"
2797 "hmac-sha1-cbc-des-caam",
2798 .cra_blocksize = DES_BLOCK_SIZE,
2799 },
0e479300
YK
2800 .setkey = aead_setkey,
2801 .setauthsize = aead_setauthsize,
479bcc7c 2802 .encrypt = aead_encrypt,
8b18e235 2803 .decrypt = aead_decrypt,
8e8ec596
KP
2804 .ivsize = DES_BLOCK_SIZE,
2805 .maxauthsize = SHA1_DIGEST_SIZE,
479bcc7c
HX
2806 },
2807 .caam = {
2808 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2809 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2810 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2811 .geniv = true,
2812 },
2813 },
2814 {
2815 .aead = {
2816 .base = {
2817 .cra_name = "authenc(hmac(sha224),cbc(des))",
2818 .cra_driver_name = "authenc-hmac-sha224-"
2819 "cbc-des-caam",
2820 .cra_blocksize = DES_BLOCK_SIZE,
8e8ec596 2821 },
479bcc7c
HX
2822 .setkey = aead_setkey,
2823 .setauthsize = aead_setauthsize,
2824 .encrypt = aead_encrypt,
2825 .decrypt = aead_decrypt,
2826 .ivsize = DES_BLOCK_SIZE,
2827 .maxauthsize = SHA224_DIGEST_SIZE,
2828 },
2829 .caam = {
2830 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2831 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2832 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2833 },
8e8ec596 2834 },
e863f9cc 2835 {
479bcc7c
HX
2836 .aead = {
2837 .base = {
2838 .cra_name = "echainiv(authenc(hmac(sha224),"
2839 "cbc(des)))",
2840 .cra_driver_name = "echainiv-authenc-"
2841 "hmac-sha224-cbc-des-caam",
2842 .cra_blocksize = DES_BLOCK_SIZE,
2843 },
e863f9cc
HA
2844 .setkey = aead_setkey,
2845 .setauthsize = aead_setauthsize,
479bcc7c 2846 .encrypt = aead_encrypt,
8b18e235 2847 .decrypt = aead_decrypt,
e863f9cc
HA
2848 .ivsize = DES_BLOCK_SIZE,
2849 .maxauthsize = SHA224_DIGEST_SIZE,
479bcc7c
HX
2850 },
2851 .caam = {
2852 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2853 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2854 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2855 .geniv = true,
2856 },
2857 },
2858 {
2859 .aead = {
2860 .base = {
2861 .cra_name = "authenc(hmac(sha256),cbc(des))",
2862 .cra_driver_name = "authenc-hmac-sha256-"
2863 "cbc-des-caam",
2864 .cra_blocksize = DES_BLOCK_SIZE,
e863f9cc 2865 },
479bcc7c
HX
2866 .setkey = aead_setkey,
2867 .setauthsize = aead_setauthsize,
2868 .encrypt = aead_encrypt,
2869 .decrypt = aead_decrypt,
2870 .ivsize = DES_BLOCK_SIZE,
2871 .maxauthsize = SHA256_DIGEST_SIZE,
2872 },
2873 .caam = {
2874 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2875 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2876 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2877 },
e863f9cc 2878 },
8e8ec596 2879 {
479bcc7c
HX
2880 .aead = {
2881 .base = {
2882 .cra_name = "echainiv(authenc(hmac(sha256),"
2883 "cbc(des)))",
2884 .cra_driver_name = "echainiv-authenc-"
2885 "hmac-sha256-cbc-des-caam",
2886 .cra_blocksize = DES_BLOCK_SIZE,
2887 },
0e479300
YK
2888 .setkey = aead_setkey,
2889 .setauthsize = aead_setauthsize,
479bcc7c 2890 .encrypt = aead_encrypt,
8b18e235 2891 .decrypt = aead_decrypt,
8e8ec596
KP
2892 .ivsize = DES_BLOCK_SIZE,
2893 .maxauthsize = SHA256_DIGEST_SIZE,
479bcc7c
HX
2894 },
2895 .caam = {
2896 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2897 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2898 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2899 .geniv = true,
2900 },
2901 },
2902 {
2903 .aead = {
2904 .base = {
2905 .cra_name = "authenc(hmac(sha384),cbc(des))",
2906 .cra_driver_name = "authenc-hmac-sha384-"
2907 "cbc-des-caam",
2908 .cra_blocksize = DES_BLOCK_SIZE,
8e8ec596 2909 },
479bcc7c
HX
2910 .setkey = aead_setkey,
2911 .setauthsize = aead_setauthsize,
2912 .encrypt = aead_encrypt,
2913 .decrypt = aead_decrypt,
2914 .ivsize = DES_BLOCK_SIZE,
2915 .maxauthsize = SHA384_DIGEST_SIZE,
2916 },
2917 .caam = {
2918 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2919 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2920 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2921 },
8e8ec596 2922 },
e863f9cc 2923 {
479bcc7c
HX
2924 .aead = {
2925 .base = {
2926 .cra_name = "echainiv(authenc(hmac(sha384),"
2927 "cbc(des)))",
2928 .cra_driver_name = "echainiv-authenc-"
2929 "hmac-sha384-cbc-des-caam",
2930 .cra_blocksize = DES_BLOCK_SIZE,
2931 },
e863f9cc
HA
2932 .setkey = aead_setkey,
2933 .setauthsize = aead_setauthsize,
479bcc7c 2934 .encrypt = aead_encrypt,
8b18e235 2935 .decrypt = aead_decrypt,
e863f9cc
HA
2936 .ivsize = DES_BLOCK_SIZE,
2937 .maxauthsize = SHA384_DIGEST_SIZE,
479bcc7c
HX
2938 },
2939 .caam = {
2940 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2941 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2942 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2943 .geniv = true,
2944 },
2945 },
2946 {
2947 .aead = {
2948 .base = {
2949 .cra_name = "authenc(hmac(sha512),cbc(des))",
2950 .cra_driver_name = "authenc-hmac-sha512-"
2951 "cbc-des-caam",
2952 .cra_blocksize = DES_BLOCK_SIZE,
e863f9cc 2953 },
479bcc7c
HX
2954 .setkey = aead_setkey,
2955 .setauthsize = aead_setauthsize,
2956 .encrypt = aead_encrypt,
2957 .decrypt = aead_decrypt,
2958 .ivsize = DES_BLOCK_SIZE,
2959 .maxauthsize = SHA512_DIGEST_SIZE,
2960 },
2961 .caam = {
2962 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2963 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2964 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2965 },
e863f9cc 2966 },
4427b1b4 2967 {
479bcc7c
HX
2968 .aead = {
2969 .base = {
2970 .cra_name = "echainiv(authenc(hmac(sha512),"
2971 "cbc(des)))",
2972 .cra_driver_name = "echainiv-authenc-"
2973 "hmac-sha512-cbc-des-caam",
2974 .cra_blocksize = DES_BLOCK_SIZE,
2975 },
0e479300
YK
2976 .setkey = aead_setkey,
2977 .setauthsize = aead_setauthsize,
479bcc7c 2978 .encrypt = aead_encrypt,
8b18e235 2979 .decrypt = aead_decrypt,
4427b1b4
KP
2980 .ivsize = DES_BLOCK_SIZE,
2981 .maxauthsize = SHA512_DIGEST_SIZE,
479bcc7c
HX
2982 },
2983 .caam = {
2984 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2985 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2986 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2987 .geniv = true,
2988 },
4427b1b4 2989 },
daebc465 2990 {
479bcc7c
HX
2991 .aead = {
2992 .base = {
2993 .cra_name = "authenc(hmac(md5),"
2994 "rfc3686(ctr(aes)))",
2995 .cra_driver_name = "authenc-hmac-md5-"
2996 "rfc3686-ctr-aes-caam",
2997 .cra_blocksize = 1,
2998 },
daebc465
CV
2999 .setkey = aead_setkey,
3000 .setauthsize = aead_setauthsize,
479bcc7c
HX
3001 .encrypt = aead_encrypt,
3002 .decrypt = aead_decrypt,
daebc465
CV
3003 .ivsize = CTR_RFC3686_IV_SIZE,
3004 .maxauthsize = MD5_DIGEST_SIZE,
479bcc7c
HX
3005 },
3006 .caam = {
3007 .class1_alg_type = OP_ALG_ALGSEL_AES |
3008 OP_ALG_AAI_CTR_MOD128,
3009 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3010 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3011 .rfc3686 = true,
3012 },
daebc465
CV
3013 },
3014 {
479bcc7c
HX
3015 .aead = {
3016 .base = {
3017 .cra_name = "seqiv(authenc("
3018 "hmac(md5),rfc3686(ctr(aes))))",
3019 .cra_driver_name = "seqiv-authenc-hmac-md5-"
3020 "rfc3686-ctr-aes-caam",
3021 .cra_blocksize = 1,
3022 },
daebc465
CV
3023 .setkey = aead_setkey,
3024 .setauthsize = aead_setauthsize,
479bcc7c 3025 .encrypt = aead_encrypt,
8b18e235 3026 .decrypt = aead_decrypt,
daebc465 3027 .ivsize = CTR_RFC3686_IV_SIZE,
479bcc7c
HX
3028 .maxauthsize = MD5_DIGEST_SIZE,
3029 },
3030 .caam = {
3031 .class1_alg_type = OP_ALG_ALGSEL_AES |
3032 OP_ALG_AAI_CTR_MOD128,
3033 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3034 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3035 .rfc3686 = true,
3036 .geniv = true,
3037 },
daebc465
CV
3038 },
3039 {
479bcc7c
HX
3040 .aead = {
3041 .base = {
3042 .cra_name = "authenc(hmac(sha1),"
3043 "rfc3686(ctr(aes)))",
3044 .cra_driver_name = "authenc-hmac-sha1-"
3045 "rfc3686-ctr-aes-caam",
3046 .cra_blocksize = 1,
3047 },
daebc465
CV
3048 .setkey = aead_setkey,
3049 .setauthsize = aead_setauthsize,
479bcc7c
HX
3050 .encrypt = aead_encrypt,
3051 .decrypt = aead_decrypt,
daebc465 3052 .ivsize = CTR_RFC3686_IV_SIZE,
479bcc7c
HX
3053 .maxauthsize = SHA1_DIGEST_SIZE,
3054 },
3055 .caam = {
3056 .class1_alg_type = OP_ALG_ALGSEL_AES |
3057 OP_ALG_AAI_CTR_MOD128,
3058 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3059 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3060 .rfc3686 = true,
3061 },
daebc465
CV
3062 },
3063 {
479bcc7c
HX
3064 .aead = {
3065 .base = {
3066 .cra_name = "seqiv(authenc("
3067 "hmac(sha1),rfc3686(ctr(aes))))",
3068 .cra_driver_name = "seqiv-authenc-hmac-sha1-"
3069 "rfc3686-ctr-aes-caam",
3070 .cra_blocksize = 1,
3071 },
daebc465
CV
3072 .setkey = aead_setkey,
3073 .setauthsize = aead_setauthsize,
479bcc7c 3074 .encrypt = aead_encrypt,
8b18e235 3075 .decrypt = aead_decrypt,
daebc465 3076 .ivsize = CTR_RFC3686_IV_SIZE,
479bcc7c
HX
3077 .maxauthsize = SHA1_DIGEST_SIZE,
3078 },
3079 .caam = {
3080 .class1_alg_type = OP_ALG_ALGSEL_AES |
3081 OP_ALG_AAI_CTR_MOD128,
3082 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3083 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3084 .rfc3686 = true,
3085 .geniv = true,
3086 },
daebc465
CV
3087 },
3088 {
479bcc7c
HX
3089 .aead = {
3090 .base = {
3091 .cra_name = "authenc(hmac(sha224),"
3092 "rfc3686(ctr(aes)))",
3093 .cra_driver_name = "authenc-hmac-sha224-"
3094 "rfc3686-ctr-aes-caam",
3095 .cra_blocksize = 1,
3096 },
daebc465
CV
3097 .setkey = aead_setkey,
3098 .setauthsize = aead_setauthsize,
479bcc7c
HX
3099 .encrypt = aead_encrypt,
3100 .decrypt = aead_decrypt,
daebc465 3101 .ivsize = CTR_RFC3686_IV_SIZE,
479bcc7c
HX
3102 .maxauthsize = SHA224_DIGEST_SIZE,
3103 },
3104 .caam = {
3105 .class1_alg_type = OP_ALG_ALGSEL_AES |
3106 OP_ALG_AAI_CTR_MOD128,
3107 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3108 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3109 .rfc3686 = true,
3110 },
daebc465
CV
3111 },
3112 {
479bcc7c
HX
3113 .aead = {
3114 .base = {
3115 .cra_name = "seqiv(authenc("
3116 "hmac(sha224),rfc3686(ctr(aes))))",
3117 .cra_driver_name = "seqiv-authenc-hmac-sha224-"
3118 "rfc3686-ctr-aes-caam",
3119 .cra_blocksize = 1,
3120 },
daebc465
CV
3121 .setkey = aead_setkey,
3122 .setauthsize = aead_setauthsize,
479bcc7c 3123 .encrypt = aead_encrypt,
8b18e235 3124 .decrypt = aead_decrypt,
daebc465 3125 .ivsize = CTR_RFC3686_IV_SIZE,
479bcc7c
HX
3126 .maxauthsize = SHA224_DIGEST_SIZE,
3127 },
3128 .caam = {
3129 .class1_alg_type = OP_ALG_ALGSEL_AES |
3130 OP_ALG_AAI_CTR_MOD128,
3131 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3132 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3133 .rfc3686 = true,
3134 .geniv = true,
3135 },
acdca31d
YK
3136 },
3137 {
479bcc7c
HX
3138 .aead = {
3139 .base = {
3140 .cra_name = "authenc(hmac(sha256),"
3141 "rfc3686(ctr(aes)))",
3142 .cra_driver_name = "authenc-hmac-sha256-"
3143 "rfc3686-ctr-aes-caam",
3144 .cra_blocksize = 1,
acdca31d 3145 },
479bcc7c
HX
3146 .setkey = aead_setkey,
3147 .setauthsize = aead_setauthsize,
3148 .encrypt = aead_encrypt,
3149 .decrypt = aead_decrypt,
3150 .ivsize = CTR_RFC3686_IV_SIZE,
3151 .maxauthsize = SHA256_DIGEST_SIZE,
3152 },
3153 .caam = {
3154 .class1_alg_type = OP_ALG_ALGSEL_AES |
3155 OP_ALG_AAI_CTR_MOD128,
3156 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3157 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3158 .rfc3686 = true,
3159 },
acdca31d
YK
3160 },
3161 {
479bcc7c
HX
3162 .aead = {
3163 .base = {
3164 .cra_name = "seqiv(authenc(hmac(sha256),"
3165 "rfc3686(ctr(aes))))",
3166 .cra_driver_name = "seqiv-authenc-hmac-sha256-"
3167 "rfc3686-ctr-aes-caam",
3168 .cra_blocksize = 1,
acdca31d 3169 },
479bcc7c
HX
3170 .setkey = aead_setkey,
3171 .setauthsize = aead_setauthsize,
3172 .encrypt = aead_encrypt,
8b18e235 3173 .decrypt = aead_decrypt,
479bcc7c
HX
3174 .ivsize = CTR_RFC3686_IV_SIZE,
3175 .maxauthsize = SHA256_DIGEST_SIZE,
3176 },
3177 .caam = {
3178 .class1_alg_type = OP_ALG_ALGSEL_AES |
3179 OP_ALG_AAI_CTR_MOD128,
3180 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3181 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3182 .rfc3686 = true,
3183 .geniv = true,
3184 },
2b22f6c5
CV
3185 },
3186 {
479bcc7c
HX
3187 .aead = {
3188 .base = {
3189 .cra_name = "authenc(hmac(sha384),"
3190 "rfc3686(ctr(aes)))",
3191 .cra_driver_name = "authenc-hmac-sha384-"
3192 "rfc3686-ctr-aes-caam",
3193 .cra_blocksize = 1,
2b22f6c5 3194 },
479bcc7c
HX
3195 .setkey = aead_setkey,
3196 .setauthsize = aead_setauthsize,
3197 .encrypt = aead_encrypt,
3198 .decrypt = aead_decrypt,
a5f57cff 3199 .ivsize = CTR_RFC3686_IV_SIZE,
479bcc7c
HX
3200 .maxauthsize = SHA384_DIGEST_SIZE,
3201 },
3202 .caam = {
3203 .class1_alg_type = OP_ALG_ALGSEL_AES |
3204 OP_ALG_AAI_CTR_MOD128,
3205 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3206 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3207 .rfc3686 = true,
3208 },
3209 },
f2147b88
HX
3210 {
3211 .aead = {
3212 .base = {
479bcc7c
HX
3213 .cra_name = "seqiv(authenc(hmac(sha384),"
3214 "rfc3686(ctr(aes))))",
3215 .cra_driver_name = "seqiv-authenc-hmac-sha384-"
3216 "rfc3686-ctr-aes-caam",
f2147b88
HX
3217 .cra_blocksize = 1,
3218 },
479bcc7c
HX
3219 .setkey = aead_setkey,
3220 .setauthsize = aead_setauthsize,
3221 .encrypt = aead_encrypt,
8b18e235 3222 .decrypt = aead_decrypt,
479bcc7c
HX
3223 .ivsize = CTR_RFC3686_IV_SIZE,
3224 .maxauthsize = SHA384_DIGEST_SIZE,
f2147b88
HX
3225 },
3226 .caam = {
479bcc7c
HX
3227 .class1_alg_type = OP_ALG_ALGSEL_AES |
3228 OP_ALG_AAI_CTR_MOD128,
3229 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3230 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3231 .rfc3686 = true,
3232 .geniv = true,
f2147b88
HX
3233 },
3234 },
3235 {
3236 .aead = {
3237 .base = {
479bcc7c
HX
3238 .cra_name = "authenc(hmac(sha512),"
3239 "rfc3686(ctr(aes)))",
3240 .cra_driver_name = "authenc-hmac-sha512-"
3241 "rfc3686-ctr-aes-caam",
f2147b88
HX
3242 .cra_blocksize = 1,
3243 },
479bcc7c
HX
3244 .setkey = aead_setkey,
3245 .setauthsize = aead_setauthsize,
3246 .encrypt = aead_encrypt,
3247 .decrypt = aead_decrypt,
3248 .ivsize = CTR_RFC3686_IV_SIZE,
3249 .maxauthsize = SHA512_DIGEST_SIZE,
f2147b88
HX
3250 },
3251 .caam = {
479bcc7c
HX
3252 .class1_alg_type = OP_ALG_ALGSEL_AES |
3253 OP_ALG_AAI_CTR_MOD128,
3254 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3255 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 3256 .rfc3686 = true,
f2147b88
HX
3257 },
3258 },
f2147b88
HX
3259 {
3260 .aead = {
3261 .base = {
479bcc7c
HX
3262 .cra_name = "seqiv(authenc(hmac(sha512),"
3263 "rfc3686(ctr(aes))))",
3264 .cra_driver_name = "seqiv-authenc-hmac-sha512-"
3265 "rfc3686-ctr-aes-caam",
f2147b88
HX
3266 .cra_blocksize = 1,
3267 },
479bcc7c
HX
3268 .setkey = aead_setkey,
3269 .setauthsize = aead_setauthsize,
3270 .encrypt = aead_encrypt,
8b18e235 3271 .decrypt = aead_decrypt,
479bcc7c
HX
3272 .ivsize = CTR_RFC3686_IV_SIZE,
3273 .maxauthsize = SHA512_DIGEST_SIZE,
f2147b88
HX
3274 },
3275 .caam = {
479bcc7c
HX
3276 .class1_alg_type = OP_ALG_ALGSEL_AES |
3277 OP_ALG_AAI_CTR_MOD128,
3278 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3279 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3280 .rfc3686 = true,
3281 .geniv = true,
f2147b88
HX
3282 },
3283 },
d6bbd4ee
HG
3284 {
3285 .aead = {
3286 .base = {
3287 .cra_name = "rfc7539(chacha20,poly1305)",
3288 .cra_driver_name = "rfc7539-chacha20-poly1305-"
3289 "caam",
3290 .cra_blocksize = 1,
3291 },
3292 .setkey = chachapoly_setkey,
3293 .setauthsize = chachapoly_setauthsize,
3294 .encrypt = chachapoly_encrypt,
3295 .decrypt = chachapoly_decrypt,
3296 .ivsize = CHACHAPOLY_IV_SIZE,
3297 .maxauthsize = POLY1305_DIGEST_SIZE,
3298 },
3299 .caam = {
3300 .class1_alg_type = OP_ALG_ALGSEL_CHACHA20 |
3301 OP_ALG_AAI_AEAD,
3302 .class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
3303 OP_ALG_AAI_AEAD,
3304 },
3305 },
3306 {
3307 .aead = {
3308 .base = {
3309 .cra_name = "rfc7539esp(chacha20,poly1305)",
3310 .cra_driver_name = "rfc7539esp-chacha20-"
3311 "poly1305-caam",
3312 .cra_blocksize = 1,
3313 },
3314 .setkey = chachapoly_setkey,
3315 .setauthsize = chachapoly_setauthsize,
3316 .encrypt = chachapoly_encrypt,
3317 .decrypt = chachapoly_decrypt,
3318 .ivsize = 8,
3319 .maxauthsize = POLY1305_DIGEST_SIZE,
3320 },
3321 .caam = {
3322 .class1_alg_type = OP_ALG_ALGSEL_CHACHA20 |
3323 OP_ALG_AAI_AEAD,
3324 .class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
3325 OP_ALG_AAI_AEAD,
3326 },
3327 },
f2147b88
HX
3328};
3329
7e0880b9
HG
3330static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam,
3331 bool uses_dkp)
8e8ec596 3332{
bbf22344 3333 dma_addr_t dma_addr;
7e0880b9 3334 struct caam_drv_private *priv;
bbf22344 3335
cfc6f11b
RG
3336 ctx->jrdev = caam_jr_alloc();
3337 if (IS_ERR(ctx->jrdev)) {
3338 pr_err("Job Ring Device allocation for transform failed\n");
3339 return PTR_ERR(ctx->jrdev);
3340 }
8e8ec596 3341
7e0880b9
HG
3342 priv = dev_get_drvdata(ctx->jrdev->parent);
3343 if (priv->era >= 6 && uses_dkp)
3344 ctx->dir = DMA_BIDIRECTIONAL;
3345 else
3346 ctx->dir = DMA_TO_DEVICE;
3347
bbf22344
HG
3348 dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
3349 offsetof(struct caam_ctx,
3350 sh_desc_enc_dma),
7e0880b9 3351 ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
bbf22344
HG
3352 if (dma_mapping_error(ctx->jrdev, dma_addr)) {
3353 dev_err(ctx->jrdev, "unable to map key, shared descriptors\n");
3354 caam_jr_free(ctx->jrdev);
3355 return -ENOMEM;
3356 }
3357
3358 ctx->sh_desc_enc_dma = dma_addr;
3359 ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx,
3360 sh_desc_dec);
bbf22344
HG
3361 ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key);
3362
8e8ec596 3363 /* copy descriptor header template value */
db57656b
HG
3364 ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
3365 ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;
8e8ec596
KP
3366
3367 return 0;
3368}
3369
5ca7badb 3370static int caam_cra_init(struct crypto_skcipher *tfm)
8e8ec596 3371{
5ca7badb
HG
3372 struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
3373 struct caam_skcipher_alg *caam_alg =
3374 container_of(alg, typeof(*caam_alg), skcipher);
8e8ec596 3375
5ca7badb
HG
3376 return caam_init_common(crypto_skcipher_ctx(tfm), &caam_alg->caam,
3377 false);
f2147b88
HX
3378}
3379
3380static int caam_aead_init(struct crypto_aead *tfm)
3381{
3382 struct aead_alg *alg = crypto_aead_alg(tfm);
3383 struct caam_aead_alg *caam_alg =
3384 container_of(alg, struct caam_aead_alg, aead);
3385 struct caam_ctx *ctx = crypto_aead_ctx(tfm);
3386
7e0880b9
HG
3387 return caam_init_common(ctx, &caam_alg->caam,
3388 alg->setkey == aead_setkey);
f2147b88
HX
3389}
3390
3391static void caam_exit_common(struct caam_ctx *ctx)
3392{
bbf22344
HG
3393 dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
3394 offsetof(struct caam_ctx, sh_desc_enc_dma),
7e0880b9 3395 ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
cfc6f11b 3396 caam_jr_free(ctx->jrdev);
8e8ec596
KP
3397}
3398
5ca7badb 3399static void caam_cra_exit(struct crypto_skcipher *tfm)
f2147b88 3400{
5ca7badb 3401 caam_exit_common(crypto_skcipher_ctx(tfm));
f2147b88
HX
3402}
3403
3404static void caam_aead_exit(struct crypto_aead *tfm)
3405{
3406 caam_exit_common(crypto_aead_ctx(tfm));
3407}
3408
8e8ec596
KP
3409static void __exit caam_algapi_exit(void)
3410{
f2147b88
HX
3411 int i;
3412
3413 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
3414 struct caam_aead_alg *t_alg = driver_aeads + i;
3415
3416 if (t_alg->registered)
3417 crypto_unregister_aead(&t_alg->aead);
3418 }
8e8ec596 3419
5ca7badb
HG
3420 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
3421 struct caam_skcipher_alg *t_alg = driver_algs + i;
8e8ec596 3422
5ca7badb
HG
3423 if (t_alg->registered)
3424 crypto_unregister_skcipher(&t_alg->skcipher);
8e8ec596 3425 }
8e8ec596
KP
3426}
3427
5ca7badb 3428static void caam_skcipher_alg_init(struct caam_skcipher_alg *t_alg)
8e8ec596 3429{
5ca7badb 3430 struct skcipher_alg *alg = &t_alg->skcipher;
8e8ec596 3431
5ca7badb
HG
3432 alg->base.cra_module = THIS_MODULE;
3433 alg->base.cra_priority = CAAM_CRA_PRIORITY;
3434 alg->base.cra_ctxsize = sizeof(struct caam_ctx);
3435 alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
8e8ec596 3436
5ca7badb
HG
3437 alg->init = caam_cra_init;
3438 alg->exit = caam_cra_exit;
8e8ec596
KP
3439}
3440
f2147b88
HX
3441static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
3442{
3443 struct aead_alg *alg = &t_alg->aead;
3444
3445 alg->base.cra_module = THIS_MODULE;
3446 alg->base.cra_priority = CAAM_CRA_PRIORITY;
3447 alg->base.cra_ctxsize = sizeof(struct caam_ctx);
5e4b8c1f 3448 alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
f2147b88
HX
3449
3450 alg->init = caam_aead_init;
3451 alg->exit = caam_aead_exit;
3452}
3453
8e8ec596
KP
3454static int __init caam_algapi_init(void)
3455{
35af6403
RG
3456 struct device_node *dev_node;
3457 struct platform_device *pdev;
bf83490e 3458 struct caam_drv_private *priv;
8e8ec596 3459 int i = 0, err = 0;
d6bbd4ee 3460 u32 aes_vid, aes_inst, des_inst, md_vid, md_inst, ccha_inst, ptha_inst;
eaed71a4 3461 u32 arc4_inst;
bf83490e 3462 unsigned int md_limit = SHA512_DIGEST_SIZE;
f2147b88 3463 bool registered = false;
8e8ec596 3464
35af6403
RG
3465 dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0");
3466 if (!dev_node) {
3467 dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0");
3468 if (!dev_node)
3469 return -ENODEV;
3470 }
3471
3472 pdev = of_find_device_by_node(dev_node);
3473 if (!pdev) {
3474 of_node_put(dev_node);
3475 return -ENODEV;
3476 }
3477
00e87449 3478 priv = dev_get_drvdata(&pdev->dev);
35af6403
RG
3479 of_node_put(dev_node);
3480
3481 /*
3482 * If priv is NULL, it's probably because the caam driver wasn't
3483 * properly initialized (e.g. RNG4 init failed). Thus, bail out here.
3484 */
00e87449
WY
3485 if (!priv) {
3486 err = -ENODEV;
3487 goto out_put_dev;
3488 }
35af6403
RG
3489
3490
bf83490e
VM
3491 /*
3492 * Register crypto algorithms the device supports.
3493 * First, detect presence and attributes of DES, AES, and MD blocks.
3494 */
d239b10d
HG
3495 if (priv->era < 10) {
3496 u32 cha_vid, cha_inst;
3497
3498 cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
3499 aes_vid = cha_vid & CHA_ID_LS_AES_MASK;
3500 md_vid = (cha_vid & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
3501
3502 cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
3503 des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >>
3504 CHA_ID_LS_DES_SHIFT;
3505 aes_inst = cha_inst & CHA_ID_LS_AES_MASK;
3506 md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
eaed71a4
IP
3507 arc4_inst = (cha_inst & CHA_ID_LS_ARC4_MASK) >>
3508 CHA_ID_LS_ARC4_SHIFT;
d6bbd4ee
HG
3509 ccha_inst = 0;
3510 ptha_inst = 0;
d239b10d
HG
3511 } else {
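		/*
		 * Era 10+ reports CHA presence and versions through
		 * per-accelerator version registers rather than the combined
		 * cha_id/cha_num registers read above.
		 */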
3512 u32 aesa, mdha;
3513
3514 aesa = rd_reg32(&priv->ctrl->vreg.aesa);
3515 mdha = rd_reg32(&priv->ctrl->vreg.mdha);
3516
3517 aes_vid = (aesa & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;
3518 md_vid = (mdha & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;
3519
3520 des_inst = rd_reg32(&priv->ctrl->vreg.desa) & CHA_VER_NUM_MASK;
3521 aes_inst = aesa & CHA_VER_NUM_MASK;
3522 md_inst = mdha & CHA_VER_NUM_MASK;
d6bbd4ee
HG
3523 ccha_inst = rd_reg32(&priv->ctrl->vreg.ccha) & CHA_VER_NUM_MASK;
3524 ptha_inst = rd_reg32(&priv->ctrl->vreg.ptha) & CHA_VER_NUM_MASK;
eaed71a4 3525 arc4_inst = rd_reg32(&priv->ctrl->vreg.afha) & CHA_VER_NUM_MASK;
d239b10d 3526 }

	/* If MD is present, limit digest size based on LP256 */
	if (md_inst && md_vid == CHA_VER_VID_MD_LP256)
		md_limit = SHA256_DIGEST_SIZE;

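	/*
	 * Walk the skcipher templates and register only those the detected
	 * hardware can run: entries whose cipher CHA (DES/3DES, AES, ARC4)
	 * is not instantiated are skipped, as is AES-XTS on low-power AES
	 * blocks that do not implement it.
	 */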
	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		struct caam_skcipher_alg *t_alg = driver_algs + i;
		u32 alg_sel = t_alg->caam.class1_alg_type & OP_ALG_ALGSEL_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/* Skip ARC4 algorithms if not supported by device */
		if (!arc4_inst && alg_sel == OP_ALG_ALGSEL_ARC4)
			continue;

		/*
		 * Check support for AES modes not available
		 * on LP devices.
		 */
		if (aes_vid == CHA_VER_VID_AES_LP &&
		    (t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK) ==
		     OP_ALG_AAI_XTS)
			continue;

		caam_skcipher_alg_init(t_alg);

		err = crypto_register_skcipher(&t_alg->skcipher);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->skcipher.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

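	/*
	 * Same idea for the AEAD templates, except that both the class 1
	 * (cipher) and class 2 (authentication) algorithm selectors must be
	 * backed by instantiated CHAs, and MD-based templates are dropped
	 * when the MD block is missing or its digest limit is smaller than
	 * the template's maxauthsize.
	 */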
	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;
		u32 c1_alg_sel = t_alg->caam.class1_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 c2_alg_sel = t_alg->caam.class2_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (c1_alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/* Skip CHACHA20 algorithms if not supported by device */
		if (c1_alg_sel == OP_ALG_ALGSEL_CHACHA20 && !ccha_inst)
			continue;

		/* Skip POLY1305 algorithms if not supported by device */
		if (c2_alg_sel == OP_ALG_ALGSEL_POLY1305 && !ptha_inst)
			continue;

		/*
		 * Check support for AES algorithms not available
		 * on LP devices.
		 */
		if (aes_vid == CHA_VER_VID_AES_LP && alg_aai == OP_ALG_AAI_GCM)
			continue;

		/*
		 * Skip algorithms requiring message digests
		 * if MD or MD size is not supported by device.
		 */
		if (is_mdha(c2_alg_sel) &&
		    (!md_inst || t_alg->aead.maxauthsize > md_limit))
			continue;

		caam_aead_alg_init(t_alg);

		err = crypto_register_aead(&t_alg->aead);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->aead.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

	if (registered)
		pr_info("caam algorithms registered in /proc/crypto\n");

out_put_dev:
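	/* Balances the reference taken by of_find_device_by_node() above. */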
	put_device(&pdev->dev);
	return err;
}
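
/*
 * Illustrative only (not part of the driver): once registered, these
 * implementations are reached through the normal crypto API, e.g.
 *
 *	struct crypto_skcipher *tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *
 * and the CAAM variant is chosen whenever CAAM_CRA_PRIORITY is the highest
 * priority registered for that algorithm name.
 */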

module_init(caam_algapi_init);
module_exit(caam_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("FSL CAAM support for crypto API");
MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");