// SPDX-License-Identifier: GPL-2.0+
/*
 * caam - Freescale FSL CAAM support for crypto API
 *
 * Copyright 2008-2011 Freescale Semiconductor, Inc.
 * Copyright 2016-2019 NXP
 *
 * Based on talitos crypto API driver.
 *
 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
 *
 * ---------------                     ---------------
 * | JobDesc #1  |-------------------->|  ShareDesc  |
 * | *(packet 1) |                     |    (PDB)    |
 * ---------------      |------------->|  (hashKey)  |
 *       .              |              | (cipherKey) |
 *       .              |    |-------->| (operation) |
 * ---------------      |    |         ---------------
 * | JobDesc #2  |------|    |
 * | *(packet 2) |           |
 * ---------------           |
 *       .                   |
 *       .                   |
 * ---------------           |
 * | JobDesc #3  |------------
 * | *(packet 3) |
 * ---------------
 *
 * The SharedDesc never changes for a connection unless rekeyed, but
 * each packet will likely be in a different place. So all we need
 * to know to process the packet is where the input is, where the
 * output goes, and what context we want to process with. Context is
 * in the SharedDesc, packet references in the JobDesc.
 *
 * So, a job desc looks like:
 *
 * ---------------------
 * | Header            |
 * | ShareDesc Pointer |
 * | SEQ_OUT_PTR       |
 * | (output buffer)   |
 * | (output length)   |
 * | SEQ_IN_PTR        |
 * | (input buffer)    |
 * | (input length)    |
 * ---------------------
 */

#include "compat.h"

#include "regs.h"
#include "intern.h"
#include "desc_constr.h"
#include "jr.h"
#include "error.h"
#include "sg_sw_sec4.h"
#include "key_gen.h"
#include "caamalg_desc.h"

/*
 * crypto alg
 */
#define CAAM_CRA_PRIORITY		3000
/* max key is sum of AES_MAX_KEY_SIZE, max split key size */
#define CAAM_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + \
					 CTR_RFC3686_NONCE_SIZE + \
					 SHA512_DIGEST_SIZE * 2)
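/*
 * Worked example (values taken from the crypto headers): 32 (AES_MAX_KEY_SIZE)
 * + 4 (CTR_RFC3686_NONCE_SIZE) + 2 * 64 (SHA512_DIGEST_SIZE) = 164 bytes,
 * which is the size of the per-session key buffer declared below.
 */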

#define AEAD_DESC_JOB_IO_LEN		(DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
#define GCM_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 4)
#define AUTHENC_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 5)

#define CHACHAPOLY_DESC_JOB_IO_LEN	(AEAD_DESC_JOB_IO_LEN + CAAM_CMD_SZ * 6)

#define DESC_MAX_USED_BYTES		(CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN)
#define DESC_MAX_USED_LEN		(DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
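/*
 * DESC_MAX_USED_LEN caps the shared descriptors stored in the session context:
 * a shared descriptor plus the I/O commands of the job descriptor pointing to
 * it must still fit in the 64-word descriptor buffer, as the set_sh_desc()
 * helpers below re-check per algorithm.
 */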

struct caam_alg_entry {
	int class1_alg_type;
	int class2_alg_type;
	bool rfc3686;
	bool geniv;
	bool nodkp;
};

struct caam_aead_alg {
	struct aead_alg aead;
	struct caam_alg_entry caam;
	bool registered;
};

struct caam_skcipher_alg {
	struct skcipher_alg skcipher;
	struct caam_alg_entry caam;
	bool registered;
};

/*
 * per-session context
 */
struct caam_ctx {
	u32 sh_desc_enc[DESC_MAX_USED_LEN];
	u32 sh_desc_dec[DESC_MAX_USED_LEN];
	u8 key[CAAM_MAX_KEY_SIZE];
	dma_addr_t sh_desc_enc_dma;
	dma_addr_t sh_desc_dec_dma;
	dma_addr_t key_dma;
	enum dma_data_direction dir;
	struct device *jrdev;
	struct alginfo adata;
	struct alginfo cdata;
	unsigned int authsize;
};
116
ae4a825f
HG
117static int aead_null_set_sh_desc(struct crypto_aead *aead)
118{
ae4a825f
HG
119 struct caam_ctx *ctx = crypto_aead_ctx(aead);
120 struct device *jrdev = ctx->jrdev;
7e0880b9 121 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
ae4a825f 122 u32 *desc;
4cbe79cc
HG
123 int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
124 ctx->adata.keylen_pad;
ae4a825f
HG
125
126 /*
127 * Job Descriptor and Shared Descriptors
128 * must all fit into the 64-word Descriptor h/w Buffer
129 */
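	/*
	 * If the split key fits in the space that is left, it is copied inline
	 * into the shared descriptor (key_virt); otherwise the descriptor
	 * references it through its DMA address (key_dma).
	 */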
4cbe79cc 130 if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) {
db57656b 131 ctx->adata.key_inline = true;
9c0bc511 132 ctx->adata.key_virt = ctx->key;
db57656b
HG
133 } else {
134 ctx->adata.key_inline = false;
9c0bc511 135 ctx->adata.key_dma = ctx->key_dma;
db57656b 136 }
ae4a825f 137
479bcc7c 138 /* aead_encrypt shared descriptor */
ae4a825f 139 desc = ctx->sh_desc_enc;
7e0880b9
HG
140 cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize,
141 ctrlpriv->era);
bbf22344 142 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
7e0880b9 143 desc_bytes(desc), ctx->dir);
ae4a825f
HG
144
145 /*
146 * Job Descriptor and Shared Descriptors
147 * must all fit into the 64-word Descriptor h/w Buffer
148 */
4cbe79cc 149 if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) {
db57656b 150 ctx->adata.key_inline = true;
9c0bc511 151 ctx->adata.key_virt = ctx->key;
db57656b
HG
152 } else {
153 ctx->adata.key_inline = false;
9c0bc511 154 ctx->adata.key_dma = ctx->key_dma;
db57656b 155 }
ae4a825f 156
479bcc7c 157 /* aead_decrypt shared descriptor */
8cea7b66 158 desc = ctx->sh_desc_dec;
7e0880b9
HG
159 cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize,
160 ctrlpriv->era);
bbf22344 161 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
7e0880b9 162 desc_bytes(desc), ctx->dir);
ae4a825f
HG
163
164 return 0;
165}
166
1acebad3
YK
167static int aead_set_sh_desc(struct crypto_aead *aead)
168{
479bcc7c
HX
169 struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
170 struct caam_aead_alg, aead);
add86d55 171 unsigned int ivsize = crypto_aead_ivsize(aead);
1acebad3
YK
172 struct caam_ctx *ctx = crypto_aead_ctx(aead);
173 struct device *jrdev = ctx->jrdev;
7e0880b9 174 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
daebc465 175 u32 ctx1_iv_off = 0;
8cea7b66 176 u32 *desc, *nonce = NULL;
4cbe79cc
HG
177 u32 inl_mask;
178 unsigned int data_len[2];
db57656b 179 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
daebc465 180 OP_ALG_AAI_CTR_MOD128);
479bcc7c 181 const bool is_rfc3686 = alg->caam.rfc3686;
1acebad3 182
2fdea258
HG
183 if (!ctx->authsize)
184 return 0;
185
ae4a825f 186 /* NULL encryption / decryption */
db57656b 187 if (!ctx->cdata.keylen)
ae4a825f
HG
188 return aead_null_set_sh_desc(aead);
189
daebc465
CV
190 /*
191 * AES-CTR needs to load IV in CONTEXT1 reg
192 * at an offset of 128bits (16bytes)
193 * CONTEXT1[255:128] = IV
194 */
195 if (ctr_mode)
196 ctx1_iv_off = 16;
197
198 /*
199 * RFC3686 specific:
200 * CONTEXT1[255:128] = {NONCE, IV, COUNTER}
201 */
8cea7b66 202 if (is_rfc3686) {
daebc465 203 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
8cea7b66
HG
204 nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
205 ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
206 }
daebc465 207
4cbe79cc
HG
208 data_len[0] = ctx->adata.keylen_pad;
209 data_len[1] = ctx->cdata.keylen;
210
479bcc7c
HX
211 if (alg->caam.geniv)
212 goto skip_enc;
213
1acebad3
YK
214 /*
215 * Job Descriptor and Shared Descriptors
216 * must all fit into the 64-word Descriptor h/w Buffer
217 */
4cbe79cc
HG
218 if (desc_inline_query(DESC_AEAD_ENC_LEN +
219 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
220 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
221 ARRAY_SIZE(data_len)) < 0)
222 return -EINVAL;
223
224 if (inl_mask & 1)
9c0bc511 225 ctx->adata.key_virt = ctx->key;
4cbe79cc 226 else
9c0bc511 227 ctx->adata.key_dma = ctx->key_dma;
4cbe79cc
HG
228
229 if (inl_mask & 2)
9c0bc511 230 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
4cbe79cc 231 else
9c0bc511 232 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;
4cbe79cc
HG
233
234 ctx->adata.key_inline = !!(inl_mask & 1);
235 ctx->cdata.key_inline = !!(inl_mask & 2);
1acebad3 236
479bcc7c 237 /* aead_encrypt shared descriptor */
1acebad3 238 desc = ctx->sh_desc_enc;
b189817c
HG
239 cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize,
240 ctx->authsize, is_rfc3686, nonce, ctx1_iv_off,
7e0880b9 241 false, ctrlpriv->era);
bbf22344 242 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
7e0880b9 243 desc_bytes(desc), ctx->dir);
1acebad3 244
479bcc7c 245skip_enc:
1acebad3
YK
246 /*
247 * Job Descriptor and Shared Descriptors
248 * must all fit into the 64-word Descriptor h/w Buffer
249 */
4cbe79cc
HG
250 if (desc_inline_query(DESC_AEAD_DEC_LEN +
251 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
252 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
253 ARRAY_SIZE(data_len)) < 0)
254 return -EINVAL;
255
256 if (inl_mask & 1)
9c0bc511 257 ctx->adata.key_virt = ctx->key;
4cbe79cc 258 else
9c0bc511 259 ctx->adata.key_dma = ctx->key_dma;
4cbe79cc
HG
260
261 if (inl_mask & 2)
9c0bc511 262 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
4cbe79cc 263 else
9c0bc511 264 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;
4cbe79cc
HG
265
266 ctx->adata.key_inline = !!(inl_mask & 1);
267 ctx->cdata.key_inline = !!(inl_mask & 2);
1acebad3 268
479bcc7c 269 /* aead_decrypt shared descriptor */
4464a7d4 270 desc = ctx->sh_desc_dec;
8cea7b66
HG
271 cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize,
272 ctx->authsize, alg->caam.geniv, is_rfc3686,
7e0880b9 273 nonce, ctx1_iv_off, false, ctrlpriv->era);
bbf22344 274 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
7e0880b9 275 desc_bytes(desc), ctx->dir);
1acebad3 276
479bcc7c
HX
277 if (!alg->caam.geniv)
278 goto skip_givenc;
279
1acebad3
YK
280 /*
281 * Job Descriptor and Shared Descriptors
282 * must all fit into the 64-word Descriptor h/w Buffer
283 */
4cbe79cc
HG
284 if (desc_inline_query(DESC_AEAD_GIVENC_LEN +
285 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
286 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
287 ARRAY_SIZE(data_len)) < 0)
288 return -EINVAL;
289
290 if (inl_mask & 1)
9c0bc511 291 ctx->adata.key_virt = ctx->key;
4cbe79cc 292 else
9c0bc511 293 ctx->adata.key_dma = ctx->key_dma;
4cbe79cc
HG
294
295 if (inl_mask & 2)
9c0bc511 296 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
4cbe79cc 297 else
9c0bc511 298 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;
4cbe79cc
HG
299
300 ctx->adata.key_inline = !!(inl_mask & 1);
301 ctx->cdata.key_inline = !!(inl_mask & 2);
1acebad3
YK
302
303 /* aead_givencrypt shared descriptor */
1d2d87e8 304 desc = ctx->sh_desc_enc;
8cea7b66
HG
305 cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize,
306 ctx->authsize, is_rfc3686, nonce,
7e0880b9 307 ctx1_iv_off, false, ctrlpriv->era);
bbf22344 308 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
7e0880b9 309 desc_bytes(desc), ctx->dir);
1acebad3 310
479bcc7c 311skip_givenc:
1acebad3
YK
312 return 0;
313}
314
0e479300 315static int aead_setauthsize(struct crypto_aead *authenc,
8e8ec596
KP
316 unsigned int authsize)
317{
318 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
319
320 ctx->authsize = authsize;
1acebad3 321 aead_set_sh_desc(authenc);
8e8ec596
KP
322
323 return 0;
324}
325
3ef8d945
TA
326static int gcm_set_sh_desc(struct crypto_aead *aead)
327{
3ef8d945
TA
328 struct caam_ctx *ctx = crypto_aead_ctx(aead);
329 struct device *jrdev = ctx->jrdev;
87ec3a0b 330 unsigned int ivsize = crypto_aead_ivsize(aead);
3ef8d945 331 u32 *desc;
4cbe79cc
HG
332 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
333 ctx->cdata.keylen;
3ef8d945 334
db57656b 335 if (!ctx->cdata.keylen || !ctx->authsize)
3ef8d945
TA
336 return 0;
337
338 /*
339 * AES GCM encrypt shared descriptor
340 * Job Descriptor and Shared Descriptor
341 * must fit into the 64-word Descriptor h/w Buffer
342 */
4cbe79cc 343 if (rem_bytes >= DESC_GCM_ENC_LEN) {
db57656b 344 ctx->cdata.key_inline = true;
9c0bc511 345 ctx->cdata.key_virt = ctx->key;
db57656b
HG
346 } else {
347 ctx->cdata.key_inline = false;
9c0bc511 348 ctx->cdata.key_dma = ctx->key_dma;
db57656b 349 }
3ef8d945
TA
350
351 desc = ctx->sh_desc_enc;
87ec3a0b 352 cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
bbf22344 353 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
7e0880b9 354 desc_bytes(desc), ctx->dir);
3ef8d945
TA
355
356 /*
357 * Job Descriptor and Shared Descriptors
358 * must all fit into the 64-word Descriptor h/w Buffer
359 */
4cbe79cc 360 if (rem_bytes >= DESC_GCM_DEC_LEN) {
db57656b 361 ctx->cdata.key_inline = true;
9c0bc511 362 ctx->cdata.key_virt = ctx->key;
db57656b
HG
363 } else {
364 ctx->cdata.key_inline = false;
9c0bc511 365 ctx->cdata.key_dma = ctx->key_dma;
db57656b 366 }
3ef8d945
TA
367
368 desc = ctx->sh_desc_dec;
87ec3a0b 369 cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
bbf22344 370 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
7e0880b9 371 desc_bytes(desc), ctx->dir);
3ef8d945
TA
372
373 return 0;
374}
375
376static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
377{
378 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
379
380 ctx->authsize = authsize;
381 gcm_set_sh_desc(authenc);
382
383 return 0;
384}
385
bac68f2c
TA
386static int rfc4106_set_sh_desc(struct crypto_aead *aead)
387{
bac68f2c
TA
388 struct caam_ctx *ctx = crypto_aead_ctx(aead);
389 struct device *jrdev = ctx->jrdev;
87ec3a0b 390 unsigned int ivsize = crypto_aead_ivsize(aead);
bac68f2c 391 u32 *desc;
4cbe79cc
HG
392 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
393 ctx->cdata.keylen;
bac68f2c 394
db57656b 395 if (!ctx->cdata.keylen || !ctx->authsize)
bac68f2c
TA
396 return 0;
397
398 /*
399 * RFC4106 encrypt shared descriptor
400 * Job Descriptor and Shared Descriptor
401 * must fit into the 64-word Descriptor h/w Buffer
402 */
4cbe79cc 403 if (rem_bytes >= DESC_RFC4106_ENC_LEN) {
db57656b 404 ctx->cdata.key_inline = true;
9c0bc511 405 ctx->cdata.key_virt = ctx->key;
db57656b
HG
406 } else {
407 ctx->cdata.key_inline = false;
9c0bc511 408 ctx->cdata.key_dma = ctx->key_dma;
db57656b 409 }
bac68f2c
TA
410
411 desc = ctx->sh_desc_enc;
87ec3a0b
HG
412 cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
413 false);
bbf22344 414 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
7e0880b9 415 desc_bytes(desc), ctx->dir);
bac68f2c
TA
416
417 /*
418 * Job Descriptor and Shared Descriptors
419 * must all fit into the 64-word Descriptor h/w Buffer
420 */
4cbe79cc 421 if (rem_bytes >= DESC_RFC4106_DEC_LEN) {
db57656b 422 ctx->cdata.key_inline = true;
9c0bc511 423 ctx->cdata.key_virt = ctx->key;
db57656b
HG
424 } else {
425 ctx->cdata.key_inline = false;
9c0bc511 426 ctx->cdata.key_dma = ctx->key_dma;
db57656b 427 }
bac68f2c
TA
428
429 desc = ctx->sh_desc_dec;
87ec3a0b
HG
430 cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
431 false);
bbf22344 432 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
7e0880b9 433 desc_bytes(desc), ctx->dir);
bac68f2c 434
bac68f2c
TA
435 return 0;
436}
437
438static int rfc4106_setauthsize(struct crypto_aead *authenc,
439 unsigned int authsize)
440{
441 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
442
443 ctx->authsize = authsize;
444 rfc4106_set_sh_desc(authenc);
445
446 return 0;
447}
448
5d0429a3
TA
449static int rfc4543_set_sh_desc(struct crypto_aead *aead)
450{
5d0429a3
TA
451 struct caam_ctx *ctx = crypto_aead_ctx(aead);
452 struct device *jrdev = ctx->jrdev;
87ec3a0b 453 unsigned int ivsize = crypto_aead_ivsize(aead);
5d0429a3 454 u32 *desc;
4cbe79cc
HG
455 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
456 ctx->cdata.keylen;
5d0429a3 457
db57656b 458 if (!ctx->cdata.keylen || !ctx->authsize)
5d0429a3
TA
459 return 0;
460
461 /*
462 * RFC4543 encrypt shared descriptor
463 * Job Descriptor and Shared Descriptor
464 * must fit into the 64-word Descriptor h/w Buffer
465 */
4cbe79cc 466 if (rem_bytes >= DESC_RFC4543_ENC_LEN) {
db57656b 467 ctx->cdata.key_inline = true;
9c0bc511 468 ctx->cdata.key_virt = ctx->key;
db57656b
HG
469 } else {
470 ctx->cdata.key_inline = false;
9c0bc511 471 ctx->cdata.key_dma = ctx->key_dma;
db57656b 472 }
5d0429a3
TA
473
474 desc = ctx->sh_desc_enc;
87ec3a0b
HG
475 cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
476 false);
bbf22344 477 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
7e0880b9 478 desc_bytes(desc), ctx->dir);
5d0429a3
TA
479
480 /*
481 * Job Descriptor and Shared Descriptors
482 * must all fit into the 64-word Descriptor h/w Buffer
483 */
4cbe79cc 484 if (rem_bytes >= DESC_RFC4543_DEC_LEN) {
db57656b 485 ctx->cdata.key_inline = true;
9c0bc511 486 ctx->cdata.key_virt = ctx->key;
db57656b
HG
487 } else {
488 ctx->cdata.key_inline = false;
9c0bc511 489 ctx->cdata.key_dma = ctx->key_dma;
db57656b 490 }
5d0429a3
TA
491
492 desc = ctx->sh_desc_dec;
87ec3a0b
HG
493 cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
494 false);
bbf22344 495 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
7e0880b9 496 desc_bytes(desc), ctx->dir);
5d0429a3 497
f2147b88
HX
498 return 0;
499}
5d0429a3 500
f2147b88
HX
501static int rfc4543_setauthsize(struct crypto_aead *authenc,
502 unsigned int authsize)
503{
504 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
5d0429a3 505
f2147b88
HX
506 ctx->authsize = authsize;
507 rfc4543_set_sh_desc(authenc);
5d0429a3 508
f2147b88
HX
509 return 0;
510}
5d0429a3 511
d6bbd4ee
HG
512static int chachapoly_set_sh_desc(struct crypto_aead *aead)
513{
514 struct caam_ctx *ctx = crypto_aead_ctx(aead);
515 struct device *jrdev = ctx->jrdev;
516 unsigned int ivsize = crypto_aead_ivsize(aead);
517 u32 *desc;
518
519 if (!ctx->cdata.keylen || !ctx->authsize)
520 return 0;
521
522 desc = ctx->sh_desc_enc;
523 cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize,
c10a5336 524 ctx->authsize, true, false);
d6bbd4ee
HG
525 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
526 desc_bytes(desc), ctx->dir);
527
528 desc = ctx->sh_desc_dec;
529 cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize,
c10a5336 530 ctx->authsize, false, false);
d6bbd4ee
HG
531 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
532 desc_bytes(desc), ctx->dir);
533
534 return 0;
535}
536
537static int chachapoly_setauthsize(struct crypto_aead *aead,
538 unsigned int authsize)
539{
540 struct caam_ctx *ctx = crypto_aead_ctx(aead);
541
542 if (authsize != POLY1305_DIGEST_SIZE)
543 return -EINVAL;
544
545 ctx->authsize = authsize;
546 return chachapoly_set_sh_desc(aead);
547}
548
549static int chachapoly_setkey(struct crypto_aead *aead, const u8 *key,
550 unsigned int keylen)
551{
552 struct caam_ctx *ctx = crypto_aead_ctx(aead);
553 unsigned int ivsize = crypto_aead_ivsize(aead);
554 unsigned int saltlen = CHACHAPOLY_IV_SIZE - ivsize;
555
1ca1b917 556 if (keylen != CHACHA_KEY_SIZE + saltlen) {
d6bbd4ee
HG
557 crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
558 return -EINVAL;
559 }
560
561 ctx->cdata.key_virt = key;
562 ctx->cdata.keylen = keylen - saltlen;
563
564 return chachapoly_set_sh_desc(aead);
565}
566
0e479300 567static int aead_setkey(struct crypto_aead *aead,
8e8ec596
KP
568 const u8 *key, unsigned int keylen)
569{
8e8ec596
KP
570 struct caam_ctx *ctx = crypto_aead_ctx(aead);
571 struct device *jrdev = ctx->jrdev;
7e0880b9 572 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
4e6e0b27 573 struct crypto_authenc_keys keys;
8e8ec596
KP
574 int ret = 0;
575
4e6e0b27 576 if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
8e8ec596
KP
577 goto badkey;
578
6e005503 579 dev_dbg(jrdev, "keylen %d enckeylen %d authkeylen %d\n",
4e6e0b27
HG
580 keys.authkeylen + keys.enckeylen, keys.enckeylen,
581 keys.authkeylen);
6e005503
SH
582 print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
583 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
8e8ec596 584
7e0880b9
HG
585 /*
586 * If DKP is supported, use it in the shared descriptor to generate
587 * the split key.
588 */
589 if (ctrlpriv->era >= 6) {
590 ctx->adata.keylen = keys.authkeylen;
591 ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype &
592 OP_ALG_ALGSEL_MASK);
593
594 if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE)
595 goto badkey;
596
597 memcpy(ctx->key, keys.authkey, keys.authkeylen);
598 memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey,
599 keys.enckeylen);
600 dma_sync_single_for_device(jrdev, ctx->key_dma,
601 ctx->adata.keylen_pad +
602 keys.enckeylen, ctx->dir);
603 goto skip_split_key;
604 }
605
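	/*
	 * Pre-DKP hardware (Era < 6): generate the authentication split key
	 * up front and append the encryption key after its padded length.
	 */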
6655cb8e
HG
606 ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey,
607 keys.authkeylen, CAAM_MAX_KEY_SIZE -
608 keys.enckeylen);
8e8ec596 609 if (ret) {
8e8ec596
KP
610 goto badkey;
611 }
612
613 /* append the encryption key after the auth split key */
db57656b 614 memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
bbf22344 615 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
7e0880b9 616 keys.enckeylen, ctx->dir);
6e005503
SH
617
618 print_hex_dump_debug("ctx.key@"__stringify(__LINE__)": ",
619 DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
620 ctx->adata.keylen_pad + keys.enckeylen, 1);
7e0880b9
HG
621
622skip_split_key:
db57656b 623 ctx->cdata.keylen = keys.enckeylen;
61dab972 624 memzero_explicit(&keys, sizeof(keys));
bbf22344 625 return aead_set_sh_desc(aead);
8e8ec596
KP
626badkey:
627 crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
61dab972 628 memzero_explicit(&keys, sizeof(keys));
8e8ec596
KP
629 return -EINVAL;
630}
631
1b52c409
HX
632static int des3_aead_setkey(struct crypto_aead *aead, const u8 *key,
633 unsigned int keylen)
634{
635 struct crypto_authenc_keys keys;
636 u32 flags;
637 int err;
638
639 err = crypto_authenc_extractkeys(&keys, key, keylen);
640 if (unlikely(err))
641 goto badkey;
642
643 err = -EINVAL;
644 if (keys.enckeylen != DES3_EDE_KEY_SIZE)
645 goto badkey;
646
647 flags = crypto_aead_get_flags(aead);
648 err = __des3_verify_key(&flags, keys.enckey);
649 if (unlikely(err)) {
650 crypto_aead_set_flags(aead, flags);
651 goto out;
652 }
653
654 err = aead_setkey(aead, key, keylen);
655
656out:
657 memzero_explicit(&keys, sizeof(keys));
658 return err;
659
660badkey:
661 crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
662 goto out;
663}
664
3ef8d945
TA
665static int gcm_setkey(struct crypto_aead *aead,
666 const u8 *key, unsigned int keylen)
667{
668 struct caam_ctx *ctx = crypto_aead_ctx(aead);
669 struct device *jrdev = ctx->jrdev;
3ef8d945 670
6e005503
SH
671 print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
672 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
3ef8d945
TA
673
674 memcpy(ctx->key, key, keylen);
7e0880b9 675 dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir);
db57656b 676 ctx->cdata.keylen = keylen;
3ef8d945 677
bbf22344 678 return gcm_set_sh_desc(aead);
3ef8d945
TA
679}
680
bac68f2c
TA
681static int rfc4106_setkey(struct crypto_aead *aead,
682 const u8 *key, unsigned int keylen)
683{
684 struct caam_ctx *ctx = crypto_aead_ctx(aead);
685 struct device *jrdev = ctx->jrdev;
bac68f2c
TA
686
687 if (keylen < 4)
688 return -EINVAL;
689
6e005503
SH
690 print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
691 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
bac68f2c
TA
692
693 memcpy(ctx->key, key, keylen);
694
695 /*
696 * The last four bytes of the key material are used as the salt value
697 * in the nonce. Update the AES key length.
698 */
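	/* e.g. a 20-byte key here is a 16-byte AES-128 key plus the 4-byte salt */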
db57656b 699 ctx->cdata.keylen = keylen - 4;
bbf22344 700 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
7e0880b9 701 ctx->dir);
bbf22344 702 return rfc4106_set_sh_desc(aead);
bac68f2c
TA
703}
704
5d0429a3
TA
705static int rfc4543_setkey(struct crypto_aead *aead,
706 const u8 *key, unsigned int keylen)
707{
708 struct caam_ctx *ctx = crypto_aead_ctx(aead);
709 struct device *jrdev = ctx->jrdev;
5d0429a3
TA
710
711 if (keylen < 4)
712 return -EINVAL;
713
6e005503
SH
714 print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
715 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
5d0429a3
TA
716
717 memcpy(ctx->key, key, keylen);
718
719 /*
720 * The last four bytes of the key material are used as the salt value
721 * in the nonce. Update the AES key length.
722 */
db57656b 723 ctx->cdata.keylen = keylen - 4;
bbf22344 724 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
7e0880b9 725 ctx->dir);
bbf22344 726 return rfc4543_set_sh_desc(aead);
5d0429a3
TA
727}
728
5ca7badb
HG
729static int skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
730 unsigned int keylen)
acdca31d 731{
5ca7badb
HG
732 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
733 struct caam_skcipher_alg *alg =
734 container_of(crypto_skcipher_alg(skcipher), typeof(*alg),
735 skcipher);
acdca31d 736 struct device *jrdev = ctx->jrdev;
5ca7badb 737 unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
acdca31d 738 u32 *desc;
2b22f6c5 739 u32 ctx1_iv_off = 0;
db57656b 740 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
2b22f6c5 741 OP_ALG_AAI_CTR_MOD128);
5ca7badb 742 const bool is_rfc3686 = alg->caam.rfc3686;
acdca31d 743
6e005503
SH
744 print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
745 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
2b22f6c5
CV
746 /*
747 * AES-CTR needs to load IV in CONTEXT1 reg
748 * at an offset of 128bits (16bytes)
749 * CONTEXT1[255:128] = IV
750 */
751 if (ctr_mode)
752 ctx1_iv_off = 16;
acdca31d 753
a5f57cff
CV
754 /*
755 * RFC3686 specific:
756 * | CONTEXT1[255:128] = {NONCE, IV, COUNTER}
757 * | *key = {KEY, NONCE}
758 */
759 if (is_rfc3686) {
760 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
761 keylen -= CTR_RFC3686_NONCE_SIZE;
762 }
763
db57656b 764 ctx->cdata.keylen = keylen;
662f70ed 765 ctx->cdata.key_virt = key;
db57656b 766 ctx->cdata.key_inline = true;
acdca31d 767
5ca7badb 768 /* skcipher_encrypt shared descriptor */
acdca31d 769 desc = ctx->sh_desc_enc;
9dbe3072
HG
770 cnstr_shdsc_skcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686,
771 ctx1_iv_off);
bbf22344 772 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
7e0880b9 773 desc_bytes(desc), ctx->dir);
8cea7b66 774
5ca7badb 775 /* skcipher_decrypt shared descriptor */
acdca31d 776 desc = ctx->sh_desc_dec;
9dbe3072
HG
777 cnstr_shdsc_skcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686,
778 ctx1_iv_off);
bbf22344 779 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
7e0880b9 780 desc_bytes(desc), ctx->dir);
acdca31d 781
8cea7b66 782 return 0;
acdca31d
YK
783}
784
eaed71a4
IP
785static int des_skcipher_setkey(struct crypto_skcipher *skcipher,
786 const u8 *key, unsigned int keylen)
787{
788 u32 tmp[DES3_EDE_EXPKEY_WORDS];
789 struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher);
790
791 if (keylen == DES3_EDE_KEY_SIZE &&
792 __des3_ede_setkey(tmp, &tfm->crt_flags, key, DES3_EDE_KEY_SIZE)) {
793 return -EINVAL;
794 }
795
796 if (!des_ekey(tmp, key) && (crypto_skcipher_get_flags(skcipher) &
797 CRYPTO_TFM_REQ_FORBID_WEAK_KEYS)) {
798 crypto_skcipher_set_flags(skcipher,
799 CRYPTO_TFM_RES_WEAK_KEY);
800 return -EINVAL;
801 }
802
803 return skcipher_setkey(skcipher, key, keylen);
804}
805
5ca7badb
HG
806static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
807 unsigned int keylen)
c6415a60 808{
5ca7badb 809 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
c6415a60 810 struct device *jrdev = ctx->jrdev;
8cea7b66 811 u32 *desc;
c6415a60
CV
812
813 if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
5ca7badb 814 crypto_skcipher_set_flags(skcipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
c6415a60
CV
815 dev_err(jrdev, "key size mismatch\n");
816 return -EINVAL;
817 }
818
db57656b 819 ctx->cdata.keylen = keylen;
662f70ed 820 ctx->cdata.key_virt = key;
db57656b 821 ctx->cdata.key_inline = true;
c6415a60 822
5ca7badb 823 /* xts_skcipher_encrypt shared descriptor */
c6415a60 824 desc = ctx->sh_desc_enc;
9dbe3072 825 cnstr_shdsc_xts_skcipher_encap(desc, &ctx->cdata);
bbf22344 826 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
7e0880b9 827 desc_bytes(desc), ctx->dir);
c6415a60 828
5ca7badb 829 /* xts_skcipher_decrypt shared descriptor */
c6415a60 830 desc = ctx->sh_desc_dec;
9dbe3072 831 cnstr_shdsc_xts_skcipher_decap(desc, &ctx->cdata);
bbf22344 832 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
7e0880b9 833 desc_bytes(desc), ctx->dir);
c6415a60
CV
834
835 return 0;
836}
837
/*
 * aead_edesc - s/w-extended aead descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @mapped_src_nents: number of segments in input h/w link table
 * @mapped_dst_nents: number of segments in output h/w link table
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 */
struct aead_edesc {
	int src_nents;
	int dst_nents;
	int mapped_src_nents;
	int mapped_dst_nents;
	int sec4_sg_bytes;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[];
};
859
/*
 * skcipher_edesc - s/w-extended skcipher descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @mapped_src_nents: number of segments in input h/w link table
 * @mapped_dst_nents: number of segments in output h/w link table
 * @iv_dma: dma address of iv for checking continuity and link table
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 *	     and IV
 */
struct skcipher_edesc {
	int src_nents;
	int dst_nents;
	int mapped_src_nents;
	int mapped_dst_nents;
	dma_addr_t iv_dma;
	int sec4_sg_bytes;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[0];
};
884
1acebad3 885static void caam_unmap(struct device *dev, struct scatterlist *src,
643b39b0 886 struct scatterlist *dst, int src_nents,
13fb8fd7 887 int dst_nents,
cf5448b5 888 dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
a299c837 889 int sec4_sg_bytes)
8e8ec596 890{
643b39b0 891 if (dst != src) {
fa0c92db
HG
892 if (src_nents)
893 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
763069ba
HG
894 if (dst_nents)
895 dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
8e8ec596 896 } else {
fa0c92db 897 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
8e8ec596
KP
898 }
899
1acebad3 900 if (iv_dma)
334d37c9 901 dma_unmap_single(dev, iv_dma, ivsize, DMA_BIDIRECTIONAL);
a299c837
YK
902 if (sec4_sg_bytes)
903 dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
8e8ec596
KP
904 DMA_TO_DEVICE);
905}
906
1acebad3
YK
907static void aead_unmap(struct device *dev,
908 struct aead_edesc *edesc,
909 struct aead_request *req)
f2147b88
HX
910{
911 caam_unmap(dev, req->src, req->dst,
cf5448b5 912 edesc->src_nents, edesc->dst_nents, 0, 0,
f2147b88
HX
913 edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
914}
915
5ca7badb
HG
916static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,
917 struct skcipher_request *req)
acdca31d 918{
5ca7badb
HG
919 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
920 int ivsize = crypto_skcipher_ivsize(skcipher);
acdca31d
YK
921
922 caam_unmap(dev, req->src, req->dst,
13fb8fd7 923 edesc->src_nents, edesc->dst_nents,
cf5448b5 924 edesc->iv_dma, ivsize,
643b39b0 925 edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
acdca31d
YK
926}
927
0e479300 928static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
8e8ec596
KP
929 void *context)
930{
0e479300
YK
931 struct aead_request *req = context;
932 struct aead_edesc *edesc;
f2147b88 933
6e005503 934 dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
f2147b88
HX
935
936 edesc = container_of(desc, struct aead_edesc, hw_desc[0]);
937
938 if (err)
939 caam_jr_strstatus(jrdev, err);
940
941 aead_unmap(jrdev, edesc, req);
942
943 kfree(edesc);
944
945 aead_request_complete(req, err);
946}
947
0e479300 948static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
8e8ec596
KP
949 void *context)
950{
0e479300
YK
951 struct aead_request *req = context;
952 struct aead_edesc *edesc;
f2147b88 953
6e005503 954 dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
f2147b88
HX
955
956 edesc = container_of(desc, struct aead_edesc, hw_desc[0]);
957
958 if (err)
959 caam_jr_strstatus(jrdev, err);
960
961 aead_unmap(jrdev, edesc, req);
962
963 /*
964 * verify hw auth check passed else return -EBADMSG
965 */
966 if ((err & JRSTA_CCBERR_ERRID_MASK) == JRSTA_CCBERR_ERRID_ICVCHK)
967 err = -EBADMSG;
968
969 kfree(edesc);
970
971 aead_request_complete(req, err);
972}
973
5ca7badb
HG
974static void skcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
975 void *context)
acdca31d 976{
5ca7badb
HG
977 struct skcipher_request *req = context;
978 struct skcipher_edesc *edesc;
979 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
980 int ivsize = crypto_skcipher_ivsize(skcipher);
acdca31d 981
6e005503 982 dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
acdca31d 983
5ca7badb 984 edesc = container_of(desc, struct skcipher_edesc, hw_desc[0]);
acdca31d 985
fa9659cd
MV
986 if (err)
987 caam_jr_strstatus(jrdev, err);
acdca31d 988
5ca7badb 989 skcipher_unmap(jrdev, edesc, req);
854b06f7
DG
990
991 /*
5ca7badb 992 * The crypto API expects us to set the IV (req->iv) to the last
334d37c9
HG
993 * ciphertext block (CBC mode) or last counter (CTR mode).
994 * This is used e.g. by the CTS mode.
854b06f7 995 */
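	/*
	 * The engine wrote the updated IV into the buffer placed right after
	 * the h/w S/G table, which is why it is read back below from
	 * sec4_sg + sec4_sg_bytes.
	 */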
334d37c9
HG
996 if (ivsize) {
997 memcpy(req->iv, (u8 *)edesc->sec4_sg + edesc->sec4_sg_bytes,
998 ivsize);
854b06f7 999
6e005503
SH
1000 print_hex_dump_debug("dstiv @"__stringify(__LINE__)": ",
1001 DUMP_PREFIX_ADDRESS, 16, 4, req->iv,
1002 edesc->src_nents > 1 ? 100 : ivsize, 1);
334d37c9 1003 }
6e005503 1004
8a82451b 1005 caam_dump_sg("dst @" __stringify(__LINE__)": ",
bb992bc4
SH
1006 DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
1007 edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);
1008
acdca31d
YK
1009 kfree(edesc);
1010
5ca7badb 1011 skcipher_request_complete(req, err);
acdca31d
YK
1012}
1013
5ca7badb
HG
1014static void skcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
1015 void *context)
acdca31d 1016{
5ca7badb
HG
1017 struct skcipher_request *req = context;
1018 struct skcipher_edesc *edesc;
5ca7badb
HG
1019 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1020 int ivsize = crypto_skcipher_ivsize(skcipher);
acdca31d 1021
6e005503 1022 dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
acdca31d 1023
5ca7badb 1024 edesc = container_of(desc, struct skcipher_edesc, hw_desc[0]);
fa9659cd
MV
1025 if (err)
1026 caam_jr_strstatus(jrdev, err);
acdca31d 1027
bb992bc4
SH
1028 skcipher_unmap(jrdev, edesc, req);
1029
334d37c9
HG
1030 /*
1031 * The crypto API expects us to set the IV (req->iv) to the last
1032 * ciphertext block (CBC mode) or last counter (CTR mode).
1033 * This is used e.g. by the CTS mode.
1034 */
1035 if (ivsize) {
1036 memcpy(req->iv, (u8 *)edesc->sec4_sg + edesc->sec4_sg_bytes,
1037 ivsize);
1038
1039 print_hex_dump_debug("dstiv @" __stringify(__LINE__)": ",
1040 DUMP_PREFIX_ADDRESS, 16, 4, req->iv,
1041 ivsize, 1);
1042 }
1043
8a82451b 1044 caam_dump_sg("dst @" __stringify(__LINE__)": ",
972b812b 1045 DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
5ca7badb 1046 edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);
acdca31d 1047
acdca31d
YK
1048 kfree(edesc);
1049
5ca7badb 1050 skcipher_request_complete(req, err);
acdca31d
YK
1051}
1052
f2147b88
HX
1053/*
1054 * Fill in aead job descriptor
1055 */
1056static void init_aead_job(struct aead_request *req,
1057 struct aead_edesc *edesc,
1058 bool all_contig, bool encrypt)
1059{
1060 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1061 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1062 int authsize = ctx->authsize;
1063 u32 *desc = edesc->hw_desc;
1064 u32 out_options, in_options;
1065 dma_addr_t dst_dma, src_dma;
1066 int len, sec4_sg_index = 0;
1067 dma_addr_t ptr;
1068 u32 *sh_desc;
1069
1070 sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
1071 ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;
1072
1073 len = desc_len(sh_desc);
1074 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
1075
1076 if (all_contig) {
ba4cf71b
IP
1077 src_dma = edesc->mapped_src_nents ? sg_dma_address(req->src) :
1078 0;
f2147b88
HX
1079 in_options = 0;
1080 } else {
1081 src_dma = edesc->sec4_sg_dma;
ba4cf71b 1082 sec4_sg_index += edesc->mapped_src_nents;
f2147b88
HX
1083 in_options = LDST_SGF;
1084 }
1085
1086 append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
1087 in_options);
1088
1089 dst_dma = src_dma;
1090 out_options = in_options;
1091
1092 if (unlikely(req->src != req->dst)) {
ba4cf71b 1093 if (!edesc->mapped_dst_nents) {
763069ba 1094 dst_dma = 0;
dcd9c76e 1095 out_options = 0;
ba4cf71b 1096 } else if (edesc->mapped_dst_nents == 1) {
f2147b88 1097 dst_dma = sg_dma_address(req->dst);
42e95d1f 1098 out_options = 0;
f2147b88
HX
1099 } else {
1100 dst_dma = edesc->sec4_sg_dma +
1101 sec4_sg_index *
1102 sizeof(struct sec4_sg_entry);
1103 out_options = LDST_SGF;
1104 }
1105 }
1106
1107 if (encrypt)
1108 append_seq_out_ptr(desc, dst_dma,
1109 req->assoclen + req->cryptlen + authsize,
1110 out_options);
1111 else
1112 append_seq_out_ptr(desc, dst_dma,
1113 req->assoclen + req->cryptlen - authsize,
1114 out_options);
f2147b88
HX
1115}
1116
1117static void init_gcm_job(struct aead_request *req,
1118 struct aead_edesc *edesc,
1119 bool all_contig, bool encrypt)
1120{
1121 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1122 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1123 unsigned int ivsize = crypto_aead_ivsize(aead);
1124 u32 *desc = edesc->hw_desc;
7545e166 1125 bool generic_gcm = (ivsize == GCM_AES_IV_SIZE);
f2147b88
HX
1126 unsigned int last;
1127
1128 init_aead_job(req, edesc, all_contig, encrypt);
7e0880b9 1129 append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
f2147b88
HX
1130
1131 /* BUG This should not be specific to generic GCM. */
1132 last = 0;
1133 if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
1134 last = FIFOLD_TYPE_LAST1;
1135
1136 /* Read GCM IV */
1137 append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
7545e166 1138 FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | GCM_AES_IV_SIZE | last);
f2147b88
HX
1139 /* Append Salt */
1140 if (!generic_gcm)
db57656b 1141 append_data(desc, ctx->key + ctx->cdata.keylen, 4);
f2147b88
HX
1142 /* Append IV */
1143 append_data(desc, req->iv, ivsize);
1144 /* End of blank commands */
1145}
1146
d6bbd4ee
HG
1147static void init_chachapoly_job(struct aead_request *req,
1148 struct aead_edesc *edesc, bool all_contig,
1149 bool encrypt)
1150{
1151 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1152 unsigned int ivsize = crypto_aead_ivsize(aead);
1153 unsigned int assoclen = req->assoclen;
1154 u32 *desc = edesc->hw_desc;
1155 u32 ctx_iv_off = 4;
1156
1157 init_aead_job(req, edesc, all_contig, encrypt);
1158
1159 if (ivsize != CHACHAPOLY_IV_SIZE) {
1160 /* IPsec specific: CONTEXT1[223:128] = {NONCE, IV} */
1161 ctx_iv_off += 4;
1162
1163 /*
1164 * The associated data comes already with the IV but we need
1165 * to skip it when we authenticate or encrypt...
1166 */
1167 assoclen -= ivsize;
1168 }
1169
1170 append_math_add_imm_u32(desc, REG3, ZERO, IMM, assoclen);
1171
1172 /*
1173 * For IPsec load the IV further in the same register.
1174 * For RFC7539 simply load the 12 bytes nonce in a single operation
1175 */
1176 append_load_as_imm(desc, req->iv, ivsize, LDST_CLASS_1_CCB |
1177 LDST_SRCDST_BYTE_CONTEXT |
1178 ctx_iv_off << LDST_OFFSET_SHIFT);
1179}
1180
479bcc7c
HX
1181static void init_authenc_job(struct aead_request *req,
1182 struct aead_edesc *edesc,
1183 bool all_contig, bool encrypt)
1acebad3
YK
1184{
1185 struct crypto_aead *aead = crypto_aead_reqtfm(req);
479bcc7c
HX
1186 struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
1187 struct caam_aead_alg, aead);
1188 unsigned int ivsize = crypto_aead_ivsize(aead);
1acebad3 1189 struct caam_ctx *ctx = crypto_aead_ctx(aead);
7e0880b9 1190 struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);
db57656b 1191 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
479bcc7c
HX
1192 OP_ALG_AAI_CTR_MOD128);
1193 const bool is_rfc3686 = alg->caam.rfc3686;
1acebad3 1194 u32 *desc = edesc->hw_desc;
479bcc7c 1195 u32 ivoffset = 0;
8e8ec596 1196
479bcc7c
HX
1197 /*
1198 * AES-CTR needs to load IV in CONTEXT1 reg
1199 * at an offset of 128bits (16bytes)
1200 * CONTEXT1[255:128] = IV
1201 */
1202 if (ctr_mode)
1203 ivoffset = 16;
1acebad3 1204
479bcc7c
HX
1205 /*
1206 * RFC3686 specific:
1207 * CONTEXT1[255:128] = {NONCE, IV, COUNTER}
1208 */
1209 if (is_rfc3686)
1210 ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;
8e8ec596 1211
479bcc7c 1212 init_aead_job(req, edesc, all_contig, encrypt);
1acebad3 1213
7e0880b9
HG
1214 /*
1215 * {REG3, DPOVRD} = assoclen, depending on whether MATH command supports
1216 * having DPOVRD as destination.
1217 */
1218 if (ctrlpriv->era < 3)
1219 append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
1220 else
1221 append_math_add_imm_u32(desc, DPOVRD, ZERO, IMM, req->assoclen);
1222
8b18e235 1223 if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
479bcc7c
HX
1224 append_load_as_imm(desc, req->iv, ivsize,
1225 LDST_CLASS_1_CCB |
1226 LDST_SRCDST_BYTE_CONTEXT |
1227 (ivoffset << LDST_OFFSET_SHIFT));
8e8ec596
KP
1228}
1229
acdca31d 1230/*
5ca7badb 1231 * Fill in skcipher job descriptor
acdca31d 1232 */
5ca7badb
HG
1233static void init_skcipher_job(struct skcipher_request *req,
1234 struct skcipher_edesc *edesc,
1235 const bool encrypt)
acdca31d 1236{
5ca7badb
HG
1237 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1238 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
6e005503 1239 struct device *jrdev = ctx->jrdev;
5ca7badb 1240 int ivsize = crypto_skcipher_ivsize(skcipher);
acdca31d 1241 u32 *desc = edesc->hw_desc;
5ca7badb 1242 u32 *sh_desc;
eaed71a4
IP
1243 u32 in_options = 0, out_options = 0;
1244 dma_addr_t src_dma, dst_dma, ptr;
1245 int len, sec4_sg_index = 0;
acdca31d 1246
6e005503
SH
1247 print_hex_dump_debug("presciv@"__stringify(__LINE__)": ",
1248 DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1);
1249 dev_dbg(jrdev, "asked=%d, cryptlen%d\n",
5ca7badb 1250 (int)edesc->src_nents > 1 ? 100 : req->cryptlen, req->cryptlen);
6e005503 1251
8a82451b 1252 caam_dump_sg("src @" __stringify(__LINE__)": ",
972b812b 1253 DUMP_PREFIX_ADDRESS, 16, 4, req->src,
5ca7badb
HG
1254 edesc->src_nents > 1 ? 100 : req->cryptlen, 1);
1255
1256 sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
1257 ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;
acdca31d
YK
1258
1259 len = desc_len(sh_desc);
1260 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
1261
eaed71a4
IP
1262 if (ivsize || edesc->mapped_src_nents > 1) {
1263 src_dma = edesc->sec4_sg_dma;
1264 sec4_sg_index = edesc->mapped_src_nents + !!ivsize;
1265 in_options = LDST_SGF;
1266 } else {
1267 src_dma = sg_dma_address(req->src);
1268 }
1269
1270 append_seq_in_ptr(desc, src_dma, req->cryptlen + ivsize, in_options);
acdca31d
YK
1271
1272 if (likely(req->src == req->dst)) {
eaed71a4
IP
1273 dst_dma = src_dma + !!ivsize * sizeof(struct sec4_sg_entry);
1274 out_options = in_options;
334d37c9 1275 } else if (!ivsize && edesc->mapped_dst_nents == 1) {
eaed71a4 1276 dst_dma = sg_dma_address(req->dst);
acdca31d 1277 } else {
eaed71a4
IP
1278 dst_dma = edesc->sec4_sg_dma + sec4_sg_index *
1279 sizeof(struct sec4_sg_entry);
1280 out_options = LDST_SGF;
acdca31d 1281 }
eaed71a4 1282
334d37c9 1283 append_seq_out_ptr(desc, dst_dma, req->cryptlen + ivsize, out_options);
acdca31d
YK
1284}
1285
8e8ec596 1286/*
1acebad3 1287 * allocate and map the aead extended descriptor
8e8ec596 1288 */
479bcc7c
HX
1289static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
1290 int desc_bytes, bool *all_contig_ptr,
1291 bool encrypt)
8e8ec596 1292{
0e479300 1293 struct crypto_aead *aead = crypto_aead_reqtfm(req);
8e8ec596
KP
1294 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1295 struct device *jrdev = ctx->jrdev;
019d62db
HG
1296 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
1297 GFP_KERNEL : GFP_ATOMIC;
838e0a89 1298 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
059d73ee 1299 int src_len, dst_len = 0;
0e479300 1300 struct aead_edesc *edesc;
fa0c92db 1301 int sec4_sg_index, sec4_sg_len, sec4_sg_bytes;
bbf9c893 1302 unsigned int authsize = ctx->authsize;
1acebad3 1303
bbf9c893 1304 if (unlikely(req->dst != req->src)) {
059d73ee
HG
1305 src_len = req->assoclen + req->cryptlen;
1306 dst_len = src_len + (encrypt ? authsize : (-authsize));
1307
1308 src_nents = sg_nents_for_len(req->src, src_len);
fd144d83
HG
1309 if (unlikely(src_nents < 0)) {
1310 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
059d73ee 1311 src_len);
fd144d83
HG
1312 return ERR_PTR(src_nents);
1313 }
1314
059d73ee 1315 dst_nents = sg_nents_for_len(req->dst, dst_len);
fd144d83
HG
1316 if (unlikely(dst_nents < 0)) {
1317 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
059d73ee 1318 dst_len);
fd144d83
HG
1319 return ERR_PTR(dst_nents);
1320 }
bbf9c893 1321 } else {
059d73ee
HG
1322 src_len = req->assoclen + req->cryptlen +
1323 (encrypt ? authsize : 0);
1324
1325 src_nents = sg_nents_for_len(req->src, src_len);
fd144d83
HG
1326 if (unlikely(src_nents < 0)) {
1327 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
059d73ee 1328 src_len);
fd144d83
HG
1329 return ERR_PTR(src_nents);
1330 }
f2147b88 1331 }
3ef8d945 1332
f2147b88 1333 if (likely(req->src == req->dst)) {
838e0a89
HG
1334 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1335 DMA_BIDIRECTIONAL);
1336 if (unlikely(!mapped_src_nents)) {
f2147b88 1337 dev_err(jrdev, "unable to map source\n");
f2147b88
HX
1338 return ERR_PTR(-ENOMEM);
1339 }
1340 } else {
fa0c92db
HG
1341 /* Cover also the case of null (zero length) input data */
1342 if (src_nents) {
838e0a89
HG
1343 mapped_src_nents = dma_map_sg(jrdev, req->src,
1344 src_nents, DMA_TO_DEVICE);
1345 if (unlikely(!mapped_src_nents)) {
fa0c92db 1346 dev_err(jrdev, "unable to map source\n");
fa0c92db
HG
1347 return ERR_PTR(-ENOMEM);
1348 }
838e0a89
HG
1349 } else {
1350 mapped_src_nents = 0;
f2147b88
HX
1351 }
1352
763069ba
HG
1353 /* Cover also the case of null (zero length) output data */
1354 if (dst_nents) {
1355 mapped_dst_nents = dma_map_sg(jrdev, req->dst,
1356 dst_nents,
1357 DMA_FROM_DEVICE);
1358 if (unlikely(!mapped_dst_nents)) {
1359 dev_err(jrdev, "unable to map destination\n");
1360 dma_unmap_sg(jrdev, req->src, src_nents,
1361 DMA_TO_DEVICE);
1362 return ERR_PTR(-ENOMEM);
1363 }
1364 } else {
1365 mapped_dst_nents = 0;
f2147b88
HX
1366 }
1367 }
1368
a5e5c133
HG
1369 /*
1370 * HW reads 4 S/G entries at a time; make sure the reads don't go beyond
1371 * the end of the table by allocating more S/G entries.
1372 */
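	/*
	 * (pad_sg_nents() below is expected to round the entry count up,
	 * presumably to a multiple of that 4-entry read, so the last burst
	 * cannot run past the allocated table.)
	 */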
838e0a89 1373 sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0;
a5e5c133
HG
1374 if (mapped_dst_nents > 1)
1375 sec4_sg_len += pad_sg_nents(mapped_dst_nents);
1376 else
1377 sec4_sg_len = pad_sg_nents(sec4_sg_len);
1378
838e0a89
HG
1379 sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);
1380
1381 /* allocate space for base edesc and hw desc commands, link tables */
1382 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
1383 GFP_DMA | flags);
1384 if (!edesc) {
1385 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
cf5448b5 1386 0, 0, 0);
838e0a89
HG
1387 return ERR_PTR(-ENOMEM);
1388 }
1389
8e8ec596
KP
1390 edesc->src_nents = src_nents;
1391 edesc->dst_nents = dst_nents;
ba4cf71b
IP
1392 edesc->mapped_src_nents = mapped_src_nents;
1393 edesc->mapped_dst_nents = mapped_dst_nents;
a299c837
YK
1394 edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
1395 desc_bytes;
838e0a89 1396 *all_contig_ptr = !(mapped_src_nents > 1);
1acebad3 1397
a299c837 1398 sec4_sg_index = 0;
838e0a89 1399 if (mapped_src_nents > 1) {
059d73ee 1400 sg_to_sec4_sg_last(req->src, src_len,
838e0a89
HG
1401 edesc->sec4_sg + sec4_sg_index, 0);
1402 sec4_sg_index += mapped_src_nents;
1acebad3 1403 }
838e0a89 1404 if (mapped_dst_nents > 1) {
059d73ee 1405 sg_to_sec4_sg_last(req->dst, dst_len,
a299c837 1406 edesc->sec4_sg + sec4_sg_index, 0);
1acebad3 1407 }
f2147b88
HX
1408
1409 if (!sec4_sg_bytes)
1410 return edesc;
1411
1da2be33
RG
1412 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
1413 sec4_sg_bytes, DMA_TO_DEVICE);
ce572085
HG
1414 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
1415 dev_err(jrdev, "unable to map S/G table\n");
f2147b88
HX
1416 aead_unmap(jrdev, edesc, req);
1417 kfree(edesc);
ce572085
HG
1418 return ERR_PTR(-ENOMEM);
1419 }
8e8ec596 1420
f2147b88
HX
1421 edesc->sec4_sg_bytes = sec4_sg_bytes;
1422
8e8ec596
KP
1423 return edesc;
1424}
1425
f2147b88 1426static int gcm_encrypt(struct aead_request *req)
8e8ec596 1427{
0e479300
YK
1428 struct aead_edesc *edesc;
1429 struct crypto_aead *aead = crypto_aead_reqtfm(req);
8e8ec596
KP
1430 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1431 struct device *jrdev = ctx->jrdev;
1acebad3 1432 bool all_contig;
8e8ec596 1433 u32 *desc;
1acebad3
YK
1434 int ret = 0;
1435
8e8ec596 1436 /* allocate extended descriptor */
f2147b88 1437 edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, true);
8e8ec596
KP
1438 if (IS_ERR(edesc))
1439 return PTR_ERR(edesc);
1440
1acebad3 1441 /* Create and submit job descriptor */
f2147b88 1442 init_gcm_job(req, edesc, all_contig, true);
6e005503
SH
1443
1444 print_hex_dump_debug("aead jobdesc@"__stringify(__LINE__)": ",
1445 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1446 desc_bytes(edesc->hw_desc), 1);
8e8ec596 1447
1acebad3
YK
1448 desc = edesc->hw_desc;
1449 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
1450 if (!ret) {
1451 ret = -EINPROGRESS;
1452 } else {
1453 aead_unmap(jrdev, edesc, req);
1454 kfree(edesc);
1455 }
8e8ec596 1456
1acebad3 1457 return ret;
8e8ec596
KP
1458}
1459
d6bbd4ee
HG
1460static int chachapoly_encrypt(struct aead_request *req)
1461{
1462 struct aead_edesc *edesc;
1463 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1464 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1465 struct device *jrdev = ctx->jrdev;
1466 bool all_contig;
1467 u32 *desc;
1468 int ret;
1469
1470 edesc = aead_edesc_alloc(req, CHACHAPOLY_DESC_JOB_IO_LEN, &all_contig,
1471 true);
1472 if (IS_ERR(edesc))
1473 return PTR_ERR(edesc);
1474
1475 desc = edesc->hw_desc;
1476
1477 init_chachapoly_job(req, edesc, all_contig, true);
1478 print_hex_dump_debug("chachapoly jobdesc@" __stringify(__LINE__)": ",
1479 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
1480 1);
1481
1482 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
1483 if (!ret) {
1484 ret = -EINPROGRESS;
1485 } else {
1486 aead_unmap(jrdev, edesc, req);
1487 kfree(edesc);
1488 }
1489
1490 return ret;
1491}
1492
1493static int chachapoly_decrypt(struct aead_request *req)
1494{
1495 struct aead_edesc *edesc;
1496 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1497 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1498 struct device *jrdev = ctx->jrdev;
1499 bool all_contig;
1500 u32 *desc;
1501 int ret;
1502
1503 edesc = aead_edesc_alloc(req, CHACHAPOLY_DESC_JOB_IO_LEN, &all_contig,
1504 false);
1505 if (IS_ERR(edesc))
1506 return PTR_ERR(edesc);
1507
1508 desc = edesc->hw_desc;
1509
1510 init_chachapoly_job(req, edesc, all_contig, false);
1511 print_hex_dump_debug("chachapoly jobdesc@" __stringify(__LINE__)": ",
1512 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
1513 1);
1514
1515 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
1516 if (!ret) {
1517 ret = -EINPROGRESS;
1518 } else {
1519 aead_unmap(jrdev, edesc, req);
1520 kfree(edesc);
1521 }
1522
1523 return ret;
1524}
1525
46218750
HX
1526static int ipsec_gcm_encrypt(struct aead_request *req)
1527{
1528 if (req->assoclen < 8)
1529 return -EINVAL;
1530
1531 return gcm_encrypt(req);
1532}
1533
479bcc7c 1534static int aead_encrypt(struct aead_request *req)
f2147b88
HX
1535{
1536 struct aead_edesc *edesc;
1537 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1538 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1539 struct device *jrdev = ctx->jrdev;
1540 bool all_contig;
1541 u32 *desc;
1542 int ret = 0;
1543
1544 /* allocate extended descriptor */
479bcc7c
HX
1545 edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
1546 &all_contig, true);
f2147b88
HX
1547 if (IS_ERR(edesc))
1548 return PTR_ERR(edesc);
1549
1550 /* Create and submit job descriptor */
479bcc7c 1551 init_authenc_job(req, edesc, all_contig, true);
6e005503
SH
1552
1553 print_hex_dump_debug("aead jobdesc@"__stringify(__LINE__)": ",
1554 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1555 desc_bytes(edesc->hw_desc), 1);
f2147b88
HX
1556
1557 desc = edesc->hw_desc;
479bcc7c 1558 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
f2147b88
HX
1559 if (!ret) {
1560 ret = -EINPROGRESS;
1561 } else {
479bcc7c 1562 aead_unmap(jrdev, edesc, req);
f2147b88
HX
1563 kfree(edesc);
1564 }
1565
1566 return ret;
1567}
1568
1569static int gcm_decrypt(struct aead_request *req)
1570{
1571 struct aead_edesc *edesc;
1572 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1573 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1574 struct device *jrdev = ctx->jrdev;
1575 bool all_contig;
1576 u32 *desc;
1577 int ret = 0;
1578
1579 /* allocate extended descriptor */
1580 edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, false);
1581 if (IS_ERR(edesc))
1582 return PTR_ERR(edesc);
1583
1584 /* Create and submit job descriptor*/
1585 init_gcm_job(req, edesc, all_contig, false);
6e005503
SH
1586
1587 print_hex_dump_debug("aead jobdesc@"__stringify(__LINE__)": ",
1588 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1589 desc_bytes(edesc->hw_desc), 1);
f2147b88
HX
1590
1591 desc = edesc->hw_desc;
1592 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
1593 if (!ret) {
1594 ret = -EINPROGRESS;
1595 } else {
1596 aead_unmap(jrdev, edesc, req);
1597 kfree(edesc);
1598 }
1599
1600 return ret;
1601}
1602
46218750
HX
1603static int ipsec_gcm_decrypt(struct aead_request *req)
1604{
1605 if (req->assoclen < 8)
1606 return -EINVAL;
1607
1608 return gcm_decrypt(req);
1609}
1610
479bcc7c 1611static int aead_decrypt(struct aead_request *req)
8e8ec596 1612{
1acebad3 1613 struct aead_edesc *edesc;
8e8ec596 1614 struct crypto_aead *aead = crypto_aead_reqtfm(req);
8e8ec596
KP
1615 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1616 struct device *jrdev = ctx->jrdev;
1acebad3 1617 bool all_contig;
8e8ec596 1618 u32 *desc;
1acebad3 1619 int ret = 0;
8e8ec596 1620
8a82451b 1621 caam_dump_sg("dec src@" __stringify(__LINE__)": ",
972b812b
HG
1622 DUMP_PREFIX_ADDRESS, 16, 4, req->src,
1623 req->assoclen + req->cryptlen, 1);
5ecf8ef9 1624
8e8ec596 1625 /* allocate extended descriptor */
479bcc7c
HX
1626 edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
1627 &all_contig, false);
8e8ec596
KP
1628 if (IS_ERR(edesc))
1629 return PTR_ERR(edesc);
1630
1acebad3 1631 /* Create and submit job descriptor*/
479bcc7c 1632 init_authenc_job(req, edesc, all_contig, false);
6e005503
SH
1633
1634 print_hex_dump_debug("aead jobdesc@"__stringify(__LINE__)": ",
1635 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1636 desc_bytes(edesc->hw_desc), 1);
1acebad3 1637
8e8ec596 1638 desc = edesc->hw_desc;
479bcc7c 1639 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
1acebad3
YK
1640 if (!ret) {
1641 ret = -EINPROGRESS;
1642 } else {
479bcc7c 1643 aead_unmap(jrdev, edesc, req);
1acebad3
YK
1644 kfree(edesc);
1645 }
8e8ec596 1646
1acebad3
YK
1647 return ret;
1648}
8e8ec596 1649
acdca31d 1650/*
5ca7badb 1651 * allocate and map the skcipher extended descriptor for skcipher
acdca31d 1652 */
5ca7badb
HG
1653static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req,
1654 int desc_bytes)
acdca31d 1655{
5ca7badb
HG
1656 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1657 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
acdca31d 1658 struct device *jrdev = ctx->jrdev;
42cfcafb 1659 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
acdca31d 1660 GFP_KERNEL : GFP_ATOMIC;
838e0a89 1661 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
5ca7badb 1662 struct skcipher_edesc *edesc;
eaed71a4 1663 dma_addr_t iv_dma = 0;
115957bb 1664 u8 *iv;
5ca7badb 1665 int ivsize = crypto_skcipher_ivsize(skcipher);
838e0a89 1666 int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;
acdca31d 1667
5ca7badb 1668 src_nents = sg_nents_for_len(req->src, req->cryptlen);
fd144d83
HG
1669 if (unlikely(src_nents < 0)) {
1670 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
5ca7badb 1671 req->cryptlen);
fd144d83
HG
1672 return ERR_PTR(src_nents);
1673 }
acdca31d 1674
fd144d83 1675 if (req->dst != req->src) {
5ca7badb 1676 dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
fd144d83
HG
1677 if (unlikely(dst_nents < 0)) {
1678 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
5ca7badb 1679 req->cryptlen);
fd144d83
HG
1680 return ERR_PTR(dst_nents);
1681 }
1682 }
acdca31d
YK
1683
1684 if (likely(req->src == req->dst)) {
838e0a89
HG
1685 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1686 DMA_BIDIRECTIONAL);
1687 if (unlikely(!mapped_src_nents)) {
c73e36e8
HG
1688 dev_err(jrdev, "unable to map source\n");
1689 return ERR_PTR(-ENOMEM);
1690 }
acdca31d 1691 } else {
838e0a89
HG
1692 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1693 DMA_TO_DEVICE);
1694 if (unlikely(!mapped_src_nents)) {
c73e36e8
HG
1695 dev_err(jrdev, "unable to map source\n");
1696 return ERR_PTR(-ENOMEM);
1697 }
838e0a89
HG
1698 mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
1699 DMA_FROM_DEVICE);
1700 if (unlikely(!mapped_dst_nents)) {
c73e36e8 1701 dev_err(jrdev, "unable to map destination\n");
fa0c92db 1702 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
c73e36e8
HG
1703 return ERR_PTR(-ENOMEM);
1704 }
acdca31d
YK
1705 }
1706
eaed71a4
IP
1707 if (!ivsize && mapped_src_nents == 1)
1708 sec4_sg_ents = 0; // no need for an input hw s/g table
1709 else
1710 sec4_sg_ents = mapped_src_nents + !!ivsize;
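	/* destination S/G entries (if any) start right after the input entries */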
fa0c92db 1711 dst_sg_idx = sec4_sg_ents;
a5e5c133
HG
1712
1713 /*
334d37c9
HG
1714 * Input, output HW S/G tables: [IV, src][dst, IV]
1715 * IV entries point to the same buffer
1716 * If src == dst, S/G entries are reused (S/G tables overlap)
1717 *
a5e5c133
HG
1718 * HW reads 4 S/G entries at a time; make sure the reads don't go beyond
1719 * the end of the table by allocating more S/G entries. Logic:
334d37c9 1720 * if (output S/G)
a5e5c133 1721 * pad output S/G, if needed
a5e5c133
HG
1722 * else if (input S/G) ...
1723 * pad input S/G, if needed
1724 */
334d37c9
HG
1725 if (ivsize || mapped_dst_nents > 1) {
1726 if (req->src == req->dst)
1727 sec4_sg_ents = !!ivsize + pad_sg_nents(sec4_sg_ents);
1728 else
1729 sec4_sg_ents += pad_sg_nents(mapped_dst_nents +
1730 !!ivsize);
1731 } else {
a5e5c133 1732 sec4_sg_ents = pad_sg_nents(sec4_sg_ents);
334d37c9 1733 }
a5e5c133 1734
fa0c92db 1735 sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);
acdca31d 1736
115957bb
HG
1737 /*
1738 * allocate space for base edesc and hw desc commands, link tables, IV
1739 */
1740 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes + ivsize,
dde20ae9 1741 GFP_DMA | flags);
acdca31d
YK
1742 if (!edesc) {
1743 dev_err(jrdev, "could not allocate extended descriptor\n");
115957bb 1744 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
cf5448b5 1745 0, 0, 0);
acdca31d
YK
1746 return ERR_PTR(-ENOMEM);
1747 }
1748
1749 edesc->src_nents = src_nents;
1750 edesc->dst_nents = dst_nents;
ba4cf71b
IP
1751 edesc->mapped_src_nents = mapped_src_nents;
1752 edesc->mapped_dst_nents = mapped_dst_nents;
a299c837 1753 edesc->sec4_sg_bytes = sec4_sg_bytes;
13cc6f48
HG
1754 edesc->sec4_sg = (struct sec4_sg_entry *)((u8 *)edesc->hw_desc +
1755 desc_bytes);
acdca31d 1756
115957bb 1757 /* Make sure IV is located in a DMAable area */
eaed71a4 1758 if (ivsize) {
334d37c9 1759 iv = (u8 *)edesc->sec4_sg + sec4_sg_bytes;
eaed71a4
IP
1760 memcpy(iv, req->iv, ivsize);
1761
334d37c9 1762 iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_BIDIRECTIONAL);
eaed71a4
IP
1763 if (dma_mapping_error(jrdev, iv_dma)) {
1764 dev_err(jrdev, "unable to map IV\n");
1765 caam_unmap(jrdev, req->src, req->dst, src_nents,
1766 dst_nents, 0, 0, 0, 0);
1767 kfree(edesc);
1768 return ERR_PTR(-ENOMEM);
1769 }
115957bb 1770
eaed71a4 1771 dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
acdca31d 1772 }
eaed71a4 1773 if (dst_sg_idx)
334d37c9
HG
1774 sg_to_sec4_sg(req->src, req->cryptlen, edesc->sec4_sg +
1775 !!ivsize, 0);
115957bb 1776
334d37c9
HG
1777 if (req->src != req->dst && (ivsize || mapped_dst_nents > 1))
1778 sg_to_sec4_sg(req->dst, req->cryptlen, edesc->sec4_sg +
1779 dst_sg_idx, 0);
1780
1781 if (ivsize)
1782 dma_to_sec4_sg_one(edesc->sec4_sg + dst_sg_idx +
1783 mapped_dst_nents, iv_dma, ivsize, 0);
1784
1785 if (ivsize || mapped_dst_nents > 1)
1786 sg_to_sec4_set_last(edesc->sec4_sg + dst_sg_idx +
1787 mapped_dst_nents);
acdca31d 1788
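	/* DMA-map the input/output HW S/G tables, if any were built */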
eaed71a4
IP
1789 if (sec4_sg_bytes) {
1790 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
1791 sec4_sg_bytes,
1792 DMA_TO_DEVICE);
1793 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
1794 dev_err(jrdev, "unable to map S/G table\n");
1795 caam_unmap(jrdev, req->src, req->dst, src_nents,
1796 dst_nents, iv_dma, ivsize, 0, 0);
1797 kfree(edesc);
1798 return ERR_PTR(-ENOMEM);
1799 }
ce572085
HG
1800 }
1801
acdca31d
YK
1802 edesc->iv_dma = iv_dma;
1803
6e005503
SH
1804 print_hex_dump_debug("skcipher sec4_sg@" __stringify(__LINE__)": ",
1805 DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
1806 sec4_sg_bytes, 1);
acdca31d 1807
acdca31d
YK
1808 return edesc;
1809}
1810
5ca7badb 1811static int skcipher_encrypt(struct skcipher_request *req)
acdca31d 1812{
5ca7badb
HG
1813 struct skcipher_edesc *edesc;
1814 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1815 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
acdca31d 1816 struct device *jrdev = ctx->jrdev;
acdca31d
YK
1817 u32 *desc;
1818 int ret = 0;
1819
1820 /* allocate extended descriptor */
5ca7badb 1821 edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
acdca31d
YK
1822 if (IS_ERR(edesc))
1823 return PTR_ERR(edesc);
1824
1825 /* Create and submit job descriptor */
5ca7badb 1826 init_skcipher_job(req, edesc, true);
6e005503
SH
1827
1828 print_hex_dump_debug("skcipher jobdesc@" __stringify(__LINE__)": ",
1829 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1830 desc_bytes(edesc->hw_desc), 1);
1831
acdca31d 1832 desc = edesc->hw_desc;
5ca7badb 1833 ret = caam_jr_enqueue(jrdev, desc, skcipher_encrypt_done, req);
acdca31d
YK
1834
1835 if (!ret) {
1836 ret = -EINPROGRESS;
1837 } else {
5ca7badb 1838 skcipher_unmap(jrdev, edesc, req);
acdca31d
YK
1839 kfree(edesc);
1840 }
1841
1842 return ret;
1843}
1844
5ca7badb 1845static int skcipher_decrypt(struct skcipher_request *req)
acdca31d 1846{
5ca7badb
HG
1847 struct skcipher_edesc *edesc;
1848 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1849 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
acdca31d 1850 struct device *jrdev = ctx->jrdev;
acdca31d
YK
1851 u32 *desc;
1852 int ret = 0;
1853
1854 /* allocate extended descriptor */
5ca7badb 1855 edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
acdca31d
YK
1856 if (IS_ERR(edesc))
1857 return PTR_ERR(edesc);
1858
1859 /* Create and submit job descriptor */
5ca7badb 1860 init_skcipher_job(req, edesc, false);
acdca31d 1861 desc = edesc->hw_desc;
6e005503
SH
1862
1863 print_hex_dump_debug("skcipher jobdesc@" __stringify(__LINE__)": ",
1864 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1865 desc_bytes(edesc->hw_desc), 1);
acdca31d 1866
5ca7badb 1867 ret = caam_jr_enqueue(jrdev, desc, skcipher_decrypt_done, req);
acdca31d
YK
1868 if (!ret) {
1869 ret = -EINPROGRESS;
1870 } else {
5ca7badb 1871 skcipher_unmap(jrdev, edesc, req);
acdca31d
YK
1872 kfree(edesc);
1873 }
1874
1875 return ret;
1876}
1877
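/*
 * skcipher algorithms handled by this driver; which entries are actually
 * registered depends on the hardware capabilities probed in caam_algapi_init()
 */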
5ca7badb 1878static struct caam_skcipher_alg driver_algs[] = {
ae4a825f 1879 {
5ca7badb
HG
1880 .skcipher = {
1881 .base = {
1882 .cra_name = "cbc(aes)",
1883 .cra_driver_name = "cbc-aes-caam",
1884 .cra_blocksize = AES_BLOCK_SIZE,
1885 },
1886 .setkey = skcipher_setkey,
1887 .encrypt = skcipher_encrypt,
1888 .decrypt = skcipher_decrypt,
479bcc7c
HX
1889 .min_keysize = AES_MIN_KEY_SIZE,
1890 .max_keysize = AES_MAX_KEY_SIZE,
1891 .ivsize = AES_BLOCK_SIZE,
5ca7badb
HG
1892 },
1893 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
479bcc7c
HX
1894 },
1895 {
5ca7badb
HG
1896 .skcipher = {
1897 .base = {
1898 .cra_name = "cbc(des3_ede)",
1899 .cra_driver_name = "cbc-3des-caam",
1900 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1901 },
cf64e495 1902 .setkey = des_skcipher_setkey,
5ca7badb
HG
1903 .encrypt = skcipher_encrypt,
1904 .decrypt = skcipher_decrypt,
479bcc7c
HX
1905 .min_keysize = DES3_EDE_KEY_SIZE,
1906 .max_keysize = DES3_EDE_KEY_SIZE,
1907 .ivsize = DES3_EDE_BLOCK_SIZE,
5ca7badb
HG
1908 },
1909 .caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
479bcc7c
HX
1910 },
1911 {
5ca7badb
HG
1912 .skcipher = {
1913 .base = {
1914 .cra_name = "cbc(des)",
1915 .cra_driver_name = "cbc-des-caam",
1916 .cra_blocksize = DES_BLOCK_SIZE,
1917 },
cf64e495 1918 .setkey = des_skcipher_setkey,
5ca7badb
HG
1919 .encrypt = skcipher_encrypt,
1920 .decrypt = skcipher_decrypt,
479bcc7c
HX
1921 .min_keysize = DES_KEY_SIZE,
1922 .max_keysize = DES_KEY_SIZE,
1923 .ivsize = DES_BLOCK_SIZE,
5ca7badb
HG
1924 },
1925 .caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
479bcc7c
HX
1926 },
1927 {
5ca7badb
HG
1928 .skcipher = {
1929 .base = {
1930 .cra_name = "ctr(aes)",
1931 .cra_driver_name = "ctr-aes-caam",
1932 .cra_blocksize = 1,
1933 },
1934 .setkey = skcipher_setkey,
1935 .encrypt = skcipher_encrypt,
1936 .decrypt = skcipher_decrypt,
479bcc7c
HX
1937 .min_keysize = AES_MIN_KEY_SIZE,
1938 .max_keysize = AES_MAX_KEY_SIZE,
1939 .ivsize = AES_BLOCK_SIZE,
5ca7badb
HG
1940 .chunksize = AES_BLOCK_SIZE,
1941 },
1942 .caam.class1_alg_type = OP_ALG_ALGSEL_AES |
1943 OP_ALG_AAI_CTR_MOD128,
479bcc7c
HX
1944 },
1945 {
5ca7badb
HG
1946 .skcipher = {
1947 .base = {
1948 .cra_name = "rfc3686(ctr(aes))",
1949 .cra_driver_name = "rfc3686-ctr-aes-caam",
1950 .cra_blocksize = 1,
1951 },
1952 .setkey = skcipher_setkey,
1953 .encrypt = skcipher_encrypt,
1954 .decrypt = skcipher_decrypt,
479bcc7c
HX
1955 .min_keysize = AES_MIN_KEY_SIZE +
1956 CTR_RFC3686_NONCE_SIZE,
1957 .max_keysize = AES_MAX_KEY_SIZE +
1958 CTR_RFC3686_NONCE_SIZE,
1959 .ivsize = CTR_RFC3686_IV_SIZE,
5ca7badb
HG
1960 .chunksize = AES_BLOCK_SIZE,
1961 },
1962 .caam = {
1963 .class1_alg_type = OP_ALG_ALGSEL_AES |
1964 OP_ALG_AAI_CTR_MOD128,
1965 .rfc3686 = true,
1966 },
c6415a60
CV
1967 },
1968 {
5ca7badb
HG
1969 .skcipher = {
1970 .base = {
1971 .cra_name = "xts(aes)",
1972 .cra_driver_name = "xts-aes-caam",
1973 .cra_blocksize = AES_BLOCK_SIZE,
1974 },
1975 .setkey = xts_skcipher_setkey,
1976 .encrypt = skcipher_encrypt,
1977 .decrypt = skcipher_decrypt,
c6415a60
CV
1978 .min_keysize = 2 * AES_MIN_KEY_SIZE,
1979 .max_keysize = 2 * AES_MAX_KEY_SIZE,
1980 .ivsize = AES_BLOCK_SIZE,
5ca7badb
HG
1981 },
1982 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
c6415a60 1983 },
eaed71a4
IP
1984 {
1985 .skcipher = {
1986 .base = {
1987 .cra_name = "ecb(des)",
1988 .cra_driver_name = "ecb-des-caam",
1989 .cra_blocksize = DES_BLOCK_SIZE,
1990 },
1991 .setkey = des_skcipher_setkey,
1992 .encrypt = skcipher_encrypt,
1993 .decrypt = skcipher_decrypt,
1994 .min_keysize = DES_KEY_SIZE,
1995 .max_keysize = DES_KEY_SIZE,
1996 },
1997 .caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_ECB,
1998 },
1999 {
2000 .skcipher = {
2001 .base = {
2002 .cra_name = "ecb(aes)",
2003 .cra_driver_name = "ecb-aes-caam",
2004 .cra_blocksize = AES_BLOCK_SIZE,
2005 },
2006 .setkey = skcipher_setkey,
2007 .encrypt = skcipher_encrypt,
2008 .decrypt = skcipher_decrypt,
2009 .min_keysize = AES_MIN_KEY_SIZE,
2010 .max_keysize = AES_MAX_KEY_SIZE,
2011 },
2012 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_ECB,
2013 },
2014 {
2015 .skcipher = {
2016 .base = {
2017 .cra_name = "ecb(des3_ede)",
2018 .cra_driver_name = "ecb-des3-caam",
2019 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2020 },
2021 .setkey = des_skcipher_setkey,
2022 .encrypt = skcipher_encrypt,
2023 .decrypt = skcipher_decrypt,
2024 .min_keysize = DES3_EDE_KEY_SIZE,
2025 .max_keysize = DES3_EDE_KEY_SIZE,
2026 },
2027 .caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_ECB,
2028 },
2029 {
2030 .skcipher = {
2031 .base = {
2032 .cra_name = "ecb(arc4)",
2033 .cra_driver_name = "ecb-arc4-caam",
2034 .cra_blocksize = ARC4_BLOCK_SIZE,
2035 },
2036 .setkey = skcipher_setkey,
2037 .encrypt = skcipher_encrypt,
2038 .decrypt = skcipher_decrypt,
2039 .min_keysize = ARC4_MIN_KEY_SIZE,
2040 .max_keysize = ARC4_MAX_KEY_SIZE,
2041 },
2042 .caam.class1_alg_type = OP_ALG_ALGSEL_ARC4 | OP_ALG_AAI_ECB,
2043 },
479bcc7c
HX
2044};
2045
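/*
 * AEAD algorithms; entries with .nodkp set do not use the Derived Key
 * Protocol (caam_aead_init() passes !nodkp as the uses_dkp argument)
 */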
2046static struct caam_aead_alg driver_aeads[] = {
2047 {
2048 .aead = {
2049 .base = {
2050 .cra_name = "rfc4106(gcm(aes))",
2051 .cra_driver_name = "rfc4106-gcm-aes-caam",
2052 .cra_blocksize = 1,
2053 },
2054 .setkey = rfc4106_setkey,
2055 .setauthsize = rfc4106_setauthsize,
2056 .encrypt = ipsec_gcm_encrypt,
2057 .decrypt = ipsec_gcm_decrypt,
7545e166 2058 .ivsize = GCM_RFC4106_IV_SIZE,
479bcc7c
HX
2059 .maxauthsize = AES_BLOCK_SIZE,
2060 },
2061 .caam = {
2062 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
24586b5f 2063 .nodkp = true,
479bcc7c
HX
2064 },
2065 },
2066 {
2067 .aead = {
2068 .base = {
2069 .cra_name = "rfc4543(gcm(aes))",
2070 .cra_driver_name = "rfc4543-gcm-aes-caam",
2071 .cra_blocksize = 1,
2072 },
2073 .setkey = rfc4543_setkey,
2074 .setauthsize = rfc4543_setauthsize,
2075 .encrypt = ipsec_gcm_encrypt,
2076 .decrypt = ipsec_gcm_decrypt,
7545e166 2077 .ivsize = GCM_RFC4543_IV_SIZE,
479bcc7c
HX
2078 .maxauthsize = AES_BLOCK_SIZE,
2079 },
2080 .caam = {
2081 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
24586b5f 2082 .nodkp = true,
479bcc7c
HX
2083 },
2084 },
2085 /* Galois Counter Mode */
2086 {
2087 .aead = {
2088 .base = {
2089 .cra_name = "gcm(aes)",
2090 .cra_driver_name = "gcm-aes-caam",
2091 .cra_blocksize = 1,
2092 },
2093 .setkey = gcm_setkey,
2094 .setauthsize = gcm_setauthsize,
2095 .encrypt = gcm_encrypt,
2096 .decrypt = gcm_decrypt,
7545e166 2097 .ivsize = GCM_AES_IV_SIZE,
479bcc7c
HX
2098 .maxauthsize = AES_BLOCK_SIZE,
2099 },
2100 .caam = {
2101 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
24586b5f 2102 .nodkp = true,
479bcc7c
HX
2103 },
2104 },
2105 /* single-pass ipsec_esp descriptor */
2106 {
2107 .aead = {
2108 .base = {
2109 .cra_name = "authenc(hmac(md5),"
2110 "ecb(cipher_null))",
2111 .cra_driver_name = "authenc-hmac-md5-"
2112 "ecb-cipher_null-caam",
2113 .cra_blocksize = NULL_BLOCK_SIZE,
2114 },
2115 .setkey = aead_setkey,
2116 .setauthsize = aead_setauthsize,
2117 .encrypt = aead_encrypt,
2118 .decrypt = aead_decrypt,
ae4a825f 2119 .ivsize = NULL_IV_SIZE,
479bcc7c
HX
2120 .maxauthsize = MD5_DIGEST_SIZE,
2121 },
2122 .caam = {
2123 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2124 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2125 },
2126 },
2127 {
2128 .aead = {
2129 .base = {
2130 .cra_name = "authenc(hmac(sha1),"
2131 "ecb(cipher_null))",
2132 .cra_driver_name = "authenc-hmac-sha1-"
2133 "ecb-cipher_null-caam",
2134 .cra_blocksize = NULL_BLOCK_SIZE,
ae4a825f 2135 },
479bcc7c
HX
2136 .setkey = aead_setkey,
2137 .setauthsize = aead_setauthsize,
2138 .encrypt = aead_encrypt,
2139 .decrypt = aead_decrypt,
2140 .ivsize = NULL_IV_SIZE,
2141 .maxauthsize = SHA1_DIGEST_SIZE,
2142 },
2143 .caam = {
2144 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2145 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2146 },
ae4a825f
HG
2147 },
2148 {
479bcc7c
HX
2149 .aead = {
2150 .base = {
2151 .cra_name = "authenc(hmac(sha224),"
2152 "ecb(cipher_null))",
2153 .cra_driver_name = "authenc-hmac-sha224-"
2154 "ecb-cipher_null-caam",
2155 .cra_blocksize = NULL_BLOCK_SIZE,
2156 },
ae4a825f
HG
2157 .setkey = aead_setkey,
2158 .setauthsize = aead_setauthsize,
479bcc7c
HX
2159 .encrypt = aead_encrypt,
2160 .decrypt = aead_decrypt,
ae4a825f
HG
2161 .ivsize = NULL_IV_SIZE,
2162 .maxauthsize = SHA224_DIGEST_SIZE,
479bcc7c
HX
2163 },
2164 .caam = {
2165 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2166 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2167 },
ae4a825f
HG
2168 },
2169 {
479bcc7c
HX
2170 .aead = {
2171 .base = {
2172 .cra_name = "authenc(hmac(sha256),"
2173 "ecb(cipher_null))",
2174 .cra_driver_name = "authenc-hmac-sha256-"
2175 "ecb-cipher_null-caam",
2176 .cra_blocksize = NULL_BLOCK_SIZE,
2177 },
ae4a825f
HG
2178 .setkey = aead_setkey,
2179 .setauthsize = aead_setauthsize,
479bcc7c
HX
2180 .encrypt = aead_encrypt,
2181 .decrypt = aead_decrypt,
ae4a825f
HG
2182 .ivsize = NULL_IV_SIZE,
2183 .maxauthsize = SHA256_DIGEST_SIZE,
479bcc7c
HX
2184 },
2185 .caam = {
2186 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2187 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2188 },
ae4a825f
HG
2189 },
2190 {
479bcc7c
HX
2191 .aead = {
2192 .base = {
2193 .cra_name = "authenc(hmac(sha384),"
2194 "ecb(cipher_null))",
2195 .cra_driver_name = "authenc-hmac-sha384-"
2196 "ecb-cipher_null-caam",
2197 .cra_blocksize = NULL_BLOCK_SIZE,
2198 },
ae4a825f
HG
2199 .setkey = aead_setkey,
2200 .setauthsize = aead_setauthsize,
479bcc7c
HX
2201 .encrypt = aead_encrypt,
2202 .decrypt = aead_decrypt,
ae4a825f
HG
2203 .ivsize = NULL_IV_SIZE,
2204 .maxauthsize = SHA384_DIGEST_SIZE,
479bcc7c
HX
2205 },
2206 .caam = {
2207 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2208 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2209 },
ae4a825f
HG
2210 },
2211 {
479bcc7c
HX
2212 .aead = {
2213 .base = {
2214 .cra_name = "authenc(hmac(sha512),"
2215 "ecb(cipher_null))",
2216 .cra_driver_name = "authenc-hmac-sha512-"
2217 "ecb-cipher_null-caam",
2218 .cra_blocksize = NULL_BLOCK_SIZE,
2219 },
ae4a825f
HG
2220 .setkey = aead_setkey,
2221 .setauthsize = aead_setauthsize,
479bcc7c
HX
2222 .encrypt = aead_encrypt,
2223 .decrypt = aead_decrypt,
ae4a825f
HG
2224 .ivsize = NULL_IV_SIZE,
2225 .maxauthsize = SHA512_DIGEST_SIZE,
479bcc7c
HX
2226 },
2227 .caam = {
2228 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2229 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2230 },
2231 },
2232 {
2233 .aead = {
2234 .base = {
2235 .cra_name = "authenc(hmac(md5),cbc(aes))",
2236 .cra_driver_name = "authenc-hmac-md5-"
2237 "cbc-aes-caam",
2238 .cra_blocksize = AES_BLOCK_SIZE,
ae4a825f 2239 },
479bcc7c
HX
2240 .setkey = aead_setkey,
2241 .setauthsize = aead_setauthsize,
2242 .encrypt = aead_encrypt,
2243 .decrypt = aead_decrypt,
2244 .ivsize = AES_BLOCK_SIZE,
2245 .maxauthsize = MD5_DIGEST_SIZE,
2246 },
2247 .caam = {
2248 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2249 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2250 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2251 },
ae4a825f 2252 },
8b4d43a4 2253 {
479bcc7c
HX
2254 .aead = {
2255 .base = {
2256 .cra_name = "echainiv(authenc(hmac(md5),"
2257 "cbc(aes)))",
2258 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2259 "cbc-aes-caam",
2260 .cra_blocksize = AES_BLOCK_SIZE,
2261 },
8b4d43a4
KP
2262 .setkey = aead_setkey,
2263 .setauthsize = aead_setauthsize,
479bcc7c 2264 .encrypt = aead_encrypt,
8b18e235 2265 .decrypt = aead_decrypt,
8b4d43a4
KP
2266 .ivsize = AES_BLOCK_SIZE,
2267 .maxauthsize = MD5_DIGEST_SIZE,
479bcc7c
HX
2268 },
2269 .caam = {
2270 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2271 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2272 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2273 .geniv = true,
2274 },
2275 },
2276 {
2277 .aead = {
2278 .base = {
2279 .cra_name = "authenc(hmac(sha1),cbc(aes))",
2280 .cra_driver_name = "authenc-hmac-sha1-"
2281 "cbc-aes-caam",
2282 .cra_blocksize = AES_BLOCK_SIZE,
8b4d43a4 2283 },
479bcc7c
HX
2284 .setkey = aead_setkey,
2285 .setauthsize = aead_setauthsize,
2286 .encrypt = aead_encrypt,
2287 .decrypt = aead_decrypt,
2288 .ivsize = AES_BLOCK_SIZE,
2289 .maxauthsize = SHA1_DIGEST_SIZE,
2290 },
2291 .caam = {
2292 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2293 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2294 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2295 },
8b4d43a4 2296 },
8e8ec596 2297 {
479bcc7c
HX
2298 .aead = {
2299 .base = {
2300 .cra_name = "echainiv(authenc(hmac(sha1),"
2301 "cbc(aes)))",
2302 .cra_driver_name = "echainiv-authenc-"
2303 "hmac-sha1-cbc-aes-caam",
2304 .cra_blocksize = AES_BLOCK_SIZE,
2305 },
0e479300
YK
2306 .setkey = aead_setkey,
2307 .setauthsize = aead_setauthsize,
479bcc7c 2308 .encrypt = aead_encrypt,
8b18e235 2309 .decrypt = aead_decrypt,
8e8ec596
KP
2310 .ivsize = AES_BLOCK_SIZE,
2311 .maxauthsize = SHA1_DIGEST_SIZE,
479bcc7c
HX
2312 },
2313 .caam = {
2314 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2315 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2316 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2317 .geniv = true,
2318 },
2319 },
2320 {
2321 .aead = {
2322 .base = {
2323 .cra_name = "authenc(hmac(sha224),cbc(aes))",
2324 .cra_driver_name = "authenc-hmac-sha224-"
2325 "cbc-aes-caam",
2326 .cra_blocksize = AES_BLOCK_SIZE,
8e8ec596 2327 },
479bcc7c
HX
2328 .setkey = aead_setkey,
2329 .setauthsize = aead_setauthsize,
2330 .encrypt = aead_encrypt,
2331 .decrypt = aead_decrypt,
2332 .ivsize = AES_BLOCK_SIZE,
2333 .maxauthsize = SHA224_DIGEST_SIZE,
2334 },
2335 .caam = {
2336 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2337 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2338 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2339 },
8e8ec596 2340 },
e863f9cc 2341 {
479bcc7c
HX
2342 .aead = {
2343 .base = {
2344 .cra_name = "echainiv(authenc(hmac(sha224),"
2345 "cbc(aes)))",
2346 .cra_driver_name = "echainiv-authenc-"
2347 "hmac-sha224-cbc-aes-caam",
2348 .cra_blocksize = AES_BLOCK_SIZE,
2349 },
e863f9cc
HA
2350 .setkey = aead_setkey,
2351 .setauthsize = aead_setauthsize,
479bcc7c 2352 .encrypt = aead_encrypt,
8b18e235 2353 .decrypt = aead_decrypt,
e863f9cc
HA
2354 .ivsize = AES_BLOCK_SIZE,
2355 .maxauthsize = SHA224_DIGEST_SIZE,
479bcc7c
HX
2356 },
2357 .caam = {
2358 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2359 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2360 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2361 .geniv = true,
2362 },
2363 },
2364 {
2365 .aead = {
2366 .base = {
2367 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2368 .cra_driver_name = "authenc-hmac-sha256-"
2369 "cbc-aes-caam",
2370 .cra_blocksize = AES_BLOCK_SIZE,
e863f9cc 2371 },
479bcc7c
HX
2372 .setkey = aead_setkey,
2373 .setauthsize = aead_setauthsize,
2374 .encrypt = aead_encrypt,
2375 .decrypt = aead_decrypt,
2376 .ivsize = AES_BLOCK_SIZE,
2377 .maxauthsize = SHA256_DIGEST_SIZE,
2378 },
2379 .caam = {
2380 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2381 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2382 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2383 },
e863f9cc 2384 },
8e8ec596 2385 {
479bcc7c
HX
2386 .aead = {
2387 .base = {
2388 .cra_name = "echainiv(authenc(hmac(sha256),"
2389 "cbc(aes)))",
2390 .cra_driver_name = "echainiv-authenc-"
2391 "hmac-sha256-cbc-aes-caam",
2392 .cra_blocksize = AES_BLOCK_SIZE,
2393 },
2394 .setkey = aead_setkey,
2395 .setauthsize = aead_setauthsize,
2396 .encrypt = aead_encrypt,
8b18e235 2397 .decrypt = aead_decrypt,
479bcc7c
HX
2398 .ivsize = AES_BLOCK_SIZE,
2399 .maxauthsize = SHA256_DIGEST_SIZE,
2400 },
2401 .caam = {
2402 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2403 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2404 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2405 .geniv = true,
2406 },
2407 },
2408 {
2409 .aead = {
2410 .base = {
2411 .cra_name = "authenc(hmac(sha384),cbc(aes))",
2412 .cra_driver_name = "authenc-hmac-sha384-"
2413 "cbc-aes-caam",
2414 .cra_blocksize = AES_BLOCK_SIZE,
2415 },
2416 .setkey = aead_setkey,
2417 .setauthsize = aead_setauthsize,
2418 .encrypt = aead_encrypt,
2419 .decrypt = aead_decrypt,
2420 .ivsize = AES_BLOCK_SIZE,
2421 .maxauthsize = SHA384_DIGEST_SIZE,
2422 },
2423 .caam = {
2424 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2425 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2426 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2427 },
2428 },
2429 {
2430 .aead = {
2431 .base = {
2432 .cra_name = "echainiv(authenc(hmac(sha384),"
2433 "cbc(aes)))",
2434 .cra_driver_name = "echainiv-authenc-"
2435 "hmac-sha384-cbc-aes-caam",
2436 .cra_blocksize = AES_BLOCK_SIZE,
2437 },
0e479300
YK
2438 .setkey = aead_setkey,
2439 .setauthsize = aead_setauthsize,
479bcc7c 2440 .encrypt = aead_encrypt,
8b18e235 2441 .decrypt = aead_decrypt,
8e8ec596 2442 .ivsize = AES_BLOCK_SIZE,
479bcc7c
HX
2443 .maxauthsize = SHA384_DIGEST_SIZE,
2444 },
2445 .caam = {
2446 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2447 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2448 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2449 .geniv = true,
2450 },
8e8ec596 2451 },
e863f9cc 2452 {
479bcc7c
HX
2453 .aead = {
2454 .base = {
2455 .cra_name = "authenc(hmac(sha512),cbc(aes))",
2456 .cra_driver_name = "authenc-hmac-sha512-"
2457 "cbc-aes-caam",
2458 .cra_blocksize = AES_BLOCK_SIZE,
2459 },
e863f9cc
HA
2460 .setkey = aead_setkey,
2461 .setauthsize = aead_setauthsize,
479bcc7c
HX
2462 .encrypt = aead_encrypt,
2463 .decrypt = aead_decrypt,
e863f9cc 2464 .ivsize = AES_BLOCK_SIZE,
479bcc7c
HX
2465 .maxauthsize = SHA512_DIGEST_SIZE,
2466 },
2467 .caam = {
2468 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2469 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2470 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2471 },
e863f9cc 2472 },
4427b1b4 2473 {
479bcc7c
HX
2474 .aead = {
2475 .base = {
2476 .cra_name = "echainiv(authenc(hmac(sha512),"
2477 "cbc(aes)))",
2478 .cra_driver_name = "echainiv-authenc-"
2479 "hmac-sha512-cbc-aes-caam",
2480 .cra_blocksize = AES_BLOCK_SIZE,
2481 },
0e479300
YK
2482 .setkey = aead_setkey,
2483 .setauthsize = aead_setauthsize,
479bcc7c 2484 .encrypt = aead_encrypt,
8b18e235 2485 .decrypt = aead_decrypt,
4427b1b4
KP
2486 .ivsize = AES_BLOCK_SIZE,
2487 .maxauthsize = SHA512_DIGEST_SIZE,
479bcc7c
HX
2488 },
2489 .caam = {
2490 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2491 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2492 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2493 .geniv = true,
2494 },
2495 },
2496 {
2497 .aead = {
2498 .base = {
2499 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2500 .cra_driver_name = "authenc-hmac-md5-"
2501 "cbc-des3_ede-caam",
2502 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
4427b1b4 2503 },
1b52c409 2504 .setkey = des3_aead_setkey,
479bcc7c
HX
2505 .setauthsize = aead_setauthsize,
2506 .encrypt = aead_encrypt,
2507 .decrypt = aead_decrypt,
2508 .ivsize = DES3_EDE_BLOCK_SIZE,
2509 .maxauthsize = MD5_DIGEST_SIZE,
2510 },
2511 .caam = {
2512 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2513 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2514 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2515 }
4427b1b4 2516 },
8b4d43a4 2517 {
479bcc7c
HX
2518 .aead = {
2519 .base = {
2520 .cra_name = "echainiv(authenc(hmac(md5),"
2521 "cbc(des3_ede)))",
2522 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2523 "cbc-des3_ede-caam",
2524 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2525 },
1b52c409 2526 .setkey = des3_aead_setkey,
8b4d43a4 2527 .setauthsize = aead_setauthsize,
479bcc7c 2528 .encrypt = aead_encrypt,
8b18e235 2529 .decrypt = aead_decrypt,
8b4d43a4
KP
2530 .ivsize = DES3_EDE_BLOCK_SIZE,
2531 .maxauthsize = MD5_DIGEST_SIZE,
479bcc7c
HX
2532 },
2533 .caam = {
2534 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2535 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2536 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2537 .geniv = true,
2538 }
2539 },
2540 {
2541 .aead = {
2542 .base = {
2543 .cra_name = "authenc(hmac(sha1),"
2544 "cbc(des3_ede))",
2545 .cra_driver_name = "authenc-hmac-sha1-"
2546 "cbc-des3_ede-caam",
2547 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
8b4d43a4 2548 },
1b52c409 2549 .setkey = des3_aead_setkey,
479bcc7c
HX
2550 .setauthsize = aead_setauthsize,
2551 .encrypt = aead_encrypt,
2552 .decrypt = aead_decrypt,
2553 .ivsize = DES3_EDE_BLOCK_SIZE,
2554 .maxauthsize = SHA1_DIGEST_SIZE,
2555 },
2556 .caam = {
2557 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2558 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2559 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2560 },
8b4d43a4 2561 },
8e8ec596 2562 {
479bcc7c
HX
2563 .aead = {
2564 .base = {
2565 .cra_name = "echainiv(authenc(hmac(sha1),"
2566 "cbc(des3_ede)))",
2567 .cra_driver_name = "echainiv-authenc-"
2568 "hmac-sha1-"
2569 "cbc-des3_ede-caam",
2570 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2571 },
1b52c409 2572 .setkey = des3_aead_setkey,
0e479300 2573 .setauthsize = aead_setauthsize,
479bcc7c 2574 .encrypt = aead_encrypt,
8b18e235 2575 .decrypt = aead_decrypt,
8e8ec596
KP
2576 .ivsize = DES3_EDE_BLOCK_SIZE,
2577 .maxauthsize = SHA1_DIGEST_SIZE,
479bcc7c
HX
2578 },
2579 .caam = {
2580 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2581 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2582 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2583 .geniv = true,
2584 },
2585 },
2586 {
2587 .aead = {
2588 .base = {
2589 .cra_name = "authenc(hmac(sha224),"
2590 "cbc(des3_ede))",
2591 .cra_driver_name = "authenc-hmac-sha224-"
2592 "cbc-des3_ede-caam",
2593 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
8e8ec596 2594 },
1b52c409 2595 .setkey = des3_aead_setkey,
479bcc7c
HX
2596 .setauthsize = aead_setauthsize,
2597 .encrypt = aead_encrypt,
2598 .decrypt = aead_decrypt,
2599 .ivsize = DES3_EDE_BLOCK_SIZE,
2600 .maxauthsize = SHA224_DIGEST_SIZE,
2601 },
2602 .caam = {
2603 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2604 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2605 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2606 },
8e8ec596 2607 },
e863f9cc 2608 {
479bcc7c
HX
2609 .aead = {
2610 .base = {
2611 .cra_name = "echainiv(authenc(hmac(sha224),"
2612 "cbc(des3_ede)))",
2613 .cra_driver_name = "echainiv-authenc-"
2614 "hmac-sha224-"
2615 "cbc-des3_ede-caam",
2616 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2617 },
1b52c409 2618 .setkey = des3_aead_setkey,
e863f9cc 2619 .setauthsize = aead_setauthsize,
479bcc7c 2620 .encrypt = aead_encrypt,
8b18e235 2621 .decrypt = aead_decrypt,
e863f9cc
HA
2622 .ivsize = DES3_EDE_BLOCK_SIZE,
2623 .maxauthsize = SHA224_DIGEST_SIZE,
479bcc7c
HX
2624 },
2625 .caam = {
2626 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2627 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2628 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2629 .geniv = true,
2630 },
2631 },
2632 {
2633 .aead = {
2634 .base = {
2635 .cra_name = "authenc(hmac(sha256),"
2636 "cbc(des3_ede))",
2637 .cra_driver_name = "authenc-hmac-sha256-"
2638 "cbc-des3_ede-caam",
2639 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
e863f9cc 2640 },
1b52c409 2641 .setkey = des3_aead_setkey,
479bcc7c
HX
2642 .setauthsize = aead_setauthsize,
2643 .encrypt = aead_encrypt,
2644 .decrypt = aead_decrypt,
2645 .ivsize = DES3_EDE_BLOCK_SIZE,
2646 .maxauthsize = SHA256_DIGEST_SIZE,
2647 },
2648 .caam = {
2649 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2650 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2651 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2652 },
e863f9cc 2653 },
8e8ec596 2654 {
479bcc7c
HX
2655 .aead = {
2656 .base = {
2657 .cra_name = "echainiv(authenc(hmac(sha256),"
2658 "cbc(des3_ede)))",
2659 .cra_driver_name = "echainiv-authenc-"
2660 "hmac-sha256-"
2661 "cbc-des3_ede-caam",
2662 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2663 },
1b52c409 2664 .setkey = des3_aead_setkey,
0e479300 2665 .setauthsize = aead_setauthsize,
479bcc7c 2666 .encrypt = aead_encrypt,
8b18e235 2667 .decrypt = aead_decrypt,
8e8ec596
KP
2668 .ivsize = DES3_EDE_BLOCK_SIZE,
2669 .maxauthsize = SHA256_DIGEST_SIZE,
479bcc7c
HX
2670 },
2671 .caam = {
2672 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2673 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2674 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2675 .geniv = true,
2676 },
2677 },
2678 {
2679 .aead = {
2680 .base = {
2681 .cra_name = "authenc(hmac(sha384),"
2682 "cbc(des3_ede))",
2683 .cra_driver_name = "authenc-hmac-sha384-"
2684 "cbc-des3_ede-caam",
2685 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
8e8ec596 2686 },
1b52c409 2687 .setkey = des3_aead_setkey,
479bcc7c
HX
2688 .setauthsize = aead_setauthsize,
2689 .encrypt = aead_encrypt,
2690 .decrypt = aead_decrypt,
2691 .ivsize = DES3_EDE_BLOCK_SIZE,
2692 .maxauthsize = SHA384_DIGEST_SIZE,
2693 },
2694 .caam = {
2695 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2696 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2697 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2698 },
8e8ec596 2699 },
e863f9cc 2700 {
479bcc7c
HX
2701 .aead = {
2702 .base = {
2703 .cra_name = "echainiv(authenc(hmac(sha384),"
2704 "cbc(des3_ede)))",
2705 .cra_driver_name = "echainiv-authenc-"
2706 "hmac-sha384-"
2707 "cbc-des3_ede-caam",
2708 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2709 },
1b52c409 2710 .setkey = des3_aead_setkey,
e863f9cc 2711 .setauthsize = aead_setauthsize,
479bcc7c 2712 .encrypt = aead_encrypt,
8b18e235 2713 .decrypt = aead_decrypt,
e863f9cc
HA
2714 .ivsize = DES3_EDE_BLOCK_SIZE,
2715 .maxauthsize = SHA384_DIGEST_SIZE,
479bcc7c
HX
2716 },
2717 .caam = {
2718 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2719 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2720 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2721 .geniv = true,
2722 },
2723 },
2724 {
2725 .aead = {
2726 .base = {
2727 .cra_name = "authenc(hmac(sha512),"
2728 "cbc(des3_ede))",
2729 .cra_driver_name = "authenc-hmac-sha512-"
2730 "cbc-des3_ede-caam",
2731 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
e863f9cc 2732 },
1b52c409 2733 .setkey = des3_aead_setkey,
479bcc7c
HX
2734 .setauthsize = aead_setauthsize,
2735 .encrypt = aead_encrypt,
2736 .decrypt = aead_decrypt,
2737 .ivsize = DES3_EDE_BLOCK_SIZE,
2738 .maxauthsize = SHA512_DIGEST_SIZE,
2739 },
2740 .caam = {
2741 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2742 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2743 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2744 },
e863f9cc 2745 },
4427b1b4 2746 {
479bcc7c
HX
2747 .aead = {
2748 .base = {
2749 .cra_name = "echainiv(authenc(hmac(sha512),"
2750 "cbc(des3_ede)))",
2751 .cra_driver_name = "echainiv-authenc-"
2752 "hmac-sha512-"
2753 "cbc-des3_ede-caam",
2754 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2755 },
1b52c409 2756 .setkey = des3_aead_setkey,
0e479300 2757 .setauthsize = aead_setauthsize,
479bcc7c 2758 .encrypt = aead_encrypt,
8b18e235 2759 .decrypt = aead_decrypt,
4427b1b4
KP
2760 .ivsize = DES3_EDE_BLOCK_SIZE,
2761 .maxauthsize = SHA512_DIGEST_SIZE,
479bcc7c
HX
2762 },
2763 .caam = {
2764 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2765 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2766 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2767 .geniv = true,
2768 },
2769 },
2770 {
2771 .aead = {
2772 .base = {
2773 .cra_name = "authenc(hmac(md5),cbc(des))",
2774 .cra_driver_name = "authenc-hmac-md5-"
2775 "cbc-des-caam",
2776 .cra_blocksize = DES_BLOCK_SIZE,
4427b1b4 2777 },
479bcc7c
HX
2778 .setkey = aead_setkey,
2779 .setauthsize = aead_setauthsize,
2780 .encrypt = aead_encrypt,
2781 .decrypt = aead_decrypt,
2782 .ivsize = DES_BLOCK_SIZE,
2783 .maxauthsize = MD5_DIGEST_SIZE,
2784 },
2785 .caam = {
2786 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2787 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2788 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2789 },
4427b1b4 2790 },
8b4d43a4 2791 {
479bcc7c
HX
2792 .aead = {
2793 .base = {
2794 .cra_name = "echainiv(authenc(hmac(md5),"
2795 "cbc(des)))",
2796 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2797 "cbc-des-caam",
2798 .cra_blocksize = DES_BLOCK_SIZE,
2799 },
8b4d43a4
KP
2800 .setkey = aead_setkey,
2801 .setauthsize = aead_setauthsize,
479bcc7c 2802 .encrypt = aead_encrypt,
8b18e235 2803 .decrypt = aead_decrypt,
8b4d43a4
KP
2804 .ivsize = DES_BLOCK_SIZE,
2805 .maxauthsize = MD5_DIGEST_SIZE,
479bcc7c
HX
2806 },
2807 .caam = {
2808 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2809 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2810 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2811 .geniv = true,
2812 },
2813 },
2814 {
2815 .aead = {
2816 .base = {
2817 .cra_name = "authenc(hmac(sha1),cbc(des))",
2818 .cra_driver_name = "authenc-hmac-sha1-"
2819 "cbc-des-caam",
2820 .cra_blocksize = DES_BLOCK_SIZE,
8b4d43a4 2821 },
479bcc7c
HX
2822 .setkey = aead_setkey,
2823 .setauthsize = aead_setauthsize,
2824 .encrypt = aead_encrypt,
2825 .decrypt = aead_decrypt,
2826 .ivsize = DES_BLOCK_SIZE,
2827 .maxauthsize = SHA1_DIGEST_SIZE,
2828 },
2829 .caam = {
2830 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2831 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2832 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2833 },
8b4d43a4 2834 },
8e8ec596 2835 {
479bcc7c
HX
2836 .aead = {
2837 .base = {
2838 .cra_name = "echainiv(authenc(hmac(sha1),"
2839 "cbc(des)))",
2840 .cra_driver_name = "echainiv-authenc-"
2841 "hmac-sha1-cbc-des-caam",
2842 .cra_blocksize = DES_BLOCK_SIZE,
2843 },
0e479300
YK
2844 .setkey = aead_setkey,
2845 .setauthsize = aead_setauthsize,
479bcc7c 2846 .encrypt = aead_encrypt,
8b18e235 2847 .decrypt = aead_decrypt,
8e8ec596
KP
2848 .ivsize = DES_BLOCK_SIZE,
2849 .maxauthsize = SHA1_DIGEST_SIZE,
479bcc7c
HX
2850 },
2851 .caam = {
2852 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2853 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2854 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2855 .geniv = true,
2856 },
2857 },
2858 {
2859 .aead = {
2860 .base = {
2861 .cra_name = "authenc(hmac(sha224),cbc(des))",
2862 .cra_driver_name = "authenc-hmac-sha224-"
2863 "cbc-des-caam",
2864 .cra_blocksize = DES_BLOCK_SIZE,
8e8ec596 2865 },
479bcc7c
HX
2866 .setkey = aead_setkey,
2867 .setauthsize = aead_setauthsize,
2868 .encrypt = aead_encrypt,
2869 .decrypt = aead_decrypt,
2870 .ivsize = DES_BLOCK_SIZE,
2871 .maxauthsize = SHA224_DIGEST_SIZE,
2872 },
2873 .caam = {
2874 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2875 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2876 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2877 },
8e8ec596 2878 },
e863f9cc 2879 {
479bcc7c
HX
2880 .aead = {
2881 .base = {
2882 .cra_name = "echainiv(authenc(hmac(sha224),"
2883 "cbc(des)))",
2884 .cra_driver_name = "echainiv-authenc-"
2885 "hmac-sha224-cbc-des-caam",
2886 .cra_blocksize = DES_BLOCK_SIZE,
2887 },
e863f9cc
HA
2888 .setkey = aead_setkey,
2889 .setauthsize = aead_setauthsize,
479bcc7c 2890 .encrypt = aead_encrypt,
8b18e235 2891 .decrypt = aead_decrypt,
e863f9cc
HA
2892 .ivsize = DES_BLOCK_SIZE,
2893 .maxauthsize = SHA224_DIGEST_SIZE,
479bcc7c
HX
2894 },
2895 .caam = {
2896 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2897 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2898 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2899 .geniv = true,
2900 },
2901 },
2902 {
2903 .aead = {
2904 .base = {
2905 .cra_name = "authenc(hmac(sha256),cbc(des))",
2906 .cra_driver_name = "authenc-hmac-sha256-"
2907 "cbc-des-caam",
2908 .cra_blocksize = DES_BLOCK_SIZE,
e863f9cc 2909 },
479bcc7c
HX
2910 .setkey = aead_setkey,
2911 .setauthsize = aead_setauthsize,
2912 .encrypt = aead_encrypt,
2913 .decrypt = aead_decrypt,
2914 .ivsize = DES_BLOCK_SIZE,
2915 .maxauthsize = SHA256_DIGEST_SIZE,
2916 },
2917 .caam = {
2918 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2919 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2920 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2921 },
e863f9cc 2922 },
8e8ec596 2923 {
479bcc7c
HX
2924 .aead = {
2925 .base = {
2926 .cra_name = "echainiv(authenc(hmac(sha256),"
2927 "cbc(des)))",
2928 .cra_driver_name = "echainiv-authenc-"
2929 "hmac-sha256-cbc-des-caam",
2930 .cra_blocksize = DES_BLOCK_SIZE,
2931 },
0e479300
YK
2932 .setkey = aead_setkey,
2933 .setauthsize = aead_setauthsize,
479bcc7c 2934 .encrypt = aead_encrypt,
8b18e235 2935 .decrypt = aead_decrypt,
8e8ec596
KP
2936 .ivsize = DES_BLOCK_SIZE,
2937 .maxauthsize = SHA256_DIGEST_SIZE,
479bcc7c
HX
2938 },
2939 .caam = {
2940 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2941 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2942 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2943 .geniv = true,
2944 },
2945 },
2946 {
2947 .aead = {
2948 .base = {
2949 .cra_name = "authenc(hmac(sha384),cbc(des))",
2950 .cra_driver_name = "authenc-hmac-sha384-"
2951 "cbc-des-caam",
2952 .cra_blocksize = DES_BLOCK_SIZE,
8e8ec596 2953 },
479bcc7c
HX
2954 .setkey = aead_setkey,
2955 .setauthsize = aead_setauthsize,
2956 .encrypt = aead_encrypt,
2957 .decrypt = aead_decrypt,
2958 .ivsize = DES_BLOCK_SIZE,
2959 .maxauthsize = SHA384_DIGEST_SIZE,
2960 },
2961 .caam = {
2962 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2963 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2964 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2965 },
8e8ec596 2966 },
e863f9cc 2967 {
479bcc7c
HX
2968 .aead = {
2969 .base = {
2970 .cra_name = "echainiv(authenc(hmac(sha384),"
2971 "cbc(des)))",
2972 .cra_driver_name = "echainiv-authenc-"
2973 "hmac-sha384-cbc-des-caam",
2974 .cra_blocksize = DES_BLOCK_SIZE,
2975 },
e863f9cc
HA
2976 .setkey = aead_setkey,
2977 .setauthsize = aead_setauthsize,
479bcc7c 2978 .encrypt = aead_encrypt,
8b18e235 2979 .decrypt = aead_decrypt,
e863f9cc
HA
2980 .ivsize = DES_BLOCK_SIZE,
2981 .maxauthsize = SHA384_DIGEST_SIZE,
479bcc7c
HX
2982 },
2983 .caam = {
2984 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2985 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2986 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2987 .geniv = true,
2988 },
2989 },
2990 {
2991 .aead = {
2992 .base = {
2993 .cra_name = "authenc(hmac(sha512),cbc(des))",
2994 .cra_driver_name = "authenc-hmac-sha512-"
2995 "cbc-des-caam",
2996 .cra_blocksize = DES_BLOCK_SIZE,
e863f9cc 2997 },
479bcc7c
HX
2998 .setkey = aead_setkey,
2999 .setauthsize = aead_setauthsize,
3000 .encrypt = aead_encrypt,
3001 .decrypt = aead_decrypt,
3002 .ivsize = DES_BLOCK_SIZE,
3003 .maxauthsize = SHA512_DIGEST_SIZE,
3004 },
3005 .caam = {
3006 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3007 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3008 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 3009 },
e863f9cc 3010 },
4427b1b4 3011 {
479bcc7c
HX
3012 .aead = {
3013 .base = {
3014 .cra_name = "echainiv(authenc(hmac(sha512),"
3015 "cbc(des)))",
3016 .cra_driver_name = "echainiv-authenc-"
3017 "hmac-sha512-cbc-des-caam",
3018 .cra_blocksize = DES_BLOCK_SIZE,
3019 },
0e479300
YK
3020 .setkey = aead_setkey,
3021 .setauthsize = aead_setauthsize,
479bcc7c 3022 .encrypt = aead_encrypt,
8b18e235 3023 .decrypt = aead_decrypt,
4427b1b4
KP
3024 .ivsize = DES_BLOCK_SIZE,
3025 .maxauthsize = SHA512_DIGEST_SIZE,
479bcc7c
HX
3026 },
3027 .caam = {
3028 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3029 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3030 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3031 .geniv = true,
3032 },
4427b1b4 3033 },
daebc465 3034 {
479bcc7c
HX
3035 .aead = {
3036 .base = {
3037 .cra_name = "authenc(hmac(md5),"
3038 "rfc3686(ctr(aes)))",
3039 .cra_driver_name = "authenc-hmac-md5-"
3040 "rfc3686-ctr-aes-caam",
3041 .cra_blocksize = 1,
3042 },
daebc465
CV
3043 .setkey = aead_setkey,
3044 .setauthsize = aead_setauthsize,
479bcc7c
HX
3045 .encrypt = aead_encrypt,
3046 .decrypt = aead_decrypt,
daebc465
CV
3047 .ivsize = CTR_RFC3686_IV_SIZE,
3048 .maxauthsize = MD5_DIGEST_SIZE,
479bcc7c
HX
3049 },
3050 .caam = {
3051 .class1_alg_type = OP_ALG_ALGSEL_AES |
3052 OP_ALG_AAI_CTR_MOD128,
3053 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3054 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3055 .rfc3686 = true,
3056 },
daebc465
CV
3057 },
3058 {
479bcc7c
HX
3059 .aead = {
3060 .base = {
3061 .cra_name = "seqiv(authenc("
3062 "hmac(md5),rfc3686(ctr(aes))))",
3063 .cra_driver_name = "seqiv-authenc-hmac-md5-"
3064 "rfc3686-ctr-aes-caam",
3065 .cra_blocksize = 1,
3066 },
daebc465
CV
3067 .setkey = aead_setkey,
3068 .setauthsize = aead_setauthsize,
479bcc7c 3069 .encrypt = aead_encrypt,
8b18e235 3070 .decrypt = aead_decrypt,
daebc465 3071 .ivsize = CTR_RFC3686_IV_SIZE,
479bcc7c
HX
3072 .maxauthsize = MD5_DIGEST_SIZE,
3073 },
3074 .caam = {
3075 .class1_alg_type = OP_ALG_ALGSEL_AES |
3076 OP_ALG_AAI_CTR_MOD128,
3077 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3078 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3079 .rfc3686 = true,
3080 .geniv = true,
3081 },
daebc465
CV
3082 },
3083 {
479bcc7c
HX
3084 .aead = {
3085 .base = {
3086 .cra_name = "authenc(hmac(sha1),"
3087 "rfc3686(ctr(aes)))",
3088 .cra_driver_name = "authenc-hmac-sha1-"
3089 "rfc3686-ctr-aes-caam",
3090 .cra_blocksize = 1,
3091 },
daebc465
CV
3092 .setkey = aead_setkey,
3093 .setauthsize = aead_setauthsize,
479bcc7c
HX
3094 .encrypt = aead_encrypt,
3095 .decrypt = aead_decrypt,
daebc465 3096 .ivsize = CTR_RFC3686_IV_SIZE,
479bcc7c
HX
3097 .maxauthsize = SHA1_DIGEST_SIZE,
3098 },
3099 .caam = {
3100 .class1_alg_type = OP_ALG_ALGSEL_AES |
3101 OP_ALG_AAI_CTR_MOD128,
3102 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3103 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3104 .rfc3686 = true,
3105 },
daebc465
CV
3106 },
3107 {
479bcc7c
HX
3108 .aead = {
3109 .base = {
3110 .cra_name = "seqiv(authenc("
3111 "hmac(sha1),rfc3686(ctr(aes))))",
3112 .cra_driver_name = "seqiv-authenc-hmac-sha1-"
3113 "rfc3686-ctr-aes-caam",
3114 .cra_blocksize = 1,
3115 },
daebc465
CV
3116 .setkey = aead_setkey,
3117 .setauthsize = aead_setauthsize,
479bcc7c 3118 .encrypt = aead_encrypt,
8b18e235 3119 .decrypt = aead_decrypt,
daebc465 3120 .ivsize = CTR_RFC3686_IV_SIZE,
479bcc7c
HX
3121 .maxauthsize = SHA1_DIGEST_SIZE,
3122 },
3123 .caam = {
3124 .class1_alg_type = OP_ALG_ALGSEL_AES |
3125 OP_ALG_AAI_CTR_MOD128,
3126 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3127 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3128 .rfc3686 = true,
3129 .geniv = true,
3130 },
daebc465
CV
3131 },
3132 {
479bcc7c
HX
3133 .aead = {
3134 .base = {
3135 .cra_name = "authenc(hmac(sha224),"
3136 "rfc3686(ctr(aes)))",
3137 .cra_driver_name = "authenc-hmac-sha224-"
3138 "rfc3686-ctr-aes-caam",
3139 .cra_blocksize = 1,
3140 },
daebc465
CV
3141 .setkey = aead_setkey,
3142 .setauthsize = aead_setauthsize,
479bcc7c
HX
3143 .encrypt = aead_encrypt,
3144 .decrypt = aead_decrypt,
daebc465 3145 .ivsize = CTR_RFC3686_IV_SIZE,
479bcc7c
HX
3146 .maxauthsize = SHA224_DIGEST_SIZE,
3147 },
3148 .caam = {
3149 .class1_alg_type = OP_ALG_ALGSEL_AES |
3150 OP_ALG_AAI_CTR_MOD128,
3151 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3152 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3153 .rfc3686 = true,
3154 },
daebc465
CV
3155 },
3156 {
479bcc7c
HX
3157 .aead = {
3158 .base = {
3159 .cra_name = "seqiv(authenc("
3160 "hmac(sha224),rfc3686(ctr(aes))))",
3161 .cra_driver_name = "seqiv-authenc-hmac-sha224-"
3162 "rfc3686-ctr-aes-caam",
3163 .cra_blocksize = 1,
3164 },
daebc465
CV
3165 .setkey = aead_setkey,
3166 .setauthsize = aead_setauthsize,
479bcc7c 3167 .encrypt = aead_encrypt,
8b18e235 3168 .decrypt = aead_decrypt,
daebc465 3169 .ivsize = CTR_RFC3686_IV_SIZE,
479bcc7c
HX
3170 .maxauthsize = SHA224_DIGEST_SIZE,
3171 },
3172 .caam = {
3173 .class1_alg_type = OP_ALG_ALGSEL_AES |
3174 OP_ALG_AAI_CTR_MOD128,
3175 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3176 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3177 .rfc3686 = true,
3178 .geniv = true,
3179 },
acdca31d
YK
3180 },
3181 {
479bcc7c
HX
3182 .aead = {
3183 .base = {
3184 .cra_name = "authenc(hmac(sha256),"
3185 "rfc3686(ctr(aes)))",
3186 .cra_driver_name = "authenc-hmac-sha256-"
3187 "rfc3686-ctr-aes-caam",
3188 .cra_blocksize = 1,
acdca31d 3189 },
479bcc7c
HX
3190 .setkey = aead_setkey,
3191 .setauthsize = aead_setauthsize,
3192 .encrypt = aead_encrypt,
3193 .decrypt = aead_decrypt,
3194 .ivsize = CTR_RFC3686_IV_SIZE,
3195 .maxauthsize = SHA256_DIGEST_SIZE,
3196 },
3197 .caam = {
3198 .class1_alg_type = OP_ALG_ALGSEL_AES |
3199 OP_ALG_AAI_CTR_MOD128,
3200 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3201 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3202 .rfc3686 = true,
3203 },
acdca31d
YK
3204 },
3205 {
479bcc7c
HX
3206 .aead = {
3207 .base = {
3208 .cra_name = "seqiv(authenc(hmac(sha256),"
3209 "rfc3686(ctr(aes))))",
3210 .cra_driver_name = "seqiv-authenc-hmac-sha256-"
3211 "rfc3686-ctr-aes-caam",
3212 .cra_blocksize = 1,
acdca31d 3213 },
479bcc7c
HX
3214 .setkey = aead_setkey,
3215 .setauthsize = aead_setauthsize,
3216 .encrypt = aead_encrypt,
8b18e235 3217 .decrypt = aead_decrypt,
479bcc7c
HX
3218 .ivsize = CTR_RFC3686_IV_SIZE,
3219 .maxauthsize = SHA256_DIGEST_SIZE,
3220 },
3221 .caam = {
3222 .class1_alg_type = OP_ALG_ALGSEL_AES |
3223 OP_ALG_AAI_CTR_MOD128,
3224 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3225 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3226 .rfc3686 = true,
3227 .geniv = true,
3228 },
2b22f6c5
CV
3229 },
3230 {
479bcc7c
HX
3231 .aead = {
3232 .base = {
3233 .cra_name = "authenc(hmac(sha384),"
3234 "rfc3686(ctr(aes)))",
3235 .cra_driver_name = "authenc-hmac-sha384-"
3236 "rfc3686-ctr-aes-caam",
3237 .cra_blocksize = 1,
2b22f6c5 3238 },
479bcc7c
HX
3239 .setkey = aead_setkey,
3240 .setauthsize = aead_setauthsize,
3241 .encrypt = aead_encrypt,
3242 .decrypt = aead_decrypt,
a5f57cff 3243 .ivsize = CTR_RFC3686_IV_SIZE,
479bcc7c
HX
3244 .maxauthsize = SHA384_DIGEST_SIZE,
3245 },
3246 .caam = {
3247 .class1_alg_type = OP_ALG_ALGSEL_AES |
3248 OP_ALG_AAI_CTR_MOD128,
3249 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3250 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3251 .rfc3686 = true,
3252 },
3253 },
f2147b88
HX
3254 {
3255 .aead = {
3256 .base = {
479bcc7c
HX
3257 .cra_name = "seqiv(authenc(hmac(sha384),"
3258 "rfc3686(ctr(aes))))",
3259 .cra_driver_name = "seqiv-authenc-hmac-sha384-"
3260 "rfc3686-ctr-aes-caam",
f2147b88
HX
3261 .cra_blocksize = 1,
3262 },
479bcc7c
HX
3263 .setkey = aead_setkey,
3264 .setauthsize = aead_setauthsize,
3265 .encrypt = aead_encrypt,
8b18e235 3266 .decrypt = aead_decrypt,
479bcc7c
HX
3267 .ivsize = CTR_RFC3686_IV_SIZE,
3268 .maxauthsize = SHA384_DIGEST_SIZE,
f2147b88
HX
3269 },
3270 .caam = {
479bcc7c
HX
3271 .class1_alg_type = OP_ALG_ALGSEL_AES |
3272 OP_ALG_AAI_CTR_MOD128,
3273 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3274 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3275 .rfc3686 = true,
3276 .geniv = true,
f2147b88
HX
3277 },
3278 },
3279 {
3280 .aead = {
3281 .base = {
479bcc7c
HX
3282 .cra_name = "authenc(hmac(sha512),"
3283 "rfc3686(ctr(aes)))",
3284 .cra_driver_name = "authenc-hmac-sha512-"
3285 "rfc3686-ctr-aes-caam",
f2147b88
HX
3286 .cra_blocksize = 1,
3287 },
479bcc7c
HX
3288 .setkey = aead_setkey,
3289 .setauthsize = aead_setauthsize,
3290 .encrypt = aead_encrypt,
3291 .decrypt = aead_decrypt,
3292 .ivsize = CTR_RFC3686_IV_SIZE,
3293 .maxauthsize = SHA512_DIGEST_SIZE,
f2147b88
HX
3294 },
3295 .caam = {
479bcc7c
HX
3296 .class1_alg_type = OP_ALG_ALGSEL_AES |
3297 OP_ALG_AAI_CTR_MOD128,
3298 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3299 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 3300 .rfc3686 = true,
f2147b88
HX
3301 },
3302 },
f2147b88
HX
3303 {
3304 .aead = {
3305 .base = {
479bcc7c
HX
3306 .cra_name = "seqiv(authenc(hmac(sha512),"
3307 "rfc3686(ctr(aes))))",
3308 .cra_driver_name = "seqiv-authenc-hmac-sha512-"
3309 "rfc3686-ctr-aes-caam",
f2147b88
HX
3310 .cra_blocksize = 1,
3311 },
479bcc7c
HX
3312 .setkey = aead_setkey,
3313 .setauthsize = aead_setauthsize,
3314 .encrypt = aead_encrypt,
8b18e235 3315 .decrypt = aead_decrypt,
479bcc7c
HX
3316 .ivsize = CTR_RFC3686_IV_SIZE,
3317 .maxauthsize = SHA512_DIGEST_SIZE,
f2147b88
HX
3318 },
3319 .caam = {
479bcc7c
HX
3320 .class1_alg_type = OP_ALG_ALGSEL_AES |
3321 OP_ALG_AAI_CTR_MOD128,
3322 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3323 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3324 .rfc3686 = true,
3325 .geniv = true,
f2147b88
HX
3326 },
3327 },
d6bbd4ee
HG
3328 {
3329 .aead = {
3330 .base = {
3331 .cra_name = "rfc7539(chacha20,poly1305)",
3332 .cra_driver_name = "rfc7539-chacha20-poly1305-"
3333 "caam",
3334 .cra_blocksize = 1,
3335 },
3336 .setkey = chachapoly_setkey,
3337 .setauthsize = chachapoly_setauthsize,
3338 .encrypt = chachapoly_encrypt,
3339 .decrypt = chachapoly_decrypt,
3340 .ivsize = CHACHAPOLY_IV_SIZE,
3341 .maxauthsize = POLY1305_DIGEST_SIZE,
3342 },
3343 .caam = {
3344 .class1_alg_type = OP_ALG_ALGSEL_CHACHA20 |
3345 OP_ALG_AAI_AEAD,
3346 .class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
3347 OP_ALG_AAI_AEAD,
24586b5f 3348 .nodkp = true,
d6bbd4ee
HG
3349 },
3350 },
3351 {
3352 .aead = {
3353 .base = {
3354 .cra_name = "rfc7539esp(chacha20,poly1305)",
3355 .cra_driver_name = "rfc7539esp-chacha20-"
3356 "poly1305-caam",
3357 .cra_blocksize = 1,
3358 },
3359 .setkey = chachapoly_setkey,
3360 .setauthsize = chachapoly_setauthsize,
3361 .encrypt = chachapoly_encrypt,
3362 .decrypt = chachapoly_decrypt,
3363 .ivsize = 8,
3364 .maxauthsize = POLY1305_DIGEST_SIZE,
3365 },
3366 .caam = {
3367 .class1_alg_type = OP_ALG_ALGSEL_CHACHA20 |
3368 OP_ALG_AAI_AEAD,
3369 .class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
3370 OP_ALG_AAI_AEAD,
24586b5f 3371 .nodkp = true,
d6bbd4ee
HG
3372 },
3373 },
f2147b88
HX
3374};
3375
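/*
 * Common per-transform setup: acquire a job ring and DMA-map the shared
 * descriptors and key as one contiguous region of the context.
 */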
7e0880b9
HG
3376static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam,
3377 bool uses_dkp)
8e8ec596 3378{
bbf22344 3379 dma_addr_t dma_addr;
7e0880b9 3380 struct caam_drv_private *priv;
bbf22344 3381
cfc6f11b
RG
3382 ctx->jrdev = caam_jr_alloc();
3383 if (IS_ERR(ctx->jrdev)) {
3384 pr_err("Job Ring Device allocation for transform failed\n");
3385 return PTR_ERR(ctx->jrdev);
3386 }
8e8ec596 3387
7e0880b9
HG
3388 priv = dev_get_drvdata(ctx->jrdev->parent);
3389 if (priv->era >= 6 && uses_dkp)
3390 ctx->dir = DMA_BIDIRECTIONAL;
3391 else
3392 ctx->dir = DMA_TO_DEVICE;
3393
bbf22344
HG
3394 dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
3395 offsetof(struct caam_ctx,
3396 sh_desc_enc_dma),
7e0880b9 3397 ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
bbf22344
HG
3398 if (dma_mapping_error(ctx->jrdev, dma_addr)) {
3399 dev_err(ctx->jrdev, "unable to map key, shared descriptors\n");
3400 caam_jr_free(ctx->jrdev);
3401 return -ENOMEM;
3402 }
3403
3404 ctx->sh_desc_enc_dma = dma_addr;
3405 ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx,
3406 sh_desc_dec);
bbf22344
HG
3407 ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key);
3408
8e8ec596 3409 /* copy descriptor header template value */
db57656b
HG
3410 ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
3411 ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;
8e8ec596
KP
3412
3413 return 0;
3414}
3415
5ca7badb 3416static int caam_cra_init(struct crypto_skcipher *tfm)
8e8ec596 3417{
5ca7badb
HG
3418 struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
3419 struct caam_skcipher_alg *caam_alg =
3420 container_of(alg, typeof(*caam_alg), skcipher);
8e8ec596 3421
5ca7badb
HG
3422 return caam_init_common(crypto_skcipher_ctx(tfm), &caam_alg->caam,
3423 false);
f2147b88
HX
3424}
3425
3426static int caam_aead_init(struct crypto_aead *tfm)
3427{
3428 struct aead_alg *alg = crypto_aead_alg(tfm);
3429 struct caam_aead_alg *caam_alg =
3430 container_of(alg, struct caam_aead_alg, aead);
3431 struct caam_ctx *ctx = crypto_aead_ctx(tfm);
3432
24586b5f 3433 return caam_init_common(ctx, &caam_alg->caam, !caam_alg->caam.nodkp);
f2147b88
HX
3434}
3435
3436static void caam_exit_common(struct caam_ctx *ctx)
3437{
bbf22344
HG
3438 dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
3439 offsetof(struct caam_ctx, sh_desc_enc_dma),
7e0880b9 3440 ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
cfc6f11b 3441 caam_jr_free(ctx->jrdev);
8e8ec596
KP
3442}
3443
5ca7badb 3444static void caam_cra_exit(struct crypto_skcipher *tfm)
f2147b88 3445{
5ca7badb 3446 caam_exit_common(crypto_skcipher_ctx(tfm));
f2147b88
HX
3447}
3448
3449static void caam_aead_exit(struct crypto_aead *tfm)
3450{
3451 caam_exit_common(crypto_aead_ctx(tfm));
3452}
3453
1b46c90c 3454void caam_algapi_exit(void)
8e8ec596 3455{
f2147b88
HX
3456 int i;
3457
3458 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
3459 struct caam_aead_alg *t_alg = driver_aeads + i;
3460
3461 if (t_alg->registered)
3462 crypto_unregister_aead(&t_alg->aead);
3463 }
8e8ec596 3464
5ca7badb
HG
3465 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
3466 struct caam_skcipher_alg *t_alg = driver_algs + i;
8e8ec596 3467
5ca7badb
HG
3468 if (t_alg->registered)
3469 crypto_unregister_skcipher(&t_alg->skcipher);
8e8ec596 3470 }
8e8ec596
KP
3471}
3472
5ca7badb 3473static void caam_skcipher_alg_init(struct caam_skcipher_alg *t_alg)
8e8ec596 3474{
5ca7badb 3475 struct skcipher_alg *alg = &t_alg->skcipher;
8e8ec596 3476
5ca7badb
HG
3477 alg->base.cra_module = THIS_MODULE;
3478 alg->base.cra_priority = CAAM_CRA_PRIORITY;
3479 alg->base.cra_ctxsize = sizeof(struct caam_ctx);
3480 alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
8e8ec596 3481
5ca7badb
HG
3482 alg->init = caam_cra_init;
3483 alg->exit = caam_cra_exit;
8e8ec596
KP
3484}
3485
f2147b88
HX
3486static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
3487{
3488 struct aead_alg *alg = &t_alg->aead;
3489
3490 alg->base.cra_module = THIS_MODULE;
3491 alg->base.cra_priority = CAAM_CRA_PRIORITY;
3492 alg->base.cra_ctxsize = sizeof(struct caam_ctx);
5e4b8c1f 3493 alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
f2147b88
HX
3494
3495 alg->init = caam_aead_init;
3496 alg->exit = caam_aead_exit;
3497}
3498
1b46c90c 3499int caam_algapi_init(struct device *ctrldev)
8e8ec596 3500{
1b46c90c 3501 struct caam_drv_private *priv = dev_get_drvdata(ctrldev);
8e8ec596 3502 int i = 0, err = 0;
d6bbd4ee 3503 u32 aes_vid, aes_inst, des_inst, md_vid, md_inst, ccha_inst, ptha_inst;
eaed71a4 3504 u32 arc4_inst;
bf83490e 3505 unsigned int md_limit = SHA512_DIGEST_SIZE;
df80bfd3 3506 bool registered = false, gcm_support;
8e8ec596 3507
	/*
	 * Register crypto algorithms the device supports.
	 * First, detect presence and attributes of DES, AES, and MD blocks.
	 */
	if (priv->era < 10) {
		u32 cha_vid, cha_inst, aes_rn;

		cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
		aes_vid = cha_vid & CHA_ID_LS_AES_MASK;
		md_vid = (cha_vid & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;

		cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
		des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >>
			   CHA_ID_LS_DES_SHIFT;
		aes_inst = cha_inst & CHA_ID_LS_AES_MASK;
		md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
		arc4_inst = (cha_inst & CHA_ID_LS_ARC4_MASK) >>
			    CHA_ID_LS_ARC4_SHIFT;
		ccha_inst = 0;
		ptha_inst = 0;

		aes_rn = rd_reg32(&priv->ctrl->perfmon.cha_rev_ls) &
			 CHA_ID_LS_AES_MASK;
		gcm_support = !(aes_vid == CHA_VER_VID_AES_LP && aes_rn < 8);
	} else {
		u32 aesa, mdha;

		aesa = rd_reg32(&priv->ctrl->vreg.aesa);
		mdha = rd_reg32(&priv->ctrl->vreg.mdha);

		aes_vid = (aesa & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;
		md_vid = (mdha & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;

		des_inst = rd_reg32(&priv->ctrl->vreg.desa) & CHA_VER_NUM_MASK;
		aes_inst = aesa & CHA_VER_NUM_MASK;
		md_inst = mdha & CHA_VER_NUM_MASK;
		ccha_inst = rd_reg32(&priv->ctrl->vreg.ccha) & CHA_VER_NUM_MASK;
		ptha_inst = rd_reg32(&priv->ctrl->vreg.ptha) & CHA_VER_NUM_MASK;
		arc4_inst = rd_reg32(&priv->ctrl->vreg.afha) & CHA_VER_NUM_MASK;

		gcm_support = aesa & CHA_VER_MISC_AES_GCM;
	}
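
	/*
	 * At this point *_vid holds the version of each detected block and
	 * *_inst its instantiation count (zero meaning "not present"). On
	 * pre-Era-10 parts the ChaCha20 and Poly1305 accelerators are
	 * treated as absent, and GCM support is inferred from the AES block
	 * version/revision; e.g. an LP AES block older than revision 8
	 * leaves gcm_support false, so the GCM AEADs below are skipped.
	 */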

	/* If MD is present, limit digest size based on LP256 */
	if (md_inst && md_vid == CHA_VER_VID_MD_LP256)
		md_limit = SHA256_DIGEST_SIZE;

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		struct caam_skcipher_alg *t_alg = driver_algs + i;
		u32 alg_sel = t_alg->caam.class1_alg_type & OP_ALG_ALGSEL_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/* Skip ARC4 algorithms if not supported by device */
		if (!arc4_inst && alg_sel == OP_ALG_ALGSEL_ARC4)
			continue;

		/*
		 * Check support for AES modes not available
		 * on LP devices.
		 */
		if (aes_vid == CHA_VER_VID_AES_LP &&
		    (t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK) ==
		     OP_ALG_AAI_XTS)
			continue;

		caam_skcipher_alg_init(t_alg);

		err = crypto_register_skcipher(&t_alg->skcipher);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->skcipher.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

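	/*
	 * AEAD templates name both a class 1 (cipher) and a class 2
	 * (authentication) algorithm, so both selectors and the AAI (mode)
	 * bits are checked against the detected hardware before registering.
	 */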
	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;
		u32 c1_alg_sel = t_alg->caam.class1_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 c2_alg_sel = t_alg->caam.class2_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (c1_alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/* Skip CHACHA20 algorithms if not supported by device */
		if (c1_alg_sel == OP_ALG_ALGSEL_CHACHA20 && !ccha_inst)
			continue;

		/* Skip POLY1305 algorithms if not supported by device */
		if (c2_alg_sel == OP_ALG_ALGSEL_POLY1305 && !ptha_inst)
			continue;

		/* Skip GCM algorithms if not supported by device */
		if (c1_alg_sel == OP_ALG_ALGSEL_AES &&
		    alg_aai == OP_ALG_AAI_GCM && !gcm_support)
			continue;

		/*
		 * Skip algorithms requiring message digests
		 * if MD or MD size is not supported by device.
		 */
		if (is_mdha(c2_alg_sel) &&
		    (!md_inst || t_alg->aead.maxauthsize > md_limit))
			continue;

		caam_aead_alg_init(t_alg);

		err = crypto_register_aead(&t_alg->aead);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->aead.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

	if (registered)
		pr_info("caam algorithms registered in /proc/crypto\n");

	return err;
}