// SPDX-License-Identifier: GPL-2.0+
/*
 * caam - Freescale FSL CAAM support for crypto API
 *
 * Copyright 2008-2011 Freescale Semiconductor, Inc.
 * Copyright 2016-2019 NXP
 *
 * Based on talitos crypto API driver.
 *
 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
 *
 * ---------------                     ---------------
 * | JobDesc #1  |-------------------->|  ShareDesc  |
 * | *(packet 1) |                     |   (PDB)     |
 * ---------------      |------------->|  (hashKey)  |
 *       .              |              | (cipherKey) |
 *       .              |    |-------->| (operation) |
 * ---------------      |    |         ---------------
 * | JobDesc #2  |------|    |
 * | *(packet 2) |           |
 * ---------------           |
 *       .                   |
 *       .                   |
 * ---------------           |
 * | JobDesc #3 |------------
 * | *(packet 3) |
 * ---------------
 *
 * The SharedDesc never changes for a connection unless rekeyed, but
 * each packet will likely be in a different place. So all we need
 * to know to process the packet is where the input is, where the
 * output goes, and what context we want to process with. Context is
 * in the SharedDesc, packet references in the JobDesc.
 *
 * So, a job desc looks like:
 *
 * ---------------------
 * | Header            |
 * | ShareDesc Pointer |
 * | SEQ_OUT_PTR       |
 * | (output buffer)   |
 * | (output length)   |
 * | SEQ_IN_PTR        |
 * | (input buffer)    |
 * | (input length)    |
 * ---------------------
 */

#include "compat.h"

#include "regs.h"
#include "intern.h"
#include "desc_constr.h"
#include "jr.h"
#include "error.h"
#include "sg_sw_sec4.h"
#include "key_gen.h"
#include "caamalg_desc.h"

/*
 * crypto alg
 */
#define CAAM_CRA_PRIORITY		3000
/* max key is sum of AES_MAX_KEY_SIZE, max split key size */
#define CAAM_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + \
					 CTR_RFC3686_NONCE_SIZE + \
					 SHA512_DIGEST_SIZE * 2)

#define AEAD_DESC_JOB_IO_LEN		(DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
#define GCM_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 4)
#define AUTHENC_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 5)

#define CHACHAPOLY_DESC_JOB_IO_LEN	(AEAD_DESC_JOB_IO_LEN + CAAM_CMD_SZ * 6)

#define DESC_MAX_USED_BYTES		(CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN)
#define DESC_MAX_USED_LEN		(DESC_MAX_USED_BYTES / CAAM_CMD_SZ)

#ifdef DEBUG
/* for print_hex_dumps with line references */
#define debug(format, arg...) printk(format, arg)
#else
#define debug(format, arg...)
#endif
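
/*
 * Rough size budget, as an illustration only (values assume CAAM_CMD_SZ == 4
 * bytes per command word and a 64-word descriptor buffer, i.e.
 * CAAM_DESC_BYTES_MAX == 256; desc_constr.h holds the authoritative
 * definitions): a shared descriptor may use at most DESC_MAX_USED_BYTES ==
 * CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN bytes, so that the job descriptor
 * built at request time still fits in the same 64-word buffer. The
 * *_DESC_JOB_IO_LEN variants above simply reserve a few extra command words
 * for the algorithm-specific additions to the job descriptor.
 */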

struct caam_alg_entry {
	int class1_alg_type;
	int class2_alg_type;
	bool rfc3686;
	bool geniv;
};

struct caam_aead_alg {
	struct aead_alg aead;
	struct caam_alg_entry caam;
	bool registered;
};

struct caam_skcipher_alg {
	struct skcipher_alg skcipher;
	struct caam_alg_entry caam;
	bool registered;
};

/*
 * per-session context
 */
struct caam_ctx {
	u32 sh_desc_enc[DESC_MAX_USED_LEN];
	u32 sh_desc_dec[DESC_MAX_USED_LEN];
	u8 key[CAAM_MAX_KEY_SIZE];
	dma_addr_t sh_desc_enc_dma;
	dma_addr_t sh_desc_dec_dma;
	dma_addr_t key_dma;
	enum dma_data_direction dir;
	struct device *jrdev;
	struct alginfo adata;
	struct alginfo cdata;
	unsigned int authsize;
};
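
/*
 * Note on the per-session buffers above (descriptive only, not upstream
 * documentation): sh_desc_enc/sh_desc_dec hold the shared descriptors and
 * are rebuilt by the *_set_sh_desc() helpers on every setkey()/setauthsize(),
 * then made visible to the CAAM with dma_sync_single_for_device() on the
 * corresponding *_dma mapping. key[] holds the (split) key material that a
 * shared descriptor either inlines or references through key_dma.
 */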

static int aead_null_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
			ctx->adata.keylen_pad;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
	} else {
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;
	}

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize,
				    ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
	} else {
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;
	}

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize,
				    ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

static int aead_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	u32 ctx1_iv_off = 0;
	u32 *desc, *nonce = NULL;
	u32 inl_mask;
	unsigned int data_len[2];
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;

	if (!ctx->authsize)
		return 0;

	/* NULL encryption / decryption */
	if (!ctx->cdata.keylen)
		return aead_null_set_sh_desc(aead);

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 * CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
				ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
	}

	data_len[0] = ctx->adata.keylen_pad;
	data_len[1] = ctx->cdata.keylen;

	if (alg->caam.geniv)
		goto skip_enc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_ENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, is_rfc3686, nonce, ctx1_iv_off,
			       false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

skip_enc:
	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_DEC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, alg->caam.geniv, is_rfc3686,
			       nonce, ctx1_iv_off, false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	if (!alg->caam.geniv)
		goto skip_givenc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_GIVENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_givencrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize,
				  ctx->authsize, is_rfc3686, nonce,
				  ctx1_iv_off, false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

skip_givenc:
	return 0;
}
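
/*
 * Illustrative note (not from the original sources): desc_inline_query()
 * reports through inl_mask which of the keys described in data_len[] can be
 * inlined into the shared descriptor without overflowing the 64-word buffer.
 * With data_len[0] = split authentication key length and data_len[1] =
 * cipher key length, bit 0 set means the auth key may be inlined and bit 1
 * set means the cipher key may be inlined; a cleared bit means that key is
 * instead referenced by its DMA address, as done in aead_set_sh_desc() above.
 */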

static int aead_setauthsize(struct crypto_aead *authenc,
			    unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	aead_set_sh_desc(authenc);

	return 0;
}

static int gcm_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * AES GCM encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	gcm_set_sh_desc(authenc);

	return 0;
}

static int rfc4106_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4106 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

static int rfc4106_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	rfc4106_set_sh_desc(authenc);

	return 0;
}

static int rfc4543_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4543 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4543_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4543_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

static int rfc4543_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	rfc4543_set_sh_desc(authenc);

	return 0;
}

static int chachapoly_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, true, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, false, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

static int chachapoly_setauthsize(struct crypto_aead *aead,
				  unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);

	if (authsize != POLY1305_DIGEST_SIZE)
		return -EINVAL;

	ctx->authsize = authsize;
	return chachapoly_set_sh_desc(aead);
}

static int chachapoly_setkey(struct crypto_aead *aead, const u8 *key,
			     unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	unsigned int saltlen = CHACHAPOLY_IV_SIZE - ivsize;

	if (keylen != CHACHA_KEY_SIZE + saltlen) {
		crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	ctx->cdata.key_virt = key;
	ctx->cdata.keylen = keylen - saltlen;

	return chachapoly_set_sh_desc(aead);
}

static int aead_setkey(struct crypto_aead *aead,
		       const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	struct crypto_authenc_keys keys;
	int ret = 0;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

#ifdef DEBUG
	printk(KERN_ERR "keylen %d enckeylen %d authkeylen %d\n",
	       keys.authkeylen + keys.enckeylen, keys.enckeylen,
	       keys.authkeylen);
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	/*
	 * If DKP is supported, use it in the shared descriptor to generate
	 * the split key.
	 */
	if (ctrlpriv->era >= 6) {
		ctx->adata.keylen = keys.authkeylen;
		ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype &
						      OP_ALG_ALGSEL_MASK);

		if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE)
			goto badkey;

		memcpy(ctx->key, keys.authkey, keys.authkeylen);
		memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey,
		       keys.enckeylen);
		dma_sync_single_for_device(jrdev, ctx->key_dma,
					   ctx->adata.keylen_pad +
					   keys.enckeylen, ctx->dir);
		goto skip_split_key;
	}

	ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey,
			    keys.authkeylen, CAAM_MAX_KEY_SIZE -
			    keys.enckeylen);
	if (ret) {
		goto badkey;
	}

	/* postpend encryption key to auth split key */
	memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
				   keys.enckeylen, ctx->dir);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ctx.key@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
		       ctx->adata.keylen_pad + keys.enckeylen, 1);
#endif

skip_split_key:
	ctx->cdata.keylen = keys.enckeylen;
	memzero_explicit(&keys, sizeof(keys));
	return aead_set_sh_desc(aead);
badkey:
	crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
	memzero_explicit(&keys, sizeof(keys));
	return -EINVAL;
}
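
/*
 * Layout sketch for ctx->key after aead_setkey() (added for readability;
 * sizes depend on the algorithms in use):
 *
 *   ctx->key:  [ MDHA split authentication key, padded to keylen_pad ]
 *              [ cipher (encryption) key, keys.enckeylen bytes       ]
 *
 * For RFC3686 the last CTR_RFC3686_NONCE_SIZE bytes of the cipher key double
 * as the nonce picked up by aead_set_sh_desc(). The same buffer is reachable
 * by the CAAM through ctx->key_dma, which is why every update is followed by
 * dma_sync_single_for_device().
 */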

static int des3_aead_setkey(struct crypto_aead *aead, const u8 *key,
			    unsigned int keylen)
{
	struct crypto_authenc_keys keys;
	u32 flags;
	int err;

	err = crypto_authenc_extractkeys(&keys, key, keylen);
	if (unlikely(err))
		goto badkey;

	err = -EINVAL;
	if (keys.enckeylen != DES3_EDE_KEY_SIZE)
		goto badkey;

	flags = crypto_aead_get_flags(aead);
	err = __des3_verify_key(&flags, keys.enckey);
	if (unlikely(err)) {
		crypto_aead_set_flags(aead, flags);
		goto out;
	}

	err = aead_setkey(aead, key, keylen);

out:
	memzero_explicit(&keys, sizeof(keys));
	return err;

badkey:
	crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
	goto out;
}

static int gcm_setkey(struct crypto_aead *aead,
		      const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir);
	ctx->cdata.keylen = keylen;

	return gcm_set_sh_desc(aead);
}

static int rfc4106_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;

	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
				   ctx->dir);
	return rfc4106_set_sh_desc(aead);
}

static int rfc4543_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;

	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
				   ctx->dir);
	return rfc4543_set_sh_desc(aead);
}
static int skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
			   unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct caam_skcipher_alg *alg =
		container_of(crypto_skcipher_alg(skcipher), typeof(*alg),
			     skcipher);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
	u32 *desc;
	u32 ctx1_iv_off = 0;
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif
	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	| CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 *	| *key = {KEY, NONCE}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		keylen -= CTR_RFC3686_NONCE_SIZE;
	}

	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = key;
	ctx->cdata.key_inline = true;

	/* skcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_skcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686,
				   ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/* skcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_skcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686,
				   ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
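
/*
 * Quick reference (added note, not upstream documentation): the resulting
 * ctx1_iv_off is 0 for the plain block modes, 16 for AES-CTR (the IV lives
 * in the upper half of CONTEXT1), and 16 + CTR_RFC3686_NONCE_SIZE (i.e. 20,
 * assuming the usual 4-byte nonce) for RFC3686, where the nonce occupies
 * the bytes just before the IV and the counter follows the IV.
 */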

static int des_skcipher_setkey(struct crypto_skcipher *skcipher,
			       const u8 *key, unsigned int keylen)
{
	u32 tmp[DES3_EDE_EXPKEY_WORDS];
	struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher);

	if (keylen == DES3_EDE_KEY_SIZE &&
	    __des3_ede_setkey(tmp, &tfm->crt_flags, key, DES3_EDE_KEY_SIZE)) {
		return -EINVAL;
	}

	if (!des_ekey(tmp, key) && (crypto_skcipher_get_flags(skcipher) &
	    CRYPTO_TFM_REQ_FORBID_WEAK_KEYS)) {
		crypto_skcipher_set_flags(skcipher,
					  CRYPTO_TFM_RES_WEAK_KEY);
		return -EINVAL;
	}

	return skcipher_setkey(skcipher, key, keylen);
}

static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
			       unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;

	if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
		crypto_skcipher_set_flags(skcipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
		dev_err(jrdev, "key size mismatch\n");
		return -EINVAL;
	}

	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = key;
	ctx->cdata.key_inline = true;

	/* xts_skcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_xts_skcipher_encap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/* xts_skcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_xts_skcipher_decap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
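
/*
 * Added note for clarity: XTS consumes two AES keys of equal size, so the
 * only lengths accepted above are 2 * AES_MIN_KEY_SIZE (32 bytes, XTS-AES-128)
 * and 2 * AES_MAX_KEY_SIZE (64 bytes, XTS-AES-256); the whole blob is handed
 * to the descriptor constructor as a single key.
 */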

/*
 * aead_edesc - s/w-extended aead descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @mapped_src_nents: number of segments in input h/w link table
 * @mapped_dst_nents: number of segments in output h/w link table
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 */
struct aead_edesc {
	int src_nents;
	int dst_nents;
	int mapped_src_nents;
	int mapped_dst_nents;
	int sec4_sg_bytes;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[];
};

/*
 * skcipher_edesc - s/w-extended skcipher descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @mapped_src_nents: number of segments in input h/w link table
 * @mapped_dst_nents: number of segments in output h/w link table
 * @iv_dma: dma address of iv for checking continuity and link table
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 *	     and IV
 */
struct skcipher_edesc {
	int src_nents;
	int dst_nents;
	int mapped_src_nents;
	int mapped_dst_nents;
	dma_addr_t iv_dma;
	int sec4_sg_bytes;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[0];
};
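
/*
 * Memory layout sketch (added for orientation; see skcipher_edesc_alloc()
 * below for the authoritative construction): a single allocation holds
 *
 *   [ struct skcipher_edesc | hw_desc | sec4_sg link table | IV copy ]
 *
 * The IV is copied into the trailing, DMA-able area and becomes the first
 * link-table entry, followed by the source segments and, when src != dst,
 * the destination segments.
 */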

static void caam_unmap(struct device *dev, struct scatterlist *src,
		       struct scatterlist *dst, int src_nents,
		       int dst_nents,
		       dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
		       int sec4_sg_bytes)
{
	if (dst != src) {
		if (src_nents)
			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
		if (dst_nents)
			dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
	} else {
		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
	}

	if (iv_dma)
		dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE);
	if (sec4_sg_bytes)
		dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
				 DMA_TO_DEVICE);
}

static void aead_unmap(struct device *dev,
		       struct aead_edesc *edesc,
		       struct aead_request *req)
{
	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents, 0, 0,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}

static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,
			   struct skcipher_request *req)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	int ivsize = crypto_skcipher_ivsize(skcipher);

	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents,
		   edesc->iv_dma, ivsize,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}

static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
			      void *context)
{
	struct aead_request *req = context;
	struct aead_edesc *edesc;

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	kfree(edesc);

	aead_request_complete(req, err);
}

static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
			      void *context)
{
	struct aead_request *req = context;
	struct aead_edesc *edesc;

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	/*
	 * verify hw auth check passed else return -EBADMSG
	 */
	if ((err & JRSTA_CCBERR_ERRID_MASK) == JRSTA_CCBERR_ERRID_ICVCHK)
		err = -EBADMSG;

	kfree(edesc);

	aead_request_complete(req, err);
}

static void skcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
				  void *context)
{
	struct skcipher_request *req = context;
	struct skcipher_edesc *edesc;
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	int ivsize = crypto_skcipher_ivsize(skcipher);

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct skcipher_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->iv,
		       edesc->src_nents > 1 ? 100 : ivsize, 1);
#endif
	caam_dump_sg(KERN_ERR, "dst @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		     edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);

	skcipher_unmap(jrdev, edesc, req);

	/*
	 * The crypto API expects us to set the IV (req->iv) to the last
	 * ciphertext block. This is used e.g. by the CTS mode.
	 */
	if (ivsize)
		scatterwalk_map_and_copy(req->iv, req->dst, req->cryptlen -
					 ivsize, ivsize, 0);

	kfree(edesc);

	skcipher_request_complete(req, err);
}

static void skcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
				  void *context)
{
	struct skcipher_request *req = context;
	struct skcipher_edesc *edesc;
#ifdef DEBUG
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	int ivsize = crypto_skcipher_ivsize(skcipher);

	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct skcipher_edesc, hw_desc[0]);
	if (err)
		caam_jr_strstatus(jrdev, err);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1);
#endif
	caam_dump_sg(KERN_ERR, "dst @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		     edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);

	skcipher_unmap(jrdev, edesc, req);
	kfree(edesc);

	skcipher_request_complete(req, err);
}

/*
 * Fill in aead job descriptor
 */
static void init_aead_job(struct aead_request *req,
			  struct aead_edesc *edesc,
			  bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	int authsize = ctx->authsize;
	u32 *desc = edesc->hw_desc;
	u32 out_options, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;
	dma_addr_t ptr;
	u32 *sh_desc;

	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (all_contig) {
		src_dma = edesc->mapped_src_nents ? sg_dma_address(req->src) :
						    0;
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->mapped_src_nents;
		in_options = LDST_SGF;
	}

	append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
			  in_options);

	dst_dma = src_dma;
	out_options = in_options;

	if (unlikely(req->src != req->dst)) {
		if (!edesc->mapped_dst_nents) {
			dst_dma = 0;
		} else if (edesc->mapped_dst_nents == 1) {
			dst_dma = sg_dma_address(req->dst);
			out_options = 0;
		} else {
			dst_dma = edesc->sec4_sg_dma +
				  sec4_sg_index *
				  sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}

	if (encrypt)
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen + authsize,
				   out_options);
	else
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen - authsize,
				   out_options);
}
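
/*
 * Worked example (added for clarity, not part of the original comments):
 * for an authenc encryption request with assoclen = 16, cryptlen = 64 and
 * authsize = 16, the SEQ IN pointer above covers 16 + 64 = 80 bytes (AAD
 * plus plaintext) while the SEQ OUT pointer covers 16 + 64 + 16 = 96 bytes,
 * leaving room for the ICV the CAAM appends; on decryption the output is
 * shortened by authsize instead, since the ICV is consumed rather than
 * produced.
 */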

static void init_gcm_job(struct aead_request *req,
			 struct aead_edesc *edesc,
			 bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc = edesc->hw_desc;
	bool generic_gcm = (ivsize == GCM_AES_IV_SIZE);
	unsigned int last;

	init_aead_job(req, edesc, all_contig, encrypt);
	append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);

	/* BUG This should not be specific to generic GCM. */
	last = 0;
	if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
		last = FIFOLD_TYPE_LAST1;

	/* Read GCM IV */
	append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
		   FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | GCM_AES_IV_SIZE | last);
	/* Append Salt */
	if (!generic_gcm)
		append_data(desc, ctx->key + ctx->cdata.keylen, 4);
	/* Append IV */
	append_data(desc, req->iv, ivsize);
	/* End of blank commands */
}

static void init_chachapoly_job(struct aead_request *req,
				struct aead_edesc *edesc, bool all_contig,
				bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	unsigned int assoclen = req->assoclen;
	u32 *desc = edesc->hw_desc;
	u32 ctx_iv_off = 4;

	init_aead_job(req, edesc, all_contig, encrypt);

	if (ivsize != CHACHAPOLY_IV_SIZE) {
		/* IPsec specific: CONTEXT1[223:128] = {NONCE, IV} */
		ctx_iv_off += 4;

		/*
		 * The associated data comes already with the IV but we need
		 * to skip it when we authenticate or encrypt...
		 */
		assoclen -= ivsize;
	}

	append_math_add_imm_u32(desc, REG3, ZERO, IMM, assoclen);

	/*
	 * For IPsec load the IV further in the same register.
	 * For RFC7539 simply load the 12 bytes nonce in a single operation
	 */
	append_load_as_imm(desc, req->iv, ivsize, LDST_CLASS_1_CCB |
			   LDST_SRCDST_BYTE_CONTEXT |
			   ctx_iv_off << LDST_OFFSET_SHIFT);
}
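
/*
 * Added note (illustrative; the exact IV sizes come from the registered
 * algorithm definitions, not from this function): for the plain rfc7539
 * template the request IV is the full 12-byte nonce and is loaded at
 * context offset 4, while for the IPsec rfc7539esp variant the request IV
 * is shorter, the salt programmed at setkey time occupies the preceding
 * context bytes, so the load offset grows by 4 and the IV bytes that the
 * API also counts as associated data are subtracted from assoclen.
 */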

static void init_authenc_job(struct aead_request *req,
			     struct aead_edesc *edesc,
			     bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;
	u32 *desc = edesc->hw_desc;
	u32 ivoffset = 0;

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ivoffset = 16;

	/*
	 * RFC3686 specific:
	 * CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686)
		ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;

	init_aead_job(req, edesc, all_contig, encrypt);

	/*
	 * {REG3, DPOVRD} = assoclen, depending on whether MATH command supports
	 * having DPOVRD as destination.
	 */
	if (ctrlpriv->era < 3)
		append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
	else
		append_math_add_imm_u32(desc, DPOVRD, ZERO, IMM, req->assoclen);

	if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
		append_load_as_imm(desc, req->iv, ivsize,
				   LDST_CLASS_1_CCB |
				   LDST_SRCDST_BYTE_CONTEXT |
				   (ivoffset << LDST_OFFSET_SHIFT));
}

/*
 * Fill in skcipher job descriptor
 */
static void init_skcipher_job(struct skcipher_request *req,
			      struct skcipher_edesc *edesc,
			      const bool encrypt)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	int ivsize = crypto_skcipher_ivsize(skcipher);
	u32 *desc = edesc->hw_desc;
	u32 *sh_desc;
	u32 in_options = 0, out_options = 0;
	dma_addr_t src_dma, dst_dma, ptr;
	int len, sec4_sg_index = 0;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "presciv@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1);
	pr_err("asked=%d, cryptlen%d\n",
	       (int)edesc->src_nents > 1 ? 100 : req->cryptlen, req->cryptlen);
#endif
	caam_dump_sg(KERN_ERR, "src @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		     edesc->src_nents > 1 ? 100 : req->cryptlen, 1);

	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (ivsize || edesc->mapped_src_nents > 1) {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index = edesc->mapped_src_nents + !!ivsize;
		in_options = LDST_SGF;
	} else {
		src_dma = sg_dma_address(req->src);
	}

	append_seq_in_ptr(desc, src_dma, req->cryptlen + ivsize, in_options);

	if (likely(req->src == req->dst)) {
		dst_dma = src_dma + !!ivsize * sizeof(struct sec4_sg_entry);
		out_options = in_options;
	} else if (edesc->mapped_dst_nents == 1) {
		dst_dma = sg_dma_address(req->dst);
	} else {
		dst_dma = edesc->sec4_sg_dma + sec4_sg_index *
			  sizeof(struct sec4_sg_entry);
		out_options = LDST_SGF;
	}

	append_seq_out_ptr(desc, dst_dma, req->cryptlen, out_options);
}

/*
 * allocate and map the aead extended descriptor
 */
static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
					   int desc_bytes, bool *all_contig_ptr,
					   bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct aead_edesc *edesc;
	int sec4_sg_index, sec4_sg_len, sec4_sg_bytes;
	unsigned int authsize = ctx->authsize;

	if (unlikely(req->dst != req->src)) {
		src_nents = sg_nents_for_len(req->src, req->assoclen +
					     req->cryptlen);
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen);
			return ERR_PTR(src_nents);
		}

		dst_nents = sg_nents_for_len(req->dst, req->assoclen +
					     req->cryptlen +
					     (encrypt ? authsize :
							(-authsize)));
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : (-authsize)));
			return ERR_PTR(dst_nents);
		}
	} else {
		src_nents = sg_nents_for_len(req->src, req->assoclen +
					     req->cryptlen +
					     (encrypt ? authsize : 0));
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : 0));
			return ERR_PTR(src_nents);
		}
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		/* Cover also the case of null (zero length) input data */
		if (src_nents) {
			mapped_src_nents = dma_map_sg(jrdev, req->src,
						      src_nents, DMA_TO_DEVICE);
			if (unlikely(!mapped_src_nents)) {
				dev_err(jrdev, "unable to map source\n");
				return ERR_PTR(-ENOMEM);
			}
		} else {
			mapped_src_nents = 0;
		}

		/* Cover also the case of null (zero length) output data */
		if (dst_nents) {
			mapped_dst_nents = dma_map_sg(jrdev, req->dst,
						      dst_nents,
						      DMA_FROM_DEVICE);
			if (unlikely(!mapped_dst_nents)) {
				dev_err(jrdev, "unable to map destination\n");
				dma_unmap_sg(jrdev, req->src, src_nents,
					     DMA_TO_DEVICE);
				return ERR_PTR(-ENOMEM);
			}
		} else {
			mapped_dst_nents = 0;
		}
	}

	sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0;
	sec4_sg_len += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
	sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->mapped_src_nents = mapped_src_nents;
	edesc->mapped_dst_nents = mapped_dst_nents;
	edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
			 desc_bytes;
	*all_contig_ptr = !(mapped_src_nents > 1);

	sec4_sg_index = 0;
	if (mapped_src_nents > 1) {
		sg_to_sec4_sg_last(req->src, mapped_src_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
		sec4_sg_index += mapped_src_nents;
	}
	if (mapped_dst_nents > 1) {
		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
	}

	if (!sec4_sg_bytes)
		return edesc;

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}

	edesc->sec4_sg_bytes = sec4_sg_bytes;

	return edesc;
}
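
/*
 * Layout reminder (added comment, not from the original file): the single
 * kzalloc() above packs everything the request needs into one allocation,
 *
 *   [ struct aead_edesc | hw_desc (desc_bytes) | sec4_sg entries ]
 *
 * with source entries first and destination entries after them, which is
 * why init_aead_job() offsets dst_dma by sec4_sg_index link-table entries
 * when src and dst are different scatterlists.
 */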

static int gcm_encrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_gcm_job(req, edesc, all_contig, true);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

static int chachapoly_encrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret;

	edesc = aead_edesc_alloc(req, CHACHAPOLY_DESC_JOB_IO_LEN, &all_contig,
				 true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	desc = edesc->hw_desc;

	init_chachapoly_job(req, edesc, all_contig, true);
	print_hex_dump_debug("chachapoly jobdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);

	ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

static int chachapoly_decrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret;

	edesc = aead_edesc_alloc(req, CHACHAPOLY_DESC_JOB_IO_LEN, &all_contig,
				 false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	desc = edesc->hw_desc;

	init_chachapoly_job(req, edesc, all_contig, false);
	print_hex_dump_debug("chachapoly jobdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);

	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

static int ipsec_gcm_encrypt(struct aead_request *req)
{
	if (req->assoclen < 8)
		return -EINVAL;

	return gcm_encrypt(req);
}

static int aead_encrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
				 &all_contig, true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_authenc_job(req, edesc, all_contig, true);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

static int gcm_decrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor*/
	init_gcm_job(req, edesc, all_contig, false);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

static int ipsec_gcm_decrypt(struct aead_request *req)
{
	if (req->assoclen < 8)
		return -EINVAL;

	return gcm_decrypt(req);
}

static int aead_decrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	caam_dump_sg(KERN_ERR, "dec src@" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		     req->assoclen + req->cryptlen, 1);

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
				 &all_contig, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor*/
	init_authenc_job(req, edesc, all_contig, false);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
8e8ec596 1658
acdca31d 1659/*
5ca7badb 1660 * allocate and map the skcipher extended descriptor for skcipher
acdca31d 1661 */
5ca7badb
HG
1662static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req,
1663 int desc_bytes)
acdca31d 1664{
5ca7badb
HG
1665 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1666 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
acdca31d 1667 struct device *jrdev = ctx->jrdev;
42cfcafb 1668 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
acdca31d 1669 GFP_KERNEL : GFP_ATOMIC;
838e0a89 1670 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
5ca7badb 1671 struct skcipher_edesc *edesc;
eaed71a4 1672 dma_addr_t iv_dma = 0;
115957bb 1673 u8 *iv;
5ca7badb 1674 int ivsize = crypto_skcipher_ivsize(skcipher);
838e0a89 1675 int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;
acdca31d 1676
5ca7badb 1677 src_nents = sg_nents_for_len(req->src, req->cryptlen);
1678 if (unlikely(src_nents < 0)) {
1679 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
5ca7badb 1680 req->cryptlen);
1681 return ERR_PTR(src_nents);
1682 }
acdca31d 1683
fd144d83 1684 if (req->dst != req->src) {
5ca7badb 1685 dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
1686 if (unlikely(dst_nents < 0)) {
1687 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
5ca7badb 1688 req->cryptlen);
1689 return ERR_PTR(dst_nents);
1690 }
1691 }
1692
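 	/*
 	 * Map the source (and, for out-of-place requests, the destination)
 	 * scatterlist for DMA; in-place requests use a single bidirectional
 	 * mapping.
 	 */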
1693 if (likely(req->src == req->dst)) {
1694 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1695 DMA_BIDIRECTIONAL);
1696 if (unlikely(!mapped_src_nents)) {
1697 dev_err(jrdev, "unable to map source\n");
1698 return ERR_PTR(-ENOMEM);
1699 }
acdca31d 1700 } else {
1701 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1702 DMA_TO_DEVICE);
1703 if (unlikely(!mapped_src_nents)) {
1704 dev_err(jrdev, "unable to map source\n");
1705 return ERR_PTR(-ENOMEM);
1706 }
1707 mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
1708 DMA_FROM_DEVICE);
1709 if (unlikely(!mapped_dst_nents)) {
c73e36e8 1710 dev_err(jrdev, "unable to map destination\n");
fa0c92db 1711 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
1712 return ERR_PTR(-ENOMEM);
1713 }
1714 }
1715
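 	/*
 	 * The input S/G table needs one entry for the IV (if any) plus one
 	 * per mapped source segment; output entries, when the destination is
 	 * scattered, are appended starting at dst_sg_idx.
 	 */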
1716 if (!ivsize && mapped_src_nents == 1)
1717 sec4_sg_ents = 0; // no need for an input hw s/g table
1718 else
1719 sec4_sg_ents = mapped_src_nents + !!ivsize;
fa0c92db 1720 dst_sg_idx = sec4_sg_ents;
838e0a89 1721 sec4_sg_ents += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
fa0c92db 1722 sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);
acdca31d 1723
1724 /*
1725 * allocate space for base edesc and hw desc commands, link tables, IV
1726 */
1727 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes + ivsize,
dde20ae9 1728 GFP_DMA | flags);
1729 if (!edesc) {
1730 dev_err(jrdev, "could not allocate extended descriptor\n");
115957bb 1731 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
cf5448b5 1732 0, 0, 0);
1733 return ERR_PTR(-ENOMEM);
1734 }
1735
1736 edesc->src_nents = src_nents;
1737 edesc->dst_nents = dst_nents;
1738 edesc->mapped_src_nents = mapped_src_nents;
1739 edesc->mapped_dst_nents = mapped_dst_nents;
a299c837 1740 edesc->sec4_sg_bytes = sec4_sg_bytes;
1741 edesc->sec4_sg = (struct sec4_sg_entry *)((u8 *)edesc->hw_desc +
1742 desc_bytes);
acdca31d 1743
115957bb 1744 /* Make sure IV is located in a DMAable area */
1745 if (ivsize) {
1746 iv = (u8 *)edesc->hw_desc + desc_bytes + sec4_sg_bytes;
1747 memcpy(iv, req->iv, ivsize);
1748
1749 iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_TO_DEVICE);
1750 if (dma_mapping_error(jrdev, iv_dma)) {
1751 dev_err(jrdev, "unable to map IV\n");
1752 caam_unmap(jrdev, req->src, req->dst, src_nents,
1753 dst_nents, 0, 0, 0, 0);
1754 kfree(edesc);
1755 return ERR_PTR(-ENOMEM);
1756 }
115957bb 1757
eaed71a4 1758 dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
acdca31d 1759 }
1760 if (dst_sg_idx)
1761 sg_to_sec4_sg_last(req->src, mapped_src_nents, edesc->sec4_sg +
1762 !!ivsize, 0);
115957bb 1763
1764 if (mapped_dst_nents > 1) {
1765 sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
1766 edesc->sec4_sg + dst_sg_idx, 0);
1767 }
1768
1769 if (sec4_sg_bytes) {
1770 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
1771 sec4_sg_bytes,
1772 DMA_TO_DEVICE);
1773 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
1774 dev_err(jrdev, "unable to map S/G table\n");
1775 caam_unmap(jrdev, req->src, req->dst, src_nents,
1776 dst_nents, iv_dma, ivsize, 0, 0);
1777 kfree(edesc);
1778 return ERR_PTR(-ENOMEM);
1779 }
1780 }
1781
1782 edesc->iv_dma = iv_dma;
1783
1784#ifdef DEBUG
5ca7badb 1785 print_hex_dump(KERN_ERR, "skcipher sec4_sg@" __stringify(__LINE__)": ",
1786 DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
1787 sec4_sg_bytes, 1);
1788#endif
1789
1790 return edesc;
1791}
1792
5ca7badb 1793static int skcipher_encrypt(struct skcipher_request *req)
acdca31d 1794{
1795 struct skcipher_edesc *edesc;
1796 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1797 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
acdca31d 1798 struct device *jrdev = ctx->jrdev;
1799 u32 *desc;
1800 int ret = 0;
1801
1802 /* allocate extended descriptor */
5ca7badb 1803 edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
1804 if (IS_ERR(edesc))
1805 return PTR_ERR(edesc);
1806
 1807 /* Create and submit job descriptor */
5ca7badb 1808 init_skcipher_job(req, edesc, true);
acdca31d 1809#ifdef DEBUG
5ca7badb 1810 print_hex_dump(KERN_ERR, "skcipher jobdesc@" __stringify(__LINE__)": ",
1811 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1812 desc_bytes(edesc->hw_desc), 1);
1813#endif
1814 desc = edesc->hw_desc;
5ca7badb 1815 ret = caam_jr_enqueue(jrdev, desc, skcipher_encrypt_done, req);
1816
1817 if (!ret) {
1818 ret = -EINPROGRESS;
1819 } else {
5ca7badb 1820 skcipher_unmap(jrdev, edesc, req);
1821 kfree(edesc);
1822 }
1823
1824 return ret;
1825}
1826
5ca7badb 1827static int skcipher_decrypt(struct skcipher_request *req)
acdca31d 1828{
1829 struct skcipher_edesc *edesc;
1830 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1831 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
1832 int ivsize = crypto_skcipher_ivsize(skcipher);
acdca31d 1833 struct device *jrdev = ctx->jrdev;
1834 u32 *desc;
1835 int ret = 0;
1836
1837 /* allocate extended descriptor */
5ca7badb 1838 edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
1839 if (IS_ERR(edesc))
1840 return PTR_ERR(edesc);
1841
115957bb 1842 /*
5ca7badb 1843 * The crypto API expects us to set the IV (req->iv) to the last
1844 * ciphertext block.
1845 */
1846 if (ivsize)
1847 scatterwalk_map_and_copy(req->iv, req->src, req->cryptlen -
1848 ivsize, ivsize, 0);
115957bb 1849
acdca31d 1850 /* Create and submit job descriptor */
5ca7badb 1851 init_skcipher_job(req, edesc, false);
1852 desc = edesc->hw_desc;
1853#ifdef DEBUG
5ca7badb 1854 print_hex_dump(KERN_ERR, "skcipher jobdesc@" __stringify(__LINE__)": ",
1855 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1856 desc_bytes(edesc->hw_desc), 1);
1857#endif
1858
5ca7badb 1859 ret = caam_jr_enqueue(jrdev, desc, skcipher_decrypt_done, req);
1860 if (!ret) {
1861 ret = -EINPROGRESS;
1862 } else {
5ca7badb 1863 skcipher_unmap(jrdev, edesc, req);
1864 kfree(edesc);
1865 }
1866
1867 return ret;
1868}
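/*
 * Illustrative sketch only (not part of the driver): a kernel caller reaches
 * the skcipher implementations registered below through the generic crypto
 * API. The function name below is hypothetical, error paths are trimmed, and
 * <crypto/skcipher.h> is assumed to be available.
 */
#if 0
static int example_cbc_aes_encrypt(struct scatterlist *sg, unsigned int len,
				   const u8 *key, unsigned int keylen, u8 *iv)
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	/* "cbc-aes-caam" is selected automatically when it has top priority */
	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_skcipher_setkey(tfm, key, keylen);
	if (err)
		goto out_free_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	/* in-place encryption; completion is signalled via crypto_req_done */
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, sg, sg, len, iv);
	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
out_free_tfm:
	crypto_free_skcipher(tfm);
	return err;
}
#endif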
1869
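/*
 * skcipher algorithm templates; at init time an entry is only registered when
 * the accelerator it needs (DES, AES, ARC4) is instantiated on the device.
 */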
5ca7badb 1870static struct caam_skcipher_alg driver_algs[] = {
ae4a825f 1871 {
5ca7badb
HG
1872 .skcipher = {
1873 .base = {
1874 .cra_name = "cbc(aes)",
1875 .cra_driver_name = "cbc-aes-caam",
1876 .cra_blocksize = AES_BLOCK_SIZE,
1877 },
1878 .setkey = skcipher_setkey,
1879 .encrypt = skcipher_encrypt,
1880 .decrypt = skcipher_decrypt,
479bcc7c
HX
1881 .min_keysize = AES_MIN_KEY_SIZE,
1882 .max_keysize = AES_MAX_KEY_SIZE,
1883 .ivsize = AES_BLOCK_SIZE,
5ca7badb
HG
1884 },
1885 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
479bcc7c
HX
1886 },
1887 {
5ca7badb
HG
1888 .skcipher = {
1889 .base = {
1890 .cra_name = "cbc(des3_ede)",
1891 .cra_driver_name = "cbc-3des-caam",
1892 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1893 },
cf64e495 1894 .setkey = des_skcipher_setkey,
5ca7badb
HG
1895 .encrypt = skcipher_encrypt,
1896 .decrypt = skcipher_decrypt,
479bcc7c
HX
1897 .min_keysize = DES3_EDE_KEY_SIZE,
1898 .max_keysize = DES3_EDE_KEY_SIZE,
1899 .ivsize = DES3_EDE_BLOCK_SIZE,
5ca7badb
HG
1900 },
1901 .caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
479bcc7c
HX
1902 },
1903 {
5ca7badb
HG
1904 .skcipher = {
1905 .base = {
1906 .cra_name = "cbc(des)",
1907 .cra_driver_name = "cbc-des-caam",
1908 .cra_blocksize = DES_BLOCK_SIZE,
1909 },
cf64e495 1910 .setkey = des_skcipher_setkey,
5ca7badb
HG
1911 .encrypt = skcipher_encrypt,
1912 .decrypt = skcipher_decrypt,
479bcc7c
HX
1913 .min_keysize = DES_KEY_SIZE,
1914 .max_keysize = DES_KEY_SIZE,
1915 .ivsize = DES_BLOCK_SIZE,
5ca7badb
HG
1916 },
1917 .caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
479bcc7c
HX
1918 },
1919 {
5ca7badb
HG
1920 .skcipher = {
1921 .base = {
1922 .cra_name = "ctr(aes)",
1923 .cra_driver_name = "ctr-aes-caam",
1924 .cra_blocksize = 1,
1925 },
1926 .setkey = skcipher_setkey,
1927 .encrypt = skcipher_encrypt,
1928 .decrypt = skcipher_decrypt,
479bcc7c
HX
1929 .min_keysize = AES_MIN_KEY_SIZE,
1930 .max_keysize = AES_MAX_KEY_SIZE,
1931 .ivsize = AES_BLOCK_SIZE,
5ca7badb
HG
1932 .chunksize = AES_BLOCK_SIZE,
1933 },
1934 .caam.class1_alg_type = OP_ALG_ALGSEL_AES |
1935 OP_ALG_AAI_CTR_MOD128,
479bcc7c
HX
1936 },
1937 {
5ca7badb
HG
1938 .skcipher = {
1939 .base = {
1940 .cra_name = "rfc3686(ctr(aes))",
1941 .cra_driver_name = "rfc3686-ctr-aes-caam",
1942 .cra_blocksize = 1,
1943 },
1944 .setkey = skcipher_setkey,
1945 .encrypt = skcipher_encrypt,
1946 .decrypt = skcipher_decrypt,
479bcc7c
HX
1947 .min_keysize = AES_MIN_KEY_SIZE +
1948 CTR_RFC3686_NONCE_SIZE,
1949 .max_keysize = AES_MAX_KEY_SIZE +
1950 CTR_RFC3686_NONCE_SIZE,
1951 .ivsize = CTR_RFC3686_IV_SIZE,
5ca7badb
HG
1952 .chunksize = AES_BLOCK_SIZE,
1953 },
1954 .caam = {
1955 .class1_alg_type = OP_ALG_ALGSEL_AES |
1956 OP_ALG_AAI_CTR_MOD128,
1957 .rfc3686 = true,
1958 },
c6415a60
CV
1959 },
1960 {
5ca7badb
HG
1961 .skcipher = {
1962 .base = {
1963 .cra_name = "xts(aes)",
1964 .cra_driver_name = "xts-aes-caam",
1965 .cra_blocksize = AES_BLOCK_SIZE,
1966 },
1967 .setkey = xts_skcipher_setkey,
1968 .encrypt = skcipher_encrypt,
1969 .decrypt = skcipher_decrypt,
c6415a60
CV
1970 .min_keysize = 2 * AES_MIN_KEY_SIZE,
1971 .max_keysize = 2 * AES_MAX_KEY_SIZE,
1972 .ivsize = AES_BLOCK_SIZE,
5ca7badb
HG
1973 },
1974 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
c6415a60 1975 },
eaed71a4
IP
1976 {
1977 .skcipher = {
1978 .base = {
1979 .cra_name = "ecb(des)",
1980 .cra_driver_name = "ecb-des-caam",
1981 .cra_blocksize = DES_BLOCK_SIZE,
1982 },
1983 .setkey = des_skcipher_setkey,
1984 .encrypt = skcipher_encrypt,
1985 .decrypt = skcipher_decrypt,
1986 .min_keysize = DES_KEY_SIZE,
1987 .max_keysize = DES_KEY_SIZE,
1988 },
1989 .caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_ECB,
1990 },
1991 {
1992 .skcipher = {
1993 .base = {
1994 .cra_name = "ecb(aes)",
1995 .cra_driver_name = "ecb-aes-caam",
1996 .cra_blocksize = AES_BLOCK_SIZE,
1997 },
1998 .setkey = skcipher_setkey,
1999 .encrypt = skcipher_encrypt,
2000 .decrypt = skcipher_decrypt,
2001 .min_keysize = AES_MIN_KEY_SIZE,
2002 .max_keysize = AES_MAX_KEY_SIZE,
2003 },
2004 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_ECB,
2005 },
2006 {
2007 .skcipher = {
2008 .base = {
2009 .cra_name = "ecb(des3_ede)",
2010 .cra_driver_name = "ecb-des3-caam",
2011 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2012 },
2013 .setkey = des_skcipher_setkey,
2014 .encrypt = skcipher_encrypt,
2015 .decrypt = skcipher_decrypt,
2016 .min_keysize = DES3_EDE_KEY_SIZE,
2017 .max_keysize = DES3_EDE_KEY_SIZE,
2018 },
2019 .caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_ECB,
2020 },
2021 {
2022 .skcipher = {
2023 .base = {
2024 .cra_name = "ecb(arc4)",
2025 .cra_driver_name = "ecb-arc4-caam",
2026 .cra_blocksize = ARC4_BLOCK_SIZE,
2027 },
2028 .setkey = skcipher_setkey,
2029 .encrypt = skcipher_encrypt,
2030 .decrypt = skcipher_decrypt,
2031 .min_keysize = ARC4_MIN_KEY_SIZE,
2032 .max_keysize = ARC4_MAX_KEY_SIZE,
2033 },
2034 .caam.class1_alg_type = OP_ALG_ALGSEL_ARC4 | OP_ALG_AAI_ECB,
2035 },
479bcc7c
HX
2036};
2037
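/*
 * AEAD algorithm templates: GCM variants, ChaCha20-Poly1305 and authenc()
 * cipher+HMAC combinations, optionally wrapped with an IV generator
 * (echainiv/seqiv) or RFC3686 counter mode.
 */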
2038static struct caam_aead_alg driver_aeads[] = {
2039 {
2040 .aead = {
2041 .base = {
2042 .cra_name = "rfc4106(gcm(aes))",
2043 .cra_driver_name = "rfc4106-gcm-aes-caam",
2044 .cra_blocksize = 1,
2045 },
2046 .setkey = rfc4106_setkey,
2047 .setauthsize = rfc4106_setauthsize,
2048 .encrypt = ipsec_gcm_encrypt,
2049 .decrypt = ipsec_gcm_decrypt,
7545e166 2050 .ivsize = GCM_RFC4106_IV_SIZE,
479bcc7c
HX
2051 .maxauthsize = AES_BLOCK_SIZE,
2052 },
2053 .caam = {
2054 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2055 },
2056 },
2057 {
2058 .aead = {
2059 .base = {
2060 .cra_name = "rfc4543(gcm(aes))",
2061 .cra_driver_name = "rfc4543-gcm-aes-caam",
2062 .cra_blocksize = 1,
2063 },
2064 .setkey = rfc4543_setkey,
2065 .setauthsize = rfc4543_setauthsize,
2066 .encrypt = ipsec_gcm_encrypt,
2067 .decrypt = ipsec_gcm_decrypt,
7545e166 2068 .ivsize = GCM_RFC4543_IV_SIZE,
479bcc7c
HX
2069 .maxauthsize = AES_BLOCK_SIZE,
2070 },
2071 .caam = {
2072 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2073 },
2074 },
2075 /* Galois Counter Mode */
2076 {
2077 .aead = {
2078 .base = {
2079 .cra_name = "gcm(aes)",
2080 .cra_driver_name = "gcm-aes-caam",
2081 .cra_blocksize = 1,
2082 },
2083 .setkey = gcm_setkey,
2084 .setauthsize = gcm_setauthsize,
2085 .encrypt = gcm_encrypt,
2086 .decrypt = gcm_decrypt,
7545e166 2087 .ivsize = GCM_AES_IV_SIZE,
479bcc7c
HX
2088 .maxauthsize = AES_BLOCK_SIZE,
2089 },
2090 .caam = {
2091 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2092 },
2093 },
2094 /* single-pass ipsec_esp descriptor */
2095 {
2096 .aead = {
2097 .base = {
2098 .cra_name = "authenc(hmac(md5),"
2099 "ecb(cipher_null))",
2100 .cra_driver_name = "authenc-hmac-md5-"
2101 "ecb-cipher_null-caam",
2102 .cra_blocksize = NULL_BLOCK_SIZE,
2103 },
2104 .setkey = aead_setkey,
2105 .setauthsize = aead_setauthsize,
2106 .encrypt = aead_encrypt,
2107 .decrypt = aead_decrypt,
ae4a825f 2108 .ivsize = NULL_IV_SIZE,
479bcc7c
HX
2109 .maxauthsize = MD5_DIGEST_SIZE,
2110 },
2111 .caam = {
2112 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2113 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2114 },
2115 },
2116 {
2117 .aead = {
2118 .base = {
2119 .cra_name = "authenc(hmac(sha1),"
2120 "ecb(cipher_null))",
2121 .cra_driver_name = "authenc-hmac-sha1-"
2122 "ecb-cipher_null-caam",
2123 .cra_blocksize = NULL_BLOCK_SIZE,
ae4a825f 2124 },
479bcc7c
HX
2125 .setkey = aead_setkey,
2126 .setauthsize = aead_setauthsize,
2127 .encrypt = aead_encrypt,
2128 .decrypt = aead_decrypt,
2129 .ivsize = NULL_IV_SIZE,
2130 .maxauthsize = SHA1_DIGEST_SIZE,
2131 },
2132 .caam = {
2133 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2134 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2135 },
ae4a825f
HG
2136 },
2137 {
479bcc7c
HX
2138 .aead = {
2139 .base = {
2140 .cra_name = "authenc(hmac(sha224),"
2141 "ecb(cipher_null))",
2142 .cra_driver_name = "authenc-hmac-sha224-"
2143 "ecb-cipher_null-caam",
2144 .cra_blocksize = NULL_BLOCK_SIZE,
2145 },
ae4a825f
HG
2146 .setkey = aead_setkey,
2147 .setauthsize = aead_setauthsize,
479bcc7c
HX
2148 .encrypt = aead_encrypt,
2149 .decrypt = aead_decrypt,
ae4a825f
HG
2150 .ivsize = NULL_IV_SIZE,
2151 .maxauthsize = SHA224_DIGEST_SIZE,
479bcc7c
HX
2152 },
2153 .caam = {
2154 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2155 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2156 },
ae4a825f
HG
2157 },
2158 {
479bcc7c
HX
2159 .aead = {
2160 .base = {
2161 .cra_name = "authenc(hmac(sha256),"
2162 "ecb(cipher_null))",
2163 .cra_driver_name = "authenc-hmac-sha256-"
2164 "ecb-cipher_null-caam",
2165 .cra_blocksize = NULL_BLOCK_SIZE,
2166 },
ae4a825f
HG
2167 .setkey = aead_setkey,
2168 .setauthsize = aead_setauthsize,
479bcc7c
HX
2169 .encrypt = aead_encrypt,
2170 .decrypt = aead_decrypt,
ae4a825f
HG
2171 .ivsize = NULL_IV_SIZE,
2172 .maxauthsize = SHA256_DIGEST_SIZE,
479bcc7c
HX
2173 },
2174 .caam = {
2175 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2176 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2177 },
ae4a825f
HG
2178 },
2179 {
479bcc7c
HX
2180 .aead = {
2181 .base = {
2182 .cra_name = "authenc(hmac(sha384),"
2183 "ecb(cipher_null))",
2184 .cra_driver_name = "authenc-hmac-sha384-"
2185 "ecb-cipher_null-caam",
2186 .cra_blocksize = NULL_BLOCK_SIZE,
2187 },
ae4a825f
HG
2188 .setkey = aead_setkey,
2189 .setauthsize = aead_setauthsize,
479bcc7c
HX
2190 .encrypt = aead_encrypt,
2191 .decrypt = aead_decrypt,
ae4a825f
HG
2192 .ivsize = NULL_IV_SIZE,
2193 .maxauthsize = SHA384_DIGEST_SIZE,
479bcc7c
HX
2194 },
2195 .caam = {
2196 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2197 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2198 },
ae4a825f
HG
2199 },
2200 {
479bcc7c
HX
2201 .aead = {
2202 .base = {
2203 .cra_name = "authenc(hmac(sha512),"
2204 "ecb(cipher_null))",
2205 .cra_driver_name = "authenc-hmac-sha512-"
2206 "ecb-cipher_null-caam",
2207 .cra_blocksize = NULL_BLOCK_SIZE,
2208 },
ae4a825f
HG
2209 .setkey = aead_setkey,
2210 .setauthsize = aead_setauthsize,
479bcc7c
HX
2211 .encrypt = aead_encrypt,
2212 .decrypt = aead_decrypt,
ae4a825f
HG
2213 .ivsize = NULL_IV_SIZE,
2214 .maxauthsize = SHA512_DIGEST_SIZE,
479bcc7c
HX
2215 },
2216 .caam = {
2217 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2218 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2219 },
2220 },
2221 {
2222 .aead = {
2223 .base = {
2224 .cra_name = "authenc(hmac(md5),cbc(aes))",
2225 .cra_driver_name = "authenc-hmac-md5-"
2226 "cbc-aes-caam",
2227 .cra_blocksize = AES_BLOCK_SIZE,
ae4a825f 2228 },
479bcc7c
HX
2229 .setkey = aead_setkey,
2230 .setauthsize = aead_setauthsize,
2231 .encrypt = aead_encrypt,
2232 .decrypt = aead_decrypt,
2233 .ivsize = AES_BLOCK_SIZE,
2234 .maxauthsize = MD5_DIGEST_SIZE,
2235 },
2236 .caam = {
2237 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2238 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2239 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2240 },
ae4a825f 2241 },
8b4d43a4 2242 {
479bcc7c
HX
2243 .aead = {
2244 .base = {
2245 .cra_name = "echainiv(authenc(hmac(md5),"
2246 "cbc(aes)))",
2247 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2248 "cbc-aes-caam",
2249 .cra_blocksize = AES_BLOCK_SIZE,
2250 },
8b4d43a4
KP
2251 .setkey = aead_setkey,
2252 .setauthsize = aead_setauthsize,
479bcc7c 2253 .encrypt = aead_encrypt,
8b18e235 2254 .decrypt = aead_decrypt,
8b4d43a4
KP
2255 .ivsize = AES_BLOCK_SIZE,
2256 .maxauthsize = MD5_DIGEST_SIZE,
479bcc7c
HX
2257 },
2258 .caam = {
2259 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2260 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2261 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2262 .geniv = true,
2263 },
2264 },
2265 {
2266 .aead = {
2267 .base = {
2268 .cra_name = "authenc(hmac(sha1),cbc(aes))",
2269 .cra_driver_name = "authenc-hmac-sha1-"
2270 "cbc-aes-caam",
2271 .cra_blocksize = AES_BLOCK_SIZE,
8b4d43a4 2272 },
479bcc7c
HX
2273 .setkey = aead_setkey,
2274 .setauthsize = aead_setauthsize,
2275 .encrypt = aead_encrypt,
2276 .decrypt = aead_decrypt,
2277 .ivsize = AES_BLOCK_SIZE,
2278 .maxauthsize = SHA1_DIGEST_SIZE,
2279 },
2280 .caam = {
2281 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2282 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2283 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2284 },
8b4d43a4 2285 },
8e8ec596 2286 {
479bcc7c
HX
2287 .aead = {
2288 .base = {
2289 .cra_name = "echainiv(authenc(hmac(sha1),"
2290 "cbc(aes)))",
2291 .cra_driver_name = "echainiv-authenc-"
2292 "hmac-sha1-cbc-aes-caam",
2293 .cra_blocksize = AES_BLOCK_SIZE,
2294 },
0e479300
YK
2295 .setkey = aead_setkey,
2296 .setauthsize = aead_setauthsize,
479bcc7c 2297 .encrypt = aead_encrypt,
8b18e235 2298 .decrypt = aead_decrypt,
8e8ec596
KP
2299 .ivsize = AES_BLOCK_SIZE,
2300 .maxauthsize = SHA1_DIGEST_SIZE,
479bcc7c
HX
2301 },
2302 .caam = {
2303 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2304 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2305 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2306 .geniv = true,
2307 },
2308 },
2309 {
2310 .aead = {
2311 .base = {
2312 .cra_name = "authenc(hmac(sha224),cbc(aes))",
2313 .cra_driver_name = "authenc-hmac-sha224-"
2314 "cbc-aes-caam",
2315 .cra_blocksize = AES_BLOCK_SIZE,
8e8ec596 2316 },
479bcc7c
HX
2317 .setkey = aead_setkey,
2318 .setauthsize = aead_setauthsize,
2319 .encrypt = aead_encrypt,
2320 .decrypt = aead_decrypt,
2321 .ivsize = AES_BLOCK_SIZE,
2322 .maxauthsize = SHA224_DIGEST_SIZE,
2323 },
2324 .caam = {
2325 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2326 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2327 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2328 },
8e8ec596 2329 },
e863f9cc 2330 {
479bcc7c
HX
2331 .aead = {
2332 .base = {
2333 .cra_name = "echainiv(authenc(hmac(sha224),"
2334 "cbc(aes)))",
2335 .cra_driver_name = "echainiv-authenc-"
2336 "hmac-sha224-cbc-aes-caam",
2337 .cra_blocksize = AES_BLOCK_SIZE,
2338 },
e863f9cc
HA
2339 .setkey = aead_setkey,
2340 .setauthsize = aead_setauthsize,
479bcc7c 2341 .encrypt = aead_encrypt,
8b18e235 2342 .decrypt = aead_decrypt,
e863f9cc
HA
2343 .ivsize = AES_BLOCK_SIZE,
2344 .maxauthsize = SHA224_DIGEST_SIZE,
479bcc7c
HX
2345 },
2346 .caam = {
2347 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2348 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2349 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2350 .geniv = true,
2351 },
2352 },
2353 {
2354 .aead = {
2355 .base = {
2356 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2357 .cra_driver_name = "authenc-hmac-sha256-"
2358 "cbc-aes-caam",
2359 .cra_blocksize = AES_BLOCK_SIZE,
e863f9cc 2360 },
479bcc7c
HX
2361 .setkey = aead_setkey,
2362 .setauthsize = aead_setauthsize,
2363 .encrypt = aead_encrypt,
2364 .decrypt = aead_decrypt,
2365 .ivsize = AES_BLOCK_SIZE,
2366 .maxauthsize = SHA256_DIGEST_SIZE,
2367 },
2368 .caam = {
2369 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2370 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2371 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2372 },
e863f9cc 2373 },
8e8ec596 2374 {
479bcc7c
HX
2375 .aead = {
2376 .base = {
2377 .cra_name = "echainiv(authenc(hmac(sha256),"
2378 "cbc(aes)))",
2379 .cra_driver_name = "echainiv-authenc-"
2380 "hmac-sha256-cbc-aes-caam",
2381 .cra_blocksize = AES_BLOCK_SIZE,
2382 },
2383 .setkey = aead_setkey,
2384 .setauthsize = aead_setauthsize,
2385 .encrypt = aead_encrypt,
8b18e235 2386 .decrypt = aead_decrypt,
479bcc7c
HX
2387 .ivsize = AES_BLOCK_SIZE,
2388 .maxauthsize = SHA256_DIGEST_SIZE,
2389 },
2390 .caam = {
2391 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2392 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2393 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2394 .geniv = true,
2395 },
2396 },
2397 {
2398 .aead = {
2399 .base = {
2400 .cra_name = "authenc(hmac(sha384),cbc(aes))",
2401 .cra_driver_name = "authenc-hmac-sha384-"
2402 "cbc-aes-caam",
2403 .cra_blocksize = AES_BLOCK_SIZE,
2404 },
2405 .setkey = aead_setkey,
2406 .setauthsize = aead_setauthsize,
2407 .encrypt = aead_encrypt,
2408 .decrypt = aead_decrypt,
2409 .ivsize = AES_BLOCK_SIZE,
2410 .maxauthsize = SHA384_DIGEST_SIZE,
2411 },
2412 .caam = {
2413 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2414 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2415 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2416 },
2417 },
2418 {
2419 .aead = {
2420 .base = {
2421 .cra_name = "echainiv(authenc(hmac(sha384),"
2422 "cbc(aes)))",
2423 .cra_driver_name = "echainiv-authenc-"
2424 "hmac-sha384-cbc-aes-caam",
2425 .cra_blocksize = AES_BLOCK_SIZE,
2426 },
0e479300
YK
2427 .setkey = aead_setkey,
2428 .setauthsize = aead_setauthsize,
479bcc7c 2429 .encrypt = aead_encrypt,
8b18e235 2430 .decrypt = aead_decrypt,
8e8ec596 2431 .ivsize = AES_BLOCK_SIZE,
479bcc7c
HX
2432 .maxauthsize = SHA384_DIGEST_SIZE,
2433 },
2434 .caam = {
2435 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2436 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2437 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2438 .geniv = true,
2439 },
8e8ec596 2440 },
e863f9cc 2441 {
479bcc7c
HX
2442 .aead = {
2443 .base = {
2444 .cra_name = "authenc(hmac(sha512),cbc(aes))",
2445 .cra_driver_name = "authenc-hmac-sha512-"
2446 "cbc-aes-caam",
2447 .cra_blocksize = AES_BLOCK_SIZE,
2448 },
e863f9cc
HA
2449 .setkey = aead_setkey,
2450 .setauthsize = aead_setauthsize,
479bcc7c
HX
2451 .encrypt = aead_encrypt,
2452 .decrypt = aead_decrypt,
e863f9cc 2453 .ivsize = AES_BLOCK_SIZE,
479bcc7c
HX
2454 .maxauthsize = SHA512_DIGEST_SIZE,
2455 },
2456 .caam = {
2457 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2458 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2459 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2460 },
e863f9cc 2461 },
4427b1b4 2462 {
479bcc7c
HX
2463 .aead = {
2464 .base = {
2465 .cra_name = "echainiv(authenc(hmac(sha512),"
2466 "cbc(aes)))",
2467 .cra_driver_name = "echainiv-authenc-"
2468 "hmac-sha512-cbc-aes-caam",
2469 .cra_blocksize = AES_BLOCK_SIZE,
2470 },
0e479300
YK
2471 .setkey = aead_setkey,
2472 .setauthsize = aead_setauthsize,
479bcc7c 2473 .encrypt = aead_encrypt,
8b18e235 2474 .decrypt = aead_decrypt,
4427b1b4
KP
2475 .ivsize = AES_BLOCK_SIZE,
2476 .maxauthsize = SHA512_DIGEST_SIZE,
479bcc7c
HX
2477 },
2478 .caam = {
2479 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2480 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2481 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2482 .geniv = true,
2483 },
2484 },
2485 {
2486 .aead = {
2487 .base = {
2488 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2489 .cra_driver_name = "authenc-hmac-md5-"
2490 "cbc-des3_ede-caam",
2491 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
4427b1b4 2492 },
1b52c409 2493 .setkey = des3_aead_setkey,
479bcc7c
HX
2494 .setauthsize = aead_setauthsize,
2495 .encrypt = aead_encrypt,
2496 .decrypt = aead_decrypt,
2497 .ivsize = DES3_EDE_BLOCK_SIZE,
2498 .maxauthsize = MD5_DIGEST_SIZE,
2499 },
2500 .caam = {
2501 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2502 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2503 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2504 }
4427b1b4 2505 },
8b4d43a4 2506 {
479bcc7c
HX
2507 .aead = {
2508 .base = {
2509 .cra_name = "echainiv(authenc(hmac(md5),"
2510 "cbc(des3_ede)))",
2511 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2512 "cbc-des3_ede-caam",
2513 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2514 },
1b52c409 2515 .setkey = des3_aead_setkey,
8b4d43a4 2516 .setauthsize = aead_setauthsize,
479bcc7c 2517 .encrypt = aead_encrypt,
8b18e235 2518 .decrypt = aead_decrypt,
8b4d43a4
KP
2519 .ivsize = DES3_EDE_BLOCK_SIZE,
2520 .maxauthsize = MD5_DIGEST_SIZE,
479bcc7c
HX
2521 },
2522 .caam = {
2523 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2524 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2525 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2526 .geniv = true,
2527 }
2528 },
2529 {
2530 .aead = {
2531 .base = {
2532 .cra_name = "authenc(hmac(sha1),"
2533 "cbc(des3_ede))",
2534 .cra_driver_name = "authenc-hmac-sha1-"
2535 "cbc-des3_ede-caam",
2536 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
8b4d43a4 2537 },
1b52c409 2538 .setkey = des3_aead_setkey,
479bcc7c
HX
2539 .setauthsize = aead_setauthsize,
2540 .encrypt = aead_encrypt,
2541 .decrypt = aead_decrypt,
2542 .ivsize = DES3_EDE_BLOCK_SIZE,
2543 .maxauthsize = SHA1_DIGEST_SIZE,
2544 },
2545 .caam = {
2546 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2547 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2548 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2549 },
8b4d43a4 2550 },
8e8ec596 2551 {
479bcc7c
HX
2552 .aead = {
2553 .base = {
2554 .cra_name = "echainiv(authenc(hmac(sha1),"
2555 "cbc(des3_ede)))",
2556 .cra_driver_name = "echainiv-authenc-"
2557 "hmac-sha1-"
2558 "cbc-des3_ede-caam",
2559 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2560 },
1b52c409 2561 .setkey = des3_aead_setkey,
0e479300 2562 .setauthsize = aead_setauthsize,
479bcc7c 2563 .encrypt = aead_encrypt,
8b18e235 2564 .decrypt = aead_decrypt,
8e8ec596
KP
2565 .ivsize = DES3_EDE_BLOCK_SIZE,
2566 .maxauthsize = SHA1_DIGEST_SIZE,
479bcc7c
HX
2567 },
2568 .caam = {
2569 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2570 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2571 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2572 .geniv = true,
2573 },
2574 },
2575 {
2576 .aead = {
2577 .base = {
2578 .cra_name = "authenc(hmac(sha224),"
2579 "cbc(des3_ede))",
2580 .cra_driver_name = "authenc-hmac-sha224-"
2581 "cbc-des3_ede-caam",
2582 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
8e8ec596 2583 },
1b52c409 2584 .setkey = des3_aead_setkey,
479bcc7c
HX
2585 .setauthsize = aead_setauthsize,
2586 .encrypt = aead_encrypt,
2587 .decrypt = aead_decrypt,
2588 .ivsize = DES3_EDE_BLOCK_SIZE,
2589 .maxauthsize = SHA224_DIGEST_SIZE,
2590 },
2591 .caam = {
2592 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2593 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2594 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2595 },
8e8ec596 2596 },
e863f9cc 2597 {
479bcc7c
HX
2598 .aead = {
2599 .base = {
2600 .cra_name = "echainiv(authenc(hmac(sha224),"
2601 "cbc(des3_ede)))",
2602 .cra_driver_name = "echainiv-authenc-"
2603 "hmac-sha224-"
2604 "cbc-des3_ede-caam",
2605 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2606 },
1b52c409 2607 .setkey = des3_aead_setkey,
e863f9cc 2608 .setauthsize = aead_setauthsize,
479bcc7c 2609 .encrypt = aead_encrypt,
8b18e235 2610 .decrypt = aead_decrypt,
e863f9cc
HA
2611 .ivsize = DES3_EDE_BLOCK_SIZE,
2612 .maxauthsize = SHA224_DIGEST_SIZE,
479bcc7c
HX
2613 },
2614 .caam = {
2615 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2616 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2617 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2618 .geniv = true,
2619 },
2620 },
2621 {
2622 .aead = {
2623 .base = {
2624 .cra_name = "authenc(hmac(sha256),"
2625 "cbc(des3_ede))",
2626 .cra_driver_name = "authenc-hmac-sha256-"
2627 "cbc-des3_ede-caam",
2628 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
e863f9cc 2629 },
1b52c409 2630 .setkey = des3_aead_setkey,
479bcc7c
HX
2631 .setauthsize = aead_setauthsize,
2632 .encrypt = aead_encrypt,
2633 .decrypt = aead_decrypt,
2634 .ivsize = DES3_EDE_BLOCK_SIZE,
2635 .maxauthsize = SHA256_DIGEST_SIZE,
2636 },
2637 .caam = {
2638 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2639 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2640 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2641 },
e863f9cc 2642 },
8e8ec596 2643 {
479bcc7c
HX
2644 .aead = {
2645 .base = {
2646 .cra_name = "echainiv(authenc(hmac(sha256),"
2647 "cbc(des3_ede)))",
2648 .cra_driver_name = "echainiv-authenc-"
2649 "hmac-sha256-"
2650 "cbc-des3_ede-caam",
2651 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2652 },
1b52c409 2653 .setkey = des3_aead_setkey,
0e479300 2654 .setauthsize = aead_setauthsize,
479bcc7c 2655 .encrypt = aead_encrypt,
8b18e235 2656 .decrypt = aead_decrypt,
8e8ec596
KP
2657 .ivsize = DES3_EDE_BLOCK_SIZE,
2658 .maxauthsize = SHA256_DIGEST_SIZE,
479bcc7c
HX
2659 },
2660 .caam = {
2661 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2662 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2663 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2664 .geniv = true,
2665 },
2666 },
2667 {
2668 .aead = {
2669 .base = {
2670 .cra_name = "authenc(hmac(sha384),"
2671 "cbc(des3_ede))",
2672 .cra_driver_name = "authenc-hmac-sha384-"
2673 "cbc-des3_ede-caam",
2674 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
8e8ec596 2675 },
1b52c409 2676 .setkey = des3_aead_setkey,
479bcc7c
HX
2677 .setauthsize = aead_setauthsize,
2678 .encrypt = aead_encrypt,
2679 .decrypt = aead_decrypt,
2680 .ivsize = DES3_EDE_BLOCK_SIZE,
2681 .maxauthsize = SHA384_DIGEST_SIZE,
2682 },
2683 .caam = {
2684 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2685 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2686 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2687 },
8e8ec596 2688 },
e863f9cc 2689 {
479bcc7c
HX
2690 .aead = {
2691 .base = {
2692 .cra_name = "echainiv(authenc(hmac(sha384),"
2693 "cbc(des3_ede)))",
2694 .cra_driver_name = "echainiv-authenc-"
2695 "hmac-sha384-"
2696 "cbc-des3_ede-caam",
2697 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2698 },
1b52c409 2699 .setkey = des3_aead_setkey,
e863f9cc 2700 .setauthsize = aead_setauthsize,
479bcc7c 2701 .encrypt = aead_encrypt,
8b18e235 2702 .decrypt = aead_decrypt,
e863f9cc
HA
2703 .ivsize = DES3_EDE_BLOCK_SIZE,
2704 .maxauthsize = SHA384_DIGEST_SIZE,
479bcc7c
HX
2705 },
2706 .caam = {
2707 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2708 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2709 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2710 .geniv = true,
2711 },
2712 },
2713 {
2714 .aead = {
2715 .base = {
2716 .cra_name = "authenc(hmac(sha512),"
2717 "cbc(des3_ede))",
2718 .cra_driver_name = "authenc-hmac-sha512-"
2719 "cbc-des3_ede-caam",
2720 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
e863f9cc 2721 },
1b52c409 2722 .setkey = des3_aead_setkey,
479bcc7c
HX
2723 .setauthsize = aead_setauthsize,
2724 .encrypt = aead_encrypt,
2725 .decrypt = aead_decrypt,
2726 .ivsize = DES3_EDE_BLOCK_SIZE,
2727 .maxauthsize = SHA512_DIGEST_SIZE,
2728 },
2729 .caam = {
2730 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2731 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2732 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2733 },
e863f9cc 2734 },
4427b1b4 2735 {
479bcc7c
HX
2736 .aead = {
2737 .base = {
2738 .cra_name = "echainiv(authenc(hmac(sha512),"
2739 "cbc(des3_ede)))",
2740 .cra_driver_name = "echainiv-authenc-"
2741 "hmac-sha512-"
2742 "cbc-des3_ede-caam",
2743 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2744 },
1b52c409 2745 .setkey = des3_aead_setkey,
0e479300 2746 .setauthsize = aead_setauthsize,
479bcc7c 2747 .encrypt = aead_encrypt,
8b18e235 2748 .decrypt = aead_decrypt,
4427b1b4
KP
2749 .ivsize = DES3_EDE_BLOCK_SIZE,
2750 .maxauthsize = SHA512_DIGEST_SIZE,
479bcc7c
HX
2751 },
2752 .caam = {
2753 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2754 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2755 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2756 .geniv = true,
2757 },
2758 },
2759 {
2760 .aead = {
2761 .base = {
2762 .cra_name = "authenc(hmac(md5),cbc(des))",
2763 .cra_driver_name = "authenc-hmac-md5-"
2764 "cbc-des-caam",
2765 .cra_blocksize = DES_BLOCK_SIZE,
4427b1b4 2766 },
479bcc7c
HX
2767 .setkey = aead_setkey,
2768 .setauthsize = aead_setauthsize,
2769 .encrypt = aead_encrypt,
2770 .decrypt = aead_decrypt,
2771 .ivsize = DES_BLOCK_SIZE,
2772 .maxauthsize = MD5_DIGEST_SIZE,
2773 },
2774 .caam = {
2775 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2776 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2777 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2778 },
4427b1b4 2779 },
8b4d43a4 2780 {
479bcc7c
HX
2781 .aead = {
2782 .base = {
2783 .cra_name = "echainiv(authenc(hmac(md5),"
2784 "cbc(des)))",
2785 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2786 "cbc-des-caam",
2787 .cra_blocksize = DES_BLOCK_SIZE,
2788 },
8b4d43a4
KP
2789 .setkey = aead_setkey,
2790 .setauthsize = aead_setauthsize,
479bcc7c 2791 .encrypt = aead_encrypt,
8b18e235 2792 .decrypt = aead_decrypt,
8b4d43a4
KP
2793 .ivsize = DES_BLOCK_SIZE,
2794 .maxauthsize = MD5_DIGEST_SIZE,
479bcc7c
HX
2795 },
2796 .caam = {
2797 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2798 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2799 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2800 .geniv = true,
2801 },
2802 },
2803 {
2804 .aead = {
2805 .base = {
2806 .cra_name = "authenc(hmac(sha1),cbc(des))",
2807 .cra_driver_name = "authenc-hmac-sha1-"
2808 "cbc-des-caam",
2809 .cra_blocksize = DES_BLOCK_SIZE,
8b4d43a4 2810 },
479bcc7c
HX
2811 .setkey = aead_setkey,
2812 .setauthsize = aead_setauthsize,
2813 .encrypt = aead_encrypt,
2814 .decrypt = aead_decrypt,
2815 .ivsize = DES_BLOCK_SIZE,
2816 .maxauthsize = SHA1_DIGEST_SIZE,
2817 },
2818 .caam = {
2819 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2820 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2821 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2822 },
8b4d43a4 2823 },
8e8ec596 2824 {
479bcc7c
HX
2825 .aead = {
2826 .base = {
2827 .cra_name = "echainiv(authenc(hmac(sha1),"
2828 "cbc(des)))",
2829 .cra_driver_name = "echainiv-authenc-"
2830 "hmac-sha1-cbc-des-caam",
2831 .cra_blocksize = DES_BLOCK_SIZE,
2832 },
0e479300
YK
2833 .setkey = aead_setkey,
2834 .setauthsize = aead_setauthsize,
479bcc7c 2835 .encrypt = aead_encrypt,
8b18e235 2836 .decrypt = aead_decrypt,
8e8ec596
KP
2837 .ivsize = DES_BLOCK_SIZE,
2838 .maxauthsize = SHA1_DIGEST_SIZE,
479bcc7c
HX
2839 },
2840 .caam = {
2841 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2842 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2843 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2844 .geniv = true,
2845 },
2846 },
2847 {
2848 .aead = {
2849 .base = {
2850 .cra_name = "authenc(hmac(sha224),cbc(des))",
2851 .cra_driver_name = "authenc-hmac-sha224-"
2852 "cbc-des-caam",
2853 .cra_blocksize = DES_BLOCK_SIZE,
8e8ec596 2854 },
479bcc7c
HX
2855 .setkey = aead_setkey,
2856 .setauthsize = aead_setauthsize,
2857 .encrypt = aead_encrypt,
2858 .decrypt = aead_decrypt,
2859 .ivsize = DES_BLOCK_SIZE,
2860 .maxauthsize = SHA224_DIGEST_SIZE,
2861 },
2862 .caam = {
2863 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2864 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2865 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2866 },
8e8ec596 2867 },
e863f9cc 2868 {
479bcc7c
HX
2869 .aead = {
2870 .base = {
2871 .cra_name = "echainiv(authenc(hmac(sha224),"
2872 "cbc(des)))",
2873 .cra_driver_name = "echainiv-authenc-"
2874 "hmac-sha224-cbc-des-caam",
2875 .cra_blocksize = DES_BLOCK_SIZE,
2876 },
e863f9cc
HA
2877 .setkey = aead_setkey,
2878 .setauthsize = aead_setauthsize,
479bcc7c 2879 .encrypt = aead_encrypt,
8b18e235 2880 .decrypt = aead_decrypt,
e863f9cc
HA
2881 .ivsize = DES_BLOCK_SIZE,
2882 .maxauthsize = SHA224_DIGEST_SIZE,
479bcc7c
HX
2883 },
2884 .caam = {
2885 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2886 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2887 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2888 .geniv = true,
2889 },
2890 },
2891 {
2892 .aead = {
2893 .base = {
2894 .cra_name = "authenc(hmac(sha256),cbc(des))",
2895 .cra_driver_name = "authenc-hmac-sha256-"
2896 "cbc-des-caam",
2897 .cra_blocksize = DES_BLOCK_SIZE,
e863f9cc 2898 },
479bcc7c
HX
2899 .setkey = aead_setkey,
2900 .setauthsize = aead_setauthsize,
2901 .encrypt = aead_encrypt,
2902 .decrypt = aead_decrypt,
2903 .ivsize = DES_BLOCK_SIZE,
2904 .maxauthsize = SHA256_DIGEST_SIZE,
2905 },
2906 .caam = {
2907 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2908 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2909 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2910 },
e863f9cc 2911 },
8e8ec596 2912 {
479bcc7c
HX
2913 .aead = {
2914 .base = {
2915 .cra_name = "echainiv(authenc(hmac(sha256),"
2916 "cbc(des)))",
2917 .cra_driver_name = "echainiv-authenc-"
2918 "hmac-sha256-cbc-des-caam",
2919 .cra_blocksize = DES_BLOCK_SIZE,
2920 },
0e479300
YK
2921 .setkey = aead_setkey,
2922 .setauthsize = aead_setauthsize,
479bcc7c 2923 .encrypt = aead_encrypt,
8b18e235 2924 .decrypt = aead_decrypt,
8e8ec596
KP
2925 .ivsize = DES_BLOCK_SIZE,
2926 .maxauthsize = SHA256_DIGEST_SIZE,
479bcc7c
HX
2927 },
2928 .caam = {
2929 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2930 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2931 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2932 .geniv = true,
2933 },
2934 },
2935 {
2936 .aead = {
2937 .base = {
2938 .cra_name = "authenc(hmac(sha384),cbc(des))",
2939 .cra_driver_name = "authenc-hmac-sha384-"
2940 "cbc-des-caam",
2941 .cra_blocksize = DES_BLOCK_SIZE,
8e8ec596 2942 },
479bcc7c
HX
2943 .setkey = aead_setkey,
2944 .setauthsize = aead_setauthsize,
2945 .encrypt = aead_encrypt,
2946 .decrypt = aead_decrypt,
2947 .ivsize = DES_BLOCK_SIZE,
2948 .maxauthsize = SHA384_DIGEST_SIZE,
2949 },
2950 .caam = {
2951 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2952 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2953 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2954 },
8e8ec596 2955 },
e863f9cc 2956 {
479bcc7c
HX
2957 .aead = {
2958 .base = {
2959 .cra_name = "echainiv(authenc(hmac(sha384),"
2960 "cbc(des)))",
2961 .cra_driver_name = "echainiv-authenc-"
2962 "hmac-sha384-cbc-des-caam",
2963 .cra_blocksize = DES_BLOCK_SIZE,
2964 },
e863f9cc
HA
2965 .setkey = aead_setkey,
2966 .setauthsize = aead_setauthsize,
479bcc7c 2967 .encrypt = aead_encrypt,
8b18e235 2968 .decrypt = aead_decrypt,
e863f9cc
HA
2969 .ivsize = DES_BLOCK_SIZE,
2970 .maxauthsize = SHA384_DIGEST_SIZE,
479bcc7c
HX
2971 },
2972 .caam = {
2973 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2974 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2975 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2976 .geniv = true,
2977 },
2978 },
2979 {
2980 .aead = {
2981 .base = {
2982 .cra_name = "authenc(hmac(sha512),cbc(des))",
2983 .cra_driver_name = "authenc-hmac-sha512-"
2984 "cbc-des-caam",
2985 .cra_blocksize = DES_BLOCK_SIZE,
e863f9cc 2986 },
479bcc7c
HX
2987 .setkey = aead_setkey,
2988 .setauthsize = aead_setauthsize,
2989 .encrypt = aead_encrypt,
2990 .decrypt = aead_decrypt,
2991 .ivsize = DES_BLOCK_SIZE,
2992 .maxauthsize = SHA512_DIGEST_SIZE,
2993 },
2994 .caam = {
2995 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2996 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2997 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2998 },
e863f9cc 2999 },
4427b1b4 3000 {
479bcc7c
HX
3001 .aead = {
3002 .base = {
3003 .cra_name = "echainiv(authenc(hmac(sha512),"
3004 "cbc(des)))",
3005 .cra_driver_name = "echainiv-authenc-"
3006 "hmac-sha512-cbc-des-caam",
3007 .cra_blocksize = DES_BLOCK_SIZE,
3008 },
0e479300
YK
3009 .setkey = aead_setkey,
3010 .setauthsize = aead_setauthsize,
479bcc7c 3011 .encrypt = aead_encrypt,
8b18e235 3012 .decrypt = aead_decrypt,
4427b1b4
KP
3013 .ivsize = DES_BLOCK_SIZE,
3014 .maxauthsize = SHA512_DIGEST_SIZE,
479bcc7c
HX
3015 },
3016 .caam = {
3017 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3018 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3019 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3020 .geniv = true,
3021 },
4427b1b4 3022 },
daebc465 3023 {
479bcc7c
HX
3024 .aead = {
3025 .base = {
3026 .cra_name = "authenc(hmac(md5),"
3027 "rfc3686(ctr(aes)))",
3028 .cra_driver_name = "authenc-hmac-md5-"
3029 "rfc3686-ctr-aes-caam",
3030 .cra_blocksize = 1,
3031 },
daebc465
CV
3032 .setkey = aead_setkey,
3033 .setauthsize = aead_setauthsize,
479bcc7c
HX
3034 .encrypt = aead_encrypt,
3035 .decrypt = aead_decrypt,
daebc465
CV
3036 .ivsize = CTR_RFC3686_IV_SIZE,
3037 .maxauthsize = MD5_DIGEST_SIZE,
479bcc7c
HX
3038 },
3039 .caam = {
3040 .class1_alg_type = OP_ALG_ALGSEL_AES |
3041 OP_ALG_AAI_CTR_MOD128,
3042 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3043 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3044 .rfc3686 = true,
3045 },
daebc465
CV
3046 },
3047 {
479bcc7c
HX
3048 .aead = {
3049 .base = {
3050 .cra_name = "seqiv(authenc("
3051 "hmac(md5),rfc3686(ctr(aes))))",
3052 .cra_driver_name = "seqiv-authenc-hmac-md5-"
3053 "rfc3686-ctr-aes-caam",
3054 .cra_blocksize = 1,
3055 },
daebc465
CV
3056 .setkey = aead_setkey,
3057 .setauthsize = aead_setauthsize,
479bcc7c 3058 .encrypt = aead_encrypt,
8b18e235 3059 .decrypt = aead_decrypt,
daebc465 3060 .ivsize = CTR_RFC3686_IV_SIZE,
479bcc7c
HX
3061 .maxauthsize = MD5_DIGEST_SIZE,
3062 },
3063 .caam = {
3064 .class1_alg_type = OP_ALG_ALGSEL_AES |
3065 OP_ALG_AAI_CTR_MOD128,
3066 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3067 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3068 .rfc3686 = true,
3069 .geniv = true,
3070 },
daebc465
CV
3071 },
3072 {
479bcc7c
HX
3073 .aead = {
3074 .base = {
3075 .cra_name = "authenc(hmac(sha1),"
3076 "rfc3686(ctr(aes)))",
3077 .cra_driver_name = "authenc-hmac-sha1-"
3078 "rfc3686-ctr-aes-caam",
3079 .cra_blocksize = 1,
3080 },
daebc465
CV
3081 .setkey = aead_setkey,
3082 .setauthsize = aead_setauthsize,
479bcc7c
HX
3083 .encrypt = aead_encrypt,
3084 .decrypt = aead_decrypt,
daebc465 3085 .ivsize = CTR_RFC3686_IV_SIZE,
479bcc7c
HX
3086 .maxauthsize = SHA1_DIGEST_SIZE,
3087 },
3088 .caam = {
3089 .class1_alg_type = OP_ALG_ALGSEL_AES |
3090 OP_ALG_AAI_CTR_MOD128,
3091 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3092 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3093 .rfc3686 = true,
3094 },
daebc465
CV
3095 },
3096 {
479bcc7c
HX
3097 .aead = {
3098 .base = {
3099 .cra_name = "seqiv(authenc("
3100 "hmac(sha1),rfc3686(ctr(aes))))",
3101 .cra_driver_name = "seqiv-authenc-hmac-sha1-"
3102 "rfc3686-ctr-aes-caam",
3103 .cra_blocksize = 1,
3104 },
daebc465
CV
3105 .setkey = aead_setkey,
3106 .setauthsize = aead_setauthsize,
479bcc7c 3107 .encrypt = aead_encrypt,
8b18e235 3108 .decrypt = aead_decrypt,
daebc465 3109 .ivsize = CTR_RFC3686_IV_SIZE,
479bcc7c
HX
3110 .maxauthsize = SHA1_DIGEST_SIZE,
3111 },
3112 .caam = {
3113 .class1_alg_type = OP_ALG_ALGSEL_AES |
3114 OP_ALG_AAI_CTR_MOD128,
3115 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3116 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3117 .rfc3686 = true,
3118 .geniv = true,
3119 },
daebc465
CV
3120 },
3121 {
479bcc7c
HX
3122 .aead = {
3123 .base = {
3124 .cra_name = "authenc(hmac(sha224),"
3125 "rfc3686(ctr(aes)))",
3126 .cra_driver_name = "authenc-hmac-sha224-"
3127 "rfc3686-ctr-aes-caam",
3128 .cra_blocksize = 1,
3129 },
daebc465
CV
3130 .setkey = aead_setkey,
3131 .setauthsize = aead_setauthsize,
479bcc7c
HX
3132 .encrypt = aead_encrypt,
3133 .decrypt = aead_decrypt,
daebc465 3134 .ivsize = CTR_RFC3686_IV_SIZE,
479bcc7c
HX
3135 .maxauthsize = SHA224_DIGEST_SIZE,
3136 },
3137 .caam = {
3138 .class1_alg_type = OP_ALG_ALGSEL_AES |
3139 OP_ALG_AAI_CTR_MOD128,
3140 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3141 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3142 .rfc3686 = true,
3143 },
daebc465
CV
3144 },
3145 {
479bcc7c
HX
3146 .aead = {
3147 .base = {
3148 .cra_name = "seqiv(authenc("
3149 "hmac(sha224),rfc3686(ctr(aes))))",
3150 .cra_driver_name = "seqiv-authenc-hmac-sha224-"
3151 "rfc3686-ctr-aes-caam",
3152 .cra_blocksize = 1,
3153 },
daebc465
CV
3154 .setkey = aead_setkey,
3155 .setauthsize = aead_setauthsize,
479bcc7c 3156 .encrypt = aead_encrypt,
8b18e235 3157 .decrypt = aead_decrypt,
daebc465 3158 .ivsize = CTR_RFC3686_IV_SIZE,
479bcc7c
HX
3159 .maxauthsize = SHA224_DIGEST_SIZE,
3160 },
3161 .caam = {
3162 .class1_alg_type = OP_ALG_ALGSEL_AES |
3163 OP_ALG_AAI_CTR_MOD128,
3164 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3165 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3166 .rfc3686 = true,
3167 .geniv = true,
3168 },
acdca31d
YK
3169 },
3170 {
479bcc7c
HX
3171 .aead = {
3172 .base = {
3173 .cra_name = "authenc(hmac(sha256),"
3174 "rfc3686(ctr(aes)))",
3175 .cra_driver_name = "authenc-hmac-sha256-"
3176 "rfc3686-ctr-aes-caam",
3177 .cra_blocksize = 1,
acdca31d 3178 },
479bcc7c
HX
3179 .setkey = aead_setkey,
3180 .setauthsize = aead_setauthsize,
3181 .encrypt = aead_encrypt,
3182 .decrypt = aead_decrypt,
3183 .ivsize = CTR_RFC3686_IV_SIZE,
3184 .maxauthsize = SHA256_DIGEST_SIZE,
3185 },
3186 .caam = {
3187 .class1_alg_type = OP_ALG_ALGSEL_AES |
3188 OP_ALG_AAI_CTR_MOD128,
3189 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3190 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3191 .rfc3686 = true,
3192 },
acdca31d
YK
3193 },
3194 {
479bcc7c
HX
3195 .aead = {
3196 .base = {
3197 .cra_name = "seqiv(authenc(hmac(sha256),"
3198 "rfc3686(ctr(aes))))",
3199 .cra_driver_name = "seqiv-authenc-hmac-sha256-"
3200 "rfc3686-ctr-aes-caam",
3201 .cra_blocksize = 1,
acdca31d 3202 },
479bcc7c
HX
3203 .setkey = aead_setkey,
3204 .setauthsize = aead_setauthsize,
3205 .encrypt = aead_encrypt,
8b18e235 3206 .decrypt = aead_decrypt,
479bcc7c
HX
3207 .ivsize = CTR_RFC3686_IV_SIZE,
3208 .maxauthsize = SHA256_DIGEST_SIZE,
3209 },
3210 .caam = {
3211 .class1_alg_type = OP_ALG_ALGSEL_AES |
3212 OP_ALG_AAI_CTR_MOD128,
3213 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3214 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3215 .rfc3686 = true,
3216 .geniv = true,
3217 },
2b22f6c5
CV
3218 },
3219 {
479bcc7c
HX
3220 .aead = {
3221 .base = {
3222 .cra_name = "authenc(hmac(sha384),"
3223 "rfc3686(ctr(aes)))",
3224 .cra_driver_name = "authenc-hmac-sha384-"
3225 "rfc3686-ctr-aes-caam",
3226 .cra_blocksize = 1,
2b22f6c5 3227 },
479bcc7c
HX
3228 .setkey = aead_setkey,
3229 .setauthsize = aead_setauthsize,
3230 .encrypt = aead_encrypt,
3231 .decrypt = aead_decrypt,
a5f57cff 3232 .ivsize = CTR_RFC3686_IV_SIZE,
479bcc7c
HX
3233 .maxauthsize = SHA384_DIGEST_SIZE,
3234 },
3235 .caam = {
3236 .class1_alg_type = OP_ALG_ALGSEL_AES |
3237 OP_ALG_AAI_CTR_MOD128,
3238 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3239 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3240 .rfc3686 = true,
3241 },
3242 },
f2147b88
HX
3243 {
3244 .aead = {
3245 .base = {
479bcc7c
HX
3246 .cra_name = "seqiv(authenc(hmac(sha384),"
3247 "rfc3686(ctr(aes))))",
3248 .cra_driver_name = "seqiv-authenc-hmac-sha384-"
3249 "rfc3686-ctr-aes-caam",
f2147b88
HX
3250 .cra_blocksize = 1,
3251 },
479bcc7c
HX
3252 .setkey = aead_setkey,
3253 .setauthsize = aead_setauthsize,
3254 .encrypt = aead_encrypt,
8b18e235 3255 .decrypt = aead_decrypt,
479bcc7c
HX
3256 .ivsize = CTR_RFC3686_IV_SIZE,
3257 .maxauthsize = SHA384_DIGEST_SIZE,
f2147b88
HX
3258 },
3259 .caam = {
479bcc7c
HX
3260 .class1_alg_type = OP_ALG_ALGSEL_AES |
3261 OP_ALG_AAI_CTR_MOD128,
3262 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3263 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3264 .rfc3686 = true,
3265 .geniv = true,
f2147b88
HX
3266 },
3267 },
3268 {
3269 .aead = {
3270 .base = {
479bcc7c
HX
3271 .cra_name = "authenc(hmac(sha512),"
3272 "rfc3686(ctr(aes)))",
3273 .cra_driver_name = "authenc-hmac-sha512-"
3274 "rfc3686-ctr-aes-caam",
f2147b88
HX
3275 .cra_blocksize = 1,
3276 },
479bcc7c
HX
3277 .setkey = aead_setkey,
3278 .setauthsize = aead_setauthsize,
3279 .encrypt = aead_encrypt,
3280 .decrypt = aead_decrypt,
3281 .ivsize = CTR_RFC3686_IV_SIZE,
3282 .maxauthsize = SHA512_DIGEST_SIZE,
f2147b88
HX
3283 },
3284 .caam = {
479bcc7c
HX
3285 .class1_alg_type = OP_ALG_ALGSEL_AES |
3286 OP_ALG_AAI_CTR_MOD128,
3287 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3288 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 3289 .rfc3686 = true,
f2147b88
HX
3290 },
3291 },
f2147b88
HX
3292 {
3293 .aead = {
3294 .base = {
479bcc7c
HX
3295 .cra_name = "seqiv(authenc(hmac(sha512),"
3296 "rfc3686(ctr(aes))))",
3297 .cra_driver_name = "seqiv-authenc-hmac-sha512-"
3298 "rfc3686-ctr-aes-caam",
f2147b88
HX
3299 .cra_blocksize = 1,
3300 },
479bcc7c
HX
3301 .setkey = aead_setkey,
3302 .setauthsize = aead_setauthsize,
3303 .encrypt = aead_encrypt,
8b18e235 3304 .decrypt = aead_decrypt,
479bcc7c
HX
3305 .ivsize = CTR_RFC3686_IV_SIZE,
3306 .maxauthsize = SHA512_DIGEST_SIZE,
f2147b88
HX
3307 },
3308 .caam = {
479bcc7c
HX
3309 .class1_alg_type = OP_ALG_ALGSEL_AES |
3310 OP_ALG_AAI_CTR_MOD128,
3311 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3312 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3313 .rfc3686 = true,
3314 .geniv = true,
f2147b88
HX
3315 },
3316 },
d6bbd4ee
HG
3317 {
3318 .aead = {
3319 .base = {
3320 .cra_name = "rfc7539(chacha20,poly1305)",
3321 .cra_driver_name = "rfc7539-chacha20-poly1305-"
3322 "caam",
3323 .cra_blocksize = 1,
3324 },
3325 .setkey = chachapoly_setkey,
3326 .setauthsize = chachapoly_setauthsize,
3327 .encrypt = chachapoly_encrypt,
3328 .decrypt = chachapoly_decrypt,
3329 .ivsize = CHACHAPOLY_IV_SIZE,
3330 .maxauthsize = POLY1305_DIGEST_SIZE,
3331 },
3332 .caam = {
3333 .class1_alg_type = OP_ALG_ALGSEL_CHACHA20 |
3334 OP_ALG_AAI_AEAD,
3335 .class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
3336 OP_ALG_AAI_AEAD,
3337 },
3338 },
3339 {
3340 .aead = {
3341 .base = {
3342 .cra_name = "rfc7539esp(chacha20,poly1305)",
3343 .cra_driver_name = "rfc7539esp-chacha20-"
3344 "poly1305-caam",
3345 .cra_blocksize = 1,
3346 },
3347 .setkey = chachapoly_setkey,
3348 .setauthsize = chachapoly_setauthsize,
3349 .encrypt = chachapoly_encrypt,
3350 .decrypt = chachapoly_decrypt,
3351 .ivsize = 8,
3352 .maxauthsize = POLY1305_DIGEST_SIZE,
3353 },
3354 .caam = {
3355 .class1_alg_type = OP_ALG_ALGSEL_CHACHA20 |
3356 OP_ALG_AAI_AEAD,
3357 .class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
3358 OP_ALG_AAI_AEAD,
3359 },
3360 },
3361};
3362
3363static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam,
3364 bool uses_dkp)
8e8ec596 3365{
bbf22344 3366 dma_addr_t dma_addr;
7e0880b9 3367 struct caam_drv_private *priv;
bbf22344 3368
3369 ctx->jrdev = caam_jr_alloc();
3370 if (IS_ERR(ctx->jrdev)) {
3371 pr_err("Job Ring Device allocation for transform failed\n");
3372 return PTR_ERR(ctx->jrdev);
3373 }
8e8ec596 3374
3375 priv = dev_get_drvdata(ctx->jrdev->parent);
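 	/*
 	 * On Era 6+ parts the shared descriptors use the Derived Key Protocol
 	 * (DKP), which writes the derived split key back into the context, so
 	 * the context memory must be mapped bidirectionally.
 	 */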
3376 if (priv->era >= 6 && uses_dkp)
3377 ctx->dir = DMA_BIDIRECTIONAL;
3378 else
3379 ctx->dir = DMA_TO_DEVICE;
3380
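 	/*
 	 * Both shared descriptors and the key are mapped as one contiguous
 	 * region; the per-field DMA addresses are derived from their offsets
 	 * within struct caam_ctx.
 	 */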
3381 dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
3382 offsetof(struct caam_ctx,
3383 sh_desc_enc_dma),
7e0880b9 3384 ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
3385 if (dma_mapping_error(ctx->jrdev, dma_addr)) {
3386 dev_err(ctx->jrdev, "unable to map key, shared descriptors\n");
3387 caam_jr_free(ctx->jrdev);
3388 return -ENOMEM;
3389 }
3390
3391 ctx->sh_desc_enc_dma = dma_addr;
3392 ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx,
3393 sh_desc_dec);
3394 ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key);
3395
8e8ec596 3396 /* copy descriptor header template value */
3397 ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
3398 ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;
3399
3400 return 0;
3401}
3402
5ca7badb 3403static int caam_cra_init(struct crypto_skcipher *tfm)
8e8ec596 3404{
3405 struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
3406 struct caam_skcipher_alg *caam_alg =
3407 container_of(alg, typeof(*caam_alg), skcipher);
8e8ec596 3408
3409 return caam_init_common(crypto_skcipher_ctx(tfm), &caam_alg->caam,
3410 false);
3411}
3412
3413static int caam_aead_init(struct crypto_aead *tfm)
3414{
3415 struct aead_alg *alg = crypto_aead_alg(tfm);
3416 struct caam_aead_alg *caam_alg =
3417 container_of(alg, struct caam_aead_alg, aead);
3418 struct caam_ctx *ctx = crypto_aead_ctx(tfm);
3419
3420 return caam_init_common(ctx, &caam_alg->caam,
3421 alg->setkey == aead_setkey);
3422}
3423
3424static void caam_exit_common(struct caam_ctx *ctx)
3425{
3426 dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
3427 offsetof(struct caam_ctx, sh_desc_enc_dma),
7e0880b9 3428 ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
cfc6f11b 3429 caam_jr_free(ctx->jrdev);
3430}
3431
5ca7badb 3432static void caam_cra_exit(struct crypto_skcipher *tfm)
f2147b88 3433{
5ca7badb 3434 caam_exit_common(crypto_skcipher_ctx(tfm));
3435}
3436
3437static void caam_aead_exit(struct crypto_aead *tfm)
3438{
3439 caam_exit_common(crypto_aead_ctx(tfm));
3440}
3441
3442static void __exit caam_algapi_exit(void)
3443{
3444 int i;
3445
3446 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
3447 struct caam_aead_alg *t_alg = driver_aeads + i;
3448
3449 if (t_alg->registered)
3450 crypto_unregister_aead(&t_alg->aead);
3451 }
8e8ec596 3452
3453 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
3454 struct caam_skcipher_alg *t_alg = driver_algs + i;
8e8ec596 3455
3456 if (t_alg->registered)
3457 crypto_unregister_skcipher(&t_alg->skcipher);
8e8ec596 3458 }
3459}
3460
5ca7badb 3461static void caam_skcipher_alg_init(struct caam_skcipher_alg *t_alg)
8e8ec596 3462{
5ca7badb 3463 struct skcipher_alg *alg = &t_alg->skcipher;
8e8ec596 3464
3465 alg->base.cra_module = THIS_MODULE;
3466 alg->base.cra_priority = CAAM_CRA_PRIORITY;
3467 alg->base.cra_ctxsize = sizeof(struct caam_ctx);
3468 alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
8e8ec596 3469
3470 alg->init = caam_cra_init;
3471 alg->exit = caam_cra_exit;
3472}
3473
3474static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
3475{
3476 struct aead_alg *alg = &t_alg->aead;
3477
3478 alg->base.cra_module = THIS_MODULE;
3479 alg->base.cra_priority = CAAM_CRA_PRIORITY;
3480 alg->base.cra_ctxsize = sizeof(struct caam_ctx);
5e4b8c1f 3481 alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
3482
3483 alg->init = caam_aead_init;
3484 alg->exit = caam_aead_exit;
3485}
3486
3487static int __init caam_algapi_init(void)
3488{
3489 struct device_node *dev_node;
3490 struct platform_device *pdev;
bf83490e 3491 struct caam_drv_private *priv;
8e8ec596 3492 int i = 0, err = 0;
d6bbd4ee 3493 u32 aes_vid, aes_inst, des_inst, md_vid, md_inst, ccha_inst, ptha_inst;
eaed71a4 3494 u32 arc4_inst;
bf83490e 3495 unsigned int md_limit = SHA512_DIGEST_SIZE;
f2147b88 3496 bool registered = false;
8e8ec596 3497
3498 dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0");
3499 if (!dev_node) {
3500 dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0");
3501 if (!dev_node)
3502 return -ENODEV;
3503 }
3504
3505 pdev = of_find_device_by_node(dev_node);
3506 if (!pdev) {
3507 of_node_put(dev_node);
3508 return -ENODEV;
3509 }
3510
00e87449 3511 priv = dev_get_drvdata(&pdev->dev);
3512 of_node_put(dev_node);
3513
3514 /*
3515 * If priv is NULL, it's probably because the caam driver wasn't
3516 * properly initialized (e.g. RNG4 init failed). Thus, bail out here.
3517 */
3518 if (!priv) {
3519 err = -ENODEV;
3520 goto out_put_dev;
3521 }
3522
3523
3524 /*
3525 * Register crypto algorithms the device supports.
3526 * First, detect presence and attributes of DES, AES, and MD blocks.
3527 */
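 	/* Era < 10 exposes CHA info via the LS ID/instantiation registers; era 10+ via per-CHA version registers. */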
3528 if (priv->era < 10) {
3529 u32 cha_vid, cha_inst;
3530
3531 cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
3532 aes_vid = cha_vid & CHA_ID_LS_AES_MASK;
3533 md_vid = (cha_vid & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
3534
3535 cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
3536 des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >>
3537 CHA_ID_LS_DES_SHIFT;
3538 aes_inst = cha_inst & CHA_ID_LS_AES_MASK;
3539 md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
eaed71a4
IP
3540 arc4_inst = (cha_inst & CHA_ID_LS_ARC4_MASK) >>
3541 CHA_ID_LS_ARC4_SHIFT;
d6bbd4ee
HG
3542 ccha_inst = 0;
3543 ptha_inst = 0;
d239b10d
HG
3544 } else {
3545 u32 aesa, mdha;
3546
3547 aesa = rd_reg32(&priv->ctrl->vreg.aesa);
3548 mdha = rd_reg32(&priv->ctrl->vreg.mdha);
3549
3550 aes_vid = (aesa & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;
3551 md_vid = (mdha & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;
3552
3553 des_inst = rd_reg32(&priv->ctrl->vreg.desa) & CHA_VER_NUM_MASK;
3554 aes_inst = aesa & CHA_VER_NUM_MASK;
3555 md_inst = mdha & CHA_VER_NUM_MASK;
d6bbd4ee
HG
3556 ccha_inst = rd_reg32(&priv->ctrl->vreg.ccha) & CHA_VER_NUM_MASK;
3557 ptha_inst = rd_reg32(&priv->ctrl->vreg.ptha) & CHA_VER_NUM_MASK;
eaed71a4 3558 arc4_inst = rd_reg32(&priv->ctrl->vreg.afha) & CHA_VER_NUM_MASK;
d239b10d 3559 }
bf83490e
VM
3560
3561 /* If MD is present, limit digest size based on LP256 */
d239b10d 3562 if (md_inst && md_vid == CHA_VER_VID_MD_LP256)
bf83490e
VM
3563 md_limit = SHA256_DIGEST_SIZE;
3564
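	/*
	 * Register the skcipher templates, skipping any whose required
	 * CHA (DES/3DES, AES, ARC4) is not instantiated, and skipping XTS
	 * on AES low-power (LP) blocks.
	 */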
	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		struct caam_skcipher_alg *t_alg = driver_algs + i;
		u32 alg_sel = t_alg->caam.class1_alg_type & OP_ALG_ALGSEL_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/* Skip ARC4 algorithms if not supported by device */
		if (!arc4_inst && alg_sel == OP_ALG_ALGSEL_ARC4)
			continue;

		/*
		 * Check support for AES modes not available
		 * on LP devices.
		 */
		if (aes_vid == CHA_VER_VID_AES_LP &&
		    (t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK) ==
		    OP_ALG_AAI_XTS)
			continue;

		caam_skcipher_alg_init(t_alg);

		err = crypto_register_skcipher(&t_alg->skcipher);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->skcipher.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

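	/*
	 * Register the AEAD templates, applying the same CHA availability
	 * checks plus the digest-size limit computed above.
	 */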
	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;
		u32 c1_alg_sel = t_alg->caam.class1_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 c2_alg_sel = t_alg->caam.class2_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (c1_alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/* Skip CHACHA20 algorithms if not supported by device */
		if (c1_alg_sel == OP_ALG_ALGSEL_CHACHA20 && !ccha_inst)
			continue;

		/* Skip POLY1305 algorithms if not supported by device */
		if (c2_alg_sel == OP_ALG_ALGSEL_POLY1305 && !ptha_inst)
			continue;

		/*
		 * Check support for AES algorithms not available
		 * on LP devices.
		 */
		if (aes_vid == CHA_VER_VID_AES_LP && alg_aai == OP_ALG_AAI_GCM)
			continue;

		/*
		 * Skip algorithms requiring message digests
		 * if MD or MD size is not supported by device.
		 */
		if (is_mdha(c2_alg_sel) &&
		    (!md_inst || t_alg->aead.maxauthsize > md_limit))
			continue;

		caam_aead_alg_init(t_alg);

		err = crypto_register_aead(&t_alg->aead);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->aead.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

	if (registered)
		pr_info("caam algorithms registered in /proc/crypto\n");

out_put_dev:
	put_device(&pdev->dev);
	return err;
}

module_init(caam_algapi_init);
module_exit(caam_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("FSL CAAM support for crypto API");
MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");