]> git.proxmox.com Git - mirror_ubuntu-artful-kernel.git/blame - drivers/crypto/caam/caamalg.c
Merge tag 'pm-turbostat-4.11-rc1' of git://git.kernel.org/pub/scm/linux/kernel/git...
[mirror_ubuntu-artful-kernel.git] / drivers / crypto / caam / caamalg.c
CommitLineData
8e8ec596
KP
1/*
2 * caam - Freescale FSL CAAM support for crypto API
3 *
4 * Copyright 2008-2011 Freescale Semiconductor, Inc.
8cea7b66 5 * Copyright 2016 NXP
8e8ec596
KP
6 *
7 * Based on talitos crypto API driver.
8 *
9 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
10 *
11 * --------------- ---------------
12 * | JobDesc #1 |-------------------->| ShareDesc |
13 * | *(packet 1) | | (PDB) |
14 * --------------- |------------->| (hashKey) |
15 * . | | (cipherKey) |
16 * . | |-------->| (operation) |
17 * --------------- | | ---------------
18 * | JobDesc #2 |------| |
19 * | *(packet 2) | |
20 * --------------- |
21 * . |
22 * . |
23 * --------------- |
24 * | JobDesc #3 |------------
25 * | *(packet 3) |
26 * ---------------
27 *
28 * The SharedDesc never changes for a connection unless rekeyed, but
29 * each packet will likely be in a different place. So all we need
30 * to know to process the packet is where the input is, where the
31 * output goes, and what context we want to process with. Context is
32 * in the SharedDesc, packet references in the JobDesc.
33 *
34 * So, a job desc looks like:
35 *
36 * ---------------------
37 * | Header |
38 * | ShareDesc Pointer |
39 * | SEQ_OUT_PTR |
40 * | (output buffer) |
6ec47334 41 * | (output length) |
8e8ec596
KP
42 * | SEQ_IN_PTR |
43 * | (input buffer) |
6ec47334 44 * | (input length) |
8e8ec596
KP
45 * ---------------------
46 */
47
48#include "compat.h"
49
50#include "regs.h"
51#include "intern.h"
52#include "desc_constr.h"
53#include "jr.h"
54#include "error.h"
a299c837 55#include "sg_sw_sec4.h"
4c1ec1f9 56#include "key_gen.h"
8cea7b66 57#include "caamalg_desc.h"
8e8ec596
KP
58
59/*
60 * crypto alg
61 */
62#define CAAM_CRA_PRIORITY 3000
63/* max key is sum of AES_MAX_KEY_SIZE, max split key size */
64#define CAAM_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + \
daebc465 65 CTR_RFC3686_NONCE_SIZE + \
8e8ec596 66 SHA512_DIGEST_SIZE * 2)
8e8ec596 67
f2147b88
HX
68#define AEAD_DESC_JOB_IO_LEN (DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
69#define GCM_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \
70 CAAM_CMD_SZ * 4)
479bcc7c
HX
71#define AUTHENC_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \
72 CAAM_CMD_SZ * 5)
f2147b88 73
87e51b07
HX
74#define DESC_MAX_USED_BYTES (CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN)
75#define DESC_MAX_USED_LEN (DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
4427b1b4 76
8e8ec596
KP
77#ifdef DEBUG
78/* for print_hex_dumps with line references */
8e8ec596
KP
79#define debug(format, arg...) printk(format, arg)
80#else
81#define debug(format, arg...)
82#endif
5ecf8ef9
CV
83
84#ifdef DEBUG
85#include <linux/highmem.h>
86
87static void dbg_dump_sg(const char *level, const char *prefix_str,
88 int prefix_type, int rowsize, int groupsize,
00fef2b2 89 struct scatterlist *sg, size_t tlen, bool ascii)
5ecf8ef9
CV
90{
91 struct scatterlist *it;
92 void *it_page;
93 size_t len;
94 void *buf;
95
96 for (it = sg; it != NULL && tlen > 0 ; it = sg_next(sg)) {
97 /*
98 * make sure the scatterlist's page
99 * has a valid virtual memory mapping
100 */
101 it_page = kmap_atomic(sg_page(it));
102 if (unlikely(!it_page)) {
103 printk(KERN_ERR "dbg_dump_sg: kmap failed\n");
104 return;
105 }
106
107 buf = it_page + it->offset;
d69985a0 108 len = min_t(size_t, tlen, it->length);
5ecf8ef9
CV
109 print_hex_dump(level, prefix_str, prefix_type, rowsize,
110 groupsize, buf, len, ascii);
111 tlen -= len;
112
113 kunmap_atomic(it_page);
114 }
115}
116#endif
117
cfc6f11b 118static struct list_head alg_list;
8e8ec596 119
479bcc7c
HX
120struct caam_alg_entry {
121 int class1_alg_type;
122 int class2_alg_type;
479bcc7c
HX
123 bool rfc3686;
124 bool geniv;
125};
126
127struct caam_aead_alg {
128 struct aead_alg aead;
129 struct caam_alg_entry caam;
130 bool registered;
131};
132
8e8ec596
KP
133/*
134 * per-session context
135 */
136struct caam_ctx {
1acebad3
YK
137 u32 sh_desc_enc[DESC_MAX_USED_LEN];
138 u32 sh_desc_dec[DESC_MAX_USED_LEN];
139 u32 sh_desc_givenc[DESC_MAX_USED_LEN];
bbf22344 140 u8 key[CAAM_MAX_KEY_SIZE];
1acebad3
YK
141 dma_addr_t sh_desc_enc_dma;
142 dma_addr_t sh_desc_dec_dma;
143 dma_addr_t sh_desc_givenc_dma;
885e9e2f 144 dma_addr_t key_dma;
bbf22344 145 struct device *jrdev;
db57656b
HG
146 struct alginfo adata;
147 struct alginfo cdata;
8e8ec596
KP
148 unsigned int authsize;
149};
150
ae4a825f
HG
151static int aead_null_set_sh_desc(struct crypto_aead *aead)
152{
ae4a825f
HG
153 struct caam_ctx *ctx = crypto_aead_ctx(aead);
154 struct device *jrdev = ctx->jrdev;
ae4a825f 155 u32 *desc;
4cbe79cc
HG
156 int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
157 ctx->adata.keylen_pad;
ae4a825f
HG
158
159 /*
160 * Job Descriptor and Shared Descriptors
161 * must all fit into the 64-word Descriptor h/w Buffer
162 */
4cbe79cc 163 if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) {
db57656b 164 ctx->adata.key_inline = true;
9c0bc511 165 ctx->adata.key_virt = ctx->key;
db57656b
HG
166 } else {
167 ctx->adata.key_inline = false;
9c0bc511 168 ctx->adata.key_dma = ctx->key_dma;
db57656b 169 }
ae4a825f 170
479bcc7c 171 /* aead_encrypt shared descriptor */
ae4a825f 172 desc = ctx->sh_desc_enc;
8cea7b66 173 cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize);
bbf22344
HG
174 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
175 desc_bytes(desc), DMA_TO_DEVICE);
ae4a825f
HG
176
177 /*
178 * Job Descriptor and Shared Descriptors
179 * must all fit into the 64-word Descriptor h/w Buffer
180 */
4cbe79cc 181 if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) {
db57656b 182 ctx->adata.key_inline = true;
9c0bc511 183 ctx->adata.key_virt = ctx->key;
db57656b
HG
184 } else {
185 ctx->adata.key_inline = false;
9c0bc511 186 ctx->adata.key_dma = ctx->key_dma;
db57656b 187 }
ae4a825f 188
479bcc7c 189 /* aead_decrypt shared descriptor */
8cea7b66
HG
190 desc = ctx->sh_desc_dec;
191 cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize);
bbf22344
HG
192 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
193 desc_bytes(desc), DMA_TO_DEVICE);
ae4a825f
HG
194
195 return 0;
196}
197
1acebad3
YK
198static int aead_set_sh_desc(struct crypto_aead *aead)
199{
479bcc7c
HX
200 struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
201 struct caam_aead_alg, aead);
add86d55 202 unsigned int ivsize = crypto_aead_ivsize(aead);
1acebad3
YK
203 struct caam_ctx *ctx = crypto_aead_ctx(aead);
204 struct device *jrdev = ctx->jrdev;
daebc465 205 u32 ctx1_iv_off = 0;
8cea7b66 206 u32 *desc, *nonce = NULL;
4cbe79cc
HG
207 u32 inl_mask;
208 unsigned int data_len[2];
db57656b 209 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
daebc465 210 OP_ALG_AAI_CTR_MOD128);
479bcc7c 211 const bool is_rfc3686 = alg->caam.rfc3686;
1acebad3 212
2fdea258
HG
213 if (!ctx->authsize)
214 return 0;
215
ae4a825f 216 /* NULL encryption / decryption */
db57656b 217 if (!ctx->cdata.keylen)
ae4a825f
HG
218 return aead_null_set_sh_desc(aead);
219
daebc465
CV
220 /*
221 * AES-CTR needs to load IV in CONTEXT1 reg
222 * at an offset of 128bits (16bytes)
223 * CONTEXT1[255:128] = IV
224 */
225 if (ctr_mode)
226 ctx1_iv_off = 16;
227
228 /*
229 * RFC3686 specific:
230 * CONTEXT1[255:128] = {NONCE, IV, COUNTER}
231 */
8cea7b66 232 if (is_rfc3686) {
daebc465 233 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
8cea7b66
HG
234 nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
235 ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
236 }
daebc465 237
4cbe79cc
HG
238 data_len[0] = ctx->adata.keylen_pad;
239 data_len[1] = ctx->cdata.keylen;
240
479bcc7c
HX
241 if (alg->caam.geniv)
242 goto skip_enc;
243
1acebad3
YK
244 /*
245 * Job Descriptor and Shared Descriptors
246 * must all fit into the 64-word Descriptor h/w Buffer
247 */
4cbe79cc
HG
248 if (desc_inline_query(DESC_AEAD_ENC_LEN +
249 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
250 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
251 ARRAY_SIZE(data_len)) < 0)
252 return -EINVAL;
253
254 if (inl_mask & 1)
9c0bc511 255 ctx->adata.key_virt = ctx->key;
4cbe79cc 256 else
9c0bc511 257 ctx->adata.key_dma = ctx->key_dma;
4cbe79cc
HG
258
259 if (inl_mask & 2)
9c0bc511 260 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
4cbe79cc 261 else
9c0bc511 262 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;
4cbe79cc
HG
263
264 ctx->adata.key_inline = !!(inl_mask & 1);
265 ctx->cdata.key_inline = !!(inl_mask & 2);
1acebad3 266
479bcc7c 267 /* aead_encrypt shared descriptor */
1acebad3 268 desc = ctx->sh_desc_enc;
8cea7b66
HG
269 cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ctx->authsize,
270 is_rfc3686, nonce, ctx1_iv_off);
bbf22344
HG
271 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
272 desc_bytes(desc), DMA_TO_DEVICE);
1acebad3 273
479bcc7c 274skip_enc:
1acebad3
YK
275 /*
276 * Job Descriptor and Shared Descriptors
277 * must all fit into the 64-word Descriptor h/w Buffer
278 */
4cbe79cc
HG
279 if (desc_inline_query(DESC_AEAD_DEC_LEN +
280 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
281 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
282 ARRAY_SIZE(data_len)) < 0)
283 return -EINVAL;
284
285 if (inl_mask & 1)
9c0bc511 286 ctx->adata.key_virt = ctx->key;
4cbe79cc 287 else
9c0bc511 288 ctx->adata.key_dma = ctx->key_dma;
4cbe79cc
HG
289
290 if (inl_mask & 2)
9c0bc511 291 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
4cbe79cc 292 else
9c0bc511 293 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;
4cbe79cc
HG
294
295 ctx->adata.key_inline = !!(inl_mask & 1);
296 ctx->cdata.key_inline = !!(inl_mask & 2);
1acebad3 297
479bcc7c 298 /* aead_decrypt shared descriptor */
4464a7d4 299 desc = ctx->sh_desc_dec;
8cea7b66
HG
300 cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize,
301 ctx->authsize, alg->caam.geniv, is_rfc3686,
302 nonce, ctx1_iv_off);
bbf22344
HG
303 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
304 desc_bytes(desc), DMA_TO_DEVICE);
1acebad3 305
479bcc7c
HX
306 if (!alg->caam.geniv)
307 goto skip_givenc;
308
1acebad3
YK
309 /*
310 * Job Descriptor and Shared Descriptors
311 * must all fit into the 64-word Descriptor h/w Buffer
312 */
4cbe79cc
HG
313 if (desc_inline_query(DESC_AEAD_GIVENC_LEN +
314 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
315 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
316 ARRAY_SIZE(data_len)) < 0)
317 return -EINVAL;
318
319 if (inl_mask & 1)
9c0bc511 320 ctx->adata.key_virt = ctx->key;
4cbe79cc 321 else
9c0bc511 322 ctx->adata.key_dma = ctx->key_dma;
4cbe79cc
HG
323
324 if (inl_mask & 2)
9c0bc511 325 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
4cbe79cc 326 else
9c0bc511 327 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;
4cbe79cc
HG
328
329 ctx->adata.key_inline = !!(inl_mask & 1);
330 ctx->cdata.key_inline = !!(inl_mask & 2);
1acebad3
YK
331
332 /* aead_givencrypt shared descriptor */
1d2d87e8 333 desc = ctx->sh_desc_enc;
8cea7b66
HG
334 cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize,
335 ctx->authsize, is_rfc3686, nonce,
336 ctx1_iv_off);
bbf22344
HG
337 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
338 desc_bytes(desc), DMA_TO_DEVICE);
1acebad3 339
479bcc7c 340skip_givenc:
1acebad3
YK
341 return 0;
342}
343
0e479300 344static int aead_setauthsize(struct crypto_aead *authenc,
8e8ec596
KP
345 unsigned int authsize)
346{
347 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
348
349 ctx->authsize = authsize;
1acebad3 350 aead_set_sh_desc(authenc);
8e8ec596
KP
351
352 return 0;
353}
354
3ef8d945
TA
355static int gcm_set_sh_desc(struct crypto_aead *aead)
356{
3ef8d945
TA
357 struct caam_ctx *ctx = crypto_aead_ctx(aead);
358 struct device *jrdev = ctx->jrdev;
3ef8d945 359 u32 *desc;
4cbe79cc
HG
360 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
361 ctx->cdata.keylen;
3ef8d945 362
db57656b 363 if (!ctx->cdata.keylen || !ctx->authsize)
3ef8d945
TA
364 return 0;
365
366 /*
367 * AES GCM encrypt shared descriptor
368 * Job Descriptor and Shared Descriptor
369 * must fit into the 64-word Descriptor h/w Buffer
370 */
4cbe79cc 371 if (rem_bytes >= DESC_GCM_ENC_LEN) {
db57656b 372 ctx->cdata.key_inline = true;
9c0bc511 373 ctx->cdata.key_virt = ctx->key;
db57656b
HG
374 } else {
375 ctx->cdata.key_inline = false;
9c0bc511 376 ctx->cdata.key_dma = ctx->key_dma;
db57656b 377 }
3ef8d945
TA
378
379 desc = ctx->sh_desc_enc;
8cea7b66 380 cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ctx->authsize);
bbf22344
HG
381 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
382 desc_bytes(desc), DMA_TO_DEVICE);
3ef8d945
TA
383
384 /*
385 * Job Descriptor and Shared Descriptors
386 * must all fit into the 64-word Descriptor h/w Buffer
387 */
4cbe79cc 388 if (rem_bytes >= DESC_GCM_DEC_LEN) {
db57656b 389 ctx->cdata.key_inline = true;
9c0bc511 390 ctx->cdata.key_virt = ctx->key;
db57656b
HG
391 } else {
392 ctx->cdata.key_inline = false;
9c0bc511 393 ctx->cdata.key_dma = ctx->key_dma;
db57656b 394 }
3ef8d945
TA
395
396 desc = ctx->sh_desc_dec;
8cea7b66 397 cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ctx->authsize);
bbf22344
HG
398 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
399 desc_bytes(desc), DMA_TO_DEVICE);
3ef8d945
TA
400
401 return 0;
402}
403
404static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
405{
406 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
407
408 ctx->authsize = authsize;
409 gcm_set_sh_desc(authenc);
410
411 return 0;
412}
413
bac68f2c
TA
414static int rfc4106_set_sh_desc(struct crypto_aead *aead)
415{
bac68f2c
TA
416 struct caam_ctx *ctx = crypto_aead_ctx(aead);
417 struct device *jrdev = ctx->jrdev;
bac68f2c 418 u32 *desc;
4cbe79cc
HG
419 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
420 ctx->cdata.keylen;
bac68f2c 421
db57656b 422 if (!ctx->cdata.keylen || !ctx->authsize)
bac68f2c
TA
423 return 0;
424
425 /*
426 * RFC4106 encrypt shared descriptor
427 * Job Descriptor and Shared Descriptor
428 * must fit into the 64-word Descriptor h/w Buffer
429 */
4cbe79cc 430 if (rem_bytes >= DESC_RFC4106_ENC_LEN) {
db57656b 431 ctx->cdata.key_inline = true;
9c0bc511 432 ctx->cdata.key_virt = ctx->key;
db57656b
HG
433 } else {
434 ctx->cdata.key_inline = false;
9c0bc511 435 ctx->cdata.key_dma = ctx->key_dma;
db57656b 436 }
bac68f2c
TA
437
438 desc = ctx->sh_desc_enc;
8cea7b66 439 cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ctx->authsize);
bbf22344
HG
440 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
441 desc_bytes(desc), DMA_TO_DEVICE);
bac68f2c
TA
442
443 /*
444 * Job Descriptor and Shared Descriptors
445 * must all fit into the 64-word Descriptor h/w Buffer
446 */
4cbe79cc 447 if (rem_bytes >= DESC_RFC4106_DEC_LEN) {
db57656b 448 ctx->cdata.key_inline = true;
9c0bc511 449 ctx->cdata.key_virt = ctx->key;
db57656b
HG
450 } else {
451 ctx->cdata.key_inline = false;
9c0bc511 452 ctx->cdata.key_dma = ctx->key_dma;
db57656b 453 }
bac68f2c
TA
454
455 desc = ctx->sh_desc_dec;
8cea7b66 456 cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ctx->authsize);
bbf22344
HG
457 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
458 desc_bytes(desc), DMA_TO_DEVICE);
bac68f2c 459
bac68f2c
TA
460 return 0;
461}
462
463static int rfc4106_setauthsize(struct crypto_aead *authenc,
464 unsigned int authsize)
465{
466 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
467
468 ctx->authsize = authsize;
469 rfc4106_set_sh_desc(authenc);
470
471 return 0;
472}
473
5d0429a3
TA
474static int rfc4543_set_sh_desc(struct crypto_aead *aead)
475{
5d0429a3
TA
476 struct caam_ctx *ctx = crypto_aead_ctx(aead);
477 struct device *jrdev = ctx->jrdev;
5d0429a3 478 u32 *desc;
4cbe79cc
HG
479 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
480 ctx->cdata.keylen;
5d0429a3 481
db57656b 482 if (!ctx->cdata.keylen || !ctx->authsize)
5d0429a3
TA
483 return 0;
484
485 /*
486 * RFC4543 encrypt shared descriptor
487 * Job Descriptor and Shared Descriptor
488 * must fit into the 64-word Descriptor h/w Buffer
489 */
4cbe79cc 490 if (rem_bytes >= DESC_RFC4543_ENC_LEN) {
db57656b 491 ctx->cdata.key_inline = true;
9c0bc511 492 ctx->cdata.key_virt = ctx->key;
db57656b
HG
493 } else {
494 ctx->cdata.key_inline = false;
9c0bc511 495 ctx->cdata.key_dma = ctx->key_dma;
db57656b 496 }
5d0429a3
TA
497
498 desc = ctx->sh_desc_enc;
8cea7b66 499 cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ctx->authsize);
bbf22344
HG
500 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
501 desc_bytes(desc), DMA_TO_DEVICE);
5d0429a3
TA
502
503 /*
504 * Job Descriptor and Shared Descriptors
505 * must all fit into the 64-word Descriptor h/w Buffer
506 */
4cbe79cc 507 if (rem_bytes >= DESC_RFC4543_DEC_LEN) {
db57656b 508 ctx->cdata.key_inline = true;
9c0bc511 509 ctx->cdata.key_virt = ctx->key;
db57656b
HG
510 } else {
511 ctx->cdata.key_inline = false;
9c0bc511 512 ctx->cdata.key_dma = ctx->key_dma;
db57656b 513 }
5d0429a3
TA
514
515 desc = ctx->sh_desc_dec;
8cea7b66 516 cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ctx->authsize);
bbf22344
HG
517 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
518 desc_bytes(desc), DMA_TO_DEVICE);
5d0429a3 519
f2147b88
HX
520 return 0;
521}
5d0429a3 522
f2147b88
HX
523static int rfc4543_setauthsize(struct crypto_aead *authenc,
524 unsigned int authsize)
525{
526 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
5d0429a3 527
f2147b88
HX
528 ctx->authsize = authsize;
529 rfc4543_set_sh_desc(authenc);
5d0429a3 530
f2147b88
HX
531 return 0;
532}
5d0429a3 533
0e479300 534static int aead_setkey(struct crypto_aead *aead,
8e8ec596
KP
535 const u8 *key, unsigned int keylen)
536{
8e8ec596
KP
537 struct caam_ctx *ctx = crypto_aead_ctx(aead);
538 struct device *jrdev = ctx->jrdev;
4e6e0b27 539 struct crypto_authenc_keys keys;
8e8ec596
KP
540 int ret = 0;
541
4e6e0b27 542 if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
8e8ec596
KP
543 goto badkey;
544
8e8ec596
KP
545#ifdef DEBUG
546 printk(KERN_ERR "keylen %d enckeylen %d authkeylen %d\n",
4e6e0b27
HG
547 keys.authkeylen + keys.enckeylen, keys.enckeylen,
548 keys.authkeylen);
514df281 549 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
8e8ec596
KP
550 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
551#endif
8e8ec596 552
6655cb8e
HG
553 ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey,
554 keys.authkeylen, CAAM_MAX_KEY_SIZE -
555 keys.enckeylen);
8e8ec596 556 if (ret) {
8e8ec596
KP
557 goto badkey;
558 }
559
560 /* postpend encryption key to auth split key */
db57656b 561 memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
bbf22344
HG
562 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
563 keys.enckeylen, DMA_TO_DEVICE);
8e8ec596 564#ifdef DEBUG
514df281 565 print_hex_dump(KERN_ERR, "ctx.key@"__stringify(__LINE__)": ",
8e8ec596 566 DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
db57656b 567 ctx->adata.keylen_pad + keys.enckeylen, 1);
8e8ec596 568#endif
db57656b 569 ctx->cdata.keylen = keys.enckeylen;
bbf22344 570 return aead_set_sh_desc(aead);
8e8ec596
KP
571badkey:
572 crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
573 return -EINVAL;
574}
575
3ef8d945
TA
576static int gcm_setkey(struct crypto_aead *aead,
577 const u8 *key, unsigned int keylen)
578{
579 struct caam_ctx *ctx = crypto_aead_ctx(aead);
580 struct device *jrdev = ctx->jrdev;
3ef8d945
TA
581
582#ifdef DEBUG
583 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
584 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
585#endif
586
587 memcpy(ctx->key, key, keylen);
bbf22344 588 dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, DMA_TO_DEVICE);
db57656b 589 ctx->cdata.keylen = keylen;
3ef8d945 590
bbf22344 591 return gcm_set_sh_desc(aead);
3ef8d945
TA
592}
593
bac68f2c
TA
594static int rfc4106_setkey(struct crypto_aead *aead,
595 const u8 *key, unsigned int keylen)
596{
597 struct caam_ctx *ctx = crypto_aead_ctx(aead);
598 struct device *jrdev = ctx->jrdev;
bac68f2c
TA
599
600 if (keylen < 4)
601 return -EINVAL;
602
603#ifdef DEBUG
604 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
605 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
606#endif
607
608 memcpy(ctx->key, key, keylen);
609
610 /*
611 * The last four bytes of the key material are used as the salt value
612 * in the nonce. Update the AES key length.
613 */
db57656b 614 ctx->cdata.keylen = keylen - 4;
bbf22344
HG
615 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
616 DMA_TO_DEVICE);
617 return rfc4106_set_sh_desc(aead);
bac68f2c
TA
618}
619
5d0429a3
TA
620static int rfc4543_setkey(struct crypto_aead *aead,
621 const u8 *key, unsigned int keylen)
622{
623 struct caam_ctx *ctx = crypto_aead_ctx(aead);
624 struct device *jrdev = ctx->jrdev;
5d0429a3
TA
625
626 if (keylen < 4)
627 return -EINVAL;
628
629#ifdef DEBUG
630 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
631 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
632#endif
633
634 memcpy(ctx->key, key, keylen);
635
636 /*
637 * The last four bytes of the key material are used as the salt value
638 * in the nonce. Update the AES key length.
639 */
db57656b 640 ctx->cdata.keylen = keylen - 4;
bbf22344
HG
641 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
642 DMA_TO_DEVICE);
643 return rfc4543_set_sh_desc(aead);
5d0429a3
TA
644}
645
acdca31d
YK
646static int ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
647 const u8 *key, unsigned int keylen)
648{
649 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
a5f57cff
CV
650 struct crypto_tfm *tfm = crypto_ablkcipher_tfm(ablkcipher);
651 const char *alg_name = crypto_tfm_alg_name(tfm);
acdca31d 652 struct device *jrdev = ctx->jrdev;
8cea7b66 653 unsigned int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
acdca31d 654 u32 *desc;
2b22f6c5 655 u32 ctx1_iv_off = 0;
db57656b 656 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
2b22f6c5 657 OP_ALG_AAI_CTR_MOD128);
a5f57cff
CV
658 const bool is_rfc3686 = (ctr_mode &&
659 (strstr(alg_name, "rfc3686") != NULL));
acdca31d 660
8cea7b66 661 memcpy(ctx->key, key, keylen);
acdca31d 662#ifdef DEBUG
514df281 663 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
acdca31d
YK
664 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
665#endif
2b22f6c5
CV
666 /*
667 * AES-CTR needs to load IV in CONTEXT1 reg
668 * at an offset of 128bits (16bytes)
669 * CONTEXT1[255:128] = IV
670 */
671 if (ctr_mode)
672 ctx1_iv_off = 16;
acdca31d 673
a5f57cff
CV
674 /*
675 * RFC3686 specific:
676 * | CONTEXT1[255:128] = {NONCE, IV, COUNTER}
677 * | *key = {KEY, NONCE}
678 */
679 if (is_rfc3686) {
680 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
681 keylen -= CTR_RFC3686_NONCE_SIZE;
682 }
683
bbf22344 684 dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, DMA_TO_DEVICE);
db57656b 685 ctx->cdata.keylen = keylen;
9c0bc511 686 ctx->cdata.key_virt = ctx->key;
db57656b 687 ctx->cdata.key_inline = true;
acdca31d
YK
688
689 /* ablkcipher_encrypt shared descriptor */
690 desc = ctx->sh_desc_enc;
8cea7b66
HG
691 cnstr_shdsc_ablkcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686,
692 ctx1_iv_off);
bbf22344
HG
693 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
694 desc_bytes(desc), DMA_TO_DEVICE);
8cea7b66 695
acdca31d
YK
696 /* ablkcipher_decrypt shared descriptor */
697 desc = ctx->sh_desc_dec;
8cea7b66
HG
698 cnstr_shdsc_ablkcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686,
699 ctx1_iv_off);
bbf22344
HG
700 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
701 desc_bytes(desc), DMA_TO_DEVICE);
acdca31d 702
7222d1a3
CV
703 /* ablkcipher_givencrypt shared descriptor */
704 desc = ctx->sh_desc_givenc;
8cea7b66
HG
705 cnstr_shdsc_ablkcipher_givencap(desc, &ctx->cdata, ivsize, is_rfc3686,
706 ctx1_iv_off);
bbf22344
HG
707 dma_sync_single_for_device(jrdev, ctx->sh_desc_givenc_dma,
708 desc_bytes(desc), DMA_TO_DEVICE);
acdca31d 709
8cea7b66 710 return 0;
acdca31d
YK
711}
712
c6415a60
CV
713static int xts_ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
714 const u8 *key, unsigned int keylen)
715{
716 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
717 struct device *jrdev = ctx->jrdev;
8cea7b66 718 u32 *desc;
c6415a60
CV
719
720 if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
721 crypto_ablkcipher_set_flags(ablkcipher,
722 CRYPTO_TFM_RES_BAD_KEY_LEN);
723 dev_err(jrdev, "key size mismatch\n");
724 return -EINVAL;
725 }
726
727 memcpy(ctx->key, key, keylen);
bbf22344 728 dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, DMA_TO_DEVICE);
db57656b 729 ctx->cdata.keylen = keylen;
9c0bc511 730 ctx->cdata.key_virt = ctx->key;
db57656b 731 ctx->cdata.key_inline = true;
c6415a60
CV
732
733 /* xts_ablkcipher_encrypt shared descriptor */
734 desc = ctx->sh_desc_enc;
8cea7b66 735 cnstr_shdsc_xts_ablkcipher_encap(desc, &ctx->cdata);
bbf22344
HG
736 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
737 desc_bytes(desc), DMA_TO_DEVICE);
c6415a60
CV
738
739 /* xts_ablkcipher_decrypt shared descriptor */
740 desc = ctx->sh_desc_dec;
8cea7b66 741 cnstr_shdsc_xts_ablkcipher_decap(desc, &ctx->cdata);
bbf22344
HG
742 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
743 desc_bytes(desc), DMA_TO_DEVICE);
c6415a60
CV
744
745 return 0;
746}
747
8e8ec596 748/*
1acebad3 749 * aead_edesc - s/w-extended aead descriptor
fa0c92db
HG
750 * @src_nents: number of segments in input s/w scatterlist
751 * @dst_nents: number of segments in output s/w scatterlist
a299c837
YK
752 * @sec4_sg_bytes: length of dma mapped sec4_sg space
753 * @sec4_sg_dma: bus physical mapped address of h/w link table
4ca7c7d8 754 * @sec4_sg: pointer to h/w link table
8e8ec596
KP
755 * @hw_desc: the h/w job descriptor followed by any referenced link tables
756 */
0e479300 757struct aead_edesc {
8e8ec596
KP
758 int src_nents;
759 int dst_nents;
a299c837
YK
760 int sec4_sg_bytes;
761 dma_addr_t sec4_sg_dma;
762 struct sec4_sg_entry *sec4_sg;
f2147b88 763 u32 hw_desc[];
8e8ec596
KP
764};
765
acdca31d
YK
766/*
767 * ablkcipher_edesc - s/w-extended ablkcipher descriptor
fa0c92db
HG
768 * @src_nents: number of segments in input s/w scatterlist
769 * @dst_nents: number of segments in output s/w scatterlist
acdca31d 770 * @iv_dma: dma address of iv for checking continuity and link table
a299c837
YK
771 * @sec4_sg_bytes: length of dma mapped sec4_sg space
772 * @sec4_sg_dma: bus physical mapped address of h/w link table
4ca7c7d8 773 * @sec4_sg: pointer to h/w link table
acdca31d
YK
774 * @hw_desc: the h/w job descriptor followed by any referenced link tables
775 */
776struct ablkcipher_edesc {
777 int src_nents;
778 int dst_nents;
779 dma_addr_t iv_dma;
a299c837
YK
780 int sec4_sg_bytes;
781 dma_addr_t sec4_sg_dma;
782 struct sec4_sg_entry *sec4_sg;
acdca31d
YK
783 u32 hw_desc[0];
784};
785
1acebad3 786static void caam_unmap(struct device *dev, struct scatterlist *src,
643b39b0 787 struct scatterlist *dst, int src_nents,
13fb8fd7 788 int dst_nents,
a299c837
YK
789 dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
790 int sec4_sg_bytes)
8e8ec596 791{
643b39b0 792 if (dst != src) {
fa0c92db
HG
793 if (src_nents)
794 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
795 dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
8e8ec596 796 } else {
fa0c92db 797 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
8e8ec596
KP
798 }
799
1acebad3
YK
800 if (iv_dma)
801 dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE);
a299c837
YK
802 if (sec4_sg_bytes)
803 dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
8e8ec596
KP
804 DMA_TO_DEVICE);
805}
806
1acebad3
YK
807static void aead_unmap(struct device *dev,
808 struct aead_edesc *edesc,
809 struct aead_request *req)
f2147b88
HX
810{
811 caam_unmap(dev, req->src, req->dst,
13fb8fd7 812 edesc->src_nents, edesc->dst_nents, 0, 0,
f2147b88
HX
813 edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
814}
815
acdca31d
YK
816static void ablkcipher_unmap(struct device *dev,
817 struct ablkcipher_edesc *edesc,
818 struct ablkcipher_request *req)
819{
820 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
821 int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
822
823 caam_unmap(dev, req->src, req->dst,
13fb8fd7
LC
824 edesc->src_nents, edesc->dst_nents,
825 edesc->iv_dma, ivsize,
643b39b0 826 edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
acdca31d
YK
827}
828
0e479300 829static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
8e8ec596
KP
830 void *context)
831{
0e479300
YK
832 struct aead_request *req = context;
833 struct aead_edesc *edesc;
f2147b88
HX
834
835#ifdef DEBUG
836 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
837#endif
838
839 edesc = container_of(desc, struct aead_edesc, hw_desc[0]);
840
841 if (err)
842 caam_jr_strstatus(jrdev, err);
843
844 aead_unmap(jrdev, edesc, req);
845
846 kfree(edesc);
847
848 aead_request_complete(req, err);
849}
850
0e479300 851static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
8e8ec596
KP
852 void *context)
853{
0e479300
YK
854 struct aead_request *req = context;
855 struct aead_edesc *edesc;
f2147b88
HX
856
857#ifdef DEBUG
858 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
859#endif
860
861 edesc = container_of(desc, struct aead_edesc, hw_desc[0]);
862
863 if (err)
864 caam_jr_strstatus(jrdev, err);
865
866 aead_unmap(jrdev, edesc, req);
867
868 /*
869 * verify hw auth check passed else return -EBADMSG
870 */
871 if ((err & JRSTA_CCBERR_ERRID_MASK) == JRSTA_CCBERR_ERRID_ICVCHK)
872 err = -EBADMSG;
873
874 kfree(edesc);
875
876 aead_request_complete(req, err);
877}
878
acdca31d
YK
879static void ablkcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
880 void *context)
881{
882 struct ablkcipher_request *req = context;
883 struct ablkcipher_edesc *edesc;
884#ifdef DEBUG
885 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
886 int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
887
888 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
889#endif
890
4ca7c7d8 891 edesc = container_of(desc, struct ablkcipher_edesc, hw_desc[0]);
acdca31d 892
fa9659cd
MV
893 if (err)
894 caam_jr_strstatus(jrdev, err);
acdca31d
YK
895
896#ifdef DEBUG
514df281 897 print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ",
acdca31d
YK
898 DUMP_PREFIX_ADDRESS, 16, 4, req->info,
899 edesc->src_nents > 1 ? 100 : ivsize, 1);
5ecf8ef9
CV
900 dbg_dump_sg(KERN_ERR, "dst @"__stringify(__LINE__)": ",
901 DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
00fef2b2 902 edesc->dst_nents > 1 ? 100 : req->nbytes, 1);
acdca31d
YK
903#endif
904
905 ablkcipher_unmap(jrdev, edesc, req);
906 kfree(edesc);
907
908 ablkcipher_request_complete(req, err);
909}
910
911static void ablkcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
912 void *context)
913{
914 struct ablkcipher_request *req = context;
915 struct ablkcipher_edesc *edesc;
916#ifdef DEBUG
917 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
918 int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
919
920 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
921#endif
922
4ca7c7d8 923 edesc = container_of(desc, struct ablkcipher_edesc, hw_desc[0]);
fa9659cd
MV
924 if (err)
925 caam_jr_strstatus(jrdev, err);
acdca31d
YK
926
927#ifdef DEBUG
514df281 928 print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ",
acdca31d
YK
929 DUMP_PREFIX_ADDRESS, 16, 4, req->info,
930 ivsize, 1);
5ecf8ef9
CV
931 dbg_dump_sg(KERN_ERR, "dst @"__stringify(__LINE__)": ",
932 DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
00fef2b2 933 edesc->dst_nents > 1 ? 100 : req->nbytes, 1);
acdca31d
YK
934#endif
935
936 ablkcipher_unmap(jrdev, edesc, req);
937 kfree(edesc);
938
939 ablkcipher_request_complete(req, err);
940}
941
f2147b88
HX
/*
 * Fill in aead job descriptor
 *
 * Builds the job descriptor in edesc->hw_desc: points it at the shared
 * descriptor for the requested direction, then appends the SEQ IN / SEQ OUT
 * pointers for source and destination data (either flat DMA addresses or
 * the sec4 S/G table) and loads the associated-data length into REG3.
 */
static void init_aead_job(struct aead_request *req,
			  struct aead_edesc *edesc,
			  bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	int authsize = ctx->authsize;
	u32 *desc = edesc->hw_desc;
	u32 out_options, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;
	dma_addr_t ptr;
	u32 *sh_desc;

	/* Select the shared descriptor matching the requested direction */
	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (all_contig) {
		/* src_nents == 0 means zero-length input: no mapping exists */
		src_dma = edesc->src_nents ? sg_dma_address(req->src) : 0;
		in_options = 0;
	} else {
		/* Input comes via the S/G table; skip src entries for dst */
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents;
		in_options = LDST_SGF;
	}

	append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
			  in_options);

	/* Default to in-place operation: output overlays the input */
	dst_dma = src_dma;
	out_options = in_options;

	if (unlikely(req->src != req->dst)) {
		if (edesc->dst_nents == 1) {
			dst_dma = sg_dma_address(req->dst);
		} else {
			/* dst S/G entries follow the src entries in the table */
			dst_dma = edesc->sec4_sg_dma +
				  sec4_sg_index *
				  sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}

	/* Encrypt emits the ICV (authsize extra); decrypt consumes it */
	if (encrypt)
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen + authsize,
				   out_options);
	else
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen - authsize,
				   out_options);

	/* REG3 = assoclen */
	append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
}
1003
/*
 * Fill in a GCM job descriptor: build the common AEAD job, then feed the
 * 12-byte GCM IV (salt + IV for the non-generic rfc4106-style case) to
 * CLASS1 as immediate FIFO-load data.
 */
static void init_gcm_job(struct aead_request *req,
			 struct aead_edesc *edesc,
			 bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc = edesc->hw_desc;
	/* 12-byte IV -> "generic" GCM; otherwise a 4-byte salt is prepended */
	bool generic_gcm = (ivsize == 12);
	unsigned int last;

	init_aead_job(req, edesc, all_contig, encrypt);

	/* BUG This should not be specific to generic GCM. */
	last = 0;
	/* With no data at all, the IV FIFO load must be flagged as LAST1 */
	if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
		last = FIFOLD_TYPE_LAST1;

	/* Read GCM IV */
	append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
		   FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | 12 | last);
	/* Append Salt (stored right after the key in ctx->key) */
	if (!generic_gcm)
		append_data(desc, ctx->key + ctx->cdata.keylen, 4);
	/* Append IV */
	append_data(desc, req->iv, ivsize);
	/* End of blank commands */
}
1032
479bcc7c
HX
/*
 * Fill in an authenc (cipher + HMAC) job descriptor: build the common AEAD
 * job, then load the IV into CONTEXT1 at the offset required by the cipher
 * mode (CTR / RFC3686 place it after a 128-bit gap / nonce).
 */
static void init_authenc_job(struct aead_request *req,
			     struct aead_edesc *edesc,
			     bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;
	u32 *desc = edesc->hw_desc;
	u32 ivoffset = 0;

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ivoffset = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686)
		ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;

	init_aead_job(req, edesc, all_contig, encrypt);

	/* geniv decrypt paths carry the IV in-band, so skip the load then */
	if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
		append_load_as_imm(desc, req->iv, ivsize,
				   LDST_CLASS_1_CCB |
				   LDST_SRCDST_BYTE_CONTEXT |
				   (ivoffset << LDST_OFFSET_SHIFT));
}
1071
acdca31d
YK
/*
 * Fill in ablkcipher job descriptor
 *
 * The input sequence is IV followed by the payload (req->nbytes + ivsize);
 * when not contiguous, entry 0 of the sec4 S/G table is the IV and the src
 * entries follow. The output sequence is the payload only.
 */
static void init_ablkcipher_job(u32 *sh_desc, dma_addr_t ptr,
				struct ablkcipher_edesc *edesc,
				struct ablkcipher_request *req,
				bool iv_contig)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	u32 *desc = edesc->hw_desc;
	u32 out_options = 0, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "presciv@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       ivsize, 1);
	pr_err("asked=%d, nbytes%d\n",
	       (int)edesc->src_nents > 1 ? 100 : req->nbytes, req->nbytes);
	dbg_dump_sg(KERN_ERR, "src @"__stringify(__LINE__)": ",
		    DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		    edesc->src_nents > 1 ? 100 : req->nbytes, 1);
#endif

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (iv_contig) {
		/* IV sits directly in front of the source data in memory */
		src_dma = edesc->iv_dma;
		in_options = 0;
	} else {
		/* S/G table: entry 0 is the IV, then the src entries */
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents + 1;
		in_options = LDST_SGF;
	}
	append_seq_in_ptr(desc, src_dma, req->nbytes + ivsize, in_options);

	if (likely(req->src == req->dst)) {
		if (edesc->src_nents == 1 && iv_contig) {
			dst_dma = sg_dma_address(req->src);
		} else {
			/* In-place via S/G: reuse src entries, skip the IV */
			dst_dma = edesc->sec4_sg_dma +
				  sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	} else {
		if (edesc->dst_nents == 1) {
			dst_dma = sg_dma_address(req->dst);
		} else {
			/* dst entries follow IV + src entries in the table */
			dst_dma = edesc->sec4_sg_dma +
				  sec4_sg_index * sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}
	append_seq_out_ptr(desc, dst_dma, req->nbytes, out_options);
}
1130
7222d1a3
CV
/*
 * Fill in ablkcipher givencrypt job descriptor
 *
 * Unlike the plain ablkcipher job, the IV here is part of the OUTPUT
 * sequence (req->nbytes + ivsize): the engine writes the generated IV
 * followed by the ciphertext.
 */
static void init_ablkcipher_giv_job(u32 *sh_desc, dma_addr_t ptr,
				    struct ablkcipher_edesc *edesc,
				    struct ablkcipher_request *req,
				    bool iv_contig)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	u32 *desc = edesc->hw_desc;
	u32 out_options, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "presciv@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       ivsize, 1);
	dbg_dump_sg(KERN_ERR, "src @" __stringify(__LINE__) ": ",
		    DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		    edesc->src_nents > 1 ? 100 : req->nbytes, 1);
#endif

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (edesc->src_nents == 1) {
		src_dma = sg_dma_address(req->src);
		in_options = 0;
	} else {
		/* Input via S/G table; remember where the dst part starts */
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents;
		in_options = LDST_SGF;
	}
	append_seq_in_ptr(desc, src_dma, req->nbytes, in_options);

	if (iv_contig) {
		/* IV buffer is directly followed by the destination data */
		dst_dma = edesc->iv_dma;
		out_options = 0;
	} else {
		/* Output via S/G table: IV entry + dst entries */
		dst_dma = edesc->sec4_sg_dma +
			  sec4_sg_index * sizeof(struct sec4_sg_entry);
		out_options = LDST_SGF;
	}
	append_seq_out_ptr(desc, dst_dma, req->nbytes + ivsize, out_options);
}
1178
/*
 * allocate and map the aead extended descriptor
 *
 * Counts and DMA-maps the source/destination scatterlists, allocates one
 * buffer holding the edesc header, the job descriptor (desc_bytes) and the
 * sec4 S/G table, fills the table and maps it. Returns the edesc or an
 * ERR_PTR; on every error path all mappings taken so far are undone.
 * *all_contig_ptr reports whether the input fits a single flat DMA address.
 */
static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
					   int desc_bytes, bool *all_contig_ptr,
					   bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	/* GFP_KERNEL only when the caller allows sleeping */
	gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
		       CRYPTO_TFM_REQ_MAY_SLEEP)) ? GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct aead_edesc *edesc;
	int sec4_sg_index, sec4_sg_len, sec4_sg_bytes;
	unsigned int authsize = ctx->authsize;

	if (unlikely(req->dst != req->src)) {
		src_nents = sg_nents_for_len(req->src, req->assoclen +
					     req->cryptlen);
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen);
			return ERR_PTR(src_nents);
		}

		/* dst must hold the ICV on encrypt, loses it on decrypt */
		dst_nents = sg_nents_for_len(req->dst, req->assoclen +
					     req->cryptlen +
					     (encrypt ? authsize :
						(-authsize)));
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : (-authsize)));
			return ERR_PTR(dst_nents);
		}
	} else {
		/* In-place: src must also hold the ICV when encrypting */
		src_nents = sg_nents_for_len(req->src, req->assoclen +
					     req->cryptlen +
					     (encrypt ? authsize : 0));
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : 0));
			return ERR_PTR(src_nents);
		}
	}

	if (likely(req->src == req->dst)) {
		/* In-place: single bidirectional mapping */
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		/* Cover also the case of null (zero length) input data */
		if (src_nents) {
			mapped_src_nents = dma_map_sg(jrdev, req->src,
						      src_nents, DMA_TO_DEVICE);
			if (unlikely(!mapped_src_nents)) {
				dev_err(jrdev, "unable to map source\n");
				return ERR_PTR(-ENOMEM);
			}
		} else {
			mapped_src_nents = 0;
		}

		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	/* S/G table entries are only needed for multi-segment sides */
	sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0;
	sec4_sg_len += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
	sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	/* The S/G table lives right after the job descriptor in the edesc */
	edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
			 desc_bytes;
	*all_contig_ptr = !(mapped_src_nents > 1);

	sec4_sg_index = 0;
	if (mapped_src_nents > 1) {
		sg_to_sec4_sg_last(req->src, mapped_src_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
		sec4_sg_index += mapped_src_nents;
	}
	if (mapped_dst_nents > 1) {
		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
	}

	/* Nothing to map if neither side needed an S/G table */
	if (!sec4_sg_bytes)
		return edesc;

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}

	edesc->sec4_sg_bytes = sec4_sg_bytes;

	return edesc;
}
1302
f2147b88 1303static int gcm_encrypt(struct aead_request *req)
8e8ec596 1304{
0e479300
YK
1305 struct aead_edesc *edesc;
1306 struct crypto_aead *aead = crypto_aead_reqtfm(req);
8e8ec596
KP
1307 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1308 struct device *jrdev = ctx->jrdev;
1acebad3 1309 bool all_contig;
8e8ec596 1310 u32 *desc;
1acebad3
YK
1311 int ret = 0;
1312
8e8ec596 1313 /* allocate extended descriptor */
f2147b88 1314 edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, true);
8e8ec596
KP
1315 if (IS_ERR(edesc))
1316 return PTR_ERR(edesc);
1317
1acebad3 1318 /* Create and submit job descriptor */
f2147b88 1319 init_gcm_job(req, edesc, all_contig, true);
1acebad3 1320#ifdef DEBUG
514df281 1321 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
1acebad3
YK
1322 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1323 desc_bytes(edesc->hw_desc), 1);
1324#endif
8e8ec596 1325
1acebad3
YK
1326 desc = edesc->hw_desc;
1327 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
1328 if (!ret) {
1329 ret = -EINPROGRESS;
1330 } else {
1331 aead_unmap(jrdev, edesc, req);
1332 kfree(edesc);
1333 }
8e8ec596 1334
1acebad3 1335 return ret;
8e8ec596
KP
1336}
1337
46218750
HX
1338static int ipsec_gcm_encrypt(struct aead_request *req)
1339{
1340 if (req->assoclen < 8)
1341 return -EINVAL;
1342
1343 return gcm_encrypt(req);
1344}
1345
479bcc7c 1346static int aead_encrypt(struct aead_request *req)
f2147b88
HX
1347{
1348 struct aead_edesc *edesc;
1349 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1350 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1351 struct device *jrdev = ctx->jrdev;
1352 bool all_contig;
1353 u32 *desc;
1354 int ret = 0;
1355
1356 /* allocate extended descriptor */
479bcc7c
HX
1357 edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
1358 &all_contig, true);
f2147b88
HX
1359 if (IS_ERR(edesc))
1360 return PTR_ERR(edesc);
1361
1362 /* Create and submit job descriptor */
479bcc7c 1363 init_authenc_job(req, edesc, all_contig, true);
f2147b88
HX
1364#ifdef DEBUG
1365 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
1366 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1367 desc_bytes(edesc->hw_desc), 1);
1368#endif
1369
1370 desc = edesc->hw_desc;
479bcc7c 1371 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
f2147b88
HX
1372 if (!ret) {
1373 ret = -EINPROGRESS;
1374 } else {
479bcc7c 1375 aead_unmap(jrdev, edesc, req);
f2147b88
HX
1376 kfree(edesc);
1377 }
1378
1379 return ret;
1380}
1381
1382static int gcm_decrypt(struct aead_request *req)
1383{
1384 struct aead_edesc *edesc;
1385 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1386 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1387 struct device *jrdev = ctx->jrdev;
1388 bool all_contig;
1389 u32 *desc;
1390 int ret = 0;
1391
1392 /* allocate extended descriptor */
1393 edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, false);
1394 if (IS_ERR(edesc))
1395 return PTR_ERR(edesc);
1396
1397 /* Create and submit job descriptor*/
1398 init_gcm_job(req, edesc, all_contig, false);
1399#ifdef DEBUG
1400 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
1401 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1402 desc_bytes(edesc->hw_desc), 1);
1403#endif
1404
1405 desc = edesc->hw_desc;
1406 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
1407 if (!ret) {
1408 ret = -EINPROGRESS;
1409 } else {
1410 aead_unmap(jrdev, edesc, req);
1411 kfree(edesc);
1412 }
1413
1414 return ret;
1415}
1416
46218750
HX
1417static int ipsec_gcm_decrypt(struct aead_request *req)
1418{
1419 if (req->assoclen < 8)
1420 return -EINVAL;
1421
1422 return gcm_decrypt(req);
1423}
1424
479bcc7c 1425static int aead_decrypt(struct aead_request *req)
8e8ec596 1426{
1acebad3 1427 struct aead_edesc *edesc;
8e8ec596 1428 struct crypto_aead *aead = crypto_aead_reqtfm(req);
8e8ec596
KP
1429 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1430 struct device *jrdev = ctx->jrdev;
1acebad3 1431 bool all_contig;
8e8ec596 1432 u32 *desc;
1acebad3 1433 int ret = 0;
8e8ec596 1434
5ecf8ef9 1435#ifdef DEBUG
5ecf8ef9
CV
1436 dbg_dump_sg(KERN_ERR, "dec src@"__stringify(__LINE__)": ",
1437 DUMP_PREFIX_ADDRESS, 16, 4, req->src,
00fef2b2 1438 req->assoclen + req->cryptlen, 1);
5ecf8ef9
CV
1439#endif
1440
8e8ec596 1441 /* allocate extended descriptor */
479bcc7c
HX
1442 edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
1443 &all_contig, false);
8e8ec596
KP
1444 if (IS_ERR(edesc))
1445 return PTR_ERR(edesc);
1446
1acebad3 1447 /* Create and submit job descriptor*/
479bcc7c 1448 init_authenc_job(req, edesc, all_contig, false);
1acebad3 1449#ifdef DEBUG
514df281 1450 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
1acebad3
YK
1451 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1452 desc_bytes(edesc->hw_desc), 1);
1453#endif
1454
8e8ec596 1455 desc = edesc->hw_desc;
479bcc7c 1456 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
1acebad3
YK
1457 if (!ret) {
1458 ret = -EINPROGRESS;
1459 } else {
479bcc7c 1460 aead_unmap(jrdev, edesc, req);
1acebad3
YK
1461 kfree(edesc);
1462 }
8e8ec596 1463
1acebad3
YK
1464 return ret;
1465}
8e8ec596 1466
acdca31d
YK
/*
 * allocate and map the ablkcipher extended descriptor for ablkcipher
 *
 * Counts and DMA-maps src/dst scatterlists and the IV, decides whether the
 * IV is physically contiguous with the source (in which case no S/G table
 * is needed for the input), allocates the edesc (header + job descriptor +
 * sec4 S/G table) and maps the table. Returns the edesc or an ERR_PTR;
 * all error paths undo the mappings taken so far.
 */
static struct ablkcipher_edesc *ablkcipher_edesc_alloc(struct ablkcipher_request
						       *req, int desc_bytes,
						       bool *iv_contig_out)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	/* GFP_KERNEL only when the caller allows sleeping */
	gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
					  CRYPTO_TFM_REQ_MAY_SLEEP)) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct ablkcipher_edesc *edesc;
	dma_addr_t iv_dma = 0;
	bool in_contig;
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;

	src_nents = sg_nents_for_len(req->src, req->nbytes);
	if (unlikely(src_nents < 0)) {
		dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
			req->nbytes);
		return ERR_PTR(src_nents);
	}

	if (req->dst != req->src) {
		dst_nents = sg_nents_for_len(req->dst, req->nbytes);
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->nbytes);
			return ERR_PTR(dst_nents);
		}
	}

	if (likely(req->src == req->dst)) {
		/* In-place: single bidirectional mapping */
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_TO_DEVICE);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}

		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	iv_dma = dma_map_single(jrdev, req->info, ivsize, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, iv_dma)) {
		dev_err(jrdev, "unable to map IV\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	/* IV immediately followed by single-segment src: no input S/G */
	if (mapped_src_nents == 1 &&
	    iv_dma + ivsize == sg_dma_address(req->src)) {
		in_contig = true;
		sec4_sg_ents = 0;
	} else {
		in_contig = false;
		/* one entry for the IV plus one per src segment */
		sec4_sg_ents = 1 + mapped_src_nents;
	}
	dst_sg_idx = sec4_sg_ents;
	sec4_sg_ents += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
	sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		dev_err(jrdev, "could not allocate extended descriptor\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->sec4_sg_bytes = sec4_sg_bytes;
	/* The S/G table lives right after the job descriptor in the edesc */
	edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) +
			 desc_bytes;

	if (!in_contig) {
		/* entry 0 = IV, entries 1.. = src segments */
		dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
		sg_to_sec4_sg_last(req->src, mapped_src_nents,
				   edesc->sec4_sg + 1, 0);
	}

	if (mapped_dst_nents > 1) {
		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
				   edesc->sec4_sg + dst_sg_idx, 0);
	}

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}

	edesc->iv_dma = iv_dma;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ablkcipher sec4_sg@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
		       sec4_sg_bytes, 1);
#endif

	*iv_contig_out = in_contig;
	return edesc;
}
1595
1596static int ablkcipher_encrypt(struct ablkcipher_request *req)
1597{
1598 struct ablkcipher_edesc *edesc;
1599 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1600 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
1601 struct device *jrdev = ctx->jrdev;
1602 bool iv_contig;
1603 u32 *desc;
1604 int ret = 0;
1605
1606 /* allocate extended descriptor */
1607 edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN *
1608 CAAM_CMD_SZ, &iv_contig);
1609 if (IS_ERR(edesc))
1610 return PTR_ERR(edesc);
1611
1612 /* Create and submit job descriptor*/
1613 init_ablkcipher_job(ctx->sh_desc_enc,
1614 ctx->sh_desc_enc_dma, edesc, req, iv_contig);
1615#ifdef DEBUG
514df281 1616 print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
acdca31d
YK
1617 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1618 desc_bytes(edesc->hw_desc), 1);
1619#endif
1620 desc = edesc->hw_desc;
1621 ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);
1622
1623 if (!ret) {
1624 ret = -EINPROGRESS;
1625 } else {
1626 ablkcipher_unmap(jrdev, edesc, req);
1627 kfree(edesc);
1628 }
1629
1630 return ret;
1631}
1632
1633static int ablkcipher_decrypt(struct ablkcipher_request *req)
1634{
1635 struct ablkcipher_edesc *edesc;
1636 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1637 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
1638 struct device *jrdev = ctx->jrdev;
1639 bool iv_contig;
1640 u32 *desc;
1641 int ret = 0;
1642
1643 /* allocate extended descriptor */
1644 edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN *
1645 CAAM_CMD_SZ, &iv_contig);
1646 if (IS_ERR(edesc))
1647 return PTR_ERR(edesc);
1648
1649 /* Create and submit job descriptor*/
1650 init_ablkcipher_job(ctx->sh_desc_dec,
1651 ctx->sh_desc_dec_dma, edesc, req, iv_contig);
1652 desc = edesc->hw_desc;
1653#ifdef DEBUG
514df281 1654 print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
acdca31d
YK
1655 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1656 desc_bytes(edesc->hw_desc), 1);
1657#endif
1658
1659 ret = caam_jr_enqueue(jrdev, desc, ablkcipher_decrypt_done, req);
1660 if (!ret) {
1661 ret = -EINPROGRESS;
1662 } else {
1663 ablkcipher_unmap(jrdev, edesc, req);
1664 kfree(edesc);
1665 }
1666
1667 return ret;
1668}
1669
7222d1a3
CV
/*
 * allocate and map the ablkcipher extended descriptor
 * for ablkcipher givencrypt
 *
 * Like ablkcipher_edesc_alloc() but the IV belongs to the OUTPUT side
 * (greq->giv receives the generated IV): the contiguity check is done
 * against the destination, and the S/G table layout is src entries first,
 * then IV entry + dst entries. All error paths undo prior mappings.
 */
static struct ablkcipher_edesc *ablkcipher_giv_edesc_alloc(
				struct skcipher_givcrypt_request *greq,
				int desc_bytes,
				bool *iv_contig_out)
{
	struct ablkcipher_request *req = &greq->creq;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	/* GFP_KERNEL only when the caller allows sleeping */
	gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
					  CRYPTO_TFM_REQ_MAY_SLEEP)) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents, mapped_dst_nents;
	struct ablkcipher_edesc *edesc;
	dma_addr_t iv_dma = 0;
	bool out_contig;
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;

	src_nents = sg_nents_for_len(req->src, req->nbytes);
	if (unlikely(src_nents < 0)) {
		dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
			req->nbytes);
		return ERR_PTR(src_nents);
	}

	if (likely(req->src == req->dst)) {
		/* In-place: single bidirectional mapping, shared counts */
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}

		dst_nents = src_nents;
		mapped_dst_nents = src_nents;
	} else {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_TO_DEVICE);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}

		dst_nents = sg_nents_for_len(req->dst, req->nbytes);
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->nbytes);
			return ERR_PTR(dst_nents);
		}

		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	/*
	 * Check if iv can be contiguous with source and destination.
	 * If so, include it. If not, create scatterlist.
	 */
	iv_dma = dma_map_single(jrdev, greq->giv, ivsize, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, iv_dma)) {
		dev_err(jrdev, "unable to map IV\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	sec4_sg_ents = mapped_src_nents > 1 ? mapped_src_nents : 0;
	dst_sg_idx = sec4_sg_ents;
	/* IV buffer immediately followed by single-segment dst: no out S/G */
	if (mapped_dst_nents == 1 &&
	    iv_dma + ivsize == sg_dma_address(req->dst)) {
		out_contig = true;
	} else {
		out_contig = false;
		/* one entry for the IV plus one per dst segment */
		sec4_sg_ents += 1 + mapped_dst_nents;
	}

	/* allocate space for base edesc and hw desc commands, link tables */
	sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		dev_err(jrdev, "could not allocate extended descriptor\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->sec4_sg_bytes = sec4_sg_bytes;
	/* The S/G table lives right after the job descriptor in the edesc */
	edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) +
			 desc_bytes;

	if (mapped_src_nents > 1)
		sg_to_sec4_sg_last(req->src, mapped_src_nents, edesc->sec4_sg,
				   0);

	if (!out_contig) {
		/* output part: IV entry followed by the dst segments */
		dma_to_sec4_sg_one(edesc->sec4_sg + dst_sg_idx,
				   iv_dma, ivsize, 0);
		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
				   edesc->sec4_sg + dst_sg_idx + 1, 0);
	}

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}
	edesc->iv_dma = iv_dma;

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher sec4_sg@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
		       sec4_sg_bytes, 1);
#endif

	*iv_contig_out = out_contig;
	return edesc;
}
1805
1806static int ablkcipher_givencrypt(struct skcipher_givcrypt_request *creq)
1807{
1808 struct ablkcipher_request *req = &creq->creq;
1809 struct ablkcipher_edesc *edesc;
1810 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1811 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
1812 struct device *jrdev = ctx->jrdev;
fd144d83 1813 bool iv_contig = false;
7222d1a3
CV
1814 u32 *desc;
1815 int ret = 0;
1816
1817 /* allocate extended descriptor */
1818 edesc = ablkcipher_giv_edesc_alloc(creq, DESC_JOB_IO_LEN *
1819 CAAM_CMD_SZ, &iv_contig);
1820 if (IS_ERR(edesc))
1821 return PTR_ERR(edesc);
1822
1823 /* Create and submit job descriptor*/
1824 init_ablkcipher_giv_job(ctx->sh_desc_givenc, ctx->sh_desc_givenc_dma,
1825 edesc, req, iv_contig);
1826#ifdef DEBUG
1827 print_hex_dump(KERN_ERR,
1828 "ablkcipher jobdesc@" __stringify(__LINE__) ": ",
1829 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1830 desc_bytes(edesc->hw_desc), 1);
1831#endif
1832 desc = edesc->hw_desc;
1833 ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);
1834
1835 if (!ret) {
1836 ret = -EINPROGRESS;
1837 } else {
1838 ablkcipher_unmap(jrdev, edesc, req);
1839 kfree(edesc);
1840 }
1841
1842 return ret;
1843}
1844
#define template_aead		template_u.aead
#define template_ablkcipher	template_u.ablkcipher
/*
 * Static template from which the driver's crypto_alg entries are built.
 * NOTE(review): template_u only has an ablkcipher member here, so the
 * template_aead accessor macro looks vestigial - confirm before removing.
 */
struct caam_alg_template {
	char name[CRYPTO_MAX_ALG_NAME];		/* crypto API algorithm name */
	char driver_name[CRYPTO_MAX_ALG_NAME];	/* driver-specific name */
	unsigned int blocksize;
	u32 type;		/* CRYPTO_ALG_TYPE_* for this template */
	union {
		struct ablkcipher_alg ablkcipher;
	} template_u;
	u32 class1_alg_type;	/* CAAM class 1 OP_ALG_* (cipher) selector */
	u32 class2_alg_type;	/* CAAM class 2 selector (unset in the
				 * visible entries below) */
};
1858
/*
 * Table of symmetric-cipher algorithms registered with the crypto API.
 * Entry order and exact field values are significant for registration;
 * do not reorder without checking callers.
 *
 * Entries of type CRYPTO_ALG_TYPE_GIVCIPHER supply a .givencrypt hook and
 * use the "<built-in>" geniv marker, i.e. IV generation is handled by the
 * driver's own descriptor (see ablkcipher_givencrypt above).
 */
static struct caam_alg_template driver_algs[] = {
	/* ablkcipher descriptor */
	{
		.name = "cbc(aes)",
		.driver_name = "cbc-aes-caam",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
	},
	{
		.name = "cbc(des3_ede)",
		.driver_name = "cbc-3des-caam",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
	},
	{
		.name = "cbc(des)",
		.driver_name = "cbc-des-caam",
		.blocksize = DES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
	},
	{
		/* stream-cipher mode: blocksize 1, IV generated by chainiv */
		.name = "ctr(aes)",
		.driver_name = "ctr-aes-caam",
		.blocksize = 1,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.geniv = "chainiv",
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
	},
	{
		/* RFC3686 CTR: key carries an extra 4-byte nonce appended */
		.name = "rfc3686(ctr(aes))",
		.driver_name = "rfc3686-ctr-aes-caam",
		.blocksize = 1,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = AES_MIN_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.ivsize = CTR_RFC3686_IV_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
	},
	{
		/* XTS uses two AES keys concatenated, hence 2x key sizes */
		.name = "xts(aes)",
		.driver_name = "xts-aes-caam",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = xts_ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.geniv = "eseqiv",
			.min_keysize = 2 * AES_MIN_KEY_SIZE,
			.max_keysize = 2 * AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
	},
};
1964
1965static struct caam_aead_alg driver_aeads[] = {
1966 {
1967 .aead = {
1968 .base = {
1969 .cra_name = "rfc4106(gcm(aes))",
1970 .cra_driver_name = "rfc4106-gcm-aes-caam",
1971 .cra_blocksize = 1,
1972 },
1973 .setkey = rfc4106_setkey,
1974 .setauthsize = rfc4106_setauthsize,
1975 .encrypt = ipsec_gcm_encrypt,
1976 .decrypt = ipsec_gcm_decrypt,
1977 .ivsize = 8,
1978 .maxauthsize = AES_BLOCK_SIZE,
1979 },
1980 .caam = {
1981 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
1982 },
1983 },
1984 {
1985 .aead = {
1986 .base = {
1987 .cra_name = "rfc4543(gcm(aes))",
1988 .cra_driver_name = "rfc4543-gcm-aes-caam",
1989 .cra_blocksize = 1,
1990 },
1991 .setkey = rfc4543_setkey,
1992 .setauthsize = rfc4543_setauthsize,
1993 .encrypt = ipsec_gcm_encrypt,
1994 .decrypt = ipsec_gcm_decrypt,
1995 .ivsize = 8,
1996 .maxauthsize = AES_BLOCK_SIZE,
1997 },
1998 .caam = {
1999 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2000 },
2001 },
2002 /* Galois Counter Mode */
2003 {
2004 .aead = {
2005 .base = {
2006 .cra_name = "gcm(aes)",
2007 .cra_driver_name = "gcm-aes-caam",
2008 .cra_blocksize = 1,
2009 },
2010 .setkey = gcm_setkey,
2011 .setauthsize = gcm_setauthsize,
2012 .encrypt = gcm_encrypt,
2013 .decrypt = gcm_decrypt,
2014 .ivsize = 12,
2015 .maxauthsize = AES_BLOCK_SIZE,
2016 },
2017 .caam = {
2018 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2019 },
2020 },
2021 /* single-pass ipsec_esp descriptor */
2022 {
2023 .aead = {
2024 .base = {
2025 .cra_name = "authenc(hmac(md5),"
2026 "ecb(cipher_null))",
2027 .cra_driver_name = "authenc-hmac-md5-"
2028 "ecb-cipher_null-caam",
2029 .cra_blocksize = NULL_BLOCK_SIZE,
2030 },
2031 .setkey = aead_setkey,
2032 .setauthsize = aead_setauthsize,
2033 .encrypt = aead_encrypt,
2034 .decrypt = aead_decrypt,
ae4a825f 2035 .ivsize = NULL_IV_SIZE,
479bcc7c
HX
2036 .maxauthsize = MD5_DIGEST_SIZE,
2037 },
2038 .caam = {
2039 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2040 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2041 },
2042 },
2043 {
2044 .aead = {
2045 .base = {
2046 .cra_name = "authenc(hmac(sha1),"
2047 "ecb(cipher_null))",
2048 .cra_driver_name = "authenc-hmac-sha1-"
2049 "ecb-cipher_null-caam",
2050 .cra_blocksize = NULL_BLOCK_SIZE,
ae4a825f 2051 },
479bcc7c
HX
2052 .setkey = aead_setkey,
2053 .setauthsize = aead_setauthsize,
2054 .encrypt = aead_encrypt,
2055 .decrypt = aead_decrypt,
2056 .ivsize = NULL_IV_SIZE,
2057 .maxauthsize = SHA1_DIGEST_SIZE,
2058 },
2059 .caam = {
2060 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2061 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2062 },
ae4a825f
HG
2063 },
2064 {
479bcc7c
HX
2065 .aead = {
2066 .base = {
2067 .cra_name = "authenc(hmac(sha224),"
2068 "ecb(cipher_null))",
2069 .cra_driver_name = "authenc-hmac-sha224-"
2070 "ecb-cipher_null-caam",
2071 .cra_blocksize = NULL_BLOCK_SIZE,
2072 },
ae4a825f
HG
2073 .setkey = aead_setkey,
2074 .setauthsize = aead_setauthsize,
479bcc7c
HX
2075 .encrypt = aead_encrypt,
2076 .decrypt = aead_decrypt,
ae4a825f
HG
2077 .ivsize = NULL_IV_SIZE,
2078 .maxauthsize = SHA224_DIGEST_SIZE,
479bcc7c
HX
2079 },
2080 .caam = {
2081 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2082 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2083 },
ae4a825f
HG
2084 },
2085 {
479bcc7c
HX
2086 .aead = {
2087 .base = {
2088 .cra_name = "authenc(hmac(sha256),"
2089 "ecb(cipher_null))",
2090 .cra_driver_name = "authenc-hmac-sha256-"
2091 "ecb-cipher_null-caam",
2092 .cra_blocksize = NULL_BLOCK_SIZE,
2093 },
ae4a825f
HG
2094 .setkey = aead_setkey,
2095 .setauthsize = aead_setauthsize,
479bcc7c
HX
2096 .encrypt = aead_encrypt,
2097 .decrypt = aead_decrypt,
ae4a825f
HG
2098 .ivsize = NULL_IV_SIZE,
2099 .maxauthsize = SHA256_DIGEST_SIZE,
479bcc7c
HX
2100 },
2101 .caam = {
2102 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2103 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2104 },
ae4a825f
HG
2105 },
2106 {
479bcc7c
HX
2107 .aead = {
2108 .base = {
2109 .cra_name = "authenc(hmac(sha384),"
2110 "ecb(cipher_null))",
2111 .cra_driver_name = "authenc-hmac-sha384-"
2112 "ecb-cipher_null-caam",
2113 .cra_blocksize = NULL_BLOCK_SIZE,
2114 },
ae4a825f
HG
2115 .setkey = aead_setkey,
2116 .setauthsize = aead_setauthsize,
479bcc7c
HX
2117 .encrypt = aead_encrypt,
2118 .decrypt = aead_decrypt,
ae4a825f
HG
2119 .ivsize = NULL_IV_SIZE,
2120 .maxauthsize = SHA384_DIGEST_SIZE,
479bcc7c
HX
2121 },
2122 .caam = {
2123 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2124 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2125 },
ae4a825f
HG
2126 },
2127 {
479bcc7c
HX
2128 .aead = {
2129 .base = {
2130 .cra_name = "authenc(hmac(sha512),"
2131 "ecb(cipher_null))",
2132 .cra_driver_name = "authenc-hmac-sha512-"
2133 "ecb-cipher_null-caam",
2134 .cra_blocksize = NULL_BLOCK_SIZE,
2135 },
ae4a825f
HG
2136 .setkey = aead_setkey,
2137 .setauthsize = aead_setauthsize,
479bcc7c
HX
2138 .encrypt = aead_encrypt,
2139 .decrypt = aead_decrypt,
ae4a825f
HG
2140 .ivsize = NULL_IV_SIZE,
2141 .maxauthsize = SHA512_DIGEST_SIZE,
479bcc7c
HX
2142 },
2143 .caam = {
2144 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2145 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2146 },
2147 },
2148 {
2149 .aead = {
2150 .base = {
2151 .cra_name = "authenc(hmac(md5),cbc(aes))",
2152 .cra_driver_name = "authenc-hmac-md5-"
2153 "cbc-aes-caam",
2154 .cra_blocksize = AES_BLOCK_SIZE,
ae4a825f 2155 },
479bcc7c
HX
2156 .setkey = aead_setkey,
2157 .setauthsize = aead_setauthsize,
2158 .encrypt = aead_encrypt,
2159 .decrypt = aead_decrypt,
2160 .ivsize = AES_BLOCK_SIZE,
2161 .maxauthsize = MD5_DIGEST_SIZE,
2162 },
2163 .caam = {
2164 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2165 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2166 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2167 },
ae4a825f 2168 },
8b4d43a4 2169 {
479bcc7c
HX
2170 .aead = {
2171 .base = {
2172 .cra_name = "echainiv(authenc(hmac(md5),"
2173 "cbc(aes)))",
2174 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2175 "cbc-aes-caam",
2176 .cra_blocksize = AES_BLOCK_SIZE,
2177 },
8b4d43a4
KP
2178 .setkey = aead_setkey,
2179 .setauthsize = aead_setauthsize,
479bcc7c 2180 .encrypt = aead_encrypt,
8b18e235 2181 .decrypt = aead_decrypt,
8b4d43a4
KP
2182 .ivsize = AES_BLOCK_SIZE,
2183 .maxauthsize = MD5_DIGEST_SIZE,
479bcc7c
HX
2184 },
2185 .caam = {
2186 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2187 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2188 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2189 .geniv = true,
2190 },
2191 },
2192 {
2193 .aead = {
2194 .base = {
2195 .cra_name = "authenc(hmac(sha1),cbc(aes))",
2196 .cra_driver_name = "authenc-hmac-sha1-"
2197 "cbc-aes-caam",
2198 .cra_blocksize = AES_BLOCK_SIZE,
8b4d43a4 2199 },
479bcc7c
HX
2200 .setkey = aead_setkey,
2201 .setauthsize = aead_setauthsize,
2202 .encrypt = aead_encrypt,
2203 .decrypt = aead_decrypt,
2204 .ivsize = AES_BLOCK_SIZE,
2205 .maxauthsize = SHA1_DIGEST_SIZE,
2206 },
2207 .caam = {
2208 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2209 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2210 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2211 },
8b4d43a4 2212 },
8e8ec596 2213 {
479bcc7c
HX
2214 .aead = {
2215 .base = {
2216 .cra_name = "echainiv(authenc(hmac(sha1),"
2217 "cbc(aes)))",
2218 .cra_driver_name = "echainiv-authenc-"
2219 "hmac-sha1-cbc-aes-caam",
2220 .cra_blocksize = AES_BLOCK_SIZE,
2221 },
0e479300
YK
2222 .setkey = aead_setkey,
2223 .setauthsize = aead_setauthsize,
479bcc7c 2224 .encrypt = aead_encrypt,
8b18e235 2225 .decrypt = aead_decrypt,
8e8ec596
KP
2226 .ivsize = AES_BLOCK_SIZE,
2227 .maxauthsize = SHA1_DIGEST_SIZE,
479bcc7c
HX
2228 },
2229 .caam = {
2230 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2231 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2232 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2233 .geniv = true,
2234 },
2235 },
2236 {
2237 .aead = {
2238 .base = {
2239 .cra_name = "authenc(hmac(sha224),cbc(aes))",
2240 .cra_driver_name = "authenc-hmac-sha224-"
2241 "cbc-aes-caam",
2242 .cra_blocksize = AES_BLOCK_SIZE,
8e8ec596 2243 },
479bcc7c
HX
2244 .setkey = aead_setkey,
2245 .setauthsize = aead_setauthsize,
2246 .encrypt = aead_encrypt,
2247 .decrypt = aead_decrypt,
2248 .ivsize = AES_BLOCK_SIZE,
2249 .maxauthsize = SHA224_DIGEST_SIZE,
2250 },
2251 .caam = {
2252 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2253 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2254 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2255 },
8e8ec596 2256 },
e863f9cc 2257 {
479bcc7c
HX
2258 .aead = {
2259 .base = {
2260 .cra_name = "echainiv(authenc(hmac(sha224),"
2261 "cbc(aes)))",
2262 .cra_driver_name = "echainiv-authenc-"
2263 "hmac-sha224-cbc-aes-caam",
2264 .cra_blocksize = AES_BLOCK_SIZE,
2265 },
e863f9cc
HA
2266 .setkey = aead_setkey,
2267 .setauthsize = aead_setauthsize,
479bcc7c 2268 .encrypt = aead_encrypt,
8b18e235 2269 .decrypt = aead_decrypt,
e863f9cc
HA
2270 .ivsize = AES_BLOCK_SIZE,
2271 .maxauthsize = SHA224_DIGEST_SIZE,
479bcc7c
HX
2272 },
2273 .caam = {
2274 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2275 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2276 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2277 .geniv = true,
2278 },
2279 },
2280 {
2281 .aead = {
2282 .base = {
2283 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2284 .cra_driver_name = "authenc-hmac-sha256-"
2285 "cbc-aes-caam",
2286 .cra_blocksize = AES_BLOCK_SIZE,
e863f9cc 2287 },
479bcc7c
HX
2288 .setkey = aead_setkey,
2289 .setauthsize = aead_setauthsize,
2290 .encrypt = aead_encrypt,
2291 .decrypt = aead_decrypt,
2292 .ivsize = AES_BLOCK_SIZE,
2293 .maxauthsize = SHA256_DIGEST_SIZE,
2294 },
2295 .caam = {
2296 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2297 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2298 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2299 },
e863f9cc 2300 },
8e8ec596 2301 {
479bcc7c
HX
2302 .aead = {
2303 .base = {
2304 .cra_name = "echainiv(authenc(hmac(sha256),"
2305 "cbc(aes)))",
2306 .cra_driver_name = "echainiv-authenc-"
2307 "hmac-sha256-cbc-aes-caam",
2308 .cra_blocksize = AES_BLOCK_SIZE,
2309 },
2310 .setkey = aead_setkey,
2311 .setauthsize = aead_setauthsize,
2312 .encrypt = aead_encrypt,
8b18e235 2313 .decrypt = aead_decrypt,
479bcc7c
HX
2314 .ivsize = AES_BLOCK_SIZE,
2315 .maxauthsize = SHA256_DIGEST_SIZE,
2316 },
2317 .caam = {
2318 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2319 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2320 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2321 .geniv = true,
2322 },
2323 },
2324 {
2325 .aead = {
2326 .base = {
2327 .cra_name = "authenc(hmac(sha384),cbc(aes))",
2328 .cra_driver_name = "authenc-hmac-sha384-"
2329 "cbc-aes-caam",
2330 .cra_blocksize = AES_BLOCK_SIZE,
2331 },
2332 .setkey = aead_setkey,
2333 .setauthsize = aead_setauthsize,
2334 .encrypt = aead_encrypt,
2335 .decrypt = aead_decrypt,
2336 .ivsize = AES_BLOCK_SIZE,
2337 .maxauthsize = SHA384_DIGEST_SIZE,
2338 },
2339 .caam = {
2340 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2341 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2342 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2343 },
2344 },
2345 {
2346 .aead = {
2347 .base = {
2348 .cra_name = "echainiv(authenc(hmac(sha384),"
2349 "cbc(aes)))",
2350 .cra_driver_name = "echainiv-authenc-"
2351 "hmac-sha384-cbc-aes-caam",
2352 .cra_blocksize = AES_BLOCK_SIZE,
2353 },
0e479300
YK
2354 .setkey = aead_setkey,
2355 .setauthsize = aead_setauthsize,
479bcc7c 2356 .encrypt = aead_encrypt,
8b18e235 2357 .decrypt = aead_decrypt,
8e8ec596 2358 .ivsize = AES_BLOCK_SIZE,
479bcc7c
HX
2359 .maxauthsize = SHA384_DIGEST_SIZE,
2360 },
2361 .caam = {
2362 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2363 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2364 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2365 .geniv = true,
2366 },
8e8ec596 2367 },
e863f9cc 2368 {
479bcc7c
HX
2369 .aead = {
2370 .base = {
2371 .cra_name = "authenc(hmac(sha512),cbc(aes))",
2372 .cra_driver_name = "authenc-hmac-sha512-"
2373 "cbc-aes-caam",
2374 .cra_blocksize = AES_BLOCK_SIZE,
2375 },
e863f9cc
HA
2376 .setkey = aead_setkey,
2377 .setauthsize = aead_setauthsize,
479bcc7c
HX
2378 .encrypt = aead_encrypt,
2379 .decrypt = aead_decrypt,
e863f9cc 2380 .ivsize = AES_BLOCK_SIZE,
479bcc7c
HX
2381 .maxauthsize = SHA512_DIGEST_SIZE,
2382 },
2383 .caam = {
2384 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2385 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2386 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2387 },
e863f9cc 2388 },
4427b1b4 2389 {
479bcc7c
HX
2390 .aead = {
2391 .base = {
2392 .cra_name = "echainiv(authenc(hmac(sha512),"
2393 "cbc(aes)))",
2394 .cra_driver_name = "echainiv-authenc-"
2395 "hmac-sha512-cbc-aes-caam",
2396 .cra_blocksize = AES_BLOCK_SIZE,
2397 },
0e479300
YK
2398 .setkey = aead_setkey,
2399 .setauthsize = aead_setauthsize,
479bcc7c 2400 .encrypt = aead_encrypt,
8b18e235 2401 .decrypt = aead_decrypt,
4427b1b4
KP
2402 .ivsize = AES_BLOCK_SIZE,
2403 .maxauthsize = SHA512_DIGEST_SIZE,
479bcc7c
HX
2404 },
2405 .caam = {
2406 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2407 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2408 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2409 .geniv = true,
2410 },
2411 },
2412 {
2413 .aead = {
2414 .base = {
2415 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2416 .cra_driver_name = "authenc-hmac-md5-"
2417 "cbc-des3_ede-caam",
2418 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
4427b1b4 2419 },
479bcc7c
HX
2420 .setkey = aead_setkey,
2421 .setauthsize = aead_setauthsize,
2422 .encrypt = aead_encrypt,
2423 .decrypt = aead_decrypt,
2424 .ivsize = DES3_EDE_BLOCK_SIZE,
2425 .maxauthsize = MD5_DIGEST_SIZE,
2426 },
2427 .caam = {
2428 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2429 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2430 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2431 }
4427b1b4 2432 },
8b4d43a4 2433 {
479bcc7c
HX
2434 .aead = {
2435 .base = {
2436 .cra_name = "echainiv(authenc(hmac(md5),"
2437 "cbc(des3_ede)))",
2438 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2439 "cbc-des3_ede-caam",
2440 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2441 },
8b4d43a4
KP
2442 .setkey = aead_setkey,
2443 .setauthsize = aead_setauthsize,
479bcc7c 2444 .encrypt = aead_encrypt,
8b18e235 2445 .decrypt = aead_decrypt,
8b4d43a4
KP
2446 .ivsize = DES3_EDE_BLOCK_SIZE,
2447 .maxauthsize = MD5_DIGEST_SIZE,
479bcc7c
HX
2448 },
2449 .caam = {
2450 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2451 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2452 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2453 .geniv = true,
2454 }
2455 },
2456 {
2457 .aead = {
2458 .base = {
2459 .cra_name = "authenc(hmac(sha1),"
2460 "cbc(des3_ede))",
2461 .cra_driver_name = "authenc-hmac-sha1-"
2462 "cbc-des3_ede-caam",
2463 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
8b4d43a4 2464 },
479bcc7c
HX
2465 .setkey = aead_setkey,
2466 .setauthsize = aead_setauthsize,
2467 .encrypt = aead_encrypt,
2468 .decrypt = aead_decrypt,
2469 .ivsize = DES3_EDE_BLOCK_SIZE,
2470 .maxauthsize = SHA1_DIGEST_SIZE,
2471 },
2472 .caam = {
2473 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2474 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2475 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2476 },
8b4d43a4 2477 },
8e8ec596 2478 {
479bcc7c
HX
2479 .aead = {
2480 .base = {
2481 .cra_name = "echainiv(authenc(hmac(sha1),"
2482 "cbc(des3_ede)))",
2483 .cra_driver_name = "echainiv-authenc-"
2484 "hmac-sha1-"
2485 "cbc-des3_ede-caam",
2486 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2487 },
0e479300
YK
2488 .setkey = aead_setkey,
2489 .setauthsize = aead_setauthsize,
479bcc7c 2490 .encrypt = aead_encrypt,
8b18e235 2491 .decrypt = aead_decrypt,
8e8ec596
KP
2492 .ivsize = DES3_EDE_BLOCK_SIZE,
2493 .maxauthsize = SHA1_DIGEST_SIZE,
479bcc7c
HX
2494 },
2495 .caam = {
2496 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2497 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2498 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2499 .geniv = true,
2500 },
2501 },
2502 {
2503 .aead = {
2504 .base = {
2505 .cra_name = "authenc(hmac(sha224),"
2506 "cbc(des3_ede))",
2507 .cra_driver_name = "authenc-hmac-sha224-"
2508 "cbc-des3_ede-caam",
2509 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
8e8ec596 2510 },
479bcc7c
HX
2511 .setkey = aead_setkey,
2512 .setauthsize = aead_setauthsize,
2513 .encrypt = aead_encrypt,
2514 .decrypt = aead_decrypt,
2515 .ivsize = DES3_EDE_BLOCK_SIZE,
2516 .maxauthsize = SHA224_DIGEST_SIZE,
2517 },
2518 .caam = {
2519 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2520 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2521 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2522 },
8e8ec596 2523 },
e863f9cc 2524 {
479bcc7c
HX
2525 .aead = {
2526 .base = {
2527 .cra_name = "echainiv(authenc(hmac(sha224),"
2528 "cbc(des3_ede)))",
2529 .cra_driver_name = "echainiv-authenc-"
2530 "hmac-sha224-"
2531 "cbc-des3_ede-caam",
2532 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2533 },
e863f9cc
HA
2534 .setkey = aead_setkey,
2535 .setauthsize = aead_setauthsize,
479bcc7c 2536 .encrypt = aead_encrypt,
8b18e235 2537 .decrypt = aead_decrypt,
e863f9cc
HA
2538 .ivsize = DES3_EDE_BLOCK_SIZE,
2539 .maxauthsize = SHA224_DIGEST_SIZE,
479bcc7c
HX
2540 },
2541 .caam = {
2542 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2543 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2544 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2545 .geniv = true,
2546 },
2547 },
2548 {
2549 .aead = {
2550 .base = {
2551 .cra_name = "authenc(hmac(sha256),"
2552 "cbc(des3_ede))",
2553 .cra_driver_name = "authenc-hmac-sha256-"
2554 "cbc-des3_ede-caam",
2555 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
e863f9cc 2556 },
479bcc7c
HX
2557 .setkey = aead_setkey,
2558 .setauthsize = aead_setauthsize,
2559 .encrypt = aead_encrypt,
2560 .decrypt = aead_decrypt,
2561 .ivsize = DES3_EDE_BLOCK_SIZE,
2562 .maxauthsize = SHA256_DIGEST_SIZE,
2563 },
2564 .caam = {
2565 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2566 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2567 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2568 },
e863f9cc 2569 },
8e8ec596 2570 {
479bcc7c
HX
2571 .aead = {
2572 .base = {
2573 .cra_name = "echainiv(authenc(hmac(sha256),"
2574 "cbc(des3_ede)))",
2575 .cra_driver_name = "echainiv-authenc-"
2576 "hmac-sha256-"
2577 "cbc-des3_ede-caam",
2578 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2579 },
0e479300
YK
2580 .setkey = aead_setkey,
2581 .setauthsize = aead_setauthsize,
479bcc7c 2582 .encrypt = aead_encrypt,
8b18e235 2583 .decrypt = aead_decrypt,
8e8ec596
KP
2584 .ivsize = DES3_EDE_BLOCK_SIZE,
2585 .maxauthsize = SHA256_DIGEST_SIZE,
479bcc7c
HX
2586 },
2587 .caam = {
2588 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2589 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2590 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2591 .geniv = true,
2592 },
2593 },
2594 {
2595 .aead = {
2596 .base = {
2597 .cra_name = "authenc(hmac(sha384),"
2598 "cbc(des3_ede))",
2599 .cra_driver_name = "authenc-hmac-sha384-"
2600 "cbc-des3_ede-caam",
2601 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
8e8ec596 2602 },
479bcc7c
HX
2603 .setkey = aead_setkey,
2604 .setauthsize = aead_setauthsize,
2605 .encrypt = aead_encrypt,
2606 .decrypt = aead_decrypt,
2607 .ivsize = DES3_EDE_BLOCK_SIZE,
2608 .maxauthsize = SHA384_DIGEST_SIZE,
2609 },
2610 .caam = {
2611 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2612 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2613 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2614 },
8e8ec596 2615 },
e863f9cc 2616 {
479bcc7c
HX
2617 .aead = {
2618 .base = {
2619 .cra_name = "echainiv(authenc(hmac(sha384),"
2620 "cbc(des3_ede)))",
2621 .cra_driver_name = "echainiv-authenc-"
2622 "hmac-sha384-"
2623 "cbc-des3_ede-caam",
2624 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2625 },
e863f9cc
HA
2626 .setkey = aead_setkey,
2627 .setauthsize = aead_setauthsize,
479bcc7c 2628 .encrypt = aead_encrypt,
8b18e235 2629 .decrypt = aead_decrypt,
e863f9cc
HA
2630 .ivsize = DES3_EDE_BLOCK_SIZE,
2631 .maxauthsize = SHA384_DIGEST_SIZE,
479bcc7c
HX
2632 },
2633 .caam = {
2634 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2635 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2636 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2637 .geniv = true,
2638 },
2639 },
2640 {
2641 .aead = {
2642 .base = {
2643 .cra_name = "authenc(hmac(sha512),"
2644 "cbc(des3_ede))",
2645 .cra_driver_name = "authenc-hmac-sha512-"
2646 "cbc-des3_ede-caam",
2647 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
e863f9cc 2648 },
479bcc7c
HX
2649 .setkey = aead_setkey,
2650 .setauthsize = aead_setauthsize,
2651 .encrypt = aead_encrypt,
2652 .decrypt = aead_decrypt,
2653 .ivsize = DES3_EDE_BLOCK_SIZE,
2654 .maxauthsize = SHA512_DIGEST_SIZE,
2655 },
2656 .caam = {
2657 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2658 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2659 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2660 },
e863f9cc 2661 },
4427b1b4 2662 {
479bcc7c
HX
2663 .aead = {
2664 .base = {
2665 .cra_name = "echainiv(authenc(hmac(sha512),"
2666 "cbc(des3_ede)))",
2667 .cra_driver_name = "echainiv-authenc-"
2668 "hmac-sha512-"
2669 "cbc-des3_ede-caam",
2670 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2671 },
0e479300
YK
2672 .setkey = aead_setkey,
2673 .setauthsize = aead_setauthsize,
479bcc7c 2674 .encrypt = aead_encrypt,
8b18e235 2675 .decrypt = aead_decrypt,
4427b1b4
KP
2676 .ivsize = DES3_EDE_BLOCK_SIZE,
2677 .maxauthsize = SHA512_DIGEST_SIZE,
479bcc7c
HX
2678 },
2679 .caam = {
2680 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2681 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2682 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2683 .geniv = true,
2684 },
2685 },
2686 {
2687 .aead = {
2688 .base = {
2689 .cra_name = "authenc(hmac(md5),cbc(des))",
2690 .cra_driver_name = "authenc-hmac-md5-"
2691 "cbc-des-caam",
2692 .cra_blocksize = DES_BLOCK_SIZE,
4427b1b4 2693 },
479bcc7c
HX
2694 .setkey = aead_setkey,
2695 .setauthsize = aead_setauthsize,
2696 .encrypt = aead_encrypt,
2697 .decrypt = aead_decrypt,
2698 .ivsize = DES_BLOCK_SIZE,
2699 .maxauthsize = MD5_DIGEST_SIZE,
2700 },
2701 .caam = {
2702 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2703 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2704 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2705 },
4427b1b4 2706 },
8b4d43a4 2707 {
479bcc7c
HX
2708 .aead = {
2709 .base = {
2710 .cra_name = "echainiv(authenc(hmac(md5),"
2711 "cbc(des)))",
2712 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2713 "cbc-des-caam",
2714 .cra_blocksize = DES_BLOCK_SIZE,
2715 },
8b4d43a4
KP
2716 .setkey = aead_setkey,
2717 .setauthsize = aead_setauthsize,
479bcc7c 2718 .encrypt = aead_encrypt,
8b18e235 2719 .decrypt = aead_decrypt,
8b4d43a4
KP
2720 .ivsize = DES_BLOCK_SIZE,
2721 .maxauthsize = MD5_DIGEST_SIZE,
479bcc7c
HX
2722 },
2723 .caam = {
2724 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2725 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2726 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2727 .geniv = true,
2728 },
2729 },
2730 {
2731 .aead = {
2732 .base = {
2733 .cra_name = "authenc(hmac(sha1),cbc(des))",
2734 .cra_driver_name = "authenc-hmac-sha1-"
2735 "cbc-des-caam",
2736 .cra_blocksize = DES_BLOCK_SIZE,
8b4d43a4 2737 },
479bcc7c
HX
2738 .setkey = aead_setkey,
2739 .setauthsize = aead_setauthsize,
2740 .encrypt = aead_encrypt,
2741 .decrypt = aead_decrypt,
2742 .ivsize = DES_BLOCK_SIZE,
2743 .maxauthsize = SHA1_DIGEST_SIZE,
2744 },
2745 .caam = {
2746 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2747 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2748 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2749 },
8b4d43a4 2750 },
8e8ec596 2751 {
479bcc7c
HX
2752 .aead = {
2753 .base = {
2754 .cra_name = "echainiv(authenc(hmac(sha1),"
2755 "cbc(des)))",
2756 .cra_driver_name = "echainiv-authenc-"
2757 "hmac-sha1-cbc-des-caam",
2758 .cra_blocksize = DES_BLOCK_SIZE,
2759 },
0e479300
YK
2760 .setkey = aead_setkey,
2761 .setauthsize = aead_setauthsize,
479bcc7c 2762 .encrypt = aead_encrypt,
8b18e235 2763 .decrypt = aead_decrypt,
8e8ec596
KP
2764 .ivsize = DES_BLOCK_SIZE,
2765 .maxauthsize = SHA1_DIGEST_SIZE,
479bcc7c
HX
2766 },
2767 .caam = {
2768 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2769 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2770 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2771 .geniv = true,
2772 },
2773 },
2774 {
2775 .aead = {
2776 .base = {
2777 .cra_name = "authenc(hmac(sha224),cbc(des))",
2778 .cra_driver_name = "authenc-hmac-sha224-"
2779 "cbc-des-caam",
2780 .cra_blocksize = DES_BLOCK_SIZE,
8e8ec596 2781 },
479bcc7c
HX
2782 .setkey = aead_setkey,
2783 .setauthsize = aead_setauthsize,
2784 .encrypt = aead_encrypt,
2785 .decrypt = aead_decrypt,
2786 .ivsize = DES_BLOCK_SIZE,
2787 .maxauthsize = SHA224_DIGEST_SIZE,
2788 },
2789 .caam = {
2790 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2791 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2792 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2793 },
8e8ec596 2794 },
e863f9cc 2795 {
479bcc7c
HX
2796 .aead = {
2797 .base = {
2798 .cra_name = "echainiv(authenc(hmac(sha224),"
2799 "cbc(des)))",
2800 .cra_driver_name = "echainiv-authenc-"
2801 "hmac-sha224-cbc-des-caam",
2802 .cra_blocksize = DES_BLOCK_SIZE,
2803 },
e863f9cc
HA
2804 .setkey = aead_setkey,
2805 .setauthsize = aead_setauthsize,
479bcc7c 2806 .encrypt = aead_encrypt,
8b18e235 2807 .decrypt = aead_decrypt,
e863f9cc
HA
2808 .ivsize = DES_BLOCK_SIZE,
2809 .maxauthsize = SHA224_DIGEST_SIZE,
479bcc7c
HX
2810 },
2811 .caam = {
2812 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2813 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2814 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2815 .geniv = true,
2816 },
2817 },
2818 {
2819 .aead = {
2820 .base = {
2821 .cra_name = "authenc(hmac(sha256),cbc(des))",
2822 .cra_driver_name = "authenc-hmac-sha256-"
2823 "cbc-des-caam",
2824 .cra_blocksize = DES_BLOCK_SIZE,
e863f9cc 2825 },
479bcc7c
HX
2826 .setkey = aead_setkey,
2827 .setauthsize = aead_setauthsize,
2828 .encrypt = aead_encrypt,
2829 .decrypt = aead_decrypt,
2830 .ivsize = DES_BLOCK_SIZE,
2831 .maxauthsize = SHA256_DIGEST_SIZE,
2832 },
2833 .caam = {
2834 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2835 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2836 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2837 },
e863f9cc 2838 },
8e8ec596 2839 {
479bcc7c
HX
2840 .aead = {
2841 .base = {
2842 .cra_name = "echainiv(authenc(hmac(sha256),"
2843 "cbc(des)))",
2844 .cra_driver_name = "echainiv-authenc-"
2845 "hmac-sha256-cbc-des-caam",
2846 .cra_blocksize = DES_BLOCK_SIZE,
2847 },
0e479300
YK
2848 .setkey = aead_setkey,
2849 .setauthsize = aead_setauthsize,
479bcc7c 2850 .encrypt = aead_encrypt,
8b18e235 2851 .decrypt = aead_decrypt,
8e8ec596
KP
2852 .ivsize = DES_BLOCK_SIZE,
2853 .maxauthsize = SHA256_DIGEST_SIZE,
479bcc7c
HX
2854 },
2855 .caam = {
2856 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2857 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2858 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2859 .geniv = true,
2860 },
2861 },
2862 {
2863 .aead = {
2864 .base = {
2865 .cra_name = "authenc(hmac(sha384),cbc(des))",
2866 .cra_driver_name = "authenc-hmac-sha384-"
2867 "cbc-des-caam",
2868 .cra_blocksize = DES_BLOCK_SIZE,
8e8ec596 2869 },
479bcc7c
HX
2870 .setkey = aead_setkey,
2871 .setauthsize = aead_setauthsize,
2872 .encrypt = aead_encrypt,
2873 .decrypt = aead_decrypt,
2874 .ivsize = DES_BLOCK_SIZE,
2875 .maxauthsize = SHA384_DIGEST_SIZE,
2876 },
2877 .caam = {
2878 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2879 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2880 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2881 },
8e8ec596 2882 },
e863f9cc 2883 {
479bcc7c
HX
2884 .aead = {
2885 .base = {
2886 .cra_name = "echainiv(authenc(hmac(sha384),"
2887 "cbc(des)))",
2888 .cra_driver_name = "echainiv-authenc-"
2889 "hmac-sha384-cbc-des-caam",
2890 .cra_blocksize = DES_BLOCK_SIZE,
2891 },
e863f9cc
HA
2892 .setkey = aead_setkey,
2893 .setauthsize = aead_setauthsize,
479bcc7c 2894 .encrypt = aead_encrypt,
8b18e235 2895 .decrypt = aead_decrypt,
e863f9cc
HA
2896 .ivsize = DES_BLOCK_SIZE,
2897 .maxauthsize = SHA384_DIGEST_SIZE,
479bcc7c
HX
2898 },
2899 .caam = {
2900 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2901 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2902 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2903 .geniv = true,
2904 },
2905 },
2906 {
2907 .aead = {
2908 .base = {
2909 .cra_name = "authenc(hmac(sha512),cbc(des))",
2910 .cra_driver_name = "authenc-hmac-sha512-"
2911 "cbc-des-caam",
2912 .cra_blocksize = DES_BLOCK_SIZE,
e863f9cc 2913 },
479bcc7c
HX
2914 .setkey = aead_setkey,
2915 .setauthsize = aead_setauthsize,
2916 .encrypt = aead_encrypt,
2917 .decrypt = aead_decrypt,
2918 .ivsize = DES_BLOCK_SIZE,
2919 .maxauthsize = SHA512_DIGEST_SIZE,
2920 },
2921 .caam = {
2922 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2923 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2924 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 2925 },
e863f9cc 2926 },
4427b1b4 2927 {
479bcc7c
HX
2928 .aead = {
2929 .base = {
2930 .cra_name = "echainiv(authenc(hmac(sha512),"
2931 "cbc(des)))",
2932 .cra_driver_name = "echainiv-authenc-"
2933 "hmac-sha512-cbc-des-caam",
2934 .cra_blocksize = DES_BLOCK_SIZE,
2935 },
0e479300
YK
2936 .setkey = aead_setkey,
2937 .setauthsize = aead_setauthsize,
479bcc7c 2938 .encrypt = aead_encrypt,
8b18e235 2939 .decrypt = aead_decrypt,
4427b1b4
KP
2940 .ivsize = DES_BLOCK_SIZE,
2941 .maxauthsize = SHA512_DIGEST_SIZE,
479bcc7c
HX
2942 },
2943 .caam = {
2944 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2945 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2946 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2947 .geniv = true,
2948 },
4427b1b4 2949 },
daebc465 2950 {
479bcc7c
HX
2951 .aead = {
2952 .base = {
2953 .cra_name = "authenc(hmac(md5),"
2954 "rfc3686(ctr(aes)))",
2955 .cra_driver_name = "authenc-hmac-md5-"
2956 "rfc3686-ctr-aes-caam",
2957 .cra_blocksize = 1,
2958 },
daebc465
CV
2959 .setkey = aead_setkey,
2960 .setauthsize = aead_setauthsize,
479bcc7c
HX
2961 .encrypt = aead_encrypt,
2962 .decrypt = aead_decrypt,
daebc465
CV
2963 .ivsize = CTR_RFC3686_IV_SIZE,
2964 .maxauthsize = MD5_DIGEST_SIZE,
479bcc7c
HX
2965 },
2966 .caam = {
2967 .class1_alg_type = OP_ALG_ALGSEL_AES |
2968 OP_ALG_AAI_CTR_MOD128,
2969 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2970 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2971 .rfc3686 = true,
2972 },
daebc465
CV
2973 },
2974 {
479bcc7c
HX
2975 .aead = {
2976 .base = {
2977 .cra_name = "seqiv(authenc("
2978 "hmac(md5),rfc3686(ctr(aes))))",
2979 .cra_driver_name = "seqiv-authenc-hmac-md5-"
2980 "rfc3686-ctr-aes-caam",
2981 .cra_blocksize = 1,
2982 },
daebc465
CV
2983 .setkey = aead_setkey,
2984 .setauthsize = aead_setauthsize,
479bcc7c 2985 .encrypt = aead_encrypt,
8b18e235 2986 .decrypt = aead_decrypt,
daebc465 2987 .ivsize = CTR_RFC3686_IV_SIZE,
479bcc7c
HX
2988 .maxauthsize = MD5_DIGEST_SIZE,
2989 },
2990 .caam = {
2991 .class1_alg_type = OP_ALG_ALGSEL_AES |
2992 OP_ALG_AAI_CTR_MOD128,
2993 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2994 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
2995 .rfc3686 = true,
2996 .geniv = true,
2997 },
daebc465
CV
2998 },
2999 {
479bcc7c
HX
3000 .aead = {
3001 .base = {
3002 .cra_name = "authenc(hmac(sha1),"
3003 "rfc3686(ctr(aes)))",
3004 .cra_driver_name = "authenc-hmac-sha1-"
3005 "rfc3686-ctr-aes-caam",
3006 .cra_blocksize = 1,
3007 },
daebc465
CV
3008 .setkey = aead_setkey,
3009 .setauthsize = aead_setauthsize,
479bcc7c
HX
3010 .encrypt = aead_encrypt,
3011 .decrypt = aead_decrypt,
daebc465 3012 .ivsize = CTR_RFC3686_IV_SIZE,
479bcc7c
HX
3013 .maxauthsize = SHA1_DIGEST_SIZE,
3014 },
3015 .caam = {
3016 .class1_alg_type = OP_ALG_ALGSEL_AES |
3017 OP_ALG_AAI_CTR_MOD128,
3018 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3019 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3020 .rfc3686 = true,
3021 },
daebc465
CV
3022 },
3023 {
479bcc7c
HX
3024 .aead = {
3025 .base = {
3026 .cra_name = "seqiv(authenc("
3027 "hmac(sha1),rfc3686(ctr(aes))))",
3028 .cra_driver_name = "seqiv-authenc-hmac-sha1-"
3029 "rfc3686-ctr-aes-caam",
3030 .cra_blocksize = 1,
3031 },
daebc465
CV
3032 .setkey = aead_setkey,
3033 .setauthsize = aead_setauthsize,
479bcc7c 3034 .encrypt = aead_encrypt,
8b18e235 3035 .decrypt = aead_decrypt,
daebc465 3036 .ivsize = CTR_RFC3686_IV_SIZE,
479bcc7c
HX
3037 .maxauthsize = SHA1_DIGEST_SIZE,
3038 },
3039 .caam = {
3040 .class1_alg_type = OP_ALG_ALGSEL_AES |
3041 OP_ALG_AAI_CTR_MOD128,
3042 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3043 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3044 .rfc3686 = true,
3045 .geniv = true,
3046 },
daebc465
CV
3047 },
3048 {
479bcc7c
HX
3049 .aead = {
3050 .base = {
3051 .cra_name = "authenc(hmac(sha224),"
3052 "rfc3686(ctr(aes)))",
3053 .cra_driver_name = "authenc-hmac-sha224-"
3054 "rfc3686-ctr-aes-caam",
3055 .cra_blocksize = 1,
3056 },
daebc465
CV
3057 .setkey = aead_setkey,
3058 .setauthsize = aead_setauthsize,
479bcc7c
HX
3059 .encrypt = aead_encrypt,
3060 .decrypt = aead_decrypt,
daebc465 3061 .ivsize = CTR_RFC3686_IV_SIZE,
479bcc7c
HX
3062 .maxauthsize = SHA224_DIGEST_SIZE,
3063 },
3064 .caam = {
3065 .class1_alg_type = OP_ALG_ALGSEL_AES |
3066 OP_ALG_AAI_CTR_MOD128,
3067 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3068 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3069 .rfc3686 = true,
3070 },
daebc465
CV
3071 },
3072 {
479bcc7c
HX
3073 .aead = {
3074 .base = {
3075 .cra_name = "seqiv(authenc("
3076 "hmac(sha224),rfc3686(ctr(aes))))",
3077 .cra_driver_name = "seqiv-authenc-hmac-sha224-"
3078 "rfc3686-ctr-aes-caam",
3079 .cra_blocksize = 1,
3080 },
daebc465
CV
3081 .setkey = aead_setkey,
3082 .setauthsize = aead_setauthsize,
479bcc7c 3083 .encrypt = aead_encrypt,
8b18e235 3084 .decrypt = aead_decrypt,
daebc465 3085 .ivsize = CTR_RFC3686_IV_SIZE,
479bcc7c
HX
3086 .maxauthsize = SHA224_DIGEST_SIZE,
3087 },
3088 .caam = {
3089 .class1_alg_type = OP_ALG_ALGSEL_AES |
3090 OP_ALG_AAI_CTR_MOD128,
3091 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3092 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3093 .rfc3686 = true,
3094 .geniv = true,
3095 },
acdca31d
YK
3096 },
3097 {
479bcc7c
HX
3098 .aead = {
3099 .base = {
3100 .cra_name = "authenc(hmac(sha256),"
3101 "rfc3686(ctr(aes)))",
3102 .cra_driver_name = "authenc-hmac-sha256-"
3103 "rfc3686-ctr-aes-caam",
3104 .cra_blocksize = 1,
acdca31d 3105 },
479bcc7c
HX
3106 .setkey = aead_setkey,
3107 .setauthsize = aead_setauthsize,
3108 .encrypt = aead_encrypt,
3109 .decrypt = aead_decrypt,
3110 .ivsize = CTR_RFC3686_IV_SIZE,
3111 .maxauthsize = SHA256_DIGEST_SIZE,
3112 },
3113 .caam = {
3114 .class1_alg_type = OP_ALG_ALGSEL_AES |
3115 OP_ALG_AAI_CTR_MOD128,
3116 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3117 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3118 .rfc3686 = true,
3119 },
acdca31d
YK
3120 },
3121 {
479bcc7c
HX
3122 .aead = {
3123 .base = {
3124 .cra_name = "seqiv(authenc(hmac(sha256),"
3125 "rfc3686(ctr(aes))))",
3126 .cra_driver_name = "seqiv-authenc-hmac-sha256-"
3127 "rfc3686-ctr-aes-caam",
3128 .cra_blocksize = 1,
acdca31d 3129 },
479bcc7c
HX
3130 .setkey = aead_setkey,
3131 .setauthsize = aead_setauthsize,
3132 .encrypt = aead_encrypt,
8b18e235 3133 .decrypt = aead_decrypt,
479bcc7c
HX
3134 .ivsize = CTR_RFC3686_IV_SIZE,
3135 .maxauthsize = SHA256_DIGEST_SIZE,
3136 },
3137 .caam = {
3138 .class1_alg_type = OP_ALG_ALGSEL_AES |
3139 OP_ALG_AAI_CTR_MOD128,
3140 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3141 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3142 .rfc3686 = true,
3143 .geniv = true,
3144 },
2b22f6c5
CV
3145 },
3146 {
479bcc7c
HX
3147 .aead = {
3148 .base = {
3149 .cra_name = "authenc(hmac(sha384),"
3150 "rfc3686(ctr(aes)))",
3151 .cra_driver_name = "authenc-hmac-sha384-"
3152 "rfc3686-ctr-aes-caam",
3153 .cra_blocksize = 1,
2b22f6c5 3154 },
479bcc7c
HX
3155 .setkey = aead_setkey,
3156 .setauthsize = aead_setauthsize,
3157 .encrypt = aead_encrypt,
3158 .decrypt = aead_decrypt,
a5f57cff 3159 .ivsize = CTR_RFC3686_IV_SIZE,
479bcc7c
HX
3160 .maxauthsize = SHA384_DIGEST_SIZE,
3161 },
3162 .caam = {
3163 .class1_alg_type = OP_ALG_ALGSEL_AES |
3164 OP_ALG_AAI_CTR_MOD128,
3165 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3166 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3167 .rfc3686 = true,
3168 },
3169 },
f2147b88
HX
3170 {
3171 .aead = {
3172 .base = {
479bcc7c
HX
3173 .cra_name = "seqiv(authenc(hmac(sha384),"
3174 "rfc3686(ctr(aes))))",
3175 .cra_driver_name = "seqiv-authenc-hmac-sha384-"
3176 "rfc3686-ctr-aes-caam",
f2147b88
HX
3177 .cra_blocksize = 1,
3178 },
479bcc7c
HX
3179 .setkey = aead_setkey,
3180 .setauthsize = aead_setauthsize,
3181 .encrypt = aead_encrypt,
8b18e235 3182 .decrypt = aead_decrypt,
479bcc7c
HX
3183 .ivsize = CTR_RFC3686_IV_SIZE,
3184 .maxauthsize = SHA384_DIGEST_SIZE,
f2147b88
HX
3185 },
3186 .caam = {
479bcc7c
HX
3187 .class1_alg_type = OP_ALG_ALGSEL_AES |
3188 OP_ALG_AAI_CTR_MOD128,
3189 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3190 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3191 .rfc3686 = true,
3192 .geniv = true,
f2147b88
HX
3193 },
3194 },
3195 {
3196 .aead = {
3197 .base = {
479bcc7c
HX
3198 .cra_name = "authenc(hmac(sha512),"
3199 "rfc3686(ctr(aes)))",
3200 .cra_driver_name = "authenc-hmac-sha512-"
3201 "rfc3686-ctr-aes-caam",
f2147b88
HX
3202 .cra_blocksize = 1,
3203 },
479bcc7c
HX
3204 .setkey = aead_setkey,
3205 .setauthsize = aead_setauthsize,
3206 .encrypt = aead_encrypt,
3207 .decrypt = aead_decrypt,
3208 .ivsize = CTR_RFC3686_IV_SIZE,
3209 .maxauthsize = SHA512_DIGEST_SIZE,
f2147b88
HX
3210 },
3211 .caam = {
479bcc7c
HX
3212 .class1_alg_type = OP_ALG_ALGSEL_AES |
3213 OP_ALG_AAI_CTR_MOD128,
3214 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3215 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c 3216 .rfc3686 = true,
f2147b88
HX
3217 },
3218 },
f2147b88
HX
3219 {
3220 .aead = {
3221 .base = {
479bcc7c
HX
3222 .cra_name = "seqiv(authenc(hmac(sha512),"
3223 "rfc3686(ctr(aes))))",
3224 .cra_driver_name = "seqiv-authenc-hmac-sha512-"
3225 "rfc3686-ctr-aes-caam",
f2147b88
HX
3226 .cra_blocksize = 1,
3227 },
479bcc7c
HX
3228 .setkey = aead_setkey,
3229 .setauthsize = aead_setauthsize,
3230 .encrypt = aead_encrypt,
8b18e235 3231 .decrypt = aead_decrypt,
479bcc7c
HX
3232 .ivsize = CTR_RFC3686_IV_SIZE,
3233 .maxauthsize = SHA512_DIGEST_SIZE,
f2147b88
HX
3234 },
3235 .caam = {
479bcc7c
HX
3236 .class1_alg_type = OP_ALG_ALGSEL_AES |
3237 OP_ALG_AAI_CTR_MOD128,
3238 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3239 OP_ALG_AAI_HMAC_PRECOMP,
479bcc7c
HX
3240 .rfc3686 = true,
3241 .geniv = true,
f2147b88
HX
3242 },
3243 },
3244};
3245
/*
 * Runtime wrapper that ties a registered crypto_alg back to its CAAM
 * template data. Instances are allocated by caam_alg_alloc() and kept
 * on alg_list so caam_algapi_exit() can unregister and free them.
 */
struct caam_crypto_alg {
	struct crypto_alg crypto_alg;
	struct list_head entry;		/* link in alg_list */
	struct caam_alg_entry caam;	/* class1/class2 algorithm types */
};
3251
f2147b88 3252static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam)
8e8ec596 3253{
bbf22344
HG
3254 dma_addr_t dma_addr;
3255
cfc6f11b
RG
3256 ctx->jrdev = caam_jr_alloc();
3257 if (IS_ERR(ctx->jrdev)) {
3258 pr_err("Job Ring Device allocation for transform failed\n");
3259 return PTR_ERR(ctx->jrdev);
3260 }
8e8ec596 3261
bbf22344
HG
3262 dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
3263 offsetof(struct caam_ctx,
3264 sh_desc_enc_dma),
3265 DMA_TO_DEVICE, DMA_ATTR_SKIP_CPU_SYNC);
3266 if (dma_mapping_error(ctx->jrdev, dma_addr)) {
3267 dev_err(ctx->jrdev, "unable to map key, shared descriptors\n");
3268 caam_jr_free(ctx->jrdev);
3269 return -ENOMEM;
3270 }
3271
3272 ctx->sh_desc_enc_dma = dma_addr;
3273 ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx,
3274 sh_desc_dec);
3275 ctx->sh_desc_givenc_dma = dma_addr + offsetof(struct caam_ctx,
3276 sh_desc_givenc);
3277 ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key);
3278
8e8ec596 3279 /* copy descriptor header template value */
db57656b
HG
3280 ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
3281 ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;
8e8ec596
KP
3282
3283 return 0;
3284}
3285
f2147b88 3286static int caam_cra_init(struct crypto_tfm *tfm)
8e8ec596 3287{
f2147b88
HX
3288 struct crypto_alg *alg = tfm->__crt_alg;
3289 struct caam_crypto_alg *caam_alg =
3290 container_of(alg, struct caam_crypto_alg, crypto_alg);
8e8ec596
KP
3291 struct caam_ctx *ctx = crypto_tfm_ctx(tfm);
3292
f2147b88
HX
3293 return caam_init_common(ctx, &caam_alg->caam);
3294}
3295
3296static int caam_aead_init(struct crypto_aead *tfm)
3297{
3298 struct aead_alg *alg = crypto_aead_alg(tfm);
3299 struct caam_aead_alg *caam_alg =
3300 container_of(alg, struct caam_aead_alg, aead);
3301 struct caam_ctx *ctx = crypto_aead_ctx(tfm);
3302
3303 return caam_init_common(ctx, &caam_alg->caam);
3304}
3305
3306static void caam_exit_common(struct caam_ctx *ctx)
3307{
bbf22344
HG
3308 dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
3309 offsetof(struct caam_ctx, sh_desc_enc_dma),
3310 DMA_TO_DEVICE, DMA_ATTR_SKIP_CPU_SYNC);
cfc6f11b 3311 caam_jr_free(ctx->jrdev);
8e8ec596
KP
3312}
3313
f2147b88
HX
/* crypto_alg ->cra_exit hook. */
static void caam_cra_exit(struct crypto_tfm *tfm)
{
	struct caam_ctx *ctx = crypto_tfm_ctx(tfm);

	caam_exit_common(ctx);
}
3318
/* aead_alg ->exit hook. */
static void caam_aead_exit(struct crypto_aead *tfm)
{
	struct caam_ctx *ctx = crypto_aead_ctx(tfm);

	caam_exit_common(ctx);
}
3323
8e8ec596
KP
3324static void __exit caam_algapi_exit(void)
3325{
3326
8e8ec596 3327 struct caam_crypto_alg *t_alg, *n;
f2147b88
HX
3328 int i;
3329
3330 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
3331 struct caam_aead_alg *t_alg = driver_aeads + i;
3332
3333 if (t_alg->registered)
3334 crypto_unregister_aead(&t_alg->aead);
3335 }
8e8ec596 3336
cfc6f11b 3337 if (!alg_list.next)
8e8ec596
KP
3338 return;
3339
cfc6f11b 3340 list_for_each_entry_safe(t_alg, n, &alg_list, entry) {
8e8ec596
KP
3341 crypto_unregister_alg(&t_alg->crypto_alg);
3342 list_del(&t_alg->entry);
3343 kfree(t_alg);
3344 }
8e8ec596
KP
3345}
3346
cfc6f11b 3347static struct caam_crypto_alg *caam_alg_alloc(struct caam_alg_template
8e8ec596
KP
3348 *template)
3349{
3350 struct caam_crypto_alg *t_alg;
3351 struct crypto_alg *alg;
3352
9c4f9733 3353 t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL);
8e8ec596 3354 if (!t_alg) {
cfc6f11b 3355 pr_err("failed to allocate t_alg\n");
8e8ec596
KP
3356 return ERR_PTR(-ENOMEM);
3357 }
3358
3359 alg = &t_alg->crypto_alg;
3360
3361 snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", template->name);
3362 snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
3363 template->driver_name);
3364 alg->cra_module = THIS_MODULE;
3365 alg->cra_init = caam_cra_init;
3366 alg->cra_exit = caam_cra_exit;
3367 alg->cra_priority = CAAM_CRA_PRIORITY;
8e8ec596
KP
3368 alg->cra_blocksize = template->blocksize;
3369 alg->cra_alignmask = 0;
8e8ec596 3370 alg->cra_ctxsize = sizeof(struct caam_ctx);
d912bb76
NM
3371 alg->cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY |
3372 template->type;
885e9e2f 3373 switch (template->type) {
7222d1a3
CV
3374 case CRYPTO_ALG_TYPE_GIVCIPHER:
3375 alg->cra_type = &crypto_givcipher_type;
3376 alg->cra_ablkcipher = template->template_ablkcipher;
3377 break;
acdca31d
YK
3378 case CRYPTO_ALG_TYPE_ABLKCIPHER:
3379 alg->cra_type = &crypto_ablkcipher_type;
3380 alg->cra_ablkcipher = template->template_ablkcipher;
3381 break;
885e9e2f 3382 }
8e8ec596 3383
f2147b88
HX
3384 t_alg->caam.class1_alg_type = template->class1_alg_type;
3385 t_alg->caam.class2_alg_type = template->class2_alg_type;
8e8ec596
KP
3386
3387 return t_alg;
3388}
3389
f2147b88
HX
3390static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
3391{
3392 struct aead_alg *alg = &t_alg->aead;
3393
3394 alg->base.cra_module = THIS_MODULE;
3395 alg->base.cra_priority = CAAM_CRA_PRIORITY;
3396 alg->base.cra_ctxsize = sizeof(struct caam_ctx);
5e4b8c1f 3397 alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
f2147b88
HX
3398
3399 alg->init = caam_aead_init;
3400 alg->exit = caam_aead_exit;
3401}
3402
8e8ec596
KP
3403static int __init caam_algapi_init(void)
3404{
35af6403
RG
3405 struct device_node *dev_node;
3406 struct platform_device *pdev;
3407 struct device *ctrldev;
bf83490e 3408 struct caam_drv_private *priv;
8e8ec596 3409 int i = 0, err = 0;
bf83490e
VM
3410 u32 cha_vid, cha_inst, des_inst, aes_inst, md_inst;
3411 unsigned int md_limit = SHA512_DIGEST_SIZE;
f2147b88 3412 bool registered = false;
8e8ec596 3413
35af6403
RG
3414 dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0");
3415 if (!dev_node) {
3416 dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0");
3417 if (!dev_node)
3418 return -ENODEV;
3419 }
3420
3421 pdev = of_find_device_by_node(dev_node);
3422 if (!pdev) {
3423 of_node_put(dev_node);
3424 return -ENODEV;
3425 }
3426
3427 ctrldev = &pdev->dev;
3428 priv = dev_get_drvdata(ctrldev);
3429 of_node_put(dev_node);
3430
3431 /*
3432 * If priv is NULL, it's probably because the caam driver wasn't
3433 * properly initialized (e.g. RNG4 init failed). Thus, bail out here.
3434 */
3435 if (!priv)
3436 return -ENODEV;
3437
3438
cfc6f11b 3439 INIT_LIST_HEAD(&alg_list);
8e8ec596 3440
bf83490e
VM
3441 /*
3442 * Register crypto algorithms the device supports.
3443 * First, detect presence and attributes of DES, AES, and MD blocks.
3444 */
3445 cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
3446 cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
3447 des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >> CHA_ID_LS_DES_SHIFT;
3448 aes_inst = (cha_inst & CHA_ID_LS_AES_MASK) >> CHA_ID_LS_AES_SHIFT;
3449 md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
3450
3451 /* If MD is present, limit digest size based on LP256 */
3452 if (md_inst && ((cha_vid & CHA_ID_LS_MD_MASK) == CHA_ID_LS_MD_LP256))
3453 md_limit = SHA256_DIGEST_SIZE;
3454
8e8ec596 3455 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
8e8ec596 3456 struct caam_crypto_alg *t_alg;
bf83490e
VM
3457 struct caam_alg_template *alg = driver_algs + i;
3458 u32 alg_sel = alg->class1_alg_type & OP_ALG_ALGSEL_MASK;
3459
3460 /* Skip DES algorithms if not supported by device */
3461 if (!des_inst &&
3462 ((alg_sel == OP_ALG_ALGSEL_3DES) ||
3463 (alg_sel == OP_ALG_ALGSEL_DES)))
3464 continue;
3465
3466 /* Skip AES algorithms if not supported by device */
3467 if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
3468 continue;
8e8ec596 3469
83d2c9a9
SE
3470 /*
3471 * Check support for AES modes not available
3472 * on LP devices.
3473 */
3474 if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
3475 if ((alg->class1_alg_type & OP_ALG_AAI_MASK) ==
3476 OP_ALG_AAI_XTS)
3477 continue;
3478
bf83490e 3479 t_alg = caam_alg_alloc(alg);
8e8ec596
KP
3480 if (IS_ERR(t_alg)) {
3481 err = PTR_ERR(t_alg);
bf83490e 3482 pr_warn("%s alg allocation failed\n", alg->driver_name);
8e8ec596
KP
3483 continue;
3484 }
3485
3486 err = crypto_register_alg(&t_alg->crypto_alg);
3487 if (err) {
cfc6f11b 3488 pr_warn("%s alg registration failed\n",
8e8ec596
KP
3489 t_alg->crypto_alg.cra_driver_name);
3490 kfree(t_alg);
f2147b88
HX
3491 continue;
3492 }
3493
3494 list_add_tail(&t_alg->entry, &alg_list);
3495 registered = true;
3496 }
3497
3498 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
3499 struct caam_aead_alg *t_alg = driver_aeads + i;
bf83490e
VM
3500 u32 c1_alg_sel = t_alg->caam.class1_alg_type &
3501 OP_ALG_ALGSEL_MASK;
3502 u32 c2_alg_sel = t_alg->caam.class2_alg_type &
3503 OP_ALG_ALGSEL_MASK;
3504 u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;
3505
3506 /* Skip DES algorithms if not supported by device */
3507 if (!des_inst &&
3508 ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
3509 (c1_alg_sel == OP_ALG_ALGSEL_DES)))
3510 continue;
3511
3512 /* Skip AES algorithms if not supported by device */
3513 if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
3514 continue;
3515
3516 /*
3517 * Check support for AES algorithms not available
3518 * on LP devices.
3519 */
3520 if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
3521 if (alg_aai == OP_ALG_AAI_GCM)
3522 continue;
3523
3524 /*
3525 * Skip algorithms requiring message digests
3526 * if MD or MD size is not supported by device.
3527 */
3528 if (c2_alg_sel &&
3529 (!md_inst || (t_alg->aead.maxauthsize > md_limit)))
3530 continue;
f2147b88
HX
3531
3532 caam_aead_alg_init(t_alg);
3533
3534 err = crypto_register_aead(&t_alg->aead);
3535 if (err) {
3536 pr_warn("%s alg registration failed\n",
3537 t_alg->aead.base.cra_driver_name);
3538 continue;
3539 }
3540
3541 t_alg->registered = true;
3542 registered = true;
8e8ec596 3543 }
f2147b88
HX
3544
3545 if (registered)
cfc6f11b 3546 pr_info("caam algorithms registered in /proc/crypto\n");
8e8ec596
KP
3547
3548 return err;
3549}
3550
/* Module entry/exit hooks and metadata for the CAAM algapi module. */
module_init(caam_algapi_init);
module_exit(caam_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("FSL CAAM support for crypto API");
MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");