// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2017 Marvell
 *
 * Antoine Tenart <antoine.tenart@free-electrons.com>
 */

#include <crypto/aes.h>
#include <crypto/hmac.h>
#include <crypto/md5.h>
#include <crypto/sha.h>
#include <crypto/skcipher.h>
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>

#include "safexcel.h"

struct safexcel_ahash_ctx {
	struct safexcel_context base;
	struct safexcel_crypto_priv *priv;

	u32 alg;
	u8 key_sz;
	bool cbcmac;

	u32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];
	u32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];

	struct crypto_cipher *kaes;
};

struct safexcel_ahash_req {
	bool last_req;
	bool finish;
	bool hmac;
	bool needs_inv;
	bool hmac_zlen;
	bool len_is_le;
	bool not_first;
	bool xcbcmac;

	int nents;
	dma_addr_t result_dma;

	u32 digest;

	u8 state_sz;    /* expected state size, only set once */
	u8 block_sz;    /* block size, only set once */
	u32 state[SHA512_DIGEST_SIZE / sizeof(u32)] __aligned(sizeof(u32));

	u64 len;
	u64 processed;

	u8 cache[HASH_CACHE_SIZE] __aligned(sizeof(u32));
	dma_addr_t cache_dma;
	unsigned int cache_sz;

	u8 cache_next[HASH_CACHE_SIZE] __aligned(sizeof(u32));
};

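/* Number of bytes accepted by the driver but not yet sent to the engine */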
static inline u64 safexcel_queued_len(struct safexcel_ahash_req *req)
{
	return req->len - req->processed;
}

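/*
 * Build the instruction tokens for a hash operation: token 0 runs the input
 * data through the hash engine, token 1 (CBC-MAC only) pads a partial final
 * block out to a full 16 bytes, and token 2 inserts the resulting digest
 * into the result buffer.
 */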
static void safexcel_hash_token(struct safexcel_command_desc *cdesc,
				u32 input_length, u32 result_length,
				bool cbcmac)
{
	struct safexcel_token *token =
		(struct safexcel_token *)cdesc->control_data.token;

	token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
	token[0].packet_length = input_length;
	token[0].instructions = EIP197_TOKEN_INS_TYPE_HASH;

	input_length &= 15;
	if (unlikely(cbcmac && input_length)) {
		token[1].opcode = EIP197_TOKEN_OPCODE_INSERT;
		token[1].packet_length = 16 - input_length;
		token[1].stat = EIP197_TOKEN_STAT_LAST_HASH;
		token[1].instructions = EIP197_TOKEN_INS_TYPE_HASH;
	} else {
		token[0].stat = EIP197_TOKEN_STAT_LAST_HASH;
	}

	token[2].opcode = EIP197_TOKEN_OPCODE_INSERT;
	token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
			EIP197_TOKEN_STAT_LAST_PACKET;
	token[2].packet_length = result_length;
	token[2].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
				EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
}

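/*
 * Program the per-request control words: select the algorithm and digest
 * type, load the (inner) digest and, where needed, the block counter into
 * the context record, and decide whether the engine should finalize the
 * hash or leave it open for continuation.
 */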
static void safexcel_context_control(struct safexcel_ahash_ctx *ctx,
				     struct safexcel_ahash_req *req,
				     struct safexcel_command_desc *cdesc)
{
	struct safexcel_crypto_priv *priv = ctx->priv;
	u64 count = 0;

	cdesc->control_data.control0 = ctx->alg;

	/*
	 * Copy the input digest if needed, and setup the context
	 * fields. Do this now as we need it to setup the first command
	 * descriptor.
	 */
	if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM)) {
		if (req->xcbcmac)
			memcpy(ctx->base.ctxr->data, ctx->ipad, ctx->key_sz);
		else
			memcpy(ctx->base.ctxr->data, req->state, req->state_sz);

		if (!req->finish && req->xcbcmac)
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_DIGEST_XCM |
				CONTEXT_CONTROL_TYPE_HASH_OUT |
				CONTEXT_CONTROL_NO_FINISH_HASH |
				CONTEXT_CONTROL_SIZE(req->state_sz /
						     sizeof(u32));
		else
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_DIGEST_XCM |
				CONTEXT_CONTROL_TYPE_HASH_OUT |
				CONTEXT_CONTROL_SIZE(req->state_sz /
						     sizeof(u32));
		return;
	} else if (!req->processed) {
		/* First - and possibly only - block of basic hash only */
		if (req->finish)
			cdesc->control_data.control0 |= req->digest |
				CONTEXT_CONTROL_TYPE_HASH_OUT |
				CONTEXT_CONTROL_RESTART_HASH |
				/* ensure it's not 0! */
				CONTEXT_CONTROL_SIZE(1);
		else
			cdesc->control_data.control0 |= req->digest |
				CONTEXT_CONTROL_TYPE_HASH_OUT |
				CONTEXT_CONTROL_RESTART_HASH |
				CONTEXT_CONTROL_NO_FINISH_HASH |
				/* ensure it's not 0! */
				CONTEXT_CONTROL_SIZE(1);
		return;
	}

	/* Hash continuation or HMAC, setup (inner) digest from state */
	memcpy(ctx->base.ctxr->data, req->state, req->state_sz);

	if (req->finish) {
		/* Compute digest count for hash/HMAC finish operations */
		if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
		    req->hmac_zlen || (req->processed != req->block_sz)) {
			count = req->processed / EIP197_COUNTER_BLOCK_SIZE;

			/* This is a hardware limitation, as the
			 * counter must fit into a u32. This represents
			 * a fairly big amount of input data, so we
			 * shouldn't see this.
			 */
			if (unlikely(count & 0xffffffff00000000ULL)) {
				dev_warn(priv->dev,
					 "Input data is too big\n");
				return;
			}
		}

		if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
		    /* Special case: zero length HMAC */
		    req->hmac_zlen ||
		    /* PE HW < 4.4 cannot do HMAC continue, fake using hash */
		    (req->processed != req->block_sz)) {
			/* Basic hash continue operation, need digest + cnt */
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_SIZE((req->state_sz >> 2) + 1) |
				CONTEXT_CONTROL_TYPE_HASH_OUT |
				CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
			/* For zero-len HMAC, don't finalize, already padded! */
			if (req->hmac_zlen)
				cdesc->control_data.control0 |=
					CONTEXT_CONTROL_NO_FINISH_HASH;
			cdesc->control_data.control1 |=
				CONTEXT_CONTROL_DIGEST_CNT;
			ctx->base.ctxr->data[req->state_sz >> 2] =
				cpu_to_le32(count);
			req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;

			/* Clear zero-length HMAC flag for next operation! */
			req->hmac_zlen = false;
		} else { /* HMAC */
			/* Need outer digest for HMAC finalization */
			memcpy(ctx->base.ctxr->data + (req->state_sz >> 2),
			       ctx->opad, req->state_sz);

			/* Single pass HMAC - no digest count */
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_SIZE(req->state_sz >> 1) |
				CONTEXT_CONTROL_TYPE_HASH_OUT |
				CONTEXT_CONTROL_DIGEST_HMAC;
		}
	} else { /* Hash continuation, do not finish yet */
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_SIZE(req->state_sz >> 2) |
			CONTEXT_CONTROL_DIGEST_PRECOMPUTED |
			CONTEXT_CONTROL_TYPE_HASH_OUT |
			CONTEXT_CONTROL_NO_FINISH_HASH;
	}
}

static int safexcel_ahash_enqueue(struct ahash_request *areq);

static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv,
				      int ring,
				      struct crypto_async_request *async,
				      bool *should_complete, int *ret)
{
	struct safexcel_result_desc *rdesc;
	struct ahash_request *areq = ahash_request_cast(async);
	struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_req *sreq = ahash_request_ctx(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
	u64 cache_len;

	*ret = 0;

	rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
	if (IS_ERR(rdesc)) {
		dev_err(priv->dev,
			"hash: result: could not retrieve the result descriptor\n");
		*ret = PTR_ERR(rdesc);
	} else {
		*ret = safexcel_rdesc_check_errors(priv, rdesc);
	}

	safexcel_complete(priv, ring);

	if (sreq->nents) {
		dma_unmap_sg(priv->dev, areq->src, sreq->nents, DMA_TO_DEVICE);
		sreq->nents = 0;
	}

	if (sreq->result_dma) {
		dma_unmap_single(priv->dev, sreq->result_dma, sreq->state_sz,
				 DMA_FROM_DEVICE);
		sreq->result_dma = 0;
	}

	if (sreq->cache_dma) {
		dma_unmap_single(priv->dev, sreq->cache_dma, sreq->cache_sz,
				 DMA_TO_DEVICE);
		sreq->cache_dma = 0;
		sreq->cache_sz = 0;
	}

	if (sreq->finish) {
		if (sreq->hmac &&
		    (sreq->digest != CONTEXT_CONTROL_DIGEST_HMAC)) {
			/* Faking HMAC using hash - need to do outer hash */
			memcpy(sreq->cache, sreq->state,
			       crypto_ahash_digestsize(ahash));

			memcpy(sreq->state, ctx->opad, sreq->state_sz);

			sreq->len = sreq->block_sz +
				    crypto_ahash_digestsize(ahash);
			sreq->processed = sreq->block_sz;
			sreq->hmac = 0;

			ctx->base.needs_inv = true;
			areq->nbytes = 0;
			safexcel_ahash_enqueue(areq);

			*should_complete = false; /* Not done yet */
			return 1;
		}

		if (unlikely(sreq->digest == CONTEXT_CONTROL_DIGEST_XCM &&
			     ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_CRC32)) {
			/* Undo final XOR with 0xffffffff ...*/
			*(u32 *)areq->result = ~sreq->state[0];
		} else {
			memcpy(areq->result, sreq->state,
			       crypto_ahash_digestsize(ahash));
		}
	}

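	/* Carry any data still queued in the cache over to the next operation */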
	cache_len = safexcel_queued_len(sreq);
	if (cache_len)
		memcpy(sreq->cache, sreq->cache_next, cache_len);

	*should_complete = true;

	return 1;
}

static int safexcel_ahash_send_req(struct crypto_async_request *async, int ring,
				   int *commands, int *results)
{
	struct ahash_request *areq = ahash_request_cast(async);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct safexcel_command_desc *cdesc, *first_cdesc = NULL;
	struct safexcel_result_desc *rdesc;
	struct scatterlist *sg;
	int i, extra = 0, n_cdesc = 0, ret = 0, cache_len, skip = 0, res_sz;
	u64 queued, len;

	queued = safexcel_queued_len(req);
	if (queued <= HASH_CACHE_SIZE)
		cache_len = queued;
	else
		cache_len = queued - areq->nbytes;

	if (!req->finish && !req->last_req) {
		/* If this is not the last request and the queued data does not
		 * fit into full cache blocks, cache it for the next send call.
		 */
		extra = queued & (HASH_CACHE_SIZE - 1);

		/* If this is not the last request and the queued data
		 * is a multiple of a block, cache the last one for now.
		 */
		if (!extra)
			extra = HASH_CACHE_SIZE;

		sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
				   req->cache_next, extra,
				   areq->nbytes - extra);

		queued -= extra;

		if (!queued) {
			*commands = 0;
			*results = 0;
			return 0;
		}

		extra = 0;
	}

	if (unlikely(req->xcbcmac && req->processed > AES_BLOCK_SIZE)) {
		if (unlikely(cache_len < AES_BLOCK_SIZE)) {
			/*
			 * Cache contains less than 1 full block, complete.
			 */
			extra = AES_BLOCK_SIZE - cache_len;
			if (queued > cache_len) {
				/* More data follows: borrow bytes */
				u64 tmp = queued - cache_len;

				skip = min_t(u64, tmp, extra);
				sg_pcopy_to_buffer(areq->src,
						   sg_nents(areq->src),
						   req->cache + cache_len,
						   skip, 0);
			}
			extra -= skip;
			memset(req->cache + cache_len + skip, 0, extra);
			if (!ctx->cbcmac && extra) {
				// 10- padding for XCBCMAC & CMAC
				req->cache[cache_len + skip] = 0x80;
				// HW will use K2 instead of K3 - compensate!
				for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
					((u32 *)req->cache)[i] ^=
						cpu_to_be32(ctx->ipad[i]) ^
						cpu_to_be32(ctx->ipad[i + 4]);
			}
			cache_len = AES_BLOCK_SIZE;
			queued = queued + extra;
		}

		/* XCBC continue: XOR previous result into 1st word */
		crypto_xor(req->cache, (const u8 *)req->state, AES_BLOCK_SIZE);
	}

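	/* len is the total number of bytes this descriptor chain feeds to the engine */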
	len = queued;
	/* Add a command descriptor for the cached data, if any */
	if (cache_len) {
		req->cache_dma = dma_map_single(priv->dev, req->cache,
						cache_len, DMA_TO_DEVICE);
		if (dma_mapping_error(priv->dev, req->cache_dma))
			return -EINVAL;

		req->cache_sz = cache_len;
		first_cdesc = safexcel_add_cdesc(priv, ring, 1,
						 (cache_len == len),
						 req->cache_dma, cache_len,
						 len, ctx->base.ctxr_dma);
		if (IS_ERR(first_cdesc)) {
			ret = PTR_ERR(first_cdesc);
			goto unmap_cache;
		}
		n_cdesc++;

		queued -= cache_len;
		if (!queued)
			goto send_command;
	}

	/* Now handle the current ahash request buffer(s) */
	req->nents = dma_map_sg(priv->dev, areq->src,
				sg_nents_for_len(areq->src,
						 areq->nbytes),
				DMA_TO_DEVICE);
	if (!req->nents) {
		ret = -ENOMEM;
		goto cdesc_rollback;
	}

	for_each_sg(areq->src, sg, req->nents, i) {
		int sglen = sg_dma_len(sg);

		if (unlikely(sglen <= skip)) {
			skip -= sglen;
			continue;
		}

		/* Do not overflow the request */
		if ((queued + skip) <= sglen)
			sglen = queued;
		else
			sglen -= skip;

		cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
					   !(queued - sglen),
					   sg_dma_address(sg) + skip, sglen,
					   len, ctx->base.ctxr_dma);
		if (IS_ERR(cdesc)) {
			ret = PTR_ERR(cdesc);
			goto unmap_sg;
		}

		if (!n_cdesc)
			first_cdesc = cdesc;
		n_cdesc++;

		queued -= sglen;
		if (!queued)
			break;
		skip = 0;
	}

send_command:
	/* Setup the context options */
	safexcel_context_control(ctx, req, first_cdesc);

	/* Add the token. Note that the XCBC result is only 1 AES block. */
	res_sz = req->xcbcmac ? AES_BLOCK_SIZE : req->state_sz;
	safexcel_hash_token(first_cdesc, len, res_sz, ctx->cbcmac);

	req->result_dma = dma_map_single(priv->dev, req->state, req->state_sz,
					 DMA_FROM_DEVICE);
	if (dma_mapping_error(priv->dev, req->result_dma)) {
		ret = -EINVAL;
		goto unmap_sg;
	}

	/* Add a result descriptor */
	rdesc = safexcel_add_rdesc(priv, ring, 1, 1, req->result_dma,
				   res_sz);
	if (IS_ERR(rdesc)) {
		ret = PTR_ERR(rdesc);
		goto unmap_result;
	}

	safexcel_rdr_req_set(priv, ring, rdesc, &areq->base);

	req->processed += len - extra;

	*commands = n_cdesc;
	*results = 1;
	return 0;

unmap_result:
	dma_unmap_single(priv->dev, req->result_dma, req->state_sz,
			 DMA_FROM_DEVICE);
unmap_sg:
	if (req->nents) {
		dma_unmap_sg(priv->dev, areq->src, req->nents, DMA_TO_DEVICE);
		req->nents = 0;
	}
cdesc_rollback:
	for (i = 0; i < n_cdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
unmap_cache:
	if (req->cache_dma) {
		dma_unmap_single(priv->dev, req->cache_dma, req->cache_sz,
				 DMA_TO_DEVICE);
		req->cache_dma = 0;
		req->cache_sz = 0;
	}

	return ret;
}

static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
				      int ring,
				      struct crypto_async_request *async,
				      bool *should_complete, int *ret)
{
	struct safexcel_result_desc *rdesc;
	struct ahash_request *areq = ahash_request_cast(async);
	struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
	int enq_ret;

	*ret = 0;

	rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
	if (IS_ERR(rdesc)) {
		dev_err(priv->dev,
			"hash: invalidate: could not retrieve the result descriptor\n");
		*ret = PTR_ERR(rdesc);
	} else {
		*ret = safexcel_rdesc_check_errors(priv, rdesc);
	}

	safexcel_complete(priv, ring);

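	/*
	 * On a tfm exit invalidation, free the context record now; otherwise
	 * re-queue the request so it runs with a fresh context.
	 */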
	if (ctx->base.exit_inv) {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);

		*should_complete = true;
		return 1;
	}

	ring = safexcel_select_ring(priv);
	ctx->base.ring = ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, async);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	if (enq_ret != -EINPROGRESS)
		*ret = enq_ret;

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	*should_complete = false;

	return 1;
}

static int safexcel_handle_result(struct safexcel_crypto_priv *priv, int ring,
				  struct crypto_async_request *async,
				  bool *should_complete, int *ret)
{
	struct ahash_request *areq = ahash_request_cast(async);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	int err;

	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && req->needs_inv);

	if (req->needs_inv) {
		req->needs_inv = false;
		err = safexcel_handle_inv_result(priv, ring, async,
						 should_complete, ret);
	} else {
		err = safexcel_handle_req_result(priv, ring, async,
						 should_complete, ret);
	}

	return err;
}

static int safexcel_ahash_send_inv(struct crypto_async_request *async,
				   int ring, int *commands, int *results)
{
	struct ahash_request *areq = ahash_request_cast(async);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	int ret;

	ret = safexcel_invalidate_cache(async, ctx->priv,
					ctx->base.ctxr_dma, ring);
	if (unlikely(ret))
		return ret;

	*commands = 1;
	*results = 1;

	return 0;
}

static int safexcel_ahash_send(struct crypto_async_request *async,
			       int ring, int *commands, int *results)
{
	struct ahash_request *areq = ahash_request_cast(async);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	int ret;

	if (req->needs_inv)
		ret = safexcel_ahash_send_inv(async, ring, commands, results);
	else
		ret = safexcel_ahash_send_req(async, ring, commands, results);

	return ret;
}

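/*
 * Build an invalidation request on the stack, queue it to the engine and
 * wait for it to complete, so the context record can be safely freed.
 */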
static int safexcel_ahash_exit_inv(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	EIP197_REQUEST_ON_STACK(req, ahash, EIP197_AHASH_REQ_SIZE);
	struct safexcel_ahash_req *rctx = ahash_request_ctx(req);
	struct safexcel_inv_result result = {};
	int ring = ctx->base.ring;

	memset(req, 0, EIP197_AHASH_REQ_SIZE);

	/* create invalidation request */
	init_completion(&result.completion);
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   safexcel_inv_complete, &result);

	ahash_request_set_tfm(req, __crypto_ahash_cast(tfm));
	ctx = crypto_tfm_ctx(req->base.tfm);
	ctx->base.exit_inv = true;
	rctx->needs_inv = true;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	crypto_enqueue_request(&priv->ring[ring].queue, &req->base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	wait_for_completion(&result.completion);

	if (result.error) {
		dev_warn(priv->dev, "hash: completion error (%d)\n",
			 result.error);
		return result.error;
	}

	return 0;
}

/* safexcel_ahash_cache: cache data until at least one request can be sent to
 * the engine, i.e. until there is at least one full block size in the pipe.
 */
static int safexcel_ahash_cache(struct ahash_request *areq)
{
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	u64 cache_len;

	/* cache_len: everything accepted by the driver but not sent yet,
	 * i.e. the total size handled by update() minus the last request
	 * size, minus the total size already handled by send()
	 */
	cache_len = safexcel_queued_len(req);

	/*
	 * In case there aren't enough bytes to proceed (less than a
	 * block size), cache the data until we have enough.
	 */
	if (cache_len + areq->nbytes <= HASH_CACHE_SIZE) {
		sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
				   req->cache + cache_len,
				   areq->nbytes, 0);
		return 0;
	}

	/* We couldn't cache all the data */
	return -E2BIG;
}

static int safexcel_ahash_enqueue(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret, ring;

	req->needs_inv = false;

	if (ctx->base.ctxr) {
		if (priv->flags & EIP197_TRC_CACHE && !ctx->base.needs_inv &&
		    /* invalidate for *any* non-XCBC continuation */
		    ((req->not_first && !req->xcbcmac) ||
		     /* invalidate if (i)digest changed */
		     memcmp(ctx->base.ctxr->data, req->state, req->state_sz) ||
		     /* invalidate for HMAC finish with odigest changed */
		     (req->finish && req->hmac &&
		      memcmp(ctx->base.ctxr->data + (req->state_sz >> 2),
			     ctx->opad, req->state_sz))))
			/*
			 * We're still setting needs_inv here, even though it is
			 * cleared right away, because the needs_inv flag can be
			 * set in other functions and we want to keep the same
			 * logic.
			 */
			ctx->base.needs_inv = true;

		if (ctx->base.needs_inv) {
			ctx->base.needs_inv = false;
			req->needs_inv = true;
		}
	} else {
		ctx->base.ring = safexcel_select_ring(priv);
		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
						 EIP197_GFP_FLAGS(areq->base),
						 &ctx->base.ctxr_dma);
		if (!ctx->base.ctxr)
			return -ENOMEM;
	}
	req->not_first = true;

	ring = ctx->base.ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	ret = crypto_enqueue_request(&priv->ring[ring].queue, &areq->base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	return ret;
}

static int safexcel_ahash_update(struct ahash_request *areq)
{
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	int ret;

	/* If the request is 0 length, do nothing */
	if (!areq->nbytes)
		return 0;

	/* Add request to the cache if it fits */
	ret = safexcel_ahash_cache(areq);

	/* Update total request length */
	req->len += areq->nbytes;

	/* If not all data could fit into the cache, go process the excess.
	 * Also go process immediately for an HMAC IV precompute, which
	 * will never be finished at all, but needs to be processed anyway.
	 */
	if ((ret && !req->finish) || req->last_req)
		return safexcel_ahash_enqueue(areq);

	return 0;
}

static int safexcel_ahash_final(struct ahash_request *areq)
{
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));

	req->finish = true;

	if (unlikely(!req->len && !areq->nbytes)) {
		/*
		 * If we have an overall 0 length *hash* request:
		 * The HW cannot do 0 length hash, so we provide the correct
		 * result directly here.
		 */
		if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5)
			memcpy(areq->result, md5_zero_message_hash,
			       MD5_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA1)
			memcpy(areq->result, sha1_zero_message_hash,
			       SHA1_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA224)
			memcpy(areq->result, sha224_zero_message_hash,
			       SHA224_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA256)
			memcpy(areq->result, sha256_zero_message_hash,
			       SHA256_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA384)
			memcpy(areq->result, sha384_zero_message_hash,
			       SHA384_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA512)
			memcpy(areq->result, sha512_zero_message_hash,
			       SHA512_DIGEST_SIZE);

		return 0;
	} else if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM &&
			    ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5 &&
			    req->len == sizeof(u32) && !areq->nbytes)) {
		/* Zero length CRC32 */
		memcpy(areq->result, ctx->ipad, sizeof(u32));
		return 0;
	} else if (unlikely(ctx->cbcmac && req->len == AES_BLOCK_SIZE &&
			    !areq->nbytes)) {
		/* Zero length CBC MAC */
		memset(areq->result, 0, AES_BLOCK_SIZE);
		return 0;
	} else if (unlikely(req->xcbcmac && req->len == AES_BLOCK_SIZE &&
			    !areq->nbytes)) {
		/* Zero length (X)CBC/CMAC */
		int i;

		for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
			((u32 *)areq->result)[i] =
				cpu_to_be32(ctx->ipad[i + 4]); // K3
		areq->result[0] ^= 0x80; // 10- padding
		crypto_cipher_encrypt_one(ctx->kaes, areq->result, areq->result);
		return 0;
	} else if (unlikely(req->hmac &&
			    (req->len == req->block_sz) &&
			    !areq->nbytes)) {
		/*
		 * If we have an overall 0 length *HMAC* request:
		 * For HMAC, we need to finalize the inner digest
		 * and then perform the outer hash.
		 */

		/* generate pad block in the cache */
		/* start with a hash block of all zeroes */
		memset(req->cache, 0, req->block_sz);
		/* set the first byte to 0x80 to 'append a 1 bit' */
		req->cache[0] = 0x80;
		/* add the length in bits in the last 2 bytes */
		if (req->len_is_le) {
			/* Little endian length word (e.g. MD5) */
			req->cache[req->block_sz - 8] = (req->block_sz << 3) &
							255;
			req->cache[req->block_sz - 7] = (req->block_sz >> 5);
		} else {
			/* Big endian length word (e.g. any SHA) */
			req->cache[req->block_sz - 2] = (req->block_sz >> 5);
			req->cache[req->block_sz - 1] = (req->block_sz << 3) &
							255;
		}

		req->len += req->block_sz; /* plus 1 hash block */

		/* Set special zero-length HMAC flag */
		req->hmac_zlen = true;

		/* Finalize HMAC */
		req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	} else if (req->hmac) {
		/* Finalize HMAC */
		req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	}

	return safexcel_ahash_enqueue(areq);
}

static int safexcel_ahash_finup(struct ahash_request *areq)
{
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	req->finish = true;

	safexcel_ahash_update(areq);
	return safexcel_ahash_final(areq);
}

static int safexcel_ahash_export(struct ahash_request *areq, void *out)
{
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	struct safexcel_ahash_export_state *export = out;

	export->len = req->len;
	export->processed = req->processed;

	export->digest = req->digest;

	memcpy(export->state, req->state, req->state_sz);
	memcpy(export->cache, req->cache, HASH_CACHE_SIZE);

	return 0;
}

static int safexcel_ahash_import(struct ahash_request *areq, const void *in)
{
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	const struct safexcel_ahash_export_state *export = in;
	int ret;

	ret = crypto_ahash_init(areq);
	if (ret)
		return ret;

	req->len = export->len;
	req->processed = export->processed;

	req->digest = export->digest;

	memcpy(req->cache, export->cache, HASH_CACHE_SIZE);
	memcpy(req->state, export->state, req->state_sz);

	return 0;
}

static int safexcel_ahash_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_alg_template *tmpl =
		container_of(__crypto_ahash_alg(tfm->__crt_alg),
			     struct safexcel_alg_template, alg.ahash);

	ctx->priv = tmpl->priv;
	ctx->base.send = safexcel_ahash_send;
	ctx->base.handle_result = safexcel_handle_result;

	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct safexcel_ahash_req));
	return 0;
}

static int safexcel_sha1_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA1_DIGEST_SIZE;
	req->block_sz = SHA1_BLOCK_SIZE;

	return 0;
}

static int safexcel_sha1_digest(struct ahash_request *areq)
{
	int ret = safexcel_sha1_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

static void safexcel_ahash_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	/* context not allocated, skip invalidation */
	if (!ctx->base.ctxr)
		return;

	if (priv->flags & EIP197_TRC_CACHE) {
		ret = safexcel_ahash_exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "hash: invalidation error %d\n", ret);
	} else {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
	}
}

struct safexcel_alg_template safexcel_alg_sha1 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA1,
	.alg.ahash = {
		.init = safexcel_sha1_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sha1_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA1_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha1",
				.cra_driver_name = "safexcel-sha1",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha1_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, ctx->ipad, SHA1_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len = SHA1_BLOCK_SIZE;
	req->processed = SHA1_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA1_DIGEST_SIZE;
	req->block_sz = SHA1_BLOCK_SIZE;
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_sha1_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_sha1_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_ahash_result {
	struct completion completion;
	int error;
};

static void safexcel_ahash_complete(struct crypto_async_request *req, int error)
{
	struct safexcel_ahash_result *result = req->data;

	if (error == -EINPROGRESS)
		return;

	result->error = error;
	complete(&result->completion);
}

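/*
 * Derive the HMAC inner and outer pad blocks: the key (hashed first if it is
 * longer than a block) is zero-padded to the block size and XORed with the
 * ipad/opad constants.
 */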
static int safexcel_hmac_init_pad(struct ahash_request *areq,
				  unsigned int blocksize, const u8 *key,
				  unsigned int keylen, u8 *ipad, u8 *opad)
{
	struct safexcel_ahash_result result;
	struct scatterlist sg;
	int ret, i;
	u8 *keydup;

	if (keylen <= blocksize) {
		memcpy(ipad, key, keylen);
	} else {
		keydup = kmemdup(key, keylen, GFP_KERNEL);
		if (!keydup)
			return -ENOMEM;

		ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
					   safexcel_ahash_complete, &result);
		sg_init_one(&sg, keydup, keylen);
		ahash_request_set_crypt(areq, &sg, ipad, keylen);
		init_completion(&result.completion);

		ret = crypto_ahash_digest(areq);
		if (ret == -EINPROGRESS || ret == -EBUSY) {
			wait_for_completion_interruptible(&result.completion);
			ret = result.error;
		}

		/* Avoid leaking */
		memzero_explicit(keydup, keylen);
		kfree(keydup);

		if (ret)
			return ret;

		keylen = crypto_ahash_digestsize(crypto_ahash_reqtfm(areq));
	}

	memset(ipad + keylen, 0, blocksize - keylen);
	memcpy(opad, ipad, blocksize);

	for (i = 0; i < blocksize; i++) {
		ipad[i] ^= HMAC_IPAD_VALUE;
		opad[i] ^= HMAC_OPAD_VALUE;
	}

	return 0;
}

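/*
 * Run a single pad block through the hash and export the resulting
 * intermediate state; this becomes the precomputed inner or outer digest
 * that is later loaded into the engine context.
 */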
static int safexcel_hmac_init_iv(struct ahash_request *areq,
				 unsigned int blocksize, u8 *pad, void *state)
{
	struct safexcel_ahash_result result;
	struct safexcel_ahash_req *req;
	struct scatterlist sg;
	int ret;

	ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   safexcel_ahash_complete, &result);
	sg_init_one(&sg, pad, blocksize);
	ahash_request_set_crypt(areq, &sg, pad, blocksize);
	init_completion(&result.completion);

	ret = crypto_ahash_init(areq);
	if (ret)
		return ret;

	req = ahash_request_ctx(areq);
	req->hmac = true;
	req->last_req = true;

	ret = crypto_ahash_update(areq);
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		return ret;

	wait_for_completion_interruptible(&result.completion);
	if (result.error)
		return result.error;

	return crypto_ahash_export(areq, state);
}

int safexcel_hmac_setkey(const char *alg, const u8 *key, unsigned int keylen,
			 void *istate, void *ostate)
{
	struct ahash_request *areq;
	struct crypto_ahash *tfm;
	unsigned int blocksize;
	u8 *ipad, *opad;
	int ret;

	tfm = crypto_alloc_ahash(alg, 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	areq = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!areq) {
		ret = -ENOMEM;
		goto free_ahash;
	}

	crypto_ahash_clear_flags(tfm, ~0);
	blocksize = crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));

	ipad = kcalloc(2, blocksize, GFP_KERNEL);
	if (!ipad) {
		ret = -ENOMEM;
		goto free_request;
	}

	opad = ipad + blocksize;

	ret = safexcel_hmac_init_pad(areq, blocksize, key, keylen, ipad, opad);
	if (ret)
		goto free_ipad;

	ret = safexcel_hmac_init_iv(areq, blocksize, ipad, istate);
	if (ret)
		goto free_ipad;

	ret = safexcel_hmac_init_iv(areq, blocksize, opad, ostate);

free_ipad:
	kfree(ipad);
free_request:
	ahash_request_free(areq);
free_ahash:
	crypto_free_ahash(tfm);

	return ret;
}

static int safexcel_hmac_alg_setkey(struct crypto_ahash *tfm, const u8 *key,
				    unsigned int keylen, const char *alg,
				    unsigned int state_sz)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct safexcel_ahash_export_state istate, ostate;
	int ret;

	ret = safexcel_hmac_setkey(alg, key, keylen, &istate, &ostate);
	if (ret)
		return ret;

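	/* Invalidate any cached context record if the precomputed digests changed */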
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr &&
	    (memcmp(ctx->ipad, istate.state, state_sz) ||
	     memcmp(ctx->opad, ostate.state, state_sz)))
		ctx->base.needs_inv = true;

	memcpy(ctx->ipad, &istate.state, state_sz);
	memcpy(ctx->opad, &ostate.state, state_sz);

	return 0;
}

static int safexcel_hmac_sha1_setkey(struct crypto_ahash *tfm, const u8 *key,
				     unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha1",
					SHA1_DIGEST_SIZE);
}

struct safexcel_alg_template safexcel_alg_hmac_sha1 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA1,
	.alg.ahash = {
		.init = safexcel_hmac_sha1_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sha1_digest,
		.setkey = safexcel_hmac_sha1_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA1_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha1)",
				.cra_driver_name = "safexcel-hmac-sha1",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sha256_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA256_DIGEST_SIZE;
	req->block_sz = SHA256_BLOCK_SIZE;

	return 0;
}

static int safexcel_sha256_digest(struct ahash_request *areq)
{
	int ret = safexcel_sha256_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sha256 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_256,
	.alg.ahash = {
		.init = safexcel_sha256_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sha256_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA256_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha256",
				.cra_driver_name = "safexcel-sha256",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sha224_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA256_DIGEST_SIZE;
	req->block_sz = SHA256_BLOCK_SIZE;

	return 0;
}

static int safexcel_sha224_digest(struct ahash_request *areq)
{
	int ret = safexcel_sha224_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sha224 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_256,
	.alg.ahash = {
		.init = safexcel_sha224_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sha224_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA224_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha224",
				.cra_driver_name = "safexcel-sha224",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha224_setkey(struct crypto_ahash *tfm, const u8 *key,
				       unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha224",
					SHA256_DIGEST_SIZE);
}

static int safexcel_hmac_sha224_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, ctx->ipad, SHA256_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len = SHA256_BLOCK_SIZE;
	req->processed = SHA256_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA256_DIGEST_SIZE;
	req->block_sz = SHA256_BLOCK_SIZE;
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_sha224_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_sha224_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_sha224 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_256,
	.alg.ahash = {
		.init = safexcel_hmac_sha224_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sha224_digest,
		.setkey = safexcel_hmac_sha224_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA224_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha224)",
				.cra_driver_name = "safexcel-hmac-sha224",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha256_setkey(struct crypto_ahash *tfm, const u8 *key,
				       unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha256",
					SHA256_DIGEST_SIZE);
}

static int safexcel_hmac_sha256_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, ctx->ipad, SHA256_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len = SHA256_BLOCK_SIZE;
	req->processed = SHA256_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA256_DIGEST_SIZE;
	req->block_sz = SHA256_BLOCK_SIZE;
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_sha256_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_sha256_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_sha256 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_256,
	.alg.ahash = {
		.init = safexcel_hmac_sha256_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sha256_digest,
		.setkey = safexcel_hmac_sha256_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA256_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha256)",
				.cra_driver_name = "safexcel-hmac-sha256",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sha512_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA512_DIGEST_SIZE;
	req->block_sz = SHA512_BLOCK_SIZE;

	return 0;
}

static int safexcel_sha512_digest(struct ahash_request *areq)
{
	int ret = safexcel_sha512_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sha512 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_512,
	.alg.ahash = {
		.init = safexcel_sha512_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sha512_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA512_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha512",
				.cra_driver_name = "safexcel-sha512",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sha384_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA512_DIGEST_SIZE;
	req->block_sz = SHA512_BLOCK_SIZE;

	return 0;
}

static int safexcel_sha384_digest(struct ahash_request *areq)
{
	int ret = safexcel_sha384_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sha384 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_512,
	.alg.ahash = {
		.init = safexcel_sha384_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sha384_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA384_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha384",
				.cra_driver_name = "safexcel-sha384",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha512_setkey(struct crypto_ahash *tfm, const u8 *key,
				       unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha512",
					SHA512_DIGEST_SIZE);
}

static int safexcel_hmac_sha512_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, ctx->ipad, SHA512_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len = SHA512_BLOCK_SIZE;
	req->processed = SHA512_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA512_DIGEST_SIZE;
	req->block_sz = SHA512_BLOCK_SIZE;
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_sha512_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_sha512_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_sha512 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_512,
	.alg.ahash = {
		.init = safexcel_hmac_sha512_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sha512_digest,
		.setkey = safexcel_hmac_sha512_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA512_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha512)",
				.cra_driver_name = "safexcel-hmac-sha512",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha384_setkey(struct crypto_ahash *tfm, const u8 *key,
				       unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha384",
					SHA512_DIGEST_SIZE);
}

static int safexcel_hmac_sha384_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, ctx->ipad, SHA512_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len = SHA512_BLOCK_SIZE;
	req->processed = SHA512_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA512_DIGEST_SIZE;
	req->block_sz = SHA512_BLOCK_SIZE;
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_sha384_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_sha384_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_sha384 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_512,
	.alg.ahash = {
		.init = safexcel_hmac_sha384_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sha384_digest,
		.setkey = safexcel_hmac_sha384_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA384_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha384)",
				.cra_driver_name = "safexcel-hmac-sha384",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

1725static int safexcel_md5_init(struct ahash_request *areq)
1726{
1727 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1728 struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1729
1730 memset(req, 0, sizeof(*req));
1731
293f89cf
OH
1732 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
1733 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1734 req->state_sz = MD5_DIGEST_SIZE;
41abed7d 1735 req->block_sz = MD5_HMAC_BLOCK_SIZE;
293f89cf
OH
1736
1737 return 0;
1738}
1739
1740static int safexcel_md5_digest(struct ahash_request *areq)
1741{
1742 int ret = safexcel_md5_init(areq);
1743
1744 if (ret)
1745 return ret;
1746
1747 return safexcel_ahash_finup(areq);
1748}
1749
1750struct safexcel_alg_template safexcel_alg_md5 = {
1751 .type = SAFEXCEL_ALG_TYPE_AHASH,
1752 .algo_mask = SAFEXCEL_ALG_MD5,
1753 .alg.ahash = {
1754 .init = safexcel_md5_init,
1755 .update = safexcel_ahash_update,
1756 .final = safexcel_ahash_final,
1757 .finup = safexcel_ahash_finup,
1758 .digest = safexcel_md5_digest,
1759 .export = safexcel_ahash_export,
1760 .import = safexcel_ahash_import,
1761 .halg = {
1762 .digestsize = MD5_DIGEST_SIZE,
1763 .statesize = sizeof(struct safexcel_ahash_export_state),
1764 .base = {
1765 .cra_name = "md5",
1766 .cra_driver_name = "safexcel-md5",
1767 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1768 .cra_flags = CRYPTO_ALG_ASYNC |
1769 CRYPTO_ALG_KERN_DRIVER_ONLY,
1770 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
1771 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1772 .cra_init = safexcel_ahash_cra_init,
1773 .cra_exit = safexcel_ahash_cra_exit,
1774 .cra_module = THIS_MODULE,
1775 },
1776 },
1777 },
1778};
1779
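/*
 * HMAC-MD5 follows the same ipad/opad precompute scheme as the SHA variants
 * above. The one twist is req->len_is_le: MD5 encodes the message bit length
 * little-endian in its final padding block, unlike the SHA family, so the
 * length field the driver appends has to be byte-swapped accordingly.
 */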
1780static int safexcel_hmac_md5_init(struct ahash_request *areq)
1781{
1782 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1783 struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1784
1785 memset(req, 0, sizeof(*req));
1786
1787 /* Start from ipad precompute */
1788 memcpy(req->state, ctx->ipad, MD5_DIGEST_SIZE);
1789 /* Already processed the key^ipad part now! */
1790 req->len = MD5_HMAC_BLOCK_SIZE;
1791 req->processed = MD5_HMAC_BLOCK_SIZE;
1792
1793 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
1794 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1795 req->state_sz = MD5_DIGEST_SIZE;
1796 req->block_sz = MD5_HMAC_BLOCK_SIZE;
1797 req->len_is_le = true; /* MD5 is little endian! ... */
1798 req->hmac = true;
1799
1800 return 0;
1801}
1802
1803static int safexcel_hmac_md5_setkey(struct crypto_ahash *tfm, const u8 *key,
1804 unsigned int keylen)
1805{
1806 return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-md5",
1807 MD5_DIGEST_SIZE);
1808}
1809
1810static int safexcel_hmac_md5_digest(struct ahash_request *areq)
1811{
1812 int ret = safexcel_hmac_md5_init(areq);
1813
1814 if (ret)
1815 return ret;
1816
1817 return safexcel_ahash_finup(areq);
1818}
1819
1820struct safexcel_alg_template safexcel_alg_hmac_md5 = {
1821 .type = SAFEXCEL_ALG_TYPE_AHASH,
1822 .algo_mask = SAFEXCEL_ALG_MD5,
1823 .alg.ahash = {
1824 .init = safexcel_hmac_md5_init,
1825 .update = safexcel_ahash_update,
1826 .final = safexcel_ahash_final,
1827 .finup = safexcel_ahash_finup,
1828 .digest = safexcel_hmac_md5_digest,
1829 .setkey = safexcel_hmac_md5_setkey,
1830 .export = safexcel_ahash_export,
1831 .import = safexcel_ahash_import,
1832 .halg = {
1833 .digestsize = MD5_DIGEST_SIZE,
1834 .statesize = sizeof(struct safexcel_ahash_export_state),
1835 .base = {
1836 .cra_name = "hmac(md5)",
1837 .cra_driver_name = "safexcel-hmac-md5",
1838 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1839 .cra_flags = CRYPTO_ALG_ASYNC |
1840 CRYPTO_ALG_KERN_DRIVER_ONLY,
1841 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
1842 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1843 .cra_init = safexcel_ahash_cra_init,
1844 .cra_exit = safexcel_ahash_cra_exit,
1845 .cra_module = THIS_MODULE,
1846 },
1847 },
1848 },
1849};
1850
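/*
 * CRC32: the optional "key" is the 32-bit seed (all zeroes by default).
 * The engine works on the bit-inverted CRC register, so the seed is
 * complemented when it is loaded into the request state below, and the
 * result handling path complements the final value back before it is
 * returned to the caller.
 */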
1851static int safexcel_crc32_cra_init(struct crypto_tfm *tfm)
1852{
1853 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
1854 int ret = safexcel_ahash_cra_init(tfm);
1855
1856 /* Default 'key' is all zeroes */
1857 memset(ctx->ipad, 0, sizeof(u32));
1858 return ret;
1859}
1860
1861static int safexcel_crc32_init(struct ahash_request *areq)
1862{
1863 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1864 struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1865
1866 memset(req, 0, sizeof(*req));
1867
1868 /* Start from loaded key */
1869 req->state[0] = cpu_to_le32(~ctx->ipad[0]);
1870 /* Set processed to non-zero to enable invalidation detection */
1871 req->len = sizeof(u32);
1872 req->processed = sizeof(u32);
1873
1874 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_CRC32;
1875 req->digest = CONTEXT_CONTROL_DIGEST_XCM;
1876 req->state_sz = sizeof(u32);
1877 req->block_sz = sizeof(u32);
1878
1879 return 0;
1880}
1881
1882static int safexcel_crc32_setkey(struct crypto_ahash *tfm, const u8 *key,
1883 unsigned int keylen)
1884{
1885 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
1886
1887 if (keylen != sizeof(u32)) {
1888 crypto_ahash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
1889 return -EINVAL;
1890 }
1891
1892 memcpy(ctx->ipad, key, sizeof(u32));
1893 return 0;
1894}
1895
1896static int safexcel_crc32_digest(struct ahash_request *areq)
1897{
1898 return safexcel_crc32_init(areq) ?: safexcel_ahash_finup(areq);
1899}
1900
1901struct safexcel_alg_template safexcel_alg_crc32 = {
1902 .type = SAFEXCEL_ALG_TYPE_AHASH,
1903 .algo_mask = 0,
1904 .alg.ahash = {
1905 .init = safexcel_crc32_init,
1906 .update = safexcel_ahash_update,
1907 .final = safexcel_ahash_final,
1908 .finup = safexcel_ahash_finup,
1909 .digest = safexcel_crc32_digest,
1910 .setkey = safexcel_crc32_setkey,
1911 .export = safexcel_ahash_export,
1912 .import = safexcel_ahash_import,
1913 .halg = {
1914 .digestsize = sizeof(u32),
1915 .statesize = sizeof(struct safexcel_ahash_export_state),
1916 .base = {
1917 .cra_name = "crc32",
1918 .cra_driver_name = "safexcel-crc32",
1919 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1920 .cra_flags = CRYPTO_ALG_OPTIONAL_KEY |
1921 CRYPTO_ALG_ASYNC |
1922 CRYPTO_ALG_KERN_DRIVER_ONLY,
1923 .cra_blocksize = 1,
1924 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1925 .cra_init = safexcel_crc32_cra_init,
1926 .cra_exit = safexcel_ahash_cra_exit,
1927 .cra_module = THIS_MODULE,
1928 },
1929 },
1930 },
1931};
1932
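/*
 * CBC-MAC, XCBC and CMAC all share this init: the (sub)key material staged
 * in ctx->ipad by the respective setkey is copied into the request state,
 * and len/processed are preset to one AES block so that a later key change
 * is noticed and the old context gets invalidated before reuse.
 */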
1933static int safexcel_cbcmac_init(struct ahash_request *areq)
1934{
1935 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1936 struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1937
1938 memset(req, 0, sizeof(*req));
1939
1940 /* Start from loaded keys */
1941 memcpy(req->state, ctx->ipad, ctx->key_sz);
1942 /* Set processed to non-zero to enable invalidation detection */
1943 req->len = AES_BLOCK_SIZE;
1944 req->processed = AES_BLOCK_SIZE;
1945
1946 req->digest = CONTEXT_CONTROL_DIGEST_XCM;
1947 req->state_sz = ctx->key_sz;
1948 req->block_sz = AES_BLOCK_SIZE;
1949 req->xcbcmac = true;
1950
1951 return 0;
1952}
1953
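/*
 * CBC-MAC keying: ctx->ipad holds the context image for the engine. The
 * leading two AES blocks are cleared and the raw AES key follows at
 * ipad[8] (byte offset 32), byte-swapped to big endian for the engine.
 * key_sz covers those two blocks plus the key, rounded up to the maximum
 * AES key size for the 192- and 256-bit variants.
 */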
1954static int safexcel_cbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
1955 unsigned int len)
1956{
1957 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
1958 struct crypto_aes_ctx aes;
1959 int ret, i;
1960
1961 ret = aes_expandkey(&aes, key, len);
1962 if (ret) {
1963 crypto_ahash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
1964 return ret;
1965 }
1966
1967 memset(ctx->ipad, 0, 2 * AES_BLOCK_SIZE);
1968 for (i = 0; i < len / sizeof(u32); i++)
1969 ctx->ipad[i + 8] = cpu_to_be32(aes.key_enc[i]);
1970
1971 if (len == AES_KEYSIZE_192) {
1972 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
1973 ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
1974 } else if (len == AES_KEYSIZE_256) {
1975 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
1976 ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
1977 } else {
1978 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
1979 ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
1980 }
1981 ctx->cbcmac = true;
1982
1983 memzero_explicit(&aes, sizeof(aes));
1984 return 0;
1985}
1986
1987static int safexcel_cbcmac_digest(struct ahash_request *areq)
1988{
1989 return safexcel_cbcmac_init(areq) ?: safexcel_ahash_finup(areq);
1990}
1991
1992struct safexcel_alg_template safexcel_alg_cbcmac = {
1993 .type = SAFEXCEL_ALG_TYPE_AHASH,
1994 .algo_mask = 0,
1995 .alg.ahash = {
1996 .init = safexcel_cbcmac_init,
1997 .update = safexcel_ahash_update,
1998 .final = safexcel_ahash_final,
1999 .finup = safexcel_ahash_finup,
2000 .digest = safexcel_cbcmac_digest,
2001 .setkey = safexcel_cbcmac_setkey,
2002 .export = safexcel_ahash_export,
2003 .import = safexcel_ahash_import,
2004 .halg = {
2005 .digestsize = AES_BLOCK_SIZE,
2006 .statesize = sizeof(struct safexcel_ahash_export_state),
2007 .base = {
2008 .cra_name = "cbcmac(aes)",
2009 .cra_driver_name = "safexcel-cbcmac-aes",
2010 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2011 .cra_flags = CRYPTO_ALG_ASYNC |
2012 CRYPTO_ALG_KERN_DRIVER_ONLY,
2013 .cra_blocksize = 1,
2014 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2015 .cra_init = safexcel_ahash_cra_init,
2016 .cra_exit = safexcel_ahash_cra_exit,
2017 .cra_module = THIS_MODULE,
2018 },
2019 },
2020 },
2021};
2022
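/*
 * AES-XCBC-MAC (RFC 3566) keying: the three subkeys are derived by
 * encrypting the constant blocks 0x01..01, 0x02..02 and 0x03..03 with the
 * user key. K2 and K3 are placed at the start of the context image and K1
 * at the end, all byte-swapped to big endian; ctx->kaes is left keyed with
 * K1 afterwards.
 */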
2023static int safexcel_xcbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
2024 unsigned int len)
2025{
2026 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2027 struct crypto_aes_ctx aes;
2028 u32 key_tmp[3 * AES_BLOCK_SIZE / sizeof(u32)];
2029 int ret, i;
2030
2031 ret = aes_expandkey(&aes, key, len);
2032 if (ret) {
2033 crypto_ahash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
2034 return ret;
2035 }
2036
2037 /* precompute the XCBC key material */
2038 crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
2039 crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
2040 CRYPTO_TFM_REQ_MASK);
2041 ret = crypto_cipher_setkey(ctx->kaes, key, len);
2042 crypto_ahash_set_flags(tfm, crypto_cipher_get_flags(ctx->kaes) &
2043 CRYPTO_TFM_RES_MASK);
2044 if (ret)
2045 return ret;
2046
2047 crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
2048 "\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1");
2049 crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp,
2050 "\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2");
2051 crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + AES_BLOCK_SIZE,
2052 "\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3");
2053 for (i = 0; i < 3 * AES_BLOCK_SIZE / sizeof(u32); i++)
2054 ctx->ipad[i] = cpu_to_be32(key_tmp[i]);
2055
2056 crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
2057 crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
2058 CRYPTO_TFM_REQ_MASK);
2059 ret = crypto_cipher_setkey(ctx->kaes,
2060 (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
2061 AES_MIN_KEY_SIZE);
2062 crypto_ahash_set_flags(tfm, crypto_cipher_get_flags(ctx->kaes) &
2063 CRYPTO_TFM_RES_MASK);
2064 if (ret)
2065 return ret;
2066
2067 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2068 ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2069 ctx->cbcmac = false;
2070
2071 memzero_explicit(&aes, sizeof(aes));
2072 return 0;
2073}
2074
2075static int safexcel_xcbcmac_cra_init(struct crypto_tfm *tfm)
2076{
2077 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2078
2079 safexcel_ahash_cra_init(tfm);
2080 ctx->kaes = crypto_alloc_cipher("aes", 0, 0);
2081 if (IS_ERR(ctx->kaes))
2082 return PTR_ERR(ctx->kaes);
2083
2084 return 0;
2085}
2086
2087static void safexcel_xcbcmac_cra_exit(struct crypto_tfm *tfm)
2088{
2089 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2090
2091 crypto_free_cipher(ctx->kaes);
2092 safexcel_ahash_cra_exit(tfm);
2093}
2094
2095struct safexcel_alg_template safexcel_alg_xcbcmac = {
2096 .type = SAFEXCEL_ALG_TYPE_AHASH,
2097 .algo_mask = 0,
2098 .alg.ahash = {
2099 .init = safexcel_cbcmac_init,
2100 .update = safexcel_ahash_update,
2101 .final = safexcel_ahash_final,
2102 .finup = safexcel_ahash_finup,
2103 .digest = safexcel_cbcmac_digest,
2104 .setkey = safexcel_xcbcmac_setkey,
2105 .export = safexcel_ahash_export,
2106 .import = safexcel_ahash_import,
2107 .halg = {
2108 .digestsize = AES_BLOCK_SIZE,
2109 .statesize = sizeof(struct safexcel_ahash_export_state),
2110 .base = {
2111 .cra_name = "xcbc(aes)",
2112 .cra_driver_name = "safexcel-xcbc-aes",
2113 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2114 .cra_flags = CRYPTO_ALG_ASYNC |
2115 CRYPTO_ALG_KERN_DRIVER_ONLY,
2116 .cra_blocksize = AES_BLOCK_SIZE,
2117 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2118 .cra_init = safexcel_xcbcmac_cra_init,
2119 .cra_exit = safexcel_xcbcmac_cra_exit,
2120 .cra_module = THIS_MODULE,
2121 },
2122 },
2123 },
2124};
2125
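/*
 * AES-CMAC (RFC 4493) keying: L = E(K, 0^128) is computed with the software
 * cipher, then the subkeys are derived as K1 = L*x and K2 = L*x^2 in
 * GF(2^128) with the reduction polynomial x^128 + x^7 + x^2 + x + 1 (the
 * 0x87 mask below). The two subkeys occupy the first two AES blocks of
 * ctx->ipad and the raw AES key follows at ipad[8], mirroring the XCBC
 * layout above.
 */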
2126static int safexcel_cmac_setkey(struct crypto_ahash *tfm, const u8 *key,
2127 unsigned int len)
2128{
2129 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2130 struct crypto_aes_ctx aes;
2131 __be64 consts[4];
2132 u64 _const[2];
2133 u8 msb_mask, gfmask;
2134 int ret, i;
2135
2136 ret = aes_expandkey(&aes, key, len);
2137 if (ret) {
2138 crypto_ahash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
2139 return ret;
2140 }
2141
2142 for (i = 0; i < len / sizeof(u32); i++)
2143 ctx->ipad[i + 8] = cpu_to_be32(aes.key_enc[i]);
2144
2145 /* precompute the CMAC key material */
2146 crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
2147 crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
2148 CRYPTO_TFM_REQ_MASK);
2149 ret = crypto_cipher_setkey(ctx->kaes, key, len);
2150 crypto_ahash_set_flags(tfm, crypto_cipher_get_flags(ctx->kaes) &
2151 CRYPTO_TFM_RES_MASK);
2152 if (ret)
2153 return ret;
2154
2155 /* code below borrowed from crypto/cmac.c */
2156 /* encrypt the zero block */
2157 memset(consts, 0, AES_BLOCK_SIZE);
2158 crypto_cipher_encrypt_one(ctx->kaes, (u8 *)consts, (u8 *)consts);
2159
2160 gfmask = 0x87;
2161 _const[0] = be64_to_cpu(consts[1]);
2162 _const[1] = be64_to_cpu(consts[0]);
2163
2164 /* gf(2^128) multiply zero-ciphertext with u and u^2 */
2165 for (i = 0; i < 4; i += 2) {
2166 msb_mask = ((s64)_const[1] >> 63) & gfmask;
2167 _const[1] = (_const[1] << 1) | (_const[0] >> 63);
2168 _const[0] = (_const[0] << 1) ^ msb_mask;
2169
2170 consts[i + 0] = cpu_to_be64(_const[1]);
2171 consts[i + 1] = cpu_to_be64(_const[0]);
2172 }
2173 /* end of code borrowed from crypto/cmac.c */
2174
2175 for (i = 0; i < 2 * AES_BLOCK_SIZE / sizeof(u32); i++)
2176 ctx->ipad[i] = cpu_to_be32(((u32 *)consts)[i]);
2177
2178 if (len == AES_KEYSIZE_192) {
2179 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
2180 ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2181 } else if (len == AES_KEYSIZE_256) {
2182 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
2183 ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2184 } else {
2185 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2186 ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2187 }
2188 ctx->cbcmac = false;
2189
2190 memzero_explicit(&aes, sizeof(aes));
2191 return 0;
2192}
2193
2194struct safexcel_alg_template safexcel_alg_cmac = {
2195 .type = SAFEXCEL_ALG_TYPE_AHASH,
2196 .algo_mask = 0,
2197 .alg.ahash = {
2198 .init = safexcel_cbcmac_init,
2199 .update = safexcel_ahash_update,
2200 .final = safexcel_ahash_final,
2201 .finup = safexcel_ahash_finup,
2202 .digest = safexcel_cbcmac_digest,
2203 .setkey = safexcel_cmac_setkey,
2204 .export = safexcel_ahash_export,
2205 .import = safexcel_ahash_import,
2206 .halg = {
2207 .digestsize = AES_BLOCK_SIZE,
2208 .statesize = sizeof(struct safexcel_ahash_export_state),
2209 .base = {
2210 .cra_name = "cmac(aes)",
2211 .cra_driver_name = "safexcel-cmac-aes",
2212 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2213 .cra_flags = CRYPTO_ALG_ASYNC |
2214 CRYPTO_ALG_KERN_DRIVER_ONLY,
2215 .cra_blocksize = AES_BLOCK_SIZE,
2216 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2217 .cra_init = safexcel_xcbcmac_cra_init,
2218 .cra_exit = safexcel_xcbcmac_cra_exit,
2219 .cra_module = THIS_MODULE,
2220 },
2221 },
2222 },
2223};