// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2017 Marvell
 *
 * Antoine Tenart <antoine.tenart@free-electrons.com>
 */

#include <crypto/aes.h>
#include <crypto/hmac.h>
#include <crypto/md5.h>
#include <crypto/sha.h>
#include <crypto/sha3.h>
#include <crypto/skcipher.h>
#include <crypto/sm3.h>
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>

#include "safexcel.h"
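
/*
 * Driver-internal state: safexcel_ahash_ctx is the per-transform context
 * (algorithm selection, HMAC ipad/opad precomputes and fallback handles),
 * while safexcel_ahash_req carries the per-request state that is restored
 * into, and saved from, the engine's context record across partial updates.
 */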
struct safexcel_ahash_ctx {
        struct safexcel_context base;
        struct safexcel_crypto_priv *priv;

        u32 alg;
        u8 key_sz;
        bool cbcmac;
        bool do_fallback;
        bool fb_init_done;
        bool fb_do_setkey;

        __le32 ipad[SHA3_512_BLOCK_SIZE / sizeof(__le32)];
        __le32 opad[SHA3_512_BLOCK_SIZE / sizeof(__le32)];

        struct crypto_cipher *kaes;
        struct crypto_ahash *fback;
        struct crypto_shash *shpre;
        struct shash_desc *shdesc;
};

struct safexcel_ahash_req {
        bool last_req;
        bool finish;
        bool hmac;
        bool needs_inv;
        bool hmac_zlen;
        bool len_is_le;
        bool not_first;
        bool xcbcmac;

        int nents;
        dma_addr_t result_dma;

        u32 digest;

        u8 state_sz;    /* expected state size, only set once */
        u8 block_sz;    /* block size, only set once */
        u8 digest_sz;   /* output digest size, only set once */
        __le32 state[SHA3_512_BLOCK_SIZE /
                     sizeof(__le32)] __aligned(sizeof(__le32));

        u64 len;
        u64 processed;

        u8 cache[HASH_CACHE_SIZE] __aligned(sizeof(u32));
        dma_addr_t cache_dma;
        unsigned int cache_sz;

        u8 cache_next[HASH_CACHE_SIZE] __aligned(sizeof(u32));
};

static inline u64 safexcel_queued_len(struct safexcel_ahash_req *req)
{
        return req->len - req->processed;
}
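
/*
 * Build the EIP197 instruction token for a hash operation: token[0] hashes
 * the input data, token[1] (only for CBC-MAC with a partial final block)
 * pads the input out to a full 16-byte AES block, and token[2] inserts the
 * resulting digest into the output packet. Unused slots become no-ops.
 */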
static void safexcel_hash_token(struct safexcel_command_desc *cdesc,
                                u32 input_length, u32 result_length,
                                bool cbcmac)
{
        struct safexcel_token *token =
                (struct safexcel_token *)cdesc->control_data.token;

        token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
        token[0].packet_length = input_length;
        token[0].instructions = EIP197_TOKEN_INS_TYPE_HASH;

        input_length &= 15;
        if (unlikely(cbcmac && input_length)) {
                token[0].stat = 0;
                token[1].opcode = EIP197_TOKEN_OPCODE_INSERT;
                token[1].packet_length = 16 - input_length;
                token[1].stat = EIP197_TOKEN_STAT_LAST_HASH;
                token[1].instructions = EIP197_TOKEN_INS_TYPE_HASH;
        } else {
                token[0].stat = EIP197_TOKEN_STAT_LAST_HASH;
                eip197_noop_token(&token[1]);
        }

        token[2].opcode = EIP197_TOKEN_OPCODE_INSERT;
        token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
                        EIP197_TOKEN_STAT_LAST_PACKET;
        token[2].packet_length = result_length;
        token[2].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
                                EIP197_TOKEN_INS_INSERT_HASH_DIGEST;

        eip197_noop_token(&token[3]);
}
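
/*
 * Fill in the control words of the first command descriptor: algorithm,
 * digest type and context size. For continued or finalized operations it
 * also restores the intermediate digest (and, where needed, the block
 * counter) from req->state into the context record.
 */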
static void safexcel_context_control(struct safexcel_ahash_ctx *ctx,
                                     struct safexcel_ahash_req *req,
                                     struct safexcel_command_desc *cdesc)
{
        struct safexcel_crypto_priv *priv = ctx->priv;
        u64 count = 0;

        cdesc->control_data.control0 = ctx->alg;
        cdesc->control_data.control1 = 0;

        /*
         * Copy the input digest if needed, and setup the context
         * fields. Do this now as we need it to setup the first command
         * descriptor.
         */
        if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM)) {
                if (req->xcbcmac)
                        memcpy(ctx->base.ctxr->data, ctx->ipad, ctx->key_sz);
                else
                        memcpy(ctx->base.ctxr->data, req->state, req->state_sz);

                if (!req->finish && req->xcbcmac)
                        cdesc->control_data.control0 |=
                                CONTEXT_CONTROL_DIGEST_XCM |
                                CONTEXT_CONTROL_TYPE_HASH_OUT |
                                CONTEXT_CONTROL_NO_FINISH_HASH |
                                CONTEXT_CONTROL_SIZE(req->state_sz /
                                                     sizeof(u32));
                else
                        cdesc->control_data.control0 |=
                                CONTEXT_CONTROL_DIGEST_XCM |
                                CONTEXT_CONTROL_TYPE_HASH_OUT |
                                CONTEXT_CONTROL_SIZE(req->state_sz /
                                                     sizeof(u32));
                return;
        } else if (!req->processed) {
                /* First - and possibly only - block of basic hash only */
                if (req->finish)
                        cdesc->control_data.control0 |= req->digest |
                                CONTEXT_CONTROL_TYPE_HASH_OUT |
                                CONTEXT_CONTROL_RESTART_HASH |
                                /* ensure it's not 0! */
                                CONTEXT_CONTROL_SIZE(1);
                else
                        cdesc->control_data.control0 |= req->digest |
                                CONTEXT_CONTROL_TYPE_HASH_OUT |
                                CONTEXT_CONTROL_RESTART_HASH |
                                CONTEXT_CONTROL_NO_FINISH_HASH |
                                /* ensure it's not 0! */
                                CONTEXT_CONTROL_SIZE(1);
                return;
        }

        /* Hash continuation or HMAC, setup (inner) digest from state */
        memcpy(ctx->base.ctxr->data, req->state, req->state_sz);

        if (req->finish) {
                /* Compute digest count for hash/HMAC finish operations */
                if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
                    req->hmac_zlen || (req->processed != req->block_sz)) {
                        count = req->processed / EIP197_COUNTER_BLOCK_SIZE;

                        /* This is a hardware limitation, as the
                         * counter must fit into a u32. This represents
                         * a fairly big amount of input data, so we
                         * shouldn't see this.
                         */
                        if (unlikely(count & 0xffffffff00000000ULL)) {
                                dev_warn(priv->dev,
                                         "Input data is too big\n");
                                return;
                        }
                }

                if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
                    /* Special case: zero length HMAC */
                    req->hmac_zlen ||
                    /* PE HW < 4.4 cannot do HMAC continue, fake using hash */
                    (req->processed != req->block_sz)) {
                        /* Basic hash continue operation, need digest + cnt */
                        cdesc->control_data.control0 |=
                                CONTEXT_CONTROL_SIZE((req->state_sz >> 2) + 1) |
                                CONTEXT_CONTROL_TYPE_HASH_OUT |
                                CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
                        /* For zero-len HMAC, don't finalize, already padded! */
                        if (req->hmac_zlen)
                                cdesc->control_data.control0 |=
                                        CONTEXT_CONTROL_NO_FINISH_HASH;
                        cdesc->control_data.control1 |=
                                CONTEXT_CONTROL_DIGEST_CNT;
                        ctx->base.ctxr->data[req->state_sz >> 2] =
                                cpu_to_le32(count);
                        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;

                        /* Clear zero-length HMAC flag for next operation! */
                        req->hmac_zlen = false;
                } else { /* HMAC */
                        /* Need outer digest for HMAC finalization */
                        memcpy(ctx->base.ctxr->data + (req->state_sz >> 2),
                               ctx->opad, req->state_sz);

                        /* Single pass HMAC - no digest count */
                        cdesc->control_data.control0 |=
                                CONTEXT_CONTROL_SIZE(req->state_sz >> 1) |
                                CONTEXT_CONTROL_TYPE_HASH_OUT |
                                CONTEXT_CONTROL_DIGEST_HMAC;
                }
        } else { /* Hash continuation, do not finish yet */
                cdesc->control_data.control0 |=
                        CONTEXT_CONTROL_SIZE(req->state_sz >> 2) |
                        CONTEXT_CONTROL_DIGEST_PRECOMPUTED |
                        CONTEXT_CONTROL_TYPE_HASH_OUT |
                        CONTEXT_CONTROL_NO_FINISH_HASH;
        }
}

static int safexcel_ahash_enqueue(struct ahash_request *areq);

static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv,
                                      int ring,
                                      struct crypto_async_request *async,
                                      bool *should_complete, int *ret)
{
        struct safexcel_result_desc *rdesc;
        struct ahash_request *areq = ahash_request_cast(async);
        struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
        struct safexcel_ahash_req *sreq = ahash_request_ctx(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
        u64 cache_len;

        *ret = 0;

        rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
        if (IS_ERR(rdesc)) {
                dev_err(priv->dev,
                        "hash: result: could not retrieve the result descriptor\n");
                *ret = PTR_ERR(rdesc);
        } else {
                *ret = safexcel_rdesc_check_errors(priv, rdesc);
        }

        safexcel_complete(priv, ring);

        if (sreq->nents) {
                dma_unmap_sg(priv->dev, areq->src, sreq->nents, DMA_TO_DEVICE);
                sreq->nents = 0;
        }

        if (sreq->result_dma) {
                dma_unmap_single(priv->dev, sreq->result_dma, sreq->digest_sz,
                                 DMA_FROM_DEVICE);
                sreq->result_dma = 0;
        }

        if (sreq->cache_dma) {
                dma_unmap_single(priv->dev, sreq->cache_dma, sreq->cache_sz,
                                 DMA_TO_DEVICE);
                sreq->cache_dma = 0;
                sreq->cache_sz = 0;
        }

        if (sreq->finish) {
                if (sreq->hmac &&
                    (sreq->digest != CONTEXT_CONTROL_DIGEST_HMAC)) {
                        /* Faking HMAC using hash - need to do outer hash */
                        memcpy(sreq->cache, sreq->state,
                               crypto_ahash_digestsize(ahash));

                        memcpy(sreq->state, ctx->opad, sreq->digest_sz);

                        sreq->len = sreq->block_sz +
                                    crypto_ahash_digestsize(ahash);
                        sreq->processed = sreq->block_sz;
                        sreq->hmac = 0;

                        if (priv->flags & EIP197_TRC_CACHE)
                                ctx->base.needs_inv = true;
                        areq->nbytes = 0;
                        safexcel_ahash_enqueue(areq);

                        *should_complete = false; /* Not done yet */
                        return 1;
                }

                if (unlikely(sreq->digest == CONTEXT_CONTROL_DIGEST_XCM &&
                             ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_CRC32)) {
                        /* Undo final XOR with 0xffffffff ... */
                        *(__le32 *)areq->result = ~sreq->state[0];
                } else {
                        memcpy(areq->result, sreq->state,
                               crypto_ahash_digestsize(ahash));
                }
        }

        cache_len = safexcel_queued_len(sreq);
        if (cache_len)
                memcpy(sreq->cache, sreq->cache_next, cache_len);

        *should_complete = true;

        return 1;
}
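
/*
 * Main send path: map the cached partial block and the request
 * scatterlist, emit one command descriptor per data segment plus a single
 * result descriptor, and write everything to the ring. Trailing data that
 * does not fill a whole hash block is copied aside into cache_next for
 * the next invocation.
 */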
static int safexcel_ahash_send_req(struct crypto_async_request *async, int ring,
                                   int *commands, int *results)
{
        struct ahash_request *areq = ahash_request_cast(async);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_crypto_priv *priv = ctx->priv;
        struct safexcel_command_desc *cdesc, *first_cdesc = NULL;
        struct safexcel_result_desc *rdesc;
        struct scatterlist *sg;
        struct safexcel_token *dmmy;
        int i, extra = 0, n_cdesc = 0, ret = 0, cache_len, skip = 0;
        u64 queued, len;

        queued = safexcel_queued_len(req);
        if (queued <= HASH_CACHE_SIZE)
                cache_len = queued;
        else
                cache_len = queued - areq->nbytes;

        if (!req->finish && !req->last_req) {
                /* If this is not the last request and the queued data does not
                 * fit into full cache blocks, cache it for the next send call.
                 */
                extra = queued & (HASH_CACHE_SIZE - 1);

                /* If this is not the last request and the queued data
                 * is a multiple of a block, cache the last one for now.
                 */
                if (!extra)
                        extra = HASH_CACHE_SIZE;

                sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
                                   req->cache_next, extra,
                                   areq->nbytes - extra);

                queued -= extra;

                if (!queued) {
                        *commands = 0;
                        *results = 0;
                        return 0;
                }

                extra = 0;
        }

        if (unlikely(req->xcbcmac && req->processed > AES_BLOCK_SIZE)) {
                if (unlikely(cache_len < AES_BLOCK_SIZE)) {
                        /*
                         * Cache contains less than one full block:
                         * pad it out to a full block.
                         */
                        extra = AES_BLOCK_SIZE - cache_len;
                        if (queued > cache_len) {
                                /* More data follows: borrow bytes */
                                u64 tmp = queued - cache_len;

                                skip = min_t(u64, tmp, extra);
                                sg_pcopy_to_buffer(areq->src,
                                                   sg_nents(areq->src),
                                                   req->cache + cache_len,
                                                   skip, 0);
                        }
                        extra -= skip;
                        memset(req->cache + cache_len + skip, 0, extra);
                        if (!ctx->cbcmac && extra) {
                                /* 10- padding for XCBCMAC & CMAC */
                                req->cache[cache_len + skip] = 0x80;
                                /* HW will use K2 instead of K3 - compensate! */
                                for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
                                        ((__be32 *)req->cache)[i] ^=
                                                cpu_to_be32(le32_to_cpu(
                                                        ctx->ipad[i] ^ ctx->ipad[i + 4]));
                        }
                        cache_len = AES_BLOCK_SIZE;
                        queued = queued + extra;
                }

                /* XCBC continue: XOR previous result into 1st word */
                crypto_xor(req->cache, (const u8 *)req->state, AES_BLOCK_SIZE);
        }

        len = queued;
        /* Add a command descriptor for the cached data, if any */
        if (cache_len) {
                req->cache_dma = dma_map_single(priv->dev, req->cache,
                                                cache_len, DMA_TO_DEVICE);
                if (dma_mapping_error(priv->dev, req->cache_dma))
                        return -EINVAL;

                req->cache_sz = cache_len;
                first_cdesc = safexcel_add_cdesc(priv, ring, 1,
                                                 (cache_len == len),
                                                 req->cache_dma, cache_len,
                                                 len, ctx->base.ctxr_dma,
                                                 &dmmy);
                if (IS_ERR(first_cdesc)) {
                        ret = PTR_ERR(first_cdesc);
                        goto unmap_cache;
                }
                n_cdesc++;

                queued -= cache_len;
                if (!queued)
                        goto send_command;
        }

        /* Now handle the current ahash request buffer(s) */
        req->nents = dma_map_sg(priv->dev, areq->src,
                                sg_nents_for_len(areq->src,
                                                 areq->nbytes),
                                DMA_TO_DEVICE);
        if (!req->nents) {
                ret = -ENOMEM;
                goto cdesc_rollback;
        }

        for_each_sg(areq->src, sg, req->nents, i) {
                int sglen = sg_dma_len(sg);

                if (unlikely(sglen <= skip)) {
                        skip -= sglen;
                        continue;
                }

                /* Do not overflow the request */
                if ((queued + skip) <= sglen)
                        sglen = queued;
                else
                        sglen -= skip;

                cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
                                           !(queued - sglen),
                                           sg_dma_address(sg) + skip, sglen,
                                           len, ctx->base.ctxr_dma, &dmmy);
                if (IS_ERR(cdesc)) {
                        ret = PTR_ERR(cdesc);
                        goto unmap_sg;
                }

                if (!n_cdesc)
                        first_cdesc = cdesc;
                n_cdesc++;

                queued -= sglen;
                if (!queued)
                        break;
                skip = 0;
        }

send_command:
        /* Setup the context options */
        safexcel_context_control(ctx, req, first_cdesc);

        /* Add the token */
        safexcel_hash_token(first_cdesc, len, req->digest_sz, ctx->cbcmac);

        req->result_dma = dma_map_single(priv->dev, req->state, req->digest_sz,
                                         DMA_FROM_DEVICE);
        if (dma_mapping_error(priv->dev, req->result_dma)) {
                ret = -EINVAL;
                goto unmap_sg;
        }

        /* Add a result descriptor */
        rdesc = safexcel_add_rdesc(priv, ring, 1, 1, req->result_dma,
                                   req->digest_sz);
        if (IS_ERR(rdesc)) {
                ret = PTR_ERR(rdesc);
                goto unmap_result;
        }

        safexcel_rdr_req_set(priv, ring, rdesc, &areq->base);

        req->processed += len - extra;

        *commands = n_cdesc;
        *results = 1;
        return 0;

unmap_result:
        dma_unmap_single(priv->dev, req->result_dma, req->digest_sz,
                         DMA_FROM_DEVICE);
unmap_sg:
        if (req->nents) {
                dma_unmap_sg(priv->dev, areq->src, req->nents, DMA_TO_DEVICE);
                req->nents = 0;
        }
cdesc_rollback:
        for (i = 0; i < n_cdesc; i++)
                safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
unmap_cache:
        if (req->cache_dma) {
                dma_unmap_single(priv->dev, req->cache_dma, req->cache_sz,
                                 DMA_TO_DEVICE);
                req->cache_dma = 0;
                req->cache_sz = 0;
        }

        return ret;
}
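
/*
 * Completion handler for a context invalidation request: either free the
 * DMA context record (the transform is exiting), or migrate the transform
 * to a freshly selected ring and re-enqueue the original request.
 */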
static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
                                      int ring,
                                      struct crypto_async_request *async,
                                      bool *should_complete, int *ret)
{
        struct safexcel_result_desc *rdesc;
        struct ahash_request *areq = ahash_request_cast(async);
        struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
        int enq_ret;

        *ret = 0;

        rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
        if (IS_ERR(rdesc)) {
                dev_err(priv->dev,
                        "hash: invalidate: could not retrieve the result descriptor\n");
                *ret = PTR_ERR(rdesc);
        } else {
                *ret = safexcel_rdesc_check_errors(priv, rdesc);
        }

        safexcel_complete(priv, ring);

        if (ctx->base.exit_inv) {
                dma_pool_free(priv->context_pool, ctx->base.ctxr,
                              ctx->base.ctxr_dma);

                *should_complete = true;
                return 1;
        }

        ring = safexcel_select_ring(priv);
        ctx->base.ring = ring;

        spin_lock_bh(&priv->ring[ring].queue_lock);
        enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, async);
        spin_unlock_bh(&priv->ring[ring].queue_lock);

        if (enq_ret != -EINPROGRESS)
                *ret = enq_ret;

        queue_work(priv->ring[ring].workqueue,
                   &priv->ring[ring].work_data.work);

        *should_complete = false;

        return 1;
}

static int safexcel_handle_result(struct safexcel_crypto_priv *priv, int ring,
                                  struct crypto_async_request *async,
                                  bool *should_complete, int *ret)
{
        struct ahash_request *areq = ahash_request_cast(async);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        int err;

        BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && req->needs_inv);

        if (req->needs_inv) {
                req->needs_inv = false;
                err = safexcel_handle_inv_result(priv, ring, async,
                                                 should_complete, ret);
        } else {
                err = safexcel_handle_req_result(priv, ring, async,
                                                 should_complete, ret);
        }

        return err;
}
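
/*
 * Emit the single command/result descriptor pair that flushes this
 * transform's context record out of the engine's record cache.
 */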
static int safexcel_ahash_send_inv(struct crypto_async_request *async,
                                   int ring, int *commands, int *results)
{
        struct ahash_request *areq = ahash_request_cast(async);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        int ret;

        ret = safexcel_invalidate_cache(async, ctx->priv,
                                        ctx->base.ctxr_dma, ring);
        if (unlikely(ret))
                return ret;

        *commands = 1;
        *results = 1;

        return 0;
}

static int safexcel_ahash_send(struct crypto_async_request *async,
                               int ring, int *commands, int *results)
{
        struct ahash_request *areq = ahash_request_cast(async);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        int ret;

        if (req->needs_inv)
                ret = safexcel_ahash_send_inv(async, ring, commands, results);
        else
                ret = safexcel_ahash_send_req(async, ring, commands, results);

        return ret;
}

static int safexcel_ahash_exit_inv(struct crypto_tfm *tfm)
{
        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_crypto_priv *priv = ctx->priv;
        EIP197_REQUEST_ON_STACK(req, ahash, EIP197_AHASH_REQ_SIZE);
        struct safexcel_ahash_req *rctx = ahash_request_ctx(req);
        struct safexcel_inv_result result = {};
        int ring = ctx->base.ring;

        memset(req, 0, EIP197_AHASH_REQ_SIZE);

        /* create invalidation request */
        init_completion(&result.completion);
        ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                   safexcel_inv_complete, &result);

        ahash_request_set_tfm(req, __crypto_ahash_cast(tfm));
        ctx = crypto_tfm_ctx(req->base.tfm);
        ctx->base.exit_inv = true;
        rctx->needs_inv = true;

        spin_lock_bh(&priv->ring[ring].queue_lock);
        crypto_enqueue_request(&priv->ring[ring].queue, &req->base);
        spin_unlock_bh(&priv->ring[ring].queue_lock);

        queue_work(priv->ring[ring].workqueue,
                   &priv->ring[ring].work_data.work);

        wait_for_completion(&result.completion);

        if (result.error) {
                dev_warn(priv->dev, "hash: completion error (%d)\n",
                         result.error);
                return result.error;
        }

        return 0;
}

/* safexcel_ahash_cache: cache data until at least one request can be sent to
 * the engine, i.e. when there is at least one full block of data in the pipe.
 */
static int safexcel_ahash_cache(struct ahash_request *areq)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        u64 cache_len;

        /* cache_len: everything accepted by the driver but not sent yet,
         * tot sz handled by update() - last req sz - tot sz handled by send()
         */
        cache_len = safexcel_queued_len(req);

        /*
         * In case there aren't enough bytes to proceed (less than a
         * block size), cache the data until we have enough.
         */
        if (cache_len + areq->nbytes <= HASH_CACHE_SIZE) {
                sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
                                   req->cache + cache_len,
                                   areq->nbytes, 0);
                return 0;
        }

        /* We couldn't cache all the data */
        return -E2BIG;
}
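
/*
 * Queue a request on its assigned ring. The DMA context record is
 * allocated on first use; if the record may be cached by the engine and
 * its digest material no longer matches the request, an invalidation is
 * scheduled first via req->needs_inv.
 */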
static int safexcel_ahash_enqueue(struct ahash_request *areq)
{
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        struct safexcel_crypto_priv *priv = ctx->priv;
        int ret, ring;

        req->needs_inv = false;

        if (ctx->base.ctxr) {
                if (priv->flags & EIP197_TRC_CACHE && !ctx->base.needs_inv &&
                    /* invalidate for *any* non-XCBC continuation */
                    ((req->not_first && !req->xcbcmac) ||
                     /* invalidate if (i)digest changed */
                     memcmp(ctx->base.ctxr->data, req->state, req->state_sz) ||
                     /* invalidate for HMAC finish with odigest changed */
                     (req->finish && req->hmac &&
                      memcmp(ctx->base.ctxr->data + (req->state_sz >> 2),
                             ctx->opad, req->state_sz))))
                        /*
                         * We're still setting needs_inv here, even though it is
                         * cleared right away, because the needs_inv flag can be
                         * set in other functions and we want to keep the same
                         * logic.
                         */
                        ctx->base.needs_inv = true;

                if (ctx->base.needs_inv) {
                        ctx->base.needs_inv = false;
                        req->needs_inv = true;
                }
        } else {
                ctx->base.ring = safexcel_select_ring(priv);
                ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
                                                 EIP197_GFP_FLAGS(areq->base),
                                                 &ctx->base.ctxr_dma);
                if (!ctx->base.ctxr)
                        return -ENOMEM;
        }
        req->not_first = true;

        ring = ctx->base.ring;

        spin_lock_bh(&priv->ring[ring].queue_lock);
        ret = crypto_enqueue_request(&priv->ring[ring].queue, &areq->base);
        spin_unlock_bh(&priv->ring[ring].queue_lock);

        queue_work(priv->ring[ring].workqueue,
                   &priv->ring[ring].work_data.work);

        return ret;
}

static int safexcel_ahash_update(struct ahash_request *areq)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        int ret;

        /* If the request is 0 length, do nothing */
        if (!areq->nbytes)
                return 0;

        /* Add request to the cache if it fits */
        ret = safexcel_ahash_cache(areq);

        /* Update total request length */
        req->len += areq->nbytes;

        /* If not all data could fit into the cache, go process the excess.
         * Also go process immediately for an HMAC IV precompute, which
         * will never be finished at all, but needs to be processed anyway.
         */
        if ((ret && !req->finish) || req->last_req)
                return safexcel_ahash_enqueue(areq);

        return 0;
}
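
/*
 * Finalize a hash. The engine cannot hash zero-length input, so all of
 * the empty-message cases (plain hash, CRC32, CBC-MAC, XCBC/CMAC and
 * HMAC) are special-cased here, using either precomputed constants or a
 * manually constructed final padding block.
 */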
static int safexcel_ahash_final(struct ahash_request *areq)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));

        req->finish = true;

        if (unlikely(!req->len && !areq->nbytes)) {
                /*
                 * If we have an overall 0 length *hash* request:
                 * The HW cannot do 0 length hash, so we provide the correct
                 * result directly here.
                 */
                if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5)
                        memcpy(areq->result, md5_zero_message_hash,
                               MD5_DIGEST_SIZE);
                else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA1)
                        memcpy(areq->result, sha1_zero_message_hash,
                               SHA1_DIGEST_SIZE);
                else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA224)
                        memcpy(areq->result, sha224_zero_message_hash,
                               SHA224_DIGEST_SIZE);
                else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA256)
                        memcpy(areq->result, sha256_zero_message_hash,
                               SHA256_DIGEST_SIZE);
                else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA384)
                        memcpy(areq->result, sha384_zero_message_hash,
                               SHA384_DIGEST_SIZE);
                else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA512)
                        memcpy(areq->result, sha512_zero_message_hash,
                               SHA512_DIGEST_SIZE);
                else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SM3) {
                        memcpy(areq->result,
                               EIP197_SM3_ZEROM_HASH, SM3_DIGEST_SIZE);
                }

                return 0;
        } else if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM &&
                            ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5 &&
                            req->len == sizeof(u32) && !areq->nbytes)) {
                /* Zero length CRC32 */
                memcpy(areq->result, ctx->ipad, sizeof(u32));
                return 0;
        } else if (unlikely(ctx->cbcmac && req->len == AES_BLOCK_SIZE &&
                            !areq->nbytes)) {
                /* Zero length CBC MAC */
                memset(areq->result, 0, AES_BLOCK_SIZE);
                return 0;
        } else if (unlikely(req->xcbcmac && req->len == AES_BLOCK_SIZE &&
                            !areq->nbytes)) {
                /* Zero length (X)CBC/CMAC */
                int i;

                for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
                        ((__be32 *)areq->result)[i] =
                                cpu_to_be32(le32_to_cpu(ctx->ipad[i + 4])); /* K3 */
                areq->result[0] ^= 0x80; /* 10- padding */
                crypto_cipher_encrypt_one(ctx->kaes, areq->result, areq->result);
                return 0;
        } else if (unlikely(req->hmac &&
                            (req->len == req->block_sz) &&
                            !areq->nbytes)) {
                /*
                 * If we have an overall 0 length *HMAC* request:
                 * For HMAC, we need to finalize the inner digest
                 * and then perform the outer hash.
                 */

                /* generate pad block in the cache */
                /* start with a hash block of all zeroes */
                memset(req->cache, 0, req->block_sz);
                /* set the first byte to 0x80 to 'append a 1 bit' */
                req->cache[0] = 0x80;
                /* add the length in bits in the last 2 bytes */
                if (req->len_is_le) {
                        /* Little endian length word (e.g. MD5) */
                        req->cache[req->block_sz - 8] = (req->block_sz << 3) &
                                                        255;
                        req->cache[req->block_sz - 7] = (req->block_sz >> 5);
                } else {
                        /* Big endian length word (e.g. any SHA) */
                        req->cache[req->block_sz - 2] = (req->block_sz >> 5);
                        req->cache[req->block_sz - 1] = (req->block_sz << 3) &
                                                        255;
                }

                req->len += req->block_sz; /* plus 1 hash block */

                /* Set special zero-length HMAC flag */
                req->hmac_zlen = true;

                /* Finalize HMAC */
                req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
        } else if (req->hmac) {
                /* Finalize HMAC */
                req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
        }

        return safexcel_ahash_enqueue(areq);
}

static int safexcel_ahash_finup(struct ahash_request *areq)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        req->finish = true;

        safexcel_ahash_update(areq);
        return safexcel_ahash_final(areq);
}

static int safexcel_ahash_export(struct ahash_request *areq, void *out)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        struct safexcel_ahash_export_state *export = out;

        export->len = req->len;
        export->processed = req->processed;

        export->digest = req->digest;

        memcpy(export->state, req->state, req->state_sz);
        memcpy(export->cache, req->cache, HASH_CACHE_SIZE);

        return 0;
}

static int safexcel_ahash_import(struct ahash_request *areq, const void *in)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        const struct safexcel_ahash_export_state *export = in;
        int ret;

        ret = crypto_ahash_init(areq);
        if (ret)
                return ret;

        req->len = export->len;
        req->processed = export->processed;

        req->digest = export->digest;

        memcpy(req->cache, export->cache, HASH_CACHE_SIZE);
        memcpy(req->state, export->state, req->state_sz);

        return 0;
}

static int safexcel_ahash_cra_init(struct crypto_tfm *tfm)
{
        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_alg_template *tmpl =
                container_of(__crypto_ahash_alg(tfm->__crt_alg),
                             struct safexcel_alg_template, alg.ahash);

        ctx->priv = tmpl->priv;
        ctx->base.send = safexcel_ahash_send;
        ctx->base.handle_result = safexcel_handle_result;
        ctx->fb_do_setkey = false;

        crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
                                 sizeof(struct safexcel_ahash_req));
        return 0;
}

static int safexcel_sha1_init(struct ahash_request *areq)
{
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
        req->state_sz = SHA1_DIGEST_SIZE;
        req->digest_sz = SHA1_DIGEST_SIZE;
        req->block_sz = SHA1_BLOCK_SIZE;

        return 0;
}

static int safexcel_sha1_digest(struct ahash_request *areq)
{
        int ret = safexcel_sha1_init(areq);

        if (ret)
                return ret;

        return safexcel_ahash_finup(areq);
}

static void safexcel_ahash_cra_exit(struct crypto_tfm *tfm)
{
        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_crypto_priv *priv = ctx->priv;
        int ret;

        /* context not allocated, skip invalidation */
        if (!ctx->base.ctxr)
                return;

        if (priv->flags & EIP197_TRC_CACHE) {
                ret = safexcel_ahash_exit_inv(tfm);
                if (ret)
                        dev_warn(priv->dev, "hash: invalidation error %d\n", ret);
        } else {
                dma_pool_free(priv->context_pool, ctx->base.ctxr,
                              ctx->base.ctxr_dma);
        }
}

struct safexcel_alg_template safexcel_alg_sha1 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .algo_mask = SAFEXCEL_ALG_SHA1,
        .alg.ahash = {
                .init = safexcel_sha1_init,
                .update = safexcel_ahash_update,
                .final = safexcel_ahash_final,
                .finup = safexcel_ahash_finup,
                .digest = safexcel_sha1_digest,
                .export = safexcel_ahash_export,
                .import = safexcel_ahash_import,
                .halg = {
                        .digestsize = SHA1_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "sha1",
                                .cra_driver_name = "safexcel-sha1",
                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_ALLOCATES_MEMORY |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = SHA1_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_ahash_cra_init,
                                .cra_exit = safexcel_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int safexcel_hmac_sha1_init(struct ahash_request *areq)
{
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        /* Start from ipad precompute */
        memcpy(req->state, ctx->ipad, SHA1_DIGEST_SIZE);
        /* Already processed the key^ipad part now! */
        req->len = SHA1_BLOCK_SIZE;
        req->processed = SHA1_BLOCK_SIZE;

        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
        req->state_sz = SHA1_DIGEST_SIZE;
        req->digest_sz = SHA1_DIGEST_SIZE;
        req->block_sz = SHA1_BLOCK_SIZE;
        req->hmac = true;

        return 0;
}

static int safexcel_hmac_sha1_digest(struct ahash_request *areq)
{
        int ret = safexcel_hmac_sha1_init(areq);

        if (ret)
                return ret;

        return safexcel_ahash_finup(areq);
}

struct safexcel_ahash_result {
        struct completion completion;
        int error;
};

static void safexcel_ahash_complete(struct crypto_async_request *req, int error)
{
        struct safexcel_ahash_result *result = req->data;

        if (error == -EINPROGRESS)
                return;

        result->error = error;
        complete(&result->completion);
}
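
/*
 * Compute the HMAC inner and outer pads per RFC 2104: a key longer than
 * the block size is first digested, then the key is zero-padded to the
 * block size and XORed with 0x36 (ipad) and 0x5c (opad) respectively.
 */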
static int safexcel_hmac_init_pad(struct ahash_request *areq,
                                  unsigned int blocksize, const u8 *key,
                                  unsigned int keylen, u8 *ipad, u8 *opad)
{
        struct safexcel_ahash_result result;
        struct scatterlist sg;
        int ret, i;
        u8 *keydup;

        if (keylen <= blocksize) {
                memcpy(ipad, key, keylen);
        } else {
                keydup = kmemdup(key, keylen, GFP_KERNEL);
                if (!keydup)
                        return -ENOMEM;

                ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                           safexcel_ahash_complete, &result);
                sg_init_one(&sg, keydup, keylen);
                ahash_request_set_crypt(areq, &sg, ipad, keylen);
                init_completion(&result.completion);

                ret = crypto_ahash_digest(areq);
                if (ret == -EINPROGRESS || ret == -EBUSY) {
                        wait_for_completion_interruptible(&result.completion);
                        ret = result.error;
                }

                /* Avoid leaking */
                kfree_sensitive(keydup);

                if (ret)
                        return ret;

                keylen = crypto_ahash_digestsize(crypto_ahash_reqtfm(areq));
        }

        memset(ipad + keylen, 0, blocksize - keylen);
        memcpy(opad, ipad, blocksize);

        for (i = 0; i < blocksize; i++) {
                ipad[i] ^= HMAC_IPAD_VALUE;
                opad[i] ^= HMAC_OPAD_VALUE;
        }

        return 0;
}

static int safexcel_hmac_init_iv(struct ahash_request *areq,
                                 unsigned int blocksize, u8 *pad, void *state)
{
        struct safexcel_ahash_result result;
        struct safexcel_ahash_req *req;
        struct scatterlist sg;
        int ret;

        ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                   safexcel_ahash_complete, &result);
        sg_init_one(&sg, pad, blocksize);
        ahash_request_set_crypt(areq, &sg, pad, blocksize);
        init_completion(&result.completion);

        ret = crypto_ahash_init(areq);
        if (ret)
                return ret;

        req = ahash_request_ctx(areq);
        req->hmac = true;
        req->last_req = true;

        ret = crypto_ahash_update(areq);
        if (ret && ret != -EINPROGRESS && ret != -EBUSY)
                return ret;

        wait_for_completion_interruptible(&result.completion);
        if (result.error)
                return result.error;

        return crypto_ahash_export(areq, state);
}
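
/*
 * Precompute the partial HMAC digests for a key: hash one block of
 * key^ipad and one block of key^opad on an ahash tfm allocated by name
 * ("alg", e.g. the driver's own basic hash) and export the resulting
 * midstates. The engine later starts from these instead of reprocessing
 * the key on every request.
 */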
int safexcel_hmac_setkey(const char *alg, const u8 *key, unsigned int keylen,
                         void *istate, void *ostate)
{
        struct ahash_request *areq;
        struct crypto_ahash *tfm;
        unsigned int blocksize;
        u8 *ipad, *opad;
        int ret;

        tfm = crypto_alloc_ahash(alg, 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        areq = ahash_request_alloc(tfm, GFP_KERNEL);
        if (!areq) {
                ret = -ENOMEM;
                goto free_ahash;
        }

        crypto_ahash_clear_flags(tfm, ~0);
        blocksize = crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));

        ipad = kcalloc(2, blocksize, GFP_KERNEL);
        if (!ipad) {
                ret = -ENOMEM;
                goto free_request;
        }

        opad = ipad + blocksize;

        ret = safexcel_hmac_init_pad(areq, blocksize, key, keylen, ipad, opad);
        if (ret)
                goto free_ipad;

        ret = safexcel_hmac_init_iv(areq, blocksize, ipad, istate);
        if (ret)
                goto free_ipad;

        ret = safexcel_hmac_init_iv(areq, blocksize, opad, ostate);

free_ipad:
        kfree(ipad);
free_request:
        ahash_request_free(areq);
free_ahash:
        crypto_free_ahash(tfm);

        return ret;
}

static int safexcel_hmac_alg_setkey(struct crypto_ahash *tfm, const u8 *key,
                                    unsigned int keylen, const char *alg,
                                    unsigned int state_sz)
{
        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
        struct safexcel_crypto_priv *priv = ctx->priv;
        struct safexcel_ahash_export_state istate, ostate;
        int ret;

        ret = safexcel_hmac_setkey(alg, key, keylen, &istate, &ostate);
        if (ret)
                return ret;

        if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr &&
            (memcmp(ctx->ipad, istate.state, state_sz) ||
             memcmp(ctx->opad, ostate.state, state_sz)))
                ctx->base.needs_inv = true;

        memcpy(ctx->ipad, &istate.state, state_sz);
        memcpy(ctx->opad, &ostate.state, state_sz);

        return 0;
}

static int safexcel_hmac_sha1_setkey(struct crypto_ahash *tfm, const u8 *key,
                                     unsigned int keylen)
{
        return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha1",
                                        SHA1_DIGEST_SIZE);
}

struct safexcel_alg_template safexcel_alg_hmac_sha1 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .algo_mask = SAFEXCEL_ALG_SHA1,
        .alg.ahash = {
                .init = safexcel_hmac_sha1_init,
                .update = safexcel_ahash_update,
                .final = safexcel_ahash_final,
                .finup = safexcel_ahash_finup,
                .digest = safexcel_hmac_sha1_digest,
                .setkey = safexcel_hmac_sha1_setkey,
                .export = safexcel_ahash_export,
                .import = safexcel_ahash_import,
                .halg = {
                        .digestsize = SHA1_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "hmac(sha1)",
                                .cra_driver_name = "safexcel-hmac-sha1",
                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_ALLOCATES_MEMORY |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = SHA1_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_ahash_cra_init,
                                .cra_exit = safexcel_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int safexcel_sha256_init(struct ahash_request *areq)
{
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
        req->state_sz = SHA256_DIGEST_SIZE;
        req->digest_sz = SHA256_DIGEST_SIZE;
        req->block_sz = SHA256_BLOCK_SIZE;

        return 0;
}

static int safexcel_sha256_digest(struct ahash_request *areq)
{
        int ret = safexcel_sha256_init(areq);

        if (ret)
                return ret;

        return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sha256 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .algo_mask = SAFEXCEL_ALG_SHA2_256,
        .alg.ahash = {
                .init = safexcel_sha256_init,
                .update = safexcel_ahash_update,
                .final = safexcel_ahash_final,
                .finup = safexcel_ahash_finup,
                .digest = safexcel_sha256_digest,
                .export = safexcel_ahash_export,
                .import = safexcel_ahash_import,
                .halg = {
                        .digestsize = SHA256_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "sha256",
                                .cra_driver_name = "safexcel-sha256",
                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_ALLOCATES_MEMORY |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = SHA256_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_ahash_cra_init,
                                .cra_exit = safexcel_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};
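
/*
 * SHA-224 runs on the SHA-256 engine: state_sz/digest_sz stay at
 * SHA256_DIGEST_SIZE because the hardware always exports the full
 * SHA-256 internal state; only the final copy to areq->result is
 * truncated to 224 bits.
 */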
static int safexcel_sha224_init(struct ahash_request *areq)
{
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
        req->state_sz = SHA256_DIGEST_SIZE;
        req->digest_sz = SHA256_DIGEST_SIZE;
        req->block_sz = SHA256_BLOCK_SIZE;

        return 0;
}

static int safexcel_sha224_digest(struct ahash_request *areq)
{
        int ret = safexcel_sha224_init(areq);

        if (ret)
                return ret;

        return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sha224 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .algo_mask = SAFEXCEL_ALG_SHA2_256,
        .alg.ahash = {
                .init = safexcel_sha224_init,
                .update = safexcel_ahash_update,
                .final = safexcel_ahash_final,
                .finup = safexcel_ahash_finup,
                .digest = safexcel_sha224_digest,
                .export = safexcel_ahash_export,
                .import = safexcel_ahash_import,
                .halg = {
                        .digestsize = SHA224_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "sha224",
                                .cra_driver_name = "safexcel-sha224",
                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_ALLOCATES_MEMORY |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = SHA224_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_ahash_cra_init,
                                .cra_exit = safexcel_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int safexcel_hmac_sha224_setkey(struct crypto_ahash *tfm, const u8 *key,
                                       unsigned int keylen)
{
        return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha224",
                                        SHA256_DIGEST_SIZE);
}

static int safexcel_hmac_sha224_init(struct ahash_request *areq)
{
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        /* Start from ipad precompute */
        memcpy(req->state, ctx->ipad, SHA256_DIGEST_SIZE);
        /* Already processed the key^ipad part now! */
        req->len = SHA256_BLOCK_SIZE;
        req->processed = SHA256_BLOCK_SIZE;

        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
        req->state_sz = SHA256_DIGEST_SIZE;
        req->digest_sz = SHA256_DIGEST_SIZE;
        req->block_sz = SHA256_BLOCK_SIZE;
        req->hmac = true;

        return 0;
}

static int safexcel_hmac_sha224_digest(struct ahash_request *areq)
{
        int ret = safexcel_hmac_sha224_init(areq);

        if (ret)
                return ret;

        return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_sha224 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .algo_mask = SAFEXCEL_ALG_SHA2_256,
        .alg.ahash = {
                .init = safexcel_hmac_sha224_init,
                .update = safexcel_ahash_update,
                .final = safexcel_ahash_final,
                .finup = safexcel_ahash_finup,
                .digest = safexcel_hmac_sha224_digest,
                .setkey = safexcel_hmac_sha224_setkey,
                .export = safexcel_ahash_export,
                .import = safexcel_ahash_import,
                .halg = {
                        .digestsize = SHA224_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "hmac(sha224)",
                                .cra_driver_name = "safexcel-hmac-sha224",
                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_ALLOCATES_MEMORY |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = SHA224_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_ahash_cra_init,
                                .cra_exit = safexcel_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};
73f36ea7 AT |
1436 | static int safexcel_hmac_sha256_setkey(struct crypto_ahash *tfm, const u8 *key, |
1437 | unsigned int keylen) | |
1438 | { | |
1439 | return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha256", | |
1440 | SHA256_DIGEST_SIZE); | |
1441 | } | |
1442 | ||
1443 | static int safexcel_hmac_sha256_init(struct ahash_request *areq) | |
1444 | { | |
41abed7d | 1445 | struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq)); |
73f36ea7 AT |
1446 | struct safexcel_ahash_req *req = ahash_request_ctx(areq); |
1447 | ||
41abed7d PL |
1448 | memset(req, 0, sizeof(*req)); |
1449 | ||
1450 | /* Start from ipad precompute */ | |
1451 | memcpy(req->state, ctx->ipad, SHA256_DIGEST_SIZE); | |
1452 | /* Already processed the key^ipad part now! */ | |
31fb084c PL |
1453 | req->len = SHA256_BLOCK_SIZE; |
1454 | req->processed = SHA256_BLOCK_SIZE; | |
41abed7d PL |
1455 | |
1456 | ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256; | |
1457 | req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED; | |
1458 | req->state_sz = SHA256_DIGEST_SIZE; | |
6c1c09b3 | 1459 | req->digest_sz = SHA256_DIGEST_SIZE; |
41abed7d PL |
1460 | req->block_sz = SHA256_BLOCK_SIZE; |
1461 | req->hmac = true; | |
1462 | ||
73f36ea7 AT |
1463 | return 0; |
1464 | } | |
1465 | ||
1466 | static int safexcel_hmac_sha256_digest(struct ahash_request *areq) | |
1467 | { | |
1468 | int ret = safexcel_hmac_sha256_init(areq); | |
1469 | ||
1470 | if (ret) | |
1471 | return ret; | |
1472 | ||
1473 | return safexcel_ahash_finup(areq); | |
1474 | } | |

struct safexcel_alg_template safexcel_alg_hmac_sha256 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_256,
	.alg.ahash = {
		.init = safexcel_hmac_sha256_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sha256_digest,
		.setkey = safexcel_hmac_sha256_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA256_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha256)",
				.cra_driver_name = "safexcel-hmac-sha256",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sha512_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA512_DIGEST_SIZE;
	req->digest_sz = SHA512_DIGEST_SIZE;
	req->block_sz = SHA512_BLOCK_SIZE;

	return 0;
}

static int safexcel_sha512_digest(struct ahash_request *areq)
{
	int ret = safexcel_sha512_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sha512 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_512,
	.alg.ahash = {
		.init = safexcel_sha512_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sha512_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA512_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha512",
				.cra_driver_name = "safexcel-sha512",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sha384_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA512_DIGEST_SIZE;
	req->digest_sz = SHA512_DIGEST_SIZE;
	req->block_sz = SHA512_BLOCK_SIZE;

	return 0;
}

static int safexcel_sha384_digest(struct ahash_request *areq)
{
	int ret = safexcel_sha384_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sha384 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_512,
	.alg.ahash = {
		.init = safexcel_sha384_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sha384_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA384_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha384",
				.cra_driver_name = "safexcel-sha384",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha512_setkey(struct crypto_ahash *tfm, const u8 *key,
				       unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha512",
					SHA512_DIGEST_SIZE);
}

static int safexcel_hmac_sha512_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, ctx->ipad, SHA512_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len = SHA512_BLOCK_SIZE;
	req->processed = SHA512_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA512_DIGEST_SIZE;
	req->digest_sz = SHA512_DIGEST_SIZE;
	req->block_sz = SHA512_BLOCK_SIZE;
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_sha512_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_sha512_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_sha512 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_512,
	.alg.ahash = {
		.init = safexcel_hmac_sha512_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sha512_digest,
		.setkey = safexcel_hmac_sha512_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA512_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha512)",
				.cra_driver_name = "safexcel-hmac-sha512",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha384_setkey(struct crypto_ahash *tfm, const u8 *key,
				       unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha384",
					SHA512_DIGEST_SIZE);
}

static int safexcel_hmac_sha384_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, ctx->ipad, SHA512_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len = SHA512_BLOCK_SIZE;
	req->processed = SHA512_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA512_DIGEST_SIZE;
	req->digest_sz = SHA512_DIGEST_SIZE;
	req->block_sz = SHA512_BLOCK_SIZE;
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_sha384_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_sha384_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_sha384 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_512,
	.alg.ahash = {
		.init = safexcel_hmac_sha384_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sha384_digest,
		.setkey = safexcel_hmac_sha384_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA384_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha384)",
				.cra_driver_name = "safexcel-hmac-sha384",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_md5_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = MD5_DIGEST_SIZE;
	req->digest_sz = MD5_DIGEST_SIZE;
	req->block_sz = MD5_HMAC_BLOCK_SIZE;

	return 0;
}

static int safexcel_md5_digest(struct ahash_request *areq)
{
	int ret = safexcel_md5_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_md5 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_MD5,
	.alg.ahash = {
		.init = safexcel_md5_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_md5_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = MD5_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "md5",
				.cra_driver_name = "safexcel-md5",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_md5_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, ctx->ipad, MD5_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len = MD5_HMAC_BLOCK_SIZE;
	req->processed = MD5_HMAC_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = MD5_DIGEST_SIZE;
	req->digest_sz = MD5_DIGEST_SIZE;
	req->block_sz = MD5_HMAC_BLOCK_SIZE;
	req->len_is_le = true; /* MD5 is little endian! ... */
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_md5_setkey(struct crypto_ahash *tfm, const u8 *key,
				    unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-md5",
					MD5_DIGEST_SIZE);
}

static int safexcel_hmac_md5_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_md5_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_md5 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_MD5,
	.alg.ahash = {
		.init = safexcel_hmac_md5_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_md5_digest,
		.setkey = safexcel_hmac_md5_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = MD5_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(md5)",
				.cra_driver_name = "safexcel-hmac-md5",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_crc32_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret = safexcel_ahash_cra_init(tfm);

	/* Default 'key' is all zeroes */
	memset(ctx->ipad, 0, sizeof(u32));
	return ret;
}

static int safexcel_crc32_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from loaded key */
	req->state[0] = (__force __le32)le32_to_cpu(~ctx->ipad[0]);
	/* Set processed to non-zero to enable invalidation detection */
	req->len = sizeof(u32);
	req->processed = sizeof(u32);

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_CRC32;
	req->digest = CONTEXT_CONTROL_DIGEST_XCM;
	req->state_sz = sizeof(u32);
	req->digest_sz = sizeof(u32);
	req->block_sz = sizeof(u32);

	return 0;
}
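
/*
 * A note on the ~ctx->ipad[0] load above (assuming the engine implements
 * standard CRC32 with an inverted initial state): with the default all-zero
 * 'key' set in safexcel_crc32_cra_init(), the state becomes 0xFFFFFFFF, the
 * usual CRC32 preconditioning value. A key set through
 * safexcel_crc32_setkey() below therefore acts as an alternative (inverted)
 * initial CRC value rather than as a cryptographic key.
 */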

static int safexcel_crc32_setkey(struct crypto_ahash *tfm, const u8 *key,
				 unsigned int keylen)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));

	if (keylen != sizeof(u32))
		return -EINVAL;

	memcpy(ctx->ipad, key, sizeof(u32));
	return 0;
}

static int safexcel_crc32_digest(struct ahash_request *areq)
{
	return safexcel_crc32_init(areq) ?: safexcel_ahash_finup(areq);
}
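
/*
 * The "x ?: y" above is the GNU "elvis" shorthand: digest() runs init()
 * and, only if that returned 0, falls through to finup() -- the same
 * init-then-finup pattern the other digest entry points in this file
 * spell out with an explicit if.
 */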

struct safexcel_alg_template safexcel_alg_crc32 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = 0,
	.alg.ahash = {
		.init = safexcel_crc32_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_crc32_digest,
		.setkey = safexcel_crc32_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = sizeof(u32),
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "crc32",
				.cra_driver_name = "safexcel-crc32",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_OPTIONAL_KEY |
					     CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = 1,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_crc32_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_cbcmac_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from loaded keys */
	memcpy(req->state, ctx->ipad, ctx->key_sz);
	/* Set processed to non-zero to enable invalidation detection */
	req->len = AES_BLOCK_SIZE;
	req->processed = AES_BLOCK_SIZE;

	req->digest = CONTEXT_CONTROL_DIGEST_XCM;
	req->state_sz = ctx->key_sz;
	req->digest_sz = AES_BLOCK_SIZE;
	req->block_sz = AES_BLOCK_SIZE;
	req->xcbcmac = true;

	return 0;
}

static int safexcel_cbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
				  unsigned int len)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
	struct crypto_aes_ctx aes;
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret)
		return ret;

	memset(ctx->ipad, 0, 2 * AES_BLOCK_SIZE);
	for (i = 0; i < len / sizeof(u32); i++)
		ctx->ipad[i + 8] = (__force __le32)cpu_to_be32(aes.key_enc[i]);

	if (len == AES_KEYSIZE_192) {
		ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
		ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
	} else if (len == AES_KEYSIZE_256) {
		ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
		ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
	} else {
		ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
		ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
	}
	ctx->cbcmac = true;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}
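
/*
 * Key context layout sketch (inferred from the code above, not from engine
 * documentation): ctx->ipad doubles as the XCM key context here. The first
 * two AES blocks (words 0..7) stay zero -- the slots that XCBC and CMAC
 * fill with their K2/K3 subkeys below -- and the byte-swapped AES key is
 * written from word 8 on, which is presumably why plain CBCMAC can be run
 * as XCBC with all-zero subkeys.
 */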

static int safexcel_cbcmac_digest(struct ahash_request *areq)
{
	return safexcel_cbcmac_init(areq) ?: safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_cbcmac = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = 0,
	.alg.ahash = {
		.init = safexcel_cbcmac_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_cbcmac_digest,
		.setkey = safexcel_cbcmac_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = AES_BLOCK_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "cbcmac(aes)",
				.cra_driver_name = "safexcel-cbcmac-aes",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = 1,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_xcbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
				   unsigned int len)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
	struct crypto_aes_ctx aes;
	u32 key_tmp[3 * AES_BLOCK_SIZE / sizeof(u32)];
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret)
		return ret;

	/* precompute the XCBC key material */
	crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
				CRYPTO_TFM_REQ_MASK);
	ret = crypto_cipher_setkey(ctx->kaes, key, len);
	if (ret)
		return ret;

	crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
		"\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1");
	crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp,
		"\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2");
	crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + AES_BLOCK_SIZE,
		"\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3");
	for (i = 0; i < 3 * AES_BLOCK_SIZE / sizeof(u32); i++)
		ctx->ipad[i] =
			cpu_to_le32((__force u32)cpu_to_be32(key_tmp[i]));

	crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
				CRYPTO_TFM_REQ_MASK);
	ret = crypto_cipher_setkey(ctx->kaes,
				   (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
				   AES_MIN_KEY_SIZE);
	if (ret)
		return ret;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
	ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
	ctx->cbcmac = false;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}
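
/*
 * Sketch of the derivation above, assuming it follows RFC 3566
 * (AES-XCBC-MAC) as the generic crypto/xcbc.c does:
 *
 *	K1 = AES_K(0x0101..01)	-> re-keyed into ctx->kaes as the MAC key
 *	K2 = AES_K(0x0202..02)	-> XORed into the final message block
 *	K3 = AES_K(0x0303..03)	-> used instead when the final block is padded
 *
 * key_tmp[] is laid out as K2 | K3 | K1, so K2/K3 precede the MAC key in
 * ctx->ipad to match the engine's XCBC key context layout.
 */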

static int safexcel_xcbcmac_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_ahash_cra_init(tfm);
	ctx->kaes = crypto_alloc_cipher("aes", 0, 0);
	return PTR_ERR_OR_ZERO(ctx->kaes);
}

static void safexcel_xcbcmac_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(ctx->kaes);
	safexcel_ahash_cra_exit(tfm);
}

struct safexcel_alg_template safexcel_alg_xcbcmac = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = 0,
	.alg.ahash = {
		.init = safexcel_cbcmac_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_cbcmac_digest,
		.setkey = safexcel_xcbcmac_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = AES_BLOCK_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "xcbc(aes)",
				.cra_driver_name = "safexcel-xcbc-aes",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_xcbcmac_cra_init,
				.cra_exit = safexcel_xcbcmac_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_cmac_setkey(struct crypto_ahash *tfm, const u8 *key,
				unsigned int len)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
	struct crypto_aes_ctx aes;
	__be64 consts[4];
	u64 _const[2];
	u8 msb_mask, gfmask;
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret)
		return ret;

	for (i = 0; i < len / sizeof(u32); i++)
		ctx->ipad[i + 8] =
			cpu_to_le32((__force u32)cpu_to_be32(aes.key_enc[i]));

	/* precompute the CMAC key material */
	crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
				CRYPTO_TFM_REQ_MASK);
	ret = crypto_cipher_setkey(ctx->kaes, key, len);
	if (ret)
		return ret;

	/* code below borrowed from crypto/cmac.c */
	/* encrypt the zero block */
	memset(consts, 0, AES_BLOCK_SIZE);
	crypto_cipher_encrypt_one(ctx->kaes, (u8 *)consts, (u8 *)consts);

	gfmask = 0x87;
	_const[0] = be64_to_cpu(consts[1]);
	_const[1] = be64_to_cpu(consts[0]);

	/* gf(2^128) multiply zero-ciphertext with u and u^2 */
	for (i = 0; i < 4; i += 2) {
		msb_mask = ((s64)_const[1] >> 63) & gfmask;
		_const[1] = (_const[1] << 1) | (_const[0] >> 63);
		_const[0] = (_const[0] << 1) ^ msb_mask;

		consts[i + 0] = cpu_to_be64(_const[1]);
		consts[i + 1] = cpu_to_be64(_const[0]);
	}
	/* end of code borrowed from crypto/cmac.c */

	for (i = 0; i < 2 * AES_BLOCK_SIZE / sizeof(u32); i++)
		ctx->ipad[i] = (__force __le32)cpu_to_be32(((u32 *)consts)[i]);

	if (len == AES_KEYSIZE_192) {
		ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
		ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
	} else if (len == AES_KEYSIZE_256) {
		ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
		ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
	} else {
		ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
		ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
	}
	ctx->cbcmac = false;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}
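
/*
 * Worked form of the subkey math above (RFC 4493 / NIST SP 800-38B CMAC,
 * per the crypto/cmac.c code it borrows): let L = AES_K(0^128), then
 *
 *	K1 = (L  << 1) ^ (msb(L)  ? 0x87 : 0)
 *	K2 = (K1 << 1) ^ (msb(K1) ? 0x87 : 0)
 *
 * i.e. two doublings in GF(2^128) modulo x^128 + x^7 + x^2 + x + 1, which
 * is what the msb_mask/gfmask shift loop computes on the two 64-bit halves
 * held in _const[].
 */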

struct safexcel_alg_template safexcel_alg_cmac = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = 0,
	.alg.ahash = {
		.init = safexcel_cbcmac_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_cbcmac_digest,
		.setkey = safexcel_cmac_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = AES_BLOCK_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "cmac(aes)",
				.cra_driver_name = "safexcel-cmac-aes",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_xcbcmac_cra_init,
				.cra_exit = safexcel_xcbcmac_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sm3_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SM3_DIGEST_SIZE;
	req->digest_sz = SM3_DIGEST_SIZE;
	req->block_sz = SM3_BLOCK_SIZE;

	return 0;
}

static int safexcel_sm3_digest(struct ahash_request *areq)
{
	int ret = safexcel_sm3_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sm3 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SM3,
	.alg.ahash = {
		.init = safexcel_sm3_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sm3_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SM3_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sm3",
				.cra_driver_name = "safexcel-sm3",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SM3_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sm3_setkey(struct crypto_ahash *tfm, const u8 *key,
				    unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sm3",
					SM3_DIGEST_SIZE);
}

static int safexcel_hmac_sm3_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, ctx->ipad, SM3_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len = SM3_BLOCK_SIZE;
	req->processed = SM3_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SM3_DIGEST_SIZE;
	req->digest_sz = SM3_DIGEST_SIZE;
	req->block_sz = SM3_BLOCK_SIZE;
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_sm3_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_sm3_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_sm3 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SM3,
	.alg.ahash = {
		.init = safexcel_hmac_sm3_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sm3_digest,
		.setkey = safexcel_hmac_sm3_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SM3_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sm3)",
				.cra_driver_name = "safexcel-hmac-sm3",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SM3_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sha3_224_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
	req->state_sz = SHA3_224_DIGEST_SIZE;
	req->digest_sz = SHA3_224_DIGEST_SIZE;
	req->block_sz = SHA3_224_BLOCK_SIZE;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}

static int safexcel_sha3_fbcheck(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *subreq = ahash_request_ctx(req);
	int ret = 0;

	if (ctx->do_fallback) {
		ahash_request_set_tfm(subreq, ctx->fback);
		ahash_request_set_callback(subreq, req->base.flags,
					   req->base.complete, req->base.data);
		ahash_request_set_crypt(subreq, req->src, req->result,
					req->nbytes);
		if (!ctx->fb_init_done) {
			if (ctx->fb_do_setkey) {
				/* Set the HMAC key on the fallback ahash */
				u8 key[SHA3_224_BLOCK_SIZE];

				memcpy(key, ctx->ipad,
				       crypto_ahash_blocksize(ctx->fback) / 2);
				memcpy(key +
				       crypto_ahash_blocksize(ctx->fback) / 2,
				       ctx->opad,
				       crypto_ahash_blocksize(ctx->fback) / 2);
				ret = crypto_ahash_setkey(ctx->fback, key,
					crypto_ahash_blocksize(ctx->fback));
				memzero_explicit(key,
					crypto_ahash_blocksize(ctx->fback));
				ctx->fb_do_setkey = false;
			}
			ret = ret ?: crypto_ahash_init(subreq);
			ctx->fb_init_done = true;
		}
	}
	return ret;
}
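
/*
 * A note on the key assembly above (assumption: ctx->ipad and ctx->opad
 * each carry one half of the raw, zero-padded HMAC key, as arranged by
 * safexcel_hmac_sha3_setkey() further down): the fallback ahash expects
 * the original flat key, so the two halves are simply concatenated back
 * into one blocksize-sized buffer before crypto_ahash_setkey().
 * Zero-padding a key to the block size does not change the HMAC result.
 */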

static int safexcel_sha3_update(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *subreq = ahash_request_ctx(req);

	ctx->do_fallback = true;
	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_update(subreq);
}

static int safexcel_sha3_final(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *subreq = ahash_request_ctx(req);

	ctx->do_fallback = true;
	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_final(subreq);
}

static int safexcel_sha3_finup(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *subreq = ahash_request_ctx(req);

	ctx->do_fallback |= !req->nbytes;
	if (ctx->do_fallback)
		/* An update or export/import happened, or zero length: cannot use the HW */
		return safexcel_sha3_fbcheck(req) ?:
		       crypto_ahash_finup(subreq);
	else
		return safexcel_ahash_finup(req);
}

static int safexcel_sha3_digest_fallback(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *subreq = ahash_request_ctx(req);

	ctx->do_fallback = true;
	ctx->fb_init_done = false;
	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_finup(subreq);
}

static int safexcel_sha3_224_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_sha3_224_init(req) ?: safexcel_ahash_finup(req);

	/* HW cannot do zero length hash, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

static int safexcel_sha3_export(struct ahash_request *req, void *out)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *subreq = ahash_request_ctx(req);

	ctx->do_fallback = true;
	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_export(subreq, out);
}

static int safexcel_sha3_import(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *subreq = ahash_request_ctx(req);

	ctx->do_fallback = true;
	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_import(subreq, in);
}

static int safexcel_sha3_cra_init(struct crypto_tfm *tfm)
{
	struct crypto_ahash *ahash = __crypto_ahash_cast(tfm);
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_ahash_cra_init(tfm);

	/* Allocate fallback implementation */
	ctx->fback = crypto_alloc_ahash(crypto_tfm_alg_name(tfm), 0,
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->fback))
		return PTR_ERR(ctx->fback);

	/* Update statesize from fallback algorithm! */
	crypto_hash_alg_common(ahash)->statesize =
		crypto_ahash_statesize(ctx->fback);
	crypto_ahash_set_reqsize(ahash, max(sizeof(struct safexcel_ahash_req),
					    sizeof(struct ahash_request) +
					    crypto_ahash_reqsize(ctx->fback)));
	return 0;
}
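
/*
 * Sizing note (inferred from the code above): since any SHA3 request may be
 * bounced to the fallback at any point, the advertised state and request
 * sizes must cover either path -- hence statesize is inherited from the
 * fallback, and the request size is the max of the driver's own request
 * context and a full fallback ahash_request.
 */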

static void safexcel_sha3_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_ahash(ctx->fback);
	safexcel_ahash_cra_exit(tfm);
}

struct safexcel_alg_template safexcel_alg_sha3_224 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_sha3_224_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_sha3_224_digest,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_224_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha3-224",
				.cra_driver_name = "safexcel-sha3-224",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_sha3_cra_init,
				.cra_exit = safexcel_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sha3_256_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
	req->state_sz = SHA3_256_DIGEST_SIZE;
	req->digest_sz = SHA3_256_DIGEST_SIZE;
	req->block_sz = SHA3_256_BLOCK_SIZE;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}

static int safexcel_sha3_256_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_sha3_256_init(req) ?: safexcel_ahash_finup(req);

	/* HW cannot do zero length hash, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

struct safexcel_alg_template safexcel_alg_sha3_256 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_sha3_256_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_sha3_256_digest,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_256_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha3-256",
				.cra_driver_name = "safexcel-sha3-256",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_sha3_cra_init,
				.cra_exit = safexcel_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sha3_384_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
	req->state_sz = SHA3_384_DIGEST_SIZE;
	req->digest_sz = SHA3_384_DIGEST_SIZE;
	req->block_sz = SHA3_384_BLOCK_SIZE;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}

static int safexcel_sha3_384_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_sha3_384_init(req) ?: safexcel_ahash_finup(req);

	/* HW cannot do zero length hash, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

struct safexcel_alg_template safexcel_alg_sha3_384 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_sha3_384_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_sha3_384_digest,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_384_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha3-384",
				.cra_driver_name = "safexcel-sha3-384",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_sha3_cra_init,
				.cra_exit = safexcel_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sha3_512_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
	req->state_sz = SHA3_512_DIGEST_SIZE;
	req->digest_sz = SHA3_512_DIGEST_SIZE;
	req->block_sz = SHA3_512_BLOCK_SIZE;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}

static int safexcel_sha3_512_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_sha3_512_init(req) ?: safexcel_ahash_finup(req);

	/* HW cannot do zero length hash, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

struct safexcel_alg_template safexcel_alg_sha3_512 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_sha3_512_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_sha3_512_digest,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_512_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha3-512",
				.cra_driver_name = "safexcel-sha3-512",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_sha3_cra_init,
				.cra_exit = safexcel_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha3_cra_init(struct crypto_tfm *tfm, const char *alg)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_sha3_cra_init(tfm);
	if (ret)
		return ret;

	/* Allocate precalc basic digest implementation */
	ctx->shpre = crypto_alloc_shash(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->shpre))
		return PTR_ERR(ctx->shpre);

	ctx->shdesc = kmalloc(sizeof(*ctx->shdesc) +
			      crypto_shash_descsize(ctx->shpre), GFP_KERNEL);
	if (!ctx->shdesc) {
		crypto_free_shash(ctx->shpre);
		return -ENOMEM;
	}
	ctx->shdesc->tfm = ctx->shpre;
	return 0;
}

static void safexcel_hmac_sha3_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_ahash(ctx->fback);
	crypto_free_shash(ctx->shpre);
	kfree(ctx->shdesc);
	safexcel_ahash_cra_exit(tfm);
}

static int safexcel_hmac_sha3_setkey(struct crypto_ahash *tfm, const u8 *key,
				     unsigned int keylen)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	int ret = 0;

	if (keylen > crypto_ahash_blocksize(tfm)) {
		/*
		 * If the key is larger than the blocksize, then hash it
		 * first using our precalc shash
		 */
		ret = crypto_shash_digest(ctx->shdesc, key, keylen,
					  (u8 *)ctx->ipad);
		keylen = crypto_shash_digestsize(ctx->shpre);

		/*
		 * If the digest is larger than half the blocksize, we need to
		 * move the rest to opad due to the way our HMAC infra works.
		 */
		if (keylen > crypto_ahash_blocksize(tfm) / 2)
			/* Buffers overlap, need to use memmove instead of memcpy! */
			memmove(ctx->opad,
				(u8 *)ctx->ipad +
					crypto_ahash_blocksize(tfm) / 2,
				keylen - crypto_ahash_blocksize(tfm) / 2);
	} else {
		/*
		 * Copy the key to our ipad & opad buffers.
		 * Note that ipad and opad each contain one half of the key,
		 * to match the existing HMAC driver infrastructure.
		 */
		if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
			memcpy(ctx->ipad, key, keylen);
		} else {
			memcpy(ctx->ipad, key,
			       crypto_ahash_blocksize(tfm) / 2);
			memcpy(ctx->opad,
			       key + crypto_ahash_blocksize(tfm) / 2,
			       keylen - crypto_ahash_blocksize(tfm) / 2);
		}
	}

	/* Pad key with zeroes */
	if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
		memset((u8 *)ctx->ipad + keylen, 0,
		       crypto_ahash_blocksize(tfm) / 2 - keylen);
		memset(ctx->opad, 0, crypto_ahash_blocksize(tfm) / 2);
	} else {
		memset((u8 *)ctx->opad + keylen -
		       crypto_ahash_blocksize(tfm) / 2, 0,
		       crypto_ahash_blocksize(tfm) - keylen);
	}

	/* If doing fallback, still need to set the new key! */
	ctx->fb_do_setkey = true;
	return ret;
}
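
/*
 * Worked example of the split-key layout above (illustrative numbers only):
 * for hmac(sha3-256) the block size is 136 bytes, so ctx->ipad and ctx->opad
 * hold 68 bytes each. A 20-byte key lands in ipad[0..19] with ipad[20..67]
 * and all of opad zeroed; a 100-byte key is split 68/32 across ipad/opad
 * with opad[32..67] zeroed; anything longer than 136 bytes is first
 * digested down to 32 bytes by ctx->shpre.
 */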

static int safexcel_hmac_sha3_224_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Copy (half of) the key */
	memcpy(req->state, ctx->ipad, SHA3_224_BLOCK_SIZE / 2);
	/* Start of HMAC should have len == processed == blocksize */
	req->len = SHA3_224_BLOCK_SIZE;
	req->processed = SHA3_224_BLOCK_SIZE;
	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	req->state_sz = SHA3_224_BLOCK_SIZE / 2;
	req->digest_sz = SHA3_224_DIGEST_SIZE;
	req->block_sz = SHA3_224_BLOCK_SIZE;
	req->hmac = true;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}
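
/*
 * Unlike the SHA2 HMAC inits earlier in this file, no ipad precompute is
 * loaded here: the raw key half goes into the state and req->digest is set
 * to CONTEXT_CONTROL_DIGEST_HMAC, presumably leaving the full ipad/opad
 * construction to the engine itself (an inference from the code, not from
 * the engine documentation).
 */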

static int safexcel_hmac_sha3_224_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_hmac_sha3_224_init(req) ?:
		       safexcel_ahash_finup(req);

	/* HW cannot do zero length HMAC, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

static int safexcel_hmac_sha3_224_cra_init(struct crypto_tfm *tfm)
{
	return safexcel_hmac_sha3_cra_init(tfm, "sha3-224");
}

struct safexcel_alg_template safexcel_alg_hmac_sha3_224 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_hmac_sha3_224_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_hmac_sha3_224_digest,
		.setkey = safexcel_hmac_sha3_setkey,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_224_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha3-224)",
				.cra_driver_name = "safexcel-hmac-sha3-224",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_hmac_sha3_224_cra_init,
				.cra_exit = safexcel_hmac_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
2919 | ||
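/*
 * Illustrative sketch (not part of this driver): how a kernel client might
 * drive the "hmac(sha3-224)" ahash registered above through the generic
 * crypto API. The key/keylen/data/datalen variables are hypothetical and
 * error handling is omitted for brevity.
 *
 *	struct crypto_ahash *tfm = crypto_alloc_ahash("hmac(sha3-224)", 0, 0);
 *	struct ahash_request *req;
 *	struct scatterlist sg;
 *	u8 out[SHA3_224_DIGEST_SIZE];
 *	DECLARE_CRYPTO_WAIT(wait);
 *
 *	crypto_ahash_setkey(tfm, key, keylen);
 *	req = ahash_request_alloc(tfm, GFP_KERNEL);
 *	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				   crypto_req_done, &wait);
 *	sg_init_one(&sg, data, datalen);
 *	ahash_request_set_crypt(req, &sg, out, datalen);
 *	crypto_wait_req(crypto_ahash_digest(req), &wait);
 *	ahash_request_free(req);
 *	crypto_free_ahash(tfm);
 */
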
static int safexcel_hmac_sha3_256_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Copy (half of) the key */
	memcpy(req->state, ctx->ipad, SHA3_256_BLOCK_SIZE / 2);
	/* Start of HMAC should have len == processed == blocksize */
	req->len = SHA3_256_BLOCK_SIZE;
	req->processed = SHA3_256_BLOCK_SIZE;
	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	req->state_sz = SHA3_256_BLOCK_SIZE / 2;
	req->digest_sz = SHA3_256_DIGEST_SIZE;
	req->block_sz = SHA3_256_BLOCK_SIZE;
	req->hmac = true;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}

static int safexcel_hmac_sha3_256_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_hmac_sha3_256_init(req) ?:
		       safexcel_ahash_finup(req);

	/* HW cannot do zero length HMAC, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

static int safexcel_hmac_sha3_256_cra_init(struct crypto_tfm *tfm)
{
	return safexcel_hmac_sha3_cra_init(tfm, "sha3-256");
}

struct safexcel_alg_template safexcel_alg_hmac_sha3_256 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_hmac_sha3_256_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_hmac_sha3_256_digest,
		.setkey = safexcel_hmac_sha3_setkey,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_256_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha3-256)",
				.cra_driver_name = "safexcel-hmac-sha3-256",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_hmac_sha3_256_cra_init,
				.cra_exit = safexcel_hmac_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

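/*
 * The SHA3-384 and SHA3-512 variants below follow the exact same pattern;
 * only the per-algorithm parameters change with the SHA3 rate: block sizes
 * of 144/136/104/72 bytes and digest sizes of 28/32/48/64 bytes for
 * SHA3-224/256/384/512 respectively.
 */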
static int safexcel_hmac_sha3_384_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Copy (half of) the key */
	memcpy(req->state, ctx->ipad, SHA3_384_BLOCK_SIZE / 2);
	/* Start of HMAC should have len == processed == blocksize */
	req->len = SHA3_384_BLOCK_SIZE;
	req->processed = SHA3_384_BLOCK_SIZE;
	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	req->state_sz = SHA3_384_BLOCK_SIZE / 2;
	req->digest_sz = SHA3_384_DIGEST_SIZE;
	req->block_sz = SHA3_384_BLOCK_SIZE;
	req->hmac = true;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}

static int safexcel_hmac_sha3_384_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_hmac_sha3_384_init(req) ?:
		       safexcel_ahash_finup(req);

	/* HW cannot do zero length HMAC, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

static int safexcel_hmac_sha3_384_cra_init(struct crypto_tfm *tfm)
{
	return safexcel_hmac_sha3_cra_init(tfm, "sha3-384");
}

struct safexcel_alg_template safexcel_alg_hmac_sha3_384 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_hmac_sha3_384_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_hmac_sha3_384_digest,
		.setkey = safexcel_hmac_sha3_setkey,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_384_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha3-384)",
				.cra_driver_name = "safexcel-hmac-sha3-384",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_hmac_sha3_384_cra_init,
				.cra_exit = safexcel_hmac_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha3_512_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Copy (half of) the key */
	memcpy(req->state, ctx->ipad, SHA3_512_BLOCK_SIZE / 2);
	/* Start of HMAC should have len == processed == blocksize */
	req->len = SHA3_512_BLOCK_SIZE;
	req->processed = SHA3_512_BLOCK_SIZE;
	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	req->state_sz = SHA3_512_BLOCK_SIZE / 2;
	req->digest_sz = SHA3_512_DIGEST_SIZE;
	req->block_sz = SHA3_512_BLOCK_SIZE;
	req->hmac = true;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}

static int safexcel_hmac_sha3_512_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_hmac_sha3_512_init(req) ?:
		       safexcel_ahash_finup(req);

	/* HW cannot do zero length HMAC, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

static int safexcel_hmac_sha3_512_cra_init(struct crypto_tfm *tfm)
{
	return safexcel_hmac_sha3_cra_init(tfm, "sha3-512");
}

struct safexcel_alg_template safexcel_alg_hmac_sha3_512 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_hmac_sha3_512_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_hmac_sha3_512_digest,
		.setkey = safexcel_hmac_sha3_setkey,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_512_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha3-512)",
				.cra_driver_name = "safexcel-hmac-sha3-512",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_hmac_sha3_512_cra_init,
				.cra_exit = safexcel_hmac_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};