/*
 * Copyright (C) 2017 Marvell
 *
 * Antoine Tenart <antoine.tenart@free-electrons.com>
 *
 * This file is licensed under the terms of the GNU General Public
 * License version 2. This program is licensed "as is" without any
 * warranty of any kind, whether express or implied.
 */
11 #include <linux/device.h>
12 #include <linux/dma-mapping.h>
13 #include <linux/dmapool.h>
15 #include <crypto/aead.h>
16 #include <crypto/aes.h>
17 #include <crypto/authenc.h>
18 #include <crypto/sha.h>
19 #include <crypto/skcipher.h>
20 #include <crypto/internal/aead.h>
21 #include <crypto/internal/skcipher.h>
/* Direction of a cipher/AEAD operation. The enumerator list was dropped by
 * the garbled extraction; both names are used throughout this file
 * (SAFEXCEL_ENCRYPT / SAFEXCEL_DECRYPT), so they are restored here.
 */
enum safexcel_cipher_direction {
	SAFEXCEL_ENCRYPT,
	SAFEXCEL_DECRYPT,
};
30 struct safexcel_cipher_ctx
{
31 struct safexcel_context base
;
32 struct safexcel_crypto_priv
*priv
;
40 /* All the below is AEAD specific */
43 u32 ipad
[SHA256_DIGEST_SIZE
/ sizeof(u32
)];
44 u32 opad
[SHA256_DIGEST_SIZE
/ sizeof(u32
)];
47 struct safexcel_cipher_req
{
48 enum safexcel_cipher_direction direction
;
52 static void safexcel_skcipher_token(struct safexcel_cipher_ctx
*ctx
, u8
*iv
,
53 struct safexcel_command_desc
*cdesc
,
56 struct safexcel_token
*token
;
59 if (ctx
->mode
== CONTEXT_CONTROL_CRYPTO_MODE_CBC
) {
60 offset
= AES_BLOCK_SIZE
/ sizeof(u32
);
61 memcpy(cdesc
->control_data
.token
, iv
, AES_BLOCK_SIZE
);
63 cdesc
->control_data
.options
|= EIP197_OPTION_4_TOKEN_IV_CMD
;
66 token
= (struct safexcel_token
*)(cdesc
->control_data
.token
+ offset
);
68 token
[0].opcode
= EIP197_TOKEN_OPCODE_DIRECTION
;
69 token
[0].packet_length
= length
;
70 token
[0].stat
= EIP197_TOKEN_STAT_LAST_PACKET
|
71 EIP197_TOKEN_STAT_LAST_HASH
;
72 token
[0].instructions
= EIP197_TOKEN_INS_LAST
|
73 EIP197_TOKEN_INS_TYPE_CRYTO
|
74 EIP197_TOKEN_INS_TYPE_OUTPUT
;
77 static void safexcel_aead_token(struct safexcel_cipher_ctx
*ctx
, u8
*iv
,
78 struct safexcel_command_desc
*cdesc
,
79 enum safexcel_cipher_direction direction
,
80 u32 cryptlen
, u32 assoclen
, u32 digestsize
)
82 struct safexcel_token
*token
;
85 if (ctx
->mode
== CONTEXT_CONTROL_CRYPTO_MODE_CBC
) {
86 offset
= AES_BLOCK_SIZE
/ sizeof(u32
);
87 memcpy(cdesc
->control_data
.token
, iv
, AES_BLOCK_SIZE
);
89 cdesc
->control_data
.options
|= EIP197_OPTION_4_TOKEN_IV_CMD
;
92 token
= (struct safexcel_token
*)(cdesc
->control_data
.token
+ offset
);
94 if (direction
== SAFEXCEL_DECRYPT
)
95 cryptlen
-= digestsize
;
97 token
[0].opcode
= EIP197_TOKEN_OPCODE_DIRECTION
;
98 token
[0].packet_length
= assoclen
;
99 token
[0].instructions
= EIP197_TOKEN_INS_TYPE_HASH
|
100 EIP197_TOKEN_INS_TYPE_OUTPUT
;
102 token
[1].opcode
= EIP197_TOKEN_OPCODE_DIRECTION
;
103 token
[1].packet_length
= cryptlen
;
104 token
[1].stat
= EIP197_TOKEN_STAT_LAST_HASH
;
105 token
[1].instructions
= EIP197_TOKEN_INS_LAST
|
106 EIP197_TOKEN_INS_TYPE_CRYTO
|
107 EIP197_TOKEN_INS_TYPE_HASH
|
108 EIP197_TOKEN_INS_TYPE_OUTPUT
;
110 if (direction
== SAFEXCEL_ENCRYPT
) {
111 token
[2].opcode
= EIP197_TOKEN_OPCODE_INSERT
;
112 token
[2].packet_length
= digestsize
;
113 token
[2].stat
= EIP197_TOKEN_STAT_LAST_HASH
|
114 EIP197_TOKEN_STAT_LAST_PACKET
;
115 token
[2].instructions
= EIP197_TOKEN_INS_TYPE_OUTPUT
|
116 EIP197_TOKEN_INS_INSERT_HASH_DIGEST
;
118 token
[2].opcode
= EIP197_TOKEN_OPCODE_RETRIEVE
;
119 token
[2].packet_length
= digestsize
;
120 token
[2].stat
= EIP197_TOKEN_STAT_LAST_HASH
|
121 EIP197_TOKEN_STAT_LAST_PACKET
;
122 token
[2].instructions
= EIP197_TOKEN_INS_INSERT_HASH_DIGEST
;
124 token
[3].opcode
= EIP197_TOKEN_OPCODE_VERIFY
;
125 token
[3].packet_length
= digestsize
|
126 EIP197_TOKEN_HASH_RESULT_VERIFY
;
127 token
[3].stat
= EIP197_TOKEN_STAT_LAST_HASH
|
128 EIP197_TOKEN_STAT_LAST_PACKET
;
129 token
[3].instructions
= EIP197_TOKEN_INS_TYPE_OUTPUT
;
133 static int safexcel_skcipher_aes_setkey(struct crypto_skcipher
*ctfm
,
134 const u8
*key
, unsigned int len
)
136 struct crypto_tfm
*tfm
= crypto_skcipher_tfm(ctfm
);
137 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
138 struct safexcel_crypto_priv
*priv
= ctx
->priv
;
139 struct crypto_aes_ctx aes
;
142 ret
= crypto_aes_expand_key(&aes
, key
, len
);
144 crypto_skcipher_set_flags(ctfm
, CRYPTO_TFM_RES_BAD_KEY_LEN
);
148 if (priv
->version
== EIP197
&& ctx
->base
.ctxr_dma
) {
149 for (i
= 0; i
< len
/ sizeof(u32
); i
++) {
150 if (ctx
->key
[i
] != cpu_to_le32(aes
.key_enc
[i
])) {
151 ctx
->base
.needs_inv
= true;
157 for (i
= 0; i
< len
/ sizeof(u32
); i
++)
158 ctx
->key
[i
] = cpu_to_le32(aes
.key_enc
[i
]);
162 memzero_explicit(&aes
, sizeof(aes
));
166 static int safexcel_aead_aes_setkey(struct crypto_aead
*ctfm
, const u8
*key
,
169 struct crypto_tfm
*tfm
= crypto_aead_tfm(ctfm
);
170 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
171 struct safexcel_ahash_export_state istate
, ostate
;
172 struct safexcel_crypto_priv
*priv
= ctx
->priv
;
173 struct crypto_authenc_keys keys
;
175 if (crypto_authenc_extractkeys(&keys
, key
, len
) != 0)
178 if (keys
.enckeylen
> sizeof(ctx
->key
))
182 if (priv
->version
== EIP197
&& ctx
->base
.ctxr_dma
&&
183 memcmp(ctx
->key
, keys
.enckey
, keys
.enckeylen
))
184 ctx
->base
.needs_inv
= true;
188 case CONTEXT_CONTROL_CRYPTO_ALG_SHA224
:
189 if (safexcel_hmac_setkey("safexcel-sha224", keys
.authkey
,
190 keys
.authkeylen
, &istate
, &ostate
))
193 case CONTEXT_CONTROL_CRYPTO_ALG_SHA256
:
194 if (safexcel_hmac_setkey("safexcel-sha256", keys
.authkey
,
195 keys
.authkeylen
, &istate
, &ostate
))
199 dev_err(priv
->dev
, "aead: unsupported hash algorithm\n");
203 crypto_aead_set_flags(ctfm
, crypto_aead_get_flags(ctfm
) &
204 CRYPTO_TFM_RES_MASK
);
206 if (priv
->version
== EIP197
&& ctx
->base
.ctxr_dma
&&
207 (memcmp(ctx
->ipad
, istate
.state
, ctx
->state_sz
) ||
208 memcmp(ctx
->opad
, ostate
.state
, ctx
->state_sz
)))
209 ctx
->base
.needs_inv
= true;
211 /* Now copy the keys into the context */
212 memcpy(ctx
->key
, keys
.enckey
, keys
.enckeylen
);
213 ctx
->key_len
= keys
.enckeylen
;
215 memcpy(ctx
->ipad
, &istate
.state
, ctx
->state_sz
);
216 memcpy(ctx
->opad
, &ostate
.state
, ctx
->state_sz
);
218 memzero_explicit(&keys
, sizeof(keys
));
222 crypto_aead_set_flags(ctfm
, CRYPTO_TFM_RES_BAD_KEY_LEN
);
223 memzero_explicit(&keys
, sizeof(keys
));
227 static int safexcel_context_control(struct safexcel_cipher_ctx
*ctx
,
228 struct crypto_async_request
*async
,
229 struct safexcel_cipher_req
*sreq
,
230 struct safexcel_command_desc
*cdesc
)
232 struct safexcel_crypto_priv
*priv
= ctx
->priv
;
236 if (sreq
->direction
== SAFEXCEL_ENCRYPT
)
237 cdesc
->control_data
.control0
|= CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT
;
239 cdesc
->control_data
.control0
|= CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN
;
241 cdesc
->control_data
.control0
|= CONTEXT_CONTROL_TYPE_CRYPTO_OUT
;
243 /* The decryption control type is a combination of the
244 * encryption type and CONTEXT_CONTROL_TYPE_NULL_IN, for all
247 if (sreq
->direction
== SAFEXCEL_DECRYPT
)
248 cdesc
->control_data
.control0
|= CONTEXT_CONTROL_TYPE_NULL_IN
;
251 cdesc
->control_data
.control0
|= CONTEXT_CONTROL_KEY_EN
;
252 cdesc
->control_data
.control1
|= ctx
->mode
;
255 cdesc
->control_data
.control0
|= CONTEXT_CONTROL_DIGEST_HMAC
|
258 switch (ctx
->key_len
) {
259 case AES_KEYSIZE_128
:
260 cdesc
->control_data
.control0
|= CONTEXT_CONTROL_CRYPTO_ALG_AES128
;
262 case AES_KEYSIZE_192
:
263 cdesc
->control_data
.control0
|= CONTEXT_CONTROL_CRYPTO_ALG_AES192
;
265 case AES_KEYSIZE_256
:
266 cdesc
->control_data
.control0
|= CONTEXT_CONTROL_CRYPTO_ALG_AES256
;
269 dev_err(priv
->dev
, "aes keysize not supported: %u\n",
274 ctrl_size
= ctx
->key_len
/ sizeof(u32
);
276 /* Take in account the ipad+opad digests */
277 ctrl_size
+= ctx
->state_sz
/ sizeof(u32
) * 2;
278 cdesc
->control_data
.control0
|= CONTEXT_CONTROL_SIZE(ctrl_size
);
283 static int safexcel_handle_req_result(struct safexcel_crypto_priv
*priv
, int ring
,
284 struct crypto_async_request
*async
,
285 struct scatterlist
*src
,
286 struct scatterlist
*dst
,
287 unsigned int cryptlen
,
288 struct safexcel_cipher_req
*sreq
,
289 bool *should_complete
, int *ret
)
291 struct safexcel_result_desc
*rdesc
;
296 spin_lock_bh(&priv
->ring
[ring
].egress_lock
);
298 rdesc
= safexcel_ring_next_rptr(priv
, &priv
->ring
[ring
].rdr
);
301 "cipher: result: could not retrieve the result descriptor\n");
302 *ret
= PTR_ERR(rdesc
);
307 *ret
= safexcel_rdesc_check_errors(priv
, rdesc
);
310 } while (!rdesc
->last_seg
);
312 safexcel_complete(priv
, ring
);
313 spin_unlock_bh(&priv
->ring
[ring
].egress_lock
);
316 dma_unmap_sg(priv
->dev
, src
,
317 sg_nents_for_len(src
, cryptlen
),
320 dma_unmap_sg(priv
->dev
, src
,
321 sg_nents_for_len(src
, cryptlen
),
323 dma_unmap_sg(priv
->dev
, dst
,
324 sg_nents_for_len(dst
, cryptlen
),
328 *should_complete
= true;
333 static int safexcel_aes_send(struct crypto_async_request
*base
, int ring
,
334 struct safexcel_request
*request
,
335 struct safexcel_cipher_req
*sreq
,
336 struct scatterlist
*src
, struct scatterlist
*dst
,
337 unsigned int cryptlen
, unsigned int assoclen
,
338 unsigned int digestsize
, u8
*iv
, int *commands
,
341 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(base
->tfm
);
342 struct safexcel_crypto_priv
*priv
= ctx
->priv
;
343 struct safexcel_command_desc
*cdesc
;
344 struct safexcel_result_desc
*rdesc
;
345 struct scatterlist
*sg
;
346 unsigned int totlen
= cryptlen
+ assoclen
;
347 int nr_src
, nr_dst
, n_cdesc
= 0, n_rdesc
= 0, queued
= totlen
;
351 nr_src
= dma_map_sg(priv
->dev
, src
,
352 sg_nents_for_len(src
, totlen
),
358 nr_src
= dma_map_sg(priv
->dev
, src
,
359 sg_nents_for_len(src
, totlen
),
364 nr_dst
= dma_map_sg(priv
->dev
, dst
,
365 sg_nents_for_len(dst
, totlen
),
368 dma_unmap_sg(priv
->dev
, src
,
369 sg_nents_for_len(src
, totlen
),
375 memcpy(ctx
->base
.ctxr
->data
, ctx
->key
, ctx
->key_len
);
378 memcpy(ctx
->base
.ctxr
->data
+ ctx
->key_len
/ sizeof(u32
),
379 ctx
->ipad
, ctx
->state_sz
);
380 memcpy(ctx
->base
.ctxr
->data
+ (ctx
->key_len
+ ctx
->state_sz
) / sizeof(u32
),
381 ctx
->opad
, ctx
->state_sz
);
384 spin_lock_bh(&priv
->ring
[ring
].egress_lock
);
386 /* command descriptors */
387 for_each_sg(src
, sg
, nr_src
, i
) {
388 int len
= sg_dma_len(sg
);
390 /* Do not overflow the request */
391 if (queued
- len
< 0)
394 cdesc
= safexcel_add_cdesc(priv
, ring
, !n_cdesc
, !(queued
- len
),
395 sg_dma_address(sg
), len
, totlen
,
398 /* No space left in the command descriptor ring */
399 ret
= PTR_ERR(cdesc
);
405 safexcel_context_control(ctx
, base
, sreq
, cdesc
);
407 safexcel_aead_token(ctx
, iv
, cdesc
,
408 sreq
->direction
, cryptlen
,
409 assoclen
, digestsize
);
411 safexcel_skcipher_token(ctx
, iv
, cdesc
,
420 /* result descriptors */
421 for_each_sg(dst
, sg
, nr_dst
, i
) {
422 bool first
= !i
, last
= (i
== nr_dst
- 1);
423 u32 len
= sg_dma_len(sg
);
425 rdesc
= safexcel_add_rdesc(priv
, ring
, first
, last
,
426 sg_dma_address(sg
), len
);
428 /* No space left in the result descriptor ring */
429 ret
= PTR_ERR(rdesc
);
435 spin_unlock_bh(&priv
->ring
[ring
].egress_lock
);
444 for (i
= 0; i
< n_rdesc
; i
++)
445 safexcel_ring_rollback_wptr(priv
, &priv
->ring
[ring
].rdr
);
447 for (i
= 0; i
< n_cdesc
; i
++)
448 safexcel_ring_rollback_wptr(priv
, &priv
->ring
[ring
].cdr
);
450 spin_unlock_bh(&priv
->ring
[ring
].egress_lock
);
453 dma_unmap_sg(priv
->dev
, src
,
454 sg_nents_for_len(src
, totlen
),
457 dma_unmap_sg(priv
->dev
, src
,
458 sg_nents_for_len(src
, totlen
),
460 dma_unmap_sg(priv
->dev
, dst
,
461 sg_nents_for_len(dst
, totlen
),
468 static int safexcel_handle_inv_result(struct safexcel_crypto_priv
*priv
,
470 struct crypto_async_request
*base
,
471 bool *should_complete
, int *ret
)
473 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(base
->tfm
);
474 struct safexcel_result_desc
*rdesc
;
475 int ndesc
= 0, enq_ret
;
479 spin_lock_bh(&priv
->ring
[ring
].egress_lock
);
481 rdesc
= safexcel_ring_next_rptr(priv
, &priv
->ring
[ring
].rdr
);
484 "cipher: invalidate: could not retrieve the result descriptor\n");
485 *ret
= PTR_ERR(rdesc
);
489 if (rdesc
->result_data
.error_code
) {
490 dev_err(priv
->dev
, "cipher: invalidate: result descriptor error (%d)\n",
491 rdesc
->result_data
.error_code
);
496 } while (!rdesc
->last_seg
);
498 safexcel_complete(priv
, ring
);
499 spin_unlock_bh(&priv
->ring
[ring
].egress_lock
);
501 if (ctx
->base
.exit_inv
) {
502 dma_pool_free(priv
->context_pool
, ctx
->base
.ctxr
,
505 *should_complete
= true;
510 ring
= safexcel_select_ring(priv
);
511 ctx
->base
.ring
= ring
;
513 spin_lock_bh(&priv
->ring
[ring
].queue_lock
);
514 enq_ret
= crypto_enqueue_request(&priv
->ring
[ring
].queue
, base
);
515 spin_unlock_bh(&priv
->ring
[ring
].queue_lock
);
517 if (enq_ret
!= -EINPROGRESS
)
520 queue_work(priv
->ring
[ring
].workqueue
,
521 &priv
->ring
[ring
].work_data
.work
);
523 *should_complete
= false;
528 static int safexcel_skcipher_handle_result(struct safexcel_crypto_priv
*priv
,
530 struct crypto_async_request
*async
,
531 bool *should_complete
, int *ret
)
533 struct skcipher_request
*req
= skcipher_request_cast(async
);
534 struct safexcel_cipher_req
*sreq
= skcipher_request_ctx(req
);
537 if (sreq
->needs_inv
) {
538 sreq
->needs_inv
= false;
539 err
= safexcel_handle_inv_result(priv
, ring
, async
,
540 should_complete
, ret
);
542 err
= safexcel_handle_req_result(priv
, ring
, async
, req
->src
,
543 req
->dst
, req
->cryptlen
, sreq
,
544 should_complete
, ret
);
550 static int safexcel_aead_handle_result(struct safexcel_crypto_priv
*priv
,
552 struct crypto_async_request
*async
,
553 bool *should_complete
, int *ret
)
555 struct aead_request
*req
= aead_request_cast(async
);
556 struct crypto_aead
*tfm
= crypto_aead_reqtfm(req
);
557 struct safexcel_cipher_req
*sreq
= aead_request_ctx(req
);
560 if (sreq
->needs_inv
) {
561 sreq
->needs_inv
= false;
562 err
= safexcel_handle_inv_result(priv
, ring
, async
,
563 should_complete
, ret
);
565 err
= safexcel_handle_req_result(priv
, ring
, async
, req
->src
,
567 req
->cryptlen
+ crypto_aead_authsize(tfm
),
568 sreq
, should_complete
, ret
);
574 static int safexcel_cipher_send_inv(struct crypto_async_request
*base
,
575 int ring
, struct safexcel_request
*request
,
576 int *commands
, int *results
)
578 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(base
->tfm
);
579 struct safexcel_crypto_priv
*priv
= ctx
->priv
;
582 ret
= safexcel_invalidate_cache(base
, priv
, ctx
->base
.ctxr_dma
, ring
,
593 static int safexcel_skcipher_send(struct crypto_async_request
*async
, int ring
,
594 struct safexcel_request
*request
,
595 int *commands
, int *results
)
597 struct skcipher_request
*req
= skcipher_request_cast(async
);
598 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(req
->base
.tfm
);
599 struct safexcel_cipher_req
*sreq
= skcipher_request_ctx(req
);
600 struct safexcel_crypto_priv
*priv
= ctx
->priv
;
603 BUG_ON(priv
->version
== EIP97
&& sreq
->needs_inv
);
606 ret
= safexcel_cipher_send_inv(async
, ring
, request
, commands
,
609 ret
= safexcel_aes_send(async
, ring
, request
, sreq
, req
->src
,
610 req
->dst
, req
->cryptlen
, 0, 0, req
->iv
,
615 static int safexcel_aead_send(struct crypto_async_request
*async
, int ring
,
616 struct safexcel_request
*request
, int *commands
,
619 struct aead_request
*req
= aead_request_cast(async
);
620 struct crypto_aead
*tfm
= crypto_aead_reqtfm(req
);
621 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(req
->base
.tfm
);
622 struct safexcel_cipher_req
*sreq
= aead_request_ctx(req
);
623 struct safexcel_crypto_priv
*priv
= ctx
->priv
;
626 BUG_ON(priv
->version
== EIP97
&& sreq
->needs_inv
);
629 ret
= safexcel_cipher_send_inv(async
, ring
, request
, commands
,
632 ret
= safexcel_aes_send(async
, ring
, request
, sreq
, req
->src
,
633 req
->dst
, req
->cryptlen
, req
->assoclen
,
634 crypto_aead_authsize(tfm
), req
->iv
,
639 static int safexcel_cipher_exit_inv(struct crypto_tfm
*tfm
,
640 struct crypto_async_request
*base
,
641 struct safexcel_cipher_req
*sreq
,
642 struct safexcel_inv_result
*result
)
644 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
645 struct safexcel_crypto_priv
*priv
= ctx
->priv
;
646 int ring
= ctx
->base
.ring
;
648 init_completion(&result
->completion
);
650 ctx
= crypto_tfm_ctx(base
->tfm
);
651 ctx
->base
.exit_inv
= true;
652 sreq
->needs_inv
= true;
654 spin_lock_bh(&priv
->ring
[ring
].queue_lock
);
655 crypto_enqueue_request(&priv
->ring
[ring
].queue
, base
);
656 spin_unlock_bh(&priv
->ring
[ring
].queue_lock
);
658 queue_work(priv
->ring
[ring
].workqueue
,
659 &priv
->ring
[ring
].work_data
.work
);
661 wait_for_completion(&result
->completion
);
665 "cipher: sync: invalidate: completion error %d\n",
667 return result
->error
;
673 static int safexcel_skcipher_exit_inv(struct crypto_tfm
*tfm
)
675 EIP197_REQUEST_ON_STACK(req
, skcipher
, EIP197_SKCIPHER_REQ_SIZE
);
676 struct safexcel_cipher_req
*sreq
= skcipher_request_ctx(req
);
677 struct safexcel_inv_result result
= {};
679 memset(req
, 0, sizeof(struct skcipher_request
));
681 skcipher_request_set_callback(req
, CRYPTO_TFM_REQ_MAY_BACKLOG
,
682 safexcel_inv_complete
, &result
);
683 skcipher_request_set_tfm(req
, __crypto_skcipher_cast(tfm
));
685 return safexcel_cipher_exit_inv(tfm
, &req
->base
, sreq
, &result
);
688 static int safexcel_aead_exit_inv(struct crypto_tfm
*tfm
)
690 EIP197_REQUEST_ON_STACK(req
, aead
, EIP197_AEAD_REQ_SIZE
);
691 struct safexcel_cipher_req
*sreq
= aead_request_ctx(req
);
692 struct safexcel_inv_result result
= {};
694 memset(req
, 0, sizeof(struct aead_request
));
696 aead_request_set_callback(req
, CRYPTO_TFM_REQ_MAY_BACKLOG
,
697 safexcel_inv_complete
, &result
);
698 aead_request_set_tfm(req
, __crypto_aead_cast(tfm
));
700 return safexcel_cipher_exit_inv(tfm
, &req
->base
, sreq
, &result
);
703 static int safexcel_aes(struct crypto_async_request
*base
,
704 struct safexcel_cipher_req
*sreq
,
705 enum safexcel_cipher_direction dir
, u32 mode
)
707 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(base
->tfm
);
708 struct safexcel_crypto_priv
*priv
= ctx
->priv
;
711 sreq
->needs_inv
= false;
712 sreq
->direction
= dir
;
715 if (ctx
->base
.ctxr
) {
716 if (priv
->version
== EIP197
&& ctx
->base
.needs_inv
) {
717 sreq
->needs_inv
= true;
718 ctx
->base
.needs_inv
= false;
721 ctx
->base
.ring
= safexcel_select_ring(priv
);
722 ctx
->base
.ctxr
= dma_pool_zalloc(priv
->context_pool
,
723 EIP197_GFP_FLAGS(*base
),
724 &ctx
->base
.ctxr_dma
);
729 ring
= ctx
->base
.ring
;
731 spin_lock_bh(&priv
->ring
[ring
].queue_lock
);
732 ret
= crypto_enqueue_request(&priv
->ring
[ring
].queue
, base
);
733 spin_unlock_bh(&priv
->ring
[ring
].queue_lock
);
735 queue_work(priv
->ring
[ring
].workqueue
,
736 &priv
->ring
[ring
].work_data
.work
);
741 static int safexcel_ecb_aes_encrypt(struct skcipher_request
*req
)
743 return safexcel_aes(&req
->base
, skcipher_request_ctx(req
),
744 SAFEXCEL_ENCRYPT
, CONTEXT_CONTROL_CRYPTO_MODE_ECB
);
747 static int safexcel_ecb_aes_decrypt(struct skcipher_request
*req
)
749 return safexcel_aes(&req
->base
, skcipher_request_ctx(req
),
750 SAFEXCEL_DECRYPT
, CONTEXT_CONTROL_CRYPTO_MODE_ECB
);
753 static int safexcel_skcipher_cra_init(struct crypto_tfm
*tfm
)
755 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
756 struct safexcel_alg_template
*tmpl
=
757 container_of(tfm
->__crt_alg
, struct safexcel_alg_template
,
760 crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm
),
761 sizeof(struct safexcel_cipher_req
));
763 ctx
->priv
= tmpl
->priv
;
765 ctx
->base
.send
= safexcel_skcipher_send
;
766 ctx
->base
.handle_result
= safexcel_skcipher_handle_result
;
770 static int safexcel_cipher_cra_exit(struct crypto_tfm
*tfm
)
772 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
774 memzero_explicit(ctx
->key
, sizeof(ctx
->key
));
776 /* context not allocated, skip invalidation */
780 memzero_explicit(ctx
->base
.ctxr
->data
, sizeof(ctx
->base
.ctxr
->data
));
784 static void safexcel_skcipher_cra_exit(struct crypto_tfm
*tfm
)
786 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
787 struct safexcel_crypto_priv
*priv
= ctx
->priv
;
790 if (safexcel_cipher_cra_exit(tfm
))
793 if (priv
->version
== EIP197
) {
794 ret
= safexcel_skcipher_exit_inv(tfm
);
796 dev_warn(priv
->dev
, "skcipher: invalidation error %d\n",
799 dma_pool_free(priv
->context_pool
, ctx
->base
.ctxr
,
804 static void safexcel_aead_cra_exit(struct crypto_tfm
*tfm
)
806 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
807 struct safexcel_crypto_priv
*priv
= ctx
->priv
;
810 if (safexcel_cipher_cra_exit(tfm
))
813 if (priv
->version
== EIP197
) {
814 ret
= safexcel_aead_exit_inv(tfm
);
816 dev_warn(priv
->dev
, "aead: invalidation error %d\n",
819 dma_pool_free(priv
->context_pool
, ctx
->base
.ctxr
,
824 struct safexcel_alg_template safexcel_alg_ecb_aes
= {
825 .type
= SAFEXCEL_ALG_TYPE_SKCIPHER
,
827 .setkey
= safexcel_skcipher_aes_setkey
,
828 .encrypt
= safexcel_ecb_aes_encrypt
,
829 .decrypt
= safexcel_ecb_aes_decrypt
,
830 .min_keysize
= AES_MIN_KEY_SIZE
,
831 .max_keysize
= AES_MAX_KEY_SIZE
,
833 .cra_name
= "ecb(aes)",
834 .cra_driver_name
= "safexcel-ecb-aes",
836 .cra_flags
= CRYPTO_ALG_TYPE_SKCIPHER
| CRYPTO_ALG_ASYNC
|
837 CRYPTO_ALG_KERN_DRIVER_ONLY
,
838 .cra_blocksize
= AES_BLOCK_SIZE
,
839 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
841 .cra_init
= safexcel_skcipher_cra_init
,
842 .cra_exit
= safexcel_skcipher_cra_exit
,
843 .cra_module
= THIS_MODULE
,
848 static int safexcel_cbc_aes_encrypt(struct skcipher_request
*req
)
850 return safexcel_aes(&req
->base
, skcipher_request_ctx(req
),
851 SAFEXCEL_ENCRYPT
, CONTEXT_CONTROL_CRYPTO_MODE_CBC
);
854 static int safexcel_cbc_aes_decrypt(struct skcipher_request
*req
)
856 return safexcel_aes(&req
->base
, skcipher_request_ctx(req
),
857 SAFEXCEL_DECRYPT
, CONTEXT_CONTROL_CRYPTO_MODE_CBC
);
860 struct safexcel_alg_template safexcel_alg_cbc_aes
= {
861 .type
= SAFEXCEL_ALG_TYPE_SKCIPHER
,
863 .setkey
= safexcel_skcipher_aes_setkey
,
864 .encrypt
= safexcel_cbc_aes_encrypt
,
865 .decrypt
= safexcel_cbc_aes_decrypt
,
866 .min_keysize
= AES_MIN_KEY_SIZE
,
867 .max_keysize
= AES_MAX_KEY_SIZE
,
868 .ivsize
= AES_BLOCK_SIZE
,
870 .cra_name
= "cbc(aes)",
871 .cra_driver_name
= "safexcel-cbc-aes",
873 .cra_flags
= CRYPTO_ALG_TYPE_SKCIPHER
| CRYPTO_ALG_ASYNC
|
874 CRYPTO_ALG_KERN_DRIVER_ONLY
,
875 .cra_blocksize
= AES_BLOCK_SIZE
,
876 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
878 .cra_init
= safexcel_skcipher_cra_init
,
879 .cra_exit
= safexcel_skcipher_cra_exit
,
880 .cra_module
= THIS_MODULE
,
885 static int safexcel_aead_encrypt(struct aead_request
*req
)
887 struct safexcel_cipher_req
*creq
= aead_request_ctx(req
);
889 return safexcel_aes(&req
->base
, creq
, SAFEXCEL_ENCRYPT
,
890 CONTEXT_CONTROL_CRYPTO_MODE_CBC
);
893 static int safexcel_aead_decrypt(struct aead_request
*req
)
895 struct safexcel_cipher_req
*creq
= aead_request_ctx(req
);
897 return safexcel_aes(&req
->base
, creq
, SAFEXCEL_DECRYPT
,
898 CONTEXT_CONTROL_CRYPTO_MODE_CBC
);
901 static int safexcel_aead_cra_init(struct crypto_tfm
*tfm
)
903 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
904 struct safexcel_alg_template
*tmpl
=
905 container_of(tfm
->__crt_alg
, struct safexcel_alg_template
,
908 crypto_aead_set_reqsize(__crypto_aead_cast(tfm
),
909 sizeof(struct safexcel_cipher_req
));
911 ctx
->priv
= tmpl
->priv
;
914 ctx
->base
.send
= safexcel_aead_send
;
915 ctx
->base
.handle_result
= safexcel_aead_handle_result
;
919 static int safexcel_aead_sha256_cra_init(struct crypto_tfm
*tfm
)
921 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
923 safexcel_aead_cra_init(tfm
);
924 ctx
->alg
= CONTEXT_CONTROL_CRYPTO_ALG_SHA256
;
925 ctx
->state_sz
= SHA256_DIGEST_SIZE
;
929 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes
= {
930 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
932 .setkey
= safexcel_aead_aes_setkey
,
933 .encrypt
= safexcel_aead_encrypt
,
934 .decrypt
= safexcel_aead_decrypt
,
935 .ivsize
= AES_BLOCK_SIZE
,
936 .maxauthsize
= SHA256_DIGEST_SIZE
,
938 .cra_name
= "authenc(hmac(sha256),cbc(aes))",
939 .cra_driver_name
= "safexcel-authenc-hmac-sha256-cbc-aes",
941 .cra_flags
= CRYPTO_ALG_TYPE_AEAD
| CRYPTO_ALG_ASYNC
|
942 CRYPTO_ALG_KERN_DRIVER_ONLY
,
943 .cra_blocksize
= AES_BLOCK_SIZE
,
944 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
946 .cra_init
= safexcel_aead_sha256_cra_init
,
947 .cra_exit
= safexcel_aead_cra_exit
,
948 .cra_module
= THIS_MODULE
,
953 static int safexcel_aead_sha224_cra_init(struct crypto_tfm
*tfm
)
955 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
957 safexcel_aead_cra_init(tfm
);
958 ctx
->alg
= CONTEXT_CONTROL_CRYPTO_ALG_SHA224
;
959 ctx
->state_sz
= SHA256_DIGEST_SIZE
;
963 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes
= {
964 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
966 .setkey
= safexcel_aead_aes_setkey
,
967 .encrypt
= safexcel_aead_encrypt
,
968 .decrypt
= safexcel_aead_decrypt
,
969 .ivsize
= AES_BLOCK_SIZE
,
970 .maxauthsize
= SHA224_DIGEST_SIZE
,
972 .cra_name
= "authenc(hmac(sha224),cbc(aes))",
973 .cra_driver_name
= "safexcel-authenc-hmac-sha224-cbc-aes",
975 .cra_flags
= CRYPTO_ALG_TYPE_AEAD
| CRYPTO_ALG_ASYNC
|
976 CRYPTO_ALG_KERN_DRIVER_ONLY
,
977 .cra_blocksize
= AES_BLOCK_SIZE
,
978 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
980 .cra_init
= safexcel_aead_sha224_cra_init
,
981 .cra_exit
= safexcel_aead_cra_exit
,
982 .cra_module
= THIS_MODULE
,