1 // SPDX-License-Identifier: GPL-2.0
3 * K3 SA2UL crypto accelerator driver
5 * Copyright (C) 2018-2020 Texas Instruments Incorporated - http://www.ti.com
11 #include <linux/clk.h>
12 #include <linux/dma-mapping.h>
13 #include <linux/dmaengine.h>
14 #include <linux/dmapool.h>
15 #include <linux/kernel.h>
16 #include <linux/module.h>
17 #include <linux/of_device.h>
18 #include <linux/platform_device.h>
19 #include <linux/pm_runtime.h>
21 #include <crypto/aes.h>
22 #include <crypto/authenc.h>
23 #include <crypto/des.h>
24 #include <crypto/internal/aead.h>
25 #include <crypto/internal/hash.h>
26 #include <crypto/internal/skcipher.h>
27 #include <crypto/scatterwalk.h>
28 #include <crypto/sha1.h>
29 #include <crypto/sha2.h>
33 /* Byte offset for key in encryption security context */
34 #define SC_ENC_KEY_OFFSET (1 + 27 + 4)
35 /* Byte offset for Aux-1 in encryption security context */
36 #define SC_ENC_AUX1_OFFSET (1 + 27 + 4 + 32)
38 #define SA_CMDL_UPD_ENC 0x0001
39 #define SA_CMDL_UPD_AUTH 0x0002
40 #define SA_CMDL_UPD_ENC_IV 0x0004
41 #define SA_CMDL_UPD_AUTH_IV 0x0008
42 #define SA_CMDL_UPD_AUX_KEY 0x0010
44 #define SA_AUTH_SUBKEY_LEN 16
45 #define SA_CMDL_PAYLOAD_LENGTH_MASK 0xFFFF
46 #define SA_CMDL_SOP_BYPASS_LEN_MASK 0xFF000000
48 #define MODE_CONTROL_BYTES 27
49 #define SA_HASH_PROCESSING 0
50 #define SA_CRYPTO_PROCESSING 0
51 #define SA_UPLOAD_HASH_TO_TLR BIT(6)
53 #define SA_SW0_FLAGS_MASK 0xF0000
54 #define SA_SW0_CMDL_INFO_MASK 0x1F00000
55 #define SA_SW0_CMDL_PRESENT BIT(4)
56 #define SA_SW0_ENG_ID_MASK 0x3E000000
57 #define SA_SW0_DEST_INFO_PRESENT BIT(30)
58 #define SA_SW2_EGRESS_LENGTH 0xFF000000
59 #define SA_BASIC_HASH 0x10
61 #define SHA256_DIGEST_WORDS 8
62 /* Make 32-bit word from 4 bytes */
63 #define SA_MK_U32(b0, b1, b2, b3) (((b0) << 24) | ((b1) << 16) | \
66 /* size of SCCTL structure in bytes */
67 #define SA_SCCTL_SZ 16
69 /* Max Authentication tag size */
70 #define SA_MAX_AUTH_TAG_SZ 64
80 SA_ALG_AUTHENC_SHA1_AES
,
81 SA_ALG_AUTHENC_SHA256_AES
,
84 struct sa_match_data
{
88 bool skip_engine_control
;
91 static struct device
*sa_k3_dev
;
94 * struct sa_cmdl_cfg - Command label configuration descriptor
95 * @aalg: authentication algorithm ID
96 * @enc_eng_id: Encryption Engine ID supported by the SA hardware
97 * @auth_eng_id: Authentication Engine ID
98 * @iv_size: Initialization Vector size
99 * @akey: Authentication key
100 * @akey_len: Authentication key length
101 * @enc: True, if this is an encode request
114 * struct algo_data - Crypto algorithm specific data
115 * @enc_eng: Encryption engine info structure
116 * @auth_eng: Authentication engine info structure
117 * @auth_ctrl: Authentication control word
118 * @hash_size: Size of digest
119 * @iv_idx: iv index in psdata
120 * @iv_out_size: iv out size
121 * @ealg_id: Encryption Algorithm ID
122 * @aalg_id: Authentication algorithm ID
123 * @mci_enc: Mode Control Instruction for Encryption algorithm
124 * @mci_dec: Mode Control Instruction for Decryption
125 * @inv_key: Whether the encryption algorithm demands key inversion
126 * @ctx: Pointer to the algorithm context
127 * @keyed_mac: Whether the authentication algorithm has key
128 * @prep_iopad: Function pointer to generate intermediate ipad/opad
131 struct sa_eng_info enc_eng
;
132 struct sa_eng_info auth_eng
;
142 struct sa_tfm_ctx
*ctx
;
144 void (*prep_iopad
)(struct algo_data
*algo
, const u8
*key
,
145 u16 key_sz
, __be32
*ipad
, __be32
*opad
);
149 * struct sa_alg_tmpl: A generic template encompassing crypto/aead algorithms
150 * @type: Type of the crypto algorithm.
151 * @alg: Union of crypto algorithm definitions.
152 * @registered: Flag indicating if the crypto algorithm is already registered
155 u32 type
; /* CRYPTO_ALG_TYPE from <linux/crypto.h> */
157 struct skcipher_alg skcipher
;
158 struct ahash_alg ahash
;
159 struct aead_alg aead
;
165 * struct sa_mapped_sg: scatterlist information for tx and rx
166 * @mapped: Set to true if the @sgt is mapped
167 * @dir: mapping direction used for @sgt
168 * @split_sg: Set if the sg is split and needs to be freed up
169 * @static_sg: Static scatterlist entry for overriding data
170 * @sgt: scatterlist table for DMA API use
172 struct sa_mapped_sg
{
174 enum dma_data_direction dir
;
175 struct scatterlist static_sg
;
176 struct scatterlist
*split_sg
;
180 * struct sa_rx_data: RX Packet miscellaneous data place holder
181 * @req: crypto request data pointer
182 * @ddev: pointer to the DMA device
183 * @tx_in: dma_async_tx_descriptor pointer for rx channel
184 * @mapped_sg: Information on tx (0) and rx (1) scatterlist DMA mapping
185 * @enc: Flag indicating either encryption or decryption
186 * @enc_iv_size: Initialisation vector size
187 * @iv_idx: Initialisation vector index
192 struct dma_async_tx_descriptor
*tx_in
;
193 struct sa_mapped_sg mapped_sg
[2];
200 * struct sa_req: SA request definition
201 * @dev: device for the request
202 * @size: total data to the xmitted via DMA
203 * @enc_offset: offset of cipher data
204 * @enc_size: data to be passed to cipher engine
206 * @auth_offset: offset of the authentication data
207 * @auth_size: size of the authentication data
208 * @auth_iv: authentication IV
209 * @type: algorithm type for the request
210 * @cmdl: command label pointer
211 * @base: pointer to the base request
212 * @ctx: pointer to the algorithm context data
213 * @enc: true if this is an encode request
215 * @dst: destination data
216 * @callback: DMA callback for the request
217 * @mdata_size: metadata size passed to DMA
230 struct crypto_async_request
*base
;
231 struct sa_tfm_ctx
*ctx
;
233 struct scatterlist
*src
;
234 struct scatterlist
*dst
;
235 dma_async_tx_callback callback
;
240 * Mode Control Instructions for various Key lengths 128, 192, 256
241 * For CBC (Cipher Block Chaining) mode for encryption
243 static u8 mci_cbc_enc_array
[3][MODE_CONTROL_BYTES
] = {
244 { 0x61, 0x00, 0x00, 0x18, 0x88, 0x0a, 0xaa, 0x4b, 0x7e, 0x00,
245 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
246 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
247 { 0x61, 0x00, 0x00, 0x18, 0x88, 0x4a, 0xaa, 0x4b, 0x7e, 0x00,
248 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
249 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
250 { 0x61, 0x00, 0x00, 0x18, 0x88, 0x8a, 0xaa, 0x4b, 0x7e, 0x00,
251 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
252 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
256 * Mode Control Instructions for various Key lengths 128, 192, 256
257 * For CBC (Cipher Block Chaining) mode for decryption
259 static u8 mci_cbc_dec_array
[3][MODE_CONTROL_BYTES
] = {
260 { 0x71, 0x00, 0x00, 0x80, 0x8a, 0xca, 0x98, 0xf4, 0x40, 0xc0,
261 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
262 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
263 { 0x71, 0x00, 0x00, 0x84, 0x8a, 0xca, 0x98, 0xf4, 0x40, 0xc0,
264 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
265 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
266 { 0x71, 0x00, 0x00, 0x88, 0x8a, 0xca, 0x98, 0xf4, 0x40, 0xc0,
267 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
268 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
272 * Mode Control Instructions for various Key lengths 128, 192, 256
273 * For CBC (Cipher Block Chaining) mode for encryption
275 static u8 mci_cbc_enc_no_iv_array
[3][MODE_CONTROL_BYTES
] = {
276 { 0x21, 0x00, 0x00, 0x18, 0x88, 0x0a, 0xaa, 0x4b, 0x7e, 0x00,
277 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
278 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
279 { 0x21, 0x00, 0x00, 0x18, 0x88, 0x4a, 0xaa, 0x4b, 0x7e, 0x00,
280 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
281 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
282 { 0x21, 0x00, 0x00, 0x18, 0x88, 0x8a, 0xaa, 0x4b, 0x7e, 0x00,
283 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
284 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
288 * Mode Control Instructions for various Key lengths 128, 192, 256
289 * For CBC (Cipher Block Chaining) mode for decryption
291 static u8 mci_cbc_dec_no_iv_array
[3][MODE_CONTROL_BYTES
] = {
292 { 0x31, 0x00, 0x00, 0x80, 0x8a, 0xca, 0x98, 0xf4, 0x40, 0xc0,
293 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
294 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
295 { 0x31, 0x00, 0x00, 0x84, 0x8a, 0xca, 0x98, 0xf4, 0x40, 0xc0,
296 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
297 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
298 { 0x31, 0x00, 0x00, 0x88, 0x8a, 0xca, 0x98, 0xf4, 0x40, 0xc0,
299 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
300 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
304 * Mode Control Instructions for various Key lengths 128, 192, 256
305 * For ECB (Electronic Code Book) mode for encryption
307 static u8 mci_ecb_enc_array
[3][27] = {
308 { 0x21, 0x00, 0x00, 0x80, 0x8a, 0x04, 0xb7, 0x90, 0x00, 0x00,
309 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
310 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
311 { 0x21, 0x00, 0x00, 0x84, 0x8a, 0x04, 0xb7, 0x90, 0x00, 0x00,
312 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
313 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
314 { 0x21, 0x00, 0x00, 0x88, 0x8a, 0x04, 0xb7, 0x90, 0x00, 0x00,
315 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
316 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
320 * Mode Control Instructions for various Key lengths 128, 192, 256
321 * For ECB (Electronic Code Book) mode for decryption
323 static u8 mci_ecb_dec_array
[3][27] = {
324 { 0x31, 0x00, 0x00, 0x80, 0x8a, 0x04, 0xb7, 0x90, 0x00, 0x00,
325 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
326 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
327 { 0x31, 0x00, 0x00, 0x84, 0x8a, 0x04, 0xb7, 0x90, 0x00, 0x00,
328 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
329 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
330 { 0x31, 0x00, 0x00, 0x88, 0x8a, 0x04, 0xb7, 0x90, 0x00, 0x00,
331 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
332 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
336 * Mode Control Instructions for DES algorithm
337 * For CBC (Cipher Block Chaining) mode and ECB mode
338 * encryption and for decryption respectively
340 static u8 mci_cbc_3des_enc_array
[MODE_CONTROL_BYTES
] = {
341 0x60, 0x00, 0x00, 0x18, 0x88, 0x52, 0xaa, 0x4b, 0x7e, 0x00, 0x00, 0x00,
342 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
346 static u8 mci_cbc_3des_dec_array
[MODE_CONTROL_BYTES
] = {
347 0x70, 0x00, 0x00, 0x85, 0x0a, 0xca, 0x98, 0xf4, 0x40, 0xc0, 0x00, 0x00,
348 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
352 static u8 mci_ecb_3des_enc_array
[MODE_CONTROL_BYTES
] = {
353 0x20, 0x00, 0x00, 0x85, 0x0a, 0x04, 0xb7, 0x90, 0x00, 0x00, 0x00, 0x00,
354 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
358 static u8 mci_ecb_3des_dec_array
[MODE_CONTROL_BYTES
] = {
359 0x30, 0x00, 0x00, 0x85, 0x0a, 0x04, 0xb7, 0x90, 0x00, 0x00, 0x00, 0x00,
360 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
365 * Perform 16 byte or 128 bit swizzling
366 * The SA2UL Expects the security context to
367 * be in little Endian and the bus width is 128 bits or 16 bytes
368 * Hence swap 16 bytes at a time from higher to lower address
370 static void sa_swiz_128(u8
*in
, u16 len
)
375 for (i
= 0; i
< len
; i
+= 16) {
376 memcpy(data
, &in
[i
], 16);
377 for (j
= 0; j
< 16; j
++)
378 in
[i
+ j
] = data
[15 - j
];
382 /* Prepare the ipad and opad from key as per SHA algorithm step 1*/
383 static void prepare_kipad(u8
*k_ipad
, const u8
*key
, u16 key_sz
)
387 for (i
= 0; i
< key_sz
; i
++)
388 k_ipad
[i
] = key
[i
] ^ 0x36;
390 /* Instead of XOR with 0 */
391 for (; i
< SHA1_BLOCK_SIZE
; i
++)
395 static void prepare_kopad(u8
*k_opad
, const u8
*key
, u16 key_sz
)
399 for (i
= 0; i
< key_sz
; i
++)
400 k_opad
[i
] = key
[i
] ^ 0x5c;
402 /* Instead of XOR with 0 */
403 for (; i
< SHA1_BLOCK_SIZE
; i
++)
407 static void sa_export_shash(void *state
, struct shash_desc
*hash
,
408 int digest_size
, __be32
*out
)
410 struct sha1_state
*sha1
;
411 struct sha256_state
*sha256
;
414 switch (digest_size
) {
415 case SHA1_DIGEST_SIZE
:
417 result
= sha1
->state
;
419 case SHA256_DIGEST_SIZE
:
421 result
= sha256
->state
;
424 dev_err(sa_k3_dev
, "%s: bad digest_size=%d\n", __func__
,
429 crypto_shash_export(hash
, state
);
431 cpu_to_be32_array(out
, result
, digest_size
/ 4);
434 static void sa_prepare_iopads(struct algo_data
*data
, const u8
*key
,
435 u16 key_sz
, __be32
*ipad
, __be32
*opad
)
437 SHASH_DESC_ON_STACK(shash
, data
->ctx
->shash
);
438 int block_size
= crypto_shash_blocksize(data
->ctx
->shash
);
439 int digest_size
= crypto_shash_digestsize(data
->ctx
->shash
);
441 struct sha1_state sha1
;
442 struct sha256_state sha256
;
443 u8 k_pad
[SHA1_BLOCK_SIZE
];
446 shash
->tfm
= data
->ctx
->shash
;
448 prepare_kipad(sha
.k_pad
, key
, key_sz
);
450 crypto_shash_init(shash
);
451 crypto_shash_update(shash
, sha
.k_pad
, block_size
);
452 sa_export_shash(&sha
, shash
, digest_size
, ipad
);
454 prepare_kopad(sha
.k_pad
, key
, key_sz
);
456 crypto_shash_init(shash
);
457 crypto_shash_update(shash
, sha
.k_pad
, block_size
);
459 sa_export_shash(&sha
, shash
, digest_size
, opad
);
461 memzero_explicit(&sha
, sizeof(sha
));
464 /* Derive the inverse key used in AES-CBC decryption operation */
465 static inline int sa_aes_inv_key(u8
*inv_key
, const u8
*key
, u16 key_sz
)
467 struct crypto_aes_ctx ctx
;
470 if (aes_expandkey(&ctx
, key
, key_sz
)) {
471 dev_err(sa_k3_dev
, "%s: bad key len(%d)\n", __func__
, key_sz
);
475 /* work around to get the right inverse for AES_KEYSIZE_192 size keys */
476 if (key_sz
== AES_KEYSIZE_192
) {
477 ctx
.key_enc
[52] = ctx
.key_enc
[51] ^ ctx
.key_enc
[46];
478 ctx
.key_enc
[53] = ctx
.key_enc
[52] ^ ctx
.key_enc
[47];
481 /* Based crypto_aes_expand_key logic */
483 case AES_KEYSIZE_128
:
484 case AES_KEYSIZE_192
:
485 key_pos
= key_sz
+ 24;
488 case AES_KEYSIZE_256
:
489 key_pos
= key_sz
+ 24 - 4;
493 dev_err(sa_k3_dev
, "%s: bad key len(%d)\n", __func__
, key_sz
);
497 memcpy(inv_key
, &ctx
.key_enc
[key_pos
], key_sz
);
501 /* Set Security context for the encryption engine */
502 static int sa_set_sc_enc(struct algo_data
*ad
, const u8
*key
, u16 key_sz
,
505 const u8
*mci
= NULL
;
507 /* Set Encryption mode selector to crypto processing */
508 sc_buf
[0] = SA_CRYPTO_PROCESSING
;
514 /* Set the mode control instructions in security context */
516 memcpy(&sc_buf
[1], mci
, MODE_CONTROL_BYTES
);
518 /* For AES-CBC decryption get the inverse key */
519 if (ad
->inv_key
&& !enc
) {
520 if (sa_aes_inv_key(&sc_buf
[SC_ENC_KEY_OFFSET
], key
, key_sz
))
522 /* For all other cases: key is used */
524 memcpy(&sc_buf
[SC_ENC_KEY_OFFSET
], key
, key_sz
);
530 /* Set Security context for the authentication engine */
531 static void sa_set_sc_auth(struct algo_data
*ad
, const u8
*key
, u16 key_sz
,
534 __be32
*ipad
= (void *)(sc_buf
+ 32);
535 __be32
*opad
= (void *)(sc_buf
+ 64);
537 /* Set Authentication mode selector to hash processing */
538 sc_buf
[0] = SA_HASH_PROCESSING
;
539 /* Auth SW ctrl word: bit[6]=1 (upload computed hash to TLR section) */
540 sc_buf
[1] = SA_UPLOAD_HASH_TO_TLR
;
541 sc_buf
[1] |= ad
->auth_ctrl
;
543 /* Copy the keys or ipad/opad */
545 ad
->prep_iopad(ad
, key
, key_sz
, ipad
, opad
);
548 sc_buf
[1] |= SA_BASIC_HASH
;
552 static inline void sa_copy_iv(__be32
*out
, const u8
*iv
, bool size16
)
556 for (j
= 0; j
< ((size16
) ? 4 : 2); j
++) {
557 *out
= cpu_to_be32(*((u32
*)iv
));
563 /* Format general command label */
564 static int sa_format_cmdl_gen(struct sa_cmdl_cfg
*cfg
, u8
*cmdl
,
565 struct sa_cmdl_upd_info
*upd_info
)
567 u8 enc_offset
= 0, auth_offset
= 0, total
= 0;
568 u8 enc_next_eng
= SA_ENG_ID_OUTPORT2
;
569 u8 auth_next_eng
= SA_ENG_ID_OUTPORT2
;
570 u32
*word_ptr
= (u32
*)cmdl
;
573 /* Clear the command label */
574 memzero_explicit(cmdl
, (SA_MAX_CMDL_WORDS
* sizeof(u32
)));
576 /* Iniialize the command update structure */
577 memzero_explicit(upd_info
, sizeof(*upd_info
));
579 if (cfg
->enc_eng_id
&& cfg
->auth_eng_id
) {
581 auth_offset
= SA_CMDL_HEADER_SIZE_BYTES
;
582 enc_next_eng
= cfg
->auth_eng_id
;
585 auth_offset
+= cfg
->iv_size
;
587 enc_offset
= SA_CMDL_HEADER_SIZE_BYTES
;
588 auth_next_eng
= cfg
->enc_eng_id
;
592 if (cfg
->enc_eng_id
) {
593 upd_info
->flags
|= SA_CMDL_UPD_ENC
;
594 upd_info
->enc_size
.index
= enc_offset
>> 2;
595 upd_info
->enc_offset
.index
= upd_info
->enc_size
.index
+ 1;
596 /* Encryption command label */
597 cmdl
[enc_offset
+ SA_CMDL_OFFSET_NESC
] = enc_next_eng
;
599 /* Encryption modes requiring IV */
601 upd_info
->flags
|= SA_CMDL_UPD_ENC_IV
;
602 upd_info
->enc_iv
.index
=
603 (enc_offset
+ SA_CMDL_HEADER_SIZE_BYTES
) >> 2;
604 upd_info
->enc_iv
.size
= cfg
->iv_size
;
606 cmdl
[enc_offset
+ SA_CMDL_OFFSET_LABEL_LEN
] =
607 SA_CMDL_HEADER_SIZE_BYTES
+ cfg
->iv_size
;
609 cmdl
[enc_offset
+ SA_CMDL_OFFSET_OPTION_CTRL1
] =
610 (SA_CTX_ENC_AUX2_OFFSET
| (cfg
->iv_size
>> 3));
611 total
+= SA_CMDL_HEADER_SIZE_BYTES
+ cfg
->iv_size
;
613 cmdl
[enc_offset
+ SA_CMDL_OFFSET_LABEL_LEN
] =
614 SA_CMDL_HEADER_SIZE_BYTES
;
615 total
+= SA_CMDL_HEADER_SIZE_BYTES
;
619 if (cfg
->auth_eng_id
) {
620 upd_info
->flags
|= SA_CMDL_UPD_AUTH
;
621 upd_info
->auth_size
.index
= auth_offset
>> 2;
622 upd_info
->auth_offset
.index
= upd_info
->auth_size
.index
+ 1;
623 cmdl
[auth_offset
+ SA_CMDL_OFFSET_NESC
] = auth_next_eng
;
624 cmdl
[auth_offset
+ SA_CMDL_OFFSET_LABEL_LEN
] =
625 SA_CMDL_HEADER_SIZE_BYTES
;
626 total
+= SA_CMDL_HEADER_SIZE_BYTES
;
629 total
= roundup(total
, 8);
631 for (i
= 0; i
< total
/ 4; i
++)
632 word_ptr
[i
] = swab32(word_ptr
[i
]);
637 /* Update Command label */
638 static inline void sa_update_cmdl(struct sa_req
*req
, u32
*cmdl
,
639 struct sa_cmdl_upd_info
*upd_info
)
643 if (likely(upd_info
->flags
& SA_CMDL_UPD_ENC
)) {
644 cmdl
[upd_info
->enc_size
.index
] &= ~SA_CMDL_PAYLOAD_LENGTH_MASK
;
645 cmdl
[upd_info
->enc_size
.index
] |= req
->enc_size
;
646 cmdl
[upd_info
->enc_offset
.index
] &=
647 ~SA_CMDL_SOP_BYPASS_LEN_MASK
;
648 cmdl
[upd_info
->enc_offset
.index
] |=
649 ((u32
)req
->enc_offset
<<
650 __ffs(SA_CMDL_SOP_BYPASS_LEN_MASK
));
652 if (likely(upd_info
->flags
& SA_CMDL_UPD_ENC_IV
)) {
653 __be32
*data
= (__be32
*)&cmdl
[upd_info
->enc_iv
.index
];
654 u32
*enc_iv
= (u32
*)req
->enc_iv
;
656 for (j
= 0; i
< upd_info
->enc_iv
.size
; i
+= 4, j
++) {
657 data
[j
] = cpu_to_be32(*enc_iv
);
663 if (likely(upd_info
->flags
& SA_CMDL_UPD_AUTH
)) {
664 cmdl
[upd_info
->auth_size
.index
] &= ~SA_CMDL_PAYLOAD_LENGTH_MASK
;
665 cmdl
[upd_info
->auth_size
.index
] |= req
->auth_size
;
666 cmdl
[upd_info
->auth_offset
.index
] &=
667 ~SA_CMDL_SOP_BYPASS_LEN_MASK
;
668 cmdl
[upd_info
->auth_offset
.index
] |=
669 ((u32
)req
->auth_offset
<<
670 __ffs(SA_CMDL_SOP_BYPASS_LEN_MASK
));
671 if (upd_info
->flags
& SA_CMDL_UPD_AUTH_IV
) {
672 sa_copy_iv((void *)&cmdl
[upd_info
->auth_iv
.index
],
674 (upd_info
->auth_iv
.size
> 8));
676 if (upd_info
->flags
& SA_CMDL_UPD_AUX_KEY
) {
677 int offset
= (req
->auth_size
& 0xF) ? 4 : 0;
679 memcpy(&cmdl
[upd_info
->aux_key_info
.index
],
680 &upd_info
->aux_key
[offset
], 16);
685 /* Format SWINFO words to be sent to SA */
687 void sa_set_swinfo(u8 eng_id
, u16 sc_id
, dma_addr_t sc_phys
,
688 u8 cmdl_present
, u8 cmdl_offset
, u8 flags
,
689 u8 hash_size
, u32
*swinfo
)
692 swinfo
[0] |= (flags
<< __ffs(SA_SW0_FLAGS_MASK
));
693 if (likely(cmdl_present
))
694 swinfo
[0] |= ((cmdl_offset
| SA_SW0_CMDL_PRESENT
) <<
695 __ffs(SA_SW0_CMDL_INFO_MASK
));
696 swinfo
[0] |= (eng_id
<< __ffs(SA_SW0_ENG_ID_MASK
));
698 swinfo
[0] |= SA_SW0_DEST_INFO_PRESENT
;
699 swinfo
[1] = (u32
)(sc_phys
& 0xFFFFFFFFULL
);
700 swinfo
[2] = (u32
)((sc_phys
& 0xFFFFFFFF00000000ULL
) >> 32);
701 swinfo
[2] |= (hash_size
<< __ffs(SA_SW2_EGRESS_LENGTH
));
704 /* Dump the security context */
705 static void sa_dump_sc(u8
*buf
, dma_addr_t dma_addr
)
708 dev_info(sa_k3_dev
, "Security context dump:: 0x%pad\n", &dma_addr
);
709 print_hex_dump(KERN_CONT
, "", DUMP_PREFIX_OFFSET
,
710 16, 1, buf
, SA_CTX_MAX_SZ
, false);
715 int sa_init_sc(struct sa_ctx_info
*ctx
, const struct sa_match_data
*match_data
,
716 const u8
*enc_key
, u16 enc_key_sz
,
717 const u8
*auth_key
, u16 auth_key_sz
,
718 struct algo_data
*ad
, u8 enc
, u32
*swinfo
)
720 int enc_sc_offset
= 0;
721 int auth_sc_offset
= 0;
722 u8
*sc_buf
= ctx
->sc
;
723 u16 sc_id
= ctx
->sc_id
;
726 memzero_explicit(sc_buf
, SA_CTX_MAX_SZ
);
728 if (ad
->auth_eng
.eng_id
) {
730 first_engine
= ad
->enc_eng
.eng_id
;
732 first_engine
= ad
->auth_eng
.eng_id
;
734 enc_sc_offset
= SA_CTX_PHP_PE_CTX_SZ
;
735 auth_sc_offset
= enc_sc_offset
+ ad
->enc_eng
.sc_size
;
736 sc_buf
[1] = SA_SCCTL_FE_AUTH_ENC
;
739 ad
->hash_size
= roundup(ad
->hash_size
, 8);
741 } else if (ad
->enc_eng
.eng_id
&& !ad
->auth_eng
.eng_id
) {
742 enc_sc_offset
= SA_CTX_PHP_PE_CTX_SZ
;
743 first_engine
= ad
->enc_eng
.eng_id
;
744 sc_buf
[1] = SA_SCCTL_FE_ENC
;
745 ad
->hash_size
= ad
->iv_out_size
;
748 /* SCCTL Owner info: 0=host, 1=CP_ACE */
749 sc_buf
[SA_CTX_SCCTL_OWNER_OFFSET
] = 0;
750 memcpy(&sc_buf
[2], &sc_id
, 2);
752 sc_buf
[5] = match_data
->priv_id
;
753 sc_buf
[6] = match_data
->priv
;
756 /* Prepare context for encryption engine */
757 if (ad
->enc_eng
.sc_size
) {
758 if (sa_set_sc_enc(ad
, enc_key
, enc_key_sz
, enc
,
759 &sc_buf
[enc_sc_offset
]))
763 /* Prepare context for authentication engine */
764 if (ad
->auth_eng
.sc_size
)
765 sa_set_sc_auth(ad
, auth_key
, auth_key_sz
,
766 &sc_buf
[auth_sc_offset
]);
768 /* Set the ownership of context to CP_ACE */
769 sc_buf
[SA_CTX_SCCTL_OWNER_OFFSET
] = 0x80;
771 /* swizzle the security context */
772 sa_swiz_128(sc_buf
, SA_CTX_MAX_SZ
);
774 sa_set_swinfo(first_engine
, ctx
->sc_id
, ctx
->sc_phys
, 1, 0,
775 SA_SW_INFO_FLAG_EVICT
, ad
->hash_size
, swinfo
);
777 sa_dump_sc(sc_buf
, ctx
->sc_phys
);
782 /* Free the per direction context memory */
783 static void sa_free_ctx_info(struct sa_ctx_info
*ctx
,
784 struct sa_crypto_data
*data
)
788 bn
= ctx
->sc_id
- data
->sc_id_start
;
789 spin_lock(&data
->scid_lock
);
790 __clear_bit(bn
, data
->ctx_bm
);
792 spin_unlock(&data
->scid_lock
);
795 dma_pool_free(data
->sc_pool
, ctx
->sc
, ctx
->sc_phys
);
800 static int sa_init_ctx_info(struct sa_ctx_info
*ctx
,
801 struct sa_crypto_data
*data
)
806 spin_lock(&data
->scid_lock
);
807 bn
= find_first_zero_bit(data
->ctx_bm
, SA_MAX_NUM_CTX
);
808 __set_bit(bn
, data
->ctx_bm
);
810 spin_unlock(&data
->scid_lock
);
812 ctx
->sc_id
= (u16
)(data
->sc_id_start
+ bn
);
814 ctx
->sc
= dma_pool_alloc(data
->sc_pool
, GFP_KERNEL
, &ctx
->sc_phys
);
816 dev_err(&data
->pdev
->dev
, "Failed to allocate SC memory\n");
824 spin_lock(&data
->scid_lock
);
825 __clear_bit(bn
, data
->ctx_bm
);
827 spin_unlock(&data
->scid_lock
);
832 static void sa_cipher_cra_exit(struct crypto_skcipher
*tfm
)
834 struct sa_tfm_ctx
*ctx
= crypto_skcipher_ctx(tfm
);
835 struct sa_crypto_data
*data
= dev_get_drvdata(sa_k3_dev
);
837 dev_dbg(sa_k3_dev
, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
838 __func__
, tfm
, ctx
->enc
.sc_id
, &ctx
->enc
.sc_phys
,
839 ctx
->dec
.sc_id
, &ctx
->dec
.sc_phys
);
841 sa_free_ctx_info(&ctx
->enc
, data
);
842 sa_free_ctx_info(&ctx
->dec
, data
);
844 crypto_free_skcipher(ctx
->fallback
.skcipher
);
847 static int sa_cipher_cra_init(struct crypto_skcipher
*tfm
)
849 struct sa_tfm_ctx
*ctx
= crypto_skcipher_ctx(tfm
);
850 struct sa_crypto_data
*data
= dev_get_drvdata(sa_k3_dev
);
851 const char *name
= crypto_tfm_alg_name(&tfm
->base
);
852 struct crypto_skcipher
*child
;
855 memzero_explicit(ctx
, sizeof(*ctx
));
856 ctx
->dev_data
= data
;
858 ret
= sa_init_ctx_info(&ctx
->enc
, data
);
861 ret
= sa_init_ctx_info(&ctx
->dec
, data
);
863 sa_free_ctx_info(&ctx
->enc
, data
);
867 child
= crypto_alloc_skcipher(name
, 0, CRYPTO_ALG_NEED_FALLBACK
);
870 dev_err(sa_k3_dev
, "Error allocating fallback algo %s\n", name
);
871 return PTR_ERR(child
);
874 ctx
->fallback
.skcipher
= child
;
875 crypto_skcipher_set_reqsize(tfm
, crypto_skcipher_reqsize(child
) +
876 sizeof(struct skcipher_request
));
878 dev_dbg(sa_k3_dev
, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
879 __func__
, tfm
, ctx
->enc
.sc_id
, &ctx
->enc
.sc_phys
,
880 ctx
->dec
.sc_id
, &ctx
->dec
.sc_phys
);
884 static int sa_cipher_setkey(struct crypto_skcipher
*tfm
, const u8
*key
,
885 unsigned int keylen
, struct algo_data
*ad
)
887 struct sa_tfm_ctx
*ctx
= crypto_skcipher_ctx(tfm
);
888 struct crypto_skcipher
*child
= ctx
->fallback
.skcipher
;
890 struct sa_cmdl_cfg cfg
;
893 if (keylen
!= AES_KEYSIZE_128
&& keylen
!= AES_KEYSIZE_192
&&
894 keylen
!= AES_KEYSIZE_256
)
897 ad
->enc_eng
.eng_id
= SA_ENG_ID_EM1
;
898 ad
->enc_eng
.sc_size
= SA_CTX_ENC_TYPE1_SZ
;
900 memzero_explicit(&cfg
, sizeof(cfg
));
901 cfg
.enc_eng_id
= ad
->enc_eng
.eng_id
;
902 cfg
.iv_size
= crypto_skcipher_ivsize(tfm
);
904 crypto_skcipher_clear_flags(child
, CRYPTO_TFM_REQ_MASK
);
905 crypto_skcipher_set_flags(child
, tfm
->base
.crt_flags
&
906 CRYPTO_TFM_REQ_MASK
);
907 ret
= crypto_skcipher_setkey(child
, key
, keylen
);
911 /* Setup Encryption Security Context & Command label template */
912 if (sa_init_sc(&ctx
->enc
, ctx
->dev_data
->match_data
, key
, keylen
, NULL
, 0,
913 ad
, 1, &ctx
->enc
.epib
[1]))
916 cmdl_len
= sa_format_cmdl_gen(&cfg
,
918 &ctx
->enc
.cmdl_upd_info
);
919 if (cmdl_len
<= 0 || (cmdl_len
> SA_MAX_CMDL_WORDS
* sizeof(u32
)))
922 ctx
->enc
.cmdl_size
= cmdl_len
;
924 /* Setup Decryption Security Context & Command label template */
925 if (sa_init_sc(&ctx
->dec
, ctx
->dev_data
->match_data
, key
, keylen
, NULL
, 0,
926 ad
, 0, &ctx
->dec
.epib
[1]))
929 cfg
.enc_eng_id
= ad
->enc_eng
.eng_id
;
930 cmdl_len
= sa_format_cmdl_gen(&cfg
, (u8
*)ctx
->dec
.cmdl
,
931 &ctx
->dec
.cmdl_upd_info
);
933 if (cmdl_len
<= 0 || (cmdl_len
> SA_MAX_CMDL_WORDS
* sizeof(u32
)))
936 ctx
->dec
.cmdl_size
= cmdl_len
;
937 ctx
->iv_idx
= ad
->iv_idx
;
942 dev_err(sa_k3_dev
, "%s: badkey\n", __func__
);
946 static int sa_aes_cbc_setkey(struct crypto_skcipher
*tfm
, const u8
*key
,
949 struct algo_data ad
= { 0 };
950 /* Convert the key size (16/24/32) to the key size index (0/1/2) */
951 int key_idx
= (keylen
>> 3) - 2;
956 ad
.mci_enc
= mci_cbc_enc_array
[key_idx
];
957 ad
.mci_dec
= mci_cbc_dec_array
[key_idx
];
959 ad
.ealg_id
= SA_EALG_ID_AES_CBC
;
963 return sa_cipher_setkey(tfm
, key
, keylen
, &ad
);
966 static int sa_aes_ecb_setkey(struct crypto_skcipher
*tfm
, const u8
*key
,
969 struct algo_data ad
= { 0 };
970 /* Convert the key size (16/24/32) to the key size index (0/1/2) */
971 int key_idx
= (keylen
>> 3) - 2;
976 ad
.mci_enc
= mci_ecb_enc_array
[key_idx
];
977 ad
.mci_dec
= mci_ecb_dec_array
[key_idx
];
979 ad
.ealg_id
= SA_EALG_ID_AES_ECB
;
981 return sa_cipher_setkey(tfm
, key
, keylen
, &ad
);
984 static int sa_3des_cbc_setkey(struct crypto_skcipher
*tfm
, const u8
*key
,
987 struct algo_data ad
= { 0 };
989 ad
.mci_enc
= mci_cbc_3des_enc_array
;
990 ad
.mci_dec
= mci_cbc_3des_dec_array
;
991 ad
.ealg_id
= SA_EALG_ID_3DES_CBC
;
995 return sa_cipher_setkey(tfm
, key
, keylen
, &ad
);
998 static int sa_3des_ecb_setkey(struct crypto_skcipher
*tfm
, const u8
*key
,
1001 struct algo_data ad
= { 0 };
1003 ad
.mci_enc
= mci_ecb_3des_enc_array
;
1004 ad
.mci_dec
= mci_ecb_3des_dec_array
;
1006 return sa_cipher_setkey(tfm
, key
, keylen
, &ad
);
1009 static void sa_sync_from_device(struct sa_rx_data
*rxd
)
1011 struct sg_table
*sgt
;
1013 if (rxd
->mapped_sg
[0].dir
== DMA_BIDIRECTIONAL
)
1014 sgt
= &rxd
->mapped_sg
[0].sgt
;
1016 sgt
= &rxd
->mapped_sg
[1].sgt
;
1018 dma_sync_sgtable_for_cpu(rxd
->ddev
, sgt
, DMA_FROM_DEVICE
);
1021 static void sa_free_sa_rx_data(struct sa_rx_data
*rxd
)
1025 for (i
= 0; i
< ARRAY_SIZE(rxd
->mapped_sg
); i
++) {
1026 struct sa_mapped_sg
*mapped_sg
= &rxd
->mapped_sg
[i
];
1028 if (mapped_sg
->mapped
) {
1029 dma_unmap_sgtable(rxd
->ddev
, &mapped_sg
->sgt
,
1031 kfree(mapped_sg
->split_sg
);
1038 static void sa_aes_dma_in_callback(void *data
)
1040 struct sa_rx_data
*rxd
= (struct sa_rx_data
*)data
;
1041 struct skcipher_request
*req
;
1047 sa_sync_from_device(rxd
);
1048 req
= container_of(rxd
->req
, struct skcipher_request
, base
);
1051 mdptr
= (__be32
*)dmaengine_desc_get_metadata_ptr(rxd
->tx_in
, &pl
,
1053 result
= (u32
*)req
->iv
;
1055 for (i
= 0; i
< (rxd
->enc_iv_size
/ 4); i
++)
1056 result
[i
] = be32_to_cpu(mdptr
[i
+ rxd
->iv_idx
]);
1059 sa_free_sa_rx_data(rxd
);
1061 skcipher_request_complete(req
, 0);
1065 sa_prepare_tx_desc(u32
*mdptr
, u32 pslen
, u32
*psdata
, u32 epiblen
, u32
*epib
)
1070 for (out
= mdptr
, in
= epib
, i
= 0; i
< epiblen
/ sizeof(u32
); i
++)
1073 mdptr
[4] = (0xFFFF << 16);
1074 for (out
= &mdptr
[5], in
= psdata
, i
= 0;
1075 i
< pslen
/ sizeof(u32
); i
++)
1079 static int sa_run(struct sa_req
*req
)
1081 struct sa_rx_data
*rxd
;
1083 u32 cmdl
[SA_MAX_CMDL_WORDS
];
1084 struct sa_crypto_data
*pdata
= dev_get_drvdata(sa_k3_dev
);
1085 struct device
*ddev
;
1086 struct dma_chan
*dma_rx
;
1087 int sg_nents
, src_nents
, dst_nents
;
1088 struct scatterlist
*src
, *dst
;
1089 size_t pl
, ml
, split_size
;
1090 struct sa_ctx_info
*sa_ctx
= req
->enc
? &req
->ctx
->enc
: &req
->ctx
->dec
;
1092 struct dma_async_tx_descriptor
*tx_out
;
1095 enum dma_data_direction dir_src
;
1096 struct sa_mapped_sg
*mapped_sg
;
1098 gfp_flags
= req
->base
->flags
& CRYPTO_TFM_REQ_MAY_SLEEP
?
1099 GFP_KERNEL
: GFP_ATOMIC
;
1101 rxd
= kzalloc(sizeof(*rxd
), gfp_flags
);
1105 if (req
->src
!= req
->dst
) {
1107 dir_src
= DMA_TO_DEVICE
;
1110 dir_src
= DMA_BIDIRECTIONAL
;
1114 * SA2UL has an interesting feature where the receive DMA channel
1115 * is selected based on the data passed to the engine. Within the
1116 * transition range, there is also a space where it is impossible
1117 * to determine where the data will end up, and this should be
1118 * avoided. This will be handled by the SW fallback mechanism by
1119 * the individual algorithm implementations.
1121 if (req
->size
>= 256)
1122 dma_rx
= pdata
->dma_rx2
;
1124 dma_rx
= pdata
->dma_rx1
;
1126 ddev
= dmaengine_get_dma_device(pdata
->dma_tx
);
1129 memcpy(cmdl
, sa_ctx
->cmdl
, sa_ctx
->cmdl_size
);
1131 sa_update_cmdl(req
, cmdl
, &sa_ctx
->cmdl_upd_info
);
1133 if (req
->type
!= CRYPTO_ALG_TYPE_AHASH
) {
1136 (SA_REQ_SUBTYPE_ENC
<< SA_REQ_SUBTYPE_SHIFT
);
1139 (SA_REQ_SUBTYPE_DEC
<< SA_REQ_SUBTYPE_SHIFT
);
1142 cmdl
[sa_ctx
->cmdl_size
/ sizeof(u32
)] = req
->type
;
1145 * Map the packets, first we check if the data fits into a single
1146 * sg entry and use that if possible. If it does not fit, we check
1147 * if we need to do sg_split to align the scatterlist data on the
1148 * actual data size being processed by the crypto engine.
1151 sg_nents
= sg_nents_for_len(src
, req
->size
);
1153 split_size
= req
->size
;
1155 mapped_sg
= &rxd
->mapped_sg
[0];
1156 if (sg_nents
== 1 && split_size
<= req
->src
->length
) {
1157 src
= &mapped_sg
->static_sg
;
1159 sg_init_table(src
, 1);
1160 sg_set_page(src
, sg_page(req
->src
), split_size
,
1163 mapped_sg
->sgt
.sgl
= src
;
1164 mapped_sg
->sgt
.orig_nents
= src_nents
;
1165 ret
= dma_map_sgtable(ddev
, &mapped_sg
->sgt
, dir_src
, 0);
1171 mapped_sg
->dir
= dir_src
;
1172 mapped_sg
->mapped
= true;
1174 mapped_sg
->sgt
.sgl
= req
->src
;
1175 mapped_sg
->sgt
.orig_nents
= sg_nents
;
1176 ret
= dma_map_sgtable(ddev
, &mapped_sg
->sgt
, dir_src
, 0);
1182 mapped_sg
->dir
= dir_src
;
1183 mapped_sg
->mapped
= true;
1185 ret
= sg_split(mapped_sg
->sgt
.sgl
, mapped_sg
->sgt
.nents
, 0, 1,
1186 &split_size
, &src
, &src_nents
, gfp_flags
);
1188 src_nents
= mapped_sg
->sgt
.nents
;
1189 src
= mapped_sg
->sgt
.sgl
;
1191 mapped_sg
->split_sg
= src
;
1195 dma_sync_sgtable_for_device(ddev
, &mapped_sg
->sgt
, DMA_TO_DEVICE
);
1198 dst_nents
= src_nents
;
1201 dst_nents
= sg_nents_for_len(req
->dst
, req
->size
);
1202 mapped_sg
= &rxd
->mapped_sg
[1];
1204 if (dst_nents
== 1 && split_size
<= req
->dst
->length
) {
1205 dst
= &mapped_sg
->static_sg
;
1207 sg_init_table(dst
, 1);
1208 sg_set_page(dst
, sg_page(req
->dst
), split_size
,
1211 mapped_sg
->sgt
.sgl
= dst
;
1212 mapped_sg
->sgt
.orig_nents
= dst_nents
;
1213 ret
= dma_map_sgtable(ddev
, &mapped_sg
->sgt
,
1214 DMA_FROM_DEVICE
, 0);
1218 mapped_sg
->dir
= DMA_FROM_DEVICE
;
1219 mapped_sg
->mapped
= true;
1221 mapped_sg
->sgt
.sgl
= req
->dst
;
1222 mapped_sg
->sgt
.orig_nents
= dst_nents
;
1223 ret
= dma_map_sgtable(ddev
, &mapped_sg
->sgt
,
1224 DMA_FROM_DEVICE
, 0);
1228 mapped_sg
->dir
= DMA_FROM_DEVICE
;
1229 mapped_sg
->mapped
= true;
1231 ret
= sg_split(mapped_sg
->sgt
.sgl
, mapped_sg
->sgt
.nents
,
1232 0, 1, &split_size
, &dst
, &dst_nents
,
1235 dst_nents
= mapped_sg
->sgt
.nents
;
1236 dst
= mapped_sg
->sgt
.sgl
;
1238 mapped_sg
->split_sg
= dst
;
1243 rxd
->tx_in
= dmaengine_prep_slave_sg(dma_rx
, dst
, dst_nents
,
1245 DMA_PREP_INTERRUPT
| DMA_CTRL_ACK
);
1247 dev_err(pdata
->dev
, "IN prep_slave_sg() failed\n");
1252 rxd
->req
= (void *)req
->base
;
1253 rxd
->enc
= req
->enc
;
1254 rxd
->iv_idx
= req
->ctx
->iv_idx
;
1255 rxd
->enc_iv_size
= sa_ctx
->cmdl_upd_info
.enc_iv
.size
;
1256 rxd
->tx_in
->callback
= req
->callback
;
1257 rxd
->tx_in
->callback_param
= rxd
;
1259 tx_out
= dmaengine_prep_slave_sg(pdata
->dma_tx
, src
,
1260 src_nents
, DMA_MEM_TO_DEV
,
1261 DMA_PREP_INTERRUPT
| DMA_CTRL_ACK
);
1264 dev_err(pdata
->dev
, "OUT prep_slave_sg() failed\n");
1270 * Prepare metadata for DMA engine. This essentially describes the
1271 * crypto algorithm to be used, data sizes, different keys etc.
1273 mdptr
= (u32
*)dmaengine_desc_get_metadata_ptr(tx_out
, &pl
, &ml
);
1275 sa_prepare_tx_desc(mdptr
, (sa_ctx
->cmdl_size
+ (SA_PSDATA_CTX_WORDS
*
1276 sizeof(u32
))), cmdl
, sizeof(sa_ctx
->epib
),
1279 ml
= sa_ctx
->cmdl_size
+ (SA_PSDATA_CTX_WORDS
* sizeof(u32
));
1280 dmaengine_desc_set_metadata_len(tx_out
, req
->mdata_size
);
1282 dmaengine_submit(tx_out
);
1283 dmaengine_submit(rxd
->tx_in
);
1285 dma_async_issue_pending(dma_rx
);
1286 dma_async_issue_pending(pdata
->dma_tx
);
1288 return -EINPROGRESS
;
1291 sa_free_sa_rx_data(rxd
);
1296 static int sa_cipher_run(struct skcipher_request
*req
, u8
*iv
, int enc
)
1298 struct sa_tfm_ctx
*ctx
=
1299 crypto_skcipher_ctx(crypto_skcipher_reqtfm(req
));
1300 struct crypto_alg
*alg
= req
->base
.tfm
->__crt_alg
;
1301 struct sa_req sa_req
= { 0 };
1306 if (req
->cryptlen
% alg
->cra_blocksize
)
1309 /* Use SW fallback if the data size is not supported */
1310 if (req
->cryptlen
> SA_MAX_DATA_SZ
||
1311 (req
->cryptlen
>= SA_UNSAFE_DATA_SZ_MIN
&&
1312 req
->cryptlen
<= SA_UNSAFE_DATA_SZ_MAX
)) {
1313 struct skcipher_request
*subreq
= skcipher_request_ctx(req
);
1315 skcipher_request_set_tfm(subreq
, ctx
->fallback
.skcipher
);
1316 skcipher_request_set_callback(subreq
, req
->base
.flags
,
1319 skcipher_request_set_crypt(subreq
, req
->src
, req
->dst
,
1320 req
->cryptlen
, req
->iv
);
1322 return crypto_skcipher_encrypt(subreq
);
1324 return crypto_skcipher_decrypt(subreq
);
1327 sa_req
.size
= req
->cryptlen
;
1328 sa_req
.enc_size
= req
->cryptlen
;
1329 sa_req
.src
= req
->src
;
1330 sa_req
.dst
= req
->dst
;
1332 sa_req
.type
= CRYPTO_ALG_TYPE_SKCIPHER
;
1334 sa_req
.callback
= sa_aes_dma_in_callback
;
1335 sa_req
.mdata_size
= 44;
1336 sa_req
.base
= &req
->base
;
1339 return sa_run(&sa_req
);
1342 static int sa_encrypt(struct skcipher_request
*req
)
1344 return sa_cipher_run(req
, req
->iv
, 1);
1347 static int sa_decrypt(struct skcipher_request
*req
)
1349 return sa_cipher_run(req
, req
->iv
, 0);
1352 static void sa_sha_dma_in_callback(void *data
)
1354 struct sa_rx_data
*rxd
= (struct sa_rx_data
*)data
;
1355 struct ahash_request
*req
;
1356 struct crypto_ahash
*tfm
;
1357 unsigned int authsize
;
1363 sa_sync_from_device(rxd
);
1364 req
= container_of(rxd
->req
, struct ahash_request
, base
);
1365 tfm
= crypto_ahash_reqtfm(req
);
1366 authsize
= crypto_ahash_digestsize(tfm
);
1368 mdptr
= (__be32
*)dmaengine_desc_get_metadata_ptr(rxd
->tx_in
, &pl
, &ml
);
1369 result
= (u32
*)req
->result
;
1371 for (i
= 0; i
< (authsize
/ 4); i
++)
1372 result
[i
] = be32_to_cpu(mdptr
[i
+ 4]);
1374 sa_free_sa_rx_data(rxd
);
1376 ahash_request_complete(req
, 0);
1379 static int zero_message_process(struct ahash_request
*req
)
1381 struct crypto_ahash
*tfm
= crypto_ahash_reqtfm(req
);
1382 int sa_digest_size
= crypto_ahash_digestsize(tfm
);
1384 switch (sa_digest_size
) {
1385 case SHA1_DIGEST_SIZE
:
1386 memcpy(req
->result
, sha1_zero_message_hash
, sa_digest_size
);
1388 case SHA256_DIGEST_SIZE
:
1389 memcpy(req
->result
, sha256_zero_message_hash
, sa_digest_size
);
1391 case SHA512_DIGEST_SIZE
:
1392 memcpy(req
->result
, sha512_zero_message_hash
, sa_digest_size
);
1401 static int sa_sha_run(struct ahash_request
*req
)
1403 struct sa_tfm_ctx
*ctx
= crypto_ahash_ctx(crypto_ahash_reqtfm(req
));
1404 struct sa_sha_req_ctx
*rctx
= ahash_request_ctx(req
);
1405 struct sa_req sa_req
= { 0 };
1408 auth_len
= req
->nbytes
;
1411 return zero_message_process(req
);
1413 if (auth_len
> SA_MAX_DATA_SZ
||
1414 (auth_len
>= SA_UNSAFE_DATA_SZ_MIN
&&
1415 auth_len
<= SA_UNSAFE_DATA_SZ_MAX
)) {
1416 struct ahash_request
*subreq
= &rctx
->fallback_req
;
1419 ahash_request_set_tfm(subreq
, ctx
->fallback
.ahash
);
1420 subreq
->base
.flags
= req
->base
.flags
& CRYPTO_TFM_REQ_MAY_SLEEP
;
1422 crypto_ahash_init(subreq
);
1424 subreq
->nbytes
= auth_len
;
1425 subreq
->src
= req
->src
;
1426 subreq
->result
= req
->result
;
1428 ret
|= crypto_ahash_update(subreq
);
1432 ret
|= crypto_ahash_final(subreq
);
1437 sa_req
.size
= auth_len
;
1438 sa_req
.auth_size
= auth_len
;
1439 sa_req
.src
= req
->src
;
1440 sa_req
.dst
= req
->src
;
1442 sa_req
.type
= CRYPTO_ALG_TYPE_AHASH
;
1443 sa_req
.callback
= sa_sha_dma_in_callback
;
1444 sa_req
.mdata_size
= 28;
1446 sa_req
.base
= &req
->base
;
1448 return sa_run(&sa_req
);
1451 static int sa_sha_setup(struct sa_tfm_ctx
*ctx
, struct algo_data
*ad
)
1453 int bs
= crypto_shash_blocksize(ctx
->shash
);
1455 struct sa_cmdl_cfg cfg
;
1457 ad
->enc_eng
.sc_size
= SA_CTX_ENC_TYPE1_SZ
;
1458 ad
->auth_eng
.eng_id
= SA_ENG_ID_AM1
;
1459 ad
->auth_eng
.sc_size
= SA_CTX_AUTH_TYPE2_SZ
;
1461 memset(ctx
->authkey
, 0, bs
);
1462 memset(&cfg
, 0, sizeof(cfg
));
1463 cfg
.aalg
= ad
->aalg_id
;
1464 cfg
.enc_eng_id
= ad
->enc_eng
.eng_id
;
1465 cfg
.auth_eng_id
= ad
->auth_eng
.eng_id
;
1470 ctx
->dev_data
= dev_get_drvdata(sa_k3_dev
);
1471 /* Setup Encryption Security Context & Command label template */
1472 if (sa_init_sc(&ctx
->enc
, ctx
->dev_data
->match_data
, NULL
, 0, NULL
, 0,
1473 ad
, 0, &ctx
->enc
.epib
[1]))
1476 cmdl_len
= sa_format_cmdl_gen(&cfg
,
1477 (u8
*)ctx
->enc
.cmdl
,
1478 &ctx
->enc
.cmdl_upd_info
);
1479 if (cmdl_len
<= 0 || (cmdl_len
> SA_MAX_CMDL_WORDS
* sizeof(u32
)))
1482 ctx
->enc
.cmdl_size
= cmdl_len
;
1487 dev_err(sa_k3_dev
, "%s: badkey\n", __func__
);
1491 static int sa_sha_cra_init_alg(struct crypto_tfm
*tfm
, const char *alg_base
)
1493 struct sa_tfm_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1494 struct sa_crypto_data
*data
= dev_get_drvdata(sa_k3_dev
);
1497 memset(ctx
, 0, sizeof(*ctx
));
1498 ctx
->dev_data
= data
;
1499 ret
= sa_init_ctx_info(&ctx
->enc
, data
);
1504 ctx
->shash
= crypto_alloc_shash(alg_base
, 0,
1505 CRYPTO_ALG_NEED_FALLBACK
);
1506 if (IS_ERR(ctx
->shash
)) {
1507 dev_err(sa_k3_dev
, "base driver %s couldn't be loaded\n",
1509 return PTR_ERR(ctx
->shash
);
1512 ctx
->fallback
.ahash
=
1513 crypto_alloc_ahash(alg_base
, 0,
1514 CRYPTO_ALG_NEED_FALLBACK
);
1515 if (IS_ERR(ctx
->fallback
.ahash
)) {
1516 dev_err(ctx
->dev_data
->dev
,
1517 "Could not load fallback driver\n");
1518 return PTR_ERR(ctx
->fallback
.ahash
);
1522 dev_dbg(sa_k3_dev
, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
1523 __func__
, tfm
, ctx
->enc
.sc_id
, &ctx
->enc
.sc_phys
,
1524 ctx
->dec
.sc_id
, &ctx
->dec
.sc_phys
);
1526 crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm
),
1527 sizeof(struct sa_sha_req_ctx
) +
1528 crypto_ahash_reqsize(ctx
->fallback
.ahash
));
/* One-shot digest entry point */
static int sa_sha_digest(struct ahash_request *req)
{
	return sa_sha_run(req);
}
1538 static int sa_sha_init(struct ahash_request
*req
)
1540 struct crypto_ahash
*tfm
= crypto_ahash_reqtfm(req
);
1541 struct sa_sha_req_ctx
*rctx
= ahash_request_ctx(req
);
1542 struct sa_tfm_ctx
*ctx
= crypto_ahash_ctx(tfm
);
1544 dev_dbg(sa_k3_dev
, "init: digest size: %u, rctx=%p\n",
1545 crypto_ahash_digestsize(tfm
), rctx
);
1547 ahash_request_set_tfm(&rctx
->fallback_req
, ctx
->fallback
.ahash
);
1548 rctx
->fallback_req
.base
.flags
=
1549 req
->base
.flags
& CRYPTO_TFM_REQ_MAY_SLEEP
;
1551 return crypto_ahash_init(&rctx
->fallback_req
);
1554 static int sa_sha_update(struct ahash_request
*req
)
1556 struct crypto_ahash
*tfm
= crypto_ahash_reqtfm(req
);
1557 struct sa_sha_req_ctx
*rctx
= ahash_request_ctx(req
);
1558 struct sa_tfm_ctx
*ctx
= crypto_ahash_ctx(tfm
);
1560 ahash_request_set_tfm(&rctx
->fallback_req
, ctx
->fallback
.ahash
);
1561 rctx
->fallback_req
.base
.flags
=
1562 req
->base
.flags
& CRYPTO_TFM_REQ_MAY_SLEEP
;
1563 rctx
->fallback_req
.nbytes
= req
->nbytes
;
1564 rctx
->fallback_req
.src
= req
->src
;
1566 return crypto_ahash_update(&rctx
->fallback_req
);
1569 static int sa_sha_final(struct ahash_request
*req
)
1571 struct crypto_ahash
*tfm
= crypto_ahash_reqtfm(req
);
1572 struct sa_sha_req_ctx
*rctx
= ahash_request_ctx(req
);
1573 struct sa_tfm_ctx
*ctx
= crypto_ahash_ctx(tfm
);
1575 ahash_request_set_tfm(&rctx
->fallback_req
, ctx
->fallback
.ahash
);
1576 rctx
->fallback_req
.base
.flags
=
1577 req
->base
.flags
& CRYPTO_TFM_REQ_MAY_SLEEP
;
1578 rctx
->fallback_req
.result
= req
->result
;
1580 return crypto_ahash_final(&rctx
->fallback_req
);
1583 static int sa_sha_finup(struct ahash_request
*req
)
1585 struct crypto_ahash
*tfm
= crypto_ahash_reqtfm(req
);
1586 struct sa_sha_req_ctx
*rctx
= ahash_request_ctx(req
);
1587 struct sa_tfm_ctx
*ctx
= crypto_ahash_ctx(tfm
);
1589 ahash_request_set_tfm(&rctx
->fallback_req
, ctx
->fallback
.ahash
);
1590 rctx
->fallback_req
.base
.flags
=
1591 req
->base
.flags
& CRYPTO_TFM_REQ_MAY_SLEEP
;
1593 rctx
->fallback_req
.nbytes
= req
->nbytes
;
1594 rctx
->fallback_req
.src
= req
->src
;
1595 rctx
->fallback_req
.result
= req
->result
;
1597 return crypto_ahash_finup(&rctx
->fallback_req
);
1600 static int sa_sha_import(struct ahash_request
*req
, const void *in
)
1602 struct sa_sha_req_ctx
*rctx
= ahash_request_ctx(req
);
1603 struct crypto_ahash
*tfm
= crypto_ahash_reqtfm(req
);
1604 struct sa_tfm_ctx
*ctx
= crypto_ahash_ctx(tfm
);
1606 ahash_request_set_tfm(&rctx
->fallback_req
, ctx
->fallback
.ahash
);
1607 rctx
->fallback_req
.base
.flags
= req
->base
.flags
&
1608 CRYPTO_TFM_REQ_MAY_SLEEP
;
1610 return crypto_ahash_import(&rctx
->fallback_req
, in
);
1613 static int sa_sha_export(struct ahash_request
*req
, void *out
)
1615 struct sa_sha_req_ctx
*rctx
= ahash_request_ctx(req
);
1616 struct crypto_ahash
*tfm
= crypto_ahash_reqtfm(req
);
1617 struct sa_tfm_ctx
*ctx
= crypto_ahash_ctx(tfm
);
1618 struct ahash_request
*subreq
= &rctx
->fallback_req
;
1620 ahash_request_set_tfm(subreq
, ctx
->fallback
.ahash
);
1621 subreq
->base
.flags
= req
->base
.flags
& CRYPTO_TFM_REQ_MAY_SLEEP
;
1623 return crypto_ahash_export(subreq
, out
);
1626 static int sa_sha1_cra_init(struct crypto_tfm
*tfm
)
1628 struct algo_data ad
= { 0 };
1629 struct sa_tfm_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1631 sa_sha_cra_init_alg(tfm
, "sha1");
1633 ad
.aalg_id
= SA_AALG_ID_SHA1
;
1634 ad
.hash_size
= SHA1_DIGEST_SIZE
;
1635 ad
.auth_ctrl
= SA_AUTH_SW_CTRL_SHA1
;
1637 sa_sha_setup(ctx
, &ad
);
1642 static int sa_sha256_cra_init(struct crypto_tfm
*tfm
)
1644 struct algo_data ad
= { 0 };
1645 struct sa_tfm_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1647 sa_sha_cra_init_alg(tfm
, "sha256");
1649 ad
.aalg_id
= SA_AALG_ID_SHA2_256
;
1650 ad
.hash_size
= SHA256_DIGEST_SIZE
;
1651 ad
.auth_ctrl
= SA_AUTH_SW_CTRL_SHA256
;
1653 sa_sha_setup(ctx
, &ad
);
1658 static int sa_sha512_cra_init(struct crypto_tfm
*tfm
)
1660 struct algo_data ad
= { 0 };
1661 struct sa_tfm_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1663 sa_sha_cra_init_alg(tfm
, "sha512");
1665 ad
.aalg_id
= SA_AALG_ID_SHA2_512
;
1666 ad
.hash_size
= SHA512_DIGEST_SIZE
;
1667 ad
.auth_ctrl
= SA_AUTH_SW_CTRL_SHA512
;
1669 sa_sha_setup(ctx
, &ad
);
1674 static void sa_sha_cra_exit(struct crypto_tfm
*tfm
)
1676 struct sa_tfm_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1677 struct sa_crypto_data
*data
= dev_get_drvdata(sa_k3_dev
);
1679 dev_dbg(sa_k3_dev
, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
1680 __func__
, tfm
, ctx
->enc
.sc_id
, &ctx
->enc
.sc_phys
,
1681 ctx
->dec
.sc_id
, &ctx
->dec
.sc_phys
);
1683 if (crypto_tfm_alg_type(tfm
) == CRYPTO_ALG_TYPE_AHASH
)
1684 sa_free_ctx_info(&ctx
->enc
, data
);
1686 crypto_free_shash(ctx
->shash
);
1687 crypto_free_ahash(ctx
->fallback
.ahash
);
1690 static void sa_aead_dma_in_callback(void *data
)
1692 struct sa_rx_data
*rxd
= (struct sa_rx_data
*)data
;
1693 struct aead_request
*req
;
1694 struct crypto_aead
*tfm
;
1696 unsigned int authsize
;
1697 u8 auth_tag
[SA_MAX_AUTH_TAG_SZ
];
1703 sa_sync_from_device(rxd
);
1704 req
= container_of(rxd
->req
, struct aead_request
, base
);
1705 tfm
= crypto_aead_reqtfm(req
);
1706 start
= req
->assoclen
+ req
->cryptlen
;
1707 authsize
= crypto_aead_authsize(tfm
);
1709 mdptr
= (u32
*)dmaengine_desc_get_metadata_ptr(rxd
->tx_in
, &pl
, &ml
);
1710 for (i
= 0; i
< (authsize
/ 4); i
++)
1711 mdptr
[i
+ 4] = swab32(mdptr
[i
+ 4]);
1714 scatterwalk_map_and_copy(&mdptr
[4], req
->dst
, start
, authsize
,
1718 scatterwalk_map_and_copy(auth_tag
, req
->src
, start
, authsize
,
1721 err
= memcmp(&mdptr
[4], auth_tag
, authsize
) ? -EBADMSG
: 0;
1724 sa_free_sa_rx_data(rxd
);
1726 aead_request_complete(req
, err
);
1729 static int sa_cra_init_aead(struct crypto_aead
*tfm
, const char *hash
,
1730 const char *fallback
)
1732 struct sa_tfm_ctx
*ctx
= crypto_aead_ctx(tfm
);
1733 struct sa_crypto_data
*data
= dev_get_drvdata(sa_k3_dev
);
1736 memzero_explicit(ctx
, sizeof(*ctx
));
1737 ctx
->dev_data
= data
;
1739 ctx
->shash
= crypto_alloc_shash(hash
, 0, CRYPTO_ALG_NEED_FALLBACK
);
1740 if (IS_ERR(ctx
->shash
)) {
1741 dev_err(sa_k3_dev
, "base driver %s couldn't be loaded\n", hash
);
1742 return PTR_ERR(ctx
->shash
);
1745 ctx
->fallback
.aead
= crypto_alloc_aead(fallback
, 0,
1746 CRYPTO_ALG_NEED_FALLBACK
);
1748 if (IS_ERR(ctx
->fallback
.aead
)) {
1749 dev_err(sa_k3_dev
, "fallback driver %s couldn't be loaded\n",
1751 return PTR_ERR(ctx
->fallback
.aead
);
1754 crypto_aead_set_reqsize(tfm
, sizeof(struct aead_request
) +
1755 crypto_aead_reqsize(ctx
->fallback
.aead
));
1757 ret
= sa_init_ctx_info(&ctx
->enc
, data
);
1761 ret
= sa_init_ctx_info(&ctx
->dec
, data
);
1763 sa_free_ctx_info(&ctx
->enc
, data
);
1767 dev_dbg(sa_k3_dev
, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
1768 __func__
, tfm
, ctx
->enc
.sc_id
, &ctx
->enc
.sc_phys
,
1769 ctx
->dec
.sc_id
, &ctx
->dec
.sc_phys
);
/* AEAD tfm init for authenc(hmac(sha1),cbc(aes)) */
static int sa_cra_init_aead_sha1(struct crypto_aead *tfm)
{
	return sa_cra_init_aead(tfm, "sha1",
				"authenc(hmac(sha1-ce),cbc(aes-ce))");
}
/* AEAD tfm init for authenc(hmac(sha256),cbc(aes)) */
static int sa_cra_init_aead_sha256(struct crypto_aead *tfm)
{
	return sa_cra_init_aead(tfm, "sha256",
				"authenc(hmac(sha256-ce),cbc(aes-ce))");
}
1786 static void sa_exit_tfm_aead(struct crypto_aead
*tfm
)
1788 struct sa_tfm_ctx
*ctx
= crypto_aead_ctx(tfm
);
1789 struct sa_crypto_data
*data
= dev_get_drvdata(sa_k3_dev
);
1791 crypto_free_shash(ctx
->shash
);
1792 crypto_free_aead(ctx
->fallback
.aead
);
1794 sa_free_ctx_info(&ctx
->enc
, data
);
1795 sa_free_ctx_info(&ctx
->dec
, data
);
1798 /* AEAD algorithm configuration interface function */
1799 static int sa_aead_setkey(struct crypto_aead
*authenc
,
1800 const u8
*key
, unsigned int keylen
,
1801 struct algo_data
*ad
)
1803 struct sa_tfm_ctx
*ctx
= crypto_aead_ctx(authenc
);
1804 struct crypto_authenc_keys keys
;
1806 struct sa_cmdl_cfg cfg
;
1809 if (crypto_authenc_extractkeys(&keys
, key
, keylen
) != 0)
1812 /* Convert the key size (16/24/32) to the key size index (0/1/2) */
1813 key_idx
= (keys
.enckeylen
>> 3) - 2;
1818 ad
->enc_eng
.eng_id
= SA_ENG_ID_EM1
;
1819 ad
->enc_eng
.sc_size
= SA_CTX_ENC_TYPE1_SZ
;
1820 ad
->auth_eng
.eng_id
= SA_ENG_ID_AM1
;
1821 ad
->auth_eng
.sc_size
= SA_CTX_AUTH_TYPE2_SZ
;
1822 ad
->mci_enc
= mci_cbc_enc_no_iv_array
[key_idx
];
1823 ad
->mci_dec
= mci_cbc_dec_no_iv_array
[key_idx
];
1825 ad
->keyed_mac
= true;
1826 ad
->ealg_id
= SA_EALG_ID_AES_CBC
;
1827 ad
->prep_iopad
= sa_prepare_iopads
;
1829 memset(&cfg
, 0, sizeof(cfg
));
1831 cfg
.aalg
= ad
->aalg_id
;
1832 cfg
.enc_eng_id
= ad
->enc_eng
.eng_id
;
1833 cfg
.auth_eng_id
= ad
->auth_eng
.eng_id
;
1834 cfg
.iv_size
= crypto_aead_ivsize(authenc
);
1835 cfg
.akey
= keys
.authkey
;
1836 cfg
.akey_len
= keys
.authkeylen
;
1838 /* Setup Encryption Security Context & Command label template */
1839 if (sa_init_sc(&ctx
->enc
, ctx
->dev_data
->match_data
, keys
.enckey
,
1840 keys
.enckeylen
, keys
.authkey
, keys
.authkeylen
,
1841 ad
, 1, &ctx
->enc
.epib
[1]))
1844 cmdl_len
= sa_format_cmdl_gen(&cfg
,
1845 (u8
*)ctx
->enc
.cmdl
,
1846 &ctx
->enc
.cmdl_upd_info
);
1847 if (cmdl_len
<= 0 || (cmdl_len
> SA_MAX_CMDL_WORDS
* sizeof(u32
)))
1850 ctx
->enc
.cmdl_size
= cmdl_len
;
1852 /* Setup Decryption Security Context & Command label template */
1853 if (sa_init_sc(&ctx
->dec
, ctx
->dev_data
->match_data
, keys
.enckey
,
1854 keys
.enckeylen
, keys
.authkey
, keys
.authkeylen
,
1855 ad
, 0, &ctx
->dec
.epib
[1]))
1859 cmdl_len
= sa_format_cmdl_gen(&cfg
, (u8
*)ctx
->dec
.cmdl
,
1860 &ctx
->dec
.cmdl_upd_info
);
1862 if (cmdl_len
<= 0 || (cmdl_len
> SA_MAX_CMDL_WORDS
* sizeof(u32
)))
1865 ctx
->dec
.cmdl_size
= cmdl_len
;
1867 crypto_aead_clear_flags(ctx
->fallback
.aead
, CRYPTO_TFM_REQ_MASK
);
1868 crypto_aead_set_flags(ctx
->fallback
.aead
,
1869 crypto_aead_get_flags(authenc
) &
1870 CRYPTO_TFM_REQ_MASK
);
1871 crypto_aead_setkey(ctx
->fallback
.aead
, key
, keylen
);
1876 static int sa_aead_setauthsize(struct crypto_aead
*tfm
, unsigned int authsize
)
1878 struct sa_tfm_ctx
*ctx
= crypto_tfm_ctx(crypto_aead_tfm(tfm
));
1880 return crypto_aead_setauthsize(ctx
->fallback
.aead
, authsize
);
1883 static int sa_aead_cbc_sha1_setkey(struct crypto_aead
*authenc
,
1884 const u8
*key
, unsigned int keylen
)
1886 struct algo_data ad
= { 0 };
1888 ad
.ealg_id
= SA_EALG_ID_AES_CBC
;
1889 ad
.aalg_id
= SA_AALG_ID_HMAC_SHA1
;
1890 ad
.hash_size
= SHA1_DIGEST_SIZE
;
1891 ad
.auth_ctrl
= SA_AUTH_SW_CTRL_SHA1
;
1893 return sa_aead_setkey(authenc
, key
, keylen
, &ad
);
1896 static int sa_aead_cbc_sha256_setkey(struct crypto_aead
*authenc
,
1897 const u8
*key
, unsigned int keylen
)
1899 struct algo_data ad
= { 0 };
1901 ad
.ealg_id
= SA_EALG_ID_AES_CBC
;
1902 ad
.aalg_id
= SA_AALG_ID_HMAC_SHA2_256
;
1903 ad
.hash_size
= SHA256_DIGEST_SIZE
;
1904 ad
.auth_ctrl
= SA_AUTH_SW_CTRL_SHA256
;
1906 return sa_aead_setkey(authenc
, key
, keylen
, &ad
);
1909 static int sa_aead_run(struct aead_request
*req
, u8
*iv
, int enc
)
1911 struct crypto_aead
*tfm
= crypto_aead_reqtfm(req
);
1912 struct sa_tfm_ctx
*ctx
= crypto_aead_ctx(tfm
);
1913 struct sa_req sa_req
= { 0 };
1914 size_t auth_size
, enc_size
;
1916 enc_size
= req
->cryptlen
;
1917 auth_size
= req
->assoclen
+ req
->cryptlen
;
1920 enc_size
-= crypto_aead_authsize(tfm
);
1921 auth_size
-= crypto_aead_authsize(tfm
);
1924 if (auth_size
> SA_MAX_DATA_SZ
||
1925 (auth_size
>= SA_UNSAFE_DATA_SZ_MIN
&&
1926 auth_size
<= SA_UNSAFE_DATA_SZ_MAX
)) {
1927 struct aead_request
*subreq
= aead_request_ctx(req
);
1930 aead_request_set_tfm(subreq
, ctx
->fallback
.aead
);
1931 aead_request_set_callback(subreq
, req
->base
.flags
,
1932 req
->base
.complete
, req
->base
.data
);
1933 aead_request_set_crypt(subreq
, req
->src
, req
->dst
,
1934 req
->cryptlen
, req
->iv
);
1935 aead_request_set_ad(subreq
, req
->assoclen
);
1937 ret
= enc
? crypto_aead_encrypt(subreq
) :
1938 crypto_aead_decrypt(subreq
);
1942 sa_req
.enc_offset
= req
->assoclen
;
1943 sa_req
.enc_size
= enc_size
;
1944 sa_req
.auth_size
= auth_size
;
1945 sa_req
.size
= auth_size
;
1947 sa_req
.type
= CRYPTO_ALG_TYPE_AEAD
;
1949 sa_req
.callback
= sa_aead_dma_in_callback
;
1950 sa_req
.mdata_size
= 52;
1951 sa_req
.base
= &req
->base
;
1953 sa_req
.src
= req
->src
;
1954 sa_req
.dst
= req
->dst
;
1956 return sa_run(&sa_req
);
1959 /* AEAD algorithm encrypt interface function */
1960 static int sa_aead_encrypt(struct aead_request
*req
)
1962 return sa_aead_run(req
, req
->iv
, 1);
1965 /* AEAD algorithm decrypt interface function */
1966 static int sa_aead_decrypt(struct aead_request
*req
)
1968 return sa_aead_run(req
, req
->iv
, 0);
1971 static struct sa_alg_tmpl sa_algs
[] = {
1972 [SA_ALG_CBC_AES
] = {
1973 .type
= CRYPTO_ALG_TYPE_SKCIPHER
,
1975 .base
.cra_name
= "cbc(aes)",
1976 .base
.cra_driver_name
= "cbc-aes-sa2ul",
1977 .base
.cra_priority
= 30000,
1978 .base
.cra_flags
= CRYPTO_ALG_TYPE_SKCIPHER
|
1979 CRYPTO_ALG_KERN_DRIVER_ONLY
|
1981 CRYPTO_ALG_NEED_FALLBACK
,
1982 .base
.cra_blocksize
= AES_BLOCK_SIZE
,
1983 .base
.cra_ctxsize
= sizeof(struct sa_tfm_ctx
),
1984 .base
.cra_module
= THIS_MODULE
,
1985 .init
= sa_cipher_cra_init
,
1986 .exit
= sa_cipher_cra_exit
,
1987 .min_keysize
= AES_MIN_KEY_SIZE
,
1988 .max_keysize
= AES_MAX_KEY_SIZE
,
1989 .ivsize
= AES_BLOCK_SIZE
,
1990 .setkey
= sa_aes_cbc_setkey
,
1991 .encrypt
= sa_encrypt
,
1992 .decrypt
= sa_decrypt
,
1995 [SA_ALG_EBC_AES
] = {
1996 .type
= CRYPTO_ALG_TYPE_SKCIPHER
,
1998 .base
.cra_name
= "ecb(aes)",
1999 .base
.cra_driver_name
= "ecb-aes-sa2ul",
2000 .base
.cra_priority
= 30000,
2001 .base
.cra_flags
= CRYPTO_ALG_TYPE_SKCIPHER
|
2002 CRYPTO_ALG_KERN_DRIVER_ONLY
|
2004 CRYPTO_ALG_NEED_FALLBACK
,
2005 .base
.cra_blocksize
= AES_BLOCK_SIZE
,
2006 .base
.cra_ctxsize
= sizeof(struct sa_tfm_ctx
),
2007 .base
.cra_module
= THIS_MODULE
,
2008 .init
= sa_cipher_cra_init
,
2009 .exit
= sa_cipher_cra_exit
,
2010 .min_keysize
= AES_MIN_KEY_SIZE
,
2011 .max_keysize
= AES_MAX_KEY_SIZE
,
2012 .setkey
= sa_aes_ecb_setkey
,
2013 .encrypt
= sa_encrypt
,
2014 .decrypt
= sa_decrypt
,
2017 [SA_ALG_CBC_DES3
] = {
2018 .type
= CRYPTO_ALG_TYPE_SKCIPHER
,
2020 .base
.cra_name
= "cbc(des3_ede)",
2021 .base
.cra_driver_name
= "cbc-des3-sa2ul",
2022 .base
.cra_priority
= 30000,
2023 .base
.cra_flags
= CRYPTO_ALG_TYPE_SKCIPHER
|
2024 CRYPTO_ALG_KERN_DRIVER_ONLY
|
2026 CRYPTO_ALG_NEED_FALLBACK
,
2027 .base
.cra_blocksize
= DES_BLOCK_SIZE
,
2028 .base
.cra_ctxsize
= sizeof(struct sa_tfm_ctx
),
2029 .base
.cra_module
= THIS_MODULE
,
2030 .init
= sa_cipher_cra_init
,
2031 .exit
= sa_cipher_cra_exit
,
2032 .min_keysize
= 3 * DES_KEY_SIZE
,
2033 .max_keysize
= 3 * DES_KEY_SIZE
,
2034 .ivsize
= DES_BLOCK_SIZE
,
2035 .setkey
= sa_3des_cbc_setkey
,
2036 .encrypt
= sa_encrypt
,
2037 .decrypt
= sa_decrypt
,
2040 [SA_ALG_ECB_DES3
] = {
2041 .type
= CRYPTO_ALG_TYPE_SKCIPHER
,
2043 .base
.cra_name
= "ecb(des3_ede)",
2044 .base
.cra_driver_name
= "ecb-des3-sa2ul",
2045 .base
.cra_priority
= 30000,
2046 .base
.cra_flags
= CRYPTO_ALG_TYPE_SKCIPHER
|
2047 CRYPTO_ALG_KERN_DRIVER_ONLY
|
2049 CRYPTO_ALG_NEED_FALLBACK
,
2050 .base
.cra_blocksize
= DES_BLOCK_SIZE
,
2051 .base
.cra_ctxsize
= sizeof(struct sa_tfm_ctx
),
2052 .base
.cra_module
= THIS_MODULE
,
2053 .init
= sa_cipher_cra_init
,
2054 .exit
= sa_cipher_cra_exit
,
2055 .min_keysize
= 3 * DES_KEY_SIZE
,
2056 .max_keysize
= 3 * DES_KEY_SIZE
,
2057 .setkey
= sa_3des_ecb_setkey
,
2058 .encrypt
= sa_encrypt
,
2059 .decrypt
= sa_decrypt
,
2063 .type
= CRYPTO_ALG_TYPE_AHASH
,
2067 .cra_driver_name
= "sha1-sa2ul",
2068 .cra_priority
= 400,
2069 .cra_flags
= CRYPTO_ALG_TYPE_AHASH
|
2071 CRYPTO_ALG_KERN_DRIVER_ONLY
|
2072 CRYPTO_ALG_NEED_FALLBACK
,
2073 .cra_blocksize
= SHA1_BLOCK_SIZE
,
2074 .cra_ctxsize
= sizeof(struct sa_tfm_ctx
),
2075 .cra_module
= THIS_MODULE
,
2076 .cra_init
= sa_sha1_cra_init
,
2077 .cra_exit
= sa_sha_cra_exit
,
2079 .halg
.digestsize
= SHA1_DIGEST_SIZE
,
2080 .halg
.statesize
= sizeof(struct sa_sha_req_ctx
) +
2081 sizeof(struct sha1_state
),
2082 .init
= sa_sha_init
,
2083 .update
= sa_sha_update
,
2084 .final
= sa_sha_final
,
2085 .finup
= sa_sha_finup
,
2086 .digest
= sa_sha_digest
,
2087 .export
= sa_sha_export
,
2088 .import
= sa_sha_import
,
2092 .type
= CRYPTO_ALG_TYPE_AHASH
,
2095 .cra_name
= "sha256",
2096 .cra_driver_name
= "sha256-sa2ul",
2097 .cra_priority
= 400,
2098 .cra_flags
= CRYPTO_ALG_TYPE_AHASH
|
2100 CRYPTO_ALG_KERN_DRIVER_ONLY
|
2101 CRYPTO_ALG_NEED_FALLBACK
,
2102 .cra_blocksize
= SHA256_BLOCK_SIZE
,
2103 .cra_ctxsize
= sizeof(struct sa_tfm_ctx
),
2104 .cra_module
= THIS_MODULE
,
2105 .cra_init
= sa_sha256_cra_init
,
2106 .cra_exit
= sa_sha_cra_exit
,
2108 .halg
.digestsize
= SHA256_DIGEST_SIZE
,
2109 .halg
.statesize
= sizeof(struct sa_sha_req_ctx
) +
2110 sizeof(struct sha256_state
),
2111 .init
= sa_sha_init
,
2112 .update
= sa_sha_update
,
2113 .final
= sa_sha_final
,
2114 .finup
= sa_sha_finup
,
2115 .digest
= sa_sha_digest
,
2116 .export
= sa_sha_export
,
2117 .import
= sa_sha_import
,
2121 .type
= CRYPTO_ALG_TYPE_AHASH
,
2124 .cra_name
= "sha512",
2125 .cra_driver_name
= "sha512-sa2ul",
2126 .cra_priority
= 400,
2127 .cra_flags
= CRYPTO_ALG_TYPE_AHASH
|
2129 CRYPTO_ALG_KERN_DRIVER_ONLY
|
2130 CRYPTO_ALG_NEED_FALLBACK
,
2131 .cra_blocksize
= SHA512_BLOCK_SIZE
,
2132 .cra_ctxsize
= sizeof(struct sa_tfm_ctx
),
2133 .cra_module
= THIS_MODULE
,
2134 .cra_init
= sa_sha512_cra_init
,
2135 .cra_exit
= sa_sha_cra_exit
,
2137 .halg
.digestsize
= SHA512_DIGEST_SIZE
,
2138 .halg
.statesize
= sizeof(struct sa_sha_req_ctx
) +
2139 sizeof(struct sha512_state
),
2140 .init
= sa_sha_init
,
2141 .update
= sa_sha_update
,
2142 .final
= sa_sha_final
,
2143 .finup
= sa_sha_finup
,
2144 .digest
= sa_sha_digest
,
2145 .export
= sa_sha_export
,
2146 .import
= sa_sha_import
,
2149 [SA_ALG_AUTHENC_SHA1_AES
] = {
2150 .type
= CRYPTO_ALG_TYPE_AEAD
,
2153 .cra_name
= "authenc(hmac(sha1),cbc(aes))",
2155 "authenc(hmac(sha1),cbc(aes))-sa2ul",
2156 .cra_blocksize
= AES_BLOCK_SIZE
,
2157 .cra_flags
= CRYPTO_ALG_TYPE_AEAD
|
2158 CRYPTO_ALG_KERN_DRIVER_ONLY
|
2160 CRYPTO_ALG_NEED_FALLBACK
,
2161 .cra_ctxsize
= sizeof(struct sa_tfm_ctx
),
2162 .cra_module
= THIS_MODULE
,
2163 .cra_priority
= 3000,
2165 .ivsize
= AES_BLOCK_SIZE
,
2166 .maxauthsize
= SHA1_DIGEST_SIZE
,
2168 .init
= sa_cra_init_aead_sha1
,
2169 .exit
= sa_exit_tfm_aead
,
2170 .setkey
= sa_aead_cbc_sha1_setkey
,
2171 .setauthsize
= sa_aead_setauthsize
,
2172 .encrypt
= sa_aead_encrypt
,
2173 .decrypt
= sa_aead_decrypt
,
2176 [SA_ALG_AUTHENC_SHA256_AES
] = {
2177 .type
= CRYPTO_ALG_TYPE_AEAD
,
2180 .cra_name
= "authenc(hmac(sha256),cbc(aes))",
2182 "authenc(hmac(sha256),cbc(aes))-sa2ul",
2183 .cra_blocksize
= AES_BLOCK_SIZE
,
2184 .cra_flags
= CRYPTO_ALG_TYPE_AEAD
|
2185 CRYPTO_ALG_KERN_DRIVER_ONLY
|
2187 CRYPTO_ALG_NEED_FALLBACK
,
2188 .cra_ctxsize
= sizeof(struct sa_tfm_ctx
),
2189 .cra_module
= THIS_MODULE
,
2191 .cra_priority
= 3000,
2193 .ivsize
= AES_BLOCK_SIZE
,
2194 .maxauthsize
= SHA256_DIGEST_SIZE
,
2196 .init
= sa_cra_init_aead_sha256
,
2197 .exit
= sa_exit_tfm_aead
,
2198 .setkey
= sa_aead_cbc_sha256_setkey
,
2199 .setauthsize
= sa_aead_setauthsize
,
2200 .encrypt
= sa_aead_encrypt
,
2201 .decrypt
= sa_aead_decrypt
,
2206 /* Register the algorithms in crypto framework */
2207 static void sa_register_algos(struct sa_crypto_data
*dev_data
)
2209 const struct sa_match_data
*match_data
= dev_data
->match_data
;
2210 struct device
*dev
= dev_data
->dev
;
2215 for (i
= 0; i
< ARRAY_SIZE(sa_algs
); i
++) {
2216 /* Skip unsupported algos */
2217 if (!(match_data
->supported_algos
& BIT(i
)))
2220 type
= sa_algs
[i
].type
;
2221 if (type
== CRYPTO_ALG_TYPE_SKCIPHER
) {
2222 alg_name
= sa_algs
[i
].alg
.skcipher
.base
.cra_name
;
2223 err
= crypto_register_skcipher(&sa_algs
[i
].alg
.skcipher
);
2224 } else if (type
== CRYPTO_ALG_TYPE_AHASH
) {
2225 alg_name
= sa_algs
[i
].alg
.ahash
.halg
.base
.cra_name
;
2226 err
= crypto_register_ahash(&sa_algs
[i
].alg
.ahash
);
2227 } else if (type
== CRYPTO_ALG_TYPE_AEAD
) {
2228 alg_name
= sa_algs
[i
].alg
.aead
.base
.cra_name
;
2229 err
= crypto_register_aead(&sa_algs
[i
].alg
.aead
);
2232 "un-supported crypto algorithm (%d)",
2238 dev_err(dev
, "Failed to register '%s'\n", alg_name
);
2240 sa_algs
[i
].registered
= true;
2244 /* Unregister the algorithms in crypto framework */
2245 static void sa_unregister_algos(const struct device
*dev
)
2250 for (i
= 0; i
< ARRAY_SIZE(sa_algs
); i
++) {
2251 type
= sa_algs
[i
].type
;
2252 if (!sa_algs
[i
].registered
)
2254 if (type
== CRYPTO_ALG_TYPE_SKCIPHER
)
2255 crypto_unregister_skcipher(&sa_algs
[i
].alg
.skcipher
);
2256 else if (type
== CRYPTO_ALG_TYPE_AHASH
)
2257 crypto_unregister_ahash(&sa_algs
[i
].alg
.ahash
);
2258 else if (type
== CRYPTO_ALG_TYPE_AEAD
)
2259 crypto_unregister_aead(&sa_algs
[i
].alg
.aead
);
2261 sa_algs
[i
].registered
= false;
2265 static int sa_init_mem(struct sa_crypto_data
*dev_data
)
2267 struct device
*dev
= &dev_data
->pdev
->dev
;
2268 /* Setup dma pool for security context buffers */
2269 dev_data
->sc_pool
= dma_pool_create("keystone-sc", dev
,
2270 SA_CTX_MAX_SZ
, 64, 0);
2271 if (!dev_data
->sc_pool
) {
2272 dev_err(dev
, "Failed to create dma pool");
2279 static int sa_dma_init(struct sa_crypto_data
*dd
)
2282 struct dma_slave_config cfg
;
2288 ret
= dma_coerce_mask_and_coherent(dd
->dev
, DMA_BIT_MASK(48));
2292 dd
->dma_rx1
= dma_request_chan(dd
->dev
, "rx1");
2293 if (IS_ERR(dd
->dma_rx1
))
2294 return dev_err_probe(dd
->dev
, PTR_ERR(dd
->dma_rx1
),
2295 "Unable to request rx1 DMA channel\n");
2297 dd
->dma_rx2
= dma_request_chan(dd
->dev
, "rx2");
2298 if (IS_ERR(dd
->dma_rx2
)) {
2299 ret
= dev_err_probe(dd
->dev
, PTR_ERR(dd
->dma_rx2
),
2300 "Unable to request rx2 DMA channel\n");
2304 dd
->dma_tx
= dma_request_chan(dd
->dev
, "tx");
2305 if (IS_ERR(dd
->dma_tx
)) {
2306 ret
= dev_err_probe(dd
->dev
, PTR_ERR(dd
->dma_tx
),
2307 "Unable to request tx DMA channel\n");
2311 memzero_explicit(&cfg
, sizeof(cfg
));
2313 cfg
.src_addr_width
= DMA_SLAVE_BUSWIDTH_4_BYTES
;
2314 cfg
.dst_addr_width
= DMA_SLAVE_BUSWIDTH_4_BYTES
;
2315 cfg
.src_maxburst
= 4;
2316 cfg
.dst_maxburst
= 4;
2318 ret
= dmaengine_slave_config(dd
->dma_rx1
, &cfg
);
2320 dev_err(dd
->dev
, "can't configure IN dmaengine slave: %d\n",
2322 goto err_dma_config
;
2325 ret
= dmaengine_slave_config(dd
->dma_rx2
, &cfg
);
2327 dev_err(dd
->dev
, "can't configure IN dmaengine slave: %d\n",
2329 goto err_dma_config
;
2332 ret
= dmaengine_slave_config(dd
->dma_tx
, &cfg
);
2334 dev_err(dd
->dev
, "can't configure OUT dmaengine slave: %d\n",
2336 goto err_dma_config
;
2342 dma_release_channel(dd
->dma_tx
);
2344 dma_release_channel(dd
->dma_rx2
);
2346 dma_release_channel(dd
->dma_rx1
);
2351 static int sa_link_child(struct device
*dev
, void *data
)
2353 struct device
*parent
= data
;
2355 device_link_add(dev
, parent
, DL_FLAG_AUTOPROBE_CONSUMER
);
2360 static struct sa_match_data am654_match_data
= {
2363 .supported_algos
= GENMASK(SA_ALG_AUTHENC_SHA256_AES
, 0),
2366 static struct sa_match_data am64_match_data
= {
2369 .supported_algos
= BIT(SA_ALG_CBC_AES
) |
2370 BIT(SA_ALG_EBC_AES
) |
2371 BIT(SA_ALG_SHA256
) |
2372 BIT(SA_ALG_SHA512
) |
2373 BIT(SA_ALG_AUTHENC_SHA256_AES
),
2374 .skip_engine_control
= true,
2377 static const struct of_device_id of_match
[] = {
2378 { .compatible
= "ti,j721e-sa2ul", .data
= &am654_match_data
, },
2379 { .compatible
= "ti,am654-sa2ul", .data
= &am654_match_data
, },
2380 { .compatible
= "ti,am64-sa2ul", .data
= &am64_match_data
, },
2383 MODULE_DEVICE_TABLE(of
, of_match
);
2385 static int sa_ul_probe(struct platform_device
*pdev
)
2387 struct device
*dev
= &pdev
->dev
;
2388 struct device_node
*node
= dev
->of_node
;
2389 static void __iomem
*saul_base
;
2390 struct sa_crypto_data
*dev_data
;
2393 dev_data
= devm_kzalloc(dev
, sizeof(*dev_data
), GFP_KERNEL
);
2397 dev_data
->match_data
= of_device_get_match_data(dev
);
2398 if (!dev_data
->match_data
)
2401 saul_base
= devm_platform_ioremap_resource(pdev
, 0);
2402 if (IS_ERR(saul_base
))
2403 return PTR_ERR(saul_base
);
2406 dev_data
->dev
= dev
;
2407 dev_data
->pdev
= pdev
;
2408 dev_data
->base
= saul_base
;
2409 platform_set_drvdata(pdev
, dev_data
);
2410 dev_set_drvdata(sa_k3_dev
, dev_data
);
2412 pm_runtime_enable(dev
);
2413 ret
= pm_runtime_resume_and_get(dev
);
2415 dev_err(&pdev
->dev
, "%s: failed to get sync: %d\n", __func__
,
2417 pm_runtime_disable(dev
);
2421 sa_init_mem(dev_data
);
2422 ret
= sa_dma_init(dev_data
);
2424 goto destroy_dma_pool
;
2426 spin_lock_init(&dev_data
->scid_lock
);
2428 if (!dev_data
->match_data
->skip_engine_control
) {
2429 u32 val
= SA_EEC_ENCSS_EN
| SA_EEC_AUTHSS_EN
| SA_EEC_CTXCACH_EN
|
2430 SA_EEC_CPPI_PORT_IN_EN
| SA_EEC_CPPI_PORT_OUT_EN
|
2433 writel_relaxed(val
, saul_base
+ SA_ENGINE_ENABLE_CONTROL
);
2436 sa_register_algos(dev_data
);
2438 ret
= of_platform_populate(node
, NULL
, NULL
, &pdev
->dev
);
2442 device_for_each_child(&pdev
->dev
, &pdev
->dev
, sa_link_child
);
2447 sa_unregister_algos(&pdev
->dev
);
2449 dma_release_channel(dev_data
->dma_rx2
);
2450 dma_release_channel(dev_data
->dma_rx1
);
2451 dma_release_channel(dev_data
->dma_tx
);
2454 dma_pool_destroy(dev_data
->sc_pool
);
2456 pm_runtime_put_sync(&pdev
->dev
);
2457 pm_runtime_disable(&pdev
->dev
);
/*
 * sa_ul_remove() - tear down one SA2UL instance.
 *
 * Undoes sa_ul_probe() in reverse order: depopulate DT children first (they
 * consume this device via device links), unregister the algorithms, release
 * the three DMA channels, destroy the security-context pool, then drop the
 * runtime-PM reference taken at probe and disable runtime PM.
 */
static int sa_ul_remove(struct platform_device *pdev)
{
	struct sa_crypto_data *dev_data = platform_get_drvdata(pdev);

	/* Children (e.g. TRNG) must go before the resources they depend on. */
	of_platform_depopulate(&pdev->dev);

	sa_unregister_algos(&pdev->dev);

	dma_release_channel(dev_data->dma_rx2);
	dma_release_channel(dev_data->dma_rx1);
	dma_release_channel(dev_data->dma_tx);

	dma_pool_destroy(dev_data->sc_pool);

	platform_set_drvdata(pdev, NULL);

	/* Balance pm_runtime_resume_and_get()/pm_runtime_enable() from probe. */
	pm_runtime_put_sync(&pdev->dev);
	pm_runtime_disable(&pdev->dev);

	return 0;
}
/*
 * Platform driver glue: binds via the OF match table above and registers
 * the module-level init/exit through module_platform_driver().
 */
static struct platform_driver sa_ul_driver = {
	.probe = sa_ul_probe,
	.remove = sa_ul_remove,
	.driver = {
		.name = "saul-crypto",
		.of_match_table = of_match,
	},
};
module_platform_driver(sa_ul_driver);
MODULE_LICENSE("GPL v2");