/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2017 Cavium, Inc
 */

#include <stdbool.h>

#include <rte_common.h>
#include <rte_hexdump.h>
#include <rte_cryptodev.h>
#include <rte_cryptodev_pmd.h>
#include <rte_bus_vdev.h>
#include <rte_malloc.h>
#include <rte_cpuflags.h>

#include "armv8_crypto_defs.h"

#include "rte_armv8_pmd_private.h"

static uint8_t cryptodev_driver_id;

static int cryptodev_armv8_crypto_uninit(struct rte_vdev_device *vdev);

/**
 * Pointers to the supported combined mode crypto functions are stored
 * in the static tables. Each combined (chained) cryptographic operation
 * can be described by a set of numbers:
 * - order: order of operations (cipher, auth) or (auth, cipher)
 * - direction: encryption or decryption
 * - calg: cipher algorithm such as AES_CBC, AES_CTR, etc.
 * - aalg: authentication algorithm such as SHA1, SHA256, etc.
 * - keyl: cipher key length, for example 128, 192, 256 bits
 *
 * In order to quickly acquire each function pointer based on those numbers,
 * a hierarchy of arrays is maintained. The final level, a 3D array, is indexed
 * by the combined mode function parameters only (cipher algorithm,
 * authentication algorithm and key length).
 *
 * This gives 3 memory accesses to obtain a function pointer, instead of
 * traversing the array manually and comparing function parameters on each
 * iteration.
 *
 *                      +--+CRYPTO_FUNC
 *             +--+ENC|
 *       +--+CA|
 *       |     +--+DEC
 * ORDER|
 *       |     +--+ENC
 *       +--+AC|
 *             +--+DEC
 *
 */

/**
 * 3D array type for ARM Combined Mode crypto functions pointers.
 * CRYPTO_CIPHER_MAX: max cipher ID number
 * CRYPTO_AUTH_MAX: max auth ID number
 * CRYPTO_CIPHER_KEYLEN_MAX: max key length ID number
 */
typedef const crypto_func_t
crypto_func_tbl_t[CRYPTO_CIPHER_MAX][CRYPTO_AUTH_MAX][CRYPTO_CIPHER_KEYLEN_MAX];

/* Evaluate to key length definition */
#define KEYL(keyl)		(ARMV8_CRYPTO_CIPHER_KEYLEN_ ## keyl)

/* Local aliases for supported ciphers */
#define CIPH_AES_CBC		RTE_CRYPTO_CIPHER_AES_CBC
/* Local aliases for supported hashes */
#define AUTH_SHA1_HMAC		RTE_CRYPTO_AUTH_SHA1_HMAC
#define AUTH_SHA256_HMAC	RTE_CRYPTO_AUTH_SHA256_HMAC

/**
 * Arrays containing pointers to particular cryptographic,
 * combined mode functions.
 * crypto_op_ca_encrypt:	cipher (encrypt), authenticate
 * crypto_op_ca_decrypt:	cipher (decrypt), authenticate
 * crypto_op_ac_encrypt:	authenticate, cipher (encrypt)
 * crypto_op_ac_decrypt:	authenticate, cipher (decrypt)
 */
static const crypto_func_tbl_t
crypto_op_ca_encrypt = {
	/* [cipher alg][auth alg][key length] = crypto_function, */
	[CIPH_AES_CBC][AUTH_SHA1_HMAC][KEYL(128)] = aes128cbc_sha1_hmac,
	[CIPH_AES_CBC][AUTH_SHA256_HMAC][KEYL(128)] = aes128cbc_sha256_hmac,
};

static const crypto_func_tbl_t
crypto_op_ca_decrypt = {
	NULL
};

static const crypto_func_tbl_t
crypto_op_ac_encrypt = {
	NULL
};

static const crypto_func_tbl_t
crypto_op_ac_decrypt = {
	/* [cipher alg][auth alg][key length] = crypto_function, */
	[CIPH_AES_CBC][AUTH_SHA1_HMAC][KEYL(128)] = sha1_hmac_aes128cbc_dec,
	[CIPH_AES_CBC][AUTH_SHA256_HMAC][KEYL(128)] = sha256_hmac_aes128cbc_dec,
};

/**
 * Arrays containing pointers to particular cryptographic function sets,
 * covering given cipher operation directions (encrypt, decrypt)
 * for each order of cipher and authentication pairs.
 */
static const crypto_func_tbl_t *
crypto_cipher_auth[] = {
	&crypto_op_ca_encrypt,
	&crypto_op_ca_decrypt,
	NULL
};

static const crypto_func_tbl_t *
crypto_auth_cipher[] = {
	&crypto_op_ac_encrypt,
	&crypto_op_ac_decrypt,
	NULL
};

/**
 * Top level array containing pointers to particular cryptographic
 * function sets, covering given order of chained operations.
 * crypto_cipher_auth:	cipher first, authenticate after
 * crypto_auth_cipher:	authenticate first, cipher after
 */
static const crypto_func_tbl_t **
crypto_chain_order[] = {
	crypto_cipher_auth,
	crypto_auth_cipher,
	NULL
};

/**
 * Extract particular combined mode crypto function from the 3D array.
 */
#define CRYPTO_GET_ALGO(order, cop, calg, aalg, keyl)			\
({									\
	crypto_func_tbl_t *func_tbl =					\
				(crypto_chain_order[(order)])[(cop)];	\
									\
	((*func_tbl)[(calg)][(aalg)][KEYL(keyl)]);			\
})
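
/*
 * Illustrative sketch, not part of the original driver: assuming the enum
 * values from rte_armv8_pmd_private.h index these tables directly, a lookup
 * for AES-128-CBC encryption chained with SHA1-HMAC resolves as
 *
 *	crypto_func_t f = CRYPTO_GET_ALGO(ARMV8_CRYPTO_CHAIN_CIPHER_AUTH,
 *					  RTE_CRYPTO_CIPHER_OP_ENCRYPT,
 *					  CIPH_AES_CBC, AUTH_SHA1_HMAC, 128);
 *
 * i.e. crypto_chain_order -> crypto_cipher_auth -> crypto_op_ca_encrypt
 * -> aes128cbc_sha1_hmac. Combinations absent from the tables evaluate to
 * NULL and are rejected during session setup.
 */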

/*----------------------------------------------------------------------------*/

/**
 * 2D array type for ARM key schedule functions pointers.
 * CRYPTO_CIPHER_MAX: max cipher ID number
 * CRYPTO_CIPHER_KEYLEN_MAX: max key length ID number
 */
typedef const crypto_key_sched_t
crypto_key_sched_tbl_t[CRYPTO_CIPHER_MAX][CRYPTO_CIPHER_KEYLEN_MAX];

static const crypto_key_sched_tbl_t
crypto_key_sched_encrypt = {
	/* [cipher alg][key length] = key_expand_func, */
	[CIPH_AES_CBC][KEYL(128)] = aes128_key_sched_enc,
};

static const crypto_key_sched_tbl_t
crypto_key_sched_decrypt = {
	/* [cipher alg][key length] = key_expand_func, */
	[CIPH_AES_CBC][KEYL(128)] = aes128_key_sched_dec,
};

/**
 * Top level array containing pointers to particular key generation
 * function sets, covering given operation direction.
 * crypto_key_sched_encrypt:	keys for encryption
 * crypto_key_sched_decrypt:	keys for decryption
 */
static const crypto_key_sched_tbl_t *
crypto_key_sched_dir[] = {
	&crypto_key_sched_encrypt,
	&crypto_key_sched_decrypt,
	NULL
};

/**
 * Extract particular key schedule function from the 2D array.
 */
#define CRYPTO_GET_KEY_SCHED(cop, calg, keyl)				\
({									\
	crypto_key_sched_tbl_t *ks_tbl = crypto_key_sched_dir[(cop)];	\
									\
	((*ks_tbl)[(calg)][KEYL(keyl)]);				\
})
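
/*
 * Illustrative sketch, not part of the original driver: assuming
 * RTE_CRYPTO_CIPHER_OP_DECRYPT selects the second entry of
 * crypto_key_sched_dir, expanding an AES-128 decryption key resolves as
 *
 *	crypto_key_sched_t ks = CRYPTO_GET_KEY_SCHED(
 *					RTE_CRYPTO_CIPHER_OP_DECRYPT,
 *					CIPH_AES_CBC, 128);
 *
 * i.e. crypto_key_sched_dir -> crypto_key_sched_decrypt
 * -> aes128_key_sched_dec.
 */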

/*----------------------------------------------------------------------------*/

/*
 *------------------------------------------------------------------------------
 * Session Prepare
 *------------------------------------------------------------------------------
 */

/** Get xform chain order */
static enum armv8_crypto_chain_order
armv8_crypto_get_chain_order(const struct rte_crypto_sym_xform *xform)
{

	/*
	 * This driver currently covers only chained operations.
	 * Reject cipher-only or authentication-only operations
	 * and chains longer than 2 xform structures.
	 */
	if (xform->next == NULL || xform->next->next != NULL)
		return ARMV8_CRYPTO_CHAIN_NOT_SUPPORTED;

	if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH) {
		if (xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
			return ARMV8_CRYPTO_CHAIN_AUTH_CIPHER;
	}

	if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER) {
		if (xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
			return ARMV8_CRYPTO_CHAIN_CIPHER_AUTH;
	}

	return ARMV8_CRYPTO_CHAIN_NOT_SUPPORTED;
}
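
/*
 * Illustrative sketch, not part of the original driver: the only shapes the
 * function above accepts are two-element chains, e.g.
 *
 *	struct rte_crypto_sym_xform cipher_xf, auth_xf;
 *
 *	cipher_xf.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
 *	cipher_xf.next = &auth_xf;
 *	auth_xf.type = RTE_CRYPTO_SYM_XFORM_AUTH;
 *	auth_xf.next = NULL;
 *
 * maps to ARMV8_CRYPTO_CHAIN_CIPHER_AUTH, while a single xform or a chain
 * of three or more yields ARMV8_CRYPTO_CHAIN_NOT_SUPPORTED.
 */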

static inline void
auth_hmac_pad_prepare(struct armv8_crypto_session *sess,
		const struct rte_crypto_sym_xform *xform)
{
	size_t i;

	/* Generate i_key_pad and o_key_pad */
	memset(sess->auth.hmac.i_key_pad, 0, sizeof(sess->auth.hmac.i_key_pad));
	rte_memcpy(sess->auth.hmac.i_key_pad, sess->auth.hmac.key,
							xform->auth.key.length);
	memset(sess->auth.hmac.o_key_pad, 0, sizeof(sess->auth.hmac.o_key_pad));
	rte_memcpy(sess->auth.hmac.o_key_pad, sess->auth.hmac.key,
							xform->auth.key.length);
	/*
	 * XOR the key with the IPAD/OPAD values to obtain i_key_pad
	 * and o_key_pad.
	 * A byte-by-byte operation may seem less efficient here, but in
	 * fact it is the opposite: the resulting assembly code is likely
	 * to operate on NEON registers (load the auth key to Qx, load
	 * IPAD/OPAD into multiple elements of Qy, then EOR 128 bits at
	 * once).
	 */
	for (i = 0; i < SHA_BLOCK_MAX; i++) {
		sess->auth.hmac.i_key_pad[i] ^= HMAC_IPAD_VALUE;
		sess->auth.hmac.o_key_pad[i] ^= HMAC_OPAD_VALUE;
	}
}
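
/*
 * Background note, not part of the original driver: the pads prepared above
 * implement the standard HMAC construction (RFC 2104),
 *
 *	HMAC(K, m) = H((K ^ opad) || H((K ^ ipad) || m))
 *
 * where ipad/opad are the bytes 0x36/0x5c repeated over one hash block.
 * auth_set_prerequisites() below pre-hashes the two single-block pads so
 * that each per-packet HMAC can resume from those saved intermediate states
 * instead of re-hashing the key material every time.
 */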

static inline int
auth_set_prerequisites(struct armv8_crypto_session *sess,
			const struct rte_crypto_sym_xform *xform)
{
	uint8_t partial[64] = { 0 };
	int error;

	switch (xform->auth.algo) {
	case RTE_CRYPTO_AUTH_SHA1_HMAC:
		/*
		 * Generate authentication key, i_key_pad and o_key_pad.
		 */
		/* Zero the key memory */
		memset(sess->auth.hmac.key, 0, SHA1_BLOCK_SIZE);

		/*
		 * Now copy the given authentication key to the session
		 * key.
		 */
		rte_memcpy(sess->auth.hmac.key, xform->auth.key.data,
						xform->auth.key.length);

		/* Prepare HMAC padding: key|pattern */
		auth_hmac_pad_prepare(sess, xform);
		/*
		 * Calculate partial hash values for i_key_pad and o_key_pad.
		 * Will be used as initialization state for final HMAC.
		 */
		error = sha1_block_partial(NULL, sess->auth.hmac.i_key_pad,
				partial, SHA1_BLOCK_SIZE);
		if (error != 0)
			return -1;
		memcpy(sess->auth.hmac.i_key_pad, partial, SHA1_BLOCK_SIZE);

		error = sha1_block_partial(NULL, sess->auth.hmac.o_key_pad,
				partial, SHA1_BLOCK_SIZE);
		if (error != 0)
			return -1;
		memcpy(sess->auth.hmac.o_key_pad, partial, SHA1_BLOCK_SIZE);

		break;
	case RTE_CRYPTO_AUTH_SHA256_HMAC:
		/*
		 * Generate authentication key, i_key_pad and o_key_pad.
		 */
		/* Zero the key memory */
		memset(sess->auth.hmac.key, 0, SHA256_BLOCK_SIZE);

		/*
		 * Now copy the given authentication key to the session
		 * key.
		 */
		rte_memcpy(sess->auth.hmac.key, xform->auth.key.data,
						xform->auth.key.length);

		/* Prepare HMAC padding: key|pattern */
		auth_hmac_pad_prepare(sess, xform);
		/*
		 * Calculate partial hash values for i_key_pad and o_key_pad.
		 * Will be used as initialization state for final HMAC.
		 */
		error = sha256_block_partial(NULL, sess->auth.hmac.i_key_pad,
				partial, SHA256_BLOCK_SIZE);
		if (error != 0)
			return -1;
		memcpy(sess->auth.hmac.i_key_pad, partial, SHA256_BLOCK_SIZE);

		error = sha256_block_partial(NULL, sess->auth.hmac.o_key_pad,
				partial, SHA256_BLOCK_SIZE);
		if (error != 0)
			return -1;
		memcpy(sess->auth.hmac.o_key_pad, partial, SHA256_BLOCK_SIZE);

		break;
	default:
		break;
	}

	return 0;
}

static inline int
cipher_set_prerequisites(struct armv8_crypto_session *sess,
			const struct rte_crypto_sym_xform *xform)
{
	crypto_key_sched_t cipher_key_sched;

	cipher_key_sched = sess->cipher.key_sched;
	if (likely(cipher_key_sched != NULL)) {
		/* Set up cipher session key */
		cipher_key_sched(sess->cipher.key.data, xform->cipher.key.data);
	}

	return 0;
}

static int
armv8_crypto_set_session_chained_parameters(struct armv8_crypto_session *sess,
		const struct rte_crypto_sym_xform *cipher_xform,
		const struct rte_crypto_sym_xform *auth_xform)
{
	enum armv8_crypto_chain_order order;
	enum armv8_crypto_cipher_operation cop;
	enum rte_crypto_cipher_algorithm calg;
	enum rte_crypto_auth_algorithm aalg;

	/* Validate and set the order of chained operations */
	switch (sess->chain_order) {
	case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH:
	case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER:
		order = sess->chain_order;
		break;
	default:
		return -ENOTSUP;
	}
	/* Select cipher direction */
	sess->cipher.direction = cipher_xform->cipher.op;
	/* Select cipher key */
	sess->cipher.key.length = cipher_xform->cipher.key.length;
	/* Set cipher direction */
	cop = sess->cipher.direction;
	/* Set cipher algorithm */
	calg = cipher_xform->cipher.algo;

	/* Select cipher algo */
	switch (calg) {
	/* Cover supported cipher algorithms */
	case RTE_CRYPTO_CIPHER_AES_CBC:
		sess->cipher.algo = calg;
		/* IV len is always 16 bytes (block size) for AES CBC */
		sess->cipher.iv.length = 16;
		break;
	default:
		return -ENOTSUP;
	}
	/* Select auth generate/verify */
	sess->auth.operation = auth_xform->auth.op;

	/* Select auth algo */
	switch (auth_xform->auth.algo) {
	/* Cover supported hash algorithms */
	case RTE_CRYPTO_AUTH_SHA1_HMAC:
	case RTE_CRYPTO_AUTH_SHA256_HMAC: /* Fall through */
		aalg = auth_xform->auth.algo;
		sess->auth.mode = ARMV8_CRYPTO_AUTH_AS_HMAC;
		break;
	default:
		return -ENOTSUP;
	}

	/* Set the digest length */
	sess->auth.digest_length = auth_xform->auth.digest_length;

	/* Verify supported key lengths and extract proper algorithm */
	switch (cipher_xform->cipher.key.length << 3) {
	case 128:
		sess->crypto_func =
				CRYPTO_GET_ALGO(order, cop, calg, aalg, 128);
		sess->cipher.key_sched =
				CRYPTO_GET_KEY_SCHED(cop, calg, 128);
		break;
	case 192:
	case 256:
		/* These key lengths are not supported yet */
	default: /* Fall through */
		sess->crypto_func = NULL;
		sess->cipher.key_sched = NULL;
		return -ENOTSUP;
	}

	if (unlikely(sess->crypto_func == NULL)) {
		/*
		 * If we got here, there must be a bug in the algorithm
		 * selection above. Nevertheless, keep this check to catch
		 * the bug immediately and avoid a NULL pointer dereference
		 * during op processing.
		 */
		ARMV8_CRYPTO_LOG_ERR(
			"No appropriate crypto function for given parameters");
		return -EINVAL;
	}

	/* Set up cipher session prerequisites */
	if (cipher_set_prerequisites(sess, cipher_xform) != 0)
		return -EINVAL;

	/* Set up authentication session prerequisites */
	if (auth_set_prerequisites(sess, auth_xform) != 0)
		return -EINVAL;

	return 0;
}
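
/*
 * Illustrative sketch, not part of the original driver: one xform chain the
 * parser below accepts (key128, hmac_key and IV_OFFSET are hypothetical
 * application definitions):
 *
 *	struct rte_crypto_sym_xform auth_xf = {
 *		.type = RTE_CRYPTO_SYM_XFORM_AUTH,
 *		.next = NULL,
 *		.auth = {
 *			.op = RTE_CRYPTO_AUTH_OP_GENERATE,
 *			.algo = RTE_CRYPTO_AUTH_SHA1_HMAC,
 *			.key = { .data = hmac_key, .length = 20 },
 *			.digest_length = 20,
 *		},
 *	};
 *	struct rte_crypto_sym_xform cipher_xf = {
 *		.type = RTE_CRYPTO_SYM_XFORM_CIPHER,
 *		.next = &auth_xf,
 *		.cipher = {
 *			.op = RTE_CRYPTO_CIPHER_OP_ENCRYPT,
 *			.algo = RTE_CRYPTO_CIPHER_AES_CBC,
 *			.key = { .data = key128, .length = 16 },
 *			.iv = { .offset = IV_OFFSET, .length = 16 },
 *		},
 *	};
 *
 * Any cipher key length other than 16 bytes (128 bits) is rejected with
 * -ENOTSUP by armv8_crypto_set_session_chained_parameters().
 */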

/** Parse crypto xform chain and set private session parameters */
int
armv8_crypto_set_session_parameters(struct armv8_crypto_session *sess,
		const struct rte_crypto_sym_xform *xform)
{
	const struct rte_crypto_sym_xform *cipher_xform = NULL;
	const struct rte_crypto_sym_xform *auth_xform = NULL;
	bool is_chained_op;
	int ret;

	/* Filter out spurious/broken requests */
	if (xform == NULL)
		return -EINVAL;

	sess->chain_order = armv8_crypto_get_chain_order(xform);
	switch (sess->chain_order) {
	case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH:
		cipher_xform = xform;
		auth_xform = xform->next;
		is_chained_op = true;
		break;
	case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER:
		auth_xform = xform;
		cipher_xform = xform->next;
		is_chained_op = true;
		break;
	default:
		is_chained_op = false;
		return -ENOTSUP;
	}

	/* Set IV offset */
	sess->cipher.iv.offset = cipher_xform->cipher.iv.offset;

	if (is_chained_op) {
		ret = armv8_crypto_set_session_chained_parameters(sess,
						cipher_xform, auth_xform);
		if (unlikely(ret != 0)) {
			ARMV8_CRYPTO_LOG_ERR(
			"Invalid/unsupported chained (cipher/auth) parameters");
			return ret;
		}
	} else {
		ARMV8_CRYPTO_LOG_ERR("Invalid/unsupported operation");
		return -ENOTSUP;
	}

	return 0;
}

/** Provide session for operation */
static inline struct armv8_crypto_session *
get_session(struct armv8_crypto_qp *qp, struct rte_crypto_op *op)
{
	struct armv8_crypto_session *sess = NULL;

	if (op->sess_type == RTE_CRYPTO_OP_WITH_SESSION) {
		/* get existing session */
		if (likely(op->sym->session != NULL)) {
			sess = (struct armv8_crypto_session *)
					get_sym_session_private_data(
						op->sym->session,
						cryptodev_driver_id);
		}
	} else {
		/* provide internal session */
		void *_sess = NULL;
		void *_sess_private_data = NULL;

		if (rte_mempool_get(qp->sess_mp, (void **)&_sess))
			return NULL;

		if (rte_mempool_get(qp->sess_mp, (void **)&_sess_private_data))
			return NULL;

		sess = (struct armv8_crypto_session *)_sess_private_data;

		if (unlikely(armv8_crypto_set_session_parameters(sess,
				op->sym->xform) != 0)) {
			rte_mempool_put(qp->sess_mp, _sess);
			rte_mempool_put(qp->sess_mp, _sess_private_data);
			sess = NULL;
		}
		op->sym->session = (struct rte_cryptodev_sym_session *)_sess;
		set_sym_session_private_data(op->sym->session,
				cryptodev_driver_id, _sess_private_data);
	}

	if (unlikely(sess == NULL))
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_SESSION;

	return sess;
}

/*
 *------------------------------------------------------------------------------
 * Process Operations
 *------------------------------------------------------------------------------
 */

/*----------------------------------------------------------------------------*/

/** Process chained (cipher/auth) operation */
static inline void
process_armv8_chained_op(struct armv8_crypto_qp *qp, struct rte_crypto_op *op,
		struct armv8_crypto_session *sess,
		struct rte_mbuf *mbuf_src, struct rte_mbuf *mbuf_dst)
{
	crypto_func_t crypto_func;
	crypto_arg_t arg;
	struct rte_mbuf *m_asrc, *m_adst;
	uint8_t *csrc, *cdst;
	uint8_t *adst, *asrc;
	uint64_t clen, alen;
	int error;

	clen = op->sym->cipher.data.length;
	alen = op->sym->auth.data.length;

	csrc = rte_pktmbuf_mtod_offset(mbuf_src, uint8_t *,
			op->sym->cipher.data.offset);
	cdst = rte_pktmbuf_mtod_offset(mbuf_dst, uint8_t *,
			op->sym->cipher.data.offset);

	switch (sess->chain_order) {
	case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH:
		m_asrc = m_adst = mbuf_dst;
		break;
	case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER:
		m_asrc = mbuf_src;
		m_adst = mbuf_dst;
		break;
	default:
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		return;
	}
	asrc = rte_pktmbuf_mtod_offset(m_asrc, uint8_t *,
			op->sym->auth.data.offset);

	switch (sess->auth.mode) {
	case ARMV8_CRYPTO_AUTH_AS_AUTH:
		/* Nothing to do here, just verify correct option */
		break;
	case ARMV8_CRYPTO_AUTH_AS_HMAC:
		arg.digest.hmac.key = sess->auth.hmac.key;
		arg.digest.hmac.i_key_pad = sess->auth.hmac.i_key_pad;
		arg.digest.hmac.o_key_pad = sess->auth.hmac.o_key_pad;
		break;
	default:
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		return;
	}

	if (sess->auth.operation == RTE_CRYPTO_AUTH_OP_GENERATE) {
		adst = op->sym->auth.digest.data;
		if (adst == NULL) {
			adst = rte_pktmbuf_mtod_offset(m_adst,
					uint8_t *,
					op->sym->auth.data.offset +
					op->sym->auth.data.length);
		}
	} else {
		adst = qp->temp_digest;
	}

	arg.cipher.iv = rte_crypto_op_ctod_offset(op, uint8_t *,
					sess->cipher.iv.offset);
	arg.cipher.key = sess->cipher.key.data;
	/* Acquire combined mode function */
	crypto_func = sess->crypto_func;
	ARMV8_CRYPTO_ASSERT(crypto_func != NULL);
	error = crypto_func(csrc, cdst, clen, asrc, adst, alen, &arg);
	if (error != 0) {
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		return;
	}

	op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
	if (sess->auth.operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
		if (memcmp(adst, op->sym->auth.digest.data,
				sess->auth.digest_length) != 0) {
			op->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED;
		}
	}
}
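
/*
 * Background note, not part of the original driver: in the CIPHER_AUTH
 * order the authentication source is mbuf_dst because the digest must cover
 * the ciphertext the cipher stage has just produced, while in the
 * AUTH_CIPHER order the digest is computed over the data as it looks before
 * the cipher stage transforms it, so the auth source stays mbuf_src. For
 * digest verification the computed value lands in qp->temp_digest and is
 * compared against the digest supplied in the op.
 */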

/** Process crypto operation for mbuf */
static inline int
process_op(struct armv8_crypto_qp *qp, struct rte_crypto_op *op,
		struct armv8_crypto_session *sess)
{
	struct rte_mbuf *msrc, *mdst;

	msrc = op->sym->m_src;
	mdst = op->sym->m_dst ? op->sym->m_dst : op->sym->m_src;

	op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;

	switch (sess->chain_order) {
	case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH:
	case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER: /* Fall through */
		process_armv8_chained_op(qp, op, sess, msrc, mdst);
		break;
	default:
		op->status = RTE_CRYPTO_OP_STATUS_ERROR;
		break;
	}

	/* Free session if a session-less crypto op */
	if (op->sess_type == RTE_CRYPTO_OP_SESSIONLESS) {
		memset(sess, 0, sizeof(struct armv8_crypto_session));
		memset(op->sym->session, 0,
			rte_cryptodev_sym_get_header_session_size());
		rte_mempool_put(qp->sess_mp, sess);
		rte_mempool_put(qp->sess_mp, op->sym->session);
		op->sym->session = NULL;
	}

	if (op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED)
		op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;

	if (unlikely(op->status == RTE_CRYPTO_OP_STATUS_ERROR))
		return -1;

	return 0;
}

/*
 *------------------------------------------------------------------------------
 * PMD Framework
 *------------------------------------------------------------------------------
 */

/** Enqueue burst */
static uint16_t
armv8_crypto_pmd_enqueue_burst(void *queue_pair, struct rte_crypto_op **ops,
		uint16_t nb_ops)
{
	struct armv8_crypto_session *sess;
	struct armv8_crypto_qp *qp = queue_pair;
	int i, retval;

	for (i = 0; i < nb_ops; i++) {
		sess = get_session(qp, ops[i]);
		if (unlikely(sess == NULL))
			goto enqueue_err;

		retval = process_op(qp, ops[i], sess);
		if (unlikely(retval < 0))
			goto enqueue_err;
	}

	retval = rte_ring_enqueue_burst(qp->processed_ops, (void *)ops, i,
			NULL);
	qp->stats.enqueued_count += retval;

	return retval;

enqueue_err:
	retval = rte_ring_enqueue_burst(qp->processed_ops, (void *)ops, i,
			NULL);
	if (ops[i] != NULL)
		ops[i]->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;

	qp->stats.enqueue_err_count++;
	return retval;
}

/** Dequeue burst */
static uint16_t
armv8_crypto_pmd_dequeue_burst(void *queue_pair, struct rte_crypto_op **ops,
		uint16_t nb_ops)
{
	struct armv8_crypto_qp *qp = queue_pair;

	unsigned int nb_dequeued = 0;

	nb_dequeued = rte_ring_dequeue_burst(qp->processed_ops,
			(void **)ops, nb_ops, NULL);
	qp->stats.dequeued_count += nb_dequeued;

	return nb_dequeued;
}
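
/*
 * Illustrative usage sketch, not part of the original driver: this PMD
 * processes each operation synchronously inside the enqueue call and only
 * parks finished ops on qp->processed_ops, so from the application side
 * (dev_id, qp_id, ops and nb_ops are hypothetical)
 *
 *	uint16_t n = rte_cryptodev_enqueue_burst(dev_id, qp_id, ops, nb_ops);
 *	uint16_t m = rte_cryptodev_dequeue_burst(dev_id, qp_id, ops, n);
 *
 * every successfully enqueued op is immediately available for dequeue.
 */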

/** Create ARMv8 crypto device */
static int
cryptodev_armv8_crypto_create(const char *name,
			struct rte_vdev_device *vdev,
			struct rte_cryptodev_pmd_init_params *init_params)
{
	struct rte_cryptodev *dev;
	struct armv8_crypto_private *internals;

	/* Check CPU support for the AES instruction set */
	if (!rte_cpu_get_flag_enabled(RTE_CPUFLAG_AES)) {
		ARMV8_CRYPTO_LOG_ERR(
			"AES instructions not supported by CPU");
		return -EFAULT;
	}

	/* Check CPU support for the SHA instruction set */
	if (!rte_cpu_get_flag_enabled(RTE_CPUFLAG_SHA1) ||
	    !rte_cpu_get_flag_enabled(RTE_CPUFLAG_SHA2)) {
		ARMV8_CRYPTO_LOG_ERR(
			"SHA1/SHA2 instructions not supported by CPU");
		return -EFAULT;
	}

	/* Check CPU support for the Advanced SIMD instruction set */
	if (!rte_cpu_get_flag_enabled(RTE_CPUFLAG_NEON)) {
		ARMV8_CRYPTO_LOG_ERR(
			"Advanced SIMD instructions not supported by CPU");
		return -EFAULT;
	}

	dev = rte_cryptodev_pmd_create(name, &vdev->device, init_params);
	if (dev == NULL) {
		ARMV8_CRYPTO_LOG_ERR("failed to create cryptodev vdev");
		goto init_error;
	}

	dev->driver_id = cryptodev_driver_id;
	dev->dev_ops = rte_armv8_crypto_pmd_ops;

	/* register rx/tx burst functions for data path */
	dev->dequeue_burst = armv8_crypto_pmd_dequeue_burst;
	dev->enqueue_burst = armv8_crypto_pmd_enqueue_burst;

	dev->feature_flags = RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO |
			RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING |
			RTE_CRYPTODEV_FF_CPU_NEON |
			RTE_CRYPTODEV_FF_CPU_ARM_CE;

	/* Set vector instructions mode supported */
	internals = dev->data->dev_private;

	internals->max_nb_qpairs = init_params->max_nb_queue_pairs;

	return 0;

init_error:
	ARMV8_CRYPTO_LOG_ERR(
		"driver %s: cryptodev_armv8_crypto_create failed",
		init_params->name);

	cryptodev_armv8_crypto_uninit(vdev);
	return -EFAULT;
}

/** Initialise ARMv8 crypto device */
static int
cryptodev_armv8_crypto_init(struct rte_vdev_device *vdev)
{
	struct rte_cryptodev_pmd_init_params init_params = {
		"",
		sizeof(struct armv8_crypto_private),
		rte_socket_id(),
		RTE_CRYPTODEV_PMD_DEFAULT_MAX_NB_QUEUE_PAIRS
	};
	const char *name;
	const char *input_args;

	name = rte_vdev_device_name(vdev);
	if (name == NULL)
		return -EINVAL;
	input_args = rte_vdev_device_args(vdev);
	rte_cryptodev_pmd_parse_input_args(&init_params, input_args);

	return cryptodev_armv8_crypto_create(name, vdev, &init_params);
}

/** Uninitialise ARMv8 crypto device */
static int
cryptodev_armv8_crypto_uninit(struct rte_vdev_device *vdev)
{
	struct rte_cryptodev *cryptodev;
	const char *name;

	name = rte_vdev_device_name(vdev);
	if (name == NULL)
		return -EINVAL;

	RTE_LOG(INFO, PMD,
		"Closing ARMv8 crypto device %s on numa socket %u\n",
		name, rte_socket_id());

	cryptodev = rte_cryptodev_pmd_get_named_dev(name);
	if (cryptodev == NULL)
		return -ENODEV;

	return rte_cryptodev_pmd_destroy(cryptodev);
}

static struct rte_vdev_driver armv8_crypto_pmd_drv = {
	.probe = cryptodev_armv8_crypto_init,
	.remove = cryptodev_armv8_crypto_uninit
};

static struct cryptodev_driver armv8_crypto_drv;

RTE_PMD_REGISTER_VDEV(CRYPTODEV_NAME_ARMV8_PMD, armv8_crypto_pmd_drv);
RTE_PMD_REGISTER_ALIAS(CRYPTODEV_NAME_ARMV8_PMD, cryptodev_armv8_pmd);
RTE_PMD_REGISTER_PARAM_STRING(CRYPTODEV_NAME_ARMV8_PMD,
	"max_nb_queue_pairs=<int> "
	"socket_id=<int>");
RTE_PMD_REGISTER_CRYPTO_DRIVER(armv8_crypto_drv, armv8_crypto_pmd_drv.driver,
		cryptodev_driver_id);