/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2018 Advanced Micro Devices, Inc. All rights reserved.
 */

#include <string.h>

#include <rte_common.h>
#include <rte_cryptodev_pmd.h>
#include <rte_malloc.h>

#include "ccp_pmd_private.h"
#include "ccp_dev.h"
#include "ccp_crypto.h"

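/*
 * Symmetric crypto capabilities advertised by the CCP PMD.
 *
 * CCP_BASE_SYM_CRYPTO_CAPABILITIES covers the algorithms handled by the CCP
 * engine itself.  CCP_EXTRA_SYM_CRYPTO_CAPABILITIES adds MD5-HMAC, which is
 * only advertised when CPU-based authentication is enabled
 * (internals->auth_opt); see ccp_pmd_info_get().
 */
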
#define CCP_BASE_SYM_CRYPTO_CAPABILITIES \
	{	/* SHA1 */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		.sym = {.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
			.auth = {.algo = RTE_CRYPTO_AUTH_SHA1} }, \
	}, \
	{	/* SHA1 HMAC */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		.sym = {.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
			.auth = {.algo = RTE_CRYPTO_AUTH_SHA1_HMAC} }, \
	}, \
	{	/* SHA224 */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		.sym = {.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
			.auth = {.algo = RTE_CRYPTO_AUTH_SHA224} }, \
	}, \
	{	/* SHA224 HMAC */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		.sym = {.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
			.auth = {.algo = RTE_CRYPTO_AUTH_SHA224_HMAC} }, \
	}, \
	{	/* SHA3-224 */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		.sym = {.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
			.auth = {.algo = RTE_CRYPTO_AUTH_SHA3_224} }, \
	}, \
	{	/* SHA3-224 HMAC */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		.sym = {.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
			.auth = {.algo = RTE_CRYPTO_AUTH_SHA3_224_HMAC} }, \
	}, \
	{	/* SHA256 */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		.sym = {.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
			.auth = {.algo = RTE_CRYPTO_AUTH_SHA256} }, \
	}, \
	{	/* SHA256 HMAC */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		.sym = {.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
			.auth = {.algo = RTE_CRYPTO_AUTH_SHA256_HMAC} }, \
	}, \
	{	/* SHA3-256 */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		.sym = {.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
			.auth = {.algo = RTE_CRYPTO_AUTH_SHA3_256} }, \
	}, \
	{	/* SHA3-256 HMAC */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		.sym = {.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
			.auth = {.algo = RTE_CRYPTO_AUTH_SHA3_256_HMAC} }, \
	}, \
	{	/* SHA384 */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		.sym = {.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
			.auth = {.algo = RTE_CRYPTO_AUTH_SHA384} }, \
	}, \
	{	/* SHA384 HMAC */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		.sym = {.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
			.auth = {.algo = RTE_CRYPTO_AUTH_SHA384_HMAC} }, \
	}, \
	{	/* SHA3-384 */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		.sym = {.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
			.auth = {.algo = RTE_CRYPTO_AUTH_SHA3_384} }, \
	}, \
	{	/* SHA3-384 HMAC */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		.sym = {.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
			.auth = {.algo = RTE_CRYPTO_AUTH_SHA3_384_HMAC} }, \
	}, \
	{	/* SHA512 */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		.sym = {.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
			.auth = {.algo = RTE_CRYPTO_AUTH_SHA512} }, \
	}, \
	{	/* SHA512 HMAC */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		.sym = {.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
			.auth = {.algo = RTE_CRYPTO_AUTH_SHA512_HMAC} }, \
	}, \
	{	/* SHA3-512 */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		.sym = {.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
			.auth = {.algo = RTE_CRYPTO_AUTH_SHA3_512} }, \
	}, \
	{	/* SHA3-512 HMAC */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		.sym = {.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
			.auth = {.algo = RTE_CRYPTO_AUTH_SHA3_512_HMAC} }, \
	}, \
	{	/* AES CMAC */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		.sym = {.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
			.auth = {.algo = RTE_CRYPTO_AUTH_AES_CMAC} }, \
	}, \
	{	/* AES ECB */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		.sym = {.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER, \
			.cipher = {.algo = RTE_CRYPTO_CIPHER_AES_ECB} }, \
	}, \
	{	/* AES CBC */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		.sym = {.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER, \
			.cipher = {.algo = RTE_CRYPTO_CIPHER_AES_CBC} }, \
	}, \
	{	/* AES CTR */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		.sym = {.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER, \
			.cipher = {.algo = RTE_CRYPTO_CIPHER_AES_CTR} }, \
	}, \
	{	/* 3DES CBC */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		.sym = {.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER, \
			.cipher = {.algo = RTE_CRYPTO_CIPHER_3DES_CBC} }, \
	}, \
	{	/* AES GCM */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		.sym = {.xform_type = RTE_CRYPTO_SYM_XFORM_AEAD, \
			.aead = {.algo = RTE_CRYPTO_AEAD_AES_GCM} }, \
	}

#define CCP_EXTRA_SYM_CRYPTO_CAPABILITIES \
	{	/* MD5 HMAC */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		.sym = {.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
			.auth = {.algo = RTE_CRYPTO_AUTH_MD5_HMAC} }, \
	}

static const struct rte_cryptodev_capabilities ccp_crypto_cap[] = {
	CCP_BASE_SYM_CRYPTO_CAPABILITIES,
	RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
};

static const struct rte_cryptodev_capabilities ccp_crypto_cap_complete[] = {
	CCP_EXTRA_SYM_CRYPTO_CAPABILITIES,
	CCP_BASE_SYM_CRYPTO_CAPABILITIES,
	RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
};

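/*
 * Illustrative only (not part of this driver): an application walks whichever
 * table ccp_pmd_info_get() advertises, e.g.
 *
 *	struct rte_cryptodev_info info;
 *	const struct rte_cryptodev_capabilities *cap;
 *
 *	rte_cryptodev_info_get(dev_id, &info);
 *	for (cap = info.capabilities;
 *	     cap->op != RTE_CRYPTO_OP_TYPE_UNDEFINED; cap++)
 *		use(cap);
 *
 * where dev_id and use() stand in for application code.
 */
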
static int
ccp_pmd_config(struct rte_cryptodev *dev __rte_unused,
	       struct rte_cryptodev_config *config __rte_unused)
{
	return 0;
}

static int
ccp_pmd_start(struct rte_cryptodev *dev)
{
	return ccp_dev_start(dev);
}

static void
ccp_pmd_stop(struct rte_cryptodev *dev __rte_unused)
{

}

static int
ccp_pmd_close(struct rte_cryptodev *dev __rte_unused)
{
	return 0;
}

static void
ccp_pmd_stats_get(struct rte_cryptodev *dev,
		  struct rte_cryptodev_stats *stats)
{
	int qp_id;

	/* Accumulate per-queue-pair counters into the device-wide stats. */
	for (qp_id = 0; qp_id < dev->data->nb_queue_pairs; qp_id++) {
		struct ccp_qp *qp = dev->data->queue_pairs[qp_id];

		stats->enqueued_count += qp->qp_stats.enqueued_count;
		stats->dequeued_count += qp->qp_stats.dequeued_count;

		stats->enqueue_err_count += qp->qp_stats.enqueue_err_count;
		stats->dequeue_err_count += qp->qp_stats.dequeue_err_count;
	}
}

static void
ccp_pmd_stats_reset(struct rte_cryptodev *dev)
{
	int qp_id;

	for (qp_id = 0; qp_id < dev->data->nb_queue_pairs; qp_id++) {
		struct ccp_qp *qp = dev->data->queue_pairs[qp_id];

		memset(&qp->qp_stats, 0, sizeof(qp->qp_stats));
	}
}

static void
ccp_pmd_info_get(struct rte_cryptodev *dev,
		 struct rte_cryptodev_info *dev_info)
{
	struct ccp_private *internals = dev->data->dev_private;

	if (dev_info != NULL) {
		dev_info->driver_id = dev->driver_id;
		dev_info->feature_flags = dev->feature_flags;
		dev_info->capabilities = ccp_crypto_cap;
		/* Also advertise MD5-HMAC when CPU-based auth is enabled */
		if (internals->auth_opt == 1)
			dev_info->capabilities = ccp_crypto_cap_complete;
		dev_info->max_nb_queue_pairs = internals->max_nb_qpairs;
		/* No limit on the number of sessions */
		dev_info->sym.max_nb_sessions = 0;
	}
}

static int
ccp_pmd_qp_release(struct rte_cryptodev *dev, uint16_t qp_id)
{
	struct ccp_qp *qp;

	if (dev->data->queue_pairs[qp_id] != NULL) {
		qp = (struct ccp_qp *)dev->data->queue_pairs[qp_id];
		rte_ring_free(qp->processed_pkts);
		rte_mempool_free(qp->batch_mp);
		rte_free(qp);
		dev->data->queue_pairs[qp_id] = NULL;
	}
	return 0;
}

static int
ccp_pmd_qp_set_unique_name(struct rte_cryptodev *dev,
		struct ccp_qp *qp)
{
	unsigned int n = snprintf(qp->name, sizeof(qp->name),
			"ccp_pmd_%u_qp_%u",
			dev->data->dev_id, qp->id);

	if (n > sizeof(qp->name))
		return -1;

	return 0;
}

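/*
 * The batch-info ring below is looked up by the queue pair name set above:
 * an already existing ring is reused when it is large enough (e.g. after a
 * device restart or reconfigure), otherwise a new single-producer,
 * single-consumer ring is created.
 */
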
static struct rte_ring *
ccp_pmd_qp_create_batch_info_ring(struct ccp_qp *qp,
				  unsigned int ring_size, int socket_id)
{
	struct rte_ring *r;

	r = rte_ring_lookup(qp->name);
	if (r) {
		if (r->size >= ring_size) {
			CCP_LOG_INFO(
				"Reusing ring %s for processed packets",
				qp->name);
			return r;
		}
		CCP_LOG_INFO(
			"Unable to reuse ring %s for processed packets",
			qp->name);
		return NULL;
	}

	return rte_ring_create(qp->name, ring_size, socket_id,
			       RING_F_SP_ENQ | RING_F_SC_DEQ);
}

static int
ccp_pmd_qp_setup(struct rte_cryptodev *dev, uint16_t qp_id,
		 const struct rte_cryptodev_qp_conf *qp_conf,
		 int socket_id)
{
	struct ccp_private *internals = dev->data->dev_private;
	struct ccp_qp *qp;
	int retval = 0;

	if (qp_id >= internals->max_nb_qpairs) {
		CCP_LOG_ERR("Invalid qp_id %u, should be less than %u",
			    qp_id, internals->max_nb_qpairs);
		return -EINVAL;
	}

	/* Free memory prior to re-allocation if needed. */
	if (dev->data->queue_pairs[qp_id] != NULL)
		ccp_pmd_qp_release(dev, qp_id);

	/* Allocate the queue pair data structure. */
	qp = rte_zmalloc_socket("CCP Crypto PMD Queue Pair", sizeof(*qp),
				RTE_CACHE_LINE_SIZE, socket_id);
	if (qp == NULL) {
		CCP_LOG_ERR("Failed to allocate queue pair memory");
		return -ENOMEM;
	}

	qp->dev = dev;
	qp->id = qp_id;
	dev->data->queue_pairs[qp_id] = qp;

	retval = ccp_pmd_qp_set_unique_name(dev, qp);
	if (retval) {
		CCP_LOG_ERR("Failed to create unique name for ccp qp");
		goto qp_setup_cleanup;
	}

	qp->processed_pkts = ccp_pmd_qp_create_batch_info_ring(qp,
					qp_conf->nb_descriptors, socket_id);
	if (qp->processed_pkts == NULL) {
		CCP_LOG_ERR("Failed to create batch info ring");
		goto qp_setup_cleanup;
	}

	qp->sess_mp = qp_conf->mp_session;
	qp->sess_mp_priv = qp_conf->mp_session_private;

	/* Mempool for batch info */
	qp->batch_mp = rte_mempool_create(
				qp->name,
				qp_conf->nb_descriptors,
				sizeof(struct ccp_batch_info),
				RTE_CACHE_LINE_SIZE,
				0, NULL, NULL, NULL, NULL,
				SOCKET_ID_ANY, 0);
	if (qp->batch_mp == NULL)
		goto qp_setup_cleanup;
	memset(&qp->qp_stats, 0, sizeof(qp->qp_stats));
	return 0;

qp_setup_cleanup:
	dev->data->queue_pairs[qp_id] = NULL;
	rte_free(qp);
	return -1;
}

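/*
 * Illustrative only (application side; pool names are assumptions): the setup
 * above is reached through the generic queue pair API, e.g.
 *
 *	struct rte_cryptodev_qp_conf qp_conf = {
 *		.nb_descriptors = 2048,
 *		.mp_session = session_pool,
 *		.mp_session_private = session_priv_pool,
 *	};
 *
 *	rte_cryptodev_queue_pair_setup(dev_id, 0, &qp_conf, rte_socket_id());
 */
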
static uint32_t
ccp_pmd_qp_count(struct rte_cryptodev *dev)
{
	return dev->data->nb_queue_pairs;
}

static unsigned int
ccp_pmd_sym_session_get_size(struct rte_cryptodev *dev __rte_unused)
{
	return sizeof(struct ccp_session);
}

static int
ccp_pmd_sym_session_configure(struct rte_cryptodev *dev,
			      struct rte_crypto_sym_xform *xform,
			      struct rte_cryptodev_sym_session *sess,
			      struct rte_mempool *mempool)
{
	int ret;
	void *sess_private_data;
	struct ccp_private *internals;

	if (unlikely(sess == NULL || xform == NULL)) {
		CCP_LOG_ERR("Invalid session struct or xform");
		return -ENOTSUP;
	}

	if (rte_mempool_get(mempool, &sess_private_data)) {
		CCP_LOG_ERR("Couldn't get object from session mempool");
		return -ENOMEM;
	}

	internals = (struct ccp_private *)dev->data->dev_private;
	ret = ccp_set_session_parameters(sess_private_data, xform, internals);
	if (ret != 0) {
		CCP_LOG_ERR("Failed to configure session parameters");

		/* Return session to mempool */
		rte_mempool_put(mempool, sess_private_data);
		return ret;
	}

	set_sym_session_private_data(sess, dev->driver_id,
				     sess_private_data);

	return 0;
}

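/*
 * Illustrative only (application side): the configure/clear handlers here
 * back the generic symmetric session API, e.g.
 *
 *	struct rte_cryptodev_sym_session *sess =
 *		rte_cryptodev_sym_session_create(session_pool);
 *
 *	rte_cryptodev_sym_session_init(dev_id, sess, &xform,
 *				       session_priv_pool);
 *	...
 *	rte_cryptodev_sym_session_clear(dev_id, sess);
 *	rte_cryptodev_sym_session_free(sess);
 */
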
static void
ccp_pmd_sym_session_clear(struct rte_cryptodev *dev,
			  struct rte_cryptodev_sym_session *sess)
{
	uint8_t index = dev->driver_id;
	void *sess_priv = get_sym_session_private_data(sess, index);

	if (sess_priv) {
		struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);

		/* Zero out and detach the private data before handing the
		 * object back to its mempool.
		 */
		memset(sess_priv, 0, sizeof(struct ccp_session));
		set_sym_session_private_data(sess, index, NULL);
		rte_mempool_put(sess_mp, sess_priv);
	}
}

struct rte_cryptodev_ops ccp_ops = {
		.dev_configure		= ccp_pmd_config,
		.dev_start		= ccp_pmd_start,
		.dev_stop		= ccp_pmd_stop,
		.dev_close		= ccp_pmd_close,

		.stats_get		= ccp_pmd_stats_get,
		.stats_reset		= ccp_pmd_stats_reset,

		.dev_infos_get		= ccp_pmd_info_get,

		.queue_pair_setup	= ccp_pmd_qp_setup,
		.queue_pair_release	= ccp_pmd_qp_release,
		.queue_pair_count	= ccp_pmd_qp_count,

		.sym_session_get_size	= ccp_pmd_sym_session_get_size,
		.sym_session_configure	= ccp_pmd_sym_session_configure,
		.sym_session_clear	= ccp_pmd_sym_session_clear,
};

struct rte_cryptodev_ops *ccp_pmd_ops = &ccp_ops;
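
/*
 * ccp_pmd_ops is the table handed to the cryptodev framework; it is expected
 * to be assigned to dev->dev_ops in the PMD probe/init path (rte_ccp_pmd.c
 * in this driver).
 */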