/*
 *   Copyright (C) Cavium networks Ltd. 2017.
 *
 *   Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions
 *   are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in
 *       the documentation and/or other materials provided with the
 *       distribution.
 *     * Neither the name of Cavium networks nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 *   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 *   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 *   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 *   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 *   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include <stdio.h>
#include <string.h>

#include <rte_common.h>
#include <rte_malloc.h>
#include <rte_cryptodev_pmd.h>

#include "armv8_crypto_defs.h"

#include "rte_armv8_pmd_private.h"

static const struct rte_cryptodev_capabilities
	armv8_crypto_pmd_capabilities[] = {
	{	/* SHA1 HMAC */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
			{.auth = {
				.algo = RTE_CRYPTO_AUTH_SHA1_HMAC,
			}, }
		}, }
	},
	{	/* SHA256 HMAC */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
			{.auth = {
				.algo = RTE_CRYPTO_AUTH_SHA256_HMAC,
			}, }
		}, }
	},
	{	/* AES CBC */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
			{.cipher = {
				.algo = RTE_CRYPTO_CIPHER_AES_CBC,
			}, }
		}, }
	},

	RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
};

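/*
 * Illustrative only (not part of this driver): an application can discover
 * the algorithms advertised above through the generic cryptodev info call
 * and walk the table until the terminating entry, whose .op is
 * RTE_CRYPTO_OP_TYPE_UNDEFINED. A minimal sketch, assuming a valid dev_id:
 *
 *	struct rte_cryptodev_info info;
 *	const struct rte_cryptodev_capabilities *cap;
 *
 *	rte_cryptodev_info_get(dev_id, &info);
 *	for (cap = info.capabilities;
 *	     cap->op != RTE_CRYPTO_OP_TYPE_UNDEFINED; cap++) {
 *		if (cap->sym.xform_type == RTE_CRYPTO_SYM_XFORM_CIPHER)
 *			printf("cipher algo %u supported\n",
 *				cap->sym.cipher.algo);
 *	}
 */
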
/** Configure device */
static int
armv8_crypto_pmd_config(__rte_unused struct rte_cryptodev *dev,
		__rte_unused struct rte_cryptodev_config *config)
{
	return 0;
}

/** Start device */
static int
armv8_crypto_pmd_start(__rte_unused struct rte_cryptodev *dev)
{
	return 0;
}

/** Stop device */
static void
armv8_crypto_pmd_stop(__rte_unused struct rte_cryptodev *dev)
{
}

/** Close device */
static int
armv8_crypto_pmd_close(__rte_unused struct rte_cryptodev *dev)
{
	return 0;
}

/** Get device statistics */
static void
armv8_crypto_pmd_stats_get(struct rte_cryptodev *dev,
		struct rte_cryptodev_stats *stats)
{
	int qp_id;

	for (qp_id = 0; qp_id < dev->data->nb_queue_pairs; qp_id++) {
		struct armv8_crypto_qp *qp = dev->data->queue_pairs[qp_id];

		stats->enqueued_count += qp->stats.enqueued_count;
		stats->dequeued_count += qp->stats.dequeued_count;

		stats->enqueue_err_count += qp->stats.enqueue_err_count;
		stats->dequeue_err_count += qp->stats.dequeue_err_count;
	}
}

/** Reset device statistics */
static void
armv8_crypto_pmd_stats_reset(struct rte_cryptodev *dev)
{
	int qp_id;

	for (qp_id = 0; qp_id < dev->data->nb_queue_pairs; qp_id++) {
		struct armv8_crypto_qp *qp = dev->data->queue_pairs[qp_id];

		memset(&qp->stats, 0, sizeof(qp->stats));
	}
}

/** Get device info */
static void
armv8_crypto_pmd_info_get(struct rte_cryptodev *dev,
		struct rte_cryptodev_info *dev_info)
{
	struct armv8_crypto_private *internals = dev->data->dev_private;

	if (dev_info != NULL) {
		dev_info->dev_type = dev->dev_type;
		dev_info->feature_flags = dev->feature_flags;
		dev_info->capabilities = armv8_crypto_pmd_capabilities;
		dev_info->max_nb_queue_pairs = internals->max_nb_qpairs;
		dev_info->sym.max_nb_sessions = internals->max_nb_sessions;
	}
}

/** Release queue pair */
static int
armv8_crypto_pmd_qp_release(struct rte_cryptodev *dev, uint16_t qp_id)
{
	if (dev->data->queue_pairs[qp_id] != NULL) {
		rte_free(dev->data->queue_pairs[qp_id]);
		dev->data->queue_pairs[qp_id] = NULL;
	}

	return 0;
}

/** Set a unique name for the queue pair based on its name, dev_id and qp_id */
static int
armv8_crypto_pmd_qp_set_unique_name(struct rte_cryptodev *dev,
		struct armv8_crypto_qp *qp)
{
	unsigned int n;

	n = snprintf(qp->name, sizeof(qp->name), "armv8_crypto_pmd_%u_qp_%u",
			dev->data->dev_id, qp->id);

	if (n > sizeof(qp->name))
		return -1;

	return 0;
}

/** Create a ring to place processed operations on */
static struct rte_ring *
armv8_crypto_pmd_qp_create_processed_ops_ring(struct armv8_crypto_qp *qp,
		unsigned int ring_size, int socket_id)
{
	struct rte_ring *r;

	r = rte_ring_lookup(qp->name);
	if (r) {
		if (rte_ring_get_size(r) >= ring_size) {
			ARMV8_CRYPTO_LOG_INFO(
				"Reusing existing ring %s for processed ops",
				qp->name);
			return r;
		}

		ARMV8_CRYPTO_LOG_ERR(
			"Unable to reuse existing ring %s for processed ops",
			qp->name);
		return NULL;
	}

	return rte_ring_create(qp->name, ring_size, socket_id,
			RING_F_SP_ENQ | RING_F_SC_DEQ);
}

/** Setup a queue pair */
static int
armv8_crypto_pmd_qp_setup(struct rte_cryptodev *dev, uint16_t qp_id,
		const struct rte_cryptodev_qp_conf *qp_conf,
		int socket_id)
{
	struct armv8_crypto_qp *qp = NULL;

	/* Free memory prior to re-allocation if needed. */
	if (dev->data->queue_pairs[qp_id] != NULL)
		armv8_crypto_pmd_qp_release(dev, qp_id);

	/* Allocate the queue pair data structure. */
	qp = rte_zmalloc_socket("ARMv8 PMD Queue Pair", sizeof(*qp),
			RTE_CACHE_LINE_SIZE, socket_id);
	if (qp == NULL)
		return -ENOMEM;

	qp->id = qp_id;
	dev->data->queue_pairs[qp_id] = qp;

	if (armv8_crypto_pmd_qp_set_unique_name(dev, qp) != 0)
		goto qp_setup_cleanup;

	qp->processed_ops = armv8_crypto_pmd_qp_create_processed_ops_ring(qp,
			qp_conf->nb_descriptors, socket_id);
	if (qp->processed_ops == NULL)
		goto qp_setup_cleanup;

	qp->sess_mp = dev->data->session_pool;

	memset(&qp->stats, 0, sizeof(qp->stats));

	return 0;

qp_setup_cleanup:
	if (qp)
		rte_free(qp);

	return -1;
}

/** Start queue pair */
static int
armv8_crypto_pmd_qp_start(__rte_unused struct rte_cryptodev *dev,
		__rte_unused uint16_t queue_pair_id)
{
	return -ENOTSUP;
}

/** Stop queue pair */
static int
armv8_crypto_pmd_qp_stop(__rte_unused struct rte_cryptodev *dev,
		__rte_unused uint16_t queue_pair_id)
{
	return -ENOTSUP;
}

/** Return the number of allocated queue pairs */
static uint32_t
armv8_crypto_pmd_qp_count(struct rte_cryptodev *dev)
{
	return dev->data->nb_queue_pairs;
}

/** Return the size of the session structure */
static unsigned int
armv8_crypto_pmd_session_get_size(struct rte_cryptodev *dev __rte_unused)
{
	return sizeof(struct armv8_crypto_session);
}

/** Configure the session from a crypto xform chain */
static void *
armv8_crypto_pmd_session_configure(struct rte_cryptodev *dev __rte_unused,
		struct rte_crypto_sym_xform *xform, void *sess)
{
	if (unlikely(sess == NULL)) {
		ARMV8_CRYPTO_LOG_ERR("invalid session struct");
		return NULL;
	}

	if (armv8_crypto_set_session_parameters(sess, xform) != 0) {
		ARMV8_CRYPTO_LOG_ERR("failed to configure session parameters");
		return NULL;
	}

	return sess;
}

/** Clear the session memory so it does not leave key material behind */
static void
armv8_crypto_pmd_session_clear(struct rte_cryptodev *dev __rte_unused,
		void *sess)
{
	/* Zero out the whole structure */
	if (sess)
		memset(sess, 0, sizeof(struct armv8_crypto_session));
}

struct rte_cryptodev_ops armv8_crypto_pmd_ops = {
		.dev_configure		= armv8_crypto_pmd_config,
		.dev_start		= armv8_crypto_pmd_start,
		.dev_stop		= armv8_crypto_pmd_stop,
		.dev_close		= armv8_crypto_pmd_close,

		.stats_get		= armv8_crypto_pmd_stats_get,
		.stats_reset		= armv8_crypto_pmd_stats_reset,

		.dev_infos_get		= armv8_crypto_pmd_info_get,

		.queue_pair_setup	= armv8_crypto_pmd_qp_setup,
		.queue_pair_release	= armv8_crypto_pmd_qp_release,
		.queue_pair_start	= armv8_crypto_pmd_qp_start,
		.queue_pair_stop	= armv8_crypto_pmd_qp_stop,
		.queue_pair_count	= armv8_crypto_pmd_qp_count,

		.session_get_size	= armv8_crypto_pmd_session_get_size,
		.session_configure	= armv8_crypto_pmd_session_configure,
		.session_clear		= armv8_crypto_pmd_session_clear
};

struct rte_cryptodev_ops *rte_armv8_crypto_pmd_ops = &armv8_crypto_pmd_ops;
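
/*
 * Illustrative only: a sketch of how the exported table above is expected to
 * be wired up by the device creation path in the companion PMD source
 * (e.g. rte_armv8_pmd.c), where "cryptodev" is the freshly allocated device
 * and the burst handlers are this PMD's enqueue/dequeue routines:
 *
 *	cryptodev->dev_ops = rte_armv8_crypto_pmd_ops;
 *	cryptodev->enqueue_burst = <PMD enqueue burst handler>;
 *	cryptodev->dequeue_burst = <PMD dequeue burst handler>;
 */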