/*
 *   BSD LICENSE
 *
 *   Copyright (C) Cavium networks Ltd. 2017.
 *
 *   Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions
 *   are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in
 *       the documentation and/or other materials provided with the
 *       distribution.
 *     * Neither the name of Cavium networks nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 *   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 *   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 *   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 *   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 *   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include <string.h>

#include <rte_common.h>
#include <rte_malloc.h>
#include <rte_cryptodev_pmd.h>

#include "armv8_crypto_defs.h"

#include "rte_armv8_pmd_private.h"

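/*
 * Capability table advertised through dev_infos_get. Summarizing the entries
 * below: HMAC-SHA1 (20-byte digest) and HMAC-SHA256 (32-byte digest)
 * authentication with 16..128-byte keys, and AES-128-CBC with a 16-byte IV.
 * The array is terminated by RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST().
 */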
static const struct rte_cryptodev_capabilities
    armv8_crypto_pmd_capabilities[] = {
    {   /* SHA1 HMAC */
        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
        {.sym = {
            .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
            {.auth = {
                .algo = RTE_CRYPTO_AUTH_SHA1_HMAC,
                .block_size = 64,
                .key_size = {
                    .min = 16,
                    .max = 128,
                    .increment = 0
                },
                .digest_size = {
                    .min = 20,
                    .max = 20,
                    .increment = 0
                },
                .aad_size = { 0 }
            }, }
        }, }
    },
    {   /* SHA256 HMAC */
        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
        {.sym = {
            .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
            {.auth = {
                .algo = RTE_CRYPTO_AUTH_SHA256_HMAC,
                .block_size = 64,
                .key_size = {
                    .min = 16,
                    .max = 128,
                    .increment = 0
                },
                .digest_size = {
                    .min = 32,
                    .max = 32,
                    .increment = 0
                },
                .aad_size = { 0 }
            }, }
        }, }
    },
    {   /* AES CBC */
        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
        {.sym = {
            .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
            {.cipher = {
                .algo = RTE_CRYPTO_CIPHER_AES_CBC,
                .block_size = 16,
                .key_size = {
                    .min = 16,
                    .max = 16,
                    .increment = 0
                },
                .iv_size = {
                    .min = 16,
                    .max = 16,
                    .increment = 0
                }
            }, }
        }, }
    },

    RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
};

/** Configure device */
static int
armv8_crypto_pmd_config(__rte_unused struct rte_cryptodev *dev,
        __rte_unused struct rte_cryptodev_config *config)
{
    return 0;
}

/** Start device */
static int
armv8_crypto_pmd_start(__rte_unused struct rte_cryptodev *dev)
{
    return 0;
}

/** Stop device */
static void
armv8_crypto_pmd_stop(__rte_unused struct rte_cryptodev *dev)
{
}

/** Close device */
static int
armv8_crypto_pmd_close(__rte_unused struct rte_cryptodev *dev)
{
    return 0;
}

/** Get device statistics */
static void
armv8_crypto_pmd_stats_get(struct rte_cryptodev *dev,
        struct rte_cryptodev_stats *stats)
{
    int qp_id;

    for (qp_id = 0; qp_id < dev->data->nb_queue_pairs; qp_id++) {
        struct armv8_crypto_qp *qp = dev->data->queue_pairs[qp_id];

        stats->enqueued_count += qp->stats.enqueued_count;
        stats->dequeued_count += qp->stats.dequeued_count;

        stats->enqueue_err_count += qp->stats.enqueue_err_count;
        stats->dequeue_err_count += qp->stats.dequeue_err_count;
    }
}

/** Reset device statistics */
static void
armv8_crypto_pmd_stats_reset(struct rte_cryptodev *dev)
{
    int qp_id;

    for (qp_id = 0; qp_id < dev->data->nb_queue_pairs; qp_id++) {
        struct armv8_crypto_qp *qp = dev->data->queue_pairs[qp_id];

        memset(&qp->stats, 0, sizeof(qp->stats));
    }
}

/** Get device info */
static void
armv8_crypto_pmd_info_get(struct rte_cryptodev *dev,
        struct rte_cryptodev_info *dev_info)
{
    struct armv8_crypto_private *internals = dev->data->dev_private;

    if (dev_info != NULL) {
        dev_info->dev_type = dev->dev_type;
        dev_info->feature_flags = dev->feature_flags;
        dev_info->capabilities = armv8_crypto_pmd_capabilities;
        dev_info->max_nb_queue_pairs = internals->max_nb_qpairs;
        dev_info->sym.max_nb_sessions = internals->max_nb_sessions;
    }
}

/** Release queue pair */
static int
armv8_crypto_pmd_qp_release(struct rte_cryptodev *dev, uint16_t qp_id)
{
    if (dev->data->queue_pairs[qp_id] != NULL) {
        rte_free(dev->data->queue_pairs[qp_id]);
        dev->data->queue_pairs[qp_id] = NULL;
    }

    return 0;
}

/** Set a unique name for the queue pair based on the device id and qp id */
static int
armv8_crypto_pmd_qp_set_unique_name(struct rte_cryptodev *dev,
        struct armv8_crypto_qp *qp)
{
    unsigned int n;

    n = snprintf(qp->name, sizeof(qp->name), "armv8_crypto_pmd_%u_qp_%u",
            dev->data->dev_id, qp->id);

    /* snprintf() returns the length that would have been written, so a
     * value of sizeof(qp->name) or more means the name was truncated.
     */
    if (n >= sizeof(qp->name))
        return -1;

    return 0;
}

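/*
 * Note on the processed-ops ring created below: rte_ring_lookup() can return
 * an already existing ring with the queue pair's name, e.g. when a queue pair
 * is reconfigured without its ring having been freed. In that case the ring
 * is reused as long as it holds at least ring_size entries; otherwise setup
 * fails rather than silently continuing with a smaller ring.
 */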
/** Create a ring to place processed operations on */
static struct rte_ring *
armv8_crypto_pmd_qp_create_processed_ops_ring(struct armv8_crypto_qp *qp,
        unsigned int ring_size, int socket_id)
{
    struct rte_ring *r;

    r = rte_ring_lookup(qp->name);
    if (r) {
        if (rte_ring_get_size(r) >= ring_size) {
            ARMV8_CRYPTO_LOG_INFO(
                "Reusing existing ring %s for processed ops",
                qp->name);
            return r;
        }

        ARMV8_CRYPTO_LOG_ERR(
            "Unable to reuse existing ring %s for processed ops",
            qp->name);
        return NULL;
    }

    return rte_ring_create(qp->name, ring_size, socket_id,
            RING_F_SP_ENQ | RING_F_SC_DEQ);
}

/** Setup a queue pair */
static int
armv8_crypto_pmd_qp_setup(struct rte_cryptodev *dev, uint16_t qp_id,
        const struct rte_cryptodev_qp_conf *qp_conf,
        int socket_id)
{
    struct armv8_crypto_qp *qp = NULL;

    /* Free memory prior to re-allocation if needed. */
    if (dev->data->queue_pairs[qp_id] != NULL)
        armv8_crypto_pmd_qp_release(dev, qp_id);

    /* Allocate the queue pair data structure. */
    qp = rte_zmalloc_socket("ARMv8 PMD Queue Pair", sizeof(*qp),
            RTE_CACHE_LINE_SIZE, socket_id);
    if (qp == NULL)
        return -ENOMEM;

    qp->id = qp_id;
    dev->data->queue_pairs[qp_id] = qp;

    if (armv8_crypto_pmd_qp_set_unique_name(dev, qp) != 0)
        goto qp_setup_cleanup;

    qp->processed_ops = armv8_crypto_pmd_qp_create_processed_ops_ring(qp,
            qp_conf->nb_descriptors, socket_id);
    if (qp->processed_ops == NULL)
        goto qp_setup_cleanup;

    qp->sess_mp = dev->data->session_pool;

    memset(&qp->stats, 0, sizeof(qp->stats));

    return 0;

qp_setup_cleanup:
    if (qp) {
        /* Do not leave a dangling pointer behind on failure. */
        dev->data->queue_pairs[qp_id] = NULL;
        rte_free(qp);
    }

    return -1;
}

/** Start queue pair */
static int
armv8_crypto_pmd_qp_start(__rte_unused struct rte_cryptodev *dev,
        __rte_unused uint16_t queue_pair_id)
{
    return -ENOTSUP;
}

/** Stop queue pair */
static int
armv8_crypto_pmd_qp_stop(__rte_unused struct rte_cryptodev *dev,
        __rte_unused uint16_t queue_pair_id)
{
    return -ENOTSUP;
}

/** Return the number of allocated queue pairs */
static uint32_t
armv8_crypto_pmd_qp_count(struct rte_cryptodev *dev)
{
    return dev->data->nb_queue_pairs;
}

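/*
 * Session handling: the per-session private data for this PMD is
 * struct armv8_crypto_session (defined in one of the headers included above).
 * The generic cryptodev layer is expected to allocate session objects of the
 * size reported below and to call session_configure/session_clear on them;
 * this PMD only fills in and wipes that private data.
 */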
/** Returns the size of the session structure */
static unsigned
armv8_crypto_pmd_session_get_size(struct rte_cryptodev *dev __rte_unused)
{
    return sizeof(struct armv8_crypto_session);
}

/** Configure the session from a crypto xform chain */
static void *
armv8_crypto_pmd_session_configure(struct rte_cryptodev *dev __rte_unused,
        struct rte_crypto_sym_xform *xform, void *sess)
{
    if (unlikely(sess == NULL)) {
        ARMV8_CRYPTO_LOG_ERR("invalid session struct");
        return NULL;
    }

    if (armv8_crypto_set_session_parameters(sess, xform) != 0) {
        ARMV8_CRYPTO_LOG_ERR("failed to configure session parameters");
        return NULL;
    }

    return sess;
}

/** Clear the session memory so that it doesn't leave key material behind */
static void
armv8_crypto_pmd_session_clear(struct rte_cryptodev *dev __rte_unused,
        void *sess)
{
    /* Zero out the whole structure */
    if (sess)
        memset(sess, 0, sizeof(struct armv8_crypto_session));
}

struct rte_cryptodev_ops armv8_crypto_pmd_ops = {
    .dev_configure      = armv8_crypto_pmd_config,
    .dev_start          = armv8_crypto_pmd_start,
    .dev_stop           = armv8_crypto_pmd_stop,
    .dev_close          = armv8_crypto_pmd_close,

    .stats_get          = armv8_crypto_pmd_stats_get,
    .stats_reset        = armv8_crypto_pmd_stats_reset,

    .dev_infos_get      = armv8_crypto_pmd_info_get,

    .queue_pair_setup   = armv8_crypto_pmd_qp_setup,
    .queue_pair_release = armv8_crypto_pmd_qp_release,
    .queue_pair_start   = armv8_crypto_pmd_qp_start,
    .queue_pair_stop    = armv8_crypto_pmd_qp_stop,
    .queue_pair_count   = armv8_crypto_pmd_qp_count,

    .session_get_size   = armv8_crypto_pmd_session_get_size,
    .session_configure  = armv8_crypto_pmd_session_configure,
    .session_clear      = armv8_crypto_pmd_session_clear
};

struct rte_cryptodev_ops *rte_armv8_crypto_pmd_ops = &armv8_crypto_pmd_ops;
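
/*
 * The exported rte_armv8_crypto_pmd_ops pointer is the only symbol other
 * translation units need: the PMD's device-creation code (presumably in
 * rte_armv8_pmd.c alongside this file) is expected to assign it to
 * dev->dev_ops so that the generic rte_cryptodev API can dispatch the
 * configure/queue-pair/session callbacks defined above.
 */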