DPDK  21.08.0
rte_cryptodev.h
Go to the documentation of this file.
1 /* SPDX-License-Identifier: BSD-3-Clause
2  * Copyright(c) 2015-2020 Intel Corporation.
3  */
4 
5 #ifndef _RTE_CRYPTODEV_H_
6 #define _RTE_CRYPTODEV_H_
7 
17 #ifdef __cplusplus
18 extern "C" {
19 #endif
20 
21 #include "rte_kvargs.h"
22 #include "rte_crypto.h"
23 #include "rte_dev.h"
24 #include <rte_common.h>
25 #include <rte_config.h>
26 #include <rte_rcu_qsbr.h>
27 
28 #include "rte_cryptodev_trace_fp.h"
29 
30 extern const char **rte_cyptodev_names;
31 
32 /* Logging Macros */
33 
/*
 * Logging macros for the cryptodev library.
 *
 * CDEV_LOG_ERR / CDEV_LOG_DEBUG prefix the message with the calling
 * function name and source line; CDEV_LOG_INFO emits the message alone.
 * All variants append a trailing newline, so callers must not supply one.
 */

#define CDEV_LOG_ERR(...) \
	RTE_LOG(ERR, CRYPTODEV, \
		RTE_FMT("%s() line %u: " RTE_FMT_HEAD(__VA_ARGS__,) "\n", \
			__func__, __LINE__, RTE_FMT_TAIL(__VA_ARGS__,)))

#define CDEV_LOG_INFO(...) \
	RTE_LOG(INFO, CRYPTODEV, \
		RTE_FMT(RTE_FMT_HEAD(__VA_ARGS__,) "\n", \
			RTE_FMT_TAIL(__VA_ARGS__,)))

#define CDEV_LOG_DEBUG(...) \
	RTE_LOG(DEBUG, CRYPTODEV, \
		RTE_FMT("%s() line %u: " RTE_FMT_HEAD(__VA_ARGS__,) "\n", \
			__func__, __LINE__, RTE_FMT_TAIL(__VA_ARGS__,)))

/* NOTE: expands a bare `dev` identifier, so a variable named `dev`
 * must be in scope at every call site of this macro.
 */
#define CDEV_PMD_TRACE(...) \
	RTE_LOG(DEBUG, CRYPTODEV, \
		RTE_FMT("[%s] %s: " RTE_FMT_HEAD(__VA_ARGS__,) "\n", \
			dev, __func__, RTE_FMT_TAIL(__VA_ARGS__,)))
53 
/**
 * Return a pointer to byte offset @p o inside crypto operation @p c,
 * cast to type @p t (virtual-address arithmetic).
 */
#define rte_crypto_op_ctod_offset(c, t, o)	\
	((t)((char *)(c) + (o)))

/**
 * Return the IO address at byte offset @p o from the crypto operation's
 * physical base address (reads the op's phys_addr field).
 */
#define rte_crypto_op_ctophys_offset(c, o)	\
	(rte_iova_t)((c)->phys_addr + (o))
83 
88  uint16_t min;
89  uint16_t max;
90  uint16_t increment;
96 };
97 
103 #define RTE_CRYPTO_CIPHER_DATA_UNIT_LEN_512_BYTES RTE_BIT32(0)
104 #define RTE_CRYPTO_CIPHER_DATA_UNIT_LEN_4096_BYTES RTE_BIT32(1)
105 
110  enum rte_crypto_sym_xform_type xform_type;
113  union {
114  struct {
117  uint16_t block_size;
119  struct rte_crypto_param_range key_size;
121  struct rte_crypto_param_range digest_size;
123  struct rte_crypto_param_range aad_size;
125  struct rte_crypto_param_range iv_size;
127  } auth;
129  struct {
132  uint16_t block_size;
134  struct rte_crypto_param_range key_size;
136  struct rte_crypto_param_range iv_size;
138  uint32_t dataunit_set;
144  } cipher;
146  struct {
149  uint16_t block_size;
151  struct rte_crypto_param_range key_size;
153  struct rte_crypto_param_range digest_size;
155  struct rte_crypto_param_range aad_size;
157  struct rte_crypto_param_range iv_size;
159  } aead;
160  };
161 };
162 
168  enum rte_crypto_asym_xform_type xform_type;
171  uint32_t op_types;
174  __extension__
175  union {
176  struct rte_crypto_param_range modlen;
180  };
181 };
182 
189 };
190 
191 
198  union {
203  };
204 };
205 
208  enum rte_crypto_sym_xform_type type;
209  union {
210  enum rte_crypto_cipher_algorithm cipher;
211  enum rte_crypto_auth_algorithm auth;
212  enum rte_crypto_aead_algorithm aead;
213  } algo;
214 };
215 
224 };
225 
237 rte_cryptodev_sym_capability_get(uint8_t dev_id,
238  const struct rte_cryptodev_sym_capability_idx *idx);
239 
250 __rte_experimental
252 rte_cryptodev_asym_capability_get(uint8_t dev_id,
253  const struct rte_cryptodev_asym_capability_idx *idx);
254 
267 int
269  const struct rte_cryptodev_symmetric_capability *capability,
270  uint16_t key_size, uint16_t iv_size);
271 
285 int
287  const struct rte_cryptodev_symmetric_capability *capability,
288  uint16_t key_size, uint16_t digest_size, uint16_t iv_size);
289 
304 int
306  const struct rte_cryptodev_symmetric_capability *capability,
307  uint16_t key_size, uint16_t digest_size, uint16_t aad_size,
308  uint16_t iv_size);
309 
320 __rte_experimental
321 int
323  const struct rte_cryptodev_asymmetric_xform_capability *capability,
324  enum rte_crypto_asym_op_type op_type);
325 
336 __rte_experimental
337 int
339  const struct rte_cryptodev_asymmetric_xform_capability *capability,
340  uint16_t modlen);
341 
353 int
355  const char *algo_string);
356 
368 int
370  const char *algo_string);
371 
383 int
385  const char *algo_string);
386 
398 __rte_experimental
399 int
401  const char *xform_string);
402 
403 
405 #define RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST() \
406  { RTE_CRYPTO_OP_TYPE_UNDEFINED }
407 
408 
417 #define RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO (1ULL << 0)
418 
419 #define RTE_CRYPTODEV_FF_ASYMMETRIC_CRYPTO (1ULL << 1)
420 
421 #define RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING (1ULL << 2)
422 
423 #define RTE_CRYPTODEV_FF_CPU_SSE (1ULL << 3)
424 
425 #define RTE_CRYPTODEV_FF_CPU_AVX (1ULL << 4)
426 
427 #define RTE_CRYPTODEV_FF_CPU_AVX2 (1ULL << 5)
428 
429 #define RTE_CRYPTODEV_FF_CPU_AESNI (1ULL << 6)
430 
431 #define RTE_CRYPTODEV_FF_HW_ACCELERATED (1ULL << 7)
432 
435 #define RTE_CRYPTODEV_FF_CPU_AVX512 (1ULL << 8)
436 
437 #define RTE_CRYPTODEV_FF_IN_PLACE_SGL (1ULL << 9)
438 
441 #define RTE_CRYPTODEV_FF_OOP_SGL_IN_SGL_OUT (1ULL << 10)
442 
445 #define RTE_CRYPTODEV_FF_OOP_SGL_IN_LB_OUT (1ULL << 11)
446 
450 #define RTE_CRYPTODEV_FF_OOP_LB_IN_SGL_OUT (1ULL << 12)
451 
454 #define RTE_CRYPTODEV_FF_OOP_LB_IN_LB_OUT (1ULL << 13)
455 
456 #define RTE_CRYPTODEV_FF_CPU_NEON (1ULL << 14)
457 
458 #define RTE_CRYPTODEV_FF_CPU_ARM_CE (1ULL << 15)
459 
460 #define RTE_CRYPTODEV_FF_SECURITY (1ULL << 16)
461 
462 #define RTE_CRYPTODEV_FF_RSA_PRIV_OP_KEY_EXP (1ULL << 17)
463 
464 #define RTE_CRYPTODEV_FF_RSA_PRIV_OP_KEY_QT (1ULL << 18)
465 
466 #define RTE_CRYPTODEV_FF_DIGEST_ENCRYPTED (1ULL << 19)
467 
468 #define RTE_CRYPTODEV_FF_ASYM_SESSIONLESS (1ULL << 20)
469 
470 #define RTE_CRYPTODEV_FF_SYM_CPU_CRYPTO (1ULL << 21)
471 
472 #define RTE_CRYPTODEV_FF_SYM_SESSIONLESS (1ULL << 22)
473 
474 #define RTE_CRYPTODEV_FF_NON_BYTE_ALIGNED_DATA (1ULL << 23)
475 
476 #define RTE_CRYPTODEV_FF_SYM_RAW_DP (1ULL << 24)
477 
478 #define RTE_CRYPTODEV_FF_CIPHER_MULTIPLE_DATA_UNITS (1ULL << 25)
479 
480 #define RTE_CRYPTODEV_FF_CIPHER_WRAPPED_KEY (1ULL << 26)
481 
492 extern const char *
493 rte_cryptodev_get_feature_name(uint64_t flag);
494 
497  const char *driver_name;
498  uint8_t driver_id;
499  struct rte_device *device;
501  uint64_t feature_flags;
516  struct {
517  unsigned max_nb_sessions;
522  } sym;
523 };
524 
525 #define RTE_CRYPTODEV_DETACHED (0)
526 #define RTE_CRYPTODEV_ATTACHED (1)
527 
533 };
534 
537  uint32_t nb_descriptors;
542 };
543 
565 typedef uint16_t (*rte_cryptodev_callback_fn)(uint16_t dev_id, uint16_t qp_id,
566  struct rte_crypto_op **ops, uint16_t nb_ops, void *user_param);
567 
577 typedef void (*rte_cryptodev_cb_fn)(uint8_t dev_id,
578  enum rte_cryptodev_event_type event, void *cb_arg);
579 
580 
583  uint64_t enqueued_count;
585  uint64_t dequeued_count;
592 };
593 
594 #define RTE_CRYPTODEV_NAME_MAX_LEN (64)
595 
606 extern int
607 rte_cryptodev_get_dev_id(const char *name);
608 
619 extern const char *
620 rte_cryptodev_name_get(uint8_t dev_id);
621 
629 extern uint8_t
630 rte_cryptodev_count(void);
631 
640 extern uint8_t
641 rte_cryptodev_device_count_by_driver(uint8_t driver_id);
642 
654 uint8_t
655 rte_cryptodev_devices_get(const char *driver_name, uint8_t *devices,
656  uint8_t nb_devices);
657 /*
658  * Return the NUMA socket to which a device is connected
659  *
660  * @param dev_id
661  * The identifier of the device
662  * @return
663  * The NUMA socket id to which the device is connected or
664  * a default of zero if the socket could not be determined.
665  * -1 if returned is the dev_id value is out of range.
666  */
667 extern int
668 rte_cryptodev_socket_id(uint8_t dev_id);
669 
672  int socket_id;
673  uint16_t nb_queue_pairs;
675  uint64_t ff_disable;
682 };
683 
698 extern int
699 rte_cryptodev_configure(uint8_t dev_id, struct rte_cryptodev_config *config);
700 
716 extern int
717 rte_cryptodev_start(uint8_t dev_id);
718 
725 extern void
726 rte_cryptodev_stop(uint8_t dev_id);
727 
737 extern int
738 rte_cryptodev_close(uint8_t dev_id);
739 
761 extern int
762 rte_cryptodev_queue_pair_setup(uint8_t dev_id, uint16_t queue_pair_id,
763  const struct rte_cryptodev_qp_conf *qp_conf, int socket_id);
764 
778 __rte_experimental
779 int
780 rte_cryptodev_get_qp_status(uint8_t dev_id, uint16_t queue_pair_id);
781 
789 extern uint16_t
790 rte_cryptodev_queue_pair_count(uint8_t dev_id);
791 
792 
804 extern int
805 rte_cryptodev_stats_get(uint8_t dev_id, struct rte_cryptodev_stats *stats);
806 
812 extern void
813 rte_cryptodev_stats_reset(uint8_t dev_id);
814 
828 extern void
829 rte_cryptodev_info_get(uint8_t dev_id, struct rte_cryptodev_info *dev_info);
830 
831 
845 extern int
846 rte_cryptodev_callback_register(uint8_t dev_id,
847  enum rte_cryptodev_event_type event,
848  rte_cryptodev_cb_fn cb_fn, void *cb_arg);
849 
863 extern int
864 rte_cryptodev_callback_unregister(uint8_t dev_id,
865  enum rte_cryptodev_event_type event,
866  rte_cryptodev_cb_fn cb_fn, void *cb_arg);
867 
868 typedef uint16_t (*dequeue_pkt_burst_t)(void *qp,
869  struct rte_crypto_op **ops, uint16_t nb_ops);
872 typedef uint16_t (*enqueue_pkt_burst_t)(void *qp,
873  struct rte_crypto_op **ops, uint16_t nb_ops);
879 struct rte_cryptodev_callback;
880 
882 TAILQ_HEAD(rte_cryptodev_cb_list, rte_cryptodev_callback);
883 
893  void *arg;
895 };
896 
901 struct rte_cryptodev_cb_rcu {
902  struct rte_cryptodev_cb *next;
904  struct rte_rcu_qsbr *qsbr;
906 };
907 
919  uint64_t feature_flags;
924  uint8_t driver_id;
927  struct rte_cryptodev_cb_list link_intr_cbs;
933  __extension__
934  uint8_t attached : 1;
937  struct rte_cryptodev_cb_rcu *enq_cbs;
940  struct rte_cryptodev_cb_rcu *deq_cbs;
943 
944 void *
945 rte_cryptodev_get_sec_ctx(uint8_t dev_id);
946 
955  uint8_t dev_id;
957  uint8_t socket_id;
962  __extension__
963  uint8_t dev_started : 1;
968  void **queue_pairs;
970  uint16_t nb_queue_pairs;
973  void *dev_private;
976 
977 extern struct rte_cryptodev *rte_cryptodevs;
1014 static inline uint16_t
1015 rte_cryptodev_dequeue_burst(uint8_t dev_id, uint16_t qp_id,
1016  struct rte_crypto_op **ops, uint16_t nb_ops)
1017 {
1018  struct rte_cryptodev *dev = &rte_cryptodevs[dev_id];
1019 
1020  rte_cryptodev_trace_dequeue_burst(dev_id, qp_id, (void **)ops, nb_ops);
1021  nb_ops = (*dev->dequeue_burst)
1022  (dev->data->queue_pairs[qp_id], ops, nb_ops);
1023 #ifdef RTE_CRYPTO_CALLBACKS
1024  if (unlikely(dev->deq_cbs != NULL)) {
1025  struct rte_cryptodev_cb_rcu *list;
1026  struct rte_cryptodev_cb *cb;
1027 
1028  /* __ATOMIC_RELEASE memory order was used when the
1029  * call back was inserted into the list.
1030  * Since there is a clear dependency between loading
1031  * cb and cb->fn/cb->next, __ATOMIC_ACQUIRE memory order is
1032  * not required.
1033  */
1034  list = &dev->deq_cbs[qp_id];
1035  rte_rcu_qsbr_thread_online(list->qsbr, 0);
1036  cb = __atomic_load_n(&list->next, __ATOMIC_RELAXED);
1037 
1038  while (cb != NULL) {
1039  nb_ops = cb->fn(dev_id, qp_id, ops, nb_ops,
1040  cb->arg);
1041  cb = cb->next;
1042  };
1043 
1044  rte_rcu_qsbr_thread_offline(list->qsbr, 0);
1045  }
1046 #endif
1047  return nb_ops;
1048 }
1049 
1081 static inline uint16_t
1082 rte_cryptodev_enqueue_burst(uint8_t dev_id, uint16_t qp_id,
1083  struct rte_crypto_op **ops, uint16_t nb_ops)
1084 {
1085  struct rte_cryptodev *dev = &rte_cryptodevs[dev_id];
1086 
1087 #ifdef RTE_CRYPTO_CALLBACKS
1088  if (unlikely(dev->enq_cbs != NULL)) {
1089  struct rte_cryptodev_cb_rcu *list;
1090  struct rte_cryptodev_cb *cb;
1091 
1092  /* __ATOMIC_RELEASE memory order was used when the
1093  * call back was inserted into the list.
1094  * Since there is a clear dependency between loading
1095  * cb and cb->fn/cb->next, __ATOMIC_ACQUIRE memory order is
1096  * not required.
1097  */
1098  list = &dev->enq_cbs[qp_id];
1099  rte_rcu_qsbr_thread_online(list->qsbr, 0);
1100  cb = __atomic_load_n(&list->next, __ATOMIC_RELAXED);
1101 
1102  while (cb != NULL) {
1103  nb_ops = cb->fn(dev_id, qp_id, ops, nb_ops,
1104  cb->arg);
1105  cb = cb->next;
1106  };
1107 
1108  rte_rcu_qsbr_thread_offline(list->qsbr, 0);
1109  }
1110 #endif
1111 
1112  rte_cryptodev_trace_enqueue_burst(dev_id, qp_id, (void **)ops, nb_ops);
1113  return (*dev->enqueue_burst)(
1114  dev->data->queue_pairs[qp_id], ops, nb_ops);
1115 }
1116 
1117 
1123  uint64_t opaque_data;
1125  uint16_t nb_drivers;
1127  uint16_t user_data_sz;
1129  __extension__ struct {
1130  void *data;
1131  uint16_t refcnt;
1132  } sess_data[0];
1134 };
1135 
1138  __extension__ void *sess_private_data[0];
1140 };
1141 
1168 __rte_experimental
1169 struct rte_mempool *
1170 rte_cryptodev_sym_session_pool_create(const char *name, uint32_t nb_elts,
1171  uint32_t elt_size, uint32_t cache_size, uint16_t priv_size,
1172  int socket_id);
1173 
1185 
1195 __rte_experimental
1198 
1211 int
1213 
1226 __rte_experimental
1227 int
1229 
1246 int
1247 rte_cryptodev_sym_session_init(uint8_t dev_id,
1248  struct rte_cryptodev_sym_session *sess,
1249  struct rte_crypto_sym_xform *xforms,
1250  struct rte_mempool *mempool);
1251 
1267 __rte_experimental
1268 int
1269 rte_cryptodev_asym_session_init(uint8_t dev_id,
1270  struct rte_cryptodev_asym_session *sess,
1271  struct rte_crypto_asym_xform *xforms,
1272  struct rte_mempool *mempool);
1273 
1288 int
1289 rte_cryptodev_sym_session_clear(uint8_t dev_id,
1290  struct rte_cryptodev_sym_session *sess);
1291 
1302 __rte_experimental
1303 int
1304 rte_cryptodev_asym_session_clear(uint8_t dev_id,
1305  struct rte_cryptodev_asym_session *sess);
1306 
1314 unsigned int
1316 
1328 __rte_experimental
1329 unsigned int
1331  struct rte_cryptodev_sym_session *sess);
1332 
1339 __rte_experimental
1340 unsigned int
1342 
1354 unsigned int
1356 
1367 __rte_experimental
1368 unsigned int
1370 
1379 int rte_cryptodev_driver_id_get(const char *name);
1380 
1389 const char *rte_cryptodev_driver_name_get(uint8_t driver_id);
1390 
1403 __rte_experimental
1404 int
1406  struct rte_cryptodev_sym_session *sess,
1407  void *data,
1408  uint16_t size);
1409 
1420 __rte_experimental
1421 void *
1423  struct rte_cryptodev_sym_session *sess);
1424 
1437 __rte_experimental
1438 uint32_t
1440  struct rte_cryptodev_sym_session *sess, union rte_crypto_sym_ofs ofs,
1441  struct rte_crypto_sym_vec *vec);
1442 
1452 __rte_experimental
1453 int
1454 rte_cryptodev_get_raw_dp_ctx_size(uint8_t dev_id);
1455 
1461  struct rte_cryptodev_sym_session *crypto_sess;
1462  struct rte_crypto_sym_xform *xform;
1463  struct rte_security_session *sec_sess;
1464 };
1465 
1492  void *qp, uint8_t *drv_ctx, struct rte_crypto_sym_vec *vec,
1493  union rte_crypto_sym_ofs ofs, void *user_data[], int *enqueue_status);
1494 
1517  void *qp, uint8_t *drv_ctx, struct rte_crypto_vec *data_vec,
1518  uint16_t n_data_vecs, union rte_crypto_sym_ofs ofs,
1519  struct rte_crypto_va_iova_ptr *iv,
1520  struct rte_crypto_va_iova_ptr *digest,
1521  struct rte_crypto_va_iova_ptr *aad_or_auth_iv,
1522  void *user_data);
1523 
1535 typedef int (*cryptodev_sym_raw_operation_done_t)(void *qp, uint8_t *drv_ctx,
1536  uint32_t n);
1537 
1547 typedef uint32_t (*rte_cryptodev_raw_get_dequeue_count_t)(void *user_data);
1548 
1557 typedef void (*rte_cryptodev_raw_post_dequeue_t)(void *user_data,
1558  uint32_t index, uint8_t is_op_success);
1559 
1601 typedef uint32_t (*cryptodev_sym_raw_dequeue_burst_t)(void *qp,
1602  uint8_t *drv_ctx,
1603  rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count,
1604  uint32_t max_nb_to_dequeue,
1605  rte_cryptodev_raw_post_dequeue_t post_dequeue,
1606  void **out_user_data, uint8_t is_user_data_array,
1607  uint32_t *n_success, int *dequeue_status);
1608 
1632 typedef void * (*cryptodev_sym_raw_dequeue_t)(
1633  void *qp, uint8_t *drv_ctx, int *dequeue_status,
1634  enum rte_crypto_op_status *op_status);
1635 
1642  void *qp_data;
1643 
1645  cryptodev_sym_raw_enqueue_burst_t enqueue_burst;
1648  cryptodev_sym_raw_dequeue_burst_t dequeue_burst;
1650 
1651  /* Driver specific context data */
1652  __extension__ uint8_t drv_ctx_data[];
1653 };
1654 
1678 __rte_experimental
1679 int
1680 rte_cryptodev_configure_raw_dp_ctx(uint8_t dev_id, uint16_t qp_id,
1681  struct rte_crypto_raw_dp_ctx *ctx,
1682  enum rte_crypto_op_sess_type sess_type,
1683  union rte_cryptodev_session_ctx session_ctx,
1684  uint8_t is_update);
1685 
1710 __rte_experimental
1711 uint32_t
1713  struct rte_crypto_sym_vec *vec, union rte_crypto_sym_ofs ofs,
1714  void **user_data, int *enqueue_status);
1715 
1736 __rte_experimental
1737 static __rte_always_inline int
1739  struct rte_crypto_vec *data_vec, uint16_t n_data_vecs,
1740  union rte_crypto_sym_ofs ofs,
1741  struct rte_crypto_va_iova_ptr *iv,
1742  struct rte_crypto_va_iova_ptr *digest,
1743  struct rte_crypto_va_iova_ptr *aad_or_auth_iv,
1744  void *user_data)
1745 {
1746  return (*ctx->enqueue)(ctx->qp_data, ctx->drv_ctx_data, data_vec,
1747  n_data_vecs, ofs, iv, digest, aad_or_auth_iv, user_data);
1748 }
1749 
1760 __rte_experimental
1761 int
1763  uint32_t n);
1764 
1806 __rte_experimental
1807 uint32_t
1809  rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count,
1810  uint32_t max_nb_to_dequeue,
1811  rte_cryptodev_raw_post_dequeue_t post_dequeue,
1812  void **out_user_data, uint8_t is_user_data_array,
1813  uint32_t *n_success, int *dequeue_status);
1814 
1838 __rte_experimental
1839 static __rte_always_inline void *
1841  int *dequeue_status, enum rte_crypto_op_status *op_status)
1842 {
1843  return (*ctx->dequeue)(ctx->qp_data, ctx->drv_ctx_data, dequeue_status,
1844  op_status);
1845 }
1846 
1856 __rte_experimental
1857 int
1859  uint32_t n);
1860 
1897 __rte_experimental
1898 struct rte_cryptodev_cb *
1899 rte_cryptodev_add_enq_callback(uint8_t dev_id,
1900  uint16_t qp_id,
1902  void *cb_arg);
1903 
1926 __rte_experimental
1927 int rte_cryptodev_remove_enq_callback(uint8_t dev_id,
1928  uint16_t qp_id,
1929  struct rte_cryptodev_cb *cb);
1930 
1966 __rte_experimental
1967 struct rte_cryptodev_cb *
1968 rte_cryptodev_add_deq_callback(uint8_t dev_id,
1969  uint16_t qp_id,
1971  void *cb_arg);
1972 
1994 __rte_experimental
1995 int rte_cryptodev_remove_deq_callback(uint8_t dev_id,
1996  uint16_t qp_id,
1997  struct rte_cryptodev_cb *cb);
1998 
1999 #ifdef __cplusplus
2000 }
2001 #endif
2002 
2003 #endif /* _RTE_CRYPTODEV_H_ */
int rte_cryptodev_get_cipher_algo_enum(enum rte_crypto_cipher_algorithm *algo_enum, const char *algo_string)
int rte_cryptodev_get_auth_algo_enum(enum rte_crypto_auth_algorithm *algo_enum, const char *algo_string)
#define __rte_always_inline
Definition: rte_common.h:228
struct rte_cryptodev_cb * next
struct rte_mempool * mp_session
void rte_cryptodev_info_get(uint8_t dev_id, struct rte_cryptodev_info *dev_info)
__rte_experimental unsigned int rte_cryptodev_sym_get_existing_header_session_size(struct rte_cryptodev_sym_session *sess)
static __rte_experimental __rte_always_inline int rte_cryptodev_raw_enqueue(struct rte_crypto_raw_dp_ctx *ctx, struct rte_crypto_vec *data_vec, uint16_t n_data_vecs, union rte_crypto_sym_ofs ofs, struct rte_crypto_va_iova_ptr *iv, struct rte_crypto_va_iova_ptr *digest, struct rte_crypto_va_iova_ptr *aad_or_auth_iv, void *user_data)
const char * rte_cryptodev_get_feature_name(uint64_t flag)
__rte_experimental unsigned int rte_cryptodev_asym_get_private_session_size(uint8_t dev_id)
void * security_ctx
int rte_cryptodev_queue_pair_setup(uint8_t dev_id, uint16_t queue_pair_id, const struct rte_cryptodev_qp_conf *qp_conf, int socket_id)
uint8_t rte_cryptodev_devices_get(const char *driver_name, uint8_t *devices, uint8_t nb_devices)
rte_crypto_asym_xform_type
static uint16_t rte_cryptodev_dequeue_burst(uint8_t dev_id, uint16_t qp_id, struct rte_crypto_op **ops, uint16_t nb_ops)
uint64_t feature_flags
int rte_cryptodev_driver_id_get(const char *name)
int rte_cryptodev_sym_session_clear(uint8_t dev_id, struct rte_cryptodev_sym_session *sess)
TAILQ_HEAD(rte_cryptodev_cb_list, rte_cryptodev_callback)
__extension__ uint8_t attached
__rte_experimental int rte_cryptodev_asym_get_xform_enum(enum rte_crypto_asym_xform_type *xform_enum, const char *xform_string)
int rte_cryptodev_sym_session_init(uint8_t dev_id, struct rte_cryptodev_sym_session *sess, struct rte_crypto_sym_xform *xforms, struct rte_mempool *mempool)
__rte_experimental uint32_t rte_cryptodev_raw_enqueue_burst(struct rte_crypto_raw_dp_ctx *ctx, struct rte_crypto_sym_vec *vec, union rte_crypto_sym_ofs ofs, void **user_data, int *enqueue_status)
struct rte_mempool * session_pool
uint64_t dequeue_err_count
int rte_cryptodev_get_aead_algo_enum(enum rte_crypto_aead_algorithm *algo_enum, const char *algo_string)
uint32_t cache_size
Definition: rte_mempool.h:229
static __rte_always_inline void rte_rcu_qsbr_thread_offline(struct rte_rcu_qsbr *v, unsigned int thread_id)
Definition: rte_rcu_qsbr.h:356
const struct rte_cryptodev_symmetric_capability * rte_cryptodev_sym_capability_get(uint8_t dev_id, const struct rte_cryptodev_sym_capability_idx *idx)
dequeue_pkt_burst_t dequeue_burst
int rte_cryptodev_configure(uint8_t dev_id, struct rte_cryptodev_config *config)
int rte_cryptodev_callback_unregister(uint8_t dev_id, enum rte_cryptodev_event_type event, rte_cryptodev_cb_fn cb_fn, void *cb_arg)
const struct rte_cryptodev_capabilities * capabilities
uint32_t(* rte_cryptodev_raw_get_dequeue_count_t)(void *user_data)
rte_crypto_asym_op_type
__rte_experimental struct rte_cryptodev_cb * rte_cryptodev_add_enq_callback(uint8_t dev_id, uint16_t qp_id, rte_cryptodev_callback_fn cb_fn, void *cb_arg)
struct rte_cryptodev_cb_rcu * deq_cbs
void rte_cryptodev_stop(uint8_t dev_id)
const char * driver_name
int rte_cryptodev_close(uint8_t dev_id)
uint8_t driver_id
int(* cryptodev_sym_raw_operation_done_t)(void *qp, uint8_t *drv_ctx, uint32_t n)
void(* rte_cryptodev_cb_fn)(uint8_t dev_id, enum rte_cryptodev_event_type event, void *cb_arg)
__rte_experimental int rte_cryptodev_sym_session_set_user_data(struct rte_cryptodev_sym_session *sess, void *data, uint16_t size)
int rte_cryptodev_sym_capability_check_aead(const struct rte_cryptodev_symmetric_capability *capability, uint16_t key_size, uint16_t digest_size, uint16_t aad_size, uint16_t iv_size)
#define unlikely(x)
struct rte_cryptodev_cb_rcu * enq_cbs
__rte_experimental uint32_t rte_cryptodev_raw_dequeue_burst(struct rte_crypto_raw_dp_ctx *ctx, rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count, uint32_t max_nb_to_dequeue, rte_cryptodev_raw_post_dequeue_t post_dequeue, void **out_user_data, uint8_t is_user_data_array, uint32_t *n_success, int *dequeue_status)
unsigned int rte_cryptodev_sym_get_header_session_size(void)
const char * rte_cryptodev_driver_name_get(uint8_t driver_id)
int rte_cryptodev_callback_register(uint8_t dev_id, enum rte_cryptodev_event_type event, rte_cryptodev_cb_fn cb_fn, void *cb_arg)
uint16_t min_mbuf_tailroom_req
__rte_experimental int rte_cryptodev_raw_dequeue_done(struct rte_crypto_raw_dp_ctx *ctx, uint32_t n)
__rte_experimental int rte_cryptodev_get_raw_dp_ctx_size(uint8_t dev_id)
struct rte_cryptodev_sym_session * rte_cryptodev_sym_session_create(struct rte_mempool *mempool)
int rte_cryptodev_sym_session_free(struct rte_cryptodev_sym_session *sess)
const char * rte_cryptodev_name_get(uint8_t dev_id)
__rte_experimental void * rte_cryptodev_sym_session_get_user_data(struct rte_cryptodev_sym_session *sess)
rte_crypto_op_type
Definition: rte_crypto.h:29
__rte_experimental const struct rte_cryptodev_asymmetric_xform_capability * rte_cryptodev_asym_capability_get(uint8_t dev_id, const struct rte_cryptodev_asym_capability_idx *idx)
#define RTE_CRYPTODEV_NAME_MAX_LEN
__rte_experimental int rte_cryptodev_asym_session_clear(uint8_t dev_id, struct rte_cryptodev_asym_session *sess)
__rte_experimental int rte_cryptodev_asym_session_free(struct rte_cryptodev_asym_session *sess)
rte_cryptodev_callback_fn fn
enqueue_pkt_burst_t enqueue_burst
static uint16_t rte_cryptodev_enqueue_burst(uint8_t dev_id, uint16_t qp_id, struct rte_crypto_op **ops, uint16_t nb_ops)
int rte_cryptodev_sym_capability_check_cipher(const struct rte_cryptodev_symmetric_capability *capability, uint16_t key_size, uint16_t iv_size)
__rte_experimental int rte_cryptodev_asym_session_init(uint8_t dev_id, struct rte_cryptodev_asym_session *sess, struct rte_crypto_asym_xform *xforms, struct rte_mempool *mempool)
uint16_t(* rte_cryptodev_callback_fn)(uint16_t dev_id, uint16_t qp_id, struct rte_crypto_op **ops, uint16_t nb_ops, void *user_param)
int rte_cryptodev_stats_get(uint8_t dev_id, struct rte_cryptodev_stats *stats)
__rte_experimental int rte_cryptodev_raw_enqueue_done(struct rte_crypto_raw_dp_ctx *ctx, uint32_t n)
struct rte_cryptodev_data * data
struct rte_device * device
__rte_experimental struct rte_cryptodev_asym_session * rte_cryptodev_asym_session_create(struct rte_mempool *mempool)
uint32_t elt_size
Definition: rte_mempool.h:232
__rte_experimental int rte_cryptodev_remove_enq_callback(uint8_t dev_id, uint16_t qp_id, struct rte_cryptodev_cb *cb)
uint8_t rte_cryptodev_count(void)
__extension__ uint8_t dev_started
void *(* cryptodev_sym_raw_dequeue_t)(void *qp, uint8_t *drv_ctx, int *dequeue_status, enum rte_crypto_op_status *op_status)
int rte_cryptodev_sym_capability_check_auth(const struct rte_cryptodev_symmetric_capability *capability, uint16_t key_size, uint16_t digest_size, uint16_t iv_size)
struct rte_cryptodev_ops * dev_ops
uint16_t min_mbuf_headroom_req
__rte_experimental unsigned int rte_cryptodev_asym_get_header_session_size(void)
unsigned int rte_cryptodev_sym_get_private_session_size(uint8_t dev_id)
#define __rte_cache_aligned
Definition: rte_common.h:402
__rte_experimental struct rte_mempool * rte_cryptodev_sym_session_pool_create(const char *name, uint32_t nb_elts, uint32_t elt_size, uint32_t cache_size, uint16_t priv_size, int socket_id)
static __rte_always_inline void rte_rcu_qsbr_thread_online(struct rte_rcu_qsbr *v, unsigned int thread_id)
Definition: rte_rcu_qsbr.h:303
uint32_t(* cryptodev_sym_raw_enqueue_burst_t)(void *qp, uint8_t *drv_ctx, struct rte_crypto_sym_vec *vec, union rte_crypto_sym_ofs ofs, void *user_data[], int *enqueue_status)
#define RTE_STD_C11
Definition: rte_common.h:42
rte_crypto_auth_algorithm
__rte_experimental int rte_cryptodev_asym_xform_capability_check_modlen(const struct rte_cryptodev_asymmetric_xform_capability *capability, uint16_t modlen)
rte_crypto_sym_xform_type
__rte_experimental uint32_t rte_cryptodev_sym_cpu_crypto_process(uint8_t dev_id, struct rte_cryptodev_sym_session *sess, union rte_crypto_sym_ofs ofs, struct rte_crypto_sym_vec *vec)
struct rte_mempool * mp_session_private
int rte_cryptodev_start(uint8_t dev_id)
__rte_experimental int rte_cryptodev_configure_raw_dp_ctx(uint8_t dev_id, uint16_t qp_id, struct rte_crypto_raw_dp_ctx *ctx, enum rte_crypto_op_sess_type sess_type, union rte_cryptodev_session_ctx session_ctx, uint8_t is_update)
uint16_t(* enqueue_pkt_burst_t)(void *qp, struct rte_crypto_op **ops, uint16_t nb_ops)
uint64_t enqueue_err_count
uint32_t(* cryptodev_sym_raw_dequeue_burst_t)(void *qp, uint8_t *drv_ctx, rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count, uint32_t max_nb_to_dequeue, rte_cryptodev_raw_post_dequeue_t post_dequeue, void **out_user_data, uint8_t is_user_data_array, uint32_t *n_success, int *dequeue_status)
uint16_t(* dequeue_pkt_burst_t)(void *qp, struct rte_crypto_op **ops, uint16_t nb_ops)
rte_crypto_op_sess_type
Definition: rte_crypto.h:62
struct rte_crypto_param_range modlen
uint16_t rte_cryptodev_queue_pair_count(uint8_t dev_id)
__rte_experimental int rte_cryptodev_asym_xform_capability_check_optype(const struct rte_cryptodev_asymmetric_xform_capability *capability, enum rte_crypto_asym_op_type op_type)
int(* cryptodev_sym_raw_enqueue_t)(void *qp, uint8_t *drv_ctx, struct rte_crypto_vec *data_vec, uint16_t n_data_vecs, union rte_crypto_sym_ofs ofs, struct rte_crypto_va_iova_ptr *iv, struct rte_crypto_va_iova_ptr *digest, struct rte_crypto_va_iova_ptr *aad_or_auth_iv, void *user_data)
__rte_experimental int rte_cryptodev_get_qp_status(uint8_t dev_id, uint16_t queue_pair_id)
struct rte_device * device
unsigned max_nb_sessions
rte_cryptodev_event_type
__rte_experimental int rte_cryptodev_remove_deq_callback(uint8_t dev_id, uint16_t qp_id, struct rte_cryptodev_cb *cb)
static __rte_experimental __rte_always_inline void * rte_cryptodev_raw_dequeue(struct rte_crypto_raw_dp_ctx *ctx, int *dequeue_status, enum rte_crypto_op_status *op_status)
void rte_cryptodev_stats_reset(uint8_t dev_id)
int rte_cryptodev_get_dev_id(const char *name)
unsigned max_nb_queue_pairs
uint8_t rte_cryptodev_device_count_by_driver(uint8_t driver_id)
char name[RTE_MEMZONE_NAMESIZE]
Definition: rte_mempool.h:218
__rte_experimental struct rte_cryptodev_cb * rte_cryptodev_add_deq_callback(uint8_t dev_id, uint16_t qp_id, rte_cryptodev_callback_fn cb_fn, void *cb_arg)
void(* rte_cryptodev_raw_post_dequeue_t)(void *user_data, uint32_t index, uint8_t is_op_success)
struct rte_cryptodev * rte_cryptodevs
rte_crypto_op_status
Definition: rte_crypto.h:39
rte_crypto_aead_algorithm
rte_crypto_cipher_algorithm