DPDK  21.11.0
rte_cryptodev.h
Go to the documentation of this file.
1 /* SPDX-License-Identifier: BSD-3-Clause
2  * Copyright(c) 2015-2020 Intel Corporation.
3  */
4 
5 #ifndef _RTE_CRYPTODEV_H_
6 #define _RTE_CRYPTODEV_H_
7 
17 #ifdef __cplusplus
18 extern "C" {
19 #endif
20 
21 #include "rte_kvargs.h"
22 #include "rte_crypto.h"
23 #include "rte_dev.h"
24 #include <rte_common.h>
25 #include <rte_config.h>
26 #include <rte_rcu_qsbr.h>
27 
28 #include "rte_cryptodev_trace_fp.h"
29 
/*
 * NOTE(review): "rte_cyptodev_names" (missing the 'r' of "crypto") is the
 * long-standing spelling of this exported symbol; renaming it would break
 * the ABI, so the name is kept as-is.
 */
30 extern const char **rte_cyptodev_names;
31 
32 /* Logging Macros */
33 
/* Error log: prefixes the message with the calling function and line. */
34 #define CDEV_LOG_ERR(...) \
35  RTE_LOG(ERR, CRYPTODEV, \
36  RTE_FMT("%s() line %u: " RTE_FMT_HEAD(__VA_ARGS__,) "\n", \
37  __func__, __LINE__, RTE_FMT_TAIL(__VA_ARGS__,)))
38 
/* Info log: message only, no function/line prefix. */
39 #define CDEV_LOG_INFO(...) \
40  RTE_LOG(INFO, CRYPTODEV, \
41  RTE_FMT(RTE_FMT_HEAD(__VA_ARGS__,) "\n", \
42  RTE_FMT_TAIL(__VA_ARGS__,)))
43 
/* Debug log: prefixes the message with the calling function and line. */
44 #define CDEV_LOG_DEBUG(...) \
45  RTE_LOG(DEBUG, CRYPTODEV, \
46  RTE_FMT("%s() line %u: " RTE_FMT_HEAD(__VA_ARGS__,) "\n", \
47  __func__, __LINE__, RTE_FMT_TAIL(__VA_ARGS__,)))
48 
/*
 * PMD trace log: expects a variable named "dev" to be in scope at the
 * expansion site (printed in the "[%s]" slot before the function name).
 */
49 #define CDEV_PMD_TRACE(...) \
50  RTE_LOG(DEBUG, CRYPTODEV, \
51  RTE_FMT("[%s] %s: " RTE_FMT_HEAD(__VA_ARGS__,) "\n", \
52  dev, __func__, RTE_FMT_TAIL(__VA_ARGS__,)))
53 
/*
 * Return a pointer of type "t" located "o" bytes past the start of the
 * crypto op "c" (byte-offset arithmetic into the op structure).
 */
67 #define rte_crypto_op_ctod_offset(c, t, o) \
68  ((t)((char *)(c) + (o)))
69 
/*
 * Return the IOVA (bus/physical) address located "o" bytes past the start
 * of the crypto op "c".
 *
 * The whole expansion is now parenthesized so the macro composes safely
 * inside larger expressions (CERT PRE02-C); the previous form left the
 * cast expression bare.
 */
#define rte_crypto_op_ctophys_offset(c, o) \
	((rte_iova_t)((c)->phys_addr + (o)))
83 
88  uint16_t min;
89  uint16_t max;
90  uint16_t increment;
96 };
97 
/*
 * Cipher data-unit length capability bits (one RTE_BIT32 each):
 * 512-byte, 4096-byte and 1-megabyte data units.
 */
103 #define RTE_CRYPTO_CIPHER_DATA_UNIT_LEN_512_BYTES RTE_BIT32(0)
104 #define RTE_CRYPTO_CIPHER_DATA_UNIT_LEN_4096_BYTES RTE_BIT32(1)
105 #define RTE_CRYPTO_CIPHER_DATA_UNIT_LEN_1_MEGABYTES RTE_BIT32(2)
106 
111  enum rte_crypto_sym_xform_type xform_type;
114  union {
115  struct {
118  uint16_t block_size;
120  struct rte_crypto_param_range key_size;
122  struct rte_crypto_param_range digest_size;
124  struct rte_crypto_param_range aad_size;
126  struct rte_crypto_param_range iv_size;
128  } auth;
130  struct {
133  uint16_t block_size;
135  struct rte_crypto_param_range key_size;
137  struct rte_crypto_param_range iv_size;
139  uint32_t dataunit_set;
145  } cipher;
147  struct {
150  uint16_t block_size;
152  struct rte_crypto_param_range key_size;
154  struct rte_crypto_param_range digest_size;
156  struct rte_crypto_param_range aad_size;
158  struct rte_crypto_param_range iv_size;
160  } aead;
161  };
162 };
163 
169  enum rte_crypto_asym_xform_type xform_type;
172  uint32_t op_types;
175  __extension__
176  union {
177  struct rte_crypto_param_range modlen;
181  };
182 };
183 
190 };
191 
192 
199  union {
204  };
205 };
206 
209  enum rte_crypto_sym_xform_type type;
210  union {
211  enum rte_crypto_cipher_algorithm cipher;
212  enum rte_crypto_auth_algorithm auth;
213  enum rte_crypto_aead_algorithm aead;
214  } algo;
215 };
216 
225 };
226 
238 rte_cryptodev_sym_capability_get(uint8_t dev_id,
239  const struct rte_cryptodev_sym_capability_idx *idx);
240 
251 __rte_experimental
253 rte_cryptodev_asym_capability_get(uint8_t dev_id,
254  const struct rte_cryptodev_asym_capability_idx *idx);
255 
268 int
270  const struct rte_cryptodev_symmetric_capability *capability,
271  uint16_t key_size, uint16_t iv_size);
272 
286 int
288  const struct rte_cryptodev_symmetric_capability *capability,
289  uint16_t key_size, uint16_t digest_size, uint16_t iv_size);
290 
305 int
307  const struct rte_cryptodev_symmetric_capability *capability,
308  uint16_t key_size, uint16_t digest_size, uint16_t aad_size,
309  uint16_t iv_size);
310 
321 __rte_experimental
322 int
324  const struct rte_cryptodev_asymmetric_xform_capability *capability,
325  enum rte_crypto_asym_op_type op_type);
326 
337 __rte_experimental
338 int
340  const struct rte_cryptodev_asymmetric_xform_capability *capability,
341  uint16_t modlen);
342 
354 int
356  const char *algo_string);
357 
369 int
371  const char *algo_string);
372 
384 int
386  const char *algo_string);
387 
399 __rte_experimental
400 int
402  const char *xform_string);
403 
404 
/*
 * Terminator entry for a capabilities array: an element whose first field
 * is RTE_CRYPTO_OP_TYPE_UNDEFINED marks the end of the list.
 */
406 #define RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST() \
407  { RTE_CRYPTO_OP_TYPE_UNDEFINED }
408 
409 
/*
 * Cryptodev feature flags: each flag is a distinct bit in a 64-bit
 * feature mask, covering supported operation types, CPU instruction-set
 * acceleration, scatter-gather (SGL) / linear-buffer (LB) I/O modes,
 * session handling and security-related capabilities.
 */
418 #define RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO (1ULL << 0)
419 
420 #define RTE_CRYPTODEV_FF_ASYMMETRIC_CRYPTO (1ULL << 1)
421 
422 #define RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING (1ULL << 2)
423 
424 #define RTE_CRYPTODEV_FF_CPU_SSE (1ULL << 3)
425 
426 #define RTE_CRYPTODEV_FF_CPU_AVX (1ULL << 4)
427 
428 #define RTE_CRYPTODEV_FF_CPU_AVX2 (1ULL << 5)
429 
430 #define RTE_CRYPTODEV_FF_CPU_AESNI (1ULL << 6)
431 
432 #define RTE_CRYPTODEV_FF_HW_ACCELERATED (1ULL << 7)
433 
436 #define RTE_CRYPTODEV_FF_CPU_AVX512 (1ULL << 8)
437 
438 #define RTE_CRYPTODEV_FF_IN_PLACE_SGL (1ULL << 9)
439 
442 #define RTE_CRYPTODEV_FF_OOP_SGL_IN_SGL_OUT (1ULL << 10)
443 
446 #define RTE_CRYPTODEV_FF_OOP_SGL_IN_LB_OUT (1ULL << 11)
447 
451 #define RTE_CRYPTODEV_FF_OOP_LB_IN_SGL_OUT (1ULL << 12)
452 
455 #define RTE_CRYPTODEV_FF_OOP_LB_IN_LB_OUT (1ULL << 13)
456 
457 #define RTE_CRYPTODEV_FF_CPU_NEON (1ULL << 14)
458 
459 #define RTE_CRYPTODEV_FF_CPU_ARM_CE (1ULL << 15)
460 
461 #define RTE_CRYPTODEV_FF_SECURITY (1ULL << 16)
462 
463 #define RTE_CRYPTODEV_FF_RSA_PRIV_OP_KEY_EXP (1ULL << 17)
464 
465 #define RTE_CRYPTODEV_FF_RSA_PRIV_OP_KEY_QT (1ULL << 18)
466 
467 #define RTE_CRYPTODEV_FF_DIGEST_ENCRYPTED (1ULL << 19)
468 
469 #define RTE_CRYPTODEV_FF_ASYM_SESSIONLESS (1ULL << 20)
470 
471 #define RTE_CRYPTODEV_FF_SYM_CPU_CRYPTO (1ULL << 21)
472 
473 #define RTE_CRYPTODEV_FF_SYM_SESSIONLESS (1ULL << 22)
474 
475 #define RTE_CRYPTODEV_FF_NON_BYTE_ALIGNED_DATA (1ULL << 23)
476 
477 #define RTE_CRYPTODEV_FF_SYM_RAW_DP (1ULL << 24)
478 
479 #define RTE_CRYPTODEV_FF_CIPHER_MULTIPLE_DATA_UNITS (1ULL << 25)
480 
481 #define RTE_CRYPTODEV_FF_CIPHER_WRAPPED_KEY (1ULL << 26)
482 
483 #define RTE_CRYPTODEV_FF_SECURITY_INNER_CSUM (1ULL << 27)
484 
495 extern const char *
496 rte_cryptodev_get_feature_name(uint64_t flag);
497 
500  const char *driver_name;
501  uint8_t driver_id;
502  struct rte_device *device;
504  uint64_t feature_flags;
519  struct {
520  unsigned max_nb_sessions;
525  } sym;
526 };
527 
/* Device attachment state values. */
528 #define RTE_CRYPTODEV_DETACHED (0)
529 #define RTE_CRYPTODEV_ATTACHED (1)
530 
536 };
537 
540  uint32_t nb_descriptors;
545 };
546 
/*
 * Per-queue-pair crypto-op callback: invoked from the enqueue/dequeue
 * burst paths with the ops array and the user_param registered with the
 * callback; its return value replaces the current nb_ops count.
 */
568 typedef uint16_t (*rte_cryptodev_callback_fn)(uint16_t dev_id, uint16_t qp_id,
569  struct rte_crypto_op **ops, uint16_t nb_ops, void *user_param);
570 
/* Device event callback: receives the device id, event type and cb_arg. */
580 typedef void (*rte_cryptodev_cb_fn)(uint8_t dev_id,
581  enum rte_cryptodev_event_type event, void *cb_arg);
582 
583 
586  uint64_t enqueued_count;
588  uint64_t dequeued_count;
595 };
596 
/* Maximum length of a crypto device name string. */
597 #define RTE_CRYPTODEV_NAME_MAX_LEN (64)
598 
/* Look up a device id by device name; negative on failure. */
609 extern int
610 rte_cryptodev_get_dev_id(const char *name);
611 
/* Return the name of the device identified by dev_id. */
622 extern const char *
623 rte_cryptodev_name_get(uint8_t dev_id);
624 
/* Return the total number of crypto devices. */
632 extern uint8_t
633 rte_cryptodev_count(void);
634 
/* Return the number of devices backed by the given driver id. */
643 extern uint8_t
644 rte_cryptodev_device_count_by_driver(uint8_t driver_id);
645 
/*
 * Fill "devices" (capacity nb_devices) with the ids of devices using the
 * named driver; returns the number of entries written.
 */
657 uint8_t
658 rte_cryptodev_devices_get(const char *driver_name, uint8_t *devices,
659  uint8_t nb_devices);
660 /*
661  * Return the NUMA socket to which a device is connected
662  *
663  * @param dev_id
664  * The identifier of the device
665  * @return
666  * The NUMA socket id to which the device is connected or
667  * a default of zero if the socket could not be determined.
668  * -1 if the dev_id value is out of range.
669  */
670 extern int
671 rte_cryptodev_socket_id(uint8_t dev_id);
672 
675  int socket_id;
676  uint16_t nb_queue_pairs;
678  uint64_t ff_disable;
685 };
686 
/* Configure the device; must precede start. Returns 0 on success. */
701 extern int
702 rte_cryptodev_configure(uint8_t dev_id, struct rte_cryptodev_config *config);
703 
/* Start a configured device. Returns 0 on success. */
719 extern int
720 rte_cryptodev_start(uint8_t dev_id);
721 
/* Stop a started device. */
728 extern void
729 rte_cryptodev_stop(uint8_t dev_id);
730 
/* Close (release) a stopped device. Returns 0 on success. */
740 extern int
741 rte_cryptodev_close(uint8_t dev_id);
742 
/* Set up one queue pair with the given config on the given NUMA socket. */
764 extern int
765 rte_cryptodev_queue_pair_setup(uint8_t dev_id, uint16_t queue_pair_id,
766  const struct rte_cryptodev_qp_conf *qp_conf, int socket_id);
767 
/* EXPERIMENTAL: query the status of one queue pair. */
781 __rte_experimental
782 int
783 rte_cryptodev_get_qp_status(uint8_t dev_id, uint16_t queue_pair_id);
784 
/* Return the number of queue pairs configured on the device. */
792 extern uint16_t
793 rte_cryptodev_queue_pair_count(uint8_t dev_id);
794 
795 
/* Retrieve device statistics into *stats. Returns 0 on success. */
807 extern int
808 rte_cryptodev_stats_get(uint8_t dev_id, struct rte_cryptodev_stats *stats);
809 
/* Reset device statistics counters. */
815 extern void
816 rte_cryptodev_stats_reset(uint8_t dev_id);
817 
/* Retrieve device information into *dev_info. */
831 extern void
832 rte_cryptodev_info_get(uint8_t dev_id, struct rte_cryptodev_info *dev_info);
833 
834 
848 extern int
849 rte_cryptodev_callback_register(uint8_t dev_id,
850  enum rte_cryptodev_event_type event,
851  rte_cryptodev_cb_fn cb_fn, void *cb_arg);
852 
866 extern int
867 rte_cryptodev_callback_unregister(uint8_t dev_id,
868  enum rte_cryptodev_event_type event,
869  rte_cryptodev_cb_fn cb_fn, void *cb_arg);
870 
/* Opaque event-callback record (forward declaration only). */
871 struct rte_cryptodev_callback;
872 
/* Tail-queue head type for the list of registered event callbacks. */
874 RTE_TAILQ_HEAD(rte_cryptodev_cb_list, rte_cryptodev_callback);
875 
885  void *arg;
887 };
888 
/*
 * RCU-protected head of a per-queue-pair callback list: "next" points at
 * the first callback node (read with a relaxed atomic load on the burst
 * data path) and "qsbr" is the RCU QSBR variable readers go online/offline
 * against while walking the list.
 */
893 struct rte_cryptodev_cb_rcu {
894  struct rte_cryptodev_cb *next;
896  struct rte_rcu_qsbr *qsbr;
898 };
899 
900 void *
901 rte_cryptodev_get_sec_ctx(uint8_t dev_id);
902 
908  uint64_t opaque_data;
910  uint16_t nb_drivers;
912  uint16_t user_data_sz;
914  __extension__ struct {
915  void *data;
916  uint16_t refcnt;
917  } sess_data[0];
919 };
920 
923  __extension__ void *sess_private_data[0];
925 };
926 
953 __rte_experimental
954 struct rte_mempool *
955 rte_cryptodev_sym_session_pool_create(const char *name, uint32_t nb_elts,
956  uint32_t elt_size, uint32_t cache_size, uint16_t priv_size,
957  int socket_id);
958 
970 
980 __rte_experimental
983 
996 int
998 
1011 __rte_experimental
1012 int
1014 
1031 int
1032 rte_cryptodev_sym_session_init(uint8_t dev_id,
1033  struct rte_cryptodev_sym_session *sess,
1034  struct rte_crypto_sym_xform *xforms,
1035  struct rte_mempool *mempool);
1036 
1052 __rte_experimental
1053 int
1054 rte_cryptodev_asym_session_init(uint8_t dev_id,
1055  struct rte_cryptodev_asym_session *sess,
1056  struct rte_crypto_asym_xform *xforms,
1057  struct rte_mempool *mempool);
1058 
1073 int
1074 rte_cryptodev_sym_session_clear(uint8_t dev_id,
1075  struct rte_cryptodev_sym_session *sess);
1076 
1087 __rte_experimental
1088 int
1089 rte_cryptodev_asym_session_clear(uint8_t dev_id,
1090  struct rte_cryptodev_asym_session *sess);
1091 
1099 unsigned int
1101 
1113 __rte_experimental
1114 unsigned int
1116  struct rte_cryptodev_sym_session *sess);
1117 
1124 __rte_experimental
1125 unsigned int
1127 
1139 unsigned int
1141 
1152 __rte_experimental
1153 unsigned int
1155 
1164 unsigned int
1165 rte_cryptodev_is_valid_dev(uint8_t dev_id);
1166 
1175 int rte_cryptodev_driver_id_get(const char *name);
1176 
1185 const char *rte_cryptodev_driver_name_get(uint8_t driver_id);
1186 
1199 __rte_experimental
1200 int
1202  struct rte_cryptodev_sym_session *sess,
1203  void *data,
1204  uint16_t size);
1205 
1216 __rte_experimental
1217 void *
1219  struct rte_cryptodev_sym_session *sess);
1220 
1233 __rte_experimental
1234 uint32_t
1236  struct rte_cryptodev_sym_session *sess, union rte_crypto_sym_ofs ofs,
1237  struct rte_crypto_sym_vec *vec);
1238 
1248 __rte_experimental
1249 int
1250 rte_cryptodev_get_raw_dp_ctx_size(uint8_t dev_id);
1251 
1257  struct rte_cryptodev_sym_session *crypto_sess;
1258  struct rte_crypto_sym_xform *xform;
1259  struct rte_security_session *sec_sess;
1260 };
1261 
1288  void *qp, uint8_t *drv_ctx, struct rte_crypto_sym_vec *vec,
1289  union rte_crypto_sym_ofs ofs, void *user_data[], int *enqueue_status);
1290 
1313  void *qp, uint8_t *drv_ctx, struct rte_crypto_vec *data_vec,
1314  uint16_t n_data_vecs, union rte_crypto_sym_ofs ofs,
1315  struct rte_crypto_va_iova_ptr *iv,
1316  struct rte_crypto_va_iova_ptr *digest,
1317  struct rte_crypto_va_iova_ptr *aad_or_auth_iv,
1318  void *user_data);
1319 
/*
 * Raw data-path driver hook signaling that "n" previously enqueued or
 * dequeued operations are done (used by the enqueue_done/dequeue_done
 * API calls).
 */
1331 typedef int (*cryptodev_sym_raw_operation_done_t)(void *qp, uint8_t *drv_ctx,
1332  uint32_t n);
1333 
/* Raw data-path user hook returning the number of ops to dequeue. */
1343 typedef uint32_t (*rte_cryptodev_raw_get_dequeue_count_t)(void *user_data);
1344 
/* Raw data-path user hook called per dequeued op with its index/status. */
1353 typedef void (*rte_cryptodev_raw_post_dequeue_t)(void *user_data,
1354  uint32_t index, uint8_t is_op_success);
1355 
/* Raw data-path driver hook dequeuing a burst of symmetric ops. */
1397 typedef uint32_t (*cryptodev_sym_raw_dequeue_burst_t)(void *qp,
1398  uint8_t *drv_ctx,
1399  rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count,
1400  uint32_t max_nb_to_dequeue,
1401  rte_cryptodev_raw_post_dequeue_t post_dequeue,
1402  void **out_user_data, uint8_t is_user_data_array,
1403  uint32_t *n_success, int *dequeue_status);
1404 
/* Raw data-path driver hook dequeuing a single symmetric op. */
1428 typedef void * (*cryptodev_sym_raw_dequeue_t)(
1429  void *qp, uint8_t *drv_ctx, int *dequeue_status,
1430  enum rte_crypto_op_status *op_status);
1431 
1438  void *qp_data;
1439 
1441  cryptodev_sym_raw_enqueue_burst_t enqueue_burst;
1444  cryptodev_sym_raw_dequeue_burst_t dequeue_burst;
1446 
1447  /* Driver specific context data */
1448  __extension__ uint8_t drv_ctx_data[];
1449 };
1450 
1474 __rte_experimental
1475 int
1476 rte_cryptodev_configure_raw_dp_ctx(uint8_t dev_id, uint16_t qp_id,
1477  struct rte_crypto_raw_dp_ctx *ctx,
1478  enum rte_crypto_op_sess_type sess_type,
1479  union rte_cryptodev_session_ctx session_ctx,
1480  uint8_t is_update);
1481 
1506 __rte_experimental
1507 uint32_t
1509  struct rte_crypto_sym_vec *vec, union rte_crypto_sym_ofs ofs,
1510  void **user_data, int *enqueue_status);
1511 
1532 __rte_experimental
1533 static __rte_always_inline int
1535  struct rte_crypto_vec *data_vec, uint16_t n_data_vecs,
1536  union rte_crypto_sym_ofs ofs,
1537  struct rte_crypto_va_iova_ptr *iv,
1538  struct rte_crypto_va_iova_ptr *digest,
1539  struct rte_crypto_va_iova_ptr *aad_or_auth_iv,
1540  void *user_data)
1541 {
1542  return (*ctx->enqueue)(ctx->qp_data, ctx->drv_ctx_data, data_vec,
1543  n_data_vecs, ofs, iv, digest, aad_or_auth_iv, user_data);
1544 }
1545 
1556 __rte_experimental
1557 int
1559  uint32_t n);
1560 
1602 __rte_experimental
1603 uint32_t
1605  rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count,
1606  uint32_t max_nb_to_dequeue,
1607  rte_cryptodev_raw_post_dequeue_t post_dequeue,
1608  void **out_user_data, uint8_t is_user_data_array,
1609  uint32_t *n_success, int *dequeue_status);
1610 
1634 __rte_experimental
1635 static __rte_always_inline void *
1637  int *dequeue_status, enum rte_crypto_op_status *op_status)
1638 {
1639  return (*ctx->dequeue)(ctx->qp_data, ctx->drv_ctx_data, dequeue_status,
1640  op_status);
1641 }
1642 
1652 __rte_experimental
1653 int
1655  uint32_t n);
1656 
1693 __rte_experimental
1694 struct rte_cryptodev_cb *
1695 rte_cryptodev_add_enq_callback(uint8_t dev_id,
1696  uint16_t qp_id,
1698  void *cb_arg);
1699 
1722 __rte_experimental
1723 int rte_cryptodev_remove_enq_callback(uint8_t dev_id,
1724  uint16_t qp_id,
1725  struct rte_cryptodev_cb *cb);
1726 
1762 __rte_experimental
1763 struct rte_cryptodev_cb *
1764 rte_cryptodev_add_deq_callback(uint8_t dev_id,
1765  uint16_t qp_id,
1767  void *cb_arg);
1768 
1790 __rte_experimental
1791 int rte_cryptodev_remove_deq_callback(uint8_t dev_id,
1792  uint16_t qp_id,
1793  struct rte_cryptodev_cb *cb);
1794 
1795 #include <rte_cryptodev_core.h>
1832 static inline uint16_t
1833 rte_cryptodev_dequeue_burst(uint8_t dev_id, uint16_t qp_id,
1834  struct rte_crypto_op **ops, uint16_t nb_ops)
1835 {
1836  const struct rte_crypto_fp_ops *fp_ops;
1837  void *qp;
1838 
1839  rte_cryptodev_trace_dequeue_burst(dev_id, qp_id, (void **)ops, nb_ops);
1840 
1841  fp_ops = &rte_crypto_fp_ops[dev_id];
1842  qp = fp_ops->qp.data[qp_id];
1843 
1844  nb_ops = fp_ops->dequeue_burst(qp, ops, nb_ops);
1845 
1846 #ifdef RTE_CRYPTO_CALLBACKS
1847  if (unlikely(fp_ops->qp.deq_cb != NULL)) {
1848  struct rte_cryptodev_cb_rcu *list;
1849  struct rte_cryptodev_cb *cb;
1850 
1851  /* __ATOMIC_RELEASE memory order was used when the
1852  * call back was inserted into the list.
1853  * Since there is a clear dependency between loading
1854  * cb and cb->fn/cb->next, __ATOMIC_ACQUIRE memory order is
1855  * not required.
1856  */
1857  list = &fp_ops->qp.deq_cb[qp_id];
1858  rte_rcu_qsbr_thread_online(list->qsbr, 0);
1859  cb = __atomic_load_n(&list->next, __ATOMIC_RELAXED);
1860 
1861  while (cb != NULL) {
1862  nb_ops = cb->fn(dev_id, qp_id, ops, nb_ops,
1863  cb->arg);
1864  cb = cb->next;
1865  };
1866 
1867  rte_rcu_qsbr_thread_offline(list->qsbr, 0);
1868  }
1869 #endif
1870  return nb_ops;
1871 }
1872 
1904 static inline uint16_t
1905 rte_cryptodev_enqueue_burst(uint8_t dev_id, uint16_t qp_id,
1906  struct rte_crypto_op **ops, uint16_t nb_ops)
1907 {
1908  const struct rte_crypto_fp_ops *fp_ops;
1909  void *qp;
1910 
1911  fp_ops = &rte_crypto_fp_ops[dev_id];
1912  qp = fp_ops->qp.data[qp_id];
1913 #ifdef RTE_CRYPTO_CALLBACKS
1914  if (unlikely(fp_ops->qp.enq_cb != NULL)) {
1915  struct rte_cryptodev_cb_rcu *list;
1916  struct rte_cryptodev_cb *cb;
1917 
1918  /* __ATOMIC_RELEASE memory order was used when the
1919  * call back was inserted into the list.
1920  * Since there is a clear dependency between loading
1921  * cb and cb->fn/cb->next, __ATOMIC_ACQUIRE memory order is
1922  * not required.
1923  */
1924  list = &fp_ops->qp.enq_cb[qp_id];
1925  rte_rcu_qsbr_thread_online(list->qsbr, 0);
1926  cb = __atomic_load_n(&list->next, __ATOMIC_RELAXED);
1927 
1928  while (cb != NULL) {
1929  nb_ops = cb->fn(dev_id, qp_id, ops, nb_ops,
1930  cb->arg);
1931  cb = cb->next;
1932  };
1933 
1934  rte_rcu_qsbr_thread_offline(list->qsbr, 0);
1935  }
1936 #endif
1937 
1938  rte_cryptodev_trace_enqueue_burst(dev_id, qp_id, (void **)ops, nb_ops);
1939  return fp_ops->enqueue_burst(qp, ops, nb_ops);
1940 }
1941 
1942 
1943 
1944 #ifdef __cplusplus
1945 }
1946 #endif
1947 
1948 #endif /* _RTE_CRYPTODEV_H_ */
int rte_cryptodev_get_cipher_algo_enum(enum rte_crypto_cipher_algorithm *algo_enum, const char *algo_string)
int rte_cryptodev_get_auth_algo_enum(enum rte_crypto_auth_algorithm *algo_enum, const char *algo_string)
#define __rte_always_inline
Definition: rte_common.h:228
struct rte_cryptodev_cb * next
struct rte_mempool * mp_session
void rte_cryptodev_info_get(uint8_t dev_id, struct rte_cryptodev_info *dev_info)
__rte_experimental unsigned int rte_cryptodev_sym_get_existing_header_session_size(struct rte_cryptodev_sym_session *sess)
static __rte_experimental __rte_always_inline int rte_cryptodev_raw_enqueue(struct rte_crypto_raw_dp_ctx *ctx, struct rte_crypto_vec *data_vec, uint16_t n_data_vecs, union rte_crypto_sym_ofs ofs, struct rte_crypto_va_iova_ptr *iv, struct rte_crypto_va_iova_ptr *digest, struct rte_crypto_va_iova_ptr *aad_or_auth_iv, void *user_data)
const char * rte_cryptodev_get_feature_name(uint64_t flag)
__rte_experimental unsigned int rte_cryptodev_asym_get_private_session_size(uint8_t dev_id)
int rte_cryptodev_queue_pair_setup(uint8_t dev_id, uint16_t queue_pair_id, const struct rte_cryptodev_qp_conf *qp_conf, int socket_id)
uint8_t rte_cryptodev_devices_get(const char *driver_name, uint8_t *devices, uint8_t nb_devices)
rte_crypto_asym_xform_type
static uint16_t rte_cryptodev_dequeue_burst(uint8_t dev_id, uint16_t qp_id, struct rte_crypto_op **ops, uint16_t nb_ops)
int rte_cryptodev_driver_id_get(const char *name)
int rte_cryptodev_sym_session_clear(uint8_t dev_id, struct rte_cryptodev_sym_session *sess)
__rte_experimental int rte_cryptodev_asym_get_xform_enum(enum rte_crypto_asym_xform_type *xform_enum, const char *xform_string)
int rte_cryptodev_sym_session_init(uint8_t dev_id, struct rte_cryptodev_sym_session *sess, struct rte_crypto_sym_xform *xforms, struct rte_mempool *mempool)
__rte_experimental uint32_t rte_cryptodev_raw_enqueue_burst(struct rte_crypto_raw_dp_ctx *ctx, struct rte_crypto_sym_vec *vec, union rte_crypto_sym_ofs ofs, void **user_data, int *enqueue_status)
uint64_t dequeue_err_count
int rte_cryptodev_get_aead_algo_enum(enum rte_crypto_aead_algorithm *algo_enum, const char *algo_string)
uint32_t cache_size
Definition: rte_mempool.h:224
static __rte_always_inline void rte_rcu_qsbr_thread_offline(struct rte_rcu_qsbr *v, unsigned int thread_id)
Definition: rte_rcu_qsbr.h:356
char name[RTE_MEMPOOL_NAMESIZE]
Definition: rte_mempool.h:213
const struct rte_cryptodev_symmetric_capability * rte_cryptodev_sym_capability_get(uint8_t dev_id, const struct rte_cryptodev_sym_capability_idx *idx)
int rte_cryptodev_configure(uint8_t dev_id, struct rte_cryptodev_config *config)
int rte_cryptodev_callback_unregister(uint8_t dev_id, enum rte_cryptodev_event_type event, rte_cryptodev_cb_fn cb_fn, void *cb_arg)
const struct rte_cryptodev_capabilities * capabilities
uint32_t(* rte_cryptodev_raw_get_dequeue_count_t)(void *user_data)
rte_crypto_asym_op_type
__rte_experimental struct rte_cryptodev_cb * rte_cryptodev_add_enq_callback(uint8_t dev_id, uint16_t qp_id, rte_cryptodev_callback_fn cb_fn, void *cb_arg)
void rte_cryptodev_stop(uint8_t dev_id)
const char * driver_name
int rte_cryptodev_close(uint8_t dev_id)
int(* cryptodev_sym_raw_operation_done_t)(void *qp, uint8_t *drv_ctx, uint32_t n)
void(* rte_cryptodev_cb_fn)(uint8_t dev_id, enum rte_cryptodev_event_type event, void *cb_arg)
__rte_experimental int rte_cryptodev_sym_session_set_user_data(struct rte_cryptodev_sym_session *sess, void *data, uint16_t size)
int rte_cryptodev_sym_capability_check_aead(const struct rte_cryptodev_symmetric_capability *capability, uint16_t key_size, uint16_t digest_size, uint16_t aad_size, uint16_t iv_size)
#define unlikely(x)
__rte_experimental uint32_t rte_cryptodev_raw_dequeue_burst(struct rte_crypto_raw_dp_ctx *ctx, rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count, uint32_t max_nb_to_dequeue, rte_cryptodev_raw_post_dequeue_t post_dequeue, void **out_user_data, uint8_t is_user_data_array, uint32_t *n_success, int *dequeue_status)
unsigned int rte_cryptodev_sym_get_header_session_size(void)
const char * rte_cryptodev_driver_name_get(uint8_t driver_id)
int rte_cryptodev_callback_register(uint8_t dev_id, enum rte_cryptodev_event_type event, rte_cryptodev_cb_fn cb_fn, void *cb_arg)
uint16_t min_mbuf_tailroom_req
__rte_experimental int rte_cryptodev_raw_dequeue_done(struct rte_crypto_raw_dp_ctx *ctx, uint32_t n)
__rte_experimental int rte_cryptodev_get_raw_dp_ctx_size(uint8_t dev_id)
struct rte_cryptodev_sym_session * rte_cryptodev_sym_session_create(struct rte_mempool *mempool)
int rte_cryptodev_sym_session_free(struct rte_cryptodev_sym_session *sess)
const char * rte_cryptodev_name_get(uint8_t dev_id)
__rte_experimental void * rte_cryptodev_sym_session_get_user_data(struct rte_cryptodev_sym_session *sess)
rte_crypto_op_type
Definition: rte_crypto.h:29
__rte_experimental const struct rte_cryptodev_asymmetric_xform_capability * rte_cryptodev_asym_capability_get(uint8_t dev_id, const struct rte_cryptodev_asym_capability_idx *idx)
__rte_experimental int rte_cryptodev_asym_session_clear(uint8_t dev_id, struct rte_cryptodev_asym_session *sess)
__rte_experimental int rte_cryptodev_asym_session_free(struct rte_cryptodev_asym_session *sess)
rte_cryptodev_callback_fn fn
static uint16_t rte_cryptodev_enqueue_burst(uint8_t dev_id, uint16_t qp_id, struct rte_crypto_op **ops, uint16_t nb_ops)
int rte_cryptodev_sym_capability_check_cipher(const struct rte_cryptodev_symmetric_capability *capability, uint16_t key_size, uint16_t iv_size)
__rte_experimental int rte_cryptodev_asym_session_init(uint8_t dev_id, struct rte_cryptodev_asym_session *sess, struct rte_crypto_asym_xform *xforms, struct rte_mempool *mempool)
uint16_t(* rte_cryptodev_callback_fn)(uint16_t dev_id, uint16_t qp_id, struct rte_crypto_op **ops, uint16_t nb_ops, void *user_param)
int rte_cryptodev_stats_get(uint8_t dev_id, struct rte_cryptodev_stats *stats)
__rte_experimental int rte_cryptodev_raw_enqueue_done(struct rte_crypto_raw_dp_ctx *ctx, uint32_t n)
struct rte_device * device
__rte_experimental struct rte_cryptodev_asym_session * rte_cryptodev_asym_session_create(struct rte_mempool *mempool)
uint32_t elt_size
Definition: rte_mempool.h:227
__rte_experimental int rte_cryptodev_remove_enq_callback(uint8_t dev_id, uint16_t qp_id, struct rte_cryptodev_cb *cb)
uint8_t rte_cryptodev_count(void)
void *(* cryptodev_sym_raw_dequeue_t)(void *qp, uint8_t *drv_ctx, int *dequeue_status, enum rte_crypto_op_status *op_status)
int rte_cryptodev_sym_capability_check_auth(const struct rte_cryptodev_symmetric_capability *capability, uint16_t key_size, uint16_t digest_size, uint16_t iv_size)
uint16_t min_mbuf_headroom_req
__rte_experimental unsigned int rte_cryptodev_asym_get_header_session_size(void)
unsigned int rte_cryptodev_sym_get_private_session_size(uint8_t dev_id)
__rte_experimental struct rte_mempool * rte_cryptodev_sym_session_pool_create(const char *name, uint32_t nb_elts, uint32_t elt_size, uint32_t cache_size, uint16_t priv_size, int socket_id)
static __rte_always_inline void rte_rcu_qsbr_thread_online(struct rte_rcu_qsbr *v, unsigned int thread_id)
Definition: rte_rcu_qsbr.h:303
uint32_t(* cryptodev_sym_raw_enqueue_burst_t)(void *qp, uint8_t *drv_ctx, struct rte_crypto_sym_vec *vec, union rte_crypto_sym_ofs ofs, void *user_data[], int *enqueue_status)
#define RTE_STD_C11
Definition: rte_common.h:42
rte_crypto_auth_algorithm
__rte_experimental int rte_cryptodev_asym_xform_capability_check_modlen(const struct rte_cryptodev_asymmetric_xform_capability *capability, uint16_t modlen)
rte_crypto_sym_xform_type
__rte_experimental uint32_t rte_cryptodev_sym_cpu_crypto_process(uint8_t dev_id, struct rte_cryptodev_sym_session *sess, union rte_crypto_sym_ofs ofs, struct rte_crypto_sym_vec *vec)
struct rte_mempool * mp_session_private
RTE_TAILQ_HEAD(rte_cryptodev_cb_list, rte_cryptodev_callback)
int rte_cryptodev_start(uint8_t dev_id)
__rte_experimental int rte_cryptodev_configure_raw_dp_ctx(uint8_t dev_id, uint16_t qp_id, struct rte_crypto_raw_dp_ctx *ctx, enum rte_crypto_op_sess_type sess_type, union rte_cryptodev_session_ctx session_ctx, uint8_t is_update)
uint64_t enqueue_err_count
uint32_t(* cryptodev_sym_raw_dequeue_burst_t)(void *qp, uint8_t *drv_ctx, rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count, uint32_t max_nb_to_dequeue, rte_cryptodev_raw_post_dequeue_t post_dequeue, void **out_user_data, uint8_t is_user_data_array, uint32_t *n_success, int *dequeue_status)
rte_crypto_op_sess_type
Definition: rte_crypto.h:62
struct rte_crypto_param_range modlen
uint16_t rte_cryptodev_queue_pair_count(uint8_t dev_id)
__rte_experimental int rte_cryptodev_asym_xform_capability_check_optype(const struct rte_cryptodev_asymmetric_xform_capability *capability, enum rte_crypto_asym_op_type op_type)
int(* cryptodev_sym_raw_enqueue_t)(void *qp, uint8_t *drv_ctx, struct rte_crypto_vec *data_vec, uint16_t n_data_vecs, union rte_crypto_sym_ofs ofs, struct rte_crypto_va_iova_ptr *iv, struct rte_crypto_va_iova_ptr *digest, struct rte_crypto_va_iova_ptr *aad_or_auth_iv, void *user_data)
__rte_experimental int rte_cryptodev_get_qp_status(uint8_t dev_id, uint16_t queue_pair_id)
unsigned max_nb_sessions
rte_cryptodev_event_type
__rte_experimental int rte_cryptodev_remove_deq_callback(uint8_t dev_id, uint16_t qp_id, struct rte_cryptodev_cb *cb)
static __rte_experimental __rte_always_inline void * rte_cryptodev_raw_dequeue(struct rte_crypto_raw_dp_ctx *ctx, int *dequeue_status, enum rte_crypto_op_status *op_status)
void rte_cryptodev_stats_reset(uint8_t dev_id)
int rte_cryptodev_get_dev_id(const char *name)
unsigned max_nb_queue_pairs
uint8_t rte_cryptodev_device_count_by_driver(uint8_t driver_id)
__rte_experimental struct rte_cryptodev_cb * rte_cryptodev_add_deq_callback(uint8_t dev_id, uint16_t qp_id, rte_cryptodev_callback_fn cb_fn, void *cb_arg)
unsigned int rte_cryptodev_is_valid_dev(uint8_t dev_id)
void(* rte_cryptodev_raw_post_dequeue_t)(void *user_data, uint32_t index, uint8_t is_op_success)
rte_crypto_op_status
Definition: rte_crypto.h:39
rte_crypto_aead_algorithm
rte_crypto_cipher_algorithm