DPDK  22.11.0-rc0
rte_cryptodev.h
Go to the documentation of this file.
1 /* SPDX-License-Identifier: BSD-3-Clause
2  * Copyright(c) 2015-2020 Intel Corporation.
3  */
4 
5 #ifndef _RTE_CRYPTODEV_H_
6 #define _RTE_CRYPTODEV_H_
7 
17 #ifdef __cplusplus
18 extern "C" {
19 #endif
20 
21 #include "rte_kvargs.h"
22 #include "rte_crypto.h"
23 #include <rte_common.h>
24 #include <rte_rcu_qsbr.h>
25 
26 #include "rte_cryptodev_trace_fp.h"
27 
/* Table of crypto device name strings.
 * NOTE(review): the identifier carries a known upstream typo
 * ("cyptodev", missing an 'r'); it is kept as-is because renaming
 * would break the exported ABI/API.
 */
extern const char **rte_cyptodev_names;
29 
30 /* Logging Macros */
31 
/* Log an error message, prefixed with the calling function name and line. */
#define CDEV_LOG_ERR(...) \
	RTE_LOG(ERR, CRYPTODEV, \
		RTE_FMT("%s() line %u: " RTE_FMT_HEAD(__VA_ARGS__,) "\n", \
			__func__, __LINE__, RTE_FMT_TAIL(__VA_ARGS__,)))

/* Log an informational message (no function/line prefix). */
#define CDEV_LOG_INFO(...) \
	RTE_LOG(INFO, CRYPTODEV, \
		RTE_FMT(RTE_FMT_HEAD(__VA_ARGS__,) "\n", \
			RTE_FMT_TAIL(__VA_ARGS__,)))

/* Log a debug message, prefixed with the calling function name and line. */
#define CDEV_LOG_DEBUG(...) \
	RTE_LOG(DEBUG, CRYPTODEV, \
		RTE_FMT("%s() line %u: " RTE_FMT_HEAD(__VA_ARGS__,) "\n", \
			__func__, __LINE__, RTE_FMT_TAIL(__VA_ARGS__,)))

/* PMD trace log. NOTE: expects a variable named `dev` (device name
 * string) to be in scope at the expansion site — it is referenced
 * unhygienically inside the macro body.
 */
#define CDEV_PMD_TRACE(...) \
	RTE_LOG(DEBUG, CRYPTODEV, \
		RTE_FMT("[%s] %s: " RTE_FMT_HEAD(__VA_ARGS__,) "\n", \
			dev, __func__, RTE_FMT_TAIL(__VA_ARGS__,)))
51 
/* Return a pointer of type `t` to the data within crypto op `c`,
 * at byte offset `o` from the start of the op structure.
 */
#define rte_crypto_op_ctod_offset(c, t, o) \
	((t)((char *)(c) + (o)))

/* Return the IOVA (bus/physical) address of the data within crypto
 * op `c`, at byte offset `o` from the op's base physical address.
 */
#define rte_crypto_op_ctophys_offset(c, o) \
	(rte_iova_t)((c)->phys_addr + (o))
81 
86  uint16_t min;
87  uint16_t max;
88  uint16_t increment;
94 };
95 
/* Cipher data-unit length flags, used as bits in the cipher
 * capability's dataunit_set field (0 means any data-unit length).
 */
#define RTE_CRYPTO_CIPHER_DATA_UNIT_LEN_512_BYTES RTE_BIT32(0)
#define RTE_CRYPTO_CIPHER_DATA_UNIT_LEN_4096_BYTES RTE_BIT32(1)
#define RTE_CRYPTO_CIPHER_DATA_UNIT_LEN_1_MEGABYTES RTE_BIT32(2)
104 
109  enum rte_crypto_sym_xform_type xform_type;
112  union {
113  struct {
116  uint16_t block_size;
118  struct rte_crypto_param_range key_size;
120  struct rte_crypto_param_range digest_size;
122  struct rte_crypto_param_range aad_size;
124  struct rte_crypto_param_range iv_size;
126  } auth;
128  struct {
131  uint16_t block_size;
133  struct rte_crypto_param_range key_size;
135  struct rte_crypto_param_range iv_size;
137  uint32_t dataunit_set;
143  } cipher;
145  struct {
148  uint16_t block_size;
150  struct rte_crypto_param_range key_size;
152  struct rte_crypto_param_range digest_size;
154  struct rte_crypto_param_range aad_size;
156  struct rte_crypto_param_range iv_size;
158  } aead;
159  };
160 };
161 
167  enum rte_crypto_asym_xform_type xform_type;
170  uint32_t op_types;
179  __extension__
180  union {
181  struct rte_crypto_param_range modlen;
185  };
186 };
187 
194 };
195 
196 
203  union {
208  };
209 };
210 
213  enum rte_crypto_sym_xform_type type;
214  union {
215  enum rte_crypto_cipher_algorithm cipher;
216  enum rte_crypto_auth_algorithm auth;
217  enum rte_crypto_aead_algorithm aead;
218  } algo;
219 };
220 
229 };
230 
242 rte_cryptodev_sym_capability_get(uint8_t dev_id,
243  const struct rte_cryptodev_sym_capability_idx *idx);
244 
255 __rte_experimental
257 rte_cryptodev_asym_capability_get(uint8_t dev_id,
258  const struct rte_cryptodev_asym_capability_idx *idx);
259 
272 int
274  const struct rte_cryptodev_symmetric_capability *capability,
275  uint16_t key_size, uint16_t iv_size);
276 
290 int
292  const struct rte_cryptodev_symmetric_capability *capability,
293  uint16_t key_size, uint16_t digest_size, uint16_t iv_size);
294 
309 int
311  const struct rte_cryptodev_symmetric_capability *capability,
312  uint16_t key_size, uint16_t digest_size, uint16_t aad_size,
313  uint16_t iv_size);
314 
325 __rte_experimental
326 int
328  const struct rte_cryptodev_asymmetric_xform_capability *capability,
329  enum rte_crypto_asym_op_type op_type);
330 
341 __rte_experimental
342 int
344  const struct rte_cryptodev_asymmetric_xform_capability *capability,
345  uint16_t modlen);
346 
358 int
360  const char *algo_string);
361 
373 int
375  const char *algo_string);
376 
388 int
390  const char *algo_string);
391 
403 __rte_experimental
404 int
406  const char *xform_string);
407 
408 
/* Terminating element for a capabilities array: an entry whose op
 * type is RTE_CRYPTO_OP_TYPE_UNDEFINED marks the end of the list.
 */
#define RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST() \
	{ RTE_CRYPTO_OP_TYPE_UNDEFINED }
412 
413 
/*
 * Crypto device feature flags: bit values advertised by a device in
 * the feature_flags field of its info structure.
 */

/* Symmetric crypto operations are supported */
#define RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO (1ULL << 0)

/* Asymmetric crypto operations are supported */
#define RTE_CRYPTODEV_FF_ASYMMETRIC_CRYPTO (1ULL << 1)

/* Chaining of symmetric crypto operations is supported */
#define RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING (1ULL << 2)

/* Implementation utilises CPU SSE instructions */
#define RTE_CRYPTODEV_FF_CPU_SSE (1ULL << 3)

/* Implementation utilises CPU AVX instructions */
#define RTE_CRYPTODEV_FF_CPU_AVX (1ULL << 4)

/* Implementation utilises CPU AVX2 instructions */
#define RTE_CRYPTODEV_FF_CPU_AVX2 (1ULL << 5)

/* Implementation utilises CPU AES-NI instructions */
#define RTE_CRYPTODEV_FF_CPU_AESNI (1ULL << 6)

/* Operations are off-loaded to a hardware accelerator */
#define RTE_CRYPTODEV_FF_HW_ACCELERATED (1ULL << 7)

/* Implementation utilises CPU AVX512 instructions */
#define RTE_CRYPTODEV_FF_CPU_AVX512 (1ULL << 8)

/* In-place scatter-gather list (SGL) buffers are supported */
#define RTE_CRYPTODEV_FF_IN_PLACE_SGL (1ULL << 9)

/* Out-of-place: SGL input, SGL output supported */
#define RTE_CRYPTODEV_FF_OOP_SGL_IN_SGL_OUT (1ULL << 10)

/* Out-of-place: SGL input, linear buffer output supported */
#define RTE_CRYPTODEV_FF_OOP_SGL_IN_LB_OUT (1ULL << 11)

/* Out-of-place: linear buffer input, SGL output supported */
#define RTE_CRYPTODEV_FF_OOP_LB_IN_SGL_OUT (1ULL << 12)

/* Out-of-place: linear buffer input, linear buffer output supported */
#define RTE_CRYPTODEV_FF_OOP_LB_IN_LB_OUT (1ULL << 13)

/* Implementation utilises ARM NEON instructions */
#define RTE_CRYPTODEV_FF_CPU_NEON (1ULL << 14)

/* Implementation utilises ARM crypto extension instructions */
#define RTE_CRYPTODEV_FF_CPU_ARM_CE (1ULL << 15)

/* Security (rte_security) protocol offload is supported */
#define RTE_CRYPTODEV_FF_SECURITY (1ULL << 16)

/* RSA private-key operation with exponent key format supported */
#define RTE_CRYPTODEV_FF_RSA_PRIV_OP_KEY_EXP (1ULL << 17)

/* RSA private-key operation with quintuple (CRT) key format supported */
#define RTE_CRYPTODEV_FF_RSA_PRIV_OP_KEY_QT (1ULL << 18)

/* Digest computed over encrypted data (digest-encrypted) supported */
#define RTE_CRYPTODEV_FF_DIGEST_ENCRYPTED (1ULL << 19)

/* Session-less asymmetric crypto operations supported */
#define RTE_CRYPTODEV_FF_ASYM_SESSIONLESS (1ULL << 20)

/* Synchronous CPU crypto API (sym_cpu_crypto_process) supported */
#define RTE_CRYPTODEV_FF_SYM_CPU_CRYPTO (1ULL << 21)

/* Session-less symmetric crypto operations supported */
#define RTE_CRYPTODEV_FF_SYM_SESSIONLESS (1ULL << 22)

/* Non-byte-aligned data is supported */
#define RTE_CRYPTODEV_FF_NON_BYTE_ALIGNED_DATA (1ULL << 23)

/* Symmetric raw data-path API is supported */
#define RTE_CRYPTODEV_FF_SYM_RAW_DP (1ULL << 24)

/* Cipher with multiple data-unit sizes is supported */
#define RTE_CRYPTODEV_FF_CIPHER_MULTIPLE_DATA_UNITS (1ULL << 25)

/* Wrapped cipher keys are supported */
#define RTE_CRYPTODEV_FF_CIPHER_WRAPPED_KEY (1ULL << 26)

/* Inner checksum offload with security protocol is supported */
#define RTE_CRYPTODEV_FF_SECURITY_INNER_CSUM (1ULL << 27)
488 
499 extern const char *
500 rte_cryptodev_get_feature_name(uint64_t flag);
501 
504  const char *driver_name;
505  uint8_t driver_id;
506  struct rte_device *device;
508  uint64_t feature_flags;
523  struct {
524  unsigned max_nb_sessions;
529  } sym;
530 };
531 
/* Device attachment states. */
#define RTE_CRYPTODEV_DETACHED (0)
#define RTE_CRYPTODEV_ATTACHED (1)
534 
540 };
541 
544  uint32_t nb_descriptors;
549 };
550 
/* Per-queue-pair enqueue/dequeue callback. May inspect or modify the
 * burst of ops and returns the (possibly adjusted) operation count
 * that is passed on to the next callback / returned to the caller.
 */
typedef uint16_t (*rte_cryptodev_callback_fn)(uint16_t dev_id, uint16_t qp_id,
		struct rte_crypto_op **ops, uint16_t nb_ops, void *user_param);

/* Device event callback, invoked with the event type and the
 * user-supplied argument registered alongside it.
 */
typedef void (*rte_cryptodev_cb_fn)(uint8_t dev_id,
		enum rte_cryptodev_event_type event, void *cb_arg);
586 
587 
590  uint64_t enqueued_count;
592  uint64_t dequeued_count;
599 };
600 
/* Max length of name of crypto device */
#define RTE_CRYPTODEV_NAME_MAX_LEN (64)
602 
613 extern int
614 rte_cryptodev_get_dev_id(const char *name);
615 
626 extern const char *
627 rte_cryptodev_name_get(uint8_t dev_id);
628 
636 extern uint8_t
637 rte_cryptodev_count(void);
638 
647 extern uint8_t
648 rte_cryptodev_device_count_by_driver(uint8_t driver_id);
649 
661 uint8_t
662 rte_cryptodev_devices_get(const char *driver_name, uint8_t *devices,
663  uint8_t nb_devices);
664 /*
665  * Return the NUMA socket to which a device is connected
666  *
667  * @param dev_id
668  * The identifier of the device
669  * @return
670  * The NUMA socket id to which the device is connected or
671  * a default of zero if the socket could not be determined.
671  * -1 if the dev_id value is out of range.
673  */
674 extern int
675 rte_cryptodev_socket_id(uint8_t dev_id);
676 
679  int socket_id;
680  uint16_t nb_queue_pairs;
682  uint64_t ff_disable;
689 };
690 
705 extern int
706 rte_cryptodev_configure(uint8_t dev_id, struct rte_cryptodev_config *config);
707 
723 extern int
724 rte_cryptodev_start(uint8_t dev_id);
725 
732 extern void
733 rte_cryptodev_stop(uint8_t dev_id);
734 
744 extern int
745 rte_cryptodev_close(uint8_t dev_id);
746 
768 extern int
769 rte_cryptodev_queue_pair_setup(uint8_t dev_id, uint16_t queue_pair_id,
770  const struct rte_cryptodev_qp_conf *qp_conf, int socket_id);
771 
785 __rte_experimental
786 int
787 rte_cryptodev_get_qp_status(uint8_t dev_id, uint16_t queue_pair_id);
788 
796 extern uint16_t
797 rte_cryptodev_queue_pair_count(uint8_t dev_id);
798 
799 
811 extern int
812 rte_cryptodev_stats_get(uint8_t dev_id, struct rte_cryptodev_stats *stats);
813 
819 extern void
820 rte_cryptodev_stats_reset(uint8_t dev_id);
821 
835 extern void
836 rte_cryptodev_info_get(uint8_t dev_id, struct rte_cryptodev_info *dev_info);
837 
838 
852 extern int
853 rte_cryptodev_callback_register(uint8_t dev_id,
854  enum rte_cryptodev_event_type event,
855  rte_cryptodev_cb_fn cb_fn, void *cb_arg);
856 
870 extern int
871 rte_cryptodev_callback_unregister(uint8_t dev_id,
872  enum rte_cryptodev_event_type event,
873  rte_cryptodev_cb_fn cb_fn, void *cb_arg);
874 
875 struct rte_cryptodev_callback;
876 
878 RTE_TAILQ_HEAD(rte_cryptodev_cb_list, rte_cryptodev_callback);
879 
889  void *arg;
891 };
892 
/* Per-queue-pair callback list head, protected by an RCU QSBR
 * variable (readers bracket list walks with thread_online/offline;
 * see rte_cryptodev_dequeue_burst/enqueue_burst).
 */
struct rte_cryptodev_cb_rcu {
	struct rte_cryptodev_cb *next;
	/**< Pointer to the first callback in the list */
	struct rte_rcu_qsbr *qsbr;
	/**< RCU QSBR variable used to synchronize callback removal */
};
903 
/* Get the security context associated with a crypto device.
 * NOTE(review): presumably returns NULL when the device does not
 * advertise RTE_CRYPTODEV_FF_SECURITY — confirm against implementation.
 */
void *
rte_cryptodev_get_sec_ctx(uint8_t dev_id);
906 
912  uint64_t opaque_data;
914  uint16_t nb_drivers;
916  uint16_t user_data_sz;
918  __extension__ struct {
919  void *data;
920  uint16_t refcnt;
921  } sess_data[];
923 };
924 
951 __rte_experimental
952 struct rte_mempool *
953 rte_cryptodev_sym_session_pool_create(const char *name, uint32_t nb_elts,
954  uint32_t elt_size, uint32_t cache_size, uint16_t priv_size,
955  int socket_id);
956 
977 __rte_experimental
978 struct rte_mempool *
979 rte_cryptodev_asym_session_pool_create(const char *name, uint32_t nb_elts,
980  uint32_t cache_size, uint16_t user_data_size, int socket_id);
981 
993 
1011 __rte_experimental
1012 int
1013 rte_cryptodev_asym_session_create(uint8_t dev_id,
1014  struct rte_crypto_asym_xform *xforms, struct rte_mempool *mp,
1015  void **session);
1016 
1029 int
1031 
1043 __rte_experimental
1044 int
1045 rte_cryptodev_asym_session_free(uint8_t dev_id, void *sess);
1046 
1063 int
1064 rte_cryptodev_sym_session_init(uint8_t dev_id,
1065  struct rte_cryptodev_sym_session *sess,
1066  struct rte_crypto_sym_xform *xforms,
1067  struct rte_mempool *mempool);
1068 
1083 int
1084 rte_cryptodev_sym_session_clear(uint8_t dev_id,
1085  struct rte_cryptodev_sym_session *sess);
1086 
1094 unsigned int
1096 
1108 __rte_experimental
1109 unsigned int
1111  struct rte_cryptodev_sym_session *sess);
1112 
1119 __rte_experimental
1120 unsigned int
1122 
1134 unsigned int
1136 
1147 __rte_experimental
1148 unsigned int
1150 
1159 unsigned int
1160 rte_cryptodev_is_valid_dev(uint8_t dev_id);
1161 
1170 int rte_cryptodev_driver_id_get(const char *name);
1171 
1180 const char *rte_cryptodev_driver_name_get(uint8_t driver_id);
1181 
1194 __rte_experimental
1195 int
1197  struct rte_cryptodev_sym_session *sess,
1198  void *data,
1199  uint16_t size);
1200 
1211 __rte_experimental
1212 void *
1214  struct rte_cryptodev_sym_session *sess);
1215 
1229 __rte_experimental
1230 int
1231 rte_cryptodev_asym_session_set_user_data(void *sess, void *data, uint16_t size);
1232 
1243 __rte_experimental
1244 void *
1246 
1259 __rte_experimental
1260 uint32_t
1262  struct rte_cryptodev_sym_session *sess, union rte_crypto_sym_ofs ofs,
1263  struct rte_crypto_sym_vec *vec);
1264 
1274 __rte_experimental
1275 int
1276 rte_cryptodev_get_raw_dp_ctx_size(uint8_t dev_id);
1277 
1293 __rte_experimental
1294 int
1295 rte_cryptodev_session_event_mdata_set(uint8_t dev_id, void *sess,
1296  enum rte_crypto_op_type op_type,
1297  enum rte_crypto_op_sess_type sess_type,
1298  void *ev_mdata, uint16_t size);
1299 
1305  struct rte_cryptodev_sym_session *crypto_sess;
1306  struct rte_crypto_sym_xform *xform;
1307  struct rte_security_session *sec_sess;
1308 };
1309 
1336  void *qp, uint8_t *drv_ctx, struct rte_crypto_sym_vec *vec,
1337  union rte_crypto_sym_ofs ofs, void *user_data[], int *enqueue_status);
1338 
1361  void *qp, uint8_t *drv_ctx, struct rte_crypto_vec *data_vec,
1362  uint16_t n_data_vecs, union rte_crypto_sym_ofs ofs,
1363  struct rte_crypto_va_iova_ptr *iv,
1364  struct rte_crypto_va_iova_ptr *digest,
1365  struct rte_crypto_va_iova_ptr *aad_or_auth_iv,
1366  void *user_data);
1367 
/* Driver op: mark the last `n` raw data-path operations on queue pair
 * `qp` as done. NOTE(review): presumably commits/kicks the pending
 * operations to hardware — confirm against driver implementations.
 */
typedef int (*cryptodev_sym_raw_operation_done_t)(void *qp, uint8_t *drv_ctx,
	uint32_t n);

/* User callback: given the user_data of the first operation, return
 * how many operations should be dequeued in this burst.
 */
typedef uint32_t (*rte_cryptodev_raw_get_dequeue_count_t)(void *user_data);

/* User callback: invoked per dequeued operation with its user_data,
 * its position `index` within the burst, and its success status.
 */
typedef void (*rte_cryptodev_raw_post_dequeue_t)(void *user_data,
	uint32_t index, uint8_t is_op_success);

/* Driver op: dequeue a burst of raw data-path operations. Fills
 * out_user_data (one entry per op when is_user_data_array is set),
 * reports the number of successful ops via n_success and overall
 * status via dequeue_status; returns the number of ops dequeued.
 */
typedef uint32_t (*cryptodev_sym_raw_dequeue_burst_t)(void *qp,
	uint8_t *drv_ctx,
	rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count,
	uint32_t max_nb_to_dequeue,
	rte_cryptodev_raw_post_dequeue_t post_dequeue,
	void **out_user_data, uint8_t is_user_data_array,
	uint32_t *n_success, int *dequeue_status);

/* Driver op: dequeue a single raw data-path operation; returns its
 * user_data pointer, with status reported via dequeue_status and
 * op_status.
 */
typedef void * (*cryptodev_sym_raw_dequeue_t)(
	void *qp, uint8_t *drv_ctx, int *dequeue_status,
	enum rte_crypto_op_status *op_status);
1479 
1486  void *qp_data;
1487 
1489  cryptodev_sym_raw_enqueue_burst_t enqueue_burst;
1492  cryptodev_sym_raw_dequeue_burst_t dequeue_burst;
1494 
1495  /* Driver specific context data */
1496  __extension__ uint8_t drv_ctx_data[];
1497 };
1498 
1522 __rte_experimental
1523 int
1524 rte_cryptodev_configure_raw_dp_ctx(uint8_t dev_id, uint16_t qp_id,
1525  struct rte_crypto_raw_dp_ctx *ctx,
1526  enum rte_crypto_op_sess_type sess_type,
1527  union rte_cryptodev_session_ctx session_ctx,
1528  uint8_t is_update);
1529 
1554 __rte_experimental
1555 uint32_t
1557  struct rte_crypto_sym_vec *vec, union rte_crypto_sym_ofs ofs,
1558  void **user_data, int *enqueue_status);
1559 
1580 __rte_experimental
1581 static __rte_always_inline int
1583  struct rte_crypto_vec *data_vec, uint16_t n_data_vecs,
1584  union rte_crypto_sym_ofs ofs,
1585  struct rte_crypto_va_iova_ptr *iv,
1586  struct rte_crypto_va_iova_ptr *digest,
1587  struct rte_crypto_va_iova_ptr *aad_or_auth_iv,
1588  void *user_data)
1589 {
1590  return (*ctx->enqueue)(ctx->qp_data, ctx->drv_ctx_data, data_vec,
1591  n_data_vecs, ofs, iv, digest, aad_or_auth_iv, user_data);
1592 }
1593 
1604 __rte_experimental
1605 int
1607  uint32_t n);
1608 
1650 __rte_experimental
1651 uint32_t
1653  rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count,
1654  uint32_t max_nb_to_dequeue,
1655  rte_cryptodev_raw_post_dequeue_t post_dequeue,
1656  void **out_user_data, uint8_t is_user_data_array,
1657  uint32_t *n_success, int *dequeue_status);
1658 
1682 __rte_experimental
1683 static __rte_always_inline void *
1685  int *dequeue_status, enum rte_crypto_op_status *op_status)
1686 {
1687  return (*ctx->dequeue)(ctx->qp_data, ctx->drv_ctx_data, dequeue_status,
1688  op_status);
1689 }
1690 
1700 __rte_experimental
1701 int
1703  uint32_t n);
1704 
1741 __rte_experimental
1742 struct rte_cryptodev_cb *
1743 rte_cryptodev_add_enq_callback(uint8_t dev_id,
1744  uint16_t qp_id,
1746  void *cb_arg);
1747 
1770 __rte_experimental
1771 int rte_cryptodev_remove_enq_callback(uint8_t dev_id,
1772  uint16_t qp_id,
1773  struct rte_cryptodev_cb *cb);
1774 
1810 __rte_experimental
1811 struct rte_cryptodev_cb *
1812 rte_cryptodev_add_deq_callback(uint8_t dev_id,
1813  uint16_t qp_id,
1815  void *cb_arg);
1816 
1838 __rte_experimental
1839 int rte_cryptodev_remove_deq_callback(uint8_t dev_id,
1840  uint16_t qp_id,
1841  struct rte_cryptodev_cb *cb);
1842 
1843 #include <rte_cryptodev_core.h>
/**
 * Dequeue a burst of processed crypto operations from a queue pair.
 *
 * @param dev_id  Crypto device identifier (indexes rte_crypto_fp_ops).
 * @param qp_id   Queue pair index on the device.
 * @param ops     Array to be filled with pointers to dequeued ops.
 * @param nb_ops  Maximum number of operations to dequeue.
 * @return Number of operations dequeued by the driver, possibly
 *         adjusted by registered dequeue callbacks.
 */
static inline uint16_t
rte_cryptodev_dequeue_burst(uint8_t dev_id, uint16_t qp_id,
		struct rte_crypto_op **ops, uint16_t nb_ops)
{
	const struct rte_crypto_fp_ops *fp_ops;
	void *qp;

	rte_cryptodev_trace_dequeue_burst(dev_id, qp_id, (void **)ops, nb_ops);

	/* Fast path: no validation of dev_id/qp_id here — the caller
	 * must pass valid, configured identifiers.
	 */
	fp_ops = &rte_crypto_fp_ops[dev_id];
	qp = fp_ops->qp.data[qp_id];

	nb_ops = fp_ops->dequeue_burst(qp, ops, nb_ops);

#ifdef RTE_CRYPTO_CALLBACKS
	if (unlikely(fp_ops->qp.deq_cb != NULL)) {
		struct rte_cryptodev_cb_rcu *list;
		struct rte_cryptodev_cb *cb;

		/* __ATOMIC_RELEASE memory order was used when the
		 * call back was inserted into the list.
		 * Since there is a clear dependency between loading
		 * cb and cb->fn/cb->next, __ATOMIC_ACQUIRE memory order is
		 * not required.
		 */
		list = &fp_ops->qp.deq_cb[qp_id];
		/* Mark this thread online for the QSBR variable so the
		 * callback list cannot be reclaimed while we walk it.
		 */
		rte_rcu_qsbr_thread_online(list->qsbr, 0);
		cb = __atomic_load_n(&list->next, __ATOMIC_RELAXED);

		/* Each callback may filter/modify the burst and returns
		 * the operation count handed to the next callback.
		 */
		while (cb != NULL) {
			nb_ops = cb->fn(dev_id, qp_id, ops, nb_ops,
					cb->arg);
			cb = cb->next;
		};

		rte_rcu_qsbr_thread_offline(list->qsbr, 0);
	}
#endif
	return nb_ops;
}
1920 
/**
 * Enqueue a burst of crypto operations for processing on a queue pair.
 *
 * @param dev_id  Crypto device identifier (indexes rte_crypto_fp_ops).
 * @param qp_id   Queue pair index on the device.
 * @param ops     Array of pointers to the operations to enqueue.
 * @param nb_ops  Number of operations in @p ops.
 * @return Number of operations actually enqueued, as reported by the
 *         driver's enqueue_burst (callbacks may have reduced the burst
 *         before it reached the driver).
 */
static inline uint16_t
rte_cryptodev_enqueue_burst(uint8_t dev_id, uint16_t qp_id,
		struct rte_crypto_op **ops, uint16_t nb_ops)
{
	const struct rte_crypto_fp_ops *fp_ops;
	void *qp;

	/* Fast path: no validation of dev_id/qp_id here — the caller
	 * must pass valid, configured identifiers.
	 */
	fp_ops = &rte_crypto_fp_ops[dev_id];
	qp = fp_ops->qp.data[qp_id];
#ifdef RTE_CRYPTO_CALLBACKS
	/* Run registered enqueue callbacks before handing the burst to
	 * the driver; each may adjust the operation count.
	 */
	if (unlikely(fp_ops->qp.enq_cb != NULL)) {
		struct rte_cryptodev_cb_rcu *list;
		struct rte_cryptodev_cb *cb;

		/* __ATOMIC_RELEASE memory order was used when the
		 * call back was inserted into the list.
		 * Since there is a clear dependency between loading
		 * cb and cb->fn/cb->next, __ATOMIC_ACQUIRE memory order is
		 * not required.
		 */
		list = &fp_ops->qp.enq_cb[qp_id];
		/* QSBR online/offline brackets the list walk so callbacks
		 * cannot be freed underneath us.
		 */
		rte_rcu_qsbr_thread_online(list->qsbr, 0);
		cb = __atomic_load_n(&list->next, __ATOMIC_RELAXED);

		while (cb != NULL) {
			nb_ops = cb->fn(dev_id, qp_id, ops, nb_ops,
					cb->arg);
			cb = cb->next;
		};

		rte_rcu_qsbr_thread_offline(list->qsbr, 0);
	}
#endif

	rte_cryptodev_trace_enqueue_burst(dev_id, qp_id, (void **)ops, nb_ops);
	return fp_ops->enqueue_burst(qp, ops, nb_ops);
}
1989 
1990 
1991 
1992 #ifdef __cplusplus
1993 }
1994 #endif
1995 
1996 #endif /* _RTE_CRYPTODEV_H_ */
int rte_cryptodev_get_cipher_algo_enum(enum rte_crypto_cipher_algorithm *algo_enum, const char *algo_string)
int rte_cryptodev_get_auth_algo_enum(enum rte_crypto_auth_algorithm *algo_enum, const char *algo_string)
__rte_experimental int rte_cryptodev_asym_session_free(uint8_t dev_id, void *sess)
#define __rte_always_inline
Definition: rte_common.h:255
struct rte_cryptodev_cb * next
struct rte_mempool * mp_session
void rte_cryptodev_info_get(uint8_t dev_id, struct rte_cryptodev_info *dev_info)
__rte_experimental unsigned int rte_cryptodev_sym_get_existing_header_session_size(struct rte_cryptodev_sym_session *sess)
static __rte_experimental __rte_always_inline int rte_cryptodev_raw_enqueue(struct rte_crypto_raw_dp_ctx *ctx, struct rte_crypto_vec *data_vec, uint16_t n_data_vecs, union rte_crypto_sym_ofs ofs, struct rte_crypto_va_iova_ptr *iv, struct rte_crypto_va_iova_ptr *digest, struct rte_crypto_va_iova_ptr *aad_or_auth_iv, void *user_data)
const char * rte_cryptodev_get_feature_name(uint64_t flag)
__rte_experimental unsigned int rte_cryptodev_asym_get_private_session_size(uint8_t dev_id)
int rte_cryptodev_queue_pair_setup(uint8_t dev_id, uint16_t queue_pair_id, const struct rte_cryptodev_qp_conf *qp_conf, int socket_id)
__rte_experimental struct rte_mempool * rte_cryptodev_asym_session_pool_create(const char *name, uint32_t nb_elts, uint32_t cache_size, uint16_t user_data_size, int socket_id)
uint8_t rte_cryptodev_devices_get(const char *driver_name, uint8_t *devices, uint8_t nb_devices)
rte_crypto_asym_xform_type
static uint16_t rte_cryptodev_dequeue_burst(uint8_t dev_id, uint16_t qp_id, struct rte_crypto_op **ops, uint16_t nb_ops)
int rte_cryptodev_driver_id_get(const char *name)
int rte_cryptodev_sym_session_clear(uint8_t dev_id, struct rte_cryptodev_sym_session *sess)
__rte_experimental int rte_cryptodev_asym_session_set_user_data(void *sess, void *data, uint16_t size)
__rte_experimental int rte_cryptodev_asym_get_xform_enum(enum rte_crypto_asym_xform_type *xform_enum, const char *xform_string)
int rte_cryptodev_sym_session_init(uint8_t dev_id, struct rte_cryptodev_sym_session *sess, struct rte_crypto_sym_xform *xforms, struct rte_mempool *mempool)
__rte_experimental uint32_t rte_cryptodev_raw_enqueue_burst(struct rte_crypto_raw_dp_ctx *ctx, struct rte_crypto_sym_vec *vec, union rte_crypto_sym_ofs ofs, void **user_data, int *enqueue_status)
uint64_t dequeue_err_count
int rte_cryptodev_get_aead_algo_enum(enum rte_crypto_aead_algorithm *algo_enum, const char *algo_string)
uint32_t cache_size
Definition: rte_mempool.h:214
static __rte_always_inline void rte_rcu_qsbr_thread_offline(struct rte_rcu_qsbr *v, unsigned int thread_id)
Definition: rte_rcu_qsbr.h:352
char name[RTE_MEMPOOL_NAMESIZE]
Definition: rte_mempool.h:203
const struct rte_cryptodev_symmetric_capability * rte_cryptodev_sym_capability_get(uint8_t dev_id, const struct rte_cryptodev_sym_capability_idx *idx)
int rte_cryptodev_configure(uint8_t dev_id, struct rte_cryptodev_config *config)
int rte_cryptodev_callback_unregister(uint8_t dev_id, enum rte_cryptodev_event_type event, rte_cryptodev_cb_fn cb_fn, void *cb_arg)
const struct rte_cryptodev_capabilities * capabilities
uint32_t(* rte_cryptodev_raw_get_dequeue_count_t)(void *user_data)
rte_crypto_asym_op_type
__rte_experimental struct rte_cryptodev_cb * rte_cryptodev_add_enq_callback(uint8_t dev_id, uint16_t qp_id, rte_cryptodev_callback_fn cb_fn, void *cb_arg)
void rte_cryptodev_stop(uint8_t dev_id)
const char * driver_name
int rte_cryptodev_close(uint8_t dev_id)
int(* cryptodev_sym_raw_operation_done_t)(void *qp, uint8_t *drv_ctx, uint32_t n)
void(* rte_cryptodev_cb_fn)(uint8_t dev_id, enum rte_cryptodev_event_type event, void *cb_arg)
__rte_experimental int rte_cryptodev_sym_session_set_user_data(struct rte_cryptodev_sym_session *sess, void *data, uint16_t size)
int rte_cryptodev_sym_capability_check_aead(const struct rte_cryptodev_symmetric_capability *capability, uint16_t key_size, uint16_t digest_size, uint16_t aad_size, uint16_t iv_size)
#define unlikely(x)
__rte_experimental uint32_t rte_cryptodev_raw_dequeue_burst(struct rte_crypto_raw_dp_ctx *ctx, rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count, uint32_t max_nb_to_dequeue, rte_cryptodev_raw_post_dequeue_t post_dequeue, void **out_user_data, uint8_t is_user_data_array, uint32_t *n_success, int *dequeue_status)
unsigned int rte_cryptodev_sym_get_header_session_size(void)
const char * rte_cryptodev_driver_name_get(uint8_t driver_id)
int rte_cryptodev_callback_register(uint8_t dev_id, enum rte_cryptodev_event_type event, rte_cryptodev_cb_fn cb_fn, void *cb_arg)
__rte_experimental int rte_cryptodev_session_event_mdata_set(uint8_t dev_id, void *sess, enum rte_crypto_op_type op_type, enum rte_crypto_op_sess_type sess_type, void *ev_mdata, uint16_t size)
__rte_experimental int rte_cryptodev_asym_session_create(uint8_t dev_id, struct rte_crypto_asym_xform *xforms, struct rte_mempool *mp, void **session)
uint16_t min_mbuf_tailroom_req
__rte_experimental int rte_cryptodev_raw_dequeue_done(struct rte_crypto_raw_dp_ctx *ctx, uint32_t n)
__rte_experimental int rte_cryptodev_get_raw_dp_ctx_size(uint8_t dev_id)
struct rte_cryptodev_sym_session * rte_cryptodev_sym_session_create(struct rte_mempool *mempool)
int rte_cryptodev_sym_session_free(struct rte_cryptodev_sym_session *sess)
const char * rte_cryptodev_name_get(uint8_t dev_id)
__rte_experimental void * rte_cryptodev_sym_session_get_user_data(struct rte_cryptodev_sym_session *sess)
rte_crypto_op_type
Definition: rte_crypto.h:29
__rte_experimental const struct rte_cryptodev_asymmetric_xform_capability * rte_cryptodev_asym_capability_get(uint8_t dev_id, const struct rte_cryptodev_asym_capability_idx *idx)
rte_cryptodev_callback_fn fn
static uint16_t rte_cryptodev_enqueue_burst(uint8_t dev_id, uint16_t qp_id, struct rte_crypto_op **ops, uint16_t nb_ops)
int rte_cryptodev_sym_capability_check_cipher(const struct rte_cryptodev_symmetric_capability *capability, uint16_t key_size, uint16_t iv_size)
uint16_t(* rte_cryptodev_callback_fn)(uint16_t dev_id, uint16_t qp_id, struct rte_crypto_op **ops, uint16_t nb_ops, void *user_param)
int rte_cryptodev_stats_get(uint8_t dev_id, struct rte_cryptodev_stats *stats)
__rte_experimental int rte_cryptodev_raw_enqueue_done(struct rte_crypto_raw_dp_ctx *ctx, uint32_t n)
struct rte_device * device
uint32_t elt_size
Definition: rte_mempool.h:217
__rte_experimental int rte_cryptodev_remove_enq_callback(uint8_t dev_id, uint16_t qp_id, struct rte_cryptodev_cb *cb)
uint8_t rte_cryptodev_count(void)
void *(* cryptodev_sym_raw_dequeue_t)(void *qp, uint8_t *drv_ctx, int *dequeue_status, enum rte_crypto_op_status *op_status)
int rte_cryptodev_sym_capability_check_auth(const struct rte_cryptodev_symmetric_capability *capability, uint16_t key_size, uint16_t digest_size, uint16_t iv_size)
uint16_t min_mbuf_headroom_req
__rte_experimental unsigned int rte_cryptodev_asym_get_header_session_size(void)
unsigned int rte_cryptodev_sym_get_private_session_size(uint8_t dev_id)
__rte_experimental void * rte_cryptodev_asym_session_get_user_data(void *sess)
__rte_experimental struct rte_mempool * rte_cryptodev_sym_session_pool_create(const char *name, uint32_t nb_elts, uint32_t elt_size, uint32_t cache_size, uint16_t priv_size, int socket_id)
static __rte_always_inline void rte_rcu_qsbr_thread_online(struct rte_rcu_qsbr *v, unsigned int thread_id)
Definition: rte_rcu_qsbr.h:299
uint32_t(* cryptodev_sym_raw_enqueue_burst_t)(void *qp, uint8_t *drv_ctx, struct rte_crypto_sym_vec *vec, union rte_crypto_sym_ofs ofs, void *user_data[], int *enqueue_status)
#define RTE_STD_C11
Definition: rte_common.h:39
rte_crypto_auth_algorithm
__rte_experimental int rte_cryptodev_asym_xform_capability_check_modlen(const struct rte_cryptodev_asymmetric_xform_capability *capability, uint16_t modlen)
rte_crypto_sym_xform_type
__rte_experimental uint32_t rte_cryptodev_sym_cpu_crypto_process(uint8_t dev_id, struct rte_cryptodev_sym_session *sess, union rte_crypto_sym_ofs ofs, struct rte_crypto_sym_vec *vec)
struct rte_mempool * mp_session_private
RTE_TAILQ_HEAD(rte_cryptodev_cb_list, rte_cryptodev_callback)
int rte_cryptodev_start(uint8_t dev_id)
__rte_experimental int rte_cryptodev_configure_raw_dp_ctx(uint8_t dev_id, uint16_t qp_id, struct rte_crypto_raw_dp_ctx *ctx, enum rte_crypto_op_sess_type sess_type, union rte_cryptodev_session_ctx session_ctx, uint8_t is_update)
uint64_t enqueue_err_count
uint32_t(* cryptodev_sym_raw_dequeue_burst_t)(void *qp, uint8_t *drv_ctx, rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count, uint32_t max_nb_to_dequeue, rte_cryptodev_raw_post_dequeue_t post_dequeue, void **out_user_data, uint8_t is_user_data_array, uint32_t *n_success, int *dequeue_status)
rte_crypto_op_sess_type
Definition: rte_crypto.h:62
struct rte_crypto_param_range modlen
uint16_t rte_cryptodev_queue_pair_count(uint8_t dev_id)
__rte_experimental int rte_cryptodev_asym_xform_capability_check_optype(const struct rte_cryptodev_asymmetric_xform_capability *capability, enum rte_crypto_asym_op_type op_type)
int(* cryptodev_sym_raw_enqueue_t)(void *qp, uint8_t *drv_ctx, struct rte_crypto_vec *data_vec, uint16_t n_data_vecs, union rte_crypto_sym_ofs ofs, struct rte_crypto_va_iova_ptr *iv, struct rte_crypto_va_iova_ptr *digest, struct rte_crypto_va_iova_ptr *aad_or_auth_iv, void *user_data)
__rte_experimental int rte_cryptodev_get_qp_status(uint8_t dev_id, uint16_t queue_pair_id)
unsigned max_nb_sessions
rte_cryptodev_event_type
__rte_experimental int rte_cryptodev_remove_deq_callback(uint8_t dev_id, uint16_t qp_id, struct rte_cryptodev_cb *cb)
static __rte_experimental __rte_always_inline void * rte_cryptodev_raw_dequeue(struct rte_crypto_raw_dp_ctx *ctx, int *dequeue_status, enum rte_crypto_op_status *op_status)
void rte_cryptodev_stats_reset(uint8_t dev_id)
int rte_cryptodev_get_dev_id(const char *name)
unsigned max_nb_queue_pairs
uint8_t rte_cryptodev_device_count_by_driver(uint8_t driver_id)
__rte_experimental struct rte_cryptodev_cb * rte_cryptodev_add_deq_callback(uint8_t dev_id, uint16_t qp_id, rte_cryptodev_callback_fn cb_fn, void *cb_arg)
unsigned int rte_cryptodev_is_valid_dev(uint8_t dev_id)
void(* rte_cryptodev_raw_post_dequeue_t)(void *user_data, uint32_t index, uint8_t is_op_success)
rte_crypto_op_status
Definition: rte_crypto.h:39
rte_crypto_aead_algorithm
rte_crypto_cipher_algorithm