DPDK  23.07.0
rte_cryptodev.h
1 /* SPDX-License-Identifier: BSD-3-Clause
2  * Copyright(c) 2015-2020 Intel Corporation.
3  */
4 
5 #ifndef _RTE_CRYPTODEV_H_
6 #define _RTE_CRYPTODEV_H_
7 
17 #ifdef __cplusplus
18 extern "C" {
19 #endif
20 
21 #include <rte_compat.h>
22 #include "rte_kvargs.h"
23 #include "rte_crypto.h"
24 #include <rte_common.h>
25 #include <rte_rcu_qsbr.h>
26 
27 #include "rte_cryptodev_trace_fp.h"
28 
29 extern const char **rte_cyptodev_names;
30 
31 /* Logging Macros */
32 
33 #define CDEV_LOG_ERR(...) \
34  RTE_LOG(ERR, CRYPTODEV, \
35  RTE_FMT("%s() line %u: " RTE_FMT_HEAD(__VA_ARGS__,) "\n", \
36  __func__, __LINE__, RTE_FMT_TAIL(__VA_ARGS__,)))
37 
38 #define CDEV_LOG_INFO(...) \
39  RTE_LOG(INFO, CRYPTODEV, \
40  RTE_FMT(RTE_FMT_HEAD(__VA_ARGS__,) "\n", \
41  RTE_FMT_TAIL(__VA_ARGS__,)))
42 
43 #define CDEV_LOG_DEBUG(...) \
44  RTE_LOG(DEBUG, CRYPTODEV, \
45  RTE_FMT("%s() line %u: " RTE_FMT_HEAD(__VA_ARGS__,) "\n", \
46  __func__, __LINE__, RTE_FMT_TAIL(__VA_ARGS__,)))
47 
48 #define CDEV_PMD_TRACE(...) \
49  RTE_LOG(DEBUG, CRYPTODEV, \
50  RTE_FMT("[%s] %s: " RTE_FMT_HEAD(__VA_ARGS__,) "\n", \
51  dev, __func__, RTE_FMT_TAIL(__VA_ARGS__,)))
52 
66 #define rte_crypto_op_ctod_offset(c, t, o) \
67  ((t)((char *)(c) + (o)))
68 
80 #define rte_crypto_op_ctophys_offset(c, o) \
81  (rte_iova_t)((c)->phys_addr + (o))
82 
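/*
 * Example (illustrative sketch, not part of this header): applications
 * commonly place the per-operation IV right after the symmetric op inside
 * the rte_crypto_op and reach it with the two offset macros above. The
 * IV_OFFSET value below is an assumption made only for this sketch.
 */
#define IV_OFFSET (sizeof(struct rte_crypto_op) + sizeof(struct rte_crypto_sym_op))

static inline uint8_t *
example_op_iv_va(struct rte_crypto_op *op)
{
	/* virtual address of the IV stored inside the op */
	return rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET);
}

static inline rte_iova_t
example_op_iv_iova(struct rte_crypto_op *op)
{
	/* IO address of the same IV, e.g. for hardware descriptors */
	return rte_crypto_op_ctophys_offset(op, IV_OFFSET);
}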
86 struct rte_crypto_param_range {
87  uint16_t min;
88  uint16_t max;
89  uint16_t increment;
95 };
96 
102 #define RTE_CRYPTO_CIPHER_DATA_UNIT_LEN_512_BYTES RTE_BIT32(0)
103 #define RTE_CRYPTO_CIPHER_DATA_UNIT_LEN_4096_BYTES RTE_BIT32(1)
104 #define RTE_CRYPTO_CIPHER_DATA_UNIT_LEN_1_MEGABYTES RTE_BIT32(2)
105 
109 struct rte_cryptodev_symmetric_capability {
110  enum rte_crypto_sym_xform_type xform_type;
113  union {
114  struct {
115  enum rte_crypto_auth_algorithm algo;
117  uint16_t block_size;
119  struct rte_crypto_param_range key_size;
121  struct rte_crypto_param_range digest_size;
123  struct rte_crypto_param_range aad_size;
125  struct rte_crypto_param_range iv_size;
127  } auth;
129  struct {
130  enum rte_crypto_cipher_algorithm algo;
132  uint16_t block_size;
134  struct rte_crypto_param_range key_size;
136  struct rte_crypto_param_range iv_size;
138  uint32_t dataunit_set;
144  } cipher;
146  struct {
147  enum rte_crypto_aead_algorithm algo;
149  uint16_t block_size;
151  struct rte_crypto_param_range key_size;
153  struct rte_crypto_param_range digest_size;
155  struct rte_crypto_param_range aad_size;
157  struct rte_crypto_param_range iv_size;
159  } aead;
160  };
161 };
162 
166 struct rte_cryptodev_asymmetric_xform_capability {
167  enum rte_crypto_asym_xform_type xform_type;
170  uint32_t op_types;
179  __extension__
180  union {
181  struct rte_crypto_param_range modlen;
185  };
186 };
187 
191 struct rte_cryptodev_asymmetric_capability {
192  struct rte_cryptodev_asymmetric_xform_capability xform_capa;
193 };
194 
195 
197 struct rte_cryptodev_capabilities {
198  enum rte_crypto_op_type op;
202  union {
203  struct rte_cryptodev_symmetric_capability sym;
205  struct rte_cryptodev_asymmetric_capability asym;
207  };
208 };
209 
211 struct rte_cryptodev_sym_capability_idx {
212  enum rte_crypto_sym_xform_type type;
213  union {
214  enum rte_crypto_cipher_algorithm cipher;
215  enum rte_crypto_auth_algorithm auth;
216  enum rte_crypto_aead_algorithm aead;
217  } algo;
218 };
219 
224 struct rte_cryptodev_asym_capability_idx {
225  enum rte_crypto_asym_xform_type type;
227 };
228 
239 const struct rte_cryptodev_symmetric_capability *
240 rte_cryptodev_sym_capability_get(uint8_t dev_id,
241  const struct rte_cryptodev_sym_capability_idx *idx);
242 
253 __rte_experimental
254 const struct rte_cryptodev_asymmetric_xform_capability *
255 rte_cryptodev_asym_capability_get(uint8_t dev_id,
256  const struct rte_cryptodev_asym_capability_idx *idx);
257 
270 int
271 rte_cryptodev_sym_capability_check_cipher(
272  const struct rte_cryptodev_symmetric_capability *capability,
273  uint16_t key_size, uint16_t iv_size);
274 
288 int
289 rte_cryptodev_sym_capability_check_auth(
290  const struct rte_cryptodev_symmetric_capability *capability,
291  uint16_t key_size, uint16_t digest_size, uint16_t iv_size);
292 
307 int
308 rte_cryptodev_sym_capability_check_aead(
309  const struct rte_cryptodev_symmetric_capability *capability,
310  uint16_t key_size, uint16_t digest_size, uint16_t aad_size,
311  uint16_t iv_size);
312 
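/*
 * Example (illustrative sketch, not part of this header): querying the
 * symmetric capability of a device for AES-CBC and checking the key and IV
 * sizes an application wants to use. The 16-byte sizes are assumptions for
 * the sketch; a return of 0 from the check means the sizes are supported.
 */
static int
example_check_aes_cbc_16(uint8_t dev_id)
{
	const struct rte_cryptodev_symmetric_capability *cap;
	struct rte_cryptodev_sym_capability_idx idx = {
		.type = RTE_CRYPTO_SYM_XFORM_CIPHER,
		.algo.cipher = RTE_CRYPTO_CIPHER_AES_CBC,
	};

	cap = rte_cryptodev_sym_capability_get(dev_id, &idx);
	if (cap == NULL)
		return -1;	/* algorithm not advertised by this device */

	return rte_cryptodev_sym_capability_check_cipher(cap, 16, 16);
}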
323 __rte_experimental
324 int
325 rte_cryptodev_asym_xform_capability_check_optype(
326  const struct rte_cryptodev_asymmetric_xform_capability *capability,
327  enum rte_crypto_asym_op_type op_type);
328 
339 __rte_experimental
340 int
341 rte_cryptodev_asym_xform_capability_check_modlen(
342  const struct rte_cryptodev_asymmetric_xform_capability *capability,
343  uint16_t modlen);
344 
356 int
357 rte_cryptodev_get_cipher_algo_enum(enum rte_crypto_cipher_algorithm *algo_enum,
358  const char *algo_string);
359 
371 int
372 rte_cryptodev_get_auth_algo_enum(enum rte_crypto_auth_algorithm *algo_enum,
373  const char *algo_string);
374 
386 int
387 rte_cryptodev_get_aead_algo_enum(enum rte_crypto_aead_algorithm *algo_enum,
388  const char *algo_string);
389 
401 __rte_experimental
402 int
403 rte_cryptodev_asym_get_xform_enum(enum rte_crypto_asym_xform_type *xform_enum,
404  const char *xform_string);
405 
415 __rte_experimental
416 const char *
417 rte_cryptodev_get_cipher_algo_string(enum rte_crypto_cipher_algorithm algo_enum);
418 
428 __rte_experimental
429 const char *
430 rte_cryptodev_get_auth_algo_string(enum rte_crypto_auth_algorithm algo_enum);
431 
441 __rte_experimental
442 const char *
443 rte_cryptodev_get_aead_algo_string(enum rte_crypto_aead_algorithm algo_enum);
444 
454 __rte_experimental
455 const char *
456 rte_cryptodev_asym_get_xform_string(enum rte_crypto_asym_xform_type xform_enum);
457 
458 
460 #define RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST() \
461  { RTE_CRYPTO_OP_TYPE_UNDEFINED }
462 
463 
472 #define RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO (1ULL << 0)
473 
474 #define RTE_CRYPTODEV_FF_ASYMMETRIC_CRYPTO (1ULL << 1)
475 
476 #define RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING (1ULL << 2)
477 
478 #define RTE_CRYPTODEV_FF_CPU_SSE (1ULL << 3)
479 
480 #define RTE_CRYPTODEV_FF_CPU_AVX (1ULL << 4)
481 
482 #define RTE_CRYPTODEV_FF_CPU_AVX2 (1ULL << 5)
483 
484 #define RTE_CRYPTODEV_FF_CPU_AESNI (1ULL << 6)
485 
486 #define RTE_CRYPTODEV_FF_HW_ACCELERATED (1ULL << 7)
487 
490 #define RTE_CRYPTODEV_FF_CPU_AVX512 (1ULL << 8)
491 
492 #define RTE_CRYPTODEV_FF_IN_PLACE_SGL (1ULL << 9)
493 
496 #define RTE_CRYPTODEV_FF_OOP_SGL_IN_SGL_OUT (1ULL << 10)
497 
500 #define RTE_CRYPTODEV_FF_OOP_SGL_IN_LB_OUT (1ULL << 11)
501 
505 #define RTE_CRYPTODEV_FF_OOP_LB_IN_SGL_OUT (1ULL << 12)
506 
509 #define RTE_CRYPTODEV_FF_OOP_LB_IN_LB_OUT (1ULL << 13)
510 
511 #define RTE_CRYPTODEV_FF_CPU_NEON (1ULL << 14)
512 
513 #define RTE_CRYPTODEV_FF_CPU_ARM_CE (1ULL << 15)
514 
515 #define RTE_CRYPTODEV_FF_SECURITY (1ULL << 16)
516 
517 #define RTE_CRYPTODEV_FF_RSA_PRIV_OP_KEY_EXP (1ULL << 17)
518 
519 #define RTE_CRYPTODEV_FF_RSA_PRIV_OP_KEY_QT (1ULL << 18)
520 
521 #define RTE_CRYPTODEV_FF_DIGEST_ENCRYPTED (1ULL << 19)
522 
523 #define RTE_CRYPTODEV_FF_ASYM_SESSIONLESS (1ULL << 20)
524 
525 #define RTE_CRYPTODEV_FF_SYM_CPU_CRYPTO (1ULL << 21)
526 
527 #define RTE_CRYPTODEV_FF_SYM_SESSIONLESS (1ULL << 22)
528 
529 #define RTE_CRYPTODEV_FF_NON_BYTE_ALIGNED_DATA (1ULL << 23)
530 
531 #define RTE_CRYPTODEV_FF_SYM_RAW_DP (1ULL << 24)
532 
533 #define RTE_CRYPTODEV_FF_CIPHER_MULTIPLE_DATA_UNITS (1ULL << 25)
534 
535 #define RTE_CRYPTODEV_FF_CIPHER_WRAPPED_KEY (1ULL << 26)
536 
537 #define RTE_CRYPTODEV_FF_SECURITY_INNER_CSUM (1ULL << 27)
538 
548 const char *
549 rte_cryptodev_get_feature_name(uint64_t flag);
550 
552 /* Structure rte_cryptodev_info 8< */
553 struct rte_cryptodev_info {
554  const char *driver_name;
555  uint8_t driver_id;
556  struct rte_device *device;
558  uint64_t feature_flags;
561  const struct rte_cryptodev_capabilities *capabilities;
564  unsigned max_nb_queue_pairs;
567  uint16_t min_mbuf_headroom_req;
570  uint16_t min_mbuf_tailroom_req;
573  struct {
574  unsigned max_nb_sessions;
579  } sym;
580 };
581 /* >8 End of structure rte_cryptodev_info. */
582 
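/*
 * Example (illustrative sketch, not part of this header): translating the
 * feature flags advertised in rte_cryptodev_info into readable names.
 * Assumes <stdio.h> is available for printf().
 */
static void
example_dump_feature_flags(uint8_t dev_id)
{
	struct rte_cryptodev_info info;
	uint64_t flag;

	rte_cryptodev_info_get(dev_id, &info);
	for (flag = 1; flag != 0; flag <<= 1) {
		const char *name = rte_cryptodev_get_feature_name(flag);

		if ((info.feature_flags & flag) && name != NULL)
			printf("  %s\n", name);
	}
}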
583 #define RTE_CRYPTODEV_DETACHED (0)
584 #define RTE_CRYPTODEV_ATTACHED (1)
585 
587 enum rte_cryptodev_event_type {
588  RTE_CRYPTODEV_EVENT_UNKNOWN,
589  RTE_CRYPTODEV_EVENT_ERROR,
590  RTE_CRYPTODEV_EVENT_MAX
591 };
592 
594 /* Structure rte_cryptodev_qp_conf 8<*/
595 struct rte_cryptodev_qp_conf {
596  uint32_t nb_descriptors;
597  struct rte_mempool *mp_session;
599 };
600 /* >8 End of structure rte_cryptodev_qp_conf. */
601 
623 typedef uint16_t (*rte_cryptodev_callback_fn)(uint16_t dev_id, uint16_t qp_id,
624  struct rte_crypto_op **ops, uint16_t nb_ops, void *user_param);
625 
635 typedef void (*rte_cryptodev_cb_fn)(uint8_t dev_id,
636  enum rte_cryptodev_event_type event, void *cb_arg);
637 
638 
640 struct rte_cryptodev_stats {
641  uint64_t enqueued_count;
643  uint64_t dequeued_count;
646  uint64_t enqueue_err_count;
648  uint64_t dequeue_err_count;
650 };
651 
652 #define RTE_CRYPTODEV_NAME_MAX_LEN (64)
653 
664 int
665 rte_cryptodev_get_dev_id(const char *name);
666 
677 const char *
678 rte_cryptodev_name_get(uint8_t dev_id);
679 
687 uint8_t
688 rte_cryptodev_count(void);
689 
698 uint8_t
699 rte_cryptodev_device_count_by_driver(uint8_t driver_id);
700 
712 uint8_t
713 rte_cryptodev_devices_get(const char *driver_name, uint8_t *devices,
714  uint8_t nb_devices);
715 /*
716  * Return the NUMA socket to which a device is connected
717  *
718  * @param dev_id
719  * The identifier of the device
720  * @return
721  * The NUMA socket id to which the device is connected or
722  * a default of zero if the socket could not be determined.
723  * -1 if the dev_id value is out of range.
724  */
725 int
726 rte_cryptodev_socket_id(uint8_t dev_id);
727 
729 /* Structure rte_cryptodev_config 8< */
730 struct rte_cryptodev_config {
731  int socket_id;
732  uint16_t nb_queue_pairs;
734  uint64_t ff_disable;
741 };
742 /* >8 End of structure rte_cryptodev_config. */
743 
758 int
759 rte_cryptodev_configure(uint8_t dev_id, struct rte_cryptodev_config *config);
760 
776 int
777 rte_cryptodev_start(uint8_t dev_id);
778 
785 void
786 rte_cryptodev_stop(uint8_t dev_id);
787 
797 int
798 rte_cryptodev_close(uint8_t dev_id);
799 
821 int
822 rte_cryptodev_queue_pair_setup(uint8_t dev_id, uint16_t queue_pair_id,
823  const struct rte_cryptodev_qp_conf *qp_conf, int socket_id);
824 
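/*
 * Example (illustrative sketch, not part of this header): minimal bring-up
 * of a crypto device with a single queue pair. The descriptor count and the
 * externally created session mempool are assumptions for the sketch; error
 * handling is abbreviated.
 */
static int
example_dev_setup(uint8_t dev_id, struct rte_mempool *sess_pool)
{
	int socket_id = rte_cryptodev_socket_id(dev_id);
	struct rte_cryptodev_config conf = {
		.socket_id = socket_id,
		.nb_queue_pairs = 1,
		.ff_disable = 0,	/* keep all advertised features enabled */
	};
	struct rte_cryptodev_qp_conf qp_conf = {
		.nb_descriptors = 2048,	/* assumed ring depth */
		.mp_session = sess_pool,
	};

	if (rte_cryptodev_configure(dev_id, &conf) < 0)
		return -1;
	if (rte_cryptodev_queue_pair_setup(dev_id, 0, &qp_conf, socket_id) < 0)
		return -1;
	return rte_cryptodev_start(dev_id);
}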
838 __rte_experimental
839 int
840 rte_cryptodev_get_qp_status(uint8_t dev_id, uint16_t queue_pair_id);
841 
849 uint16_t
850 rte_cryptodev_queue_pair_count(uint8_t dev_id);
851 
852 
864 int
865 rte_cryptodev_stats_get(uint8_t dev_id, struct rte_cryptodev_stats *stats);
866 
872 void
873 rte_cryptodev_stats_reset(uint8_t dev_id);
874 
888 void
889 rte_cryptodev_info_get(uint8_t dev_id, struct rte_cryptodev_info *dev_info);
890 
891 
905 int
906 rte_cryptodev_callback_register(uint8_t dev_id,
907  enum rte_cryptodev_event_type event,
908  rte_cryptodev_cb_fn cb_fn, void *cb_arg);
909 
923 int
924 rte_cryptodev_callback_unregister(uint8_t dev_id,
925  enum rte_cryptodev_event_type event,
926  rte_cryptodev_cb_fn cb_fn, void *cb_arg);
927 
943 __rte_experimental
944 int
945 rte_cryptodev_queue_pair_event_error_query(uint8_t dev_id, uint16_t qp_id);
946 
947 struct rte_cryptodev_callback;
948 
950 RTE_TAILQ_HEAD(rte_cryptodev_cb_list, rte_cryptodev_callback);
951 
956 struct rte_cryptodev_cb {
957  struct rte_cryptodev_cb *next;
959  rte_cryptodev_callback_fn fn;
961  void *arg;
963 };
964 
969 struct rte_cryptodev_cb_rcu {
970  struct rte_cryptodev_cb *next;
972  struct rte_rcu_qsbr *qsbr;
974 };
975 
976 void *
977 rte_cryptodev_get_sec_ctx(uint8_t dev_id);
978 
1008 __rte_experimental
1009 struct rte_mempool *
1010 rte_cryptodev_sym_session_pool_create(const char *name, uint32_t nb_elts,
1011  uint32_t elt_size, uint32_t cache_size, uint16_t priv_size,
1012  int socket_id);
1013 
1014 
1035 __rte_experimental
1036 struct rte_mempool *
1037 rte_cryptodev_asym_session_pool_create(const char *name, uint32_t nb_elts,
1038  uint32_t cache_size, uint16_t user_data_size, int socket_id);
1039 
1056 void *
1057 rte_cryptodev_sym_session_create(uint8_t dev_id,
1058  struct rte_crypto_sym_xform *xforms,
1059  struct rte_mempool *mp);
1077 __rte_experimental
1078 int
1079 rte_cryptodev_asym_session_create(uint8_t dev_id,
1080  struct rte_crypto_asym_xform *xforms, struct rte_mempool *mp,
1081  void **session);
1082 
1095 int
1096 rte_cryptodev_sym_session_free(uint8_t dev_id,
1097  void *sess);
1098 
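/*
 * Example (illustrative sketch, not part of this header): creating a session
 * mempool sized from the device's private session size, then a symmetric
 * session for an AES-128-CBC encrypt transform. Pool sizing, the all-zero
 * key and the IV offset are assumptions for the sketch.
 */
static void *
example_create_aes_cbc_session(uint8_t dev_id)
{
	uint8_t key[16] = { 0 };	/* placeholder key material */
	struct rte_crypto_sym_xform xform = {
		.type = RTE_CRYPTO_SYM_XFORM_CIPHER,
		.cipher = {
			.op = RTE_CRYPTO_CIPHER_OP_ENCRYPT,
			.algo = RTE_CRYPTO_CIPHER_AES_CBC,
			.key = { .data = key, .length = sizeof(key) },
			.iv = {
				.offset = sizeof(struct rte_crypto_op) +
					sizeof(struct rte_crypto_sym_op),
				.length = 16,
			},
		},
	};
	struct rte_mempool *pool;

	pool = rte_cryptodev_sym_session_pool_create("example_sess_pool",
			1024, rte_cryptodev_sym_get_private_session_size(dev_id),
			32, 0, rte_cryptodev_socket_id(dev_id));
	if (pool == NULL)
		return NULL;

	return rte_cryptodev_sym_session_create(dev_id, &xform, pool);
}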
1110 __rte_experimental
1111 int
1112 rte_cryptodev_asym_session_free(uint8_t dev_id, void *sess);
1113 
1120 __rte_experimental
1121 unsigned int
1122 rte_cryptodev_asym_get_header_session_size(void);
1123 
1135 unsigned int
1136 rte_cryptodev_sym_get_private_session_size(uint8_t dev_id);
1137 
1148 __rte_experimental
1149 unsigned int
1150 rte_cryptodev_asym_get_private_session_size(uint8_t dev_id);
1151 
1160 unsigned int
1161 rte_cryptodev_is_valid_dev(uint8_t dev_id);
1162 
1171 int rte_cryptodev_driver_id_get(const char *name);
1172 
1181 const char *rte_cryptodev_driver_name_get(uint8_t driver_id);
1182 
1195 __rte_experimental
1196 int
1197 rte_cryptodev_sym_session_set_user_data(void *sess,
1198  void *data,
1199  uint16_t size);
1200 
1201 #define CRYPTO_SESS_OPAQUE_DATA_OFF 0
1202 
1205 static inline uint64_t
1206 rte_cryptodev_sym_session_opaque_data_get(void *sess)
1207 {
1208  return *((uint64_t *)sess + CRYPTO_SESS_OPAQUE_DATA_OFF);
1209 }
1210 
1214 static inline void
1215 rte_cryptodev_sym_session_opaque_data_set(void *sess, uint64_t opaque)
1216 {
1217  uint64_t *data;
1218  data = (((uint64_t *)sess) + CRYPTO_SESS_OPAQUE_DATA_OFF);
1219  *data = opaque;
1220 }
1221 
1232 __rte_experimental
1233 void *
1234 rte_cryptodev_sym_session_get_user_data(void *sess);
1235 
1249 __rte_experimental
1250 int
1251 rte_cryptodev_asym_session_set_user_data(void *sess, void *data, uint16_t size);
1252 
1263 __rte_experimental
1264 void *
1265 rte_cryptodev_asym_session_get_user_data(void *sess);
1266 
1279 __rte_experimental
1280 uint32_t
1281 rte_cryptodev_sym_cpu_crypto_process(uint8_t dev_id,
1282  void *sess, union rte_crypto_sym_ofs ofs,
1283  struct rte_crypto_sym_vec *vec);
1284 
1294 __rte_experimental
1295 int
1296 rte_cryptodev_get_raw_dp_ctx_size(uint8_t dev_id);
1297 
1313 __rte_experimental
1314 int
1315 rte_cryptodev_session_event_mdata_set(uint8_t dev_id, void *sess,
1316  enum rte_crypto_op_type op_type,
1317  enum rte_crypto_op_sess_type sess_type,
1318  void *ev_mdata, uint16_t size);
1319 
1324 union rte_cryptodev_session_ctx {void *crypto_sess;
1325  struct rte_crypto_sym_xform *xform;
1326  struct rte_security_session *sec_sess;
1327 };
1328 
1354 typedef uint32_t (*cryptodev_sym_raw_enqueue_burst_t)(
1355  void *qp, uint8_t *drv_ctx, struct rte_crypto_sym_vec *vec,
1356  union rte_crypto_sym_ofs ofs, void *user_data[], int *enqueue_status);
1357 
1379 typedef int (*cryptodev_sym_raw_enqueue_t)(
1380  void *qp, uint8_t *drv_ctx, struct rte_crypto_vec *data_vec,
1381  uint16_t n_data_vecs, union rte_crypto_sym_ofs ofs,
1382  struct rte_crypto_va_iova_ptr *iv,
1383  struct rte_crypto_va_iova_ptr *digest,
1384  struct rte_crypto_va_iova_ptr *aad_or_auth_iv,
1385  void *user_data);
1386 
1398 typedef int (*cryptodev_sym_raw_operation_done_t)(void *qp, uint8_t *drv_ctx,
1399  uint32_t n);
1400 
1410 typedef uint32_t (*rte_cryptodev_raw_get_dequeue_count_t)(void *user_data);
1411 
1420 typedef void (*rte_cryptodev_raw_post_dequeue_t)(void *user_data,
1421  uint32_t index, uint8_t is_op_success);
1422 
1464 typedef uint32_t (*cryptodev_sym_raw_dequeue_burst_t)(void *qp,
1465  uint8_t *drv_ctx,
1466  rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count,
1467  uint32_t max_nb_to_dequeue,
1468  rte_cryptodev_raw_post_dequeue_t post_dequeue,
1469  void **out_user_data, uint8_t is_user_data_array,
1470  uint32_t *n_success, int *dequeue_status);
1471 
1495 typedef void * (*cryptodev_sym_raw_dequeue_t)(
1496  void *qp, uint8_t *drv_ctx, int *dequeue_status,
1497  enum rte_crypto_op_status *op_status);
1498 
1504 struct rte_crypto_raw_dp_ctx {
1505  void *qp_data;
1506 
1507  cryptodev_sym_raw_enqueue_t enqueue;
1508  cryptodev_sym_raw_enqueue_burst_t enqueue_burst;
1509  cryptodev_sym_raw_operation_done_t enqueue_done;
1510  cryptodev_sym_raw_dequeue_t dequeue;
1511  cryptodev_sym_raw_dequeue_burst_t dequeue_burst;
1512  cryptodev_sym_raw_operation_done_t dequeue_done;
1513 
1514  /* Driver specific context data */
1515  __extension__ uint8_t drv_ctx_data[];
1516 };
1517 
1539 __rte_experimental
1540 int
1541 rte_cryptodev_configure_raw_dp_ctx(uint8_t dev_id, uint16_t qp_id,
1542  struct rte_crypto_raw_dp_ctx *ctx,
1543  enum rte_crypto_op_sess_type sess_type,
1544  union rte_cryptodev_session_ctx session_ctx,
1545  uint8_t is_update);
1546 
1571 __rte_experimental
1572 uint32_t
1573 rte_cryptodev_raw_enqueue_burst(struct rte_crypto_raw_dp_ctx *ctx,
1574  struct rte_crypto_sym_vec *vec, union rte_crypto_sym_ofs ofs,
1575  void **user_data, int *enqueue_status);
1576 
1597 __rte_experimental
1598 static __rte_always_inline int
1599 rte_cryptodev_raw_enqueue(struct rte_crypto_raw_dp_ctx *ctx,
1600  struct rte_crypto_vec *data_vec, uint16_t n_data_vecs,
1601  union rte_crypto_sym_ofs ofs,
1602  struct rte_crypto_va_iova_ptr *iv,
1603  struct rte_crypto_va_iova_ptr *digest,
1604  struct rte_crypto_va_iova_ptr *aad_or_auth_iv,
1605  void *user_data)
1606 {
1607  return (*ctx->enqueue)(ctx->qp_data, ctx->drv_ctx_data, data_vec,
1608  n_data_vecs, ofs, iv, digest, aad_or_auth_iv, user_data);
1609 }
1610 
1621 __rte_experimental
1622 int
1623 rte_cryptodev_raw_enqueue_done(struct rte_crypto_raw_dp_ctx *ctx,
1624  uint32_t n);
1625 
1667 __rte_experimental
1668 uint32_t
1669 rte_cryptodev_raw_dequeue_burst(struct rte_crypto_raw_dp_ctx *ctx,
1670  rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count,
1671  uint32_t max_nb_to_dequeue,
1672  rte_cryptodev_raw_post_dequeue_t post_dequeue,
1673  void **out_user_data, uint8_t is_user_data_array,
1674  uint32_t *n_success, int *dequeue_status);
1675 
1699 __rte_experimental
1700 static __rte_always_inline void *
1701 rte_cryptodev_raw_dequeue(struct rte_crypto_raw_dp_ctx *ctx,
1702  int *dequeue_status, enum rte_crypto_op_status *op_status)
1703 {
1704  return (*ctx->dequeue)(ctx->qp_data, ctx->drv_ctx_data, dequeue_status,
1705  op_status);
1706 }
1707 
1717 __rte_experimental
1718 int
1719 rte_cryptodev_raw_dequeue_done(struct rte_crypto_raw_dp_ctx *ctx,
1720  uint32_t n);
1721 
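/*
 * Example (illustrative sketch, not part of this header): allocating and
 * configuring a raw data-path context for one queue pair, bound to an
 * already created symmetric session. Assumes <rte_malloc.h> for
 * rte_zmalloc()/rte_free(); the session pointer is an assumption.
 */
static struct rte_crypto_raw_dp_ctx *
example_raw_dp_setup(uint8_t dev_id, uint16_t qp_id, void *sym_sess)
{
	union rte_cryptodev_session_ctx sess_ctx = { .crypto_sess = sym_sess };
	struct rte_crypto_raw_dp_ctx *ctx;
	int size;

	size = rte_cryptodev_get_raw_dp_ctx_size(dev_id);
	if (size < 0)
		return NULL;	/* raw data-path API not supported */

	ctx = rte_zmalloc(NULL, size, 0);
	if (ctx == NULL)
		return NULL;

	if (rte_cryptodev_configure_raw_dp_ctx(dev_id, qp_id, ctx,
			RTE_CRYPTO_OP_WITH_SESSION, sess_ctx, 0) < 0) {
		rte_free(ctx);
		return NULL;
	}
	return ctx;	/* use with rte_cryptodev_raw_enqueue()/_dequeue() */
}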
1758 __rte_experimental
1759 struct rte_cryptodev_cb *
1760 rte_cryptodev_add_enq_callback(uint8_t dev_id,
1761  uint16_t qp_id,
1762  rte_cryptodev_callback_fn cb_fn,
1763  void *cb_arg);
1764 
1787 __rte_experimental
1788 int rte_cryptodev_remove_enq_callback(uint8_t dev_id,
1789  uint16_t qp_id,
1790  struct rte_cryptodev_cb *cb);
1791 
1827 __rte_experimental
1828 struct rte_cryptodev_cb *
1829 rte_cryptodev_add_deq_callback(uint8_t dev_id,
1830  uint16_t qp_id,
1831  rte_cryptodev_callback_fn cb_fn,
1832  void *cb_arg);
1833 
1855 __rte_experimental
1856 int rte_cryptodev_remove_deq_callback(uint8_t dev_id,
1857  uint16_t qp_id,
1858  struct rte_cryptodev_cb *cb);
1859 
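/*
 * Example (illustrative sketch, not part of this header): an enqueue-path
 * user callback that simply counts operations on queue pair 0. Requires the
 * library to be built with RTE_CRYPTO_CALLBACKS; the global counter is an
 * assumption and is not thread-safe.
 */
static uint64_t example_enq_count;

static uint16_t
example_count_cb(uint16_t dev_id, uint16_t qp_id, struct rte_crypto_op **ops,
		uint16_t nb_ops, void *user_param)
{
	RTE_SET_USED(dev_id);
	RTE_SET_USED(qp_id);
	RTE_SET_USED(ops);
	RTE_SET_USED(user_param);

	example_enq_count += nb_ops;
	return nb_ops;	/* pass every operation through unchanged */
}

static struct rte_cryptodev_cb *
example_attach_enq_counter(uint8_t dev_id)
{
	return rte_cryptodev_add_enq_callback(dev_id, 0, example_count_cb, NULL);
}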
1860 #include <rte_cryptodev_core.h>
1897 static inline uint16_t
1898 rte_cryptodev_dequeue_burst(uint8_t dev_id, uint16_t qp_id,
1899  struct rte_crypto_op **ops, uint16_t nb_ops)
1900 {
1901  const struct rte_crypto_fp_ops *fp_ops;
1902  void *qp;
1903 
1904  rte_cryptodev_trace_dequeue_burst(dev_id, qp_id, (void **)ops, nb_ops);
1905 
1906  fp_ops = &rte_crypto_fp_ops[dev_id];
1907  qp = fp_ops->qp.data[qp_id];
1908 
1909  nb_ops = fp_ops->dequeue_burst(qp, ops, nb_ops);
1910 
1911 #ifdef RTE_CRYPTO_CALLBACKS
1912  if (unlikely(fp_ops->qp.deq_cb != NULL)) {
1913  struct rte_cryptodev_cb_rcu *list;
1914  struct rte_cryptodev_cb *cb;
1915 
1916  /* __ATOMIC_RELEASE memory order was used when the
1917  * call back was inserted into the list.
1918  * Since there is a clear dependency between loading
1919  * cb and cb->fn/cb->next, __ATOMIC_ACQUIRE memory order is
1920  * not required.
1921  */
1922  list = &fp_ops->qp.deq_cb[qp_id];
1923  rte_rcu_qsbr_thread_online(list->qsbr, 0);
1924  cb = __atomic_load_n(&list->next, __ATOMIC_RELAXED);
1925 
1926  while (cb != NULL) {
1927  nb_ops = cb->fn(dev_id, qp_id, ops, nb_ops,
1928  cb->arg);
1929  cb = cb->next;
1930  };
1931 
1932  rte_rcu_qsbr_thread_offline(list->qsbr, 0);
1933  }
1934 #endif
1935  return nb_ops;
1936 }
1937 
1969 static inline uint16_t
1970 rte_cryptodev_enqueue_burst(uint8_t dev_id, uint16_t qp_id,
1971  struct rte_crypto_op **ops, uint16_t nb_ops)
1972 {
1973  const struct rte_crypto_fp_ops *fp_ops;
1974  void *qp;
1975 
1976  fp_ops = &rte_crypto_fp_ops[dev_id];
1977  qp = fp_ops->qp.data[qp_id];
1978 #ifdef RTE_CRYPTO_CALLBACKS
1979  if (unlikely(fp_ops->qp.enq_cb != NULL)) {
1980  struct rte_cryptodev_cb_rcu *list;
1981  struct rte_cryptodev_cb *cb;
1982 
1983  /* __ATOMIC_RELEASE memory order was used when the
1984  * call back was inserted into the list.
1985  * Since there is a clear dependency between loading
1986  * cb and cb->fn/cb->next, __ATOMIC_ACQUIRE memory order is
1987  * not required.
1988  */
1989  list = &fp_ops->qp.enq_cb[qp_id];
1990  rte_rcu_qsbr_thread_online(list->qsbr, 0);
1991  cb = __atomic_load_n(&list->next, __ATOMIC_RELAXED);
1992 
1993  while (cb != NULL) {
1994  nb_ops = cb->fn(dev_id, qp_id, ops, nb_ops,
1995  cb->arg);
1996  cb = cb->next;
1997  };
1998 
1999  rte_rcu_qsbr_thread_offline(list->qsbr, 0);
2000  }
2001 #endif
2002 
2003  rte_cryptodev_trace_enqueue_burst(dev_id, qp_id, (void **)ops, nb_ops);
2004  return fp_ops->enqueue_burst(qp, ops, nb_ops);
2005 }
2006 
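/*
 * Example (illustrative sketch, not part of this header): the usual
 * enqueue-then-poll pattern on one queue pair. The burst size is an
 * assumption; a real application would inspect op->status for each
 * dequeued operation before freeing or reusing it.
 */
#define EXAMPLE_BURST_SIZE 32

static void
example_run_burst(uint8_t dev_id, uint16_t qp_id,
		struct rte_crypto_op **ops, uint16_t nb_ops)
{
	struct rte_crypto_op *deq[EXAMPLE_BURST_SIZE];
	uint16_t sent = 0, done = 0;

	while (sent < nb_ops)
		sent += rte_cryptodev_enqueue_burst(dev_id, qp_id,
				&ops[sent], nb_ops - sent);

	while (done < nb_ops) {
		uint16_t n = rte_cryptodev_dequeue_burst(dev_id, qp_id,
				deq, EXAMPLE_BURST_SIZE);

		/* check deq[0..n-1]->status here before releasing the ops */
		done += n;
	}
}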
2007 
2008 
2009 #ifdef __cplusplus
2010 }
2011 #endif
2012 
2013 #endif /* _RTE_CRYPTODEV_H_ */