DPDK  24.11.0-rc0
rte_cryptodev.h
Go to the documentation of this file.
1 /* SPDX-License-Identifier: BSD-3-Clause
2  * Copyright(c) 2015-2020 Intel Corporation.
3  */
4 
5 #ifndef _RTE_CRYPTODEV_H_
6 #define _RTE_CRYPTODEV_H_
7 
17 #ifdef __cplusplus
18 extern "C" {
19 #endif
20 
21 #include <rte_compat.h>
22 #include "rte_kvargs.h"
23 #include "rte_crypto.h"
24 #include <rte_common.h>
25 #include <rte_rcu_qsbr.h>
26 
27 #include "rte_cryptodev_trace_fp.h"
28 
32 extern int rte_cryptodev_logtype;
33 #define RTE_LOGTYPE_CRYPTODEV rte_cryptodev_logtype
34 
35 /* Logging Macros */
36 #define CDEV_LOG_ERR(...) \
37  RTE_LOG_LINE_PREFIX(ERR, CRYPTODEV, \
38  "%s() line %u: ", __func__ RTE_LOG_COMMA __LINE__, __VA_ARGS__)
39 
40 #define CDEV_LOG_INFO(...) \
41  RTE_LOG_LINE(INFO, CRYPTODEV, "" __VA_ARGS__)
42 
43 #define CDEV_LOG_DEBUG(...) \
44  RTE_LOG_LINE_PREFIX(DEBUG, CRYPTODEV, \
45  "%s() line %u: ", __func__ RTE_LOG_COMMA __LINE__, __VA_ARGS__)
46 
47 #define CDEV_PMD_TRACE(...) \
48  RTE_LOG_LINE_PREFIX(DEBUG, CRYPTODEV, \
49  "[%s] %s: ", dev RTE_LOG_COMMA __func__, __VA_ARGS__)
50 
/* Return a pointer of type t at byte offset o inside crypto op c
 * (virtual-address arithmetic; see rte_crypto_op_ctophys_offset for IOVA).
 */
64 #define rte_crypto_op_ctod_offset(c, t, o) \
65  ((t)((char *)(c) + (o)))
66 
/* Return the IOVA (rte_iova_t) at byte offset o from crypto op c's
 * phys_addr field - the physical-address counterpart of ctod_offset.
 */
78 #define rte_crypto_op_ctophys_offset(c, o) \
79  (rte_iova_t)((c)->phys_addr + (o))
80 
85  uint16_t min;
86  uint16_t max;
87  uint16_t increment;
93 };
94 
100 #define RTE_CRYPTO_CIPHER_DATA_UNIT_LEN_512_BYTES RTE_BIT32(0)
101 #define RTE_CRYPTO_CIPHER_DATA_UNIT_LEN_4096_BYTES RTE_BIT32(1)
102 #define RTE_CRYPTO_CIPHER_DATA_UNIT_LEN_1_MEGABYTES RTE_BIT32(2)
103 
108  enum rte_crypto_sym_xform_type xform_type;
110  union {
111  struct {
114  uint16_t block_size;
116  struct rte_crypto_param_range key_size;
118  struct rte_crypto_param_range digest_size;
120  struct rte_crypto_param_range aad_size;
122  struct rte_crypto_param_range iv_size;
124  } auth;
126  struct {
129  uint16_t block_size;
131  struct rte_crypto_param_range key_size;
133  struct rte_crypto_param_range iv_size;
135  uint32_t dataunit_set;
141  } cipher;
143  struct {
146  uint16_t block_size;
148  struct rte_crypto_param_range key_size;
150  struct rte_crypto_param_range digest_size;
152  struct rte_crypto_param_range aad_size;
154  struct rte_crypto_param_range iv_size;
156  } aead;
157  };
158 };
159 
164  enum rte_crypto_asym_xform_type xform_type;
167  uint32_t op_types;
176  __extension__
177  union {
178  struct rte_crypto_param_range modlen;
183  uint8_t internal_rng;
188  };
189 
190  uint64_t hash_algos;
192 };
193 
199 };
200 
201 
207  union {
212  };
213 };
214 
217  enum rte_crypto_sym_xform_type type;
218  union {
219  enum rte_crypto_cipher_algorithm cipher;
220  enum rte_crypto_auth_algorithm auth;
221  enum rte_crypto_aead_algorithm aead;
222  } algo;
223 };
224 
232 };
233 
245 rte_cryptodev_sym_capability_get(uint8_t dev_id,
246  const struct rte_cryptodev_sym_capability_idx *idx);
247 
259 rte_cryptodev_asym_capability_get(uint8_t dev_id,
260  const struct rte_cryptodev_asym_capability_idx *idx);
261 
274 int
276  const struct rte_cryptodev_symmetric_capability *capability,
277  uint16_t key_size, uint16_t iv_size);
278 
292 int
294  const struct rte_cryptodev_symmetric_capability *capability,
295  uint16_t key_size, uint16_t digest_size, uint16_t iv_size);
296 
311 int
313  const struct rte_cryptodev_symmetric_capability *capability,
314  uint16_t key_size, uint16_t digest_size, uint16_t aad_size,
315  uint16_t iv_size);
316 
327 int
329  const struct rte_cryptodev_asymmetric_xform_capability *capability,
330  enum rte_crypto_asym_op_type op_type);
331 
342 int
344  const struct rte_cryptodev_asymmetric_xform_capability *capability,
345  uint16_t modlen);
346 
357 bool
359  const struct rte_cryptodev_asymmetric_xform_capability *capability,
360  enum rte_crypto_auth_algorithm hash);
361 
373 int
375  const char *algo_string);
376 
388 int
390  const char *algo_string);
391 
403 int
405  const char *algo_string);
406 
418 int
420  const char *xform_string);
421 
431 __rte_experimental
432 const char *
434 
444 __rte_experimental
445 const char *
447 
457 __rte_experimental
458 const char *
460 
470 __rte_experimental
471 const char *
473 
474 
476 #define RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST() \
477  { RTE_CRYPTO_OP_TYPE_UNDEFINED }
478 
479 
488 #define RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO (1ULL << 0)
489 
490 #define RTE_CRYPTODEV_FF_ASYMMETRIC_CRYPTO (1ULL << 1)
491 
492 #define RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING (1ULL << 2)
493 
494 #define RTE_CRYPTODEV_FF_CPU_SSE (1ULL << 3)
495 
496 #define RTE_CRYPTODEV_FF_CPU_AVX (1ULL << 4)
497 
498 #define RTE_CRYPTODEV_FF_CPU_AVX2 (1ULL << 5)
499 
500 #define RTE_CRYPTODEV_FF_CPU_AESNI (1ULL << 6)
501 
502 #define RTE_CRYPTODEV_FF_HW_ACCELERATED (1ULL << 7)
503 
506 #define RTE_CRYPTODEV_FF_CPU_AVX512 (1ULL << 8)
507 
508 #define RTE_CRYPTODEV_FF_IN_PLACE_SGL (1ULL << 9)
509 
512 #define RTE_CRYPTODEV_FF_OOP_SGL_IN_SGL_OUT (1ULL << 10)
513 
516 #define RTE_CRYPTODEV_FF_OOP_SGL_IN_LB_OUT (1ULL << 11)
517 
521 #define RTE_CRYPTODEV_FF_OOP_LB_IN_SGL_OUT (1ULL << 12)
522 
525 #define RTE_CRYPTODEV_FF_OOP_LB_IN_LB_OUT (1ULL << 13)
526 
527 #define RTE_CRYPTODEV_FF_CPU_NEON (1ULL << 14)
528 
529 #define RTE_CRYPTODEV_FF_CPU_ARM_CE (1ULL << 15)
530 
531 #define RTE_CRYPTODEV_FF_SECURITY (1ULL << 16)
532 
533 #define RTE_CRYPTODEV_FF_RSA_PRIV_OP_KEY_EXP (1ULL << 17)
534 
535 #define RTE_CRYPTODEV_FF_RSA_PRIV_OP_KEY_QT (1ULL << 18)
536 
537 #define RTE_CRYPTODEV_FF_DIGEST_ENCRYPTED (1ULL << 19)
538 
539 #define RTE_CRYPTODEV_FF_ASYM_SESSIONLESS (1ULL << 20)
540 
541 #define RTE_CRYPTODEV_FF_SYM_CPU_CRYPTO (1ULL << 21)
542 
543 #define RTE_CRYPTODEV_FF_SYM_SESSIONLESS (1ULL << 22)
544 
545 #define RTE_CRYPTODEV_FF_NON_BYTE_ALIGNED_DATA (1ULL << 23)
546 
547 #define RTE_CRYPTODEV_FF_SYM_RAW_DP (1ULL << 24)
548 
549 #define RTE_CRYPTODEV_FF_CIPHER_MULTIPLE_DATA_UNITS (1ULL << 25)
550 
551 #define RTE_CRYPTODEV_FF_CIPHER_WRAPPED_KEY (1ULL << 26)
552 
553 #define RTE_CRYPTODEV_FF_SECURITY_INNER_CSUM (1ULL << 27)
554 
555 #define RTE_CRYPTODEV_FF_SECURITY_RX_INJECT (1ULL << 28)
556 
566 const char *
567 rte_cryptodev_get_feature_name(uint64_t flag);
568 
570 /* Structure rte_cryptodev_info 8< */
572  const char *driver_name;
573  uint8_t driver_id;
574  struct rte_device *device;
576  uint64_t feature_flags;
591  struct {
592  unsigned max_nb_sessions;
597  } sym;
598 };
599 /* >8 End of structure rte_cryptodev_info. */
600 
601 #define RTE_CRYPTODEV_DETACHED (0)
602 #define RTE_CRYPTODEV_ATTACHED (1)
603 
609 };
610 
612 /* Structure rte_cryptodev_qp_conf 8<*/
614  uint32_t nb_descriptors;
617 };
618 /* >8 End of structure rte_cryptodev_qp_conf. */
619 
641 typedef uint16_t (*rte_cryptodev_callback_fn)(uint16_t dev_id, uint16_t qp_id,
642  struct rte_crypto_op **ops, uint16_t nb_ops, void *user_param);
643 
653 typedef void (*rte_cryptodev_cb_fn)(uint8_t dev_id,
654  enum rte_cryptodev_event_type event, void *cb_arg);
655 
656 
659  uint64_t enqueued_count;
661  uint64_t dequeued_count;
668 };
669 
670 #define RTE_CRYPTODEV_NAME_MAX_LEN (64)
671 
682 int
683 rte_cryptodev_get_dev_id(const char *name);
684 
695 const char *
696 rte_cryptodev_name_get(uint8_t dev_id);
697 
705 uint8_t
706 rte_cryptodev_count(void);
707 
716 uint8_t
717 rte_cryptodev_device_count_by_driver(uint8_t driver_id);
718 
730 uint8_t
731 rte_cryptodev_devices_get(const char *driver_name, uint8_t *devices,
732  uint8_t nb_devices);
733 /*
734  * Return the NUMA socket to which a device is connected
735  *
736  * @param dev_id
737  * The identifier of the device
738  * @return
739  * The NUMA socket id to which the device is connected or
740  * a default of zero if the socket could not be determined.
741  * -1 if the dev_id value is out of range.
742  */
743 int
744 rte_cryptodev_socket_id(uint8_t dev_id);
745 
747 /* Structure rte_cryptodev_config 8< */
749  int socket_id;
750  uint16_t nb_queue_pairs;
752  uint64_t ff_disable;
759 };
760 /* >8 End of structure rte_cryptodev_config. */
761 
776 int
777 rte_cryptodev_configure(uint8_t dev_id, struct rte_cryptodev_config *config);
778 
794 int
795 rte_cryptodev_start(uint8_t dev_id);
796 
803 void
804 rte_cryptodev_stop(uint8_t dev_id);
805 
815 int
816 rte_cryptodev_close(uint8_t dev_id);
817 
839 int
840 rte_cryptodev_queue_pair_setup(uint8_t dev_id, uint16_t queue_pair_id,
841  const struct rte_cryptodev_qp_conf *qp_conf, int socket_id);
842 
856 int
857 rte_cryptodev_get_qp_status(uint8_t dev_id, uint16_t queue_pair_id);
858 
866 uint16_t
867 rte_cryptodev_queue_pair_count(uint8_t dev_id);
868 
869 
881 int
882 rte_cryptodev_stats_get(uint8_t dev_id, struct rte_cryptodev_stats *stats);
883 
889 void
890 rte_cryptodev_stats_reset(uint8_t dev_id);
891 
905 void
906 rte_cryptodev_info_get(uint8_t dev_id, struct rte_cryptodev_info *dev_info);
907 
908 
922 int
923 rte_cryptodev_callback_register(uint8_t dev_id,
924  enum rte_cryptodev_event_type event,
925  rte_cryptodev_cb_fn cb_fn, void *cb_arg);
926 
940 int
941 rte_cryptodev_callback_unregister(uint8_t dev_id,
942  enum rte_cryptodev_event_type event,
943  rte_cryptodev_cb_fn cb_fn, void *cb_arg);
944 
960 __rte_experimental
961 int
962 rte_cryptodev_queue_pair_event_error_query(uint8_t dev_id, uint16_t qp_id);
963 
964 struct rte_cryptodev_callback;
965 
967 RTE_TAILQ_HEAD(rte_cryptodev_cb_list, rte_cryptodev_callback);
968 
974  RTE_ATOMIC(struct rte_cryptodev_cb *) next;
978  void *arg;
980 };
981 
986 struct rte_cryptodev_cb_rcu {
987  RTE_ATOMIC(struct rte_cryptodev_cb *) next;
989  struct rte_rcu_qsbr *qsbr;
991 };
992 
1002 void *
1003 rte_cryptodev_get_sec_ctx(uint8_t dev_id);
1004 
1034 struct rte_mempool *
1035 rte_cryptodev_sym_session_pool_create(const char *name, uint32_t nb_elts,
1036  uint32_t elt_size, uint32_t cache_size, uint16_t priv_size,
1037  int socket_id);
1038 
1039 
1060 struct rte_mempool *
1061 rte_cryptodev_asym_session_pool_create(const char *name, uint32_t nb_elts,
1062  uint32_t cache_size, uint16_t user_data_size, int socket_id);
1063 
1080 void *
1081 rte_cryptodev_sym_session_create(uint8_t dev_id,
1082  struct rte_crypto_sym_xform *xforms,
1083  struct rte_mempool *mp);
1101 int
1102 rte_cryptodev_asym_session_create(uint8_t dev_id,
1103  struct rte_crypto_asym_xform *xforms, struct rte_mempool *mp,
1104  void **session);
1105 
1118 int
1119 rte_cryptodev_sym_session_free(uint8_t dev_id,
1120  void *sess);
1121 
1133 int
1134 rte_cryptodev_asym_session_free(uint8_t dev_id, void *sess);
1135 
1142 unsigned int
1144 
1156 unsigned int
1158 
1169 unsigned int
1171 
1180 unsigned int
1181 rte_cryptodev_is_valid_dev(uint8_t dev_id);
1182 
1191 int rte_cryptodev_driver_id_get(const char *name);
1192 
1201 const char *rte_cryptodev_driver_name_get(uint8_t driver_id);
1202 
1215 int
1217  void *data,
1218  uint16_t size);
1219 
1220 #define CRYPTO_SESS_OPAQUE_DATA_OFF 0
1221 
1224 static inline uint64_t
1226 {
1227  return *((uint64_t *)sess + CRYPTO_SESS_OPAQUE_DATA_OFF);
1228 }
1229 
1233 static inline void
1234 rte_cryptodev_sym_session_opaque_data_set(void *sess, uint64_t opaque)
1235 {
1236  uint64_t *data;
1237  data = (((uint64_t *)sess) + CRYPTO_SESS_OPAQUE_DATA_OFF);
1238  *data = opaque;
1239 }
1240 
1251 void *
1253 
1267 int
1268 rte_cryptodev_asym_session_set_user_data(void *sess, void *data, uint16_t size);
1269 
1280 void *
1282 
1295 uint32_t
1297  void *sess, union rte_crypto_sym_ofs ofs,
1298  struct rte_crypto_sym_vec *vec);
1299 
1309 int
1310 rte_cryptodev_get_raw_dp_ctx_size(uint8_t dev_id);
1311 
1327 int
1328 rte_cryptodev_session_event_mdata_set(uint8_t dev_id, void *sess,
1329  enum rte_crypto_op_type op_type,
1330  enum rte_crypto_op_sess_type sess_type,
1331  void *ev_mdata, uint16_t size);
1332 
1337 union rte_cryptodev_session_ctx {void *crypto_sess;
1338  struct rte_crypto_sym_xform *xform;
1339  struct rte_security_session *sec_sess;
1340 };
1341 
1368  void *qp, uint8_t *drv_ctx, struct rte_crypto_sym_vec *vec,
1369  union rte_crypto_sym_ofs ofs, void *user_data[], int *enqueue_status);
1370 
1393  void *qp, uint8_t *drv_ctx, struct rte_crypto_vec *data_vec,
1394  uint16_t n_data_vecs, union rte_crypto_sym_ofs ofs,
1395  struct rte_crypto_va_iova_ptr *iv,
1396  struct rte_crypto_va_iova_ptr *digest,
1397  struct rte_crypto_va_iova_ptr *aad_or_auth_iv,
1398  void *user_data);
1399 
1411 typedef int (*cryptodev_sym_raw_operation_done_t)(void *qp, uint8_t *drv_ctx,
1412  uint32_t n);
1413 
1423 typedef uint32_t (*rte_cryptodev_raw_get_dequeue_count_t)(void *user_data);
1424 
1433 typedef void (*rte_cryptodev_raw_post_dequeue_t)(void *user_data,
1434  uint32_t index, uint8_t is_op_success);
1435 
1477 typedef uint32_t (*cryptodev_sym_raw_dequeue_burst_t)(void *qp,
1478  uint8_t *drv_ctx,
1479  rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count,
1480  uint32_t max_nb_to_dequeue,
1481  rte_cryptodev_raw_post_dequeue_t post_dequeue,
1482  void **out_user_data, uint8_t is_user_data_array,
1483  uint32_t *n_success, int *dequeue_status);
1484 
1508 typedef void * (*cryptodev_sym_raw_dequeue_t)(
1509  void *qp, uint8_t *drv_ctx, int *dequeue_status,
1510  enum rte_crypto_op_status *op_status);
1511 
1518  void *qp_data;
1519 
1521  cryptodev_sym_raw_enqueue_burst_t enqueue_burst;
1524  cryptodev_sym_raw_dequeue_burst_t dequeue_burst;
1526 
1527  /* Driver specific context data */
1528  uint8_t drv_ctx_data[];
1529 };
1530 
1552 int
1553 rte_cryptodev_configure_raw_dp_ctx(uint8_t dev_id, uint16_t qp_id,
1554  struct rte_crypto_raw_dp_ctx *ctx,
1555  enum rte_crypto_op_sess_type sess_type,
1556  union rte_cryptodev_session_ctx session_ctx,
1557  uint8_t is_update);
1558 
1583 uint32_t
1585  struct rte_crypto_sym_vec *vec, union rte_crypto_sym_ofs ofs,
1586  void **user_data, int *enqueue_status);
1587 
1608 __rte_experimental
1609 static __rte_always_inline int
1611  struct rte_crypto_vec *data_vec, uint16_t n_data_vecs,
1612  union rte_crypto_sym_ofs ofs,
1613  struct rte_crypto_va_iova_ptr *iv,
1614  struct rte_crypto_va_iova_ptr *digest,
1615  struct rte_crypto_va_iova_ptr *aad_or_auth_iv,
1616  void *user_data)
1617 {
1618  return (*ctx->enqueue)(ctx->qp_data, ctx->drv_ctx_data, data_vec,
1619  n_data_vecs, ofs, iv, digest, aad_or_auth_iv, user_data);
1620 }
1621 
1632 int
1634  uint32_t n);
1635 
1677 uint32_t
1679  rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count,
1680  uint32_t max_nb_to_dequeue,
1681  rte_cryptodev_raw_post_dequeue_t post_dequeue,
1682  void **out_user_data, uint8_t is_user_data_array,
1683  uint32_t *n_success, int *dequeue_status);
1684 
1708 __rte_experimental
1709 static __rte_always_inline void *
1711  int *dequeue_status, enum rte_crypto_op_status *op_status)
1712 {
1713  return (*ctx->dequeue)(ctx->qp_data, ctx->drv_ctx_data, dequeue_status,
1714  op_status);
1715 }
1716 
1726 int
1728  uint32_t n);
1729 
1765 struct rte_cryptodev_cb *
1766 rte_cryptodev_add_enq_callback(uint8_t dev_id,
1767  uint16_t qp_id,
1769  void *cb_arg);
1770 
1792 int rte_cryptodev_remove_enq_callback(uint8_t dev_id,
1793  uint16_t qp_id,
1794  struct rte_cryptodev_cb *cb);
1795 
1830 struct rte_cryptodev_cb *
1831 rte_cryptodev_add_deq_callback(uint8_t dev_id,
1832  uint16_t qp_id,
1834  void *cb_arg);
1835 
1857 int rte_cryptodev_remove_deq_callback(uint8_t dev_id,
1858  uint16_t qp_id,
1859  struct rte_cryptodev_cb *cb);
1860 
1861 #include <rte_cryptodev_core.h>
/**
 * Dequeue up to nb_ops processed crypto operations from queue pair qp_id of
 * device dev_id into ops[], then run any registered dequeue callbacks on the
 * batch under RCU protection.  Returns the number of operations placed in
 * ops[] (a callback may reduce this count).
 *
 * NOTE(review): fast-path contract - dev_id/qp_id are not validated here;
 * presumably the caller guarantees a configured device and queue pair.
 */
1898 static inline uint16_t
1899 rte_cryptodev_dequeue_burst(uint8_t dev_id, uint16_t qp_id,
1900  struct rte_crypto_op **ops, uint16_t nb_ops)
1901 {
1902  const struct rte_crypto_fp_ops *fp_ops;
1903  void *qp;
1904 
 /* Tracepoint fires before the actual dequeue, so nb_ops here is the
  * requested burst size, not the number actually dequeued.
  */
1905  rte_cryptodev_trace_dequeue_burst(dev_id, qp_id, (void **)ops, nb_ops);
1906 
1907  fp_ops = &rte_crypto_fp_ops[dev_id];
1908  qp = fp_ops->qp.data[qp_id];
1909 
1910  nb_ops = fp_ops->dequeue_burst(qp, ops, nb_ops);
1911 
1912 #ifdef RTE_CRYPTO_CALLBACKS
 /* Unsynchronized peek; the protected re-load happens below once the
  * reader thread is reported online to the QSBR state machine.
  */
1913  if (unlikely(fp_ops->qp.deq_cb[qp_id].next != NULL)) {
1914  struct rte_cryptodev_cb_rcu *list;
1915  struct rte_cryptodev_cb *cb;
1916 
1917  /* rte_memory_order_release memory order was used when the
1918  * callback was inserted into the list.
1919  * Since there is a clear dependency between loading
1920  * cb and cb->fn/cb->next, rte_memory_order_acquire memory order is
1921  * not required.
1922  */
1923  list = &fp_ops->qp.deq_cb[qp_id];
1924  rte_rcu_qsbr_thread_online(list->qsbr, 0);
1925  cb = rte_atomic_load_explicit(&list->next, rte_memory_order_relaxed);
1926 
 /* Each callback sees the (possibly already shrunk) op count and
  * returns the count the next callback should operate on.
  */
1927  while (cb != NULL) {
1928  nb_ops = cb->fn(dev_id, qp_id, ops, nb_ops,
1929  cb->arg);
1930  cb = cb->next;
1931  };
1932 
1933  rte_rcu_qsbr_thread_offline(list->qsbr, 0);
1934  }
1935 #endif
1936  return nb_ops;
1937 }
1938 
/**
 * Enqueue up to nb_ops crypto operations from ops[] onto queue pair qp_id of
 * device dev_id.  Registered enqueue callbacks run first, under RCU
 * protection, and may shrink the burst before it reaches the PMD.  Returns
 * the number of operations the PMD actually accepted.
 *
 * NOTE(review): fast-path contract - dev_id/qp_id are not validated here;
 * presumably the caller guarantees a configured device and queue pair.
 */
1970 static inline uint16_t
1971 rte_cryptodev_enqueue_burst(uint8_t dev_id, uint16_t qp_id,
1972  struct rte_crypto_op **ops, uint16_t nb_ops)
1973 {
1974  const struct rte_crypto_fp_ops *fp_ops;
1975  void *qp;
1976 
1977  fp_ops = &rte_crypto_fp_ops[dev_id];
1978  qp = fp_ops->qp.data[qp_id];
1979 #ifdef RTE_CRYPTO_CALLBACKS
 /* Unsynchronized peek; the protected re-load happens below once the
  * reader thread is reported online to the QSBR state machine.
  */
1980  if (unlikely(fp_ops->qp.enq_cb[qp_id].next != NULL)) {
1981  struct rte_cryptodev_cb_rcu *list;
1982  struct rte_cryptodev_cb *cb;
1983 
1984  /* rte_memory_order_release memory order was used when the
1985  * callback was inserted into the list.
1986  * Since there is a clear dependency between loading
1987  * cb and cb->fn/cb->next, rte_memory_order_acquire memory order is
1988  * not required.
1989  */
1990  list = &fp_ops->qp.enq_cb[qp_id];
1991  rte_rcu_qsbr_thread_online(list->qsbr, 0);
1992  cb = rte_atomic_load_explicit(&list->next, rte_memory_order_relaxed);
1993 
 /* Each callback may filter the burst; its return value becomes the
  * op count passed to the next callback and finally to the PMD.
  */
1994  while (cb != NULL) {
1995  nb_ops = cb->fn(dev_id, qp_id, ops, nb_ops,
1996  cb->arg);
1997  cb = cb->next;
1998  };
1999 
2000  rte_rcu_qsbr_thread_offline(list->qsbr, 0);
2001  }
2002 #endif
2003 
 /* Trace the post-callback burst size just before handing it to the PMD. */
2004  rte_cryptodev_trace_enqueue_burst(dev_id, qp_id, (void **)ops, nb_ops);
2005  return fp_ops->enqueue_burst(qp, ops, nb_ops);
2006 }
2007 
2032 __rte_experimental
2033 static inline int
2034 rte_cryptodev_qp_depth_used(uint8_t dev_id, uint16_t qp_id)
2035 {
2036  const struct rte_crypto_fp_ops *fp_ops;
2037  void *qp;
2038  int rc;
2039 
2040  fp_ops = &rte_crypto_fp_ops[dev_id];
2041  qp = fp_ops->qp.data[qp_id];
2042 
2043  if (fp_ops->qp_depth_used == NULL) {
2044  rc = -ENOTSUP;
2045  goto out;
2046  }
2047 
2048  rc = fp_ops->qp_depth_used(qp);
2049 out:
2050  rte_cryptodev_trace_qp_depth_used(dev_id, qp_id);
2051  return rc;
2052 }
2053 
2054 
2055 #ifdef __cplusplus
2056 }
2057 #endif
2058 
2059 #endif /* _RTE_CRYPTODEV_H_ */
int rte_cryptodev_get_cipher_algo_enum(enum rte_crypto_cipher_algorithm *algo_enum, const char *algo_string)
int rte_cryptodev_get_auth_algo_enum(enum rte_crypto_auth_algorithm *algo_enum, const char *algo_string)
struct rte_cryptodev_cb * rte_cryptodev_add_enq_callback(uint8_t dev_id, uint16_t qp_id, rte_cryptodev_callback_fn cb_fn, void *cb_arg)
#define __rte_always_inline
Definition: rte_common.h:370
struct rte_cryptodev_cb * next
struct rte_mempool * mp_session
unsigned int rte_cryptodev_asym_get_header_session_size(void)
void rte_cryptodev_info_get(uint8_t dev_id, struct rte_cryptodev_info *dev_info)
static __rte_experimental __rte_always_inline int rte_cryptodev_raw_enqueue(struct rte_crypto_raw_dp_ctx *ctx, struct rte_crypto_vec *data_vec, uint16_t n_data_vecs, union rte_crypto_sym_ofs ofs, struct rte_crypto_va_iova_ptr *iv, struct rte_crypto_va_iova_ptr *digest, struct rte_crypto_va_iova_ptr *aad_or_auth_iv, void *user_data)
struct rte_mempool * rte_cryptodev_sym_session_pool_create(const char *name, uint32_t nb_elts, uint32_t elt_size, uint32_t cache_size, uint16_t priv_size, int socket_id)
const char * rte_cryptodev_get_feature_name(uint64_t flag)
int rte_cryptodev_remove_enq_callback(uint8_t dev_id, uint16_t qp_id, struct rte_cryptodev_cb *cb)
int rte_cryptodev_configure_raw_dp_ctx(uint8_t dev_id, uint16_t qp_id, struct rte_crypto_raw_dp_ctx *ctx, enum rte_crypto_op_sess_type sess_type, union rte_cryptodev_session_ctx session_ctx, uint8_t is_update)
int rte_cryptodev_queue_pair_setup(uint8_t dev_id, uint16_t queue_pair_id, const struct rte_cryptodev_qp_conf *qp_conf, int socket_id)
bool rte_cryptodev_asym_xform_capability_check_hash(const struct rte_cryptodev_asymmetric_xform_capability *capability, enum rte_crypto_auth_algorithm hash)
const struct rte_cryptodev_asymmetric_xform_capability * rte_cryptodev_asym_capability_get(uint8_t dev_id, const struct rte_cryptodev_asym_capability_idx *idx)
int rte_cryptodev_get_raw_dp_ctx_size(uint8_t dev_id)
uint8_t rte_cryptodev_devices_get(const char *driver_name, uint8_t *devices, uint8_t nb_devices)
rte_crypto_asym_xform_type
static uint16_t rte_cryptodev_dequeue_burst(uint8_t dev_id, uint16_t qp_id, struct rte_crypto_op **ops, uint16_t nb_ops)
int rte_cryptodev_raw_enqueue_done(struct rte_crypto_raw_dp_ctx *ctx, uint32_t n)
int rte_cryptodev_driver_id_get(const char *name)
__rte_experimental const char * rte_cryptodev_get_aead_algo_string(enum rte_crypto_aead_algorithm algo_enum)
uint32_t rte_cryptodev_sym_cpu_crypto_process(uint8_t dev_id, void *sess, union rte_crypto_sym_ofs ofs, struct rte_crypto_sym_vec *vec)
int rte_cryptodev_asym_session_free(uint8_t dev_id, void *sess)
int rte_cryptodev_asym_xform_capability_check_optype(const struct rte_cryptodev_asymmetric_xform_capability *capability, enum rte_crypto_asym_op_type op_type)
int rte_cryptodev_session_event_mdata_set(uint8_t dev_id, void *sess, enum rte_crypto_op_type op_type, enum rte_crypto_op_sess_type sess_type, void *ev_mdata, uint16_t size)
unsigned int rte_cryptodev_asym_get_private_session_size(uint8_t dev_id)
uint64_t dequeue_err_count
int rte_cryptodev_get_aead_algo_enum(enum rte_crypto_aead_algorithm *algo_enum, const char *algo_string)
uint32_t cache_size
Definition: rte_mempool.h:241
static __rte_always_inline void rte_rcu_qsbr_thread_offline(struct rte_rcu_qsbr *v, unsigned int thread_id)
Definition: rte_rcu_qsbr.h:349
char name[RTE_MEMPOOL_NAMESIZE]
Definition: rte_mempool.h:231
const struct rte_cryptodev_symmetric_capability * rte_cryptodev_sym_capability_get(uint8_t dev_id, const struct rte_cryptodev_sym_capability_idx *idx)
int rte_cryptodev_configure(uint8_t dev_id, struct rte_cryptodev_config *config)
int rte_cryptodev_callback_unregister(uint8_t dev_id, enum rte_cryptodev_event_type event, rte_cryptodev_cb_fn cb_fn, void *cb_arg)
const struct rte_cryptodev_capabilities * capabilities
int rte_cryptodev_sym_session_free(uint8_t dev_id, void *sess)
uint32_t(* rte_cryptodev_raw_get_dequeue_count_t)(void *user_data)
rte_crypto_asym_op_type
uint32_t size
Definition: rte_mempool.h:240
void rte_cryptodev_stop(uint8_t dev_id)
const char * driver_name
int rte_cryptodev_close(uint8_t dev_id)
void * rte_cryptodev_asym_session_get_user_data(void *sess)
int(* cryptodev_sym_raw_operation_done_t)(void *qp, uint8_t *drv_ctx, uint32_t n)
int rte_cryptodev_asym_session_create(uint8_t dev_id, struct rte_crypto_asym_xform *xforms, struct rte_mempool *mp, void **session)
void(* rte_cryptodev_cb_fn)(uint8_t dev_id, enum rte_cryptodev_event_type event, void *cb_arg)
int rte_cryptodev_sym_capability_check_aead(const struct rte_cryptodev_symmetric_capability *capability, uint16_t key_size, uint16_t digest_size, uint16_t aad_size, uint16_t iv_size)
#define unlikely(x)
int rte_cryptodev_asym_xform_capability_check_modlen(const struct rte_cryptodev_asymmetric_xform_capability *capability, uint16_t modlen)
const char * rte_cryptodev_driver_name_get(uint8_t driver_id)
int rte_cryptodev_callback_register(uint8_t dev_id, enum rte_cryptodev_event_type event, rte_cryptodev_cb_fn cb_fn, void *cb_arg)
uint16_t min_mbuf_tailroom_req
const char * rte_cryptodev_name_get(uint8_t dev_id)
rte_crypto_op_type
Definition: rte_crypto.h:28
__rte_experimental const char * rte_cryptodev_asym_get_xform_string(enum rte_crypto_asym_xform_type xform_enum)
rte_cryptodev_callback_fn fn
static uint16_t rte_cryptodev_enqueue_burst(uint8_t dev_id, uint16_t qp_id, struct rte_crypto_op **ops, uint16_t nb_ops)
int rte_cryptodev_sym_capability_check_cipher(const struct rte_cryptodev_symmetric_capability *capability, uint16_t key_size, uint16_t iv_size)
uint16_t(* rte_cryptodev_callback_fn)(uint16_t dev_id, uint16_t qp_id, struct rte_crypto_op **ops, uint16_t nb_ops, void *user_param)
int rte_cryptodev_stats_get(uint8_t dev_id, struct rte_cryptodev_stats *stats)
static __rte_experimental int rte_cryptodev_qp_depth_used(uint8_t dev_id, uint16_t qp_id)
int rte_cryptodev_get_qp_status(uint8_t dev_id, uint16_t queue_pair_id)
struct rte_device * device
int rte_cryptodev_asym_get_xform_enum(enum rte_crypto_asym_xform_type *xform_enum, const char *xform_string)
uint32_t elt_size
Definition: rte_mempool.h:244
int rte_cryptodev_remove_deq_callback(uint8_t dev_id, uint16_t qp_id, struct rte_cryptodev_cb *cb)
uint8_t rte_cryptodev_count(void)
void *(* cryptodev_sym_raw_dequeue_t)(void *qp, uint8_t *drv_ctx, int *dequeue_status, enum rte_crypto_op_status *op_status)
int rte_cryptodev_sym_capability_check_auth(const struct rte_cryptodev_symmetric_capability *capability, uint16_t key_size, uint16_t digest_size, uint16_t iv_size)
int rte_cryptodev_asym_session_set_user_data(void *sess, void *data, uint16_t size)
uint16_t min_mbuf_headroom_req
uint32_t rte_cryptodev_raw_enqueue_burst(struct rte_crypto_raw_dp_ctx *ctx, struct rte_crypto_sym_vec *vec, union rte_crypto_sym_ofs ofs, void **user_data, int *enqueue_status)
uint32_t rte_cryptodev_raw_dequeue_burst(struct rte_crypto_raw_dp_ctx *ctx, rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count, uint32_t max_nb_to_dequeue, rte_cryptodev_raw_post_dequeue_t post_dequeue, void **out_user_data, uint8_t is_user_data_array, uint32_t *n_success, int *dequeue_status)
unsigned int rte_cryptodev_sym_get_private_session_size(uint8_t dev_id)
static __rte_always_inline void rte_rcu_qsbr_thread_online(struct rte_rcu_qsbr *v, unsigned int thread_id)
Definition: rte_rcu_qsbr.h:296
uint32_t(* cryptodev_sym_raw_enqueue_burst_t)(void *qp, uint8_t *drv_ctx, struct rte_crypto_sym_vec *vec, union rte_crypto_sym_ofs ofs, void *user_data[], int *enqueue_status)
rte_crypto_auth_algorithm
__rte_experimental const char * rte_cryptodev_get_cipher_algo_string(enum rte_crypto_cipher_algorithm algo_enum)
rte_crypto_sym_xform_type
int rte_cryptodev_sym_session_set_user_data(void *sess, void *data, uint16_t size)
RTE_TAILQ_HEAD(rte_cryptodev_cb_list, rte_cryptodev_callback)
int rte_cryptodev_start(uint8_t dev_id)
uint64_t enqueue_err_count
uint32_t(* cryptodev_sym_raw_dequeue_burst_t)(void *qp, uint8_t *drv_ctx, rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count, uint32_t max_nb_to_dequeue, rte_cryptodev_raw_post_dequeue_t post_dequeue, void **out_user_data, uint8_t is_user_data_array, uint32_t *n_success, int *dequeue_status)
rte_crypto_op_sess_type
Definition: rte_crypto.h:61
struct rte_crypto_param_range modlen
__rte_experimental int rte_cryptodev_queue_pair_event_error_query(uint8_t dev_id, uint16_t qp_id)
uint16_t rte_cryptodev_queue_pair_count(uint8_t dev_id)
struct rte_mempool * rte_cryptodev_asym_session_pool_create(const char *name, uint32_t nb_elts, uint32_t cache_size, uint16_t user_data_size, int socket_id)
int(* cryptodev_sym_raw_enqueue_t)(void *qp, uint8_t *drv_ctx, struct rte_crypto_vec *data_vec, uint16_t n_data_vecs, union rte_crypto_sym_ofs ofs, struct rte_crypto_va_iova_ptr *iv, struct rte_crypto_va_iova_ptr *digest, struct rte_crypto_va_iova_ptr *aad_or_auth_iv, void *user_data)
void * rte_cryptodev_sym_session_get_user_data(void *sess)
struct rte_cryptodev_cb * rte_cryptodev_add_deq_callback(uint8_t dev_id, uint16_t qp_id, rte_cryptodev_callback_fn cb_fn, void *cb_arg)
unsigned max_nb_sessions
rte_cryptodev_event_type
__rte_experimental const char * rte_cryptodev_get_auth_algo_string(enum rte_crypto_auth_algorithm algo_enum)
static uint64_t rte_cryptodev_sym_session_opaque_data_get(void *sess)
static __rte_experimental __rte_always_inline void * rte_cryptodev_raw_dequeue(struct rte_crypto_raw_dp_ctx *ctx, int *dequeue_status, enum rte_crypto_op_status *op_status)
void rte_cryptodev_stats_reset(uint8_t dev_id)
int rte_cryptodev_get_dev_id(const char *name)
void * rte_cryptodev_get_sec_ctx(uint8_t dev_id)
void * rte_cryptodev_sym_session_create(uint8_t dev_id, struct rte_crypto_sym_xform *xforms, struct rte_mempool *mp)
unsigned max_nb_queue_pairs
uint8_t rte_cryptodev_device_count_by_driver(uint8_t driver_id)
static void rte_cryptodev_sym_session_opaque_data_set(void *sess, uint64_t opaque)
unsigned int rte_cryptodev_is_valid_dev(uint8_t dev_id)
void(* rte_cryptodev_raw_post_dequeue_t)(void *user_data, uint32_t index, uint8_t is_op_success)
rte_crypto_op_status
Definition: rte_crypto.h:38
int rte_cryptodev_raw_dequeue_done(struct rte_crypto_raw_dp_ctx *ctx, uint32_t n)
rte_crypto_aead_algorithm
rte_crypto_cipher_algorithm