DPDK  22.11.7-rc1
rte_cryptodev.h
Go to the documentation of this file.
1 /* SPDX-License-Identifier: BSD-3-Clause
2  * Copyright(c) 2015-2020 Intel Corporation.
3  */
4 
5 #ifndef _RTE_CRYPTODEV_H_
6 #define _RTE_CRYPTODEV_H_
7 
17 #ifdef __cplusplus
18 extern "C" {
19 #endif
20 
21 #include <rte_compat.h>
22 #include "rte_kvargs.h"
23 #include "rte_crypto.h"
24 #include <rte_common.h>
25 #include <rte_rcu_qsbr.h>
26 
27 #include "rte_cryptodev_trace_fp.h"
28 
29 /* Logging Macros */
30 
/* Log an error message, automatically prefixed with the calling
 * function's name and source line number.
 */
31 #define CDEV_LOG_ERR(...) \
32  RTE_LOG(ERR, CRYPTODEV, \
33  RTE_FMT("%s() line %u: " RTE_FMT_HEAD(__VA_ARGS__,) "\n", \
34  __func__, __LINE__, RTE_FMT_TAIL(__VA_ARGS__,)))
35 
/* Log an informational message; no function/line prefix is added. */
36 #define CDEV_LOG_INFO(...) \
37  RTE_LOG(INFO, CRYPTODEV, \
38  RTE_FMT(RTE_FMT_HEAD(__VA_ARGS__,) "\n", \
39  RTE_FMT_TAIL(__VA_ARGS__,)))
40 
/* Log a debug message, prefixed with the calling function's name and
 * source line number.
 */
41 #define CDEV_LOG_DEBUG(...) \
42  RTE_LOG(DEBUG, CRYPTODEV, \
43  RTE_FMT("%s() line %u: " RTE_FMT_HEAD(__VA_ARGS__,) "\n", \
44  __func__, __LINE__, RTE_FMT_TAIL(__VA_ARGS__,)))
45 
/* PMD trace helper.  NOTE(review): the expansion references a "dev"
 * identifier from the caller's scope, so this macro can only be used
 * where a variable named "dev" (formattable with %s) is in scope.
 */
46 #define CDEV_PMD_TRACE(...) \
47  RTE_LOG(DEBUG, CRYPTODEV, \
48  RTE_FMT("[%s] %s: " RTE_FMT_HEAD(__VA_ARGS__,) "\n", \
49  dev, __func__, RTE_FMT_TAIL(__VA_ARGS__,)))
50 
/* Return a pointer of type t located o bytes past the start of crypto
 * op c (plain virtual-address arithmetic via a char * cast).
 */
64 #define rte_crypto_op_ctod_offset(c, t, o) \
65  ((t)((char *)(c) + (o)))
66 
/* Return the IOVA address o bytes past the start of crypto op c, using
 * the op's stored phys_addr field rather than its virtual address.
 */
78 #define rte_crypto_op_ctophys_offset(c, o) \
79  (rte_iova_t)((c)->phys_addr + (o))
80 
85  uint16_t min;
86  uint16_t max;
87  uint16_t increment;
93 };
94 
100 #define RTE_CRYPTO_CIPHER_DATA_UNIT_LEN_512_BYTES RTE_BIT32(0)
101 #define RTE_CRYPTO_CIPHER_DATA_UNIT_LEN_4096_BYTES RTE_BIT32(1)
102 #define RTE_CRYPTO_CIPHER_DATA_UNIT_LEN_1_MEGABYTES RTE_BIT32(2)
103 
108  enum rte_crypto_sym_xform_type xform_type;
111  union {
112  struct {
115  uint16_t block_size;
117  struct rte_crypto_param_range key_size;
119  struct rte_crypto_param_range digest_size;
121  struct rte_crypto_param_range aad_size;
123  struct rte_crypto_param_range iv_size;
125  } auth;
127  struct {
130  uint16_t block_size;
132  struct rte_crypto_param_range key_size;
134  struct rte_crypto_param_range iv_size;
136  uint32_t dataunit_set;
142  } cipher;
144  struct {
147  uint16_t block_size;
149  struct rte_crypto_param_range key_size;
151  struct rte_crypto_param_range digest_size;
153  struct rte_crypto_param_range aad_size;
155  struct rte_crypto_param_range iv_size;
157  } aead;
158  };
159 };
160 
166  enum rte_crypto_asym_xform_type xform_type;
169  uint32_t op_types;
178  __extension__
179  union {
180  struct rte_crypto_param_range modlen;
184  };
185 };
186 
193 };
194 
195 
202  union {
207  };
208 };
209 
212  enum rte_crypto_sym_xform_type type;
213  union {
214  enum rte_crypto_cipher_algorithm cipher;
215  enum rte_crypto_auth_algorithm auth;
216  enum rte_crypto_aead_algorithm aead;
217  } algo;
218 };
219 
228 };
229 
241 rte_cryptodev_sym_capability_get(uint8_t dev_id,
242  const struct rte_cryptodev_sym_capability_idx *idx);
243 
254 __rte_experimental
256 rte_cryptodev_asym_capability_get(uint8_t dev_id,
257  const struct rte_cryptodev_asym_capability_idx *idx);
258 
271 int
273  const struct rte_cryptodev_symmetric_capability *capability,
274  uint16_t key_size, uint16_t iv_size);
275 
289 int
291  const struct rte_cryptodev_symmetric_capability *capability,
292  uint16_t key_size, uint16_t digest_size, uint16_t iv_size);
293 
308 int
310  const struct rte_cryptodev_symmetric_capability *capability,
311  uint16_t key_size, uint16_t digest_size, uint16_t aad_size,
312  uint16_t iv_size);
313 
324 __rte_experimental
325 int
327  const struct rte_cryptodev_asymmetric_xform_capability *capability,
328  enum rte_crypto_asym_op_type op_type);
329 
340 __rte_experimental
341 int
343  const struct rte_cryptodev_asymmetric_xform_capability *capability,
344  uint16_t modlen);
345 
357 int
359  const char *algo_string);
360 
372 int
374  const char *algo_string);
375 
387 int
389  const char *algo_string);
390 
402 __rte_experimental
403 int
405  const char *xform_string);
406 
407 
409 #define RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST() \
410  { RTE_CRYPTO_OP_TYPE_UNDEFINED }
411 
412 
421 #define RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO (1ULL << 0)
422 
423 #define RTE_CRYPTODEV_FF_ASYMMETRIC_CRYPTO (1ULL << 1)
424 
425 #define RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING (1ULL << 2)
426 
427 #define RTE_CRYPTODEV_FF_CPU_SSE (1ULL << 3)
428 
429 #define RTE_CRYPTODEV_FF_CPU_AVX (1ULL << 4)
430 
431 #define RTE_CRYPTODEV_FF_CPU_AVX2 (1ULL << 5)
432 
433 #define RTE_CRYPTODEV_FF_CPU_AESNI (1ULL << 6)
434 
435 #define RTE_CRYPTODEV_FF_HW_ACCELERATED (1ULL << 7)
436 
439 #define RTE_CRYPTODEV_FF_CPU_AVX512 (1ULL << 8)
440 
441 #define RTE_CRYPTODEV_FF_IN_PLACE_SGL (1ULL << 9)
442 
445 #define RTE_CRYPTODEV_FF_OOP_SGL_IN_SGL_OUT (1ULL << 10)
446 
449 #define RTE_CRYPTODEV_FF_OOP_SGL_IN_LB_OUT (1ULL << 11)
450 
454 #define RTE_CRYPTODEV_FF_OOP_LB_IN_SGL_OUT (1ULL << 12)
455 
458 #define RTE_CRYPTODEV_FF_OOP_LB_IN_LB_OUT (1ULL << 13)
459 
460 #define RTE_CRYPTODEV_FF_CPU_NEON (1ULL << 14)
461 
462 #define RTE_CRYPTODEV_FF_CPU_ARM_CE (1ULL << 15)
463 
464 #define RTE_CRYPTODEV_FF_SECURITY (1ULL << 16)
465 
466 #define RTE_CRYPTODEV_FF_RSA_PRIV_OP_KEY_EXP (1ULL << 17)
467 
468 #define RTE_CRYPTODEV_FF_RSA_PRIV_OP_KEY_QT (1ULL << 18)
469 
470 #define RTE_CRYPTODEV_FF_DIGEST_ENCRYPTED (1ULL << 19)
471 
472 #define RTE_CRYPTODEV_FF_ASYM_SESSIONLESS (1ULL << 20)
473 
474 #define RTE_CRYPTODEV_FF_SYM_CPU_CRYPTO (1ULL << 21)
475 
476 #define RTE_CRYPTODEV_FF_SYM_SESSIONLESS (1ULL << 22)
477 
478 #define RTE_CRYPTODEV_FF_NON_BYTE_ALIGNED_DATA (1ULL << 23)
479 
480 #define RTE_CRYPTODEV_FF_SYM_RAW_DP (1ULL << 24)
481 
482 #define RTE_CRYPTODEV_FF_CIPHER_MULTIPLE_DATA_UNITS (1ULL << 25)
483 
484 #define RTE_CRYPTODEV_FF_CIPHER_WRAPPED_KEY (1ULL << 26)
485 
486 #define RTE_CRYPTODEV_FF_SECURITY_INNER_CSUM (1ULL << 27)
487 
498 extern const char *
499 rte_cryptodev_get_feature_name(uint64_t flag);
500 
502 /* Structure rte_cryptodev_info 8< */
504  const char *driver_name;
505  uint8_t driver_id;
506  struct rte_device *device;
508  uint64_t feature_flags;
523  struct {
524  unsigned max_nb_sessions;
529  } sym;
530 };
531 /* >8 End of structure rte_cryptodev_info. */
532 
533 #define RTE_CRYPTODEV_DETACHED (0)
534 #define RTE_CRYPTODEV_ATTACHED (1)
535 
541 };
542 
544 /* Structure rte_cryptodev_qp_conf 8<*/
546  uint32_t nb_descriptors;
549 };
550 /* >8 End of structure rte_cryptodev_qp_conf. */
551 
573 typedef uint16_t (*rte_cryptodev_callback_fn)(uint16_t dev_id, uint16_t qp_id,
574  struct rte_crypto_op **ops, uint16_t nb_ops, void *user_param);
575 
585 typedef void (*rte_cryptodev_cb_fn)(uint8_t dev_id,
586  enum rte_cryptodev_event_type event, void *cb_arg);
587 
588 
591  uint64_t enqueued_count;
593  uint64_t dequeued_count;
600 };
601 
602 #define RTE_CRYPTODEV_NAME_MAX_LEN (64)
603 
614 extern int
615 rte_cryptodev_get_dev_id(const char *name);
616 
627 extern const char *
628 rte_cryptodev_name_get(uint8_t dev_id);
629 
637 extern uint8_t
638 rte_cryptodev_count(void);
639 
648 extern uint8_t
649 rte_cryptodev_device_count_by_driver(uint8_t driver_id);
650 
662 uint8_t
663 rte_cryptodev_devices_get(const char *driver_name, uint8_t *devices,
664  uint8_t nb_devices);
665 /*
666  * Return the NUMA socket to which a device is connected
667  *
668  * @param dev_id
669  * The identifier of the device
670  * @return
671  * The NUMA socket id to which the device is connected or
672  * a default of zero if the socket could not be determined.
673  * -1 is returned if the dev_id value is out of range.
674  */
675 extern int
676 rte_cryptodev_socket_id(uint8_t dev_id);
677 
679 /* Structure rte_cryptodev_config 8< */
681  int socket_id;
682  uint16_t nb_queue_pairs;
684  uint64_t ff_disable;
691 };
692 /* >8 End of structure rte_cryptodev_config. */
693 
708 extern int
709 rte_cryptodev_configure(uint8_t dev_id, struct rte_cryptodev_config *config);
710 
726 extern int
727 rte_cryptodev_start(uint8_t dev_id);
728 
735 extern void
736 rte_cryptodev_stop(uint8_t dev_id);
737 
747 extern int
748 rte_cryptodev_close(uint8_t dev_id);
749 
771 extern int
772 rte_cryptodev_queue_pair_setup(uint8_t dev_id, uint16_t queue_pair_id,
773  const struct rte_cryptodev_qp_conf *qp_conf, int socket_id);
774 
788 __rte_experimental
789 int
790 rte_cryptodev_get_qp_status(uint8_t dev_id, uint16_t queue_pair_id);
791 
799 extern uint16_t
800 rte_cryptodev_queue_pair_count(uint8_t dev_id);
801 
802 
814 extern int
815 rte_cryptodev_stats_get(uint8_t dev_id, struct rte_cryptodev_stats *stats);
816 
822 extern void
823 rte_cryptodev_stats_reset(uint8_t dev_id);
824 
838 extern void
839 rte_cryptodev_info_get(uint8_t dev_id, struct rte_cryptodev_info *dev_info);
840 
841 
855 extern int
856 rte_cryptodev_callback_register(uint8_t dev_id,
857  enum rte_cryptodev_event_type event,
858  rte_cryptodev_cb_fn cb_fn, void *cb_arg);
859 
873 extern int
874 rte_cryptodev_callback_unregister(uint8_t dev_id,
875  enum rte_cryptodev_event_type event,
876  rte_cryptodev_cb_fn cb_fn, void *cb_arg);
877 
878 struct rte_cryptodev_callback;
879 
881 RTE_TAILQ_HEAD(rte_cryptodev_cb_list, rte_cryptodev_callback);
882 
892  void *arg;
894 };
895 
/* Head of a per-queue-pair callback list whose traversal is guarded by
 * an RCU QSBR variable (see the enqueue/dequeue burst fast paths).
 */
900 struct rte_cryptodev_cb_rcu {
 /* First callback in the singly linked list; NULL when no callback
  * is registered.
  */
901  struct rte_cryptodev_cb *next;
 /* RCU QSBR variable used to synchronize list readers with
  * callback add/remove operations.
  */
903  struct rte_rcu_qsbr *qsbr;
905 };
906 
916 void *
917 rte_cryptodev_get_sec_ctx(uint8_t dev_id);
918 
948 __rte_experimental
949 struct rte_mempool *
950 rte_cryptodev_sym_session_pool_create(const char *name, uint32_t nb_elts,
951  uint32_t elt_size, uint32_t cache_size, uint16_t priv_size,
952  int socket_id);
953 
954 
975 __rte_experimental
976 struct rte_mempool *
977 rte_cryptodev_asym_session_pool_create(const char *name, uint32_t nb_elts,
978  uint32_t cache_size, uint16_t user_data_size, int socket_id);
979 
996 void *
997 rte_cryptodev_sym_session_create(uint8_t dev_id,
998  struct rte_crypto_sym_xform *xforms,
999  struct rte_mempool *mp);
1017 __rte_experimental
1018 int
1019 rte_cryptodev_asym_session_create(uint8_t dev_id,
1020  struct rte_crypto_asym_xform *xforms, struct rte_mempool *mp,
1021  void **session);
1022 
1035 int
1036 rte_cryptodev_sym_session_free(uint8_t dev_id,
1037  void *sess);
1038 
1050 __rte_experimental
1051 int
1052 rte_cryptodev_asym_session_free(uint8_t dev_id, void *sess);
1053 
1060 __rte_experimental
1061 unsigned int
1063 
1075 unsigned int
1077 
1088 __rte_experimental
1089 unsigned int
1091 
1100 unsigned int
1101 rte_cryptodev_is_valid_dev(uint8_t dev_id);
1102 
1111 int rte_cryptodev_driver_id_get(const char *name);
1112 
1121 const char *rte_cryptodev_driver_name_get(uint8_t driver_id);
1122 
1135 __rte_experimental
1136 int
1138  void *data,
1139  uint16_t size);
1140 
1141 #define CRYPTO_SESS_OPAQUE_DATA_OFF 0
1142 
1145 static inline uint64_t
1147 {
1148  return *((uint64_t *)sess + CRYPTO_SESS_OPAQUE_DATA_OFF);
1149 }
1150 
1154 static inline void
1155 rte_cryptodev_sym_session_opaque_data_set(void *sess, uint64_t opaque)
1156 {
1157  uint64_t *data;
1158  data = (((uint64_t *)sess) + CRYPTO_SESS_OPAQUE_DATA_OFF);
1159  *data = opaque;
1160 }
1161 
1172 __rte_experimental
1173 void *
1175 
1189 __rte_experimental
1190 int
1191 rte_cryptodev_asym_session_set_user_data(void *sess, void *data, uint16_t size);
1192 
1203 __rte_experimental
1204 void *
1206 
1219 __rte_experimental
1220 uint32_t
1222  void *sess, union rte_crypto_sym_ofs ofs,
1223  struct rte_crypto_sym_vec *vec);
1224 
1234 __rte_experimental
1235 int
1236 rte_cryptodev_get_raw_dp_ctx_size(uint8_t dev_id);
1237 
1253 __rte_experimental
1254 int
1255 rte_cryptodev_session_event_mdata_set(uint8_t dev_id, void *sess,
1256  enum rte_crypto_op_type op_type,
1257  enum rte_crypto_op_sess_type sess_type,
1258  void *ev_mdata, uint16_t size);
1259 
/**
 * Session context for the raw data-path API.  Exactly one member is
 * meaningful at a time; which one is determined by the
 * enum rte_crypto_op_sess_type value passed alongside it (see
 * rte_cryptodev_configure_raw_dp_ctx()).
 */
union rte_cryptodev_session_ctx {
	/* Symmetric crypto session handle. */
	void *crypto_sess;
	/* Session-less mode: transform chain supplied directly. */
	struct rte_crypto_sym_xform *xform;
	/* rte_security session handle. */
	struct rte_security_session *sec_sess;
};
1268 
1295  void *qp, uint8_t *drv_ctx, struct rte_crypto_sym_vec *vec,
1296  union rte_crypto_sym_ofs ofs, void *user_data[], int *enqueue_status);
1297 
1320  void *qp, uint8_t *drv_ctx, struct rte_crypto_vec *data_vec,
1321  uint16_t n_data_vecs, union rte_crypto_sym_ofs ofs,
1322  struct rte_crypto_va_iova_ptr *iv,
1323  struct rte_crypto_va_iova_ptr *digest,
1324  struct rte_crypto_va_iova_ptr *aad_or_auth_iv,
1325  void *user_data);
1326 
1338 typedef int (*cryptodev_sym_raw_operation_done_t)(void *qp, uint8_t *drv_ctx,
1339  uint32_t n);
1340 
1350 typedef uint32_t (*rte_cryptodev_raw_get_dequeue_count_t)(void *user_data);
1351 
1360 typedef void (*rte_cryptodev_raw_post_dequeue_t)(void *user_data,
1361  uint32_t index, uint8_t is_op_success);
1362 
1404 typedef uint32_t (*cryptodev_sym_raw_dequeue_burst_t)(void *qp,
1405  uint8_t *drv_ctx,
1406  rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count,
1407  uint32_t max_nb_to_dequeue,
1408  rte_cryptodev_raw_post_dequeue_t post_dequeue,
1409  void **out_user_data, uint8_t is_user_data_array,
1410  uint32_t *n_success, int *dequeue_status);
1411 
1435 typedef void * (*cryptodev_sym_raw_dequeue_t)(
1436  void *qp, uint8_t *drv_ctx, int *dequeue_status,
1437  enum rte_crypto_op_status *op_status);
1438 
1445  void *qp_data;
1446 
1448  cryptodev_sym_raw_enqueue_burst_t enqueue_burst;
1451  cryptodev_sym_raw_dequeue_burst_t dequeue_burst;
1453 
1454  /* Driver specific context data */
1455  __extension__ uint8_t drv_ctx_data[];
1456 };
1457 
1481 __rte_experimental
1482 int
1483 rte_cryptodev_configure_raw_dp_ctx(uint8_t dev_id, uint16_t qp_id,
1484  struct rte_crypto_raw_dp_ctx *ctx,
1485  enum rte_crypto_op_sess_type sess_type,
1486  union rte_cryptodev_session_ctx session_ctx,
1487  uint8_t is_update);
1488 
1513 __rte_experimental
1514 uint32_t
1516  struct rte_crypto_sym_vec *vec, union rte_crypto_sym_ofs ofs,
1517  void **user_data, int *enqueue_status);
1518 
1539 __rte_experimental
1540 static __rte_always_inline int
1542  struct rte_crypto_vec *data_vec, uint16_t n_data_vecs,
1543  union rte_crypto_sym_ofs ofs,
1544  struct rte_crypto_va_iova_ptr *iv,
1545  struct rte_crypto_va_iova_ptr *digest,
1546  struct rte_crypto_va_iova_ptr *aad_or_auth_iv,
1547  void *user_data)
1548 {
1549  return (*ctx->enqueue)(ctx->qp_data, ctx->drv_ctx_data, data_vec,
1550  n_data_vecs, ofs, iv, digest, aad_or_auth_iv, user_data);
1551 }
1552 
1563 __rte_experimental
1564 int
1566  uint32_t n);
1567 
1609 __rte_experimental
1610 uint32_t
1612  rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count,
1613  uint32_t max_nb_to_dequeue,
1614  rte_cryptodev_raw_post_dequeue_t post_dequeue,
1615  void **out_user_data, uint8_t is_user_data_array,
1616  uint32_t *n_success, int *dequeue_status);
1617 
1641 __rte_experimental
1642 static __rte_always_inline void *
1644  int *dequeue_status, enum rte_crypto_op_status *op_status)
1645 {
1646  return (*ctx->dequeue)(ctx->qp_data, ctx->drv_ctx_data, dequeue_status,
1647  op_status);
1648 }
1649 
1659 __rte_experimental
1660 int
1662  uint32_t n);
1663 
1700 __rte_experimental
1701 struct rte_cryptodev_cb *
1702 rte_cryptodev_add_enq_callback(uint8_t dev_id,
1703  uint16_t qp_id,
1705  void *cb_arg);
1706 
1729 __rte_experimental
1730 int rte_cryptodev_remove_enq_callback(uint8_t dev_id,
1731  uint16_t qp_id,
1732  struct rte_cryptodev_cb *cb);
1733 
1769 __rte_experimental
1770 struct rte_cryptodev_cb *
1771 rte_cryptodev_add_deq_callback(uint8_t dev_id,
1772  uint16_t qp_id,
1774  void *cb_arg);
1775 
1797 __rte_experimental
1798 int rte_cryptodev_remove_deq_callback(uint8_t dev_id,
1799  uint16_t qp_id,
1800  struct rte_cryptodev_cb *cb);
1801 
1802 #include <rte_cryptodev_core.h>
/**
 * Dequeue up to nb_ops processed crypto operations from queue pair
 * qp_id of device dev_id into ops[].  If dequeue callbacks are
 * registered for this queue pair, each is invoked in list order after
 * the PMD dequeue, and each may change the operation count reported
 * back to the caller.
 *
 * NOTE(review): dev_id and qp_id index rte_crypto_fp_ops directly with
 * no bounds checking here; callers must pass values configured via
 * rte_cryptodev_configure()/rte_cryptodev_queue_pair_setup().
 */
1839 static inline uint16_t
1840 rte_cryptodev_dequeue_burst(uint8_t dev_id, uint16_t qp_id,
1841  struct rte_crypto_op **ops, uint16_t nb_ops)
1842 {
1843  const struct rte_crypto_fp_ops *fp_ops;
1844  void *qp;
1845 
1846  rte_cryptodev_trace_dequeue_burst(dev_id, qp_id, (void **)ops, nb_ops);
1847 
 /* Look up the device's fast-path ops table and queue-pair data. */
1848  fp_ops = &rte_crypto_fp_ops[dev_id];
1849  qp = fp_ops->qp.data[qp_id];
1850 
1851  nb_ops = fp_ops->dequeue_burst(qp, ops, nb_ops);
1852 
1853 #ifdef RTE_CRYPTO_CALLBACKS
1854  if (unlikely(fp_ops->qp.deq_cb[qp_id].next != NULL)) {
1855  struct rte_cryptodev_cb_rcu *list;
1856  struct rte_cryptodev_cb *cb;
1857 
1858  /* __ATOMIC_RELEASE memory order was used when the
1859  * call back was inserted into the list.
1860  * Since there is a clear dependency between loading
1861  * cb and cb->fn/cb->next, __ATOMIC_ACQUIRE memory order is
1862  * not required.
1863  */
 /* Report this thread online to the QSBR variable for the
 * duration of the list walk so callback removal can
 * synchronize with readers.
 */
1864  list = &fp_ops->qp.deq_cb[qp_id];
1865  rte_rcu_qsbr_thread_online(list->qsbr, 0);
1866  cb = __atomic_load_n(&list->next, __ATOMIC_RELAXED);
1867 
 /* Each callback may shrink (or otherwise adjust) nb_ops. */
1868  while (cb != NULL) {
1869  nb_ops = cb->fn(dev_id, qp_id, ops, nb_ops,
1870  cb->arg);
1871  cb = cb->next;
1872  };
1873 
1874  rte_rcu_qsbr_thread_offline(list->qsbr, 0);
1875  }
1876 #endif
1877  return nb_ops;
1878 }
1879 
/**
 * Enqueue up to nb_ops crypto operations from ops[] onto queue pair
 * qp_id of device dev_id.  Registered enqueue callbacks run BEFORE the
 * PMD enqueue (unlike the dequeue path, where they run after) and may
 * change the number of operations actually submitted.
 *
 * NOTE(review): dev_id and qp_id index rte_crypto_fp_ops directly with
 * no bounds checking here; callers must pass previously configured
 * values.
 */
1911 static inline uint16_t
1912 rte_cryptodev_enqueue_burst(uint8_t dev_id, uint16_t qp_id,
1913  struct rte_crypto_op **ops, uint16_t nb_ops)
1914 {
1915  const struct rte_crypto_fp_ops *fp_ops;
1916  void *qp;
1917 
 /* Look up the device's fast-path ops table and queue-pair data. */
1918  fp_ops = &rte_crypto_fp_ops[dev_id];
1919  qp = fp_ops->qp.data[qp_id];
1920 #ifdef RTE_CRYPTO_CALLBACKS
1921  if (unlikely(fp_ops->qp.enq_cb[qp_id].next != NULL)) {
1922  struct rte_cryptodev_cb_rcu *list;
1923  struct rte_cryptodev_cb *cb;
1924 
1925  /* __ATOMIC_RELEASE memory order was used when the
1926  * call back was inserted into the list.
1927  * Since there is a clear dependency between loading
1928  * cb and cb->fn/cb->next, __ATOMIC_ACQUIRE memory order is
1929  * not required.
1930  */
 /* Report this thread online to the QSBR variable for the
 * duration of the list walk so callback removal can
 * synchronize with readers.
 */
1931  list = &fp_ops->qp.enq_cb[qp_id];
1932  rte_rcu_qsbr_thread_online(list->qsbr, 0);
1933  cb = __atomic_load_n(&list->next, __ATOMIC_RELAXED);
1934 
 /* Each callback may shrink (or otherwise adjust) nb_ops. */
1935  while (cb != NULL) {
1936  nb_ops = cb->fn(dev_id, qp_id, ops, nb_ops,
1937  cb->arg);
1938  cb = cb->next;
1939  };
1940 
1941  rte_rcu_qsbr_thread_offline(list->qsbr, 0);
1942  }
1943 #endif
1944 
1945  rte_cryptodev_trace_enqueue_burst(dev_id, qp_id, (void **)ops, nb_ops);
1946  return fp_ops->enqueue_burst(qp, ops, nb_ops);
1947 }
1948 
1949 
1950 
1951 #ifdef __cplusplus
1952 }
1953 #endif
1954 
1955 #endif /* _RTE_CRYPTODEV_H_ */
int rte_cryptodev_get_cipher_algo_enum(enum rte_crypto_cipher_algorithm *algo_enum, const char *algo_string)
int rte_cryptodev_get_auth_algo_enum(enum rte_crypto_auth_algorithm *algo_enum, const char *algo_string)
__rte_experimental int rte_cryptodev_asym_session_free(uint8_t dev_id, void *sess)
#define __rte_always_inline
Definition: rte_common.h:255
struct rte_cryptodev_cb * next
struct rte_mempool * mp_session
void rte_cryptodev_info_get(uint8_t dev_id, struct rte_cryptodev_info *dev_info)
static __rte_experimental __rte_always_inline int rte_cryptodev_raw_enqueue(struct rte_crypto_raw_dp_ctx *ctx, struct rte_crypto_vec *data_vec, uint16_t n_data_vecs, union rte_crypto_sym_ofs ofs, struct rte_crypto_va_iova_ptr *iv, struct rte_crypto_va_iova_ptr *digest, struct rte_crypto_va_iova_ptr *aad_or_auth_iv, void *user_data)
const char * rte_cryptodev_get_feature_name(uint64_t flag)
__rte_experimental unsigned int rte_cryptodev_asym_get_private_session_size(uint8_t dev_id)
int rte_cryptodev_queue_pair_setup(uint8_t dev_id, uint16_t queue_pair_id, const struct rte_cryptodev_qp_conf *qp_conf, int socket_id)
__rte_experimental struct rte_mempool * rte_cryptodev_asym_session_pool_create(const char *name, uint32_t nb_elts, uint32_t cache_size, uint16_t user_data_size, int socket_id)
uint8_t rte_cryptodev_devices_get(const char *driver_name, uint8_t *devices, uint8_t nb_devices)
rte_crypto_asym_xform_type
static uint16_t rte_cryptodev_dequeue_burst(uint8_t dev_id, uint16_t qp_id, struct rte_crypto_op **ops, uint16_t nb_ops)
__rte_experimental void * rte_cryptodev_sym_session_get_user_data(void *sess)
int rte_cryptodev_driver_id_get(const char *name)
__rte_experimental int rte_cryptodev_asym_session_set_user_data(void *sess, void *data, uint16_t size)
__rte_experimental int rte_cryptodev_asym_get_xform_enum(enum rte_crypto_asym_xform_type *xform_enum, const char *xform_string)
__rte_experimental uint32_t rte_cryptodev_raw_enqueue_burst(struct rte_crypto_raw_dp_ctx *ctx, struct rte_crypto_sym_vec *vec, union rte_crypto_sym_ofs ofs, void **user_data, int *enqueue_status)
uint64_t dequeue_err_count
int rte_cryptodev_get_aead_algo_enum(enum rte_crypto_aead_algorithm *algo_enum, const char *algo_string)
uint32_t cache_size
Definition: rte_mempool.h:231
static __rte_always_inline void rte_rcu_qsbr_thread_offline(struct rte_rcu_qsbr *v, unsigned int thread_id)
Definition: rte_rcu_qsbr.h:354
char name[RTE_MEMPOOL_NAMESIZE]
Definition: rte_mempool.h:220
const struct rte_cryptodev_symmetric_capability * rte_cryptodev_sym_capability_get(uint8_t dev_id, const struct rte_cryptodev_sym_capability_idx *idx)
int rte_cryptodev_configure(uint8_t dev_id, struct rte_cryptodev_config *config)
int rte_cryptodev_callback_unregister(uint8_t dev_id, enum rte_cryptodev_event_type event, rte_cryptodev_cb_fn cb_fn, void *cb_arg)
const struct rte_cryptodev_capabilities * capabilities
int rte_cryptodev_sym_session_free(uint8_t dev_id, void *sess)
uint32_t(* rte_cryptodev_raw_get_dequeue_count_t)(void *user_data)
rte_crypto_asym_op_type
__rte_experimental struct rte_cryptodev_cb * rte_cryptodev_add_enq_callback(uint8_t dev_id, uint16_t qp_id, rte_cryptodev_callback_fn cb_fn, void *cb_arg)
uint32_t size
Definition: rte_mempool.h:230
void rte_cryptodev_stop(uint8_t dev_id)
const char * driver_name
int rte_cryptodev_close(uint8_t dev_id)
int(* cryptodev_sym_raw_operation_done_t)(void *qp, uint8_t *drv_ctx, uint32_t n)
void(* rte_cryptodev_cb_fn)(uint8_t dev_id, enum rte_cryptodev_event_type event, void *cb_arg)
int rte_cryptodev_sym_capability_check_aead(const struct rte_cryptodev_symmetric_capability *capability, uint16_t key_size, uint16_t digest_size, uint16_t aad_size, uint16_t iv_size)
#define unlikely(x)
__rte_experimental uint32_t rte_cryptodev_raw_dequeue_burst(struct rte_crypto_raw_dp_ctx *ctx, rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count, uint32_t max_nb_to_dequeue, rte_cryptodev_raw_post_dequeue_t post_dequeue, void **out_user_data, uint8_t is_user_data_array, uint32_t *n_success, int *dequeue_status)
const char * rte_cryptodev_driver_name_get(uint8_t driver_id)
int rte_cryptodev_callback_register(uint8_t dev_id, enum rte_cryptodev_event_type event, rte_cryptodev_cb_fn cb_fn, void *cb_arg)
__rte_experimental int rte_cryptodev_session_event_mdata_set(uint8_t dev_id, void *sess, enum rte_crypto_op_type op_type, enum rte_crypto_op_sess_type sess_type, void *ev_mdata, uint16_t size)
__rte_experimental int rte_cryptodev_asym_session_create(uint8_t dev_id, struct rte_crypto_asym_xform *xforms, struct rte_mempool *mp, void **session)
uint16_t min_mbuf_tailroom_req
__rte_experimental int rte_cryptodev_raw_dequeue_done(struct rte_crypto_raw_dp_ctx *ctx, uint32_t n)
__rte_experimental int rte_cryptodev_get_raw_dp_ctx_size(uint8_t dev_id)
const char * rte_cryptodev_name_get(uint8_t dev_id)
__rte_experimental uint32_t rte_cryptodev_sym_cpu_crypto_process(uint8_t dev_id, void *sess, union rte_crypto_sym_ofs ofs, struct rte_crypto_sym_vec *vec)
rte_crypto_op_type
Definition: rte_crypto.h:29
__rte_experimental const struct rte_cryptodev_asymmetric_xform_capability * rte_cryptodev_asym_capability_get(uint8_t dev_id, const struct rte_cryptodev_asym_capability_idx *idx)
__rte_experimental int rte_cryptodev_sym_session_set_user_data(void *sess, void *data, uint16_t size)
rte_cryptodev_callback_fn fn
static uint16_t rte_cryptodev_enqueue_burst(uint8_t dev_id, uint16_t qp_id, struct rte_crypto_op **ops, uint16_t nb_ops)
int rte_cryptodev_sym_capability_check_cipher(const struct rte_cryptodev_symmetric_capability *capability, uint16_t key_size, uint16_t iv_size)
uint16_t(* rte_cryptodev_callback_fn)(uint16_t dev_id, uint16_t qp_id, struct rte_crypto_op **ops, uint16_t nb_ops, void *user_param)
int rte_cryptodev_stats_get(uint8_t dev_id, struct rte_cryptodev_stats *stats)
__rte_experimental int rte_cryptodev_raw_enqueue_done(struct rte_crypto_raw_dp_ctx *ctx, uint32_t n)
struct rte_device * device
uint32_t elt_size
Definition: rte_mempool.h:234
__rte_experimental int rte_cryptodev_remove_enq_callback(uint8_t dev_id, uint16_t qp_id, struct rte_cryptodev_cb *cb)
uint8_t rte_cryptodev_count(void)
void *(* cryptodev_sym_raw_dequeue_t)(void *qp, uint8_t *drv_ctx, int *dequeue_status, enum rte_crypto_op_status *op_status)
int rte_cryptodev_sym_capability_check_auth(const struct rte_cryptodev_symmetric_capability *capability, uint16_t key_size, uint16_t digest_size, uint16_t iv_size)
uint16_t min_mbuf_headroom_req
__rte_experimental unsigned int rte_cryptodev_asym_get_header_session_size(void)
unsigned int rte_cryptodev_sym_get_private_session_size(uint8_t dev_id)
__rte_experimental void * rte_cryptodev_asym_session_get_user_data(void *sess)
__rte_experimental struct rte_mempool * rte_cryptodev_sym_session_pool_create(const char *name, uint32_t nb_elts, uint32_t elt_size, uint32_t cache_size, uint16_t priv_size, int socket_id)
static __rte_always_inline void rte_rcu_qsbr_thread_online(struct rte_rcu_qsbr *v, unsigned int thread_id)
Definition: rte_rcu_qsbr.h:301
uint32_t(* cryptodev_sym_raw_enqueue_burst_t)(void *qp, uint8_t *drv_ctx, struct rte_crypto_sym_vec *vec, union rte_crypto_sym_ofs ofs, void *user_data[], int *enqueue_status)
#define RTE_STD_C11
Definition: rte_common.h:39
rte_crypto_auth_algorithm
__rte_experimental int rte_cryptodev_asym_xform_capability_check_modlen(const struct rte_cryptodev_asymmetric_xform_capability *capability, uint16_t modlen)
rte_crypto_sym_xform_type
RTE_TAILQ_HEAD(rte_cryptodev_cb_list, rte_cryptodev_callback)
int rte_cryptodev_start(uint8_t dev_id)
__rte_experimental int rte_cryptodev_configure_raw_dp_ctx(uint8_t dev_id, uint16_t qp_id, struct rte_crypto_raw_dp_ctx *ctx, enum rte_crypto_op_sess_type sess_type, union rte_cryptodev_session_ctx session_ctx, uint8_t is_update)
uint64_t enqueue_err_count
uint32_t(* cryptodev_sym_raw_dequeue_burst_t)(void *qp, uint8_t *drv_ctx, rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count, uint32_t max_nb_to_dequeue, rte_cryptodev_raw_post_dequeue_t post_dequeue, void **out_user_data, uint8_t is_user_data_array, uint32_t *n_success, int *dequeue_status)
rte_crypto_op_sess_type
Definition: rte_crypto.h:62
struct rte_crypto_param_range modlen
uint16_t rte_cryptodev_queue_pair_count(uint8_t dev_id)
__rte_experimental int rte_cryptodev_asym_xform_capability_check_optype(const struct rte_cryptodev_asymmetric_xform_capability *capability, enum rte_crypto_asym_op_type op_type)
int(* cryptodev_sym_raw_enqueue_t)(void *qp, uint8_t *drv_ctx, struct rte_crypto_vec *data_vec, uint16_t n_data_vecs, union rte_crypto_sym_ofs ofs, struct rte_crypto_va_iova_ptr *iv, struct rte_crypto_va_iova_ptr *digest, struct rte_crypto_va_iova_ptr *aad_or_auth_iv, void *user_data)
__rte_experimental int rte_cryptodev_get_qp_status(uint8_t dev_id, uint16_t queue_pair_id)
unsigned max_nb_sessions
rte_cryptodev_event_type
static uint64_t rte_cryptodev_sym_session_opaque_data_get(void *sess)
__rte_experimental int rte_cryptodev_remove_deq_callback(uint8_t dev_id, uint16_t qp_id, struct rte_cryptodev_cb *cb)
static __rte_experimental __rte_always_inline void * rte_cryptodev_raw_dequeue(struct rte_crypto_raw_dp_ctx *ctx, int *dequeue_status, enum rte_crypto_op_status *op_status)
void rte_cryptodev_stats_reset(uint8_t dev_id)
int rte_cryptodev_get_dev_id(const char *name)
void * rte_cryptodev_get_sec_ctx(uint8_t dev_id)
void * rte_cryptodev_sym_session_create(uint8_t dev_id, struct rte_crypto_sym_xform *xforms, struct rte_mempool *mp)
unsigned max_nb_queue_pairs
uint8_t rte_cryptodev_device_count_by_driver(uint8_t driver_id)
static void rte_cryptodev_sym_session_opaque_data_set(void *sess, uint64_t opaque)
__rte_experimental struct rte_cryptodev_cb * rte_cryptodev_add_deq_callback(uint8_t dev_id, uint16_t qp_id, rte_cryptodev_callback_fn cb_fn, void *cb_arg)
unsigned int rte_cryptodev_is_valid_dev(uint8_t dev_id)
void(* rte_cryptodev_raw_post_dequeue_t)(void *user_data, uint32_t index, uint8_t is_op_success)
rte_crypto_op_status
Definition: rte_crypto.h:39
rte_crypto_aead_algorithm
rte_crypto_cipher_algorithm