#ifndef _RTE_CRYPTODEV_H_
#define _RTE_CRYPTODEV_H_

#include <rte_compat.h>

#include "rte_cryptodev_trace_fp.h"

extern int rte_cryptodev_logtype;
#define RTE_LOGTYPE_CRYPTODEV rte_cryptodev_logtype

#define CDEV_LOG_ERR(...) \
    RTE_LOG_LINE_PREFIX(ERR, CRYPTODEV, \
        "%s() line %u: ", __func__ RTE_LOG_COMMA __LINE__, __VA_ARGS__)

#define CDEV_LOG_INFO(...) \
    RTE_LOG_LINE(INFO, CRYPTODEV, "" __VA_ARGS__)

#define CDEV_LOG_DEBUG(...) \
    RTE_LOG_LINE_PREFIX(DEBUG, CRYPTODEV, \
        "%s() line %u: ", __func__ RTE_LOG_COMMA __LINE__, __VA_ARGS__)

#define CDEV_PMD_TRACE(...) \
    RTE_LOG_LINE_PREFIX(DEBUG, CRYPTODEV, \
        "[%s] %s: ", dev RTE_LOG_COMMA __func__, __VA_ARGS__)

#define rte_crypto_op_ctod_offset(c, t, o) \
    ((t)((char *)(c) + (o)))

#define rte_crypto_op_ctophys_offset(c, o) \
    (rte_iova_t)((c)->phys_addr + (o))

#define RTE_CRYPTO_CIPHER_DATA_UNIT_LEN_512_BYTES   RTE_BIT32(0)
#define RTE_CRYPTO_CIPHER_DATA_UNIT_LEN_4096_BYTES  RTE_BIT32(1)
#define RTE_CRYPTO_CIPHER_DATA_UNIT_LEN_1_MEGABYTES RTE_BIT32(2)

    uint32_t op_capa[RTE_CRYPTO_ASYM_OP_LIST_END];
    /**< Operation-specific capabilities (member of struct rte_cryptodev_asymmetric_xform_capability) */
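/*
 * Illustrative sketch, not part of this header: the rte_crypto_op_ctod_offset()
 * and rte_crypto_op_ctophys_offset() macros above resolve the virtual and IOVA
 * address of data placed at a byte offset inside a crypto operation, e.g. a
 * per-operation cipher IV stored right after the symmetric op. The example_*
 * helper and the chosen layout are assumptions of this example; assumes
 * <rte_cryptodev.h> and <string.h> are included.
 */
static void
example_set_iv(struct rte_crypto_op *op, const uint8_t *iv, uint16_t iv_len)
{
    /* Application-chosen layout: IV stored right after the sym op. */
    const size_t iv_off = sizeof(struct rte_crypto_op) +
            sizeof(struct rte_crypto_sym_op);
    uint8_t *iv_va = rte_crypto_op_ctod_offset(op, uint8_t *, iv_off);
    rte_iova_t iv_pa = rte_crypto_op_ctophys_offset(op, iv_off);

    memcpy(iv_va, iv, iv_len);
    (void)iv_pa; /* pass to the PMD/xform if a bus address is required */
}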
int rte_cryptodev_sym_capability_check_cipher(
    const struct rte_cryptodev_symmetric_capability *capability,
    uint16_t key_size, uint16_t iv_size);

int rte_cryptodev_sym_capability_check_auth(
    const struct rte_cryptodev_symmetric_capability *capability,
    uint16_t key_size, uint16_t digest_size, uint16_t iv_size);

int rte_cryptodev_sym_capability_check_aead(
    const struct rte_cryptodev_symmetric_capability *capability,
    uint16_t key_size, uint16_t digest_size, uint16_t aad_size,
    uint16_t iv_size);

int rte_cryptodev_get_cipher_algo_enum(
    enum rte_crypto_cipher_algorithm *algo_enum, const char *algo_string);

int rte_cryptodev_get_auth_algo_enum(
    enum rte_crypto_auth_algorithm *algo_enum, const char *algo_string);

int rte_cryptodev_get_aead_algo_enum(
    enum rte_crypto_aead_algorithm *algo_enum, const char *algo_string);

int rte_cryptodev_asym_get_xform_enum(
    enum rte_crypto_asym_xform_type *xform_enum, const char *xform_string);
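/*
 * Illustrative sketch, not part of this header: resolve an algorithm name
 * supplied e.g. on the command line and verify that a device supports it with
 * the intended key and IV sizes. The example_* helper is an assumption of this
 * example; assumes <rte_cryptodev.h> is included. Returns 0 when supported.
 */
static int
example_check_cipher(uint8_t dev_id, const char *name,
        uint16_t key_size, uint16_t iv_size)
{
    enum rte_crypto_cipher_algorithm algo;
    struct rte_cryptodev_sym_capability_idx idx;
    const struct rte_cryptodev_symmetric_capability *cap;

    if (rte_cryptodev_get_cipher_algo_enum(&algo, name) < 0)
        return -1; /* unknown algorithm name */

    idx.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
    idx.algo.cipher = algo;

    cap = rte_cryptodev_sym_capability_get(dev_id, &idx);
    if (cap == NULL)
        return -1; /* device does not support the algorithm */

    return rte_cryptodev_sym_capability_check_cipher(cap, key_size, iv_size);
}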
#define RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST() \
    { RTE_CRYPTO_OP_TYPE_UNDEFINED }

#define RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO           (1ULL << 0)
#define RTE_CRYPTODEV_FF_ASYMMETRIC_CRYPTO          (1ULL << 1)
#define RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING     (1ULL << 2)
#define RTE_CRYPTODEV_FF_CPU_SSE                    (1ULL << 3)
#define RTE_CRYPTODEV_FF_CPU_AVX                    (1ULL << 4)
#define RTE_CRYPTODEV_FF_CPU_AVX2                   (1ULL << 5)
#define RTE_CRYPTODEV_FF_CPU_AESNI                  (1ULL << 6)
#define RTE_CRYPTODEV_FF_HW_ACCELERATED             (1ULL << 7)
#define RTE_CRYPTODEV_FF_CPU_AVX512                 (1ULL << 8)
#define RTE_CRYPTODEV_FF_IN_PLACE_SGL               (1ULL << 9)
#define RTE_CRYPTODEV_FF_OOP_SGL_IN_SGL_OUT         (1ULL << 10)
#define RTE_CRYPTODEV_FF_OOP_SGL_IN_LB_OUT          (1ULL << 11)
#define RTE_CRYPTODEV_FF_OOP_LB_IN_SGL_OUT          (1ULL << 12)
#define RTE_CRYPTODEV_FF_OOP_LB_IN_LB_OUT           (1ULL << 13)
#define RTE_CRYPTODEV_FF_CPU_NEON                   (1ULL << 14)
#define RTE_CRYPTODEV_FF_CPU_ARM_CE                 (1ULL << 15)
#define RTE_CRYPTODEV_FF_SECURITY                   (1ULL << 16)
#define RTE_CRYPTODEV_FF_RSA_PRIV_OP_KEY_EXP        (1ULL << 17)
#define RTE_CRYPTODEV_FF_RSA_PRIV_OP_KEY_QT         (1ULL << 18)
#define RTE_CRYPTODEV_FF_DIGEST_ENCRYPTED           (1ULL << 19)
#define RTE_CRYPTODEV_FF_ASYM_SESSIONLESS           (1ULL << 20)
#define RTE_CRYPTODEV_FF_SYM_CPU_CRYPTO             (1ULL << 21)
#define RTE_CRYPTODEV_FF_SYM_SESSIONLESS            (1ULL << 22)
#define RTE_CRYPTODEV_FF_NON_BYTE_ALIGNED_DATA      (1ULL << 23)
#define RTE_CRYPTODEV_FF_SYM_RAW_DP                 (1ULL << 24)
#define RTE_CRYPTODEV_FF_CIPHER_MULTIPLE_DATA_UNITS (1ULL << 25)
#define RTE_CRYPTODEV_FF_CIPHER_WRAPPED_KEY         (1ULL << 26)
#define RTE_CRYPTODEV_FF_SECURITY_INNER_CSUM        (1ULL << 27)
#define RTE_CRYPTODEV_FF_SECURITY_RX_INJECT         (1ULL << 28)

#define RTE_CRYPTODEV_DETACHED  (0)
#define RTE_CRYPTODEV_ATTACHED  (1)

#define RTE_CRYPTODEV_QP_PRIORITY_HIGHEST   0
#define RTE_CRYPTODEV_QP_PRIORITY_NORMAL    128
#define RTE_CRYPTODEV_QP_PRIORITY_LOWEST    255

typedef uint16_t (*rte_cryptodev_callback_fn)(uint16_t dev_id, uint16_t qp_id,
    struct rte_crypto_op **ops, uint16_t nb_ops,
    void *user_param);
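/*
 * Illustrative sketch, not part of this header: the RTE_CRYPTODEV_FF_* bits
 * are reported in rte_cryptodev_info.feature_flags, and the capability array
 * announced there is terminated by RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST().
 * The example_* helper is an assumption of this example; assumes
 * <rte_cryptodev.h> and <stdio.h> are included.
 */
static void
example_dump_features(uint8_t dev_id)
{
    struct rte_cryptodev_info info;
    const struct rte_cryptodev_capabilities *cap;
    uint64_t flag;

    rte_cryptodev_info_get(dev_id, &info);

    for (flag = 1; flag != 0; flag <<= 1) {
        const char *name;

        if ((info.feature_flags & flag) == 0)
            continue;
        name = rte_cryptodev_get_feature_name(flag);
        if (name != NULL)
            printf("feature: %s\n", name);
    }

    for (cap = info.capabilities;
         cap->op != RTE_CRYPTO_OP_TYPE_UNDEFINED; cap++)
        ; /* inspect each symmetric/asymmetric capability here */
}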
#define RTE_CRYPTODEV_NAME_MAX_LEN (64)

int rte_cryptodev_socket_id(uint8_t dev_id);
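/*
 * Illustrative sketch, not part of this header: minimal device bring-up on the
 * device's own NUMA socket, with error handling trimmed. Queue-pair and
 * descriptor counts are placeholder values; the example_* helper is an
 * assumption of this example. Assumes <rte_cryptodev.h> is included.
 */
static int
example_dev_setup(uint8_t dev_id, struct rte_mempool *session_pool)
{
    int socket_id = rte_cryptodev_socket_id(dev_id);
    struct rte_cryptodev_config conf = {
        .socket_id = socket_id,
        .nb_queue_pairs = 1,
    };
    struct rte_cryptodev_qp_conf qp_conf = {
        .nb_descriptors = 2048,
        .mp_session = session_pool,
    };

    if (rte_cryptodev_configure(dev_id, &conf) < 0)
        return -1;
    if (rte_cryptodev_queue_pair_setup(dev_id, 0, &qp_conf, socket_id) < 0)
        return -1;

    return rte_cryptodev_start(dev_id);
}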
struct rte_cryptodev_callback;
struct rte_cryptodev_cb_rcu {
    struct rte_cryptodev_cb *next;
    /**< Pointer to next callback */
    struct rte_rcu_qsbr *qsbr;
    /**< RCU QSBR variable per queue pair */
};
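/*
 * Illustrative sketch, not part of this header: user callbacks are chained per
 * queue pair and protected by the RCU state carried in struct
 * rte_cryptodev_cb_rcu above. The callback below only counts dequeued
 * operations; the example_* names are assumptions of this example. Assumes
 * <rte_cryptodev.h> is included and RTE_CRYPTO_CALLBACKS is enabled.
 */
static uint16_t
example_deq_cb(uint16_t dev_id, uint16_t qp_id, struct rte_crypto_op **ops,
        uint16_t nb_ops, void *user_param)
{
    uint64_t *counter = user_param;

    (void)dev_id;
    (void)qp_id;
    (void)ops;
    *counter += nb_ops;
    return nb_ops; /* number of ops handed back to the application */
}

static struct rte_cryptodev_cb *
example_register_deq_cb(uint8_t dev_id, uint16_t qp_id, uint64_t *counter)
{
    return rte_cryptodev_add_deq_callback(dev_id, qp_id, example_deq_cb,
            counter);
}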
#define CRYPTO_SESS_OPAQUE_DATA_OFF 0

static inline uint64_t
rte_cryptodev_sym_session_opaque_data_get(void *sess)
{
    return *((uint64_t *)sess + CRYPTO_SESS_OPAQUE_DATA_OFF);
}

static inline void
rte_cryptodev_sym_session_opaque_data_set(void *sess, uint64_t opaque)
{
    uint64_t *data;
    data = (((uint64_t *)sess) + CRYPTO_SESS_OPAQUE_DATA_OFF);
    *data = opaque;
}
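/*
 * Illustrative sketch, not part of this header: create a symmetric session
 * pool and a session, then tag it via the opaque-data helpers above. Pool
 * sizing values and the 0xC0FFEE tag are placeholders, and the example_*
 * helper is an assumption of this example; error handling is trimmed. Assumes
 * <rte_cryptodev.h> is included.
 */
static void *
example_create_session(uint8_t dev_id, struct rte_crypto_sym_xform *xform,
        int socket_id)
{
    uint32_t sess_size = rte_cryptodev_sym_get_private_session_size(dev_id);
    struct rte_mempool *pool;
    void *sess;

    pool = rte_cryptodev_sym_session_pool_create("example_sess_pool",
            2048,       /* number of elements */
            sess_size,  /* element size sized from the device */
            128,        /* per-lcore cache size */
            0,          /* private data size per session */
            socket_id);
    if (pool == NULL)
        return NULL;

    sess = rte_cryptodev_sym_session_create(dev_id, xform, pool);
    if (sess != NULL)
        rte_cryptodev_sym_session_opaque_data_set(sess, 0xC0FFEE);

    return sess;
}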
int rte_cryptodev_session_event_mdata_set(uint8_t dev_id, void *sess,
    enum rte_crypto_op_type op_type, enum rte_crypto_op_sess_type sess_type,
    void *ev_mdata, uint16_t size);
    struct rte_security_session *sec_sess;
    /**< Security session pointer (member of union rte_cryptodev_session_ctx) */
typedef void (*rte_cryptodev_raw_post_dequeue_t)(void *user_data,
    uint32_t index, uint8_t is_op_success);
typedef uint32_t (*cryptodev_sym_raw_dequeue_burst_t)(void *qp,
    uint8_t *drv_ctx,
    rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count,
    uint32_t max_nb_to_dequeue,
    rte_cryptodev_raw_post_dequeue_t post_dequeue,
    void **out_user_data, uint8_t is_user_data_array,
    uint32_t *n_success, int *dequeue_status);
typedef void *(*cryptodev_sym_raw_dequeue_t)(
    void *qp, uint8_t *drv_ctx, int *dequeue_status,
    enum rte_crypto_op_status *op_status);
    uint8_t drv_ctx_data[];
    /**< Driver-specific context data (flexible member of struct rte_crypto_raw_dp_ctx) */
uint32_t
rte_cryptodev_raw_enqueue_burst(struct rte_crypto_raw_dp_ctx *ctx,
    struct rte_crypto_sym_vec *vec, union rte_crypto_sym_ofs ofs,
    void **user_data, int *enqueue_status);
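/*
 * Illustrative sketch, not part of this header: size and configure a raw
 * data-path context on an already configured queue pair, submit one burst
 * described by a caller-built rte_crypto_sym_vec, and commit it if the PMD
 * defers processing to an explicit rte_cryptodev_raw_enqueue_done() call.
 * Error handling is trimmed and the ctx allocation is left to the caller to
 * free; the example_* helper is an assumption of this example. Assumes
 * <rte_cryptodev.h> and <rte_malloc.h> are included and that the PMD
 * advertises RTE_CRYPTODEV_FF_SYM_RAW_DP.
 */
static int
example_raw_enqueue(uint8_t dev_id, uint16_t qp_id, void *sess,
        struct rte_crypto_sym_vec *vec, union rte_crypto_sym_ofs ofs,
        void **user_data)
{
    struct rte_crypto_raw_dp_ctx *ctx;
    union rte_cryptodev_session_ctx sess_ctx = { .crypto_sess = sess };
    int ctx_size = rte_cryptodev_get_raw_dp_ctx_size(dev_id);
    int enq_status = 0;
    uint32_t n;

    if (ctx_size < 0)
        return -1;
    ctx = rte_zmalloc(NULL, ctx_size, 0);
    if (ctx == NULL)
        return -1;

    if (rte_cryptodev_configure_raw_dp_ctx(dev_id, qp_id, ctx,
            RTE_CRYPTO_OP_WITH_SESSION, sess_ctx, 0) < 0)
        return -1;

    n = rte_cryptodev_raw_enqueue_burst(ctx, vec, ofs, user_data,
            &enq_status);
    if (enq_status == 0) /* ops cached: issue the explicit "doorbell" */
        rte_cryptodev_raw_enqueue_done(ctx, n);

    return (int)n;
}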
    /* ... body of rte_cryptodev_raw_enqueue() ... */
    return (*ctx->enqueue)(ctx->qp_data, ctx->drv_ctx_data, data_vec,
            n_data_vecs, ofs, iv, digest, aad_or_auth_iv, user_data);
uint32_t
rte_cryptodev_raw_dequeue_burst(struct rte_crypto_raw_dp_ctx *ctx,
    rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count,
    uint32_t max_nb_to_dequeue,
    rte_cryptodev_raw_post_dequeue_t post_dequeue,
    void **out_user_data, uint8_t is_user_data_array,
    uint32_t *n_success, int *dequeue_status);
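/*
 * Illustrative sketch, not part of this header: drain a raw data-path queue
 * pair. The post-dequeue callback is application-defined and is invoked per
 * dequeued operation with its completion status; passing NULL for
 * get_dequeue_count lets max_nb_to_dequeue bound the burst. The example_*
 * names and the burst size of 32 are assumptions of this example. Assumes
 * <rte_cryptodev.h> is included.
 */
static void
example_post_dequeue(void *user_data, uint32_t index, uint8_t is_op_success)
{
    /* e.g. mark the request identified by user_data as done or failed */
    (void)user_data;
    (void)index;
    (void)is_op_success;
}

static uint32_t
example_raw_dequeue(struct rte_crypto_raw_dp_ctx *ctx, void **out_user_data)
{
    uint32_t n_success = 0;
    int deq_status = 0;
    uint32_t n;

    n = rte_cryptodev_raw_dequeue_burst(ctx,
            NULL,                 /* no get_dequeue_count callback */
            32,                   /* max_nb_to_dequeue */
            example_post_dequeue,
            out_user_data,
            1,                    /* out_user_data is an array */
            &n_success, &deq_status);
    if (deq_status == 0) /* PMD deferred its queue update: commit it */
        rte_cryptodev_raw_dequeue_done(ctx, n);

    return n_success;
}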
    /* ... body of rte_cryptodev_raw_dequeue() ... */
    return (*ctx->dequeue)(ctx->qp_data, ctx->drv_ctx_data, dequeue_status,
            op_status);
static inline uint16_t
rte_cryptodev_dequeue_burst(uint8_t dev_id, uint16_t qp_id,
        struct rte_crypto_op **ops, uint16_t nb_ops)
{
    const struct rte_crypto_fp_ops *fp_ops;
    void *qp;

    rte_cryptodev_trace_dequeue_burst(dev_id, qp_id, (void **)ops, nb_ops);

    fp_ops = &rte_crypto_fp_ops[dev_id];
    qp = fp_ops->qp.data[qp_id];

    nb_ops = fp_ops->dequeue_burst(qp, ops, nb_ops);

#ifdef RTE_CRYPTO_CALLBACKS
    if (unlikely(fp_ops->qp.deq_cb[qp_id].next != NULL)) {
        struct rte_cryptodev_cb_rcu *list;
        struct rte_cryptodev_cb *cb;

        list = &fp_ops->qp.deq_cb[qp_id];
        rte_rcu_qsbr_thread_online(list->qsbr, 0);
        cb = rte_atomic_load_explicit(&list->next, rte_memory_order_relaxed);

        while (cb != NULL) {
            nb_ops = cb->fn(dev_id, qp_id, ops, nb_ops, cb->arg);
            cb = cb->next;
        };

        rte_rcu_qsbr_thread_offline(list->qsbr, 0);
    }
#endif
    return nb_ops;
}
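/*
 * Illustrative sketch, not part of this header: a typical polling loop around
 * the dequeue burst above; completed operations report their result in
 * op->status. The example_* helper and burst size are assumptions of this
 * example. Assumes <rte_cryptodev.h> is included.
 */
static void
example_poll(uint8_t dev_id, uint16_t qp_id)
{
    struct rte_crypto_op *ops[32];
    uint16_t i, nb_deq;

    nb_deq = rte_cryptodev_dequeue_burst(dev_id, qp_id, ops, 32);
    for (i = 0; i < nb_deq; i++) {
        if (ops[i]->status != RTE_CRYPTO_OP_STATUS_SUCCESS) {
            /* handle the failed operation here */
        }
        rte_crypto_op_free(ops[i]);
    }
}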
static inline uint16_t
rte_cryptodev_enqueue_burst(uint8_t dev_id, uint16_t qp_id,
        struct rte_crypto_op **ops, uint16_t nb_ops)
{
    const struct rte_crypto_fp_ops *fp_ops;
    void *qp;

    fp_ops = &rte_crypto_fp_ops[dev_id];
    qp = fp_ops->qp.data[qp_id];
#ifdef RTE_CRYPTO_CALLBACKS
    if (unlikely(fp_ops->qp.enq_cb[qp_id].next != NULL)) {
        struct rte_cryptodev_cb_rcu *list;
        struct rte_cryptodev_cb *cb;

        list = &fp_ops->qp.enq_cb[qp_id];
        rte_rcu_qsbr_thread_online(list->qsbr, 0);
        cb = rte_atomic_load_explicit(&list->next, rte_memory_order_relaxed);

        while (cb != NULL) {
            nb_ops = cb->fn(dev_id, qp_id, ops, nb_ops, cb->arg);
            cb = cb->next;
        };

        rte_rcu_qsbr_thread_offline(list->qsbr, 0);
    }
#endif

    rte_cryptodev_trace_enqueue_burst(dev_id, qp_id, (void **)ops, nb_ops);
    return fp_ops->enqueue_burst(qp, ops, nb_ops);
}
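/*
 * Illustrative sketch, not part of this header: enqueue a burst and retry the
 * tail the queue pair could not accept; a production loop would bound the
 * retries. The example_* helper is an assumption of this example. Assumes
 * <rte_cryptodev.h> is included.
 */
static void
example_submit(uint8_t dev_id, uint16_t qp_id,
        struct rte_crypto_op **ops, uint16_t nb_ops)
{
    uint16_t sent = 0;

    while (sent < nb_ops)
        sent += rte_cryptodev_enqueue_burst(dev_id, qp_id,
                ops + sent, nb_ops - sent);
}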
__rte_experimental
static inline int
rte_cryptodev_qp_depth_used(uint8_t dev_id, uint16_t qp_id)
{
    const struct rte_crypto_fp_ops *fp_ops;
    void *qp;
    int rc;

    fp_ops = &rte_crypto_fp_ops[dev_id];
    qp = fp_ops->qp.data[qp_id];

    if (fp_ops->qp_depth_used == NULL) {
        rc = -ENOTSUP;
        goto out;
    }

    rc = fp_ops->qp_depth_used(qp);
out:
    rte_cryptodev_trace_qp_depth_used(dev_id, qp_id);
    return rc;
}
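/*
 * Illustrative sketch, not part of this header: use the inflight count to
 * apply simple backpressure before enqueueing more work; a negative return
 * means the PMD does not report queue depth. The example_* helper and the
 * watermark parameter are assumptions of this example. Assumes
 * <rte_cryptodev.h> is included.
 */
static int
example_has_room(uint8_t dev_id, uint16_t qp_id, int high_watermark)
{
    int depth = rte_cryptodev_qp_depth_used(dev_id, qp_id);

    if (depth < 0)
        return 1; /* unknown depth: do not throttle */

    return depth < high_watermark;
}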
int rte_cryptodev_get_cipher_algo_enum(enum rte_crypto_cipher_algorithm *algo_enum, const char *algo_string)
int rte_cryptodev_get_auth_algo_enum(enum rte_crypto_auth_algorithm *algo_enum, const char *algo_string)
struct rte_cryptodev_cb * rte_cryptodev_add_enq_callback(uint8_t dev_id, uint16_t qp_id, rte_cryptodev_callback_fn cb_fn, void *cb_arg)
struct rte_cryptodev_cb * next
struct rte_mempool * mp_session
unsigned int rte_cryptodev_asym_get_header_session_size(void)
void rte_cryptodev_info_get(uint8_t dev_id, struct rte_cryptodev_info *dev_info)
static __rte_experimental __rte_always_inline int rte_cryptodev_raw_enqueue(struct rte_crypto_raw_dp_ctx *ctx, struct rte_crypto_vec *data_vec, uint16_t n_data_vecs, union rte_crypto_sym_ofs ofs, struct rte_crypto_va_iova_ptr *iv, struct rte_crypto_va_iova_ptr *digest, struct rte_crypto_va_iova_ptr *aad_or_auth_iv, void *user_data)
struct rte_mempool * rte_cryptodev_sym_session_pool_create(const char *name, uint32_t nb_elts, uint32_t elt_size, uint32_t cache_size, uint16_t priv_size, int socket_id)
const char * rte_cryptodev_get_feature_name(uint64_t flag)
int rte_cryptodev_remove_enq_callback(uint8_t dev_id, uint16_t qp_id, struct rte_cryptodev_cb *cb)
int rte_cryptodev_configure_raw_dp_ctx(uint8_t dev_id, uint16_t qp_id, struct rte_crypto_raw_dp_ctx *ctx, enum rte_crypto_op_sess_type sess_type, union rte_cryptodev_session_ctx session_ctx, uint8_t is_update)
int rte_cryptodev_queue_pair_setup(uint8_t dev_id, uint16_t queue_pair_id, const struct rte_cryptodev_qp_conf *qp_conf, int socket_id)
bool rte_cryptodev_asym_xform_capability_check_hash(const struct rte_cryptodev_asymmetric_xform_capability *capability, enum rte_crypto_auth_algorithm hash)
const struct rte_cryptodev_asymmetric_xform_capability * rte_cryptodev_asym_capability_get(uint8_t dev_id, const struct rte_cryptodev_asym_capability_idx *idx)
int rte_cryptodev_get_raw_dp_ctx_size(uint8_t dev_id)
uint8_t rte_cryptodev_devices_get(const char *driver_name, uint8_t *devices, uint8_t nb_devices)
rte_crypto_asym_xform_type
static uint16_t rte_cryptodev_dequeue_burst(uint8_t dev_id, uint16_t qp_id, struct rte_crypto_op **ops, uint16_t nb_ops)
int rte_cryptodev_raw_enqueue_done(struct rte_crypto_raw_dp_ctx *ctx, uint32_t n)
int rte_cryptodev_driver_id_get(const char *name)
__rte_experimental const char * rte_cryptodev_get_aead_algo_string(enum rte_crypto_aead_algorithm algo_enum)
uint32_t rte_cryptodev_sym_cpu_crypto_process(uint8_t dev_id, void *sess, union rte_crypto_sym_ofs ofs, struct rte_crypto_sym_vec *vec)
int rte_cryptodev_asym_session_free(uint8_t dev_id, void *sess)
int rte_cryptodev_asym_xform_capability_check_optype(const struct rte_cryptodev_asymmetric_xform_capability *capability, enum rte_crypto_asym_op_type op_type)
int rte_cryptodev_session_event_mdata_set(uint8_t dev_id, void *sess, enum rte_crypto_op_type op_type, enum rte_crypto_op_sess_type sess_type, void *ev_mdata, uint16_t size)
unsigned int rte_cryptodev_asym_get_private_session_size(uint8_t dev_id)
uint64_t dequeue_err_count
int rte_cryptodev_get_aead_algo_enum(enum rte_crypto_aead_algorithm *algo_enum, const char *algo_string)
const struct rte_cryptodev_symmetric_capability * rte_cryptodev_sym_capability_get(uint8_t dev_id, const struct rte_cryptodev_sym_capability_idx *idx)
int rte_cryptodev_configure(uint8_t dev_id, struct rte_cryptodev_config *config)
int rte_cryptodev_callback_unregister(uint8_t dev_id, enum rte_cryptodev_event_type event, rte_cryptodev_cb_fn cb_fn, void *cb_arg)
const struct rte_cryptodev_capabilities * capabilities
int rte_cryptodev_sym_session_free(uint8_t dev_id, void *sess)
uint32_t(* rte_cryptodev_raw_get_dequeue_count_t)(void *user_data)
void rte_cryptodev_stop(uint8_t dev_id)
int rte_cryptodev_close(uint8_t dev_id)
void * rte_cryptodev_asym_session_get_user_data(void *sess)
int(* cryptodev_sym_raw_operation_done_t)(void *qp, uint8_t *drv_ctx, uint32_t n)
int rte_cryptodev_asym_session_create(uint8_t dev_id, struct rte_crypto_asym_xform *xforms, struct rte_mempool *mp, void **session)
void(* rte_cryptodev_cb_fn)(uint8_t dev_id, enum rte_cryptodev_event_type event, void *cb_arg)
int rte_cryptodev_sym_capability_check_aead(const struct rte_cryptodev_symmetric_capability *capability, uint16_t key_size, uint16_t digest_size, uint16_t aad_size, uint16_t iv_size)
int rte_cryptodev_asym_xform_capability_check_modlen(const struct rte_cryptodev_asymmetric_xform_capability *capability, uint16_t modlen)
const char * rte_cryptodev_driver_name_get(uint8_t driver_id)
int rte_cryptodev_callback_register(uint8_t dev_id, enum rte_cryptodev_event_type event, rte_cryptodev_cb_fn cb_fn, void *cb_arg)
uint16_t min_mbuf_tailroom_req
const char * rte_cryptodev_name_get(uint8_t dev_id)
__rte_experimental int rte_cryptodev_queue_pair_reset(uint8_t dev_id, uint16_t queue_pair_id, const struct rte_cryptodev_qp_conf *qp_conf, int socket_id)
__rte_experimental const char * rte_cryptodev_asym_get_xform_string(enum rte_crypto_asym_xform_type xform_enum)
rte_cryptodev_callback_fn fn
static uint16_t rte_cryptodev_enqueue_burst(uint8_t dev_id, uint16_t qp_id, struct rte_crypto_op **ops, uint16_t nb_ops)
int rte_cryptodev_sym_capability_check_cipher(const struct rte_cryptodev_symmetric_capability *capability, uint16_t key_size, uint16_t iv_size)
uint16_t(* rte_cryptodev_callback_fn)(uint16_t dev_id, uint16_t qp_id, struct rte_crypto_op **ops, uint16_t nb_ops, void *user_param)
int rte_cryptodev_stats_get(uint8_t dev_id, struct rte_cryptodev_stats *stats)
static __rte_experimental int rte_cryptodev_qp_depth_used(uint8_t dev_id, uint16_t qp_id)
int rte_cryptodev_get_qp_status(uint8_t dev_id, uint16_t queue_pair_id)
__rte_experimental int rte_cryptodev_asym_xform_capability_check_opcap(const struct rte_cryptodev_asymmetric_xform_capability *capability, enum rte_crypto_asym_op_type op_type, uint8_t cap)
struct rte_device * device
int rte_cryptodev_asym_get_xform_enum(enum rte_crypto_asym_xform_type *xform_enum, const char *xform_string)
int rte_cryptodev_remove_deq_callback(uint8_t dev_id, uint16_t qp_id, struct rte_cryptodev_cb *cb)
uint8_t rte_cryptodev_count(void)
void *(* cryptodev_sym_raw_dequeue_t)(void *qp, uint8_t *drv_ctx, int *dequeue_status, enum rte_crypto_op_status *op_status)
int rte_cryptodev_sym_capability_check_auth(const struct rte_cryptodev_symmetric_capability *capability, uint16_t key_size, uint16_t digest_size, uint16_t iv_size)
int rte_cryptodev_asym_session_set_user_data(void *sess, void *data, uint16_t size)
uint16_t min_mbuf_headroom_req
uint32_t rte_cryptodev_raw_enqueue_burst(struct rte_crypto_raw_dp_ctx *ctx, struct rte_crypto_sym_vec *vec, union rte_crypto_sym_ofs ofs, void **user_data, int *enqueue_status)
uint32_t rte_cryptodev_raw_dequeue_burst(struct rte_crypto_raw_dp_ctx *ctx, rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count, uint32_t max_nb_to_dequeue, rte_cryptodev_raw_post_dequeue_t post_dequeue, void **out_user_data, uint8_t is_user_data_array, uint32_t *n_success, int *dequeue_status)
unsigned int rte_cryptodev_sym_get_private_session_size(uint8_t dev_id)
uint32_t(* cryptodev_sym_raw_enqueue_burst_t)(void *qp, uint8_t *drv_ctx, struct rte_crypto_sym_vec *vec, union rte_crypto_sym_ofs ofs, void *user_data[], int *enqueue_status)
rte_crypto_auth_algorithm
__rte_experimental const char * rte_cryptodev_get_cipher_algo_string(enum rte_crypto_cipher_algorithm algo_enum)
rte_crypto_sym_xform_type
int rte_cryptodev_sym_session_set_user_data(void *sess, void *data, uint16_t size)
RTE_TAILQ_HEAD(rte_cryptodev_cb_list, rte_cryptodev_callback)
int rte_cryptodev_start(uint8_t dev_id)
uint64_t enqueue_err_count
uint32_t(* cryptodev_sym_raw_dequeue_burst_t)(void *qp, uint8_t *drv_ctx, rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count, uint32_t max_nb_to_dequeue, rte_cryptodev_raw_post_dequeue_t post_dequeue, void **out_user_data, uint8_t is_user_data_array, uint32_t *n_success, int *dequeue_status)
__rte_experimental int rte_cryptodev_queue_pair_event_error_query(uint8_t dev_id, uint16_t qp_id)
uint16_t rte_cryptodev_queue_pair_count(uint8_t dev_id)
struct rte_mempool * rte_cryptodev_asym_session_pool_create(const char *name, uint32_t nb_elts, uint32_t cache_size, uint16_t user_data_size, int socket_id)
int(* cryptodev_sym_raw_enqueue_t)(void *qp, uint8_t *drv_ctx, struct rte_crypto_vec *data_vec, uint16_t n_data_vecs, union rte_crypto_sym_ofs ofs, struct rte_crypto_va_iova_ptr *iv, struct rte_crypto_va_iova_ptr *digest, struct rte_crypto_va_iova_ptr *aad_or_auth_iv, void *user_data)
void * rte_cryptodev_sym_session_get_user_data(void *sess)
struct rte_cryptodev_cb * rte_cryptodev_add_deq_callback(uint8_t dev_id, uint16_t qp_id, rte_cryptodev_callback_fn cb_fn, void *cb_arg)
__rte_experimental const char * rte_cryptodev_get_auth_algo_string(enum rte_crypto_auth_algorithm algo_enum)
static uint64_t rte_cryptodev_sym_session_opaque_data_get(void *sess)
static __rte_experimental __rte_always_inline void * rte_cryptodev_raw_dequeue(struct rte_crypto_raw_dp_ctx *ctx, int *dequeue_status, enum rte_crypto_op_status *op_status)
void rte_cryptodev_stats_reset(uint8_t dev_id)
int rte_cryptodev_get_dev_id(const char *name)
void * rte_cryptodev_get_sec_ctx(uint8_t dev_id)
void * rte_cryptodev_sym_session_create(uint8_t dev_id, struct rte_crypto_sym_xform *xforms, struct rte_mempool *mp)
unsigned max_nb_queue_pairs
uint8_t rte_cryptodev_device_count_by_driver(uint8_t driver_id)
static void rte_cryptodev_sym_session_opaque_data_set(void *sess, uint64_t opaque)
unsigned int rte_cryptodev_is_valid_dev(uint8_t dev_id)
void(* rte_cryptodev_raw_post_dequeue_t)(void *user_data, uint32_t index, uint8_t is_op_success)
int rte_cryptodev_raw_dequeue_done(struct rte_crypto_raw_dp_ctx *ctx, uint32_t n)
rte_crypto_aead_algorithm
rte_crypto_cipher_algorithm