/** Maximum number of DMA devices if rte_dma_dev_max() is not called. */
#define RTE_DMADEV_DEFAULT_MAX 64
/** Utility macro to iterate over all available DMA devices. */
#define RTE_DMA_FOREACH_DEV(p) \
	for (p = rte_dma_next_dev(0); \
	     p != -1; \
	     p = rte_dma_next_dev(p + 1))
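
/*
 * Illustrative sketch (not part of the header): walk every probed DMA
 * device with RTE_DMA_FOREACH_DEV and print its name.
 * list_dma_devices() is a hypothetical helper written for this example.
 */
#include <stdio.h>
#include <rte_dmadev.h>

static void
list_dma_devices(void)
{
	struct rte_dma_info info;
	int16_t dev_id;

	RTE_DMA_FOREACH_DEV(dev_id) {
		if (rte_dma_info_get(dev_id, &info) == 0)
			printf("dma dev %d: %s\n", dev_id, info.dev_name);
	}
}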
/** Support memory-to-memory transfer. */
#define RTE_DMA_CAPA_MEM_TO_MEM		RTE_BIT64(0)
/** Support memory-to-device transfer. */
#define RTE_DMA_CAPA_MEM_TO_DEV		RTE_BIT64(1)
/** Support device-to-memory transfer. */
#define RTE_DMA_CAPA_DEV_TO_MEM		RTE_BIT64(2)
/** Support device-to-device transfer. */
#define RTE_DMA_CAPA_DEV_TO_DEV		RTE_BIT64(3)
/** Support SVA (shared virtual addressing): virtual addresses can be used
 * directly as DMA addresses.
 */
#define RTE_DMA_CAPA_SVA		RTE_BIT64(4)
/** Support work in silent mode: the application does not collect
 * completions.
 */
#define RTE_DMA_CAPA_SILENT		RTE_BIT64(5)
/** Support error handling: failed operations are reported with a status
 * code instead of halting the channel.
 */
#define RTE_DMA_CAPA_HANDLES_ERRORS	RTE_BIT64(6)
/** Support auto free of the source buffer once a mem-to-dev transfer
 * completes.
 */
#define RTE_DMA_CAPA_M2D_AUTO_FREE	RTE_BIT64(7)
/** Support strict-priority scheduling between channels. */
#define RTE_DMA_CAPA_PRI_POLICY_SP	RTE_BIT64(8)
/** Support transfers between address domains of different processes. */
#define RTE_DMA_CAPA_INTER_PROCESS_DOMAIN RTE_BIT64(9)
/** Support transfers between address domains of different OS instances. */
#define RTE_DMA_CAPA_INTER_OS_DOMAIN	RTE_BIT64(10)

/** Support copy operation (rte_dma_copy()). */
#define RTE_DMA_CAPA_OPS_COPY		RTE_BIT64(32)
/** Support scatter-gather list copy operation (rte_dma_copy_sg()). */
#define RTE_DMA_CAPA_OPS_COPY_SG	RTE_BIT64(33)
/** Support fill operation (rte_dma_fill()). */
#define RTE_DMA_CAPA_OPS_FILL		RTE_BIT64(34)
/** Support the enqueue/dequeue operation model
 * (rte_dma_enqueue_ops()/rte_dma_dequeue_ops()).
 */
#define RTE_DMA_CAPA_OPS_ENQ_DEQ	RTE_BIT64(35)
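
/*
 * Illustrative sketch: check the advertised capability bits before using a
 * device for mem-to-mem copies. dev_supports_m2m_copy() is a hypothetical
 * helper written for this example.
 */
static bool
dev_supports_m2m_copy(int16_t dev_id)
{
	const uint64_t need = RTE_DMA_CAPA_MEM_TO_MEM | RTE_DMA_CAPA_OPS_COPY;
	struct rte_dma_info info;

	if (rte_dma_info_get(dev_id, &info) != 0)
		return false;
	return (info.dev_capa & need) == need;
}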
/** Configure the device to work in silent mode (see RTE_DMA_CAPA_SILENT). */
#define RTE_DMA_CFG_FLAG_SILENT		RTE_BIT64(0)
/** Configure the device to use the enqueue/dequeue (struct rte_dma_op)
 * operation model (see RTE_DMA_CAPA_OPS_ENQ_DEQ).
 */
#define RTE_DMA_CFG_FLAG_ENQ_DEQ	RTE_BIT64(1)
/** Special vchan value for rte_dma_stats_get()/rte_dma_stats_reset(),
 * meaning all virtual DMA channels of a device.
 */
#define RTE_DMA_ALL_VCHAN	0xFFFFu
/** Fence flag: this operation must start only after all previous
 * operations have completed.
 */
#define RTE_DMA_OP_FLAG_FENCE		RTE_BIT64(0)
/** Submit flag: ring the doorbell for this and any pending operations. */
#define RTE_DMA_OP_FLAG_SUBMIT		RTE_BIT64(1)
/** Hint to write the data directly into the last-level cache. */
#define RTE_DMA_OP_FLAG_LLC		RTE_BIT64(2)
/** Auto-free the source buffer once the mem-to-dev transfer completes. */
#define RTE_DMA_OP_FLAG_AUTO_FREE	RTE_BIT64(3)
static inline int
rte_dma_copy(int16_t dev_id, uint16_t vchan, rte_iova_t src, rte_iova_t dst,
	     uint32_t length, uint64_t flags)
{
	struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
	int ret;

#ifdef RTE_DMADEV_DEBUG
	if (!rte_dma_is_valid(dev_id) || length == 0)
		return -EINVAL;
	if (obj->copy == NULL)
		return -ENOTSUP;
#endif

	ret = obj->copy(obj->dev_private, vchan, src, dst, length, flags);
	rte_dma_trace_copy(dev_id, vchan, src, dst, length, flags, ret);

	return ret;
}
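
/*
 * Illustrative sketch: enqueue one copy and ring the doorbell in the same
 * call via RTE_DMA_OP_FLAG_SUBMIT. Assumes dev_id/vchan were configured
 * and started, and that src/dst are IOVAs reachable by this device.
 * copy_one() is a hypothetical helper written for this example.
 */
static int
copy_one(int16_t dev_id, uint16_t vchan, rte_iova_t src, rte_iova_t dst,
	 uint32_t len)
{
	/* On success the non-negative ring index of the operation returns. */
	int ret = rte_dma_copy(dev_id, vchan, src, dst, len,
			       RTE_DMA_OP_FLAG_SUBMIT);

	return ret < 0 ? ret : 0;
}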
static inline int
rte_dma_copy_sg(int16_t dev_id, uint16_t vchan, struct rte_dma_sge *src,
		struct rte_dma_sge *dst, uint16_t nb_src, uint16_t nb_dst,
		uint64_t flags)
{
	struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
	int ret;

#ifdef RTE_DMADEV_DEBUG
	if (!rte_dma_is_valid(dev_id) || src == NULL || dst == NULL ||
	    nb_src == 0 || nb_dst == 0)
		return -EINVAL;
	if (obj->copy_sg == NULL)
		return -ENOTSUP;
#endif

	ret = obj->copy_sg(obj->dev_private, vchan, src, dst, nb_src, nb_dst, flags);
	rte_dma_trace_copy_sg(dev_id, vchan, src, dst, nb_src, nb_dst, flags,
			      ret);

	return ret;
}
static inline int
rte_dma_fill(int16_t dev_id, uint16_t vchan, uint64_t pattern,
	     rte_iova_t dst, uint32_t length, uint64_t flags)
{
	struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
	int ret;

#ifdef RTE_DMADEV_DEBUG
	if (!rte_dma_is_valid(dev_id) || length == 0)
		return -EINVAL;
	if (obj->fill == NULL)
		return -ENOTSUP;
#endif

	ret = obj->fill(obj->dev_private, vchan, pattern, dst, length, flags);
	rte_dma_trace_fill(dev_id, vchan, pattern, dst, length, flags, ret);

	return ret;
}
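
/*
 * Illustrative sketch: zero a device-accessible buffer with the fill
 * operation; the 64-bit pattern is replicated across the destination.
 * zero_buffer() is a hypothetical helper written for this example.
 */
static int
zero_buffer(int16_t dev_id, uint16_t vchan, rte_iova_t dst, uint32_t len)
{
	/* Returns the ring index (>= 0) on success or a negative errno. */
	return rte_dma_fill(dev_id, vchan, 0 /* pattern */, dst, len,
			    RTE_DMA_OP_FLAG_SUBMIT);
}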
static inline int
rte_dma_submit(int16_t dev_id, uint16_t vchan)
{
	struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
	int ret;

#ifdef RTE_DMADEV_DEBUG
	if (!rte_dma_is_valid(dev_id))
		return -EINVAL;
	if (obj->submit == NULL)
		return -ENOTSUP;
#endif

	ret = obj->submit(obj->dev_private, vchan);
	rte_dma_trace_submit(dev_id, vchan, ret);

	return ret;
}
static inline uint16_t
rte_dma_completed(int16_t dev_id, uint16_t vchan, const uint16_t nb_cpls,
		  uint16_t *last_idx, bool *has_error)
{
	struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
	uint16_t idx, ret;
	bool err;

#ifdef RTE_DMADEV_DEBUG
	if (!rte_dma_is_valid(dev_id) || nb_cpls == 0)
		return 0;
	if (obj->completed == NULL)
		return 0;
#endif

	/* Substitute local variables when the caller passes NULL, so that
	 * drivers never have to check these pointers.
	 */
	if (last_idx == NULL)
		last_idx = &idx;
	if (has_error == NULL)
		has_error = &err;

	*has_error = false;
	ret = obj->completed(obj->dev_private, vchan, nb_cpls, last_idx, has_error);
	rte_dma_trace_completed(dev_id, vchan, nb_cpls, last_idx, has_error,
				ret);

	return ret;
}
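
/*
 * Illustrative sketch: batch a burst of copies, ring the doorbell once,
 * then poll rte_dma_completed() until everything finishes or an error is
 * reported. Enqueue failures are ignored here for brevity;
 * copy_burst_and_wait() is a hypothetical helper written for this example.
 */
static void
copy_burst_and_wait(int16_t dev_id, uint16_t vchan, rte_iova_t *src,
		    rte_iova_t *dst, uint32_t len, uint16_t n)
{
	uint16_t done = 0, last_idx, i;
	bool has_error = false;

	for (i = 0; i < n; i++)
		rte_dma_copy(dev_id, vchan, src[i], dst[i], len, 0);
	rte_dma_submit(dev_id, vchan);	/* one doorbell for the whole batch */

	while (done < n && !has_error)
		done += rte_dma_completed(dev_id, vchan, n - done,
					  &last_idx, &has_error);
}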
static inline uint16_t
rte_dma_completed_status(int16_t dev_id, uint16_t vchan,
			 const uint16_t nb_cpls, uint16_t *last_idx,
			 enum rte_dma_status_code *status)
{
	struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
	uint16_t idx, ret;

#ifdef RTE_DMADEV_DEBUG
	if (!rte_dma_is_valid(dev_id) || nb_cpls == 0 || status == NULL)
		return 0;
	if (obj->completed_status == NULL)
		return 0;
#endif

	if (last_idx == NULL)
		last_idx = &idx;

	ret = obj->completed_status(obj->dev_private, vchan, nb_cpls, last_idx, status);
	rte_dma_trace_completed_status(dev_id, vchan, nb_cpls, last_idx, status,
				       ret);

	return ret;
}
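
/*
 * Illustrative sketch: after rte_dma_completed() reports has_error, drain
 * completions with per-operation status codes. handle_failed_op() and
 * drain_with_status() are hypothetical names written for this example.
 */
extern void handle_failed_op(enum rte_dma_status_code st);	/* hypothetical */

static void
drain_with_status(int16_t dev_id, uint16_t vchan, uint16_t outstanding)
{
	enum rte_dma_status_code status[64];
	uint16_t last_idx, n, i;

	n = rte_dma_completed_status(dev_id, vchan,
				     RTE_MIN(outstanding, (uint16_t)64),
				     &last_idx, status);
	for (i = 0; i < n; i++) {
		if (status[i] != RTE_DMA_STATUS_SUCCESSFUL)
			handle_failed_op(status[i]);
	}
}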
static inline uint16_t
rte_dma_burst_capacity(int16_t dev_id, uint16_t vchan)
{
	struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
	uint16_t ret;

#ifdef RTE_DMADEV_DEBUG
	if (!rte_dma_is_valid(dev_id))
		return 0;
	if (obj->burst_capacity == NULL)
		return 0;
#endif
	ret = obj->burst_capacity(obj->dev_private, vchan);
	rte_dma_trace_burst_capacity(dev_id, vchan, ret);

	return ret;
}
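
/*
 * Illustrative sketch: use rte_dma_burst_capacity() to avoid enqueueing
 * more copies than the descriptor ring currently has room for.
 * copy_up_to_capacity() is a hypothetical helper written for this example.
 */
static uint16_t
copy_up_to_capacity(int16_t dev_id, uint16_t vchan, rte_iova_t *src,
		    rte_iova_t *dst, uint32_t len, uint16_t n)
{
	uint16_t room = rte_dma_burst_capacity(dev_id, vchan);
	uint16_t todo = RTE_MIN(n, room);
	uint16_t i;

	for (i = 0; i < todo; i++)
		rte_dma_copy(dev_id, vchan, src[i], dst[i], len, 0);
	if (todo > 0)
		rte_dma_submit(dev_id, vchan);
	return todo;	/* number of copies actually enqueued */
}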
static inline uint16_t
rte_dma_enqueue_ops(int16_t dev_id, uint16_t vchan, struct rte_dma_op **ops, uint16_t nb_ops)
{
	struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
	uint16_t ret;

#ifdef RTE_DMADEV_DEBUG
	if (!rte_dma_is_valid(dev_id))
		return 0;
	if (*obj->enqueue == NULL)
		return 0;
#endif

	ret = (*obj->enqueue)(obj->dev_private, vchan, ops, nb_ops);
	rte_dma_trace_enqueue_ops(dev_id, vchan, (void **)ops, nb_ops);

	return ret;
}
static inline uint16_t
rte_dma_dequeue_ops(int16_t dev_id, uint16_t vchan, struct rte_dma_op **ops, uint16_t nb_ops)
{
	struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
	uint16_t ret;

#ifdef RTE_DMADEV_DEBUG
	if (!rte_dma_is_valid(dev_id))
		return 0;
	if (*obj->dequeue == NULL)
		return 0;
#endif

	ret = (*obj->dequeue)(obj->dev_private, vchan, ops, nb_ops);
	rte_dma_trace_dequeue_ops(dev_id, vchan, (void **)ops, nb_ops);

	return ret;
}
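
/*
 * Illustrative sketch of the enqueue/dequeue model: the device is assumed
 * to be configured with RTE_DMA_CFG_FLAG_ENQ_DEQ, and `op` to come from a
 * mempool of struct rte_dma_op sized for the scatter-gather entries. The
 * segment layout (sources first, then destinations) is an assumption here,
 * mirroring rte_dma_copy_sg() semantics. submit_op_copy() is hypothetical.
 */
static int
submit_op_copy(int16_t dev_id, uint16_t vchan, struct rte_dma_op *op,
	       rte_iova_t src, rte_iova_t dst, uint32_t len)
{
	op->nb_src = 1;
	op->nb_dst = 1;
	op->src_dst_seg[0].addr = src;		/* source segment */
	op->src_dst_seg[0].length = len;
	op->src_dst_seg[1].addr = dst;		/* destination segment */
	op->src_dst_seg[1].length = len;

	/* rte_dma_enqueue_ops() returns the number of ops accepted. */
	return rte_dma_enqueue_ops(dev_id, vchan, &op, 1) == 1 ? 0 : -ENOSPC;
}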
/*
 * Index of declarations on this page (regrouped from the alphabetical
 * member list). A minimal bring-up sketch follows the index.
 */

/* Device management (control path): */
int rte_dma_dev_max(size_t dev_max);
int rte_dma_get_dev_id_by_name(const char *name);
bool rte_dma_is_valid(int16_t dev_id);
uint16_t rte_dma_count_avail(void);
int16_t rte_dma_next_dev(int16_t start_dev_id);
int rte_dma_info_get(int16_t dev_id, struct rte_dma_info *dev_info);
int rte_dma_configure(int16_t dev_id, const struct rte_dma_conf *dev_conf);
int rte_dma_vchan_setup(int16_t dev_id, uint16_t vchan, const struct rte_dma_vchan_conf *conf);
int rte_dma_start(int16_t dev_id);
int rte_dma_stop(int16_t dev_id);
int rte_dma_close(int16_t dev_id);
int rte_dma_stats_get(int16_t dev_id, uint16_t vchan, struct rte_dma_stats *stats);
int rte_dma_stats_reset(int16_t dev_id, uint16_t vchan);
int rte_dma_dump(int16_t dev_id, FILE *f);

/* Data path (the inline fast-path wrappers defined above): */
static int rte_dma_copy(int16_t dev_id, uint16_t vchan, rte_iova_t src, rte_iova_t dst, uint32_t length, uint64_t flags);
static int rte_dma_copy_sg(int16_t dev_id, uint16_t vchan, struct rte_dma_sge *src, struct rte_dma_sge *dst, uint16_t nb_src, uint16_t nb_dst, uint64_t flags);
static int rte_dma_fill(int16_t dev_id, uint16_t vchan, uint64_t pattern, rte_iova_t dst, uint32_t length, uint64_t flags);
static int rte_dma_submit(int16_t dev_id, uint16_t vchan);
static uint16_t rte_dma_completed(int16_t dev_id, uint16_t vchan, const uint16_t nb_cpls, uint16_t *last_idx, bool *has_error);
static uint16_t rte_dma_completed_status(int16_t dev_id, uint16_t vchan, const uint16_t nb_cpls, uint16_t *last_idx, enum rte_dma_status_code *status);
static uint16_t rte_dma_burst_capacity(int16_t dev_id, uint16_t vchan);
static uint16_t rte_dma_enqueue_ops(int16_t dev_id, uint16_t vchan, struct rte_dma_op **ops, uint16_t nb_ops);
static uint16_t rte_dma_dequeue_ops(int16_t dev_id, uint16_t vchan, struct rte_dma_op **ops, uint16_t nb_ops);

/* Experimental access-pair-group API: */
typedef void (*rte_dma_access_pair_group_event_cb_t)(int16_t dev_id, int16_t group_id, rte_uuid_t domain_id, enum rte_dma_access_pair_group_event_type event);
__rte_experimental int rte_dma_access_pair_group_create(int16_t dev_id, rte_uuid_t domain_id, rte_uuid_t token, int16_t *group_id, rte_dma_access_pair_group_event_cb_t cb);
__rte_experimental int rte_dma_access_pair_group_join(int16_t dev_id, int16_t group_id, rte_uuid_t token, rte_dma_access_pair_group_event_cb_t cb);
__rte_experimental int rte_dma_access_pair_group_leave(int16_t dev_id, int16_t group_id);
__rte_experimental int rte_dma_access_pair_group_destroy(int16_t dev_id, int16_t group_id);
__rte_experimental int rte_dma_access_pair_group_handler_get(int16_t dev_id, int16_t group_id, rte_uuid_t domain_id, uint16_t *handler);

/* enum rte_dma_inter_domain_type: */
/*	RTE_DMA_INTER_DOMAIN_NONE, RTE_DMA_INTER_PROCESS_DOMAIN,
 *	RTE_DMA_INTER_OS_DOMAIN
 */

/* enum rte_dma_access_pair_group_event_type: */
/*	RTE_DMA_GROUP_EVENT_MEMBER_LEFT, RTE_DMA_GROUP_EVENT_GROUP_DESTROYED */

/* enum rte_dma_status_code: */
/*	RTE_DMA_STATUS_SUCCESSFUL, RTE_DMA_STATUS_NOT_ATTEMPTED,
 *	RTE_DMA_STATUS_INVALID_SRC_ADDR, RTE_DMA_STATUS_INVALID_DST_ADDR,
 *	RTE_DMA_STATUS_INVALID_ADDR, RTE_DMA_STATUS_INVALID_LENGTH,
 *	RTE_DMA_STATUS_INVALID_OPCODE, RTE_DMA_STATUS_BUS_READ_ERROR,
 *	RTE_DMA_STATUS_BUS_WRITE_ERROR, RTE_DMA_STATUS_BUS_ERROR,
 *	RTE_DMA_STATUS_DATA_POISION, RTE_DMA_STATUS_DESCRIPTOR_READ_ERROR,
 *	RTE_DMA_STATUS_DEV_LINK_ERROR, RTE_DMA_STATUS_PAGE_FAULT,
 *	RTE_DMA_STATUS_USER_ABORT, RTE_DMA_STATUS_ERROR_UNKNOWN
 */

/* enum rte_dma_vchan_status (value referenced on this page): */
/*	RTE_DMA_VCHAN_HALTED_ERROR */
/* Data-field index (struct members and typedefs referenced on this page): */
typedef unsigned char rte_uuid_t[16];
void *(*handler)(void *sock_id);
struct rte_mempool *pool;
struct rte_mempool *op_mp;
enum rte_dma_inter_domain_type type;
enum rte_dma_status_code status;
struct rte_dma_sge src_dst_seg[];
enum rte_dma_port_type port_type;
union { /* ... */ } pcie;	/* anonymous union within struct rte_dma_port_param */
struct rte_dma_inter_domain_param domain;
enum rte_dma_direction direction;
struct rte_dma_auto_free_param auto_free;
struct rte_dma_port_param src_port;
struct rte_dma_port_param dst_port;
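
/*
 * Illustrative sketch: minimal bring-up of one mem-to-mem vchan using the
 * control-path calls indexed above. The nb_desc value is arbitrary and
 * setup_dma() is a hypothetical helper written for this example.
 */
static int
setup_dma(int16_t dev_id)
{
	const struct rte_dma_conf dev_conf = { .nb_vchans = 1 };
	const struct rte_dma_vchan_conf qconf = {
		.direction = RTE_DMA_DIR_MEM_TO_MEM,
		.nb_desc = 1024,
	};
	int ret;

	ret = rte_dma_configure(dev_id, &dev_conf);
	if (ret < 0)
		return ret;
	ret = rte_dma_vchan_setup(dev_id, 0, &qconf);
	if (ret < 0)
		return ret;
	return rte_dma_start(dev_id);
}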