/* Default maximum number of DMA devices the library can track.
 * NOTE(review): this chunk is an extraction artifact — the original header's
 * line numbers (here the leading "157") are fused onto each code line, so
 * the text is not compilable as-is. */
157#define RTE_DMADEV_DEFAULT_MAX 64
/* Iterate over valid DMA device ids, binding each in turn to @p p via
 * rte_dma_next_dev().
 * NOTE(review): the loop-condition line (original line 218) is missing from
 * this extraction — confirm the termination condition against the full
 * header before relying on this macro. */
216#define RTE_DMA_FOREACH_DEV(p) \
217 for (p = rte_dma_next_dev(0); \
219 p = rte_dma_next_dev(p + 1))
/* Device capability flags. Bits 0-8 describe transfer directions and
 * features; bits 32+ advertise which data-plane operations the driver
 * implements. Bit positions are as declared below; feature semantics beyond
 * the names are not visible in this extraction. */
/* memory-to-memory transfers (bit 0). */
226#define RTE_DMA_CAPA_MEM_TO_MEM RTE_BIT64(0)
/* memory-to-device transfers (bit 1). */
228#define RTE_DMA_CAPA_MEM_TO_DEV RTE_BIT64(1)
/* device-to-memory transfers (bit 2). */
230#define RTE_DMA_CAPA_DEV_TO_MEM RTE_BIT64(2)
/* device-to-device transfers (bit 3). */
232#define RTE_DMA_CAPA_DEV_TO_DEV RTE_BIT64(3)
/* SVA — presumably Shared Virtual Addressing; confirm in full header (bit 4). */
239#define RTE_DMA_CAPA_SVA RTE_BIT64(4)
/* "silent" mode support (bit 5) — semantics not visible here. */
245#define RTE_DMA_CAPA_SILENT RTE_BIT64(5)
/* device handles transfer errors itself, presumably (bit 6). */
253#define RTE_DMA_CAPA_HANDLES_ERRORS RTE_BIT64(6)
/* auto-free of buffers on mem-to-dev transfers, presumably (bit 7). */
260#define RTE_DMA_CAPA_M2D_AUTO_FREE RTE_BIT64(7)
/* strict-priority scheduling policy, presumably (bit 8). */
267#define RTE_DMA_CAPA_PRI_POLICY_SP RTE_BIT64(8)
/* driver implements the copy op (bit 32). */
273#define RTE_DMA_CAPA_OPS_COPY RTE_BIT64(32)
/* driver implements the scatter-gather copy op (bit 33). */
275#define RTE_DMA_CAPA_OPS_COPY_SG RTE_BIT64(33)
/* driver implements the fill op (bit 34). */
277#define RTE_DMA_CAPA_OPS_FILL RTE_BIT64(34)
/* Wildcard virtual-channel id (0xFFFF) — presumably selects all vchans of a
 * device where a vchan argument is accepted; name-based, confirm in full
 * header docs. */
646#define RTE_DMA_ALL_VCHAN 0xFFFFu
/* Per-operation flags passed via the @p flags argument of the enqueue
 * functions below (copy/copy_sg/fill). Bit positions are as declared. */
/* fence: ordering barrier relative to prior ops, presumably (bit 0). */
819#define RTE_DMA_OP_FLAG_FENCE RTE_BIT64(0)
/* submit: trigger hardware doorbell with this enqueue, presumably (bit 1). */
824#define RTE_DMA_OP_FLAG_SUBMIT RTE_BIT64(1)
/* LLC — presumably a last-level-cache allocation hint; confirm (bit 2). */
829#define RTE_DMA_OP_FLAG_LLC RTE_BIT64(2)
/* auto-free source buffer after completion, presumably (bit 3). */
836#define RTE_DMA_OP_FLAG_AUTO_FREE RTE_BIT64(3)
/* Fragment of the rte_dma_copy() fast-path wrapper (truncated extraction:
 * original lines 868, 870-871, 873-874, 876-878 and the return are missing).
 * Visible behavior: fetch the per-device fast-path object from
 * rte_dma_fp_objs[dev_id], NULL-check the copy hook under RTE_DMADEV_DEBUG,
 * dispatch to the driver's copy callback, then emit a tracepoint with the
 * result. */
867 uint32_t length, uint64_t flags)
869 struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
872#ifdef RTE_DMADEV_DEBUG
875 if (obj->copy == NULL)
879 ret = obj->copy(obj->dev_private, vchan, src, dst, length, flags);
880 rte_dma_trace_copy(dev_id, vchan, src, dst, length, flags, ret);
/* Fragment of the rte_dma_copy_sg() scatter-gather enqueue wrapper
 * (truncated extraction: interior lines and the return are missing).
 * Visible behavior: debug-mode validation rejects zero-length SG lists
 * (nb_src == 0 || nb_dst == 0) and a NULL copy_sg hook, then dispatches to
 * the driver callback and traces the call. */
917 struct rte_dma_sge *dst, uint16_t nb_src, uint16_t nb_dst,
920 struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
923#ifdef RTE_DMADEV_DEBUG
925 nb_src == 0 || nb_dst == 0)
927 if (obj->copy_sg == NULL)
931 ret = obj->copy_sg(obj->dev_private, vchan, src, dst, nb_src, nb_dst, flags);
932 rte_dma_trace_copy_sg(dev_id, vchan, src, dst, nb_src, nb_dst, flags,
/* Fragment of the rte_dma_fill() enqueue wrapper (truncated extraction:
 * interior lines and the return are missing). Visible behavior mirrors the
 * other fast-path wrappers: fetch fp object, debug NULL-check of the fill
 * hook, dispatch with (pattern, dst, length, flags), trace the result. */
966 rte_iova_t dst, uint32_t length, uint64_t flags)
968 struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
971#ifdef RTE_DMADEV_DEBUG
974 if (obj->fill == NULL)
978 ret = obj->fill(obj->dev_private, vchan, pattern, dst, length, flags);
979 rte_dma_trace_fill(dev_id, vchan, pattern, dst, length, flags, ret);
/* Fragment of the rte_dma_submit() doorbell wrapper (truncated extraction:
 * the signature, interior lines and the return are missing). Visible
 * behavior: debug NULL-check of the submit hook, dispatch to the driver with
 * (dev_private, vchan), trace the result. */
1001 struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
1004#ifdef RTE_DMADEV_DEBUG
1007 if (obj->submit == NULL)
1011 ret = obj->submit(obj->dev_private, vchan);
1012 rte_dma_trace_submit(dev_id, vchan, ret);
/* Fragment of the rte_dma_completed() completion-poll wrapper (truncated
 * extraction: parameter list is partial, interior lines and the return are
 * missing). Visible behavior: debug NULL-check of the completed hook; the
 * bare `if (last_idx == NULL)` / `if (has_error == NULL)` checks suggest the
 * missing lines substitute local defaults when callers pass NULL — TODO
 * confirm against the full header; then dispatch and trace. */
1039static inline uint16_t
1041 uint16_t *last_idx,
bool *has_error)
1043 struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
1047#ifdef RTE_DMADEV_DEBUG
1050 if (obj->completed == NULL)
1062 if (last_idx == NULL)
1064 if (has_error == NULL)
1068 ret = obj->completed(obj->dev_private, vchan, nb_cpls, last_idx, has_error);
1069 rte_dma_trace_completed(dev_id, vchan, nb_cpls, last_idx, has_error,
/* Fragment of the rte_dma_completed_status() wrapper (truncated extraction:
 * parameter list is partial, interior lines and the return are missing).
 * Visible behavior parallels rte_dma_completed(): debug NULL-check of the
 * completed_status hook, a NULL guard on last_idx (consequent not visible —
 * presumably a local default; confirm), dispatch with a per-op status
 * array, trace. */
1101static inline uint16_t
1103 const uint16_t nb_cpls, uint16_t *last_idx,
1106 struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
1109#ifdef RTE_DMADEV_DEBUG
1112 if (obj->completed_status == NULL)
1116 if (last_idx == NULL)
1119 ret = obj->completed_status(obj->dev_private, vchan, nb_cpls, last_idx, status);
1120 rte_dma_trace_completed_status(dev_id, vchan, nb_cpls, last_idx, status,
/* Fragment of the rte_dma_burst_capacity() wrapper (truncated extraction:
 * signature line, interior lines and the return are missing). Visible
 * behavior: debug NULL-check of the burst_capacity hook, dispatch with
 * (dev_private, vchan), trace the returned capacity. */
1138static inline uint16_t
1141 struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
1144#ifdef RTE_DMADEV_DEBUG
1147 if (obj->burst_capacity == NULL)
1150 ret = obj->burst_capacity(obj->dev_private, vchan);
1151 rte_dma_trace_burst_capacity(dev_id, vchan, ret);
int rte_dma_start(int16_t dev_id)
int rte_dma_info_get(int16_t dev_id, struct rte_dma_info *dev_info)
int rte_dma_close(int16_t dev_id)
uint16_t rte_dma_count_avail(void)
static int rte_dma_copy(int16_t dev_id, uint16_t vchan, rte_iova_t src, rte_iova_t dst, uint32_t length, uint64_t flags)
int rte_dma_get_dev_id_by_name(const char *name)
int rte_dma_configure(int16_t dev_id, const struct rte_dma_conf *dev_conf)
@ RTE_DMA_STATUS_DESCRIPTOR_READ_ERROR
@ RTE_DMA_STATUS_INVALID_DST_ADDR
@ RTE_DMA_STATUS_PAGE_FAULT
@ RTE_DMA_STATUS_INVALID_LENGTH
@ RTE_DMA_STATUS_NOT_ATTEMPTED
@ RTE_DMA_STATUS_BUS_READ_ERROR
@ RTE_DMA_STATUS_INVALID_ADDR
@ RTE_DMA_STATUS_ERROR_UNKNOWN
@ RTE_DMA_STATUS_BUS_ERROR
@ RTE_DMA_STATUS_BUS_WRITE_ERROR
@ RTE_DMA_STATUS_DATA_POISION
@ RTE_DMA_STATUS_INVALID_OPCODE
@ RTE_DMA_STATUS_INVALID_SRC_ADDR
@ RTE_DMA_STATUS_USER_ABORT
@ RTE_DMA_STATUS_DEV_LINK_ERROR
@ RTE_DMA_STATUS_SUCCESSFUL
static uint16_t rte_dma_burst_capacity(int16_t dev_id, uint16_t vchan)
int rte_dma_stats_reset(int16_t dev_id, uint16_t vchan)
int rte_dma_dev_max(size_t dev_max)
int rte_dma_stop(int16_t dev_id)
static uint16_t rte_dma_completed(int16_t dev_id, uint16_t vchan, const uint16_t nb_cpls, uint16_t *last_idx, bool *has_error)
int rte_dma_dump(int16_t dev_id, FILE *f)
int rte_dma_vchan_setup(int16_t dev_id, uint16_t vchan, const struct rte_dma_vchan_conf *conf)
int16_t rte_dma_next_dev(int16_t start_dev_id)
static int rte_dma_submit(int16_t dev_id, uint16_t vchan)
static uint16_t rte_dma_completed_status(int16_t dev_id, uint16_t vchan, const uint16_t nb_cpls, uint16_t *last_idx, enum rte_dma_status_code *status)
int rte_dma_stats_get(int16_t dev_id, uint16_t vchan, struct rte_dma_stats *stats)
bool rte_dma_is_valid(int16_t dev_id)
static int rte_dma_fill(int16_t dev_id, uint16_t vchan, uint64_t pattern, rte_iova_t dst, uint32_t length, uint64_t flags)
@ RTE_DMA_VCHAN_HALTED_ERROR
static int rte_dma_copy_sg(int16_t dev_id, uint16_t vchan, struct rte_dma_sge *src, struct rte_dma_sge *dst, uint16_t nb_src, uint16_t nb_dst, uint64_t flags)
struct rte_mempool * pool
enum rte_dma_port_type port_type
__extension__ union rte_dma_port_param::@135::@137 pcie
enum rte_dma_direction direction
struct rte_dma_auto_free_param auto_free
struct rte_dma_port_param src_port
struct rte_dma_port_param dst_port