/** Default maximum number of DMA devices, used when rte_dma_dev_max() is not called. */
#define RTE_DMADEV_DEFAULT_MAX 64
/** Utility macro to iterate over all available DMA devices. */
#define RTE_DMA_FOREACH_DEV(p) \
	for (p = rte_dma_next_dev(0); \
	     p != -1; \
	     p = rte_dma_next_dev(p + 1))
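As a usage sketch, the macro walks every probed dmadev via rte_dma_next_dev(); the name printing below is illustrative only:

#include <stdio.h>
#include <rte_dmadev.h>

/* Print the identifier and name of every probed DMA device. */
static void
list_dma_devices(void)
{
	struct rte_dma_info info;
	int16_t dev_id;

	RTE_DMA_FOREACH_DEV(dev_id) {
		if (rte_dma_info_get(dev_id, &info) == 0)
			printf("dmadev %d: %s\n", dev_id, info.dev_name);
	}
}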
/** Support memory-to-memory transfer. */
#define RTE_DMA_CAPA_MEM_TO_MEM         RTE_BIT64(0)
/** Support memory-to-device transfer. */
#define RTE_DMA_CAPA_MEM_TO_DEV         RTE_BIT64(1)
/** Support device-to-memory transfer. */
#define RTE_DMA_CAPA_DEV_TO_MEM         RTE_BIT64(2)
/** Support device-to-device transfer. */
#define RTE_DMA_CAPA_DEV_TO_DEV         RTE_BIT64(3)
/** Support shared virtual addressing: operations may use virtual addresses directly. */
#define RTE_DMA_CAPA_SVA                RTE_BIT64(4)
/** Support silent mode, in which the application does not poll for completions. */
#define RTE_DMA_CAPA_SILENT             RTE_BIT64(5)
/** Support reporting of transfer errors, allowing operation to continue after an error. */
#define RTE_DMA_CAPA_HANDLES_ERRORS     RTE_BIT64(6)
/** Support automatic freeing of source buffers for mem-to-dev transfers. */
#define RTE_DMA_CAPA_M2D_AUTO_FREE      RTE_BIT64(7)
/** Support strict-priority scheduling policy. */
#define RTE_DMA_CAPA_PRI_POLICY_SP      RTE_BIT64(8)
/** Support copy operation. */
#define RTE_DMA_CAPA_OPS_COPY           RTE_BIT64(32)
/** Support scatter-gather copy operation. */
#define RTE_DMA_CAPA_OPS_COPY_SG        RTE_BIT64(33)
/** Support fill operation. */
#define RTE_DMA_CAPA_OPS_FILL           RTE_BIT64(34)
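These bits are reported in the dev_capa field of struct rte_dma_info. A minimal sketch of gating a scatter-gather fast path on them (the helper name is ours):

#include <stdbool.h>
#include <rte_dmadev.h>

/* True if the device can do mem-to-mem scatter-gather copies. */
static bool
supports_m2m_sg(int16_t dev_id)
{
	struct rte_dma_info info;

	if (rte_dma_info_get(dev_id, &info) != 0)
		return false;
	return (info.dev_capa & RTE_DMA_CAPA_MEM_TO_MEM) != 0 &&
	       (info.dev_capa & RTE_DMA_CAPA_OPS_COPY_SG) != 0;
}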
/** Special vchan ID that applies an operation (e.g. stats get/reset) to all virtual channels of a device. */
#define RTE_DMA_ALL_VCHAN 0xFFFFu
/** Fence flag: this operation must be processed only after all previously enqueued operations have completed. */
#define RTE_DMA_OP_FLAG_FENCE           RTE_BIT64(0)
/** Submit flag: doorbell the hardware to begin processing this and all previously enqueued operations. */
#define RTE_DMA_OP_FLAG_SUBMIT          RTE_BIT64(1)
/** Hint that the written data should be placed in last-level cache. */
#define RTE_DMA_OP_FLAG_LLC             RTE_BIT64(2)
/** Automatically free the source buffer on completion (mem-to-dev, requires RTE_DMA_CAPA_M2D_AUTO_FREE). */
#define RTE_DMA_OP_FLAG_AUTO_FREE       RTE_BIT64(3)
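A sketch of how the flags combine at enqueue time: the fence orders the second copy after the first, and the submit flag doorbells both in one call, so no separate rte_dma_submit() is needed. The IOVAs are caller-supplied placeholders:

#include <rte_dmadev.h>

/* Enqueue two dependent copies (src -> mid -> dst) as one batch. */
static int
enqueue_ordered_pair(int16_t dev_id, uint16_t vchan, rte_iova_t src,
		     rte_iova_t mid, rte_iova_t dst, uint32_t len)
{
	int ret;

	ret = rte_dma_copy(dev_id, vchan, src, mid, len, 0);
	if (ret < 0)
		return ret;
	/* The fenced copy starts only after the first one completes. */
	return rte_dma_copy(dev_id, vchan, mid, dst, len,
			    RTE_DMA_OP_FLAG_FENCE | RTE_DMA_OP_FLAG_SUBMIT);
}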
static inline int
rte_dma_copy(int16_t dev_id, uint16_t vchan, rte_iova_t src, rte_iova_t dst,
	     uint32_t length, uint64_t flags)
{
	struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
	int ret;

#ifdef RTE_DMADEV_DEBUG
	if (!rte_dma_is_valid(dev_id) || length == 0)
		return -EINVAL;
	if (*obj->copy == NULL)
		return -ENOTSUP;
#endif

	ret = (*obj->copy)(obj->dev_private, vchan, src, dst, length, flags);
	rte_dma_trace_copy(dev_id, vchan, src, dst, length, flags, ret);

	return ret;
}
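On success the call returns the ring index of the enqueued job. A minimal synchronous sketch, assuming a started device and configured vchan (the -EIO mapping for a failed job is our choice):

#include <errno.h>
#include <stdbool.h>
#include <rte_dmadev.h>

/* Enqueue one copy, doorbell it, and busy-poll until it finishes. */
static int
copy_and_wait(int16_t dev_id, uint16_t vchan, rte_iova_t src,
	      rte_iova_t dst, uint32_t len)
{
	uint16_t last_idx, done;
	bool has_error = false;
	int idx;

	idx = rte_dma_copy(dev_id, vchan, src, dst, len,
			   RTE_DMA_OP_FLAG_SUBMIT);
	if (idx < 0)
		return idx;
	do {
		done = rte_dma_completed(dev_id, vchan, 1, &last_idx,
					 &has_error);
	} while (done == 0 && !has_error);
	return has_error ? -EIO : 0;
}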
static inline int
rte_dma_copy_sg(int16_t dev_id, uint16_t vchan, struct rte_dma_sge *src,
		struct rte_dma_sge *dst, uint16_t nb_src, uint16_t nb_dst,
		uint64_t flags)
{
	struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
	int ret;

#ifdef RTE_DMADEV_DEBUG
	if (!rte_dma_is_valid(dev_id) || src == NULL || dst == NULL ||
	    nb_src == 0 || nb_dst == 0)
		return -EINVAL;
	if (*obj->copy_sg == NULL)
		return -ENOTSUP;
#endif

	ret = (*obj->copy_sg)(obj->dev_private, vchan, src, dst, nb_src,
			      nb_dst, flags);
	rte_dma_trace_copy_sg(dev_id, vchan, src, dst, nb_src, nb_dst, flags,
			      ret);

	return ret;
}
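A sketch of building the scatter-gather lists: two equal-sized source segments are gathered into one contiguous destination (the segment layout is illustrative):

#include <rte_dmadev.h>

/* Gather seg0 and seg1 into a single destination buffer. */
static int
gather_two(int16_t dev_id, uint16_t vchan, rte_iova_t seg0,
	   rte_iova_t seg1, uint32_t seg_len, rte_iova_t dst)
{
	struct rte_dma_sge src[2] = {
		{ .addr = seg0, .length = seg_len },
		{ .addr = seg1, .length = seg_len },
	};
	struct rte_dma_sge dst_sge = { .addr = dst, .length = 2 * seg_len };

	return rte_dma_copy_sg(dev_id, vchan, src, &dst_sge, 2, 1,
			       RTE_DMA_OP_FLAG_SUBMIT);
}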
static inline int
rte_dma_fill(int16_t dev_id, uint16_t vchan, uint64_t pattern,
	     rte_iova_t dst, uint32_t length, uint64_t flags)
{
	struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
	int ret;

#ifdef RTE_DMADEV_DEBUG
	if (!rte_dma_is_valid(dev_id) || length == 0)
		return -EINVAL;
	if (*obj->fill == NULL)
		return -ENOTSUP;
#endif

	ret = (*obj->fill)(obj->dev_private, vchan, pattern, dst, length,
			   flags);
	rte_dma_trace_fill(dev_id, vchan, pattern, dst, length, flags, ret);

	return ret;
}
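The 64-bit pattern is replicated across the destination, so zeroing a region is a single fill; a one-call sketch:

#include <rte_dmadev.h>

/* Zero a buffer region via a pattern fill. */
static int
dma_zero(int16_t dev_id, uint16_t vchan, rte_iova_t dst, uint32_t len)
{
	return rte_dma_fill(dev_id, vchan, 0, dst, len,
			    RTE_DMA_OP_FLAG_SUBMIT);
}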
static inline int
rte_dma_submit(int16_t dev_id, uint16_t vchan)
{
	struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
	int ret;

#ifdef RTE_DMADEV_DEBUG
	if (!rte_dma_is_valid(dev_id))
		return -EINVAL;
	if (*obj->submit == NULL)
		return -ENOTSUP;
#endif

	ret = (*obj->submit)(obj->dev_private, vchan);
	rte_dma_trace_submit(dev_id, vchan, ret);

	return ret;
}
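The usual pattern is to enqueue a burst without per-operation doorbells and then issue a single submit, which lets the driver ring the hardware once. A hedged sketch:

#include <rte_dmadev.h>

/* Enqueue up to n copies, then doorbell the whole batch at once. */
static int
copy_burst(int16_t dev_id, uint16_t vchan, const rte_iova_t *src,
	   const rte_iova_t *dst, uint32_t len, uint16_t n)
{
	uint16_t i;

	for (i = 0; i < n; i++) {
		if (rte_dma_copy(dev_id, vchan, src[i], dst[i], len, 0) < 0)
			break;	/* ring full or bad argument */
	}
	/* Doorbell whatever was successfully enqueued. */
	return rte_dma_submit(dev_id, vchan);
}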
static inline uint16_t
rte_dma_completed(int16_t dev_id, uint16_t vchan, const uint16_t nb_cpls,
		  uint16_t *last_idx, bool *has_error)
{
	struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
	uint16_t idx, ret;
	bool err;

#ifdef RTE_DMADEV_DEBUG
	if (!rte_dma_is_valid(dev_id) || nb_cpls == 0)
		return 0;
	if (*obj->completed == NULL)
		return 0;
#endif

	/* Ensure the pointer parameters are non-NULL to simplify drivers. */
	if (last_idx == NULL)
		last_idx = &idx;
	if (has_error == NULL)
		has_error = &err;

	*has_error = false;
	ret = (*obj->completed)(obj->dev_private, vchan, nb_cpls, last_idx,
				has_error);
	rte_dma_trace_completed(dev_id, vchan, nb_cpls, last_idx, has_error,
				ret);

	return ret;
}
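A polling-loop sketch; the burst size of 32 is an arbitrary choice. When has_error comes back true, the failed job still occupies the ring and is typically drained with rte_dma_completed_status():

#include <stdbool.h>
#include <rte_dmadev.h>

/* Reap up to 32 finished jobs; *error signals a failed job ahead. */
static uint16_t
reap_completions(int16_t dev_id, uint16_t vchan, bool *error)
{
	uint16_t last_idx;

	*error = false;
	return rte_dma_completed(dev_id, vchan, 32, &last_idx, error);
}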
static inline uint16_t
rte_dma_completed_status(int16_t dev_id, uint16_t vchan,
			 const uint16_t nb_cpls, uint16_t *last_idx,
			 enum rte_dma_status_code *status)
{
	struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
	uint16_t idx, ret;

#ifdef RTE_DMADEV_DEBUG
	if (!rte_dma_is_valid(dev_id) || nb_cpls == 0 || status == NULL)
		return 0;
	if (*obj->completed_status == NULL)
		return 0;
#endif

	if (last_idx == NULL)
		last_idx = &idx;

	ret = (*obj->completed_status)(obj->dev_private, vchan, nb_cpls,
				       last_idx, status);
	rte_dma_trace_completed_status(dev_id, vchan, nb_cpls, last_idx, status,
				       ret);

	return ret;
}
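A sketch of the slower status-aware drain, again with an arbitrary burst of 32; each entry of the status array maps to one reaped job:

#include <rte_dmadev.h>

/* Drain completions including failures; count the failed ones. */
static uint16_t
reap_with_status(int16_t dev_id, uint16_t vchan, uint16_t *nb_errors)
{
	enum rte_dma_status_code status[32];
	uint16_t last_idx, nb_done, i;

	nb_done = rte_dma_completed_status(dev_id, vchan, 32, &last_idx,
					   status);
	*nb_errors = 0;
	for (i = 0; i < nb_done; i++) {
		if (status[i] != RTE_DMA_STATUS_SUCCESSFUL)
			(*nb_errors)++;
	}
	return nb_done;
}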
static inline uint16_t
rte_dma_burst_capacity(int16_t dev_id, uint16_t vchan)
{
	struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
	uint16_t ret;

#ifdef RTE_DMADEV_DEBUG
	if (!rte_dma_is_valid(dev_id))
		return 0;
	if (*obj->burst_capacity == NULL)
		return 0;
#endif

	ret = (*obj->burst_capacity)(obj->dev_private, vchan);
	rte_dma_trace_burst_capacity(dev_id, vchan, ret);

	return ret;
}
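A sketch of using the remaining capacity to avoid enqueueing into a full ring (the clamp-then-submit structure is our convention):

#include <rte_dmadev.h>

/* Enqueue at most as many copies as the ring currently has room for. */
static uint16_t
enqueue_up_to_capacity(int16_t dev_id, uint16_t vchan,
		       const rte_iova_t *src, const rte_iova_t *dst,
		       uint32_t len, uint16_t n)
{
	uint16_t room = rte_dma_burst_capacity(dev_id, vchan);
	uint16_t todo = n < room ? n : room;
	uint16_t i;

	for (i = 0; i < todo; i++) {
		if (rte_dma_copy(dev_id, vchan, src[i], dst[i], len, 0) < 0)
			break;
	}
	if (i > 0)
		rte_dma_submit(dev_id, vchan);
	return i;
}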
Device management and control-path functions:

int rte_dma_dev_max(size_t dev_max)
int rte_dma_get_dev_id_by_name(const char *name)
bool rte_dma_is_valid(int16_t dev_id)
uint16_t rte_dma_count_avail(void)
int16_t rte_dma_next_dev(int16_t start_dev_id)
int rte_dma_info_get(int16_t dev_id, struct rte_dma_info *dev_info)
int rte_dma_configure(int16_t dev_id, const struct rte_dma_conf *dev_conf)
int rte_dma_vchan_setup(int16_t dev_id, uint16_t vchan, const struct rte_dma_vchan_conf *conf)
int rte_dma_start(int16_t dev_id)
int rte_dma_stop(int16_t dev_id)
int rte_dma_close(int16_t dev_id)
int rte_dma_dump(int16_t dev_id, FILE *f)
int rte_dma_stats_get(int16_t dev_id, uint16_t vchan, struct rte_dma_stats *stats)
int rte_dma_stats_reset(int16_t dev_id, uint16_t vchan)

Data-path (static inline) functions:

static int rte_dma_copy(int16_t dev_id, uint16_t vchan, rte_iova_t src, rte_iova_t dst, uint32_t length, uint64_t flags)
static int rte_dma_copy_sg(int16_t dev_id, uint16_t vchan, struct rte_dma_sge *src, struct rte_dma_sge *dst, uint16_t nb_src, uint16_t nb_dst, uint64_t flags)
static int rte_dma_fill(int16_t dev_id, uint16_t vchan, uint64_t pattern, rte_iova_t dst, uint32_t length, uint64_t flags)
static int rte_dma_submit(int16_t dev_id, uint16_t vchan)
static uint16_t rte_dma_completed(int16_t dev_id, uint16_t vchan, const uint16_t nb_cpls, uint16_t *last_idx, bool *has_error)
static uint16_t rte_dma_completed_status(int16_t dev_id, uint16_t vchan, const uint16_t nb_cpls, uint16_t *last_idx, enum rte_dma_status_code *status)
static uint16_t rte_dma_burst_capacity(int16_t dev_id, uint16_t vchan)

enum rte_dma_status_code values (reported by rte_dma_completed_status()):

RTE_DMA_STATUS_SUCCESSFUL
RTE_DMA_STATUS_USER_ABORT
RTE_DMA_STATUS_NOT_ATTEMPTED
RTE_DMA_STATUS_INVALID_SRC_ADDR
RTE_DMA_STATUS_INVALID_DST_ADDR
RTE_DMA_STATUS_INVALID_ADDR
RTE_DMA_STATUS_INVALID_LENGTH
RTE_DMA_STATUS_INVALID_OPCODE
RTE_DMA_STATUS_BUS_READ_ERROR
RTE_DMA_STATUS_BUS_WRITE_ERROR
RTE_DMA_STATUS_BUS_ERROR
RTE_DMA_STATUS_DATA_POISION
RTE_DMA_STATUS_DESCRIPTOR_READ_ERROR
RTE_DMA_STATUS_DEV_LINK_ERROR
RTE_DMA_STATUS_PAGE_FAULT
RTE_DMA_STATUS_ERROR_UNKNOWN

enum rte_dma_vchan_status value:

RTE_DMA_VCHAN_HALTED_ERROR
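The control-path calls above compose into the usual bring-up order: configure, set up each vchan, then start. A minimal sketch for one mem-to-mem channel (nb_desc of 1024 is illustrative); teardown reverses the order with rte_dma_stop() and rte_dma_close():

#include <rte_dmadev.h>

/* Configure a device with one mem-to-mem vchan and start it. */
static int
dma_setup(int16_t dev_id)
{
	struct rte_dma_conf dev_conf = { .nb_vchans = 1 };
	struct rte_dma_vchan_conf vchan_conf = {
		.direction = RTE_DMA_DIR_MEM_TO_MEM,
		.nb_desc = 1024,
	};
	int ret;

	ret = rte_dma_configure(dev_id, &dev_conf);
	if (ret < 0)
		return ret;
	ret = rte_dma_vchan_setup(dev_id, 0, &vchan_conf);
	if (ret < 0)
		return ret;
	return rte_dma_start(dev_id);
}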
struct rte_dma_vchan_conf members:

enum rte_dma_direction direction
struct rte_dma_port_param src_port
struct rte_dma_port_param dst_port
struct rte_dma_auto_free_param auto_free

struct rte_dma_port_param members:

enum rte_dma_port_type port_type
pcie (anonymous union member carrying the PCIe port parameters)

struct rte_dma_auto_free_param member:

struct rte_mempool *pool (inside the m2d anonymous union member)
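A sketch of how these members fit together for a mem-to-dev channel whose destination sits behind a PCIe port, with source buffers auto-freed to a mempool on completion; the PCIe routing value and descriptor count are placeholders:

#include <string.h>
#include <rte_dmadev.h>
#include <rte_mempool.h>

/* Fill a vchan config for mem-to-dev with auto-free of sources. */
static void
fill_m2d_vchan_conf(struct rte_dma_vchan_conf *conf, struct rte_mempool *mp)
{
	memset(conf, 0, sizeof(*conf));
	conf->direction = RTE_DMA_DIR_MEM_TO_DEV;
	conf->nb_desc = 1024;			/* placeholder ring depth */
	conf->dst_port.port_type = RTE_DMA_PORT_PCIE;
	conf->dst_port.pcie.coreid = 0;		/* placeholder routing id */
	conf->auto_free.m2d.pool = mp;		/* completed sources go here */
}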