/* Default maximum number of DMA devices, used when rte_dma_dev_max() is not called. */
#define RTE_DMADEV_DEFAULT_MAX 64
/* Utility macro to iterate over all available DMA devices. */
#define RTE_DMA_FOREACH_DEV(p) \
	for (p = rte_dma_next_dev(0); \
	     p != -1; \
	     p = rte_dma_next_dev(p + 1))
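For illustration, a minimal sketch (not part of the header) that uses this macro to walk every probed DMA device and print its name; it assumes EAL is already initialized and <rte_dmadev.h> is available.

#include <stdio.h>
#include <rte_dmadev.h>

/* Print the name and vchan limit of every probed dmadev. */
static void
dump_dma_devices(void)
{
	struct rte_dma_info info;
	int16_t dev_id;

	RTE_DMA_FOREACH_DEV(dev_id) {
		if (rte_dma_info_get(dev_id, &info) == 0)
			printf("dmadev %d: %s, max_vchans=%u\n", dev_id,
			       info.dev_name, (unsigned int)info.max_vchans);
	}
}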
/* DMA device capability flags, reported in rte_dma_info.dev_capa. */
#define RTE_DMA_CAPA_MEM_TO_MEM     RTE_BIT64(0)  /* memory-to-memory transfer */
#define RTE_DMA_CAPA_MEM_TO_DEV     RTE_BIT64(1)  /* memory-to-device transfer */
#define RTE_DMA_CAPA_DEV_TO_MEM     RTE_BIT64(2)  /* device-to-memory transfer */
#define RTE_DMA_CAPA_DEV_TO_DEV     RTE_BIT64(3)  /* device-to-device transfer */
#define RTE_DMA_CAPA_SVA            RTE_BIT64(4)  /* shared virtual addressing: VA usable as IOVA */
#define RTE_DMA_CAPA_SILENT         RTE_BIT64(5)  /* silent mode: completions are not reported */
#define RTE_DMA_CAPA_HANDLES_ERRORS RTE_BIT64(6)  /* device keeps working after a transfer error */
#define RTE_DMA_CAPA_M2D_AUTO_FREE  RTE_BIT64(7)  /* auto-free of source buffer for mem-to-dev */
#define RTE_DMA_CAPA_OPS_COPY       RTE_BIT64(32) /* copy operation supported */
#define RTE_DMA_CAPA_OPS_COPY_SG    RTE_BIT64(33) /* scatter-gather copy supported */
#define RTE_DMA_CAPA_OPS_FILL       RTE_BIT64(34) /* fill operation supported */
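These capability bits are typically checked against rte_dma_info.dev_capa before configuring a device. A hedged sketch; the helper name and the mem-to-mem/SVA requirements are chosen for illustration only.

#include <rte_dmadev.h>

/* Return 0 if the device supports mem-to-mem copies with SVA, else -1. */
static int
check_m2m_sva(int16_t dev_id)
{
	struct rte_dma_info info;

	if (rte_dma_info_get(dev_id, &info) != 0)
		return -1;
	if ((info.dev_capa & RTE_DMA_CAPA_MEM_TO_MEM) == 0 ||
	    (info.dev_capa & RTE_DMA_CAPA_OPS_COPY) == 0)
		return -1;
	/* SVA lets virtual addresses be passed directly as IOVAs. */
	return (info.dev_capa & RTE_DMA_CAPA_SVA) ? 0 : -1;
}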
/* Wildcard vchan id: apply the call (e.g. rte_dma_stats_get/reset) to all virtual channels. */
#define RTE_DMA_ALL_VCHAN 0xFFFFu
/* Per-operation flags for the enqueue functions below. */
#define RTE_DMA_OP_FLAG_FENCE     RTE_BIT64(0) /* process only after all previous ops complete */
#define RTE_DMA_OP_FLAG_SUBMIT    RTE_BIT64(1) /* ring the doorbell as part of this enqueue */
#define RTE_DMA_OP_FLAG_LLC       RTE_BIT64(2) /* hint: write data into the last-level cache */
#define RTE_DMA_OP_FLAG_AUTO_FREE RTE_BIT64(3) /* free the source buffer once the transfer completes */
static inline int
rte_dma_copy(int16_t dev_id, uint16_t vchan, rte_iova_t src, rte_iova_t dst,
	     uint32_t length, uint64_t flags)
{
	struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];

#ifdef RTE_DMADEV_DEBUG
	if (!rte_dma_is_valid(dev_id) || length == 0)
		return -EINVAL;
	if (*obj->copy == NULL)
		return -ENOTSUP;
#endif
	return (*obj->copy)(obj->dev_private, vchan, src, dst, length, flags);
}
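A usage sketch, assuming a device and vchan already configured and started and buffers allocated with rte_malloc(); the helper name is illustrative, and RTE_DMA_OP_FLAG_SUBMIT rings the doorbell as part of the enqueue.

#include <rte_malloc.h>
#include <rte_dmadev.h>

/* Enqueue one copy and ring the doorbell in the same call. */
static int
enqueue_one_copy(int16_t dev_id, uint16_t vchan,
		 void *src, void *dst, uint32_t len)
{
	int ret;

	ret = rte_dma_copy(dev_id, vchan,
			   rte_malloc_virt2iova(src),
			   rte_malloc_virt2iova(dst),
			   len, RTE_DMA_OP_FLAG_SUBMIT);
	/* On success ret is the ring index of the job, negative errno on failure. */
	return ret < 0 ? ret : 0;
}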
static inline int
rte_dma_copy_sg(int16_t dev_id, uint16_t vchan, struct rte_dma_sge *src,
		struct rte_dma_sge *dst, uint16_t nb_src, uint16_t nb_dst,
		uint64_t flags)
{
	struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];

#ifdef RTE_DMADEV_DEBUG
	if (!rte_dma_is_valid(dev_id) || src == NULL || dst == NULL ||
	    nb_src == 0 || nb_dst == 0)
		return -EINVAL;
	if (*obj->copy_sg == NULL)
		return -ENOTSUP;
#endif
	return (*obj->copy_sg)(obj->dev_private, vchan, src, dst, nb_src,
			       nb_dst, flags);
}
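A scatter-gather sketch under the same assumptions; the two-segment layout is illustrative, and segment counts must stay within the max_sges limit reported by rte_dma_info_get().

#include <rte_dmadev.h>

/* Gather two source segments into one destination segment. */
static int
enqueue_gather2(int16_t dev_id, uint16_t vchan,
		rte_iova_t src0, rte_iova_t src1, uint32_t seg_len,
		rte_iova_t dst)
{
	struct rte_dma_sge src[2] = {
		{ .addr = src0, .length = seg_len },
		{ .addr = src1, .length = seg_len },
	};
	struct rte_dma_sge dst_sge = { .addr = dst, .length = 2 * seg_len };

	return rte_dma_copy_sg(dev_id, vchan, src, &dst_sge, 2, 1,
			       RTE_DMA_OP_FLAG_SUBMIT);
}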
static inline int
rte_dma_fill(int16_t dev_id, uint16_t vchan, uint64_t pattern,
	     rte_iova_t dst, uint32_t length, uint64_t flags)
{
	struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];

#ifdef RTE_DMADEV_DEBUG
	if (!rte_dma_is_valid(dev_id) || length == 0)
		return -EINVAL;
	if (*obj->fill == NULL)
		return -ENOTSUP;
#endif
	return (*obj->fill)(obj->dev_private, vchan, pattern, dst, length,
			    flags);
}
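A short illustrative wrapper around rte_dma_fill() that zeroes a region; the 64-bit pattern is replicated across the destination.

#include <rte_dmadev.h>

/* Zero 'len' bytes at IOVA 'dst' using the fill operation. */
static int
dma_zero(int16_t dev_id, uint16_t vchan, rte_iova_t dst, uint32_t len)
{
	return rte_dma_fill(dev_id, vchan, 0 /* pattern */, dst, len,
			    RTE_DMA_OP_FLAG_SUBMIT);
}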
static inline int
rte_dma_submit(int16_t dev_id, uint16_t vchan)
{
	struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];

#ifdef RTE_DMADEV_DEBUG
	if (!rte_dma_is_valid(dev_id))
		return -EINVAL;
	if (*obj->submit == NULL)
		return -ENOTSUP;
#endif
	return (*obj->submit)(obj->dev_private, vchan);
}
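A sketch of the batched pattern rte_dma_submit() enables: enqueue several copies without RTE_DMA_OP_FLAG_SUBMIT, then ring the doorbell once for the whole batch. Array names and sizes are illustrative.

#include <rte_dmadev.h>

/* Enqueue up to 'n' copies, then ring the doorbell once. */
static int
enqueue_batch(int16_t dev_id, uint16_t vchan,
	      const rte_iova_t *src, const rte_iova_t *dst,
	      uint32_t len, uint16_t n)
{
	uint16_t i;
	int ret;

	for (i = 0; i < n; i++) {
		ret = rte_dma_copy(dev_id, vchan, src[i], dst[i], len, 0);
		if (ret < 0)
			break;          /* ring full or other error */
	}
	if (i == 0)
		return -1;
	ret = rte_dma_submit(dev_id, vchan); /* one doorbell for all enqueued jobs */
	return ret < 0 ? ret : i;
}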
static inline uint16_t
rte_dma_completed(int16_t dev_id, uint16_t vchan, const uint16_t nb_cpls,
		  uint16_t *last_idx, bool *has_error)
{
	struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
	uint16_t idx;
	bool err;

#ifdef RTE_DMADEV_DEBUG
	if (!rte_dma_is_valid(dev_id) || nb_cpls == 0)
		return 0;
	if (*obj->completed == NULL)
		return 0;
#endif
	/* Point NULL arguments at local storage so drivers never have to
	 * check them on the fast path.
	 */
	if (last_idx == NULL)
		last_idx = &idx;
	if (has_error == NULL)
		has_error = &err;
	*has_error = false;
	return (*obj->completed)(obj->dev_private, vchan, nb_cpls, last_idx,
				 has_error);
}
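A polling sketch built on rte_dma_completed(); it assumes jobs were enqueued as above and leaves error recovery to rte_dma_completed_status(), shown next. Real code would bound the wait rather than spin indefinitely.

#include <stdbool.h>
#include <rte_dmadev.h>

/* Poll until 'expected' jobs have completed or an error is reported. */
static int
wait_for_completions(int16_t dev_id, uint16_t vchan, uint16_t expected)
{
	uint16_t done = 0, last_idx;
	bool has_error = false;

	while (done < expected && !has_error)
		done += rte_dma_completed(dev_id, vchan, expected - done,
					  &last_idx, &has_error);
	return has_error ? -1 : 0;
}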
static inline uint16_t
rte_dma_completed_status(int16_t dev_id, uint16_t vchan,
			 const uint16_t nb_cpls, uint16_t *last_idx,
			 enum rte_dma_status_code *status)
{
	struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
	uint16_t idx;

#ifdef RTE_DMADEV_DEBUG
	if (!rte_dma_is_valid(dev_id) || nb_cpls == 0 || status == NULL)
		return 0;
	if (*obj->completed_status == NULL)
		return 0;
#endif
	if (last_idx == NULL)
		last_idx = &idx;
	return (*obj->completed_status)(obj->dev_private, vchan, nb_cpls,
					last_idx, status);
}
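A sketch of the error path: drain outstanding jobs with rte_dma_completed_status() and count the non-successful status codes; the 32-entry scratch array is an arbitrary illustrative choice.

#include <rte_dmadev.h>

/* Drain up to 32 jobs and count the ones that did not succeed. */
static int
drain_with_status(int16_t dev_id, uint16_t vchan)
{
	enum rte_dma_status_code status[32];
	uint16_t last_idx, n, i;
	int errors = 0;

	n = rte_dma_completed_status(dev_id, vchan, 32, &last_idx, status);
	for (i = 0; i < n; i++)
		if (status[i] != RTE_DMA_STATUS_SUCCESSFUL)
			errors++;
	return errors;
}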
static inline uint16_t
rte_dma_burst_capacity(int16_t dev_id, uint16_t vchan)
{
	struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];

#ifdef RTE_DMADEV_DEBUG
	if (!rte_dma_is_valid(dev_id))
		return 0;
	if (*obj->burst_capacity == NULL)
		return 0;
#endif
	return (*obj->burst_capacity)(obj->dev_private, vchan);
}
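A small illustrative helper that uses rte_dma_burst_capacity() to clamp a batch to the descriptors currently free on the virtual channel.

#include <rte_dmadev.h>

/* Clamp a requested batch size to the free descriptors on the vchan. */
static uint16_t
clamp_batch(int16_t dev_id, uint16_t vchan, uint16_t wanted)
{
	uint16_t room = rte_dma_burst_capacity(dev_id, vchan);

	return wanted < room ? wanted : room;
}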
Control-path functions:
int rte_dma_dev_max(size_t dev_max)
int rte_dma_get_dev_id_by_name(const char *name)
bool rte_dma_is_valid(int16_t dev_id)
uint16_t rte_dma_count_avail(void)
int16_t rte_dma_next_dev(int16_t start_dev_id)
int rte_dma_info_get(int16_t dev_id, struct rte_dma_info *dev_info)
int rte_dma_configure(int16_t dev_id, const struct rte_dma_conf *dev_conf)
int rte_dma_vchan_setup(int16_t dev_id, uint16_t vchan, const struct rte_dma_vchan_conf *conf)
int rte_dma_start(int16_t dev_id)
int rte_dma_stop(int16_t dev_id)
int rte_dma_close(int16_t dev_id)
int rte_dma_stats_get(int16_t dev_id, uint16_t vchan, struct rte_dma_stats *stats)
int rte_dma_stats_reset(int16_t dev_id, uint16_t vchan)
int rte_dma_dump(int16_t dev_id, FILE *f)

Fast-path (static inline) functions:
static int rte_dma_copy(int16_t dev_id, uint16_t vchan, rte_iova_t src, rte_iova_t dst, uint32_t length, uint64_t flags)
static int rte_dma_copy_sg(int16_t dev_id, uint16_t vchan, struct rte_dma_sge *src, struct rte_dma_sge *dst, uint16_t nb_src, uint16_t nb_dst, uint64_t flags)
static int rte_dma_fill(int16_t dev_id, uint16_t vchan, uint64_t pattern, rte_iova_t dst, uint32_t length, uint64_t flags)
static int rte_dma_submit(int16_t dev_id, uint16_t vchan)
static uint16_t rte_dma_completed(int16_t dev_id, uint16_t vchan, const uint16_t nb_cpls, uint16_t *last_idx, bool *has_error)
static uint16_t rte_dma_completed_status(int16_t dev_id, uint16_t vchan, const uint16_t nb_cpls, uint16_t *last_idx, enum rte_dma_status_code *status)
static uint16_t rte_dma_burst_capacity(int16_t dev_id, uint16_t vchan)

enum rte_dma_status_code values (reported per job by rte_dma_completed_status()):
RTE_DMA_STATUS_SUCCESSFUL
RTE_DMA_STATUS_USER_ABORT
RTE_DMA_STATUS_NOT_ATTEMPTED
RTE_DMA_STATUS_INVALID_SRC_ADDR
RTE_DMA_STATUS_INVALID_DST_ADDR
RTE_DMA_STATUS_INVALID_ADDR
RTE_DMA_STATUS_INVALID_LENGTH
RTE_DMA_STATUS_INVALID_OPCODE
RTE_DMA_STATUS_BUS_READ_ERROR
RTE_DMA_STATUS_BUS_WRITE_ERROR
RTE_DMA_STATUS_BUS_ERROR
RTE_DMA_STATUS_DATA_POISION
RTE_DMA_STATUS_DESCRIPTOR_READ_ERROR
RTE_DMA_STATUS_DEV_LINK_ERROR
RTE_DMA_STATUS_PAGE_FAULT
RTE_DMA_STATUS_ERROR_UNKNOWN

enum rte_dma_vchan_status value:
RTE_DMA_VCHAN_HALTED_ERROR
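Tying the control-path functions together, a hedged end-to-end setup sketch: look the device up by name, configure one virtual channel for mem-to-mem copies, and start it. The descriptor count is illustrative; real code should respect the min_desc/max_desc limits from rte_dma_info_get().

#include <rte_dmadev.h>

/* Bring up one mem-to-mem vchan on the named device; returns dev_id or <0. */
static int
setup_m2m_dev(const char *name)
{
	struct rte_dma_conf dev_conf = { .nb_vchans = 1 };
	struct rte_dma_vchan_conf vconf = {
		.direction = RTE_DMA_DIR_MEM_TO_MEM,
		.nb_desc = 1024,            /* illustrative ring size */
	};
	int dev_id = rte_dma_get_dev_id_by_name(name);

	if (dev_id < 0)
		return dev_id;
	if (rte_dma_configure(dev_id, &dev_conf) != 0 ||
	    rte_dma_vchan_setup(dev_id, 0, &vconf) != 0 ||
	    rte_dma_start(dev_id) != 0)
		return -1;
	return dev_id;
}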
struct rte_dma_vchan_conf members:
    enum rte_dma_direction direction
    struct rte_dma_port_param src_port
    struct rte_dma_port_param dst_port
    struct rte_dma_auto_free_param auto_free

struct rte_dma_port_param members:
    enum rte_dma_port_type port_type
    pcie (anonymous struct carrying the PCIe port parameters)

struct rte_dma_auto_free_param member:
    struct rte_mempool *pool (inside the m2d sub-struct)
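A sketch of how these members combine for a memory-to-device virtual channel with automatic source-buffer free; the mempool, ring size and PCIe port details are placeholders, and only devices advertising RTE_DMA_CAPA_MEM_TO_DEV and RTE_DMA_CAPA_M2D_AUTO_FREE accept such a configuration.

#include <rte_dmadev.h>
#include <rte_mempool.h>

/* Illustrative mem-to-dev vchan configuration. */
static int
setup_m2d_vchan(int16_t dev_id, uint16_t vchan, struct rte_mempool *mp)
{
	struct rte_dma_vchan_conf conf = {
		.direction = RTE_DMA_DIR_MEM_TO_DEV,
		.nb_desc = 1024,                 /* illustrative ring size */
		.dst_port = {
			.port_type = RTE_DMA_PORT_PCIE,
			/* the pcie sub-struct holds the target PCIe ids; left zeroed here */
		},
		.auto_free = { .m2d = { .pool = mp } },
	};

	return rte_dma_vchan_setup(dev_id, vchan, &conf);
}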