#include <rte_compat.h>

#define RTE_DMADEV_DEFAULT_MAX 64	/* Maximum number of DMA devices if rte_dma_dev_max() is never called. */
/* Iterate over all probed DMA devices (p is an int16_t device ID). */
#define RTE_DMA_FOREACH_DEV(p) \
	for (p = rte_dma_next_dev(0); \
	     p >= 0; \
	     p = rte_dma_next_dev(p + 1))
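A minimal sketch of how the iteration macro and the device-count helpers fit together, assuming the EAL is already initialized; list_dma_devices() is a hypothetical helper name, and dev_name is read from struct rte_dma_info:

#include <stdio.h>
#include <rte_dmadev.h>

static void
list_dma_devices(void)
{
	struct rte_dma_info info;
	int16_t dev_id;

	printf("%u DMA device(s) available\n", (unsigned int)rte_dma_count_avail());
	RTE_DMA_FOREACH_DEV(dev_id) {
		if (rte_dma_info_get(dev_id, &info) == 0)
			printf("  id %d: %s\n", dev_id, info.dev_name);
	}
}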
/* Device capability flags, reported in the dev_capa field of struct rte_dma_info: */
#define RTE_DMA_CAPA_MEM_TO_MEM		RTE_BIT64(0)	/* Memory-to-memory transfer. */
#define RTE_DMA_CAPA_MEM_TO_DEV		RTE_BIT64(1)	/* Memory-to-device transfer. */
#define RTE_DMA_CAPA_DEV_TO_MEM		RTE_BIT64(2)	/* Device-to-memory transfer. */
#define RTE_DMA_CAPA_DEV_TO_DEV		RTE_BIT64(3)	/* Device-to-device transfer. */
#define RTE_DMA_CAPA_SVA		RTE_BIT64(4)	/* Shared virtual addressing: VAs usable as DMA addresses. */
#define RTE_DMA_CAPA_SILENT		RTE_BIT64(5)	/* Silent mode: completions are not reported to the application. */
#define RTE_DMA_CAPA_HANDLES_ERRORS	RTE_BIT64(6)	/* Failed operations are reported and later operations may continue. */
#define RTE_DMA_CAPA_OPS_COPY		RTE_BIT64(32)	/* Copy operation supported. */
#define RTE_DMA_CAPA_OPS_COPY_SG	RTE_BIT64(33)	/* Scatter-gather copy operation supported. */
#define RTE_DMA_CAPA_OPS_FILL		RTE_BIT64(34)	/* Fill operation supported. */
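A small sketch of testing these bits before setting a device up; dma_dev_supports_copy() is a hypothetical helper that reads dev_capa through rte_dma_info_get():

#include <rte_dmadev.h>

static int
dma_dev_supports_copy(int16_t dev_id)
{
	struct rte_dma_info info;

	if (rte_dma_info_get(dev_id, &info) != 0)
		return 0;
	/* Plain memory-to-memory copies need both the direction and the op. */
	return (info.dev_capa & RTE_DMA_CAPA_MEM_TO_MEM) &&
	       (info.dev_capa & RTE_DMA_CAPA_OPS_COPY);
}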
#define RTE_DMA_ALL_VCHAN 0xFFFFu	/* Wildcard vchan ID for APIs such as rte_dma_stats_get(): apply to all virtual channels. */
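A sketch of reading and clearing the aggregate counters of every virtual channel at once with the wildcard ID (dma_dump_stats() is an invented name):

#include <inttypes.h>
#include <stdio.h>
#include <rte_dmadev.h>

static void
dma_dump_stats(int16_t dev_id)
{
	struct rte_dma_stats stats;

	if (rte_dma_stats_get(dev_id, RTE_DMA_ALL_VCHAN, &stats) == 0)
		printf("submitted=%" PRIu64 " completed=%" PRIu64 " errors=%" PRIu64 "\n",
		       stats.submitted, stats.completed, stats.errors);
	rte_dma_stats_reset(dev_id, RTE_DMA_ALL_VCHAN);
}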
/* Per-operation flags accepted by the enqueue functions: */
#define RTE_DMA_OP_FLAG_FENCE	RTE_BIT64(0)	/* Process this operation only after all previous ones complete. */
#define RTE_DMA_OP_FLAG_SUBMIT	RTE_BIT64(1)	/* Ring the doorbell: hand the enqueued operations to hardware. */
#define RTE_DMA_OP_FLAG_LLC	RTE_BIT64(2)	/* Hint: write destination data into the last-level cache. */
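A hedged sketch of combining the flags: the second copy below is fenced behind the first and also rings the doorbell, so no separate rte_dma_submit() call is needed. enqueue_fenced_pair() is an invented helper and the addresses are assumed to be valid IOVAs:

#include <rte_dmadev.h>

static int
enqueue_fenced_pair(int16_t dev_id, uint16_t vchan,
		    rte_iova_t src, rte_iova_t mid, rte_iova_t dst, uint32_t len)
{
	int ret;

	ret = rte_dma_copy(dev_id, vchan, src, mid, len, 0);
	if (ret < 0)
		return ret;
	/* Fenced: starts only after the first copy; SUBMIT: doorbell now. */
	return rte_dma_copy(dev_id, vchan, mid, dst, len,
			    RTE_DMA_OP_FLAG_FENCE | RTE_DMA_OP_FLAG_SUBMIT);
}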
/* Enqueue a copy operation; returns the ring index (0..UINT16_MAX) or a negative errno. */
static inline int
rte_dma_copy(int16_t dev_id, uint16_t vchan, rte_iova_t src, rte_iova_t dst,
	     uint32_t length, uint64_t flags)
{
	struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
#ifdef RTE_DMADEV_DEBUG
	if (!rte_dma_is_valid(dev_id) || length == 0)
		return -EINVAL;
	RTE_FUNC_PTR_OR_ERR_RET(*obj->copy, -ENOTSUP);
#endif
	return (*obj->copy)(obj->dev_private, vchan, src, dst, length, flags);
}
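On success the enqueue returns a ring index; -ENOSPC indicates the descriptor ring is full. A minimal sketch of handling that case, where copy_with_retry() is a hypothetical helper:

#include <errno.h>
#include <rte_dmadev.h>

static int
copy_with_retry(int16_t dev_id, uint16_t vchan,
		rte_iova_t src, rte_iova_t dst, uint32_t len)
{
	int idx = rte_dma_copy(dev_id, vchan, src, dst, len, 0);

	if (idx == -ENOSPC) {
		/* Push pending work to hardware, reclaim finished slots, retry once. */
		rte_dma_submit(dev_id, vchan);
		rte_dma_completed(dev_id, vchan, 32, NULL, NULL);
		idx = rte_dma_copy(dev_id, vchan, src, dst, len, 0);
	}
	return idx;	/* ring index (>= 0) or a negative errno value */
}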
/* Enqueue a scatter-gather copy; src and dst point to arrays of rte_dma_sge. */
static inline int
rte_dma_copy_sg(int16_t dev_id, uint16_t vchan, struct rte_dma_sge *src,
		struct rte_dma_sge *dst, uint16_t nb_src, uint16_t nb_dst,
		uint64_t flags)
{
	struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
#ifdef RTE_DMADEV_DEBUG
	if (!rte_dma_is_valid(dev_id) || src == NULL || dst == NULL ||
	    nb_src == 0 || nb_dst == 0)
		return -EINVAL;
	RTE_FUNC_PTR_OR_ERR_RET(*obj->copy_sg, -ENOTSUP);
#endif
	return (*obj->copy_sg)(obj->dev_private, vchan, src, dst, nb_src,
			       nb_dst, flags);
}
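A short sketch of building the scatter-gather lists; copy_two_segments() is a hypothetical helper and the IOVAs are assumed to be valid DMA addresses:

#include <rte_dmadev.h>

static int
copy_two_segments(int16_t dev_id, uint16_t vchan,
		  rte_iova_t seg0, rte_iova_t seg1, uint32_t seg_len,
		  rte_iova_t dst)
{
	/* Gather two source segments into one contiguous destination buffer. */
	struct rte_dma_sge src[2] = {
		{ .addr = seg0, .length = seg_len },
		{ .addr = seg1, .length = seg_len },
	};
	struct rte_dma_sge dst_sge = { .addr = dst, .length = 2 * seg_len };

	return rte_dma_copy_sg(dev_id, vchan, src, &dst_sge, 2, 1,
			       RTE_DMA_OP_FLAG_SUBMIT);
}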
/* Enqueue a fill operation: replicate a 64-bit pattern into the destination. */
static inline int
rte_dma_fill(int16_t dev_id, uint16_t vchan, uint64_t pattern,
	     rte_iova_t dst, uint32_t length, uint64_t flags)
{
	struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
#ifdef RTE_DMADEV_DEBUG
	if (!rte_dma_is_valid(dev_id) || length == 0)
		return -EINVAL;
	RTE_FUNC_PTR_OR_ERR_RET(*obj->fill, -ENOTSUP);
#endif
	return (*obj->fill)(obj->dev_private, vchan, pattern, dst, length,
			    flags);
}
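A one-call sketch of using the fill engine to zero a buffer; dma_zero_buffer() is an invented name:

#include <rte_dmadev.h>

static int
dma_zero_buffer(int16_t dev_id, uint16_t vchan, rte_iova_t dst, uint32_t len)
{
	/* Pattern 0 repeated across the destination; doorbell rung immediately. */
	return rte_dma_fill(dev_id, vchan, 0, dst, len, RTE_DMA_OP_FLAG_SUBMIT);
}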
/* Ring the doorbell: hand all previously enqueued operations to the hardware. */
static inline int
rte_dma_submit(int16_t dev_id, uint16_t vchan)
{
	struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
#ifdef RTE_DMADEV_DEBUG
	if (!rte_dma_is_valid(dev_id))
		return -EINVAL;
	RTE_FUNC_PTR_OR_ERR_RET(*obj->submit, -ENOTSUP);
#endif
	return (*obj->submit)(obj->dev_private, vchan);
}
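The usual pattern is to enqueue a batch with flags of 0 and issue a single doorbell at the end; copy_burst() below is a hypothetical helper illustrating that:

#include <rte_dmadev.h>

static int
copy_burst(int16_t dev_id, uint16_t vchan, const rte_iova_t *src,
	   const rte_iova_t *dst, uint32_t len, uint16_t n)
{
	uint16_t i;

	for (i = 0; i < n; i++)
		if (rte_dma_copy(dev_id, vchan, src[i], dst[i], len, 0) < 0)
			break;
	rte_dma_submit(dev_id, vchan);	/* one doorbell for the whole batch */
	return i;			/* number of operations actually enqueued */
}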
/* Return the number of operations that completed successfully; *has_error is set if a failure was seen. */
static inline uint16_t
rte_dma_completed(int16_t dev_id, uint16_t vchan, const uint16_t nb_cpls,
		  uint16_t *last_idx, bool *has_error)
{
	struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
	uint16_t idx;
	bool err;

#ifdef RTE_DMADEV_DEBUG
	if (!rte_dma_is_valid(dev_id) || nb_cpls == 0)
		return 0;
	RTE_FUNC_PTR_OR_ERR_RET(*obj->completed, 0);
#endif
	/* Substitute local storage so drivers never see NULL output pointers. */
	if (last_idx == NULL)
		last_idx = &idx;
	if (has_error == NULL)
		has_error = &err;

	*has_error = false;
	return (*obj->completed)(obj->dev_private, vchan, nb_cpls, last_idx,
				 has_error);
}
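A busy-poll sketch, assuming work has already been submitted on the channel; wait_for_completion() and the batch size of 8 are invented for illustration:

#include <stdbool.h>
#include <rte_dmadev.h>

static uint16_t
wait_for_completion(int16_t dev_id, uint16_t vchan, bool *error)
{
	uint16_t nb, last_idx = 0;

	/* Spin until at least one operation finishes or an error is reported. */
	do {
		nb = rte_dma_completed(dev_id, vchan, 8, &last_idx, error);
	} while (nb == 0 && !*error);
	return last_idx;	/* ring index of the last completed operation */
}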
/* Like rte_dma_completed(), but also fills a per-operation status code array. */
static inline uint16_t
rte_dma_completed_status(int16_t dev_id, uint16_t vchan,
			 const uint16_t nb_cpls, uint16_t *last_idx,
			 enum rte_dma_status_code *status)
{
	struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
	uint16_t idx;

#ifdef RTE_DMADEV_DEBUG
	if (!rte_dma_is_valid(dev_id) || nb_cpls == 0 || status == NULL)
		return 0;
	RTE_FUNC_PTR_OR_ERR_RET(*obj->completed_status, 0);
#endif
	if (last_idx == NULL)
		last_idx = &idx;
	return (*obj->completed_status)(obj->dev_private, vchan, nb_cpls,
					last_idx, status);
}
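A sketch of draining a channel with per-operation status after an error was signalled; drain_with_status() and the batch size of 8 are arbitrary choices:

#include <stdio.h>
#include <rte_dmadev.h>

static void
drain_with_status(int16_t dev_id, uint16_t vchan)
{
	enum rte_dma_status_code status[8];
	uint16_t i, last_idx, nb;

	nb = rte_dma_completed_status(dev_id, vchan, 8, &last_idx, status);
	for (i = 0; i < nb; i++)
		if (status[i] != RTE_DMA_STATUS_SUCCESSFUL)
			printf("op %u failed with status %d\n", i, (int)status[i]);
}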
/* Number of operations that can still be enqueued on the virtual channel's ring. */
static inline uint16_t
rte_dma_burst_capacity(int16_t dev_id, uint16_t vchan)
{
	struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
#ifdef RTE_DMADEV_DEBUG
	if (!rte_dma_is_valid(dev_id))
		return 0;
	RTE_FUNC_PTR_OR_ERR_RET(*obj->burst_capacity, 0);
#endif
	return (*obj->burst_capacity)(obj->dev_private, vchan);
}
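A trivial sketch of gating a burst on the remaining ring space; can_enqueue_burst() is an invented name:

#include <rte_dmadev.h>

static int
can_enqueue_burst(int16_t dev_id, uint16_t vchan, uint16_t burst_size)
{
	/* Non-zero if the whole burst fits without hitting -ENOSPC. */
	return rte_dma_burst_capacity(dev_id, vchan) >= burst_size;
}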
/* Control-path API (all functions are __rte_experimental): */
__rte_experimental int rte_dma_dev_max(size_t dev_max);
__rte_experimental int rte_dma_get_dev_id_by_name(const char *name);
__rte_experimental bool rte_dma_is_valid(int16_t dev_id);
__rte_experimental uint16_t rte_dma_count_avail(void);
__rte_experimental int16_t rte_dma_next_dev(int16_t start_dev_id);
__rte_experimental int rte_dma_info_get(int16_t dev_id, struct rte_dma_info *dev_info);
__rte_experimental int rte_dma_configure(int16_t dev_id, const struct rte_dma_conf *dev_conf);
__rte_experimental int rte_dma_start(int16_t dev_id);
__rte_experimental int rte_dma_stop(int16_t dev_id);
__rte_experimental int rte_dma_close(int16_t dev_id);
__rte_experimental int rte_dma_vchan_setup(int16_t dev_id, uint16_t vchan, const struct rte_dma_vchan_conf *conf);
__rte_experimental int rte_dma_stats_get(int16_t dev_id, uint16_t vchan, struct rte_dma_stats *stats);
__rte_experimental int rte_dma_stats_reset(int16_t dev_id, uint16_t vchan);
__rte_experimental int rte_dma_dump(int16_t dev_id, FILE *f);

/* Fast-path API (static inline wrappers, defined above): */
static __rte_experimental int rte_dma_copy(int16_t dev_id, uint16_t vchan, rte_iova_t src, rte_iova_t dst, uint32_t length, uint64_t flags);
static __rte_experimental int rte_dma_copy_sg(int16_t dev_id, uint16_t vchan, struct rte_dma_sge *src, struct rte_dma_sge *dst, uint16_t nb_src, uint16_t nb_dst, uint64_t flags);
static __rte_experimental int rte_dma_fill(int16_t dev_id, uint16_t vchan, uint64_t pattern, rte_iova_t dst, uint32_t length, uint64_t flags);
static __rte_experimental int rte_dma_submit(int16_t dev_id, uint16_t vchan);
static __rte_experimental uint16_t rte_dma_completed(int16_t dev_id, uint16_t vchan, const uint16_t nb_cpls, uint16_t *last_idx, bool *has_error);
static __rte_experimental uint16_t rte_dma_completed_status(int16_t dev_id, uint16_t vchan, const uint16_t nb_cpls, uint16_t *last_idx, enum rte_dma_status_code *status);
static __rte_experimental uint16_t rte_dma_burst_capacity(int16_t dev_id, uint16_t vchan);

/* enum rte_dma_status_code values (per-operation completion status): */
RTE_DMA_STATUS_SUCCESSFUL
RTE_DMA_STATUS_USER_ABORT
RTE_DMA_STATUS_NOT_ATTEMPTED
RTE_DMA_STATUS_INVALID_SRC_ADDR
RTE_DMA_STATUS_INVALID_DST_ADDR
RTE_DMA_STATUS_INVALID_ADDR
RTE_DMA_STATUS_INVALID_LENGTH
RTE_DMA_STATUS_INVALID_OPCODE
RTE_DMA_STATUS_BUS_READ_ERROR
RTE_DMA_STATUS_BUS_WRITE_ERROR
RTE_DMA_STATUS_BUS_ERROR
RTE_DMA_STATUS_DATA_POISION
RTE_DMA_STATUS_DESCRIPTOR_READ_ERROR
RTE_DMA_STATUS_DEV_LINK_ERROR
RTE_DMA_STATUS_PAGE_FAULT
RTE_DMA_STATUS_ERROR_UNKNOWN

/* Virtual DMA channel status value: */
RTE_DMA_VCHAN_HALTED_ERROR
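A hedged end-to-end sketch of the control path above: look the device up by name, configure one mem-to-mem virtual channel, start it, and tear it down. dma_setup_teardown() is an invented helper and nb_desc = 1024 is an arbitrary ring size:

#include <stdio.h>
#include <rte_dmadev.h>

static int
dma_setup_teardown(const char *name)
{
	struct rte_dma_conf conf = { .nb_vchans = 1 };
	struct rte_dma_vchan_conf vconf = {
		.direction = RTE_DMA_DIR_MEM_TO_MEM,
		.nb_desc = 1024,
	};
	int ret = rte_dma_get_dev_id_by_name(name);
	int16_t dev_id;

	if (ret < 0)
		return ret;
	dev_id = (int16_t)ret;

	if (rte_dma_configure(dev_id, &conf) != 0 ||
	    rte_dma_vchan_setup(dev_id, 0, &vconf) != 0 ||
	    rte_dma_start(dev_id) != 0)
		return -1;

	rte_dma_dump(dev_id, stdout);	/* print device state for debugging */

	/* ... fast-path usage would go here ... */

	rte_dma_stop(dev_id);
	return rte_dma_close(dev_id);
}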
/* struct rte_dma_port_param fields: */
enum rte_dma_port_type port_type;	/* Access port type (e.g. RTE_DMA_PORT_PCIE). */
struct { ... } pcie;			/* PCIe access port parameters (member of an anonymous union). */

/* struct rte_dma_vchan_conf fields: */
enum rte_dma_direction direction;	/* Transfer direction of the virtual channel. */
struct rte_dma_port_param src_port;	/* Source port parameters (device-to-memory and device-to-device directions). */
struct rte_dma_port_param dst_port;	/* Destination port parameters (memory-to-device and device-to-device directions). */
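A sketch of filling struct rte_dma_vchan_conf for a memory-to-device channel, assuming the target is reached through a PCIe access port (RTE_DMA_PORT_PCIE); init_mem_to_dev_conf() and nb_desc = 512 are illustrative choices, and the device-specific .pcie routing fields are deliberately left unset:

#include <string.h>
#include <rte_dmadev.h>

static void
init_mem_to_dev_conf(struct rte_dma_vchan_conf *vconf)
{
	memset(vconf, 0, sizeof(*vconf));
	vconf->direction = RTE_DMA_DIR_MEM_TO_DEV;
	vconf->nb_desc = 512;
	vconf->dst_port.port_type = RTE_DMA_PORT_PCIE;
	/* vconf->dst_port.pcie.* (PCIe routing fields) are device-specific
	 * and remain zeroed in this sketch. */
}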