DPDK 24.11.0-rc3
rte_dmadev.h
1 /* SPDX-License-Identifier: BSD-3-Clause
2  * Copyright(c) 2021 HiSilicon Limited
3  * Copyright(c) 2021 Intel Corporation
4  * Copyright(c) 2021 Marvell International Ltd
5  * Copyright(c) 2021 SmartShare Systems
6  */
7 
8 #ifndef RTE_DMADEV_H
9 #define RTE_DMADEV_H
10 
147 #include <stdint.h>
148 
149 #include <rte_bitops.h>
150 #include <rte_common.h>
151 
152 #ifdef __cplusplus
153 extern "C" {
154 #endif
155 
157 #define RTE_DMADEV_DEFAULT_MAX 64
158 
171 int rte_dma_dev_max(size_t dev_max);
172 
183 int rte_dma_get_dev_id_by_name(const char *name);
184 
194 bool rte_dma_is_valid(int16_t dev_id);
195 
203 uint16_t rte_dma_count_avail(void);
204 
213 int16_t rte_dma_next_dev(int16_t start_dev_id);
214 
216 #define RTE_DMA_FOREACH_DEV(p) \
217  for (p = rte_dma_next_dev(0); \
218  p != -1; \
219  p = rte_dma_next_dev(p + 1))
220 
221 
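A minimal usage sketch of the lookup and iteration helpers above; pick_dma_device() is an illustrative helper name, not part of the API.

#include <rte_dmadev.h>

/* Look a DMA device up by name; fall back to the first probed device. */
static int16_t
pick_dma_device(const char *name)
{
        int16_t dev_id;
        int ret;

        if (name != NULL) {
                ret = rte_dma_get_dev_id_by_name(name);
                if (ret >= 0)
                        return (int16_t)ret;
        }
        RTE_DMA_FOREACH_DEV(dev_id)
                return dev_id; /* first device reported by the iterator */
        return -1; /* no dmadev probed */
}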
226 #define RTE_DMA_CAPA_MEM_TO_MEM RTE_BIT64(0)
227 
228 #define RTE_DMA_CAPA_MEM_TO_DEV RTE_BIT64(1)
229 
230 #define RTE_DMA_CAPA_DEV_TO_MEM RTE_BIT64(2)
231 
232 #define RTE_DMA_CAPA_DEV_TO_DEV RTE_BIT64(3)
233 
239 #define RTE_DMA_CAPA_SVA RTE_BIT64(4)
240 
245 #define RTE_DMA_CAPA_SILENT RTE_BIT64(5)
246 
253 #define RTE_DMA_CAPA_HANDLES_ERRORS RTE_BIT64(6)
254 
260 #define RTE_DMA_CAPA_M2D_AUTO_FREE RTE_BIT64(7)
261 
267 #define RTE_DMA_CAPA_PRI_POLICY_SP RTE_BIT64(8)
268 
273 #define RTE_DMA_CAPA_OPS_COPY RTE_BIT64(32)
274 
275 #define RTE_DMA_CAPA_OPS_COPY_SG RTE_BIT64(33)
276 
277 #define RTE_DMA_CAPA_OPS_FILL RTE_BIT64(34)
278 
285 struct rte_dma_info {
286  const char *dev_name;
288  uint64_t dev_capa;
290  uint16_t max_vchans;
292  uint16_t max_desc;
294  uint16_t min_desc;
302  uint16_t max_sges;
304  int16_t numa_node;
306  uint16_t nb_vchans;
310  uint16_t nb_priorities;
311 };
312 
325 int rte_dma_info_get(int16_t dev_id, struct rte_dma_info *dev_info);
326 
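A short sketch of how the capability flags are typically tested against dev_capa after rte_dma_info_get(); supports_m2m_sg() is an illustrative helper name.

#include <stdbool.h>
#include <stdint.h>

#include <rte_dmadev.h>

/* True when the device supports mem-to-mem copies and the scatter-gather
 * copy operation; false otherwise, or when the query itself fails.
 */
static bool
supports_m2m_sg(int16_t dev_id)
{
        const uint64_t need = RTE_DMA_CAPA_MEM_TO_MEM | RTE_DMA_CAPA_OPS_COPY_SG;
        struct rte_dma_info info;

        if (rte_dma_info_get(dev_id, &info) != 0)
                return false;
        return (info.dev_capa & need) == need;
}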
332 struct rte_dma_conf {
337  uint16_t nb_vchans;
345  bool enable_silent;
346  /* The priority of the DMA device.
347  * This value should be lower than the field 'nb_priorities' of struct
348  * rte_dma_info, which is obtained from rte_dma_info_get(). If the DMA
349  * device does not support priority scheduling, this value should be zero.
350  *
351  * The lowest value indicates the highest priority and vice versa.
352  */
353  uint16_t priority;
354 };
355 
372 int rte_dma_configure(int16_t dev_id, const struct rte_dma_conf *dev_conf);
373 
386 int rte_dma_start(int16_t dev_id);
387 
399 int rte_dma_stop(int16_t dev_id);
400 
412 int rte_dma_close(int16_t dev_id);
413 
419 enum rte_dma_direction {
452 };
453 
459 enum rte_dma_port_type {
460  RTE_DMA_PORT_NONE,
461  RTE_DMA_PORT_PCIE,
462 };
463 
475  enum rte_dma_port_type port_type;
476  union {
525  __extension__
526  struct {
527  uint64_t coreid : 4;
528  uint64_t pfid : 8;
529  uint64_t vfen : 1;
530  uint64_t vfid : 16;
532  uint64_t pasid : 20;
534  uint64_t attr : 3;
536  uint64_t ph : 2;
538  uint64_t st : 16;
539  } pcie;
540  };
541  uint64_t reserved[2];
542 };
543 
548  union {
549  struct {
557  struct rte_mempool *pool;
558  } m2d;
559  };
561  uint64_t reserved[2];
562 };
563 
574  enum rte_dma_direction direction;
576  uint16_t nb_desc;
584  struct rte_dma_port_param src_port;
592  struct rte_dma_port_param dst_port;
600  struct rte_dma_auto_free_param auto_free;
601 };
602 
618 int rte_dma_vchan_setup(int16_t dev_id, uint16_t vchan,
619  const struct rte_dma_vchan_conf *conf);
620 
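A minimal bring-up sketch using rte_dma_configure(), rte_dma_vchan_setup() and rte_dma_start(). setup_one_m2m_vchan() is an illustrative helper; the descriptor count of 1024 is an arbitrary example value that must fall within the [min_desc, max_desc] range reported by rte_dma_info_get(), and RTE_DMA_DIR_MEM_TO_MEM is an enumerator of enum rte_dma_direction (its values are elided from this listing).

#include <rte_dmadev.h>

/* Configure one mem-to-mem virtual channel and start the device. */
static int
setup_one_m2m_vchan(int16_t dev_id)
{
        const struct rte_dma_conf dev_conf = { .nb_vchans = 1 };
        const struct rte_dma_vchan_conf vchan_conf = {
                .direction = RTE_DMA_DIR_MEM_TO_MEM,
                .nb_desc = 1024, /* example value; keep within min_desc..max_desc */
        };
        int ret;

        ret = rte_dma_configure(dev_id, &dev_conf);
        if (ret != 0)
                return ret;
        ret = rte_dma_vchan_setup(dev_id, 0, &vchan_conf);
        if (ret != 0)
                return ret;
        return rte_dma_start(dev_id);
}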
628  uint64_t submitted;
632  uint64_t completed;
634  uint64_t errors;
635 };
636 
643 #define RTE_DMA_ALL_VCHAN 0xFFFFu
644 
660 int rte_dma_stats_get(int16_t dev_id, uint16_t vchan,
661  struct rte_dma_stats *stats);
662 
675 int rte_dma_stats_reset(int16_t dev_id, uint16_t vchan);
676 
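A small sketch reading and clearing the counters of every vchan on a device via the RTE_DMA_ALL_VCHAN pseudo channel; dump_and_reset_stats() is an illustrative helper name.

#include <inttypes.h>
#include <stdio.h>

#include <rte_dmadev.h>

/* Print the aggregate counters of all vchans on a device, then clear them. */
static void
dump_and_reset_stats(int16_t dev_id)
{
        struct rte_dma_stats stats;

        if (rte_dma_stats_get(dev_id, RTE_DMA_ALL_VCHAN, &stats) == 0)
                printf("submitted %" PRIu64 ", completed %" PRIu64 ", errors %" PRIu64 "\n",
                       stats.submitted, stats.completed, stats.errors);
        rte_dma_stats_reset(dev_id, RTE_DMA_ALL_VCHAN);
}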
683 enum rte_dma_vchan_status {
684  RTE_DMA_VCHAN_IDLE,
685  RTE_DMA_VCHAN_ACTIVE,
686  RTE_DMA_VCHAN_HALTED_ERROR,
687 };
688 
704 int
705 rte_dma_vchan_status(int16_t dev_id, uint16_t vchan, enum rte_dma_vchan_status *status);
706 
718 int rte_dma_dump(int16_t dev_id, FILE *f);
719 
725 enum rte_dma_status_code {
782 };
783 
789 struct rte_dma_sge {
790  rte_iova_t addr;
791  uint32_t length;
792 };
793 
794 #ifdef __cplusplus
795 }
796 #endif
797 
798 #include "rte_dmadev_core.h"
799 #include "rte_dmadev_trace_fp.h"
800 
801 #ifdef __cplusplus
802 extern "C" {
803 #endif
804 
816 #define RTE_DMA_OP_FLAG_FENCE RTE_BIT64(0)
817 
821 #define RTE_DMA_OP_FLAG_SUBMIT RTE_BIT64(1)
822 
826 #define RTE_DMA_OP_FLAG_LLC RTE_BIT64(2)
827 
833 #define RTE_DMA_OP_FLAG_AUTO_FREE RTE_BIT64(3)
834 
862 static inline int
863 rte_dma_copy(int16_t dev_id, uint16_t vchan, rte_iova_t src, rte_iova_t dst,
864  uint32_t length, uint64_t flags)
865 {
866  struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
867  int ret;
868 
869 #ifdef RTE_DMADEV_DEBUG
870  if (!rte_dma_is_valid(dev_id) || length == 0)
871  return -EINVAL;
872  if (*obj->copy == NULL)
873  return -ENOTSUP;
874 #endif
875 
876  ret = (*obj->copy)(obj->dev_private, vchan, src, dst, length, flags);
877  rte_dma_trace_copy(dev_id, vchan, src, dst, length, flags, ret);
878 
879  return ret;
880 }
881 
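A minimal sketch of a single, immediately submitted copy; copy_one() is an illustrative helper, and the exact negative errno values returned on failure are driver-dependent.

#include <rte_dmadev.h>

/* Enqueue a single copy and ring the doorbell in the same call. A
 * non-negative return value is the ring index of the job; a negative
 * value means nothing was enqueued (for example when the ring is full).
 */
static int
copy_one(int16_t dev_id, uint16_t vchan, rte_iova_t src, rte_iova_t dst,
                uint32_t len)
{
        return rte_dma_copy(dev_id, vchan, src, dst, len,
                        RTE_DMA_OP_FLAG_SUBMIT);
}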
912 static inline int
913 rte_dma_copy_sg(int16_t dev_id, uint16_t vchan, struct rte_dma_sge *src,
914  struct rte_dma_sge *dst, uint16_t nb_src, uint16_t nb_dst,
915  uint64_t flags)
916 {
917  struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
918  int ret;
919 
920 #ifdef RTE_DMADEV_DEBUG
921  if (!rte_dma_is_valid(dev_id) || src == NULL || dst == NULL ||
922  nb_src == 0 || nb_dst == 0)
923  return -EINVAL;
924  if (*obj->copy_sg == NULL)
925  return -ENOTSUP;
926 #endif
927 
928  ret = (*obj->copy_sg)(obj->dev_private, vchan, src, dst, nb_src,
929  nb_dst, flags);
930  rte_dma_trace_copy_sg(dev_id, vchan, src, dst, nb_src, nb_dst, flags,
931  ret);
932 
933  return ret;
934 }
935 
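A sketch gathering two source segments into one destination buffer with rte_dma_copy_sg(); gather_two_segments() is an illustrative helper name, and the addresses are IOVAs as with rte_dma_copy().

#include <rte_dmadev.h>

/* Gather two source segments into one contiguous destination buffer. */
static int
gather_two_segments(int16_t dev_id, uint16_t vchan,
                rte_iova_t src0, uint32_t len0,
                rte_iova_t src1, uint32_t len1,
                rte_iova_t dst)
{
        struct rte_dma_sge src[2] = {
                { .addr = src0, .length = len0 },
                { .addr = src1, .length = len1 },
        };
        struct rte_dma_sge dst_sge = { .addr = dst, .length = len0 + len1 };

        return rte_dma_copy_sg(dev_id, vchan, src, &dst_sge, 2, 1,
                        RTE_DMA_OP_FLAG_SUBMIT);
}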
962 static inline int
963 rte_dma_fill(int16_t dev_id, uint16_t vchan, uint64_t pattern,
964  rte_iova_t dst, uint32_t length, uint64_t flags)
965 {
966  struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
967  int ret;
968 
969 #ifdef RTE_DMADEV_DEBUG
970  if (!rte_dma_is_valid(dev_id) || length == 0)
971  return -EINVAL;
972  if (*obj->fill == NULL)
973  return -ENOTSUP;
974 #endif
975 
976  ret = (*obj->fill)(obj->dev_private, vchan, pattern, dst, length,
977  flags);
978  rte_dma_trace_fill(dev_id, vchan, pattern, dst, length, flags, ret);
979 
980  return ret;
981 }
982 
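A sketch offloading a buffer clear to the device with rte_dma_fill(); zero_buffer() is an illustrative helper name.

#include <rte_dmadev.h>

/* Clear a buffer by replicating a 64-bit zero pattern across it. */
static int
zero_buffer(int16_t dev_id, uint16_t vchan, rte_iova_t dst, uint32_t len)
{
        return rte_dma_fill(dev_id, vchan, 0, dst, len, RTE_DMA_OP_FLAG_SUBMIT);
}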
997 static inline int
998 rte_dma_submit(int16_t dev_id, uint16_t vchan)
999 {
1000  struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
1001  int ret;
1002 
1003 #ifdef RTE_DMADEV_DEBUG
1004  if (!rte_dma_is_valid(dev_id))
1005  return -EINVAL;
1006  if (*obj->submit == NULL)
1007  return -ENOTSUP;
1008 #endif
1009 
1010  ret = (*obj->submit)(obj->dev_private, vchan);
1011  rte_dma_trace_submit(dev_id, vchan, ret);
1012 
1013  return ret;
1014 }
1015 
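A sketch of the deferred-doorbell pattern: enqueue with flags == 0 and call rte_dma_submit() once for the whole burst; copy_burst() is an illustrative helper name.

#include <rte_dmadev.h>

/* Enqueue up to n copies without the SUBMIT flag and ring the doorbell
 * once at the end; returns how many jobs were actually enqueued.
 */
static uint16_t
copy_burst(int16_t dev_id, uint16_t vchan, const rte_iova_t *src,
                const rte_iova_t *dst, uint32_t len, uint16_t n)
{
        uint16_t i;

        for (i = 0; i < n; i++)
                if (rte_dma_copy(dev_id, vchan, src[i], dst[i], len, 0) < 0)
                        break; /* ring full: submit what was enqueued so far */
        if (i > 0)
                rte_dma_submit(dev_id, vchan);
        return i;
}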
1038 static inline uint16_t
1039 rte_dma_completed(int16_t dev_id, uint16_t vchan, const uint16_t nb_cpls,
1040  uint16_t *last_idx, bool *has_error)
1041 {
1042  struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
1043  uint16_t idx, ret;
1044  bool err;
1045 
1046 #ifdef RTE_DMADEV_DEBUG
1047  if (!rte_dma_is_valid(dev_id) || nb_cpls == 0)
1048  return 0;
1049  if (*obj->completed == NULL)
1050  return 0;
1051 #endif
1052 
1053  /* Ensure the pointer values are non-null to simplify drivers.
1054  * In most cases these should be compile-time evaluated, since this is
1055  * an inline function.
1056  * - If NULL is explicitly passed as a parameter, the compiler knows the
1057  * value is NULL.
1058  * - If the address of a local variable is passed, the compiler knows it
1059  * is non-NULL.
1060  */
1061  if (last_idx == NULL)
1062  last_idx = &idx;
1063  if (has_error == NULL)
1064  has_error = &err;
1065 
1066  *has_error = false;
1067  ret = (*obj->completed)(obj->dev_private, vchan, nb_cpls, last_idx,
1068  has_error);
1069  rte_dma_trace_completed(dev_id, vchan, nb_cpls, last_idx, has_error,
1070  ret);
1071 
1072  return ret;
1073 }
1074 
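A sketch polling a vchan for completions; reap_completions() is an illustrative helper name and the batch size of 32 is arbitrary.

#include <stdbool.h>

#include <rte_dmadev.h>

/* Reclaim up to 32 finished jobs from a vchan. */
static uint16_t
reap_completions(int16_t dev_id, uint16_t vchan)
{
        bool has_error = false;
        uint16_t last_idx;
        uint16_t n = rte_dma_completed(dev_id, vchan, 32, &last_idx, &has_error);

        /* When has_error is set, a job in the scanned range failed;
         * rte_dma_completed_status() can report the per-job status codes.
         */
        return n;
}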
1101 static inline uint16_t
1102 rte_dma_completed_status(int16_t dev_id, uint16_t vchan,
1103  const uint16_t nb_cpls, uint16_t *last_idx,
1104  enum rte_dma_status_code *status)
1105 {
1106  struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
1107  uint16_t idx, ret;
1108 
1109 #ifdef RTE_DMADEV_DEBUG
1110  if (!rte_dma_is_valid(dev_id) || nb_cpls == 0 || status == NULL)
1111  return 0;
1112  if (*obj->completed_status == NULL)
1113  return 0;
1114 #endif
1115 
1116  if (last_idx == NULL)
1117  last_idx = &idx;
1118 
1119  ret = (*obj->completed_status)(obj->dev_private, vchan, nb_cpls,
1120  last_idx, status);
1121  rte_dma_trace_completed_status(dev_id, vchan, nb_cpls, last_idx, status,
1122  ret);
1123 
1124  return ret;
1125 }
1126 
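A sketch draining completions together with their individual status codes; reap_with_status() is an illustrative helper name, and RTE_DMA_STATUS_SUCCESSFUL is the success code of enum rte_dma_status_code (its enumerators are elided from this listing).

#include <rte_dmadev.h>

/* Drain up to 8 jobs with their status codes and count the failures. */
static uint16_t
reap_with_status(int16_t dev_id, uint16_t vchan)
{
        enum rte_dma_status_code status[8];
        uint16_t last_idx;
        uint16_t n = rte_dma_completed_status(dev_id, vchan, 8, &last_idx, status);
        uint16_t i, nb_fail = 0;

        for (i = 0; i < n; i++)
                if (status[i] != RTE_DMA_STATUS_SUCCESSFUL)
                        nb_fail++;
        return n - nb_fail; /* number of jobs that completed successfully */
}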
1139 static inline uint16_t
1140 rte_dma_burst_capacity(int16_t dev_id, uint16_t vchan)
1141 {
1142  struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
1143  uint16_t ret;
1144 
1145 #ifdef RTE_DMADEV_DEBUG
1146  if (!rte_dma_is_valid(dev_id))
1147  return 0;
1148  if (*obj->burst_capacity == NULL)
1149  return 0;
1150 #endif
1151  ret = (*obj->burst_capacity)(obj->dev_private, vchan);
1152  rte_dma_trace_burst_capacity(dev_id, vchan, ret);
1153 
1154  return ret;
1155 }
1156 
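A sketch gating a burst on the remaining ring space; burst_fits() is an illustrative helper name.

#include <stdbool.h>

#include <rte_dmadev.h>

/* Check up front that a burst of n jobs fits in the descriptor ring. */
static bool
burst_fits(int16_t dev_id, uint16_t vchan, uint16_t n)
{
        return rte_dma_burst_capacity(dev_id, vchan) >= n;
}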
1157 #ifdef __cplusplus
1158 }
1159 #endif
1160 
1161 #endif /* RTE_DMADEV_H */