DPDK 25.11.0-rc3
rte_dmadev.h
Go to the documentation of this file.
1/* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2021 HiSilicon Limited
3 * Copyright(c) 2021 Intel Corporation
4 * Copyright(c) 2021 Marvell International Ltd
5 * Copyright(c) 2021 SmartShare Systems
6 */
7
8#ifndef RTE_DMADEV_H
9#define RTE_DMADEV_H
10
147#include <stdint.h>
148#include <errno.h>
149
150#include <rte_bitops.h>
151#include <rte_common.h>
152#include <rte_uuid.h>
153
154#ifdef __cplusplus
155extern "C" {
156#endif
157
159#define RTE_DMADEV_DEFAULT_MAX 64
160
173int rte_dma_dev_max(size_t dev_max);
174
185int rte_dma_get_dev_id_by_name(const char *name);
186
196bool rte_dma_is_valid(int16_t dev_id);
197
205uint16_t rte_dma_count_avail(void);
206
215int16_t rte_dma_next_dev(int16_t start_dev_id);
216
218#define RTE_DMA_FOREACH_DEV(p) \
219 for (p = rte_dma_next_dev(0); \
220 p != -1; \
221 p = rte_dma_next_dev(p + 1))
222
223
228#define RTE_DMA_CAPA_MEM_TO_MEM RTE_BIT64(0)
230#define RTE_DMA_CAPA_MEM_TO_DEV RTE_BIT64(1)
232#define RTE_DMA_CAPA_DEV_TO_MEM RTE_BIT64(2)
234#define RTE_DMA_CAPA_DEV_TO_DEV RTE_BIT64(3)
241#define RTE_DMA_CAPA_SVA RTE_BIT64(4)
247#define RTE_DMA_CAPA_SILENT RTE_BIT64(5)
255#define RTE_DMA_CAPA_HANDLES_ERRORS RTE_BIT64(6)
262#define RTE_DMA_CAPA_M2D_AUTO_FREE RTE_BIT64(7)
269#define RTE_DMA_CAPA_PRI_POLICY_SP RTE_BIT64(8)
275#define RTE_DMA_CAPA_INTER_PROCESS_DOMAIN RTE_BIT64(9)
281#define RTE_DMA_CAPA_INTER_OS_DOMAIN RTE_BIT64(10)
282
287#define RTE_DMA_CAPA_OPS_COPY RTE_BIT64(32)
289#define RTE_DMA_CAPA_OPS_COPY_SG RTE_BIT64(33)
291#define RTE_DMA_CAPA_OPS_FILL RTE_BIT64(34)
293#define RTE_DMA_CAPA_OPS_ENQ_DEQ RTE_BIT64(35)
302#define RTE_DMA_CFG_FLAG_SILENT RTE_BIT64(0)
306#define RTE_DMA_CFG_FLAG_ENQ_DEQ RTE_BIT64(1)
307
314 const char *dev_name;
316 uint64_t dev_capa;
318 uint16_t max_vchans;
320 uint16_t max_desc;
322 uint16_t min_desc;
330 uint16_t max_sges;
332 int16_t numa_node;
334 uint16_t nb_vchans;
339};
340
353int rte_dma_info_get(int16_t dev_id, struct rte_dma_info *dev_info);
354
365 uint16_t nb_vchans;
366 /* The priority of the DMA device.
367 * This value should be lower than the field 'nb_priorities' of struct
368 * rte_dma_info, which is obtained from rte_dma_info_get(). If the DMA
369 * device does not support priority scheduling, this value should be zero.
370 *
371 * The lowest value indicates the highest priority and vice-versa.
372 */
373 uint16_t priority;
375 uint64_t flags;
376};
377
394int rte_dma_configure(int16_t dev_id, const struct rte_dma_conf *dev_conf);
395
408int rte_dma_start(int16_t dev_id);
409
421int rte_dma_stop(int16_t dev_id);
422
434int rte_dma_close(int16_t dev_id);
435
480};
481
488 RTE_DMA_PORT_NONE,
490};
491
504 union {
553 __extension__
554 union {
555 struct {
556 uint64_t coreid : 4;
557 uint64_t pfid : 8;
558 uint64_t vfen : 1;
559 uint64_t vfid : 16;
561 uint64_t pasid : 20;
563 uint64_t attr : 3;
565 uint64_t ph : 2;
567 uint64_t st : 16;
568 };
569 uint64_t val;
571 };
572 uint64_t reserved[2];
573};
574
579 union {
580 struct {
589 } m2d;
590 };
592 uint64_t reserved[2];
593};
594
614};
615
637 uint16_t src_handler;
639 uint16_t dst_handler;
641 uint64_t reserved[2];
642};
643
656 uint16_t nb_desc;
691};
692
708int rte_dma_vchan_setup(int16_t dev_id, uint16_t vchan,
709 const struct rte_dma_vchan_conf *conf);
710
718 uint64_t submitted;
722 uint64_t completed;
724 uint64_t errors;
725};
726
733#define RTE_DMA_ALL_VCHAN 0xFFFFu
734
750int rte_dma_stats_get(int16_t dev_id, uint16_t vchan,
751 struct rte_dma_stats *stats);
752
765int rte_dma_stats_reset(int16_t dev_id, uint16_t vchan);
766
777};
778
794int
795rte_dma_vchan_status(int16_t dev_id, uint16_t vchan, enum rte_dma_vchan_status *status);
796
808int rte_dma_dump(int16_t dev_id, FILE *f);
809
830
856typedef void (*rte_dma_access_pair_group_event_cb_t)(int16_t dev_id,
857 int16_t group_id,
858 rte_uuid_t domain_id,
860
883__rte_experimental
884int rte_dma_access_pair_group_create(int16_t dev_id, rte_uuid_t domain_id, rte_uuid_t token,
885 int16_t *group_id, rte_dma_access_pair_group_event_cb_t cb);
886
904__rte_experimental
905int rte_dma_access_pair_group_destroy(int16_t dev_id, int16_t group_id);
906
930__rte_experimental
931int rte_dma_access_pair_group_join(int16_t dev_id, rte_uuid_t domain_id, rte_uuid_t token,
932 int16_t group_id, rte_dma_access_pair_group_event_cb_t cb);
933
951__rte_experimental
952int rte_dma_access_pair_group_leave(int16_t dev_id, int16_t group_id);
953
974__rte_experimental
975int rte_dma_access_pair_group_handler_get(int16_t dev_id, int16_t group_id, rte_uuid_t domain_id,
976 uint16_t *handler);
977
1040};
1041
1049 uint32_t length;
1050};
1051
1061 uint64_t flags;
1067 uint32_t rsvd;
1074 uint64_t impl_opaque[2];
1078 uint64_t user_meta;
1092 uint64_t event_meta;
1096 int16_t dma_dev_id;
1100 uint16_t vchan;
1102 uint16_t nb_src;
1104 uint16_t nb_dst;
1107};
1108
1109#ifdef __cplusplus
1110}
1111#endif
1112
1113#include "rte_dmadev_core.h"
1114#include "rte_dmadev_trace_fp.h"
1115
1116#ifdef __cplusplus
1117extern "C" {
1118#endif
1119
1131#define RTE_DMA_OP_FLAG_FENCE RTE_BIT64(0)
1136#define RTE_DMA_OP_FLAG_SUBMIT RTE_BIT64(1)
1141#define RTE_DMA_OP_FLAG_LLC RTE_BIT64(2)
1148#define RTE_DMA_OP_FLAG_AUTO_FREE RTE_BIT64(3)
1177static inline int
1178rte_dma_copy(int16_t dev_id, uint16_t vchan, rte_iova_t src, rte_iova_t dst,
1179 uint32_t length, uint64_t flags)
1180{
1181 struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
1182 int ret;
1183
1184#ifdef RTE_DMADEV_DEBUG
1185 if (!rte_dma_is_valid(dev_id) || length == 0)
1186 return -EINVAL;
1187 if (obj->copy == NULL)
1188 return -ENOTSUP;
1189#endif
1190
1191 ret = obj->copy(obj->dev_private, vchan, src, dst, length, flags);
1192 rte_dma_trace_copy(dev_id, vchan, src, dst, length, flags, ret);
1193
1194 return ret;
1195}
1196
1227static inline int
1228rte_dma_copy_sg(int16_t dev_id, uint16_t vchan, struct rte_dma_sge *src,
1229 struct rte_dma_sge *dst, uint16_t nb_src, uint16_t nb_dst,
1230 uint64_t flags)
1231{
1232 struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
1233 int ret;
1234
1235#ifdef RTE_DMADEV_DEBUG
1236 if (!rte_dma_is_valid(dev_id) || src == NULL || dst == NULL ||
1237 nb_src == 0 || nb_dst == 0)
1238 return -EINVAL;
1239 if (obj->copy_sg == NULL)
1240 return -ENOTSUP;
1241#endif
1242
1243 ret = obj->copy_sg(obj->dev_private, vchan, src, dst, nb_src, nb_dst, flags);
1244 rte_dma_trace_copy_sg(dev_id, vchan, src, dst, nb_src, nb_dst, flags,
1245 ret);
1246
1247 return ret;
1248}
1249
1276static inline int
1277rte_dma_fill(int16_t dev_id, uint16_t vchan, uint64_t pattern,
1278 rte_iova_t dst, uint32_t length, uint64_t flags)
1279{
1280 struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
1281 int ret;
1282
1283#ifdef RTE_DMADEV_DEBUG
1284 if (!rte_dma_is_valid(dev_id) || length == 0)
1285 return -EINVAL;
1286 if (obj->fill == NULL)
1287 return -ENOTSUP;
1288#endif
1289
1290 ret = obj->fill(obj->dev_private, vchan, pattern, dst, length, flags);
1291 rte_dma_trace_fill(dev_id, vchan, pattern, dst, length, flags, ret);
1292
1293 return ret;
1294}
1295
1310static inline int
1311rte_dma_submit(int16_t dev_id, uint16_t vchan)
1312{
1313 struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
1314 int ret;
1315
1316#ifdef RTE_DMADEV_DEBUG
1317 if (!rte_dma_is_valid(dev_id))
1318 return -EINVAL;
1319 if (obj->submit == NULL)
1320 return -ENOTSUP;
1321#endif
1322
1323 ret = obj->submit(obj->dev_private, vchan);
1324 rte_dma_trace_submit(dev_id, vchan, ret);
1325
1326 return ret;
1327}
1328
1351static inline uint16_t
1352rte_dma_completed(int16_t dev_id, uint16_t vchan, const uint16_t nb_cpls,
1353 uint16_t *last_idx, bool *has_error)
1354{
1355 struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
1356 uint16_t idx, ret;
1357 bool err;
1358
1359#ifdef RTE_DMADEV_DEBUG
1360 if (!rte_dma_is_valid(dev_id) || nb_cpls == 0)
1361 return 0;
1362 if (obj->completed == NULL)
1363 return 0;
1364#endif
1365
1366 /* Ensure the pointer values are non-null to simplify drivers.
1367 * In most cases these should be compile time evaluated, since this is
1368 * an inline function.
1369 * - If NULL is explicitly passed as parameter, then compiler knows the
1370 * value is NULL
1371 * - If address of local variable is passed as parameter, then compiler
1372 * can know it's non-NULL.
1373 */
1374 if (last_idx == NULL)
1375 last_idx = &idx;
1376 if (has_error == NULL)
1377 has_error = &err;
1378
1379 *has_error = false;
1380 ret = obj->completed(obj->dev_private, vchan, nb_cpls, last_idx, has_error);
1381 rte_dma_trace_completed(dev_id, vchan, nb_cpls, last_idx, has_error,
1382 ret);
1383
1384 return ret;
1385}
1386
1413static inline uint16_t
1414rte_dma_completed_status(int16_t dev_id, uint16_t vchan,
1415 const uint16_t nb_cpls, uint16_t *last_idx,
1416 enum rte_dma_status_code *status)
1417{
1418 struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
1419 uint16_t idx, ret;
1420
1421#ifdef RTE_DMADEV_DEBUG
1422 if (!rte_dma_is_valid(dev_id) || nb_cpls == 0 || status == NULL)
1423 return 0;
1424 if (obj->completed_status == NULL)
1425 return 0;
1426#endif
1427
1428 if (last_idx == NULL)
1429 last_idx = &idx;
1430
1431 ret = obj->completed_status(obj->dev_private, vchan, nb_cpls, last_idx, status);
1432 rte_dma_trace_completed_status(dev_id, vchan, nb_cpls, last_idx, status,
1433 ret);
1434
1435 return ret;
1436}
1437
1450static inline uint16_t
1451rte_dma_burst_capacity(int16_t dev_id, uint16_t vchan)
1452{
1453 struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
1454 uint16_t ret;
1455
1456#ifdef RTE_DMADEV_DEBUG
1457 if (!rte_dma_is_valid(dev_id))
1458 return 0;
1459 if (obj->burst_capacity == NULL)
1460 return 0;
1461#endif
1462 ret = obj->burst_capacity(obj->dev_private, vchan);
1463 rte_dma_trace_burst_capacity(dev_id, vchan, ret);
1464
1465 return ret;
1466}
1467
1488static inline uint16_t
1489rte_dma_enqueue_ops(int16_t dev_id, uint16_t vchan, struct rte_dma_op **ops, uint16_t nb_ops)
1490{
1491 struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
1492 uint16_t ret;
1493
1494#ifdef RTE_DMADEV_DEBUG
1495 if (!rte_dma_is_valid(dev_id))
1496 return 0;
1497 if (*obj->enqueue == NULL)
1498 return 0;
1499#endif
1500
1501 ret = (*obj->enqueue)(obj->dev_private, vchan, ops, nb_ops);
1502 rte_dma_trace_enqueue_ops(dev_id, vchan, (void **)ops, nb_ops);
1503
1504 return ret;
1505}
1506
1523static inline uint16_t
1524rte_dma_dequeue_ops(int16_t dev_id, uint16_t vchan, struct rte_dma_op **ops, uint16_t nb_ops)
1525{
1526 struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
1527 uint16_t ret;
1528
1529#ifdef RTE_DMADEV_DEBUG
1530 if (!rte_dma_is_valid(dev_id))
1531 return 0;
1532 if (*obj->dequeue == NULL)
1533 return 0;
1534#endif
1535
1536 ret = (*obj->dequeue)(obj->dev_private, vchan, ops, nb_ops);
1537 rte_dma_trace_dequeue_ops(dev_id, vchan, (void **)ops, nb_ops);
1538
1539 return ret;
1540}
1541
1542#ifdef __cplusplus
1543}
1544#endif
1545
1546#endif /* RTE_DMADEV_H */
uint64_t rte_iova_t
Definition: rte_common.h:770
rte_dma_direction
Definition: rte_dmadev.h:441
@ RTE_DMA_DIR_MEM_TO_DEV
Definition: rte_dmadev.h:461
@ RTE_DMA_DIR_DEV_TO_MEM
Definition: rte_dmadev.h:470
@ RTE_DMA_DIR_MEM_TO_MEM
Definition: rte_dmadev.h:452
@ RTE_DMA_DIR_DEV_TO_DEV
Definition: rte_dmadev.h:479
int rte_dma_start(int16_t dev_id)
int rte_dma_info_get(int16_t dev_id, struct rte_dma_info *dev_info)
int rte_dma_close(int16_t dev_id)
uint16_t rte_dma_count_avail(void)
__rte_experimental int rte_dma_access_pair_group_join(int16_t dev_id, rte_uuid_t domain_id, rte_uuid_t token, int16_t group_id, rte_dma_access_pair_group_event_cb_t cb)
static int rte_dma_copy(int16_t dev_id, uint16_t vchan, rte_iova_t src, rte_iova_t dst, uint32_t length, uint64_t flags)
Definition: rte_dmadev.h:1178
int rte_dma_get_dev_id_by_name(const char *name)
__rte_experimental int rte_dma_access_pair_group_destroy(int16_t dev_id, int16_t group_id)
rte_dma_inter_domain_type
Definition: rte_dmadev.h:607
@ RTE_DMA_INTER_PROCESS_DOMAIN
Definition: rte_dmadev.h:611
@ RTE_DMA_INTER_DOMAIN_NONE
Definition: rte_dmadev.h:609
@ RTE_DMA_INTER_OS_DOMAIN
Definition: rte_dmadev.h:613
int rte_dma_configure(int16_t dev_id, const struct rte_dma_conf *dev_conf)
rte_dma_status_code
Definition: rte_dmadev.h:983
@ RTE_DMA_STATUS_DESCRIPTOR_READ_ERROR
Definition: rte_dmadev.h:1028
@ RTE_DMA_STATUS_INVALID_DST_ADDR
Definition: rte_dmadev.h:1004
@ RTE_DMA_STATUS_PAGE_FAULT
Definition: rte_dmadev.h:1035
@ RTE_DMA_STATUS_INVALID_LENGTH
Definition: rte_dmadev.h:1011
@ RTE_DMA_STATUS_NOT_ATTEMPTED
Definition: rte_dmadev.h:1000
@ RTE_DMA_STATUS_BUS_READ_ERROR
Definition: rte_dmadev.h:1018
@ RTE_DMA_STATUS_INVALID_ADDR
Definition: rte_dmadev.h:1009
@ RTE_DMA_STATUS_ERROR_UNKNOWN
Definition: rte_dmadev.h:1039
@ RTE_DMA_STATUS_BUS_ERROR
Definition: rte_dmadev.h:1024
@ RTE_DMA_STATUS_BUS_WRITE_ERROR
Definition: rte_dmadev.h:1020
@ RTE_DMA_STATUS_DATA_POISION
Definition: rte_dmadev.h:1026
@ RTE_DMA_STATUS_INVALID_OPCODE
Definition: rte_dmadev.h:1016
@ RTE_DMA_STATUS_INVALID_SRC_ADDR
Definition: rte_dmadev.h:1002
@ RTE_DMA_STATUS_USER_ABORT
Definition: rte_dmadev.h:992
@ RTE_DMA_STATUS_DEV_LINK_ERROR
Definition: rte_dmadev.h:1033
@ RTE_DMA_STATUS_SUCCESSFUL
Definition: rte_dmadev.h:985
static uint16_t rte_dma_dequeue_ops(int16_t dev_id, uint16_t vchan, struct rte_dma_op **ops, uint16_t nb_ops)
Definition: rte_dmadev.h:1524
static uint16_t rte_dma_burst_capacity(int16_t dev_id, uint16_t vchan)
Definition: rte_dmadev.h:1451
int rte_dma_stats_reset(int16_t dev_id, uint16_t vchan)
int rte_dma_dev_max(size_t dev_max)
void(* rte_dma_access_pair_group_event_cb_t)(int16_t dev_id, int16_t group_id, rte_uuid_t domain_id, enum rte_dma_access_pair_group_event_type event)
Definition: rte_dmadev.h:856
int rte_dma_stop(int16_t dev_id)
static uint16_t rte_dma_completed(int16_t dev_id, uint16_t vchan, const uint16_t nb_cpls, uint16_t *last_idx, bool *has_error)
Definition: rte_dmadev.h:1352
rte_dma_access_pair_group_event_type
Definition: rte_dmadev.h:824
@ RTE_DMA_GROUP_EVENT_MEMBER_LEFT
Definition: rte_dmadev.h:826
@ RTE_DMA_GROUP_EVENT_GROUP_DESTROYED
Definition: rte_dmadev.h:828
int rte_dma_dump(int16_t dev_id, FILE *f)
__rte_experimental int rte_dma_access_pair_group_create(int16_t dev_id, rte_uuid_t domain_id, rte_uuid_t token, int16_t *group_id, rte_dma_access_pair_group_event_cb_t cb)
__rte_experimental int rte_dma_access_pair_group_leave(int16_t dev_id, int16_t group_id)
int rte_dma_vchan_setup(int16_t dev_id, uint16_t vchan, const struct rte_dma_vchan_conf *conf)
int16_t rte_dma_next_dev(int16_t start_dev_id)
static int rte_dma_submit(int16_t dev_id, uint16_t vchan)
Definition: rte_dmadev.h:1311
static uint16_t rte_dma_enqueue_ops(int16_t dev_id, uint16_t vchan, struct rte_dma_op **ops, uint16_t nb_ops)
Definition: rte_dmadev.h:1489
static uint16_t rte_dma_completed_status(int16_t dev_id, uint16_t vchan, const uint16_t nb_cpls, uint16_t *last_idx, enum rte_dma_status_code *status)
Definition: rte_dmadev.h:1414
int rte_dma_stats_get(int16_t dev_id, uint16_t vchan, struct rte_dma_stats *stats)
bool rte_dma_is_valid(int16_t dev_id)
rte_dma_port_type
Definition: rte_dmadev.h:487
@ RTE_DMA_PORT_PCIE
Definition: rte_dmadev.h:489
static int rte_dma_fill(int16_t dev_id, uint16_t vchan, uint64_t pattern, rte_iova_t dst, uint32_t length, uint64_t flags)
Definition: rte_dmadev.h:1277
rte_dma_vchan_status
Definition: rte_dmadev.h:773
@ RTE_DMA_VCHAN_HALTED_ERROR
Definition: rte_dmadev.h:776
@ RTE_DMA_VCHAN_ACTIVE
Definition: rte_dmadev.h:775
@ RTE_DMA_VCHAN_IDLE
Definition: rte_dmadev.h:774
__rte_experimental int rte_dma_access_pair_group_handler_get(int16_t dev_id, int16_t group_id, rte_uuid_t domain_id, uint16_t *handler)
static int rte_dma_copy_sg(int16_t dev_id, uint16_t vchan, struct rte_dma_sge *src, struct rte_dma_sge *dst, uint16_t nb_src, uint16_t nb_dst, uint64_t flags)
Definition: rte_dmadev.h:1228
unsigned char rte_uuid_t[16]
Definition: rte_uuid.h:24
struct rte_mempool * pool
Definition: rte_dmadev.h:588
uint64_t flags
Definition: rte_dmadev.h:375
uint16_t nb_vchans
Definition: rte_dmadev.h:365
uint64_t dev_capa
Definition: rte_dmadev.h:316
uint16_t max_sges
Definition: rte_dmadev.h:330
uint16_t max_vchans
Definition: rte_dmadev.h:318
uint16_t max_desc
Definition: rte_dmadev.h:320
uint16_t min_desc
Definition: rte_dmadev.h:322
const char * dev_name
Definition: rte_dmadev.h:314
uint16_t nb_priorities
Definition: rte_dmadev.h:338
uint16_t nb_vchans
Definition: rte_dmadev.h:334
int16_t numa_node
Definition: rte_dmadev.h:332
enum rte_dma_inter_domain_type type
Definition: rte_dmadev.h:635
enum rte_dma_status_code status
Definition: rte_dmadev.h:1065
uint64_t user_meta
Definition: rte_dmadev.h:1078
uint64_t event_meta
Definition: rte_dmadev.h:1092
uint16_t nb_dst
Definition: rte_dmadev.h:1104
uint16_t vchan
Definition: rte_dmadev.h:1100
uint16_t nb_src
Definition: rte_dmadev.h:1102
struct rte_dma_sge src_dst_seg[]
Definition: rte_dmadev.h:1106
uint64_t impl_opaque[2]
Definition: rte_dmadev.h:1074
int16_t dma_dev_id
Definition: rte_dmadev.h:1096
uint64_t flags
Definition: rte_dmadev.h:1061
uint32_t rsvd
Definition: rte_dmadev.h:1067
struct rte_mempool * op_mp
Definition: rte_dmadev.h:1063
enum rte_dma_port_type port_type
Definition: rte_dmadev.h:503
__extension__ union rte_dma_port_param::@149::@151 pcie
uint64_t reserved[2]
Definition: rte_dmadev.h:572
rte_iova_t addr
Definition: rte_dmadev.h:1048
uint32_t length
Definition: rte_dmadev.h:1049
uint64_t submitted
Definition: rte_dmadev.h:718
uint64_t errors
Definition: rte_dmadev.h:724
uint64_t completed
Definition: rte_dmadev.h:722
struct rte_dma_inter_domain_param domain
Definition: rte_dmadev.h:690
enum rte_dma_direction direction
Definition: rte_dmadev.h:654
struct rte_dma_auto_free_param auto_free
Definition: rte_dmadev.h:680
struct rte_dma_port_param src_port
Definition: rte_dmadev.h:664
struct rte_dma_port_param dst_port
Definition: rte_dmadev.h:672