DPDK 25.11.0-rc1
rte_dmadev.h
Go to the documentation of this file.
1/* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2021 HiSilicon Limited
3 * Copyright(c) 2021 Intel Corporation
4 * Copyright(c) 2021 Marvell International Ltd
5 * Copyright(c) 2021 SmartShare Systems
6 */
7
8#ifndef RTE_DMADEV_H
9#define RTE_DMADEV_H
10
147#include <stdint.h>
148
149#include <rte_bitops.h>
150#include <rte_common.h>
151#include <rte_uuid.h>
152
153#ifdef __cplusplus
154extern "C" {
155#endif
156
158#define RTE_DMADEV_DEFAULT_MAX 64
159
172int rte_dma_dev_max(size_t dev_max);
173
184int rte_dma_get_dev_id_by_name(const char *name);
185
195bool rte_dma_is_valid(int16_t dev_id);
196
204uint16_t rte_dma_count_avail(void);
205
214int16_t rte_dma_next_dev(int16_t start_dev_id);
215
217#define RTE_DMA_FOREACH_DEV(p) \
218 for (p = rte_dma_next_dev(0); \
219 p != -1; \
220 p = rte_dma_next_dev(p + 1))
221
222
227#define RTE_DMA_CAPA_MEM_TO_MEM RTE_BIT64(0)
229#define RTE_DMA_CAPA_MEM_TO_DEV RTE_BIT64(1)
231#define RTE_DMA_CAPA_DEV_TO_MEM RTE_BIT64(2)
233#define RTE_DMA_CAPA_DEV_TO_DEV RTE_BIT64(3)
240#define RTE_DMA_CAPA_SVA RTE_BIT64(4)
246#define RTE_DMA_CAPA_SILENT RTE_BIT64(5)
254#define RTE_DMA_CAPA_HANDLES_ERRORS RTE_BIT64(6)
261#define RTE_DMA_CAPA_M2D_AUTO_FREE RTE_BIT64(7)
268#define RTE_DMA_CAPA_PRI_POLICY_SP RTE_BIT64(8)
274#define RTE_DMA_CAPA_INTER_PROCESS_DOMAIN RTE_BIT64(9)
280#define RTE_DMA_CAPA_INTER_OS_DOMAIN RTE_BIT64(10)
281
286#define RTE_DMA_CAPA_OPS_COPY RTE_BIT64(32)
288#define RTE_DMA_CAPA_OPS_COPY_SG RTE_BIT64(33)
290#define RTE_DMA_CAPA_OPS_FILL RTE_BIT64(34)
292#define RTE_DMA_CAPA_OPS_ENQ_DEQ RTE_BIT64(35)
301#define RTE_DMA_CFG_FLAG_SILENT RTE_BIT64(0)
305#define RTE_DMA_CFG_FLAG_ENQ_DEQ RTE_BIT64(1)
306
313 const char *dev_name;
315 uint64_t dev_capa;
317 uint16_t max_vchans;
319 uint16_t max_desc;
321 uint16_t min_desc;
329 uint16_t max_sges;
331 int16_t numa_node;
333 uint16_t nb_vchans;
338};
339
352int rte_dma_info_get(int16_t dev_id, struct rte_dma_info *dev_info);
353
364 uint16_t nb_vchans;
	/* The priority of the DMA device.
	 * This value should be lower than the field 'nb_priorities' of struct
	 * rte_dma_info, which is obtained from rte_dma_info_get(). If the DMA
	 * device does not support priority scheduling, this value should be
	 * zero.
	 *
	 * Lowest value indicates higher priority and vice-versa.
	 */
372 uint16_t priority;
374 uint64_t flags;
375};
376
393int rte_dma_configure(int16_t dev_id, const struct rte_dma_conf *dev_conf);
394
407int rte_dma_start(int16_t dev_id);
408
420int rte_dma_stop(int16_t dev_id);
421
433int rte_dma_close(int16_t dev_id);
434
479};
480
487 RTE_DMA_PORT_NONE,
489};
490
503 union {
552 __extension__
553 union {
554 struct {
555 uint64_t coreid : 4;
556 uint64_t pfid : 8;
557 uint64_t vfen : 1;
558 uint64_t vfid : 16;
560 uint64_t pasid : 20;
562 uint64_t attr : 3;
564 uint64_t ph : 2;
566 uint64_t st : 16;
567 };
568 uint64_t val;
570 };
571 uint64_t reserved[2];
572};
573
578 union {
579 struct {
588 } m2d;
589 };
591 uint64_t reserved[2];
592};
593
613};
614
636 uint16_t src_handler;
638 uint16_t dst_handler;
640 uint64_t reserved[2];
641};
642
655 uint16_t nb_desc;
690};
691
707int rte_dma_vchan_setup(int16_t dev_id, uint16_t vchan,
708 const struct rte_dma_vchan_conf *conf);
709
717 uint64_t submitted;
721 uint64_t completed;
723 uint64_t errors;
724};
725
732#define RTE_DMA_ALL_VCHAN 0xFFFFu
733
749int rte_dma_stats_get(int16_t dev_id, uint16_t vchan,
750 struct rte_dma_stats *stats);
751
764int rte_dma_stats_reset(int16_t dev_id, uint16_t vchan);
765
776};
777
793int
794rte_dma_vchan_status(int16_t dev_id, uint16_t vchan, enum rte_dma_vchan_status *status);
795
807int rte_dma_dump(int16_t dev_id, FILE *f);
808
829
855typedef void (*rte_dma_access_pair_group_event_cb_t)(int16_t dev_id,
856 int16_t group_id,
857 rte_uuid_t domain_id,
859
882__rte_experimental
883int rte_dma_access_pair_group_create(int16_t dev_id, rte_uuid_t domain_id, rte_uuid_t token,
884 int16_t *group_id, rte_dma_access_pair_group_event_cb_t cb);
885
903__rte_experimental
904int rte_dma_access_pair_group_destroy(int16_t dev_id, int16_t group_id);
905
927__rte_experimental
928int rte_dma_access_pair_group_join(int16_t dev_id, int16_t group_id, rte_uuid_t token,
930
948__rte_experimental
949int rte_dma_access_pair_group_leave(int16_t dev_id, int16_t group_id);
950
971__rte_experimental
972int rte_dma_access_pair_group_handler_get(int16_t dev_id, int16_t group_id, rte_uuid_t domain_id,
973 uint16_t *handler);
974
1037};
1038
1046 uint32_t length;
1047};
1048
1058 uint64_t flags;
1064 uint32_t rsvd;
1071 uint64_t impl_opaque[2];
1075 uint64_t user_meta;
1089 uint64_t event_meta;
1093 int16_t dma_dev_id;
1097 uint16_t vchan;
1099 uint16_t nb_src;
1101 uint16_t nb_dst;
1104};
1105
1106#ifdef __cplusplus
1107}
1108#endif
1109
1110#include "rte_dmadev_core.h"
1111#include "rte_dmadev_trace_fp.h"
1112
1113#ifdef __cplusplus
1114extern "C" {
1115#endif
1116
1128#define RTE_DMA_OP_FLAG_FENCE RTE_BIT64(0)
1133#define RTE_DMA_OP_FLAG_SUBMIT RTE_BIT64(1)
1138#define RTE_DMA_OP_FLAG_LLC RTE_BIT64(2)
1145#define RTE_DMA_OP_FLAG_AUTO_FREE RTE_BIT64(3)
1174static inline int
1175rte_dma_copy(int16_t dev_id, uint16_t vchan, rte_iova_t src, rte_iova_t dst,
1176 uint32_t length, uint64_t flags)
1177{
1178 struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
1179 int ret;
1180
1181#ifdef RTE_DMADEV_DEBUG
1182 if (!rte_dma_is_valid(dev_id) || length == 0)
1183 return -EINVAL;
1184 if (obj->copy == NULL)
1185 return -ENOTSUP;
1186#endif
1187
1188 ret = obj->copy(obj->dev_private, vchan, src, dst, length, flags);
1189 rte_dma_trace_copy(dev_id, vchan, src, dst, length, flags, ret);
1190
1191 return ret;
1192}
1193
1224static inline int
1225rte_dma_copy_sg(int16_t dev_id, uint16_t vchan, struct rte_dma_sge *src,
1226 struct rte_dma_sge *dst, uint16_t nb_src, uint16_t nb_dst,
1227 uint64_t flags)
1228{
1229 struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
1230 int ret;
1231
1232#ifdef RTE_DMADEV_DEBUG
1233 if (!rte_dma_is_valid(dev_id) || src == NULL || dst == NULL ||
1234 nb_src == 0 || nb_dst == 0)
1235 return -EINVAL;
1236 if (obj->copy_sg == NULL)
1237 return -ENOTSUP;
1238#endif
1239
1240 ret = obj->copy_sg(obj->dev_private, vchan, src, dst, nb_src, nb_dst, flags);
1241 rte_dma_trace_copy_sg(dev_id, vchan, src, dst, nb_src, nb_dst, flags,
1242 ret);
1243
1244 return ret;
1245}
1246
1273static inline int
1274rte_dma_fill(int16_t dev_id, uint16_t vchan, uint64_t pattern,
1275 rte_iova_t dst, uint32_t length, uint64_t flags)
1276{
1277 struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
1278 int ret;
1279
1280#ifdef RTE_DMADEV_DEBUG
1281 if (!rte_dma_is_valid(dev_id) || length == 0)
1282 return -EINVAL;
1283 if (obj->fill == NULL)
1284 return -ENOTSUP;
1285#endif
1286
1287 ret = obj->fill(obj->dev_private, vchan, pattern, dst, length, flags);
1288 rte_dma_trace_fill(dev_id, vchan, pattern, dst, length, flags, ret);
1289
1290 return ret;
1291}
1292
1307static inline int
1308rte_dma_submit(int16_t dev_id, uint16_t vchan)
1309{
1310 struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
1311 int ret;
1312
1313#ifdef RTE_DMADEV_DEBUG
1314 if (!rte_dma_is_valid(dev_id))
1315 return -EINVAL;
1316 if (obj->submit == NULL)
1317 return -ENOTSUP;
1318#endif
1319
1320 ret = obj->submit(obj->dev_private, vchan);
1321 rte_dma_trace_submit(dev_id, vchan, ret);
1322
1323 return ret;
1324}
1325
1348static inline uint16_t
1349rte_dma_completed(int16_t dev_id, uint16_t vchan, const uint16_t nb_cpls,
1350 uint16_t *last_idx, bool *has_error)
1351{
1352 struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
1353 uint16_t idx, ret;
1354 bool err;
1355
1356#ifdef RTE_DMADEV_DEBUG
1357 if (!rte_dma_is_valid(dev_id) || nb_cpls == 0)
1358 return 0;
1359 if (obj->completed == NULL)
1360 return 0;
1361#endif
1362
1363 /* Ensure the pointer values are non-null to simplify drivers.
1364 * In most cases these should be compile time evaluated, since this is
1365 * an inline function.
1366 * - If NULL is explicitly passed as parameter, then compiler knows the
1367 * value is NULL
1368 * - If address of local variable is passed as parameter, then compiler
1369 * can know it's non-NULL.
1370 */
1371 if (last_idx == NULL)
1372 last_idx = &idx;
1373 if (has_error == NULL)
1374 has_error = &err;
1375
1376 *has_error = false;
1377 ret = obj->completed(obj->dev_private, vchan, nb_cpls, last_idx, has_error);
1378 rte_dma_trace_completed(dev_id, vchan, nb_cpls, last_idx, has_error,
1379 ret);
1380
1381 return ret;
1382}
1383
1410static inline uint16_t
1411rte_dma_completed_status(int16_t dev_id, uint16_t vchan,
1412 const uint16_t nb_cpls, uint16_t *last_idx,
1413 enum rte_dma_status_code *status)
1414{
1415 struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
1416 uint16_t idx, ret;
1417
1418#ifdef RTE_DMADEV_DEBUG
1419 if (!rte_dma_is_valid(dev_id) || nb_cpls == 0 || status == NULL)
1420 return 0;
1421 if (obj->completed_status == NULL)
1422 return 0;
1423#endif
1424
1425 if (last_idx == NULL)
1426 last_idx = &idx;
1427
1428 ret = obj->completed_status(obj->dev_private, vchan, nb_cpls, last_idx, status);
1429 rte_dma_trace_completed_status(dev_id, vchan, nb_cpls, last_idx, status,
1430 ret);
1431
1432 return ret;
1433}
1434
1447static inline uint16_t
1448rte_dma_burst_capacity(int16_t dev_id, uint16_t vchan)
1449{
1450 struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
1451 uint16_t ret;
1452
1453#ifdef RTE_DMADEV_DEBUG
1454 if (!rte_dma_is_valid(dev_id))
1455 return 0;
1456 if (obj->burst_capacity == NULL)
1457 return 0;
1458#endif
1459 ret = obj->burst_capacity(obj->dev_private, vchan);
1460 rte_dma_trace_burst_capacity(dev_id, vchan, ret);
1461
1462 return ret;
1463}
1464
1485static inline uint16_t
1486rte_dma_enqueue_ops(int16_t dev_id, uint16_t vchan, struct rte_dma_op **ops, uint16_t nb_ops)
1487{
1488 struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
1489 uint16_t ret;
1490
1491#ifdef RTE_DMADEV_DEBUG
1492 if (!rte_dma_is_valid(dev_id))
1493 return 0;
1494 if (*obj->enqueue == NULL)
1495 return 0;
1496#endif
1497
1498 ret = (*obj->enqueue)(obj->dev_private, vchan, ops, nb_ops);
1499 rte_dma_trace_enqueue_ops(dev_id, vchan, (void **)ops, nb_ops);
1500
1501 return ret;
1502}
1503
1520static inline uint16_t
1521rte_dma_dequeue_ops(int16_t dev_id, uint16_t vchan, struct rte_dma_op **ops, uint16_t nb_ops)
1522{
1523 struct rte_dma_fp_object *obj = &rte_dma_fp_objs[dev_id];
1524 uint16_t ret;
1525
1526#ifdef RTE_DMADEV_DEBUG
1527 if (!rte_dma_is_valid(dev_id))
1528 return 0;
1529 if (*obj->dequeue == NULL)
1530 return 0;
1531#endif
1532
1533 ret = (*obj->dequeue)(obj->dev_private, vchan, ops, nb_ops);
1534 rte_dma_trace_dequeue_ops(dev_id, vchan, (void **)ops, nb_ops);
1535
1536 return ret;
1537}
1538
1539#ifdef __cplusplus
1540}
1541#endif
1542
1543#endif /* RTE_DMADEV_H */
uint64_t rte_iova_t
Definition: rte_common.h:770
rte_dma_direction
Definition: rte_dmadev.h:440
@ RTE_DMA_DIR_MEM_TO_DEV
Definition: rte_dmadev.h:460
@ RTE_DMA_DIR_DEV_TO_MEM
Definition: rte_dmadev.h:469
@ RTE_DMA_DIR_MEM_TO_MEM
Definition: rte_dmadev.h:451
@ RTE_DMA_DIR_DEV_TO_DEV
Definition: rte_dmadev.h:478
int rte_dma_start(int16_t dev_id)
int rte_dma_info_get(int16_t dev_id, struct rte_dma_info *dev_info)
int rte_dma_close(int16_t dev_id)
uint16_t rte_dma_count_avail(void)
static int rte_dma_copy(int16_t dev_id, uint16_t vchan, rte_iova_t src, rte_iova_t dst, uint32_t length, uint64_t flags)
Definition: rte_dmadev.h:1175
int rte_dma_get_dev_id_by_name(const char *name)
__rte_experimental int rte_dma_access_pair_group_destroy(int16_t dev_id, int16_t group_id)
rte_dma_inter_domain_type
Definition: rte_dmadev.h:606
@ RTE_DMA_INTER_PROCESS_DOMAIN
Definition: rte_dmadev.h:610
@ RTE_DMA_INTER_DOMAIN_NONE
Definition: rte_dmadev.h:608
@ RTE_DMA_INTER_OS_DOMAIN
Definition: rte_dmadev.h:612
int rte_dma_configure(int16_t dev_id, const struct rte_dma_conf *dev_conf)
rte_dma_status_code
Definition: rte_dmadev.h:980
@ RTE_DMA_STATUS_DESCRIPTOR_READ_ERROR
Definition: rte_dmadev.h:1025
@ RTE_DMA_STATUS_INVALID_DST_ADDR
Definition: rte_dmadev.h:1001
@ RTE_DMA_STATUS_PAGE_FAULT
Definition: rte_dmadev.h:1032
@ RTE_DMA_STATUS_INVALID_LENGTH
Definition: rte_dmadev.h:1008
@ RTE_DMA_STATUS_NOT_ATTEMPTED
Definition: rte_dmadev.h:997
@ RTE_DMA_STATUS_BUS_READ_ERROR
Definition: rte_dmadev.h:1015
@ RTE_DMA_STATUS_INVALID_ADDR
Definition: rte_dmadev.h:1006
@ RTE_DMA_STATUS_ERROR_UNKNOWN
Definition: rte_dmadev.h:1036
@ RTE_DMA_STATUS_BUS_ERROR
Definition: rte_dmadev.h:1021
@ RTE_DMA_STATUS_BUS_WRITE_ERROR
Definition: rte_dmadev.h:1017
@ RTE_DMA_STATUS_DATA_POISION
Definition: rte_dmadev.h:1023
@ RTE_DMA_STATUS_INVALID_OPCODE
Definition: rte_dmadev.h:1013
@ RTE_DMA_STATUS_INVALID_SRC_ADDR
Definition: rte_dmadev.h:999
@ RTE_DMA_STATUS_USER_ABORT
Definition: rte_dmadev.h:989
@ RTE_DMA_STATUS_DEV_LINK_ERROR
Definition: rte_dmadev.h:1030
@ RTE_DMA_STATUS_SUCCESSFUL
Definition: rte_dmadev.h:982
static uint16_t rte_dma_dequeue_ops(int16_t dev_id, uint16_t vchan, struct rte_dma_op **ops, uint16_t nb_ops)
Definition: rte_dmadev.h:1521
static uint16_t rte_dma_burst_capacity(int16_t dev_id, uint16_t vchan)
Definition: rte_dmadev.h:1448
int rte_dma_stats_reset(int16_t dev_id, uint16_t vchan)
int rte_dma_dev_max(size_t dev_max)
void(* rte_dma_access_pair_group_event_cb_t)(int16_t dev_id, int16_t group_id, rte_uuid_t domain_id, enum rte_dma_access_pair_group_event_type event)
Definition: rte_dmadev.h:855
int rte_dma_stop(int16_t dev_id)
static uint16_t rte_dma_completed(int16_t dev_id, uint16_t vchan, const uint16_t nb_cpls, uint16_t *last_idx, bool *has_error)
Definition: rte_dmadev.h:1349
rte_dma_access_pair_group_event_type
Definition: rte_dmadev.h:823
@ RTE_DMA_GROUP_EVENT_MEMBER_LEFT
Definition: rte_dmadev.h:825
@ RTE_DMA_GROUP_EVENT_GROUP_DESTROYED
Definition: rte_dmadev.h:827
int rte_dma_dump(int16_t dev_id, FILE *f)
__rte_experimental int rte_dma_access_pair_group_create(int16_t dev_id, rte_uuid_t domain_id, rte_uuid_t token, int16_t *group_id, rte_dma_access_pair_group_event_cb_t cb)
__rte_experimental int rte_dma_access_pair_group_leave(int16_t dev_id, int16_t group_id)
int rte_dma_vchan_setup(int16_t dev_id, uint16_t vchan, const struct rte_dma_vchan_conf *conf)
int16_t rte_dma_next_dev(int16_t start_dev_id)
static int rte_dma_submit(int16_t dev_id, uint16_t vchan)
Definition: rte_dmadev.h:1308
static uint16_t rte_dma_enqueue_ops(int16_t dev_id, uint16_t vchan, struct rte_dma_op **ops, uint16_t nb_ops)
Definition: rte_dmadev.h:1486
static uint16_t rte_dma_completed_status(int16_t dev_id, uint16_t vchan, const uint16_t nb_cpls, uint16_t *last_idx, enum rte_dma_status_code *status)
Definition: rte_dmadev.h:1411
int rte_dma_stats_get(int16_t dev_id, uint16_t vchan, struct rte_dma_stats *stats)
bool rte_dma_is_valid(int16_t dev_id)
__rte_experimental int rte_dma_access_pair_group_join(int16_t dev_id, int16_t group_id, rte_uuid_t token, rte_dma_access_pair_group_event_cb_t cb)
rte_dma_port_type
Definition: rte_dmadev.h:486
@ RTE_DMA_PORT_PCIE
Definition: rte_dmadev.h:488
static int rte_dma_fill(int16_t dev_id, uint16_t vchan, uint64_t pattern, rte_iova_t dst, uint32_t length, uint64_t flags)
Definition: rte_dmadev.h:1274
rte_dma_vchan_status
Definition: rte_dmadev.h:772
@ RTE_DMA_VCHAN_HALTED_ERROR
Definition: rte_dmadev.h:775
@ RTE_DMA_VCHAN_ACTIVE
Definition: rte_dmadev.h:774
@ RTE_DMA_VCHAN_IDLE
Definition: rte_dmadev.h:773
__rte_experimental int rte_dma_access_pair_group_handler_get(int16_t dev_id, int16_t group_id, rte_uuid_t domain_id, uint16_t *handler)
static int rte_dma_copy_sg(int16_t dev_id, uint16_t vchan, struct rte_dma_sge *src, struct rte_dma_sge *dst, uint16_t nb_src, uint16_t nb_dst, uint64_t flags)
Definition: rte_dmadev.h:1225
void *(* handler)(void *sock_id)
unsigned char rte_uuid_t[16]
Definition: rte_uuid.h:24
struct rte_mempool * pool
Definition: rte_dmadev.h:587
uint64_t flags
Definition: rte_dmadev.h:374
uint16_t nb_vchans
Definition: rte_dmadev.h:364
uint64_t dev_capa
Definition: rte_dmadev.h:315
uint16_t max_sges
Definition: rte_dmadev.h:329
uint16_t max_vchans
Definition: rte_dmadev.h:317
uint16_t max_desc
Definition: rte_dmadev.h:319
uint16_t min_desc
Definition: rte_dmadev.h:321
const char * dev_name
Definition: rte_dmadev.h:313
uint16_t nb_priorities
Definition: rte_dmadev.h:337
uint16_t nb_vchans
Definition: rte_dmadev.h:333
int16_t numa_node
Definition: rte_dmadev.h:331
enum rte_dma_inter_domain_type type
Definition: rte_dmadev.h:634
enum rte_dma_status_code status
Definition: rte_dmadev.h:1062
uint64_t user_meta
Definition: rte_dmadev.h:1075
uint64_t event_meta
Definition: rte_dmadev.h:1089
uint16_t nb_dst
Definition: rte_dmadev.h:1101
uint16_t vchan
Definition: rte_dmadev.h:1097
uint16_t nb_src
Definition: rte_dmadev.h:1099
struct rte_dma_sge src_dst_seg[]
Definition: rte_dmadev.h:1103
uint64_t impl_opaque[2]
Definition: rte_dmadev.h:1071
int16_t dma_dev_id
Definition: rte_dmadev.h:1093
uint64_t flags
Definition: rte_dmadev.h:1058
uint32_t rsvd
Definition: rte_dmadev.h:1064
struct rte_mempool * op_mp
Definition: rte_dmadev.h:1060
enum rte_dma_port_type port_type
Definition: rte_dmadev.h:502
__extension__ union rte_dma_port_param::@149::@151 pcie
uint64_t reserved[2]
Definition: rte_dmadev.h:571
rte_iova_t addr
Definition: rte_dmadev.h:1045
uint32_t length
Definition: rte_dmadev.h:1046
uint64_t submitted
Definition: rte_dmadev.h:717
uint64_t errors
Definition: rte_dmadev.h:723
uint64_t completed
Definition: rte_dmadev.h:721
struct rte_dma_inter_domain_param domain
Definition: rte_dmadev.h:689
enum rte_dma_direction direction
Definition: rte_dmadev.h:653
struct rte_dma_auto_free_param auto_free
Definition: rte_dmadev.h:679
struct rte_dma_port_param src_port
Definition: rte_dmadev.h:663
struct rte_dma_port_param dst_port
Definition: rte_dmadev.h:671