#ifndef _RTE_RING_ELEM_PVT_H_
#define _RTE_RING_ELEM_PVT_H_

#if defined(RTE_TOOLCHAIN_GCC) && (GCC_VERSION >= 120000)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wstringop-overflow"
#pragma GCC diagnostic ignored "-Wstringop-overread"
#endif

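/*
 * Fixed-width copy helpers shared by the enqueue and dequeue paths: each
 * copies 'n' elements between the application table and the ring data area,
 * unrolling the common non-wrapping case and falling back to two simple
 * loops when the copy wraps past 'size'.
 */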
static __rte_always_inline void
__rte_ring_enqueue_elems_32(void *ring_table, const void *obj_table,
	uint32_t size, uint32_t idx, uint32_t n)
{
	unsigned int i;

	uint32_t *ring = (uint32_t *)ring_table;
	const uint32_t *obj = (const uint32_t *)obj_table;

	if (likely(idx + n <= size)) {
		for (i = 0; i < (n & ~0x7); i += 8, idx += 8) {
			ring[idx] = obj[i];
			ring[idx + 1] = obj[i + 1];
			ring[idx + 2] = obj[i + 2];
			ring[idx + 3] = obj[i + 3];
			ring[idx + 4] = obj[i + 4];
			ring[idx + 5] = obj[i + 5];
			ring[idx + 6] = obj[i + 6];
			ring[idx + 7] = obj[i + 7];
		}
		switch (n & 0x7) {
		case 7:
			ring[idx++] = obj[i++]; /* fallthrough */
		case 6:
			ring[idx++] = obj[i++]; /* fallthrough */
		case 5:
			ring[idx++] = obj[i++]; /* fallthrough */
		case 4:
			ring[idx++] = obj[i++]; /* fallthrough */
		case 3:
			ring[idx++] = obj[i++]; /* fallthrough */
		case 2:
			ring[idx++] = obj[i++]; /* fallthrough */
		case 1:
			ring[idx++] = obj[i++];
		}
	} else {
		for (i = 0; idx < size; i++, idx++)
			ring[idx] = obj[i];
		/* Start at the beginning */
		for (idx = 0; i < n; i++, idx++)
			ring[idx] = obj[i];
	}
}

static __rte_always_inline void
__rte_ring_enqueue_elems_64(void *ring_table, const void *obj_table,
	uint32_t size, uint32_t idx, uint32_t n)
{
	unsigned int i;

	uint64_t *ring = (uint64_t *)ring_table;
	const unaligned_uint64_t *obj = (const unaligned_uint64_t *)obj_table;

	if (likely(idx + n <= size)) {
		for (i = 0; i < (n & ~0x3); i += 4, idx += 4) {
			ring[idx] = obj[i];
			ring[idx + 1] = obj[i + 1];
			ring[idx + 2] = obj[i + 2];
			ring[idx + 3] = obj[i + 3];
		}
		switch (n & 0x3) {
		case 3:
			ring[idx++] = obj[i++]; /* fallthrough */
		case 2:
			ring[idx++] = obj[i++]; /* fallthrough */
		case 1:
			ring[idx++] = obj[i++];
		}
	} else {
		for (i = 0; idx < size; i++, idx++)
			ring[idx] = obj[i];
		/* Start at the beginning */
		for (idx = 0; i < n; i++, idx++)
			ring[idx] = obj[i];
	}
}

static __rte_always_inline void
__rte_ring_enqueue_elems_128(void *ring_table, const void *obj_table,
	uint32_t size, uint32_t idx, uint32_t n)
{
	unsigned int i;

	rte_int128_t *ring = (rte_int128_t *)ring_table;
	const rte_int128_t *obj = (const rte_int128_t *)obj_table;

	if (likely(idx + n <= size)) {
		for (i = 0; i < (n & ~0x1); i += 2, idx += 2)
			memcpy((void *)(ring + idx),
				(const void *)(obj + i), 32);
		switch (n & 0x1) {
		case 1:
			memcpy((void *)(ring + idx),
				(const void *)(obj + i), 16);
		}
	} else {
		for (i = 0; idx < size; i++, idx++)
			memcpy((void *)(ring + idx),
				(const void *)(obj + i), 16);
		/* Start at the beginning */
		for (idx = 0; i < n; i++, idx++)
			memcpy((void *)(ring + idx),
				(const void *)(obj + i), 16);
	}
}

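/* the actual enqueue of elements on the ring.
 * Placed here since identical code is needed in both
 * the single and multi producer enqueue functions.
 */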
static __rte_always_inline void
__rte_ring_do_enqueue_elems(void *ring_table, const void *obj_table,
	uint32_t size, uint32_t idx, uint32_t esize, uint32_t num)
{
	/* 8B and 16B copies implemented individually to retain
	 * the current performance.
	 */
	if (esize == 8)
		__rte_ring_enqueue_elems_64(ring_table, obj_table, size,
				idx, num);
	else if (esize == 16)
		__rte_ring_enqueue_elems_128(ring_table, obj_table, size,
				idx, num);
	else {
		uint32_t scale, nr_idx, nr_num, nr_size;

		/* Normalize index, count and size to uint32_t units */
		scale = esize / sizeof(uint32_t);
		nr_num = num * scale;
		nr_idx = idx * scale;
		nr_size = size * scale;
		__rte_ring_enqueue_elems_32(ring_table, obj_table, nr_size,
				nr_idx, nr_num);
	}
}

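/* The ring data area starts right after the rte_ring structure (&r[1]);
 * prod_head is masked to obtain the slot index inside the ring.
 */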
static __rte_always_inline void
__rte_ring_enqueue_elems(struct rte_ring *r, uint32_t prod_head,
		const void *obj_table, uint32_t esize, uint32_t num)
{
	__rte_ring_do_enqueue_elems(&r[1], obj_table, r->size,
			prod_head & r->mask, esize, num);
}

static __rte_always_inline void
__rte_ring_dequeue_elems_32(void *obj_table, const void *ring_table,
	uint32_t size, uint32_t idx, uint32_t n)
{
	unsigned int i;

	uint32_t *obj = (uint32_t *)obj_table;
	const uint32_t *ring = (const uint32_t *)ring_table;

	if (likely(idx + n <= size)) {
		for (i = 0; i < (n & ~0x7); i += 8, idx += 8) {
			obj[i] = ring[idx];
			obj[i + 1] = ring[idx + 1];
			obj[i + 2] = ring[idx + 2];
			obj[i + 3] = ring[idx + 3];
			obj[i + 4] = ring[idx + 4];
			obj[i + 5] = ring[idx + 5];
			obj[i + 6] = ring[idx + 6];
			obj[i + 7] = ring[idx + 7];
		}
		switch (n & 0x7) {
		case 7:
			obj[i++] = ring[idx++]; /* fallthrough */
		case 6:
			obj[i++] = ring[idx++]; /* fallthrough */
		case 5:
			obj[i++] = ring[idx++]; /* fallthrough */
		case 4:
			obj[i++] = ring[idx++]; /* fallthrough */
		case 3:
			obj[i++] = ring[idx++]; /* fallthrough */
		case 2:
			obj[i++] = ring[idx++]; /* fallthrough */
		case 1:
			obj[i++] = ring[idx++];
		}
	} else {
		for (i = 0; idx < size; i++, idx++)
			obj[i] = ring[idx];
		/* Start at the beginning */
		for (idx = 0; i < n; i++, idx++)
			obj[i] = ring[idx];
	}
}

static __rte_always_inline void
__rte_ring_dequeue_elems_64(void *obj_table, const void *ring_table,
	uint32_t size, uint32_t idx, uint32_t n)
{
	unsigned int i;

	unaligned_uint64_t *obj = (unaligned_uint64_t *)obj_table;
	const uint64_t *ring = (const uint64_t *)ring_table;

	if (likely(idx + n <= size)) {
		for (i = 0; i < (n & ~0x3); i += 4, idx += 4) {
			obj[i] = ring[idx];
			obj[i + 1] = ring[idx + 1];
			obj[i + 2] = ring[idx + 2];
			obj[i + 3] = ring[idx + 3];
		}
		switch (n & 0x3) {
		case 3:
			obj[i++] = ring[idx++]; /* fallthrough */
		case 2:
			obj[i++] = ring[idx++]; /* fallthrough */
		case 1:
			obj[i++] = ring[idx++];
		}
	} else {
		for (i = 0; idx < size; i++, idx++)
			obj[i] = ring[idx];
		/* Start at the beginning */
		for (idx = 0; i < n; i++, idx++)
			obj[i] = ring[idx];
	}
}

static __rte_always_inline void
__rte_ring_dequeue_elems_128(void *obj_table, const void *ring_table,
	uint32_t size, uint32_t idx, uint32_t n)
{
	unsigned int i;

	rte_int128_t *obj = (rte_int128_t *)obj_table;
	const rte_int128_t *ring = (const rte_int128_t *)ring_table;

	if (likely(idx + n <= size)) {
		for (i = 0; i < (n & ~0x1); i += 2, idx += 2)
			memcpy((obj + i), (const void *)(ring + idx), 32);
		switch (n & 0x1) {
		case 1:
			memcpy((obj + i), (const void *)(ring + idx), 16);
		}
	} else {
		for (i = 0; idx < size; i++, idx++)
			memcpy((obj + i), (const void *)(ring + idx), 16);
		/* Start at the beginning */
		for (idx = 0; i < n; i++, idx++)
			memcpy((obj + i), (const void *)(ring + idx), 16);
	}
}

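/* the actual dequeue of elements from the ring.
 * Placed here since identical code is needed in both
 * the single and multi consumer dequeue functions.
 */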
static __rte_always_inline void
__rte_ring_do_dequeue_elems(void *obj_table, const void *ring_table,
	uint32_t size, uint32_t idx, uint32_t esize, uint32_t num)
{
	/* 8B and 16B copies implemented individually to retain
	 * the current performance.
	 */
	if (esize == 8)
		__rte_ring_dequeue_elems_64(obj_table, ring_table, size,
				idx, num);
	else if (esize == 16)
		__rte_ring_dequeue_elems_128(obj_table, ring_table, size,
				idx, num);
	else {
		uint32_t scale, nr_idx, nr_num, nr_size;

		/* Normalize index, count and size to uint32_t units */
		scale = esize / sizeof(uint32_t);
		nr_num = num * scale;
		nr_idx = idx * scale;
		nr_size = size * scale;
		__rte_ring_dequeue_elems_32(obj_table, ring_table, nr_size,
				nr_idx, nr_num);
	}
}

static __rte_always_inline void
__rte_ring_dequeue_elems(struct rte_ring *r, uint32_t cons_head,
		void *obj_table, uint32_t esize, uint32_t num)
{
	__rte_ring_do_dequeue_elems(obj_table, &r[1], r->size,
			cons_head & r->mask, esize, num);
}

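/* On weakly ordered CPUs (e.g. PPC, Arm) loads and stores may be reordered,
 * so head/tail synchronisation is provided either by explicit barriers
 * (generic implementation) or by C11 acquire/release atomics, selected at
 * build time below.
 */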
#ifdef RTE_USE_C11_MEM_MODEL
#include "rte_ring_c11_pvt.h"
#else
#include "rte_ring_generic_pvt.h"
#endif

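/**
 * @internal Update the producer head to reserve space for an enqueue.
 * Returns the actual number of objects that can be enqueued (0 or n for
 * RTE_RING_QUEUE_FIXED behavior); *old_head and *new_head delimit the
 * reserved slots and *free_entries reports the free space before the move.
 */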
static __rte_always_inline unsigned int
__rte_ring_move_prod_head(struct rte_ring *r, unsigned int is_sp,
		unsigned int n, enum rte_ring_queue_behavior behavior,
		uint32_t *old_head, uint32_t *new_head,
		uint32_t *free_entries)
{
	return __rte_ring_headtail_move_head(&r->prod, &r->cons, r->capacity,
			is_sp, n, behavior, old_head, new_head, free_entries);
}

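/**
 * @internal Update the consumer head to reserve objects for a dequeue.
 * Mirrors __rte_ring_move_prod_head: *old_head and *new_head delimit the
 * slots to be read and *entries reports how many objects were available
 * before the move.
 */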
static __rte_always_inline unsigned int
__rte_ring_move_cons_head(struct rte_ring *r, unsigned int is_sc,
		unsigned int n, enum rte_ring_queue_behavior behavior,
		uint32_t *old_head, uint32_t *new_head,
		uint32_t *entries)
{
	return __rte_ring_headtail_move_head(&r->cons, &r->prod, 0,
			is_sc, n, behavior, old_head, new_head, entries);
}

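/**
 * @internal Enqueue several objects on the ring.
 * Reserves space by moving the producer head, copies the objects into the
 * ring, then publishes them by updating the producer tail. If free_space is
 * non-NULL, it returns the number of free entries remaining after the
 * enqueue.
 */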
static __rte_always_inline unsigned int
__rte_ring_do_enqueue_elem(struct rte_ring *r, const void *obj_table,
		unsigned int esize, unsigned int n,
		enum rte_ring_queue_behavior behavior, unsigned int is_sp,
		unsigned int *free_space)
{
	uint32_t prod_head, prod_next;
	uint32_t free_entries;

	n = __rte_ring_move_prod_head(r, is_sp, n, behavior,
			&prod_head, &prod_next, &free_entries);
	if (n == 0)
		goto end;

	__rte_ring_enqueue_elems(r, prod_head, obj_table, esize, n);

	__rte_ring_update_tail(&r->prod, prod_head, prod_next, is_sp, 1);
end:
	if (free_space != NULL)
		*free_space = free_entries - n;
	return n;
}

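/**
 * @internal Dequeue several objects from the ring.
 * Reserves objects by moving the consumer head, copies them out of the ring,
 * then releases the slots by updating the consumer tail. If available is
 * non-NULL, it returns the number of entries left after the dequeue.
 */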
static __rte_always_inline unsigned int
__rte_ring_do_dequeue_elem(struct rte_ring *r, void *obj_table,
		unsigned int esize, unsigned int n,
		enum rte_ring_queue_behavior behavior, unsigned int is_sc,
		unsigned int *available)
{
	uint32_t cons_head, cons_next;
	uint32_t entries;

	n = __rte_ring_move_cons_head(r, (int)is_sc, n, behavior,
			&cons_head, &cons_next, &entries);
	if (n == 0)
		goto end;

	__rte_ring_dequeue_elems(r, cons_head, obj_table, esize, n);

	__rte_ring_update_tail(&r->cons, cons_head, cons_next, is_sc, 0);

end:
	if (available != NULL)
		*available = entries - n;
	return n;
}

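/*
 * Usage note: the public element APIs in rte_ring_elem.h are thin wrappers
 * around the two helpers above. A minimal sketch (not the exact upstream
 * code) of a fixed-size multi-producer bulk enqueue, close to what
 * rte_ring_mp_enqueue_bulk_elem() does, would be:
 *
 *	static inline unsigned int
 *	ring_mp_enqueue_bulk_elem(struct rte_ring *r, const void *obj_table,
 *			unsigned int esize, unsigned int n,
 *			unsigned int *free_space)
 *	{
 *		return __rte_ring_do_enqueue_elem(r, obj_table, esize, n,
 *				RTE_RING_QUEUE_FIXED, RTE_RING_SYNC_MT,
 *				free_space);
 *	}
 */
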
#if defined(RTE_TOOLCHAIN_GCC) && (GCC_VERSION >= 120000)
#pragma GCC diagnostic pop
#endif

#endif /* _RTE_RING_ELEM_PVT_H_ */