22#include <rte_compat.h>
24#include <rte_stdatomic.h>
/**
 * Get the uint64_t value of the bit at position @p nr.
 *
 * @param nr Bit position, must be < 64 (shifting by >= the type width
 *           is undefined behaviour).
 * @return UINT64_C(1) << nr, of type uint64_t.
 */
#define RTE_BIT64(nr) (UINT64_C(1) << (nr))

/**
 * Get the uint32_t value of the bit at position @p nr.
 *
 * @param nr Bit position, must be < 32.
 * @return UINT32_C(1) << nr, of type uint32_t.
 */
#define RTE_BIT32(nr) (UINT32_C(1) << (nr))

/**
 * Get @p val shifted left by @p nr, as a uint32_t.
 *
 * @p val must be an integer-constant token (it is pasted into UINT32_C()).
 */
#define RTE_SHIFT_VAL32(val, nr) (UINT32_C(val) << (nr))

/**
 * Get @p val shifted left by @p nr, as a uint64_t.
 *
 * @p val must be an integer-constant token (it is pasted into UINT64_C()).
 */
#define RTE_SHIFT_VAL64(val, nr) (UINT64_C(val) << (nr))
/**
 * Generate a contiguous 32-bit mask with all bits in the inclusive range
 * [high, low] set, e.g. RTE_GENMASK32(7, 4) == 0xf0.
 *
 * Requires 0 <= low <= high <= 31; other arguments give an undefined or
 * meaningless result (the right shift uses 31u - high).
 */
#define RTE_GENMASK32(high, low) \
	(((~UINT32_C(0)) << (low)) & (~UINT32_C(0) >> (31u - (high))))
/**
 * Generate a contiguous 64-bit mask with all bits in the inclusive range
 * [high, low] set, e.g. RTE_GENMASK64(15, 8) == 0xff00.
 *
 * Requires 0 <= low <= high <= 63 (the right shift uses 63u - high).
 */
#define RTE_GENMASK64(high, low) \
	(((~UINT64_C(0)) << (low)) & (~UINT64_C(0) >> (63u - (high))))
/**
 * Extract from @p reg the field selected by the contiguous bitmask @p mask,
 * shifted down so the field's least-significant bit lands at bit 0.
 * The result is cast back to the type of @p mask.
 *
 * NOTE(review): relies on rte_ctz32() (declared elsewhere in this header)
 * and on the GNU/C23 `typeof` extension.
 */
#define RTE_FIELD_GET32(mask, reg) \
	((typeof(mask))(((reg) & (mask)) >> rte_ctz32(mask)))

/**
 * 64-bit variant of RTE_FIELD_GET32; uses rte_ctz64() to locate the
 * field's low bit.
 */
#define RTE_FIELD_GET64(mask, reg) \
	((typeof(mask))(((reg) & (mask)) >> rte_ctz64(mask)))
/*
 * Type-generic single-bit operations (test/set/clear/assign/flip) on
 * 32- and 64-bit words.  Each macro dispatches on the pointer type of
 * `addr` to a width- and volatility-specific `__rte_bit_*` helper.
 *
 * NOTE(review): this copy is garbled by extraction -- original line
 * numbers are fused onto each line, and the `_Generic((addr),` selector
 * line plus the trailing `(addr, nr)` argument list of each macro are
 * missing.  Compare against upstream DPDK rte_bitops.h before relying
 * on this text.
 */
130#define rte_bit_test(addr, nr) \
132 uint32_t *: __rte_bit_test32, \
133 const uint32_t *: __rte_bit_test32, \
134 volatile uint32_t *: __rte_bit_v_test32, \
135 const volatile uint32_t *: __rte_bit_v_test32, \
136 uint64_t *: __rte_bit_test64, \
137 const uint64_t *: __rte_bit_test64, \
138 volatile uint64_t *: __rte_bit_v_test64, \
139 const volatile uint64_t *: __rte_bit_v_test64) \
160#define rte_bit_set(addr, nr) \
162 uint32_t *: __rte_bit_set32, \
163 volatile uint32_t *: __rte_bit_v_set32, \
164 uint64_t *: __rte_bit_set64, \
165 volatile uint64_t *: __rte_bit_v_set64) \
186#define rte_bit_clear(addr, nr) \
188 uint32_t *: __rte_bit_clear32, \
189 volatile uint32_t *: __rte_bit_v_clear32, \
190 uint64_t *: __rte_bit_clear64, \
191 volatile uint64_t *: __rte_bit_v_clear64) \
213#define rte_bit_assign(addr, nr, value) \
215 uint32_t *: __rte_bit_assign32, \
216 volatile uint32_t *: __rte_bit_v_assign32, \
217 uint64_t *: __rte_bit_assign64, \
218 volatile uint64_t *: __rte_bit_v_assign64) \
239#define rte_bit_flip(addr, nr) \
241 uint32_t *: __rte_bit_flip32, \
242 volatile uint32_t *: __rte_bit_v_flip32, \
243 uint64_t *: __rte_bit_flip64, \
244 volatile uint64_t *: __rte_bit_v_flip64) \
/*
 * Type-generic ATOMIC single-bit operations.  Same dispatch scheme as the
 * non-atomic family above, with an extra `memory_order` argument that is
 * forwarded to the selected `__rte_bit_atomic_*` helper.  Only the `test`
 * macro accepts const-qualified pointers (it is read-only).
 *
 * NOTE(review): garbled extraction -- the `_Generic((addr),` selector
 * lines are missing and original line numbers are fused onto each line.
 * Verify against upstream rte_bitops.h.
 */
266#define rte_bit_atomic_test(addr, nr, memory_order) \
268 uint32_t *: __rte_bit_atomic_test32, \
269 const uint32_t *: __rte_bit_atomic_test32, \
270 volatile uint32_t *: __rte_bit_atomic_v_test32, \
271 const volatile uint32_t *: __rte_bit_atomic_v_test32, \
272 uint64_t *: __rte_bit_atomic_test64, \
273 const uint64_t *: __rte_bit_atomic_test64, \
274 volatile uint64_t *: __rte_bit_atomic_v_test64, \
275 const volatile uint64_t *: __rte_bit_atomic_v_test64) \
276 (addr, nr, memory_order)
295#define rte_bit_atomic_set(addr, nr, memory_order) \
297 uint32_t *: __rte_bit_atomic_set32, \
298 volatile uint32_t *: __rte_bit_atomic_v_set32, \
299 uint64_t *: __rte_bit_atomic_set64, \
300 volatile uint64_t *: __rte_bit_atomic_v_set64) \
301 (addr, nr, memory_order)
320#define rte_bit_atomic_clear(addr, nr, memory_order) \
322 uint32_t *: __rte_bit_atomic_clear32, \
323 volatile uint32_t *: __rte_bit_atomic_v_clear32, \
324 uint64_t *: __rte_bit_atomic_clear64, \
325 volatile uint64_t *: __rte_bit_atomic_v_clear64) \
326 (addr, nr, memory_order)
347#define rte_bit_atomic_assign(addr, nr, value, memory_order) \
349 uint32_t *: __rte_bit_atomic_assign32, \
350 volatile uint32_t *: __rte_bit_atomic_v_assign32, \
351 uint64_t *: __rte_bit_atomic_assign64, \
352 volatile uint64_t *: __rte_bit_atomic_v_assign64) \
353 (addr, nr, value, memory_order)
373#define rte_bit_atomic_flip(addr, nr, memory_order) \
375 uint32_t *: __rte_bit_atomic_flip32, \
376 volatile uint32_t *: __rte_bit_atomic_v_flip32, \
377 uint64_t *: __rte_bit_atomic_flip64, \
378 volatile uint64_t *: __rte_bit_atomic_v_flip64) \
379 (addr, nr, memory_order)
400#define rte_bit_atomic_test_and_set(addr, nr, memory_order) \
402 uint32_t *: __rte_bit_atomic_test_and_set32, \
403 volatile uint32_t *: __rte_bit_atomic_v_test_and_set32, \
404 uint64_t *: __rte_bit_atomic_test_and_set64, \
405 volatile uint64_t *: __rte_bit_atomic_v_test_and_set64) \
406 (addr, nr, memory_order)
427#define rte_bit_atomic_test_and_clear(addr, nr, memory_order) \
429 uint32_t *: __rte_bit_atomic_test_and_clear32, \
430 volatile uint32_t *: __rte_bit_atomic_v_test_and_clear32, \
431 uint64_t *: __rte_bit_atomic_test_and_clear64, \
432 volatile uint64_t *: __rte_bit_atomic_v_test_and_clear64) \
433 (addr, nr, memory_order)
457#define rte_bit_atomic_test_and_assign(addr, nr, value, memory_order) \
459 uint32_t *: __rte_bit_atomic_test_and_assign32, \
460 volatile uint32_t *: __rte_bit_atomic_v_test_and_assign32, \
461 uint64_t *: __rte_bit_atomic_test_and_assign64, \
462 volatile uint64_t *: __rte_bit_atomic_v_test_and_assign64) \
463 (addr, nr, value, memory_order)
/*
 * Generator macros producing the non-atomic `__rte_bit_*` helper
 * functions for both widths (32/64) and both volatility variants
 * ("" and "v_"/volatile).  Each helper asserts nr < width, builds a
 * single-bit mask and applies it (test: &, set: |, clear: & ~, assign:
 * delegates to set/clear, flip: test then assign the negation).
 *
 * NOTE(review): garbled extraction -- the `static inline` return-type
 * lines and the `{`/`}` brace lines of each generated function are
 * missing, and original line numbers are fused onto each line.
 */
465#define __RTE_GEN_BIT_TEST(variant, qualifier, size) \
468__rte_bit_ ## variant ## test ## size(const qualifier uint ## size ## _t *addr, unsigned int nr) \
470 RTE_ASSERT(nr < size); \
471 uint ## size ## _t mask = (uint ## size ## _t)1 << nr; \
472 return *addr & mask; \
475#define __RTE_GEN_BIT_SET(variant, qualifier, size) \
478__rte_bit_ ## variant ## set ## size(qualifier uint ## size ## _t *addr, unsigned int nr) \
480 RTE_ASSERT(nr < size); \
481 uint ## size ## _t mask = (uint ## size ## _t)1 << nr; \
485#define __RTE_GEN_BIT_CLEAR(variant, qualifier, size) \
488__rte_bit_ ## variant ## clear ## size(qualifier uint ## size ## _t *addr, unsigned int nr) \
490 RTE_ASSERT(nr < size); \
491 uint ## size ## _t mask = ~((uint ## size ## _t)1 << nr); \
495#define __RTE_GEN_BIT_ASSIGN(variant, qualifier, size) \
498__rte_bit_ ## variant ## assign ## size(qualifier uint ## size ## _t *addr, unsigned int nr, \
502 __rte_bit_ ## variant ## set ## size(addr, nr); \
504 __rte_bit_ ## variant ## clear ## size(addr, nr); \
507#define __RTE_GEN_BIT_FLIP(variant, qualifier, size) \
510__rte_bit_ ## variant ## flip ## size(qualifier uint ## size ## _t *addr, unsigned int nr) \
513 value = __rte_bit_ ## variant ## test ## size(addr, nr); \
514 __rte_bit_ ## variant ## assign ## size(addr, nr, !value); \
517#define __RTE_GEN_BIT_OPS(v, qualifier, size) \
518 __RTE_GEN_BIT_TEST(v, qualifier, size) \
519 __RTE_GEN_BIT_SET(v, qualifier, size) \
520 __RTE_GEN_BIT_CLEAR(v, qualifier, size) \
521 __RTE_GEN_BIT_ASSIGN(v, qualifier, size) \
522 __RTE_GEN_BIT_FLIP(v, qualifier, size)
524#define __RTE_GEN_BIT_OPS_SIZE(size) \
525 __RTE_GEN_BIT_OPS(,, size) \
526 __RTE_GEN_BIT_OPS(v_, volatile, size)
528#ifdef ALLOW_EXPERIMENTAL_API
529__RTE_GEN_BIT_OPS_SIZE(32)
530__RTE_GEN_BIT_OPS_SIZE(64)
/*
 * Generator macros producing the atomic `__rte_bit_atomic_*` helpers.
 * Each helper casts the plain pointer to an RTE_ATOMIC()-qualified
 * pointer and uses rte_atomic_*_explicit() (load / fetch_or / fetch_and
 * / fetch_xor) with the caller-supplied memory order.  The test_and_*
 * variants return the previous value of the bit (prev & mask).
 *
 * NOTE(review): garbled extraction -- `static inline` lines, braces,
 * and some if/else lines (e.g. in ASSIGN and TEST_AND_ASSIGN) are
 * missing, and original line numbers are fused onto each line.
 */
533#define __RTE_GEN_BIT_ATOMIC_TEST(variant, qualifier, size) \
536__rte_bit_atomic_ ## variant ## test ## size(const qualifier uint ## size ## _t *addr, \
537 unsigned int nr, int memory_order) \
539 RTE_ASSERT(nr < size); \
540 const qualifier RTE_ATOMIC(uint ## size ## _t) *a_addr = \
541 (const qualifier RTE_ATOMIC(uint ## size ## _t) *)addr; \
542 uint ## size ## _t mask = (uint ## size ## _t)1 << nr; \
543 return rte_atomic_load_explicit(a_addr, memory_order) & mask; \
546#define __RTE_GEN_BIT_ATOMIC_SET(variant, qualifier, size) \
549__rte_bit_atomic_ ## variant ## set ## size(qualifier uint ## size ## _t *addr, \
550 unsigned int nr, int memory_order) \
552 RTE_ASSERT(nr < size); \
553 qualifier RTE_ATOMIC(uint ## size ## _t) *a_addr = \
554 (qualifier RTE_ATOMIC(uint ## size ## _t) *)addr; \
555 uint ## size ## _t mask = (uint ## size ## _t)1 << nr; \
556 rte_atomic_fetch_or_explicit(a_addr, mask, memory_order); \
559#define __RTE_GEN_BIT_ATOMIC_CLEAR(variant, qualifier, size) \
562__rte_bit_atomic_ ## variant ## clear ## size(qualifier uint ## size ## _t *addr, \
563 unsigned int nr, int memory_order) \
565 RTE_ASSERT(nr < size); \
566 qualifier RTE_ATOMIC(uint ## size ## _t) *a_addr = \
567 (qualifier RTE_ATOMIC(uint ## size ## _t) *)addr; \
568 uint ## size ## _t mask = (uint ## size ## _t)1 << nr; \
569 rte_atomic_fetch_and_explicit(a_addr, ~mask, memory_order); \
572#define __RTE_GEN_BIT_ATOMIC_FLIP(variant, qualifier, size) \
575__rte_bit_atomic_ ## variant ## flip ## size(qualifier uint ## size ## _t *addr, \
576 unsigned int nr, int memory_order) \
578 RTE_ASSERT(nr < size); \
579 qualifier RTE_ATOMIC(uint ## size ## _t) *a_addr = \
580 (qualifier RTE_ATOMIC(uint ## size ## _t) *)addr; \
581 uint ## size ## _t mask = (uint ## size ## _t)1 << nr; \
582 rte_atomic_fetch_xor_explicit(a_addr, mask, memory_order); \
585#define __RTE_GEN_BIT_ATOMIC_ASSIGN(variant, qualifier, size) \
588__rte_bit_atomic_## variant ## assign ## size(qualifier uint ## size ## _t *addr, \
589 unsigned int nr, bool value, int memory_order) \
592 __rte_bit_atomic_ ## variant ## set ## size(addr, nr, memory_order); \
594 __rte_bit_atomic_ ## variant ## clear ## size(addr, nr, memory_order); \
597#define __RTE_GEN_BIT_ATOMIC_TEST_AND_SET(variant, qualifier, size) \
600__rte_bit_atomic_ ## variant ## test_and_set ## size(qualifier uint ## size ## _t *addr, \
601 unsigned int nr, int memory_order) \
603 RTE_ASSERT(nr < size); \
604 qualifier RTE_ATOMIC(uint ## size ## _t) *a_addr = \
605 (qualifier RTE_ATOMIC(uint ## size ## _t) *)addr; \
606 uint ## size ## _t mask = (uint ## size ## _t)1 << nr; \
607 uint ## size ## _t prev; \
608 prev = rte_atomic_fetch_or_explicit(a_addr, mask, memory_order); \
609 return prev & mask; \
612#define __RTE_GEN_BIT_ATOMIC_TEST_AND_CLEAR(variant, qualifier, size) \
615__rte_bit_atomic_ ## variant ## test_and_clear ## size(qualifier uint ## size ## _t *addr, \
616 unsigned int nr, int memory_order) \
618 RTE_ASSERT(nr < size); \
619 qualifier RTE_ATOMIC(uint ## size ## _t) *a_addr = \
620 (qualifier RTE_ATOMIC(uint ## size ## _t) *)addr; \
621 uint ## size ## _t mask = (uint ## size ## _t)1 << nr; \
622 uint ## size ## _t prev; \
623 prev = rte_atomic_fetch_and_explicit(a_addr, ~mask, memory_order); \
624 return prev & mask; \
627#define __RTE_GEN_BIT_ATOMIC_TEST_AND_ASSIGN(variant, qualifier, size) \
630__rte_bit_atomic_ ## variant ## test_and_assign ## size( \
631 qualifier uint ## size ## _t *addr, unsigned int nr, bool value, \
635 return __rte_bit_atomic_ ## variant ## test_and_set ## size(addr, nr, \
638 return __rte_bit_atomic_ ## variant ## test_and_clear ## size(addr, nr, \
642#define __RTE_GEN_BIT_ATOMIC_OPS(variant, qualifier, size) \
643 __RTE_GEN_BIT_ATOMIC_TEST(variant, qualifier, size) \
644 __RTE_GEN_BIT_ATOMIC_SET(variant, qualifier, size) \
645 __RTE_GEN_BIT_ATOMIC_CLEAR(variant, qualifier, size) \
646 __RTE_GEN_BIT_ATOMIC_ASSIGN(variant, qualifier, size) \
647 __RTE_GEN_BIT_ATOMIC_TEST_AND_SET(variant, qualifier, size) \
648 __RTE_GEN_BIT_ATOMIC_TEST_AND_CLEAR(variant, qualifier, size) \
649 __RTE_GEN_BIT_ATOMIC_TEST_AND_ASSIGN(variant, qualifier, size) \
650 __RTE_GEN_BIT_ATOMIC_FLIP(variant, qualifier, size)
652#define __RTE_GEN_BIT_ATOMIC_OPS_SIZE(size) \
653 __RTE_GEN_BIT_ATOMIC_OPS(,, size) \
654 __RTE_GEN_BIT_ATOMIC_OPS(v_, volatile, size)
656#ifdef ALLOW_EXPERIMENTAL_API
657__RTE_GEN_BIT_ATOMIC_OPS_SIZE(32)
658__RTE_GEN_BIT_ATOMIC_OPS_SIZE(64)
/*
 * Legacy relaxed bit-manipulation helpers (rte_bit_relaxed_get/set/
 * clear/test_and_set/test_and_clear for 32 and 64 bits), operating with
 * plain (non-atomic) reads/writes on volatile pointers.
 *
 * NOTE(review): only scattered body lines survive in this extraction --
 * function names, signatures, braces and several statements are missing.
 * Do not edit from this copy; recover the functions from upstream.
 */
673static inline uint32_t
678 uint32_t mask = UINT32_C(1) << nr;
679 return (*addr) & mask;
696 *addr = (*addr) | mask;
713 *addr = (*addr) & (~mask);
727static inline uint32_t
733 uint32_t val = *addr;
749static inline uint32_t
755 uint32_t val = *addr;
756 *addr = val & (~mask);
772static inline uint64_t
778 return (*addr) & mask;
795 (*addr) = (*addr) | mask;
812 *addr = (*addr) & (~mask);
826static inline uint64_t
832 uint64_t val = *addr;
848static inline uint64_t
854 uint64_t val = *addr;
855 *addr = val & (~mask);
/*
 * Count-leading-zeros / count-trailing-zeros / popcount wrappers:
 * under RTE_TOOLCHAIN_MSVC they use _BitScanReverse(64)/_BitScanForward(64)
 * and __popcnt(64); otherwise GCC/Clang __builtin_clz(ll)/__builtin_ctz(ll)/
 * __builtin_popcount(ll).  The clz forms compute
 * width-in-bits - 1 - bit-index via sizeof(v) * CHAR_BIT.
 *
 * NOTE(review): garbled extraction -- function names/signatures and the
 * #else separating the MSVC and builtin halves are missing, and several
 * return statements are split mid-token across lines.  Recover from
 * upstream before editing.
 */
859#ifdef RTE_TOOLCHAIN_MSVC
869static inline unsigned int
874 (void)_BitScanReverse(&rv, v);
876 return (
unsigned int)(
sizeof(v) * CHAR_BIT - 1 - rv);
887static inline unsigned int
892 (void)_BitScanReverse64(&rv, v);
894 return (
unsigned int)(
sizeof(v) * CHAR_BIT - 1 - rv);
905static inline unsigned int
910 (void)_BitScanForward(&rv, v);
912 return (
unsigned int)rv;
923static inline unsigned int
928 (void)_BitScanForward64(&rv, v);
930 return (
unsigned int)rv;
941static inline unsigned int
944 return (
unsigned int)__popcnt(v);
955static inline unsigned int
958 return (
unsigned int)__popcnt64(v);
971static inline unsigned int
974 return (
unsigned int)__builtin_clz(v);
985static inline unsigned int
988 return (
unsigned int)__builtin_clzll(v);
999static inline unsigned int
1002 return (
unsigned int)__builtin_ctz(v);
1013static inline unsigned int
1016 return (
unsigned int)__builtin_ctzll(v);
1027static inline unsigned int
1030 return (
unsigned int)__builtin_popcount(v);
1041static inline unsigned int
1044 return (
unsigned int)__builtin_popcountll(v);
/*
 * Bit-scan-forward / find-last-set helpers.  The two surviving bodies
 * are the fls ("find last set", 1-indexed) pattern:
 * (x == 0) ? 0 : width - clz(x), using rte_clz32/rte_clz64.
 *
 * NOTE(review): garbled extraction -- most function names, signatures
 * and bodies are missing here (only bare `static inline` lines remain
 * for several helpers).  Recover from upstream before editing.
 */
1059static inline uint32_t
1081static inline uint64_t
1105static inline uint32_t
1146static inline uint32_t
1187static inline uint32_t
1190 return (x == 0) ? 0 : 32 -
rte_clz32(x);
1205static inline uint32_t
1208 return (x == 0) ? 0 : 64 -
rte_clz64(x);
/**
 * Evaluate to nonzero iff @p n is a nonzero power of two
 * (usable in constant expressions, e.g. static initializers).
 *
 * NOTE: @p n is evaluated more than once -- do not pass expressions
 * with side effects.
 */
#define RTE_IS_POWER_OF_2(n) ((n) && !(((n) - 1) & (n)))
/*
 * Function form of the power-of-two predicate plus the align-to-power-
 * of-two helpers.  The surviving `x - (x >> 1)` / `v - (v >> 1)` lines
 * are the tail of the "previous power of two" computation (isolate the
 * top bit after a combine-most-significant-1-bits spread).
 *
 * NOTE(review): garbled extraction -- signatures and most bodies are
 * missing (only bare `static inline` lines remain for several helpers).
 * Recover from upstream before editing.
 */
1227 return n && !(n & (n - 1));
1239static inline uint32_t
1257static inline uint32_t
1262 return x - (x >> 1);
1274static inline uint64_t
1292static inline uint64_t
1297 return v - (v >> 1);
1311static inline uint32_t
1331static inline uint32_t
/*
 * Start of the C++ section: the C11 _Generic-based function-like macros
 * defined above are #undef'd here so they can be re-introduced below as
 * genuinely overloaded inline functions (C++ has no _Generic).
 */
1353#undef rte_bit_assign
1356#undef rte_bit_atomic_test
1357#undef rte_bit_atomic_set
1358#undef rte_bit_atomic_clear
1359#undef rte_bit_atomic_assign
1360#undef rte_bit_atomic_flip
1361#undef rte_bit_atomic_test_and_set
1362#undef rte_bit_atomic_test_and_clear
1363#undef rte_bit_atomic_test_and_assign
/*
 * Overload-generator macros (C++ path).  Naming scheme:
 *   _V_<N>[R]  -- emit one rte_bit_<family><fun>() wrapper forwarding to
 *                 the matching __rte_bit_<family><v><fun><size>() helper;
 *                 N counts parameters (addr + N-1 extra), R = returns a value.
 *   _SZ_<N>[R] -- instantiate _V_ for the non-volatile ("") and volatile
 *                 ("v_") variants at one width.
 *   __RTE_BIT_OVERLOAD_<N>[R] -- instantiate _SZ_ for both 32 and 64 bits.
 *
 * NOTE(review): garbled extraction -- original line numbers are fused
 * onto each line, and in the _V_ macros the `static inline void` lines
 * and brace lines are missing (some continuation lines are truncated
 * too, e.g. SZ_2R and V_4R).  Recover from upstream before editing.
 */
1365#define __RTE_BIT_OVERLOAD_V_2(family, v, fun, qualifier, size, arg1_type, arg1_name) \
1367rte_bit_ ## family ## fun(qualifier uint ## size ## _t *addr, arg1_type arg1_name) \
1369 __rte_bit_ ## family ## v ## fun ## size(addr, arg1_name); \
1372#define __RTE_BIT_OVERLOAD_SZ_2(family, fun, qualifier, size, arg1_type, arg1_name) \
1373 __RTE_BIT_OVERLOAD_V_2(family,, fun, qualifier, size, arg1_type, arg1_name) \
1374 __RTE_BIT_OVERLOAD_V_2(family, v_, fun, qualifier volatile, size, arg1_type, arg1_name)
1376#define __RTE_BIT_OVERLOAD_2(family, fun, qualifier, arg1_type, arg1_name) \
1377 __RTE_BIT_OVERLOAD_SZ_2(family, fun, qualifier, 32, arg1_type, arg1_name) \
1378 __RTE_BIT_OVERLOAD_SZ_2(family, fun, qualifier, 64, arg1_type, arg1_name)
1380#define __RTE_BIT_OVERLOAD_V_2R(family, v, fun, qualifier, size, ret_type, arg1_type, arg1_name) \
1381static inline ret_type \
1382rte_bit_ ## family ## fun(qualifier uint ## size ## _t *addr, arg1_type arg1_name) \
1384 return __rte_bit_ ## family ## v ## fun ## size(addr, arg1_name); \
1387#define __RTE_BIT_OVERLOAD_SZ_2R(family, fun, qualifier, size, ret_type, arg1_type, arg1_name) \
1388 __RTE_BIT_OVERLOAD_V_2R(family,, fun, qualifier, size, ret_type, arg1_type, arg1_name) \
1389 __RTE_BIT_OVERLOAD_V_2R(family, v_, fun, qualifier volatile, size, ret_type, arg1_type, \
1392#define __RTE_BIT_OVERLOAD_2R(family, fun, qualifier, ret_type, arg1_type, arg1_name) \
1393 __RTE_BIT_OVERLOAD_SZ_2R(family, fun, qualifier, 32, ret_type, arg1_type, arg1_name) \
1394 __RTE_BIT_OVERLOAD_SZ_2R(family, fun, qualifier, 64, ret_type, arg1_type, arg1_name)
1396#define __RTE_BIT_OVERLOAD_V_3(family, v, fun, qualifier, size, arg1_type, arg1_name, \
1397 arg2_type, arg2_name) \
1399rte_bit_ ## family ## fun(qualifier uint ## size ## _t *addr, arg1_type arg1_name, \
1400 arg2_type arg2_name) \
1402 __rte_bit_ ## family ## v ## fun ## size(addr, arg1_name, arg2_name); \
1405#define __RTE_BIT_OVERLOAD_SZ_3(family, fun, qualifier, size, arg1_type, arg1_name, \
1406 arg2_type, arg2_name) \
1407 __RTE_BIT_OVERLOAD_V_3(family,, fun, qualifier, size, arg1_type, arg1_name, \
1408 arg2_type, arg2_name) \
1409 __RTE_BIT_OVERLOAD_V_3(family, v_, fun, qualifier volatile, size, arg1_type, arg1_name, \
1410 arg2_type, arg2_name)
1412#define __RTE_BIT_OVERLOAD_3(family, fun, qualifier, arg1_type, arg1_name, arg2_type, arg2_name) \
1413 __RTE_BIT_OVERLOAD_SZ_3(family, fun, qualifier, 32, arg1_type, arg1_name, \
1414 arg2_type, arg2_name) \
1415 __RTE_BIT_OVERLOAD_SZ_3(family, fun, qualifier, 64, arg1_type, arg1_name, \
1416 arg2_type, arg2_name)
1418#define __RTE_BIT_OVERLOAD_V_3R(family, v, fun, qualifier, size, ret_type, arg1_type, arg1_name, \
1419 arg2_type, arg2_name) \
1420static inline ret_type \
1421rte_bit_ ## family ## fun(qualifier uint ## size ## _t *addr, arg1_type arg1_name, \
1422 arg2_type arg2_name) \
1424 return __rte_bit_ ## family ## v ## fun ## size(addr, arg1_name, arg2_name); \
1427#define __RTE_BIT_OVERLOAD_SZ_3R(family, fun, qualifier, size, ret_type, arg1_type, arg1_name, \
1428 arg2_type, arg2_name) \
1429 __RTE_BIT_OVERLOAD_V_3R(family,, fun, qualifier, size, ret_type, arg1_type, arg1_name, \
1430 arg2_type, arg2_name) \
1431 __RTE_BIT_OVERLOAD_V_3R(family, v_, fun, qualifier volatile, size, ret_type, \
1432 arg1_type, arg1_name, arg2_type, arg2_name)
1434#define __RTE_BIT_OVERLOAD_3R(family, fun, qualifier, ret_type, arg1_type, arg1_name, \
1435 arg2_type, arg2_name) \
1436 __RTE_BIT_OVERLOAD_SZ_3R(family, fun, qualifier, 32, ret_type, arg1_type, arg1_name, \
1437 arg2_type, arg2_name) \
1438 __RTE_BIT_OVERLOAD_SZ_3R(family, fun, qualifier, 64, ret_type, arg1_type, arg1_name, \
1439 arg2_type, arg2_name)
1441#define __RTE_BIT_OVERLOAD_V_4(family, v, fun, qualifier, size, arg1_type, arg1_name, \
1442 arg2_type, arg2_name, arg3_type, arg3_name) \
1444rte_bit_ ## family ## fun(qualifier uint ## size ## _t *addr, arg1_type arg1_name, \
1445 arg2_type arg2_name, arg3_type arg3_name) \
1447 __rte_bit_ ## family ## v ## fun ## size(addr, arg1_name, arg2_name, arg3_name); \
1450#define __RTE_BIT_OVERLOAD_SZ_4(family, fun, qualifier, size, arg1_type, arg1_name, \
1451 arg2_type, arg2_name, arg3_type, arg3_name) \
1452 __RTE_BIT_OVERLOAD_V_4(family,, fun, qualifier, size, arg1_type, arg1_name, \
1453 arg2_type, arg2_name, arg3_type, arg3_name) \
1454 __RTE_BIT_OVERLOAD_V_4(family, v_, fun, qualifier volatile, size, arg1_type, arg1_name, \
1455 arg2_type, arg2_name, arg3_type, arg3_name)
1457#define __RTE_BIT_OVERLOAD_4(family, fun, qualifier, arg1_type, arg1_name, arg2_type, arg2_name, \
1458 arg3_type, arg3_name) \
1459 __RTE_BIT_OVERLOAD_SZ_4(family, fun, qualifier, 32, arg1_type, arg1_name, \
1460 arg2_type, arg2_name, arg3_type, arg3_name) \
1461 __RTE_BIT_OVERLOAD_SZ_4(family, fun, qualifier, 64, arg1_type, arg1_name, \
1462 arg2_type, arg2_name, arg3_type, arg3_name)
1464#define __RTE_BIT_OVERLOAD_V_4R(family, v, fun, qualifier, size, ret_type, arg1_type, arg1_name, \
1465 arg2_type, arg2_name, arg3_type, arg3_name) \
1466static inline ret_type \
1467rte_bit_ ## family ## fun(qualifier uint ## size ## _t *addr, arg1_type arg1_name, \
1468 arg2_type arg2_name, arg3_type arg3_name) \
1470 return __rte_bit_ ## family ## v ## fun ## size(addr, arg1_name, arg2_name, \
1474#define __RTE_BIT_OVERLOAD_SZ_4R(family, fun, qualifier, size, ret_type, arg1_type, arg1_name, \
1475 arg2_type, arg2_name, arg3_type, arg3_name) \
1476 __RTE_BIT_OVERLOAD_V_4R(family,, fun, qualifier, size, ret_type, arg1_type, arg1_name, \
1477 arg2_type, arg2_name, arg3_type, arg3_name) \
1478 __RTE_BIT_OVERLOAD_V_4R(family, v_, fun, qualifier volatile, size, ret_type, \
1479 arg1_type, arg1_name, arg2_type, arg2_name, arg3_type, arg3_name)
1481#define __RTE_BIT_OVERLOAD_4R(family, fun, qualifier, ret_type, arg1_type, arg1_name, \
1482 arg2_type, arg2_name, arg3_type, arg3_name) \
1483 __RTE_BIT_OVERLOAD_SZ_4R(family, fun, qualifier, 32, ret_type, arg1_type, arg1_name, \
1484 arg2_type, arg2_name, arg3_type, arg3_name) \
1485 __RTE_BIT_OVERLOAD_SZ_4R(family, fun, qualifier, 64, ret_type, arg1_type, arg1_name, \
1486 arg2_type, arg2_name, arg3_type, arg3_name)
/*
 * Instantiations of the overload generators for the experimental API:
 * the non-atomic family (test/set/clear/assign/flip) followed by the
 * atomic family (each takes a trailing `int memory_order`).
 *
 * NOTE(review): garbled extraction -- each invocation's argument list is
 * scattered across lines, and the final __RTE_BIT_OVERLOAD_4R invocation
 * is truncated (its `int, memory_order)` tail is missing).  Recover from
 * upstream before editing.
 */
1488#ifdef ALLOW_EXPERIMENTAL_API
1489__RTE_BIT_OVERLOAD_2R(, test,
const,
bool,
unsigned int, nr)
1490__RTE_BIT_OVERLOAD_2(, set,,
unsigned int, nr)
1491__RTE_BIT_OVERLOAD_2(, clear,,
unsigned int, nr)
1492__RTE_BIT_OVERLOAD_3(, assign,,
unsigned int, nr,
bool, value)
1493__RTE_BIT_OVERLOAD_2(, flip,,
unsigned int, nr)
1495__RTE_BIT_OVERLOAD_3R(atomic_, test,
const,
bool,
unsigned int, nr,
int, memory_order)
1496__RTE_BIT_OVERLOAD_3(atomic_, set,,
unsigned int, nr,
int, memory_order)
1497__RTE_BIT_OVERLOAD_3(atomic_, clear,,
unsigned int, nr,
int, memory_order)
1498__RTE_BIT_OVERLOAD_4(atomic_, assign,,
unsigned int, nr,
bool, value,
int, memory_order)
1499__RTE_BIT_OVERLOAD_3(atomic_, flip,,
unsigned int, nr,
int, memory_order)
1500__RTE_BIT_OVERLOAD_3R(atomic_, test_and_set,,
bool,
unsigned int, nr,
int, memory_order)
1501__RTE_BIT_OVERLOAD_3R(atomic_, test_and_clear,,
bool,
unsigned int, nr,
int, memory_order)
1502__RTE_BIT_OVERLOAD_4R(atomic_, test_and_assign,,
bool,
unsigned int, nr,
bool, value,
/*
 * NOTE(review): the lines below are NOT part of the header proper -- they
 * look like an auto-generated symbol index (prototype-like lines without
 * semicolons) appended by whatever tool extracted this copy.  They list
 * the static inline functions this header declares; delete them when
 * restoring the file from upstream.
 */
static void rte_bit_relaxed_set32(unsigned int nr, volatile uint32_t *addr)
static uint32_t rte_log2_u32(uint32_t v)
static uint32_t rte_fls_u32(uint32_t x)
static void rte_bit_relaxed_clear64(unsigned int nr, volatile uint64_t *addr)
static int rte_bsf32_safe(uint32_t v, uint32_t *pos)
static uint64_t rte_bit_relaxed_test_and_clear64(unsigned int nr, volatile uint64_t *addr)
static uint32_t rte_bsf64(uint64_t v)
static unsigned int rte_clz32(uint32_t v)
static uint32_t rte_bit_relaxed_test_and_set32(unsigned int nr, volatile uint32_t *addr)
static uint32_t rte_align32pow2(uint32_t x)
static uint32_t rte_bit_relaxed_test_and_clear32(unsigned int nr, volatile uint32_t *addr)
static unsigned int rte_popcount32(uint32_t v)
static uint32_t rte_align32prevpow2(uint32_t x)
static uint64_t rte_bit_relaxed_test_and_set64(unsigned int nr, volatile uint64_t *addr)
static uint64_t rte_align64pow2(uint64_t v)
static unsigned int rte_ctz64(uint64_t v)
static uint32_t rte_bsf32(uint32_t v)
static int rte_bsf64_safe(uint64_t v, uint32_t *pos)
static uint64_t rte_bit_relaxed_get64(unsigned int nr, volatile uint64_t *addr)
static unsigned int rte_popcount64(uint64_t v)
static unsigned int rte_ctz32(uint32_t v)
static uint64_t rte_align64prevpow2(uint64_t v)
static unsigned int rte_clz64(uint64_t v)
static void rte_bit_relaxed_clear32(unsigned int nr, volatile uint32_t *addr)
static uint32_t rte_bit_relaxed_get32(unsigned int nr, volatile uint32_t *addr)
static void rte_bit_relaxed_set64(unsigned int nr, volatile uint64_t *addr)
static int rte_is_power_of_2(uint32_t n)
static uint64_t rte_combine64ms1b(uint64_t v)
static uint32_t rte_combine32ms1b(uint32_t x)
static uint32_t rte_log2_u64(uint64_t v)
static uint32_t rte_fls_u64(uint64_t x)