DPDK 25.03.0-rc0
rte_bitops.h
Go to the documentation of this file.
1/* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2020 Arm Limited
3 * Copyright(c) 2010-2019 Intel Corporation
4 * Copyright(c) 2023 Microsoft Corporation
5 * Copyright(c) 2024 Ericsson AB
6 */
7
8#ifndef _RTE_BITOPS_H_
9#define _RTE_BITOPS_H_
10
20#include <stdint.h>
21
22#include <rte_compat.h>
23#include <rte_debug.h>
24#include <rte_stdatomic.h>
25
26#ifdef __cplusplus
27extern "C" {
28#endif
29
/** Get the uint64_t value of the bit at index @c nr (nr must be < 64). */
#define RTE_BIT64(nr) (UINT64_C(1) << (nr))

/** Get the uint32_t value of the bit at index @c nr (nr must be < 32). */
#define RTE_BIT32(nr) (UINT32_C(1) << (nr))

/** Get the uint32_t value of @c val shifted left by @c nr bits. */
#define RTE_SHIFT_VAL32(val, nr) (UINT32_C(val) << (nr))

/** Get the uint64_t value of @c val shifted left by @c nr bits. */
#define RTE_SHIFT_VAL64(val, nr) (UINT64_C(val) << (nr))

/**
 * Generate a 32-bit contiguous mask with bits @c high down to @c low
 * (inclusive) set. Requires high >= low and high <= 31.
 */
#define RTE_GENMASK32(high, low) \
		(((~UINT32_C(0)) << (low)) & (~UINT32_C(0) >> (31u - (high))))

/**
 * Generate a 64-bit contiguous mask with bits @c high down to @c low
 * (inclusive) set. Requires high >= low and high <= 63.
 */
#define RTE_GENMASK64(high, low) \
		(((~UINT64_C(0)) << (low)) & (~UINT64_C(0) >> (63u - (high))))

/**
 * Extract from 32-bit @c reg the field designated by the contiguous
 * @c mask, shifted down so the field's LSB lands at bit 0.
 * Note: @c mask is evaluated twice; do not pass expressions with
 * side effects.
 */
#define RTE_FIELD_GET32(mask, reg) \
		((typeof(mask))(((reg) & (mask)) >> rte_ctz32(mask)))

/**
 * Extract from 64-bit @c reg the field designated by the contiguous
 * @c mask, shifted down so the field's LSB lands at bit 0.
 * Note: @c mask is evaluated twice; do not pass expressions with
 * side effects.
 */
#define RTE_FIELD_GET64(mask, reg) \
		((typeof(mask))(((reg) & (mask)) >> rte_ctz64(mask)))
111
/**
 * Test bit @c nr in the word pointed to by @c addr (non-atomic).
 * C11 generic selection dispatches on the pointer type: 32- or 64-bit,
 * const and/or volatile qualified.
 */
#define rte_bit_test(addr, nr) \
	_Generic((addr), \
		uint32_t *: __rte_bit_test32, \
		const uint32_t *: __rte_bit_test32, \
		volatile uint32_t *: __rte_bit_v_test32, \
		const volatile uint32_t *: __rte_bit_v_test32, \
		uint64_t *: __rte_bit_test64, \
		const uint64_t *: __rte_bit_test64, \
		volatile uint64_t *: __rte_bit_v_test64, \
		const volatile uint64_t *: __rte_bit_v_test64) \
	(addr, nr)

/** Set bit @c nr to 1 in the word pointed to by @c addr (non-atomic). */
#define rte_bit_set(addr, nr) \
	_Generic((addr), \
		uint32_t *: __rte_bit_set32, \
		volatile uint32_t *: __rte_bit_v_set32, \
		uint64_t *: __rte_bit_set64, \
		volatile uint64_t *: __rte_bit_v_set64) \
	(addr, nr)

/** Clear bit @c nr to 0 in the word pointed to by @c addr (non-atomic). */
#define rte_bit_clear(addr, nr) \
	_Generic((addr), \
		uint32_t *: __rte_bit_clear32, \
		volatile uint32_t *: __rte_bit_v_clear32, \
		uint64_t *: __rte_bit_clear64, \
		volatile uint64_t *: __rte_bit_v_clear64) \
	(addr, nr)

/** Set bit @c nr to @c value (true = set, false = clear) (non-atomic). */
#define rte_bit_assign(addr, nr, value) \
	_Generic((addr), \
		uint32_t *: __rte_bit_assign32, \
		volatile uint32_t *: __rte_bit_v_assign32, \
		uint64_t *: __rte_bit_assign64, \
		volatile uint64_t *: __rte_bit_v_assign64) \
	(addr, nr, value)

/** Invert bit @c nr in the word pointed to by @c addr (non-atomic). */
#define rte_bit_flip(addr, nr) \
	_Generic((addr), \
		uint32_t *: __rte_bit_flip32, \
		volatile uint32_t *: __rte_bit_v_flip32, \
		uint64_t *: __rte_bit_flip64, \
		volatile uint64_t *: __rte_bit_v_flip64) \
	(addr, nr)

/**
 * Atomically test bit @c nr, loading the word with the given C11
 * @c memory_order (an int, e.g. rte_memory_order_relaxed).
 */
#define rte_bit_atomic_test(addr, nr, memory_order) \
	_Generic((addr), \
		uint32_t *: __rte_bit_atomic_test32, \
		const uint32_t *: __rte_bit_atomic_test32, \
		volatile uint32_t *: __rte_bit_atomic_v_test32, \
		const volatile uint32_t *: __rte_bit_atomic_v_test32, \
		uint64_t *: __rte_bit_atomic_test64, \
		const uint64_t *: __rte_bit_atomic_test64, \
		volatile uint64_t *: __rte_bit_atomic_v_test64, \
		const volatile uint64_t *: __rte_bit_atomic_v_test64) \
	(addr, nr, memory_order)

/** Atomically set bit @c nr with the given memory order. */
#define rte_bit_atomic_set(addr, nr, memory_order) \
	_Generic((addr), \
		uint32_t *: __rte_bit_atomic_set32, \
		volatile uint32_t *: __rte_bit_atomic_v_set32, \
		uint64_t *: __rte_bit_atomic_set64, \
		volatile uint64_t *: __rte_bit_atomic_v_set64) \
	(addr, nr, memory_order)

/** Atomically clear bit @c nr with the given memory order. */
#define rte_bit_atomic_clear(addr, nr, memory_order) \
	_Generic((addr), \
		uint32_t *: __rte_bit_atomic_clear32, \
		volatile uint32_t *: __rte_bit_atomic_v_clear32, \
		uint64_t *: __rte_bit_atomic_clear64, \
		volatile uint64_t *: __rte_bit_atomic_v_clear64) \
	(addr, nr, memory_order)

/** Atomically set bit @c nr to @c value with the given memory order. */
#define rte_bit_atomic_assign(addr, nr, value, memory_order) \
	_Generic((addr), \
		uint32_t *: __rte_bit_atomic_assign32, \
		volatile uint32_t *: __rte_bit_atomic_v_assign32, \
		uint64_t *: __rte_bit_atomic_assign64, \
		volatile uint64_t *: __rte_bit_atomic_v_assign64) \
	(addr, nr, value, memory_order)

/** Atomically invert bit @c nr with the given memory order. */
#define rte_bit_atomic_flip(addr, nr, memory_order) \
	_Generic((addr), \
		uint32_t *: __rte_bit_atomic_flip32, \
		volatile uint32_t *: __rte_bit_atomic_v_flip32, \
		uint64_t *: __rte_bit_atomic_flip64, \
		volatile uint64_t *: __rte_bit_atomic_v_flip64) \
	(addr, nr, memory_order)

/** Atomically set bit @c nr and return its previous value. */
#define rte_bit_atomic_test_and_set(addr, nr, memory_order) \
	_Generic((addr), \
		uint32_t *: __rte_bit_atomic_test_and_set32, \
		volatile uint32_t *: __rte_bit_atomic_v_test_and_set32, \
		uint64_t *: __rte_bit_atomic_test_and_set64, \
		volatile uint64_t *: __rte_bit_atomic_v_test_and_set64) \
	(addr, nr, memory_order)

/** Atomically clear bit @c nr and return its previous value. */
#define rte_bit_atomic_test_and_clear(addr, nr, memory_order) \
	_Generic((addr), \
		uint32_t *: __rte_bit_atomic_test_and_clear32, \
		volatile uint32_t *: __rte_bit_atomic_v_test_and_clear32, \
		uint64_t *: __rte_bit_atomic_test_and_clear64, \
		volatile uint64_t *: __rte_bit_atomic_v_test_and_clear64) \
	(addr, nr, memory_order)

/** Atomically assign bit @c nr to @c value and return its previous value. */
#define rte_bit_atomic_test_and_assign(addr, nr, value, memory_order) \
	_Generic((addr), \
		uint32_t *: __rte_bit_atomic_test_and_assign32, \
		volatile uint32_t *: __rte_bit_atomic_v_test_and_assign32, \
		uint64_t *: __rte_bit_atomic_test_and_assign64, \
		volatile uint64_t *: __rte_bit_atomic_v_test_and_assign64) \
	(addr, nr, value, memory_order)
464
/*
 * Generators for the non-atomic __rte_bit_* helper functions.
 * 'variant' is a function-name infix ('' or 'v_'), 'qualifier' the matching
 * pointer qualifier ('' or 'volatile'), 'size' the word width (32/64).
 */

/* Generate a single-bit test helper. */
#define __RTE_GEN_BIT_TEST(variant, qualifier, size) \
__rte_experimental \
static inline bool \
__rte_bit_ ## variant ## test ## size(const qualifier uint ## size ## _t *addr, unsigned int nr) \
{ \
	RTE_ASSERT(nr < size); \
	uint ## size ## _t mask = (uint ## size ## _t)1 << nr; \
	return *addr & mask; \
}

/* Generate a single-bit set helper. */
#define __RTE_GEN_BIT_SET(variant, qualifier, size) \
__rte_experimental \
static inline void \
__rte_bit_ ## variant ## set ## size(qualifier uint ## size ## _t *addr, unsigned int nr) \
{ \
	RTE_ASSERT(nr < size); \
	uint ## size ## _t mask = (uint ## size ## _t)1 << nr; \
	*addr |= mask; \
}

/* Generate a single-bit clear helper. */
#define __RTE_GEN_BIT_CLEAR(variant, qualifier, size) \
__rte_experimental \
static inline void \
__rte_bit_ ## variant ## clear ## size(qualifier uint ## size ## _t *addr, unsigned int nr) \
{ \
	RTE_ASSERT(nr < size); \
	uint ## size ## _t mask = ~((uint ## size ## _t)1 << nr); \
	(*addr) &= mask; \
}

/* Generate a single-bit assign helper, built on set/clear above. */
#define __RTE_GEN_BIT_ASSIGN(variant, qualifier, size) \
__rte_experimental \
static inline void \
__rte_bit_ ## variant ## assign ## size(qualifier uint ## size ## _t *addr, unsigned int nr, \
		bool value) \
{ \
	if (value) \
		__rte_bit_ ## variant ## set ## size(addr, nr); \
	else \
		__rte_bit_ ## variant ## clear ## size(addr, nr); \
}

/* Generate a single-bit flip helper, built on test/assign above. */
#define __RTE_GEN_BIT_FLIP(variant, qualifier, size) \
__rte_experimental \
static inline void \
__rte_bit_ ## variant ## flip ## size(qualifier uint ## size ## _t *addr, unsigned int nr) \
{ \
	bool value; \
	value = __rte_bit_ ## variant ## test ## size(addr, nr); \
	__rte_bit_ ## variant ## assign ## size(addr, nr, !value); \
}

/* Generate the full non-atomic helper family for one variant/size. */
#define __RTE_GEN_BIT_OPS(v, qualifier, size) \
	__RTE_GEN_BIT_TEST(v, qualifier, size) \
	__RTE_GEN_BIT_SET(v, qualifier, size) \
	__RTE_GEN_BIT_CLEAR(v, qualifier, size) \
	__RTE_GEN_BIT_ASSIGN(v, qualifier, size) \
	__RTE_GEN_BIT_FLIP(v, qualifier, size)

/* Instantiate both the plain and the volatile ('v_') families. */
#define __RTE_GEN_BIT_OPS_SIZE(size) \
	__RTE_GEN_BIT_OPS(,, size) \
	__RTE_GEN_BIT_OPS(v_, volatile, size)

#ifdef ALLOW_EXPERIMENTAL_API
__RTE_GEN_BIT_OPS_SIZE(32)
__RTE_GEN_BIT_OPS_SIZE(64)
#endif
532
/*
 * Generators for the atomic __rte_bit_atomic_* helper functions. Each
 * helper casts the plain pointer to an RTE_ATOMIC()-qualified pointer and
 * uses the rte_atomic_*_explicit wrappers with the caller-supplied
 * memory order.
 */

/* Generate an atomic single-bit test (atomic load + mask). */
#define __RTE_GEN_BIT_ATOMIC_TEST(variant, qualifier, size) \
__rte_experimental \
static inline bool \
__rte_bit_atomic_ ## variant ## test ## size(const qualifier uint ## size ## _t *addr, \
		unsigned int nr, int memory_order) \
{ \
	RTE_ASSERT(nr < size); \
	const qualifier RTE_ATOMIC(uint ## size ## _t) *a_addr = \
		(const qualifier RTE_ATOMIC(uint ## size ## _t) *)addr; \
	uint ## size ## _t mask = (uint ## size ## _t)1 << nr; \
	return rte_atomic_load_explicit(a_addr, memory_order) & mask; \
}

/* Generate an atomic single-bit set (fetch-or). */
#define __RTE_GEN_BIT_ATOMIC_SET(variant, qualifier, size) \
__rte_experimental \
static inline void \
__rte_bit_atomic_ ## variant ## set ## size(qualifier uint ## size ## _t *addr, \
		unsigned int nr, int memory_order) \
{ \
	RTE_ASSERT(nr < size); \
	qualifier RTE_ATOMIC(uint ## size ## _t) *a_addr = \
		(qualifier RTE_ATOMIC(uint ## size ## _t) *)addr; \
	uint ## size ## _t mask = (uint ## size ## _t)1 << nr; \
	rte_atomic_fetch_or_explicit(a_addr, mask, memory_order); \
}

/* Generate an atomic single-bit clear (fetch-and with inverted mask). */
#define __RTE_GEN_BIT_ATOMIC_CLEAR(variant, qualifier, size) \
__rte_experimental \
static inline void \
__rte_bit_atomic_ ## variant ## clear ## size(qualifier uint ## size ## _t *addr, \
		unsigned int nr, int memory_order) \
{ \
	RTE_ASSERT(nr < size); \
	qualifier RTE_ATOMIC(uint ## size ## _t) *a_addr = \
		(qualifier RTE_ATOMIC(uint ## size ## _t) *)addr; \
	uint ## size ## _t mask = (uint ## size ## _t)1 << nr; \
	rte_atomic_fetch_and_explicit(a_addr, ~mask, memory_order); \
}

/* Generate an atomic single-bit flip (fetch-xor). */
#define __RTE_GEN_BIT_ATOMIC_FLIP(variant, qualifier, size) \
__rte_experimental \
static inline void \
__rte_bit_atomic_ ## variant ## flip ## size(qualifier uint ## size ## _t *addr, \
		unsigned int nr, int memory_order) \
{ \
	RTE_ASSERT(nr < size); \
	qualifier RTE_ATOMIC(uint ## size ## _t) *a_addr = \
		(qualifier RTE_ATOMIC(uint ## size ## _t) *)addr; \
	uint ## size ## _t mask = (uint ## size ## _t)1 << nr; \
	rte_atomic_fetch_xor_explicit(a_addr, mask, memory_order); \
}

/* Generate an atomic single-bit assign, dispatching to set/clear. */
#define __RTE_GEN_BIT_ATOMIC_ASSIGN(variant, qualifier, size) \
__rte_experimental \
static inline void \
__rte_bit_atomic_## variant ## assign ## size(qualifier uint ## size ## _t *addr, \
		unsigned int nr, bool value, int memory_order) \
{ \
	if (value) \
		__rte_bit_atomic_ ## variant ## set ## size(addr, nr, memory_order); \
	else \
		__rte_bit_atomic_ ## variant ## clear ## size(addr, nr, memory_order); \
}

/* Generate an atomic test-and-set returning the previous bit value. */
#define __RTE_GEN_BIT_ATOMIC_TEST_AND_SET(variant, qualifier, size) \
__rte_experimental \
static inline bool \
__rte_bit_atomic_ ## variant ## test_and_set ## size(qualifier uint ## size ## _t *addr, \
		unsigned int nr, int memory_order) \
{ \
	RTE_ASSERT(nr < size); \
	qualifier RTE_ATOMIC(uint ## size ## _t) *a_addr = \
		(qualifier RTE_ATOMIC(uint ## size ## _t) *)addr; \
	uint ## size ## _t mask = (uint ## size ## _t)1 << nr; \
	uint ## size ## _t prev; \
	prev = rte_atomic_fetch_or_explicit(a_addr, mask, memory_order); \
	return prev & mask; \
}

/* Generate an atomic test-and-clear returning the previous bit value. */
#define __RTE_GEN_BIT_ATOMIC_TEST_AND_CLEAR(variant, qualifier, size) \
__rte_experimental \
static inline bool \
__rte_bit_atomic_ ## variant ## test_and_clear ## size(qualifier uint ## size ## _t *addr, \
		unsigned int nr, int memory_order) \
{ \
	RTE_ASSERT(nr < size); \
	qualifier RTE_ATOMIC(uint ## size ## _t) *a_addr = \
		(qualifier RTE_ATOMIC(uint ## size ## _t) *)addr; \
	uint ## size ## _t mask = (uint ## size ## _t)1 << nr; \
	uint ## size ## _t prev; \
	prev = rte_atomic_fetch_and_explicit(a_addr, ~mask, memory_order); \
	return prev & mask; \
}

/* Generate an atomic test-and-assign, dispatching to the two above. */
#define __RTE_GEN_BIT_ATOMIC_TEST_AND_ASSIGN(variant, qualifier, size) \
__rte_experimental \
static inline bool \
__rte_bit_atomic_ ## variant ## test_and_assign ## size( \
		qualifier uint ## size ## _t *addr, unsigned int nr, bool value, \
		int memory_order) \
{ \
	if (value) \
		return __rte_bit_atomic_ ## variant ## test_and_set ## size(addr, nr, \
				memory_order); \
	else \
		return __rte_bit_atomic_ ## variant ## test_and_clear ## size(addr, nr, \
				memory_order); \
}

/* Generate the full atomic helper family for one variant/size. */
#define __RTE_GEN_BIT_ATOMIC_OPS(variant, qualifier, size) \
	__RTE_GEN_BIT_ATOMIC_TEST(variant, qualifier, size) \
	__RTE_GEN_BIT_ATOMIC_SET(variant, qualifier, size) \
	__RTE_GEN_BIT_ATOMIC_CLEAR(variant, qualifier, size) \
	__RTE_GEN_BIT_ATOMIC_ASSIGN(variant, qualifier, size) \
	__RTE_GEN_BIT_ATOMIC_TEST_AND_SET(variant, qualifier, size) \
	__RTE_GEN_BIT_ATOMIC_TEST_AND_CLEAR(variant, qualifier, size) \
	__RTE_GEN_BIT_ATOMIC_TEST_AND_ASSIGN(variant, qualifier, size) \
	__RTE_GEN_BIT_ATOMIC_FLIP(variant, qualifier, size)

/* Instantiate both the plain and the volatile ('v_') atomic families. */
#define __RTE_GEN_BIT_ATOMIC_OPS_SIZE(size) \
	__RTE_GEN_BIT_ATOMIC_OPS(,, size) \
	__RTE_GEN_BIT_ATOMIC_OPS(v_, volatile, size)

#ifdef ALLOW_EXPERIMENTAL_API
__RTE_GEN_BIT_ATOMIC_OPS_SIZE(32)
__RTE_GEN_BIT_ATOMIC_OPS_SIZE(64)
#endif
660
661/*------------------------ 32-bit relaxed operations ------------------------*/
662
/**
 * Get the target bit from a 32-bit value without memory ordering.
 *
 * @param nr Bit index (must be < 32).
 * @param addr Word to read.
 * @return Zero if the bit is clear; otherwise the bit's mask value.
 */
static inline uint32_t
rte_bit_relaxed_get32(unsigned int nr, volatile uint32_t *addr)
{
	RTE_ASSERT(nr < 32);

	return *addr & (UINT32_C(1) << nr);
}
681
/**
 * Set a bit of a 32-bit value to 1 without memory ordering.
 *
 * @param nr Bit index (must be < 32).
 * @param addr Word to update.
 */
static inline void
rte_bit_relaxed_set32(unsigned int nr, volatile uint32_t *addr)
{
	RTE_ASSERT(nr < 32);

	*addr |= UINT32_C(1) << nr;
}
698
/**
 * Clear a bit of a 32-bit value to 0 without memory ordering.
 *
 * @param nr Bit index (must be < 32).
 * @param addr Word to update.
 */
static inline void
rte_bit_relaxed_clear32(unsigned int nr, volatile uint32_t *addr)
{
	RTE_ASSERT(nr < 32);

	*addr &= ~(UINT32_C(1) << nr);
}
715
/**
 * Return the original bit, then set it to 1, without memory ordering.
 *
 * @param nr Bit index (must be < 32).
 * @param addr Word to update.
 * @return Zero if the bit was previously clear; otherwise its mask value.
 */
static inline uint32_t
rte_bit_relaxed_test_and_set32(unsigned int nr, volatile uint32_t *addr)
{
	RTE_ASSERT(nr < 32);

	const uint32_t bit = UINT32_C(1) << nr;
	const uint32_t prev = *addr;

	*addr = prev | bit;
	return prev & bit;
}
737
/**
 * Return the original bit, then clear it to 0, without memory ordering.
 *
 * @param nr Bit index (must be < 32).
 * @param addr Word to update.
 * @return Zero if the bit was previously clear; otherwise its mask value.
 */
static inline uint32_t
rte_bit_relaxed_test_and_clear32(unsigned int nr, volatile uint32_t *addr)
{
	RTE_ASSERT(nr < 32);

	const uint32_t bit = UINT32_C(1) << nr;
	const uint32_t prev = *addr;

	*addr = prev & ~bit;
	return prev & bit;
}
759
760/*------------------------ 64-bit relaxed operations ------------------------*/
761
/**
 * Get the target bit from a 64-bit value without memory ordering.
 *
 * @param nr Bit index (must be < 64).
 * @param addr Word to read.
 * @return Zero if the bit is clear; otherwise the bit's mask value.
 */
static inline uint64_t
rte_bit_relaxed_get64(unsigned int nr, volatile uint64_t *addr)
{
	RTE_ASSERT(nr < 64);

	return *addr & (UINT64_C(1) << nr);
}
780
/**
 * Set a bit of a 64-bit value to 1 without memory ordering.
 *
 * @param nr Bit index (must be < 64).
 * @param addr Word to update.
 */
static inline void
rte_bit_relaxed_set64(unsigned int nr, volatile uint64_t *addr)
{
	RTE_ASSERT(nr < 64);

	*addr |= UINT64_C(1) << nr;
}
797
/**
 * Clear a bit of a 64-bit value to 0 without memory ordering.
 *
 * @param nr Bit index (must be < 64).
 * @param addr Word to update.
 */
static inline void
rte_bit_relaxed_clear64(unsigned int nr, volatile uint64_t *addr)
{
	RTE_ASSERT(nr < 64);

	*addr &= ~(UINT64_C(1) << nr);
}
814
/**
 * Return the original bit, then set it to 1, without memory ordering.
 *
 * @param nr Bit index (must be < 64).
 * @param addr Word to update.
 * @return Zero if the bit was previously clear; otherwise the bit's mask
 *   value, mirroring rte_bit_relaxed_test_and_set32() and
 *   rte_bit_relaxed_test_and_clear64().
 */
static inline uint64_t
rte_bit_relaxed_test_and_set64(unsigned int nr, volatile uint64_t *addr)
{
	RTE_ASSERT(nr < 64);

	uint64_t mask = RTE_BIT64(nr);
	uint64_t val = *addr;
	*addr = val | mask;
	/* Fix: previously returned the whole prior word ('val'), leaking
	 * unrelated bits to the caller; only the tested bit is meaningful. */
	return val & mask;
}
836
/**
 * Return the original bit, then clear it to 0, without memory ordering.
 *
 * @param nr Bit index (must be < 64).
 * @param addr Word to update.
 * @return Zero if the bit was previously clear; otherwise its mask value.
 */
static inline uint64_t
rte_bit_relaxed_test_and_clear64(unsigned int nr, volatile uint64_t *addr)
{
	RTE_ASSERT(nr < 64);

	const uint64_t bit = UINT64_C(1) << nr;
	const uint64_t prev = *addr;

	*addr = prev & ~bit;
	return prev & bit;
}
858
#ifdef RTE_TOOLCHAIN_MSVC

/**
 * Count leading zeroes of a 32-bit value (MSVC intrinsic backend).
 * NOTE(review): the found/not-found result of _BitScanReverse is
 * discarded, so the return value for v == 0 is undefined — presumably
 * mirroring the GCC builtin's precondition; confirm against callers.
 */
static inline unsigned int
rte_clz32(uint32_t v)
{
	unsigned long rv;

	(void)_BitScanReverse(&rv, v);

	return (unsigned int)(sizeof(v) * CHAR_BIT - 1 - rv);
}

/** Count leading zeroes of a 64-bit value; undefined for v == 0. */
static inline unsigned int
rte_clz64(uint64_t v)
{
	unsigned long rv;

	(void)_BitScanReverse64(&rv, v);

	return (unsigned int)(sizeof(v) * CHAR_BIT - 1 - rv);
}

/** Count trailing zeroes of a 32-bit value; undefined for v == 0. */
static inline unsigned int
rte_ctz32(uint32_t v)
{
	unsigned long rv;

	(void)_BitScanForward(&rv, v);

	return (unsigned int)rv;
}

/** Count trailing zeroes of a 64-bit value; undefined for v == 0. */
static inline unsigned int
rte_ctz64(uint64_t v)
{
	unsigned long rv;

	(void)_BitScanForward64(&rv, v);

	return (unsigned int)rv;
}

/** Count the number of 1-bits in a 32-bit value. */
static inline unsigned int
rte_popcount32(uint32_t v)
{
	return (unsigned int)__popcnt(v);
}

/** Count the number of 1-bits in a 64-bit value. */
static inline unsigned int
rte_popcount64(uint64_t v)
{
	return (unsigned int)__popcnt64(v);
}
960
961#else
962
/** Count leading zeroes of a 32-bit value; undefined for v == 0. */
static inline unsigned int
rte_clz32(uint32_t v)
{
	unsigned int leading = (unsigned int)__builtin_clz(v);

	return leading;
}
976
/** Count leading zeroes of a 64-bit value; undefined for v == 0. */
static inline unsigned int
rte_clz64(uint64_t v)
{
	unsigned int leading = (unsigned int)__builtin_clzll(v);

	return leading;
}
990
/** Count trailing zeroes of a 32-bit value; undefined for v == 0. */
static inline unsigned int
rte_ctz32(uint32_t v)
{
	unsigned int trailing = (unsigned int)__builtin_ctz(v);

	return trailing;
}
1004
/** Count trailing zeroes of a 64-bit value; undefined for v == 0. */
static inline unsigned int
rte_ctz64(uint64_t v)
{
	unsigned int trailing = (unsigned int)__builtin_ctzll(v);

	return trailing;
}
1018
/**
 * Count the number of 1-bits in a 32-bit value.
 *
 * Fix: the function's signature line was lost (extraction artifact);
 * restored per the file's own index (rte_popcount32, line 1028).
 */
static inline unsigned int
rte_popcount32(uint32_t v)
{
	return (unsigned int)__builtin_popcount(v);
}
1032
/**
 * Count the number of 1-bits in a 64-bit value.
 *
 * Fix: the function's signature line was lost (extraction artifact);
 * restored per the file's own index (rte_popcount64, line 1042).
 */
static inline unsigned int
rte_popcount64(uint64_t v)
{
	return (unsigned int)__builtin_popcountll(v);
}
1046
1047#endif
1048
/**
 * Combine 32-bit value with its most significant 1 bit: sets every bit
 * at or below the MSB, i.e. returns 2^(floor(log2(x))+1) - 1 (0 for 0).
 *
 * Fix: the function's signature line was lost (extraction artifact);
 * restored per the file's own index (rte_combine32ms1b, line 1060).
 */
static inline uint32_t
rte_combine32ms1b(uint32_t x)
{
	x |= x >> 1;
	x |= x >> 2;
	x |= x >> 4;
	x |= x >> 8;
	x |= x >> 16;

	return x;
}
1070
/**
 * Combine 64-bit value with its most significant 1 bit: sets every bit
 * at or below the MSB (0 for 0).
 *
 * Fix: the function's signature line was lost (extraction artifact);
 * restored per the file's own index (rte_combine64ms1b, line 1082).
 */
static inline uint64_t
rte_combine64ms1b(uint64_t v)
{
	v |= v >> 1;
	v |= v >> 2;
	v |= v >> 4;
	v |= v >> 8;
	v |= v >> 16;
	v |= v >> 32;

	return v;
}
1093
/**
 * Return the bit index of the least significant set bit of v.
 * Result is undefined for v == 0 (delegates to rte_ctz32()).
 */
static inline uint32_t
rte_bsf32(uint32_t v)
{
	return (uint32_t)rte_ctz32(v);
}
1110
/**
 * Zero-safe bit-scan-forward: store the index of the least significant
 * set bit of v in *pos.
 *
 * @return 1 on success (v != 0, *pos written); 0 if v is zero
 *   (*pos untouched).
 */
static inline int
rte_bsf32_safe(uint32_t v, uint32_t *pos)
{
	if (v != 0) {
		*pos = rte_bsf32(v);
		return 1;
	}

	return 0;
}
1134
/**
 * Return the bit index of the least significant set bit of v.
 * Result is undefined for v == 0 (delegates to rte_ctz64()).
 */
static inline uint32_t
rte_bsf64(uint64_t v)
{
	return (uint32_t)rte_ctz64(v);
}
1151
/**
 * Zero-safe bit-scan-forward: store the index of the least significant
 * set bit of v in *pos.
 *
 * @return 1 on success (v != 0, *pos written); 0 if v is zero
 *   (*pos untouched).
 */
static inline int
rte_bsf64_safe(uint64_t v, uint32_t *pos)
{
	if (v != 0) {
		*pos = rte_bsf64(v);
		return 1;
	}

	return 0;
}
1175
/**
 * Find last (most significant) set bit, 1-based: rte_fls_u32(1) == 1,
 * rte_fls_u32(0x80000000) == 32, rte_fls_u32(0) == 0.
 */
static inline uint32_t
rte_fls_u32(uint32_t x)
{
	if (x == 0)
		return 0;

	return 32 - rte_clz32(x);
}
1192
/**
 * Find last (most significant) set bit, 1-based: rte_fls_u64(1) == 1,
 * rte_fls_u64(1 << 63) == 64, rte_fls_u64(0) == 0.
 */
static inline uint32_t
rte_fls_u64(uint64_t x)
{
	if (x == 0)
		return 0;

	return 64 - rte_clz64(x);
}
1210
1211/*********** Macros to work with powers of 2 ********/
1212
/**
 * True iff n is a power of 2 (0 is not a power of 2).
 * Note: n is evaluated multiple times; avoid side effects.
 */
#define RTE_IS_POWER_OF_2(n) ((n) && !(((n) - 1) & (n)))
1217
/**
 * Return 1 if n is a power of 2 (0 is not), 0 otherwise.
 *
 * Fix: the function's signature line was lost (extraction artifact);
 * restored per the file's own index (rte_is_power_of_2, line 1225).
 */
static inline int
rte_is_power_of_2(uint32_t n)
{
	return n && !(n & (n - 1));
}
1229
/**
 * Align value x to the next power of 2 (returns x if already a power
 * of 2; returns 0 for x == 0 and for x > 2^31 by wraparound).
 *
 * Fix: the function's signature line was lost (extraction artifact);
 * restored per the file's own index (rte_align32pow2, line 1240).
 */
static inline uint32_t
rte_align32pow2(uint32_t x)
{
	x--;
	x = rte_combine32ms1b(x);

	return x + 1;
}
1247
/**
 * Align value x to the previous power of 2 (returns x if already a
 * power of 2; returns 0 for x == 0).
 *
 * Fix: the function's signature line was lost (extraction artifact);
 * restored per the file's own index (rte_align32prevpow2, line 1258).
 */
static inline uint32_t
rte_align32prevpow2(uint32_t x)
{
	x = rte_combine32ms1b(x);

	return x - (x >> 1);
}
1264
/**
 * Align 64-bit value v to the next power of 2 (returns v if already a
 * power of 2; returns 0 for v == 0 and for v > 2^63 by wraparound).
 *
 * Fix: the function's signature line was lost (extraction artifact);
 * restored per the file's own index (rte_align64pow2, line 1275).
 */
static inline uint64_t
rte_align64pow2(uint64_t v)
{
	v--;
	v = rte_combine64ms1b(v);

	return v + 1;
}
1282
/**
 * Align 64-bit value v to the previous power of 2 (returns v if already
 * a power of 2; returns 0 for v == 0).
 *
 * Fix: the function's signature line was lost (extraction artifact);
 * restored per the file's own index (rte_align64prevpow2, line 1293).
 */
static inline uint64_t
rte_align64prevpow2(uint64_t v)
{
	v = rte_combine64ms1b(v);

	return v - (v >> 1);
}
1299
/**
 * Return ceil(log2(v)) for v > 0; returns 0 for v == 0.
 * Rounds v up to a power of 2 first, then scans for its bit index.
 */
static inline uint32_t
rte_log2_u32(uint32_t v)
{
	if (v == 0)
		return 0;
	v = rte_align32pow2(v);
	return rte_bsf32(v);
}
1319
/**
 * Return ceil(log2(v)) for v > 0; returns 0 for v == 0.
 * Rounds v up to a power of 2 first, then scans for its bit index.
 */
static inline uint32_t
rte_log2_u64(uint64_t v)
{
	if (v == 0)
		return 0;
	v = rte_align64pow2(v);
	/* we checked for v being 0 already, so no undefined behavior */
	return rte_bsf64(v);
}
1340
1341#ifdef __cplusplus
1342}
1343
1344/*
1345 * Since C++ doesn't support generic selection (i.e., _Generic),
1346 * function overloading is used instead. Such functions must be
1347 * defined outside 'extern "C"' to be accepted by the compiler.
1348 */
1349
1350#undef rte_bit_test
1351#undef rte_bit_set
1352#undef rte_bit_clear
1353#undef rte_bit_assign
1354#undef rte_bit_flip
1355
1356#undef rte_bit_atomic_test
1357#undef rte_bit_atomic_set
1358#undef rte_bit_atomic_clear
1359#undef rte_bit_atomic_assign
1360#undef rte_bit_atomic_flip
1361#undef rte_bit_atomic_test_and_set
1362#undef rte_bit_atomic_test_and_clear
1363#undef rte_bit_atomic_test_and_assign
1364
/*
 * C++ overload generators: since C++ lacks _Generic, generate
 * rte_bit_*() function overloads (32/64-bit word, non-volatile and
 * volatile pointer) that forward to the __rte_bit_* implementations.
 * Naming: _V_* generates one overload for a given volatility variant;
 * _SZ_* instantiates both volatility variants for one word size;
 * the un-suffixed form instantiates both 32- and 64-bit sizes.
 * The _2/_3/_4 suffix is the total argument count; an 'R' suffix means
 * the generated overload returns a value.
 */

/* One void-returning overload, two args (addr + one extra). */
#define __RTE_BIT_OVERLOAD_V_2(family, v, fun, qualifier, size, arg1_type, arg1_name) \
static inline void \
rte_bit_ ## family ## fun(qualifier uint ## size ## _t *addr, arg1_type arg1_name) \
{ \
	__rte_bit_ ## family ## v ## fun ## size(addr, arg1_name); \
}

#define __RTE_BIT_OVERLOAD_SZ_2(family, fun, qualifier, size, arg1_type, arg1_name) \
	__RTE_BIT_OVERLOAD_V_2(family,, fun, qualifier, size, arg1_type, arg1_name) \
	__RTE_BIT_OVERLOAD_V_2(family, v_, fun, qualifier volatile, size, arg1_type, arg1_name)

#define __RTE_BIT_OVERLOAD_2(family, fun, qualifier, arg1_type, arg1_name) \
	__RTE_BIT_OVERLOAD_SZ_2(family, fun, qualifier, 32, arg1_type, arg1_name) \
	__RTE_BIT_OVERLOAD_SZ_2(family, fun, qualifier, 64, arg1_type, arg1_name)

/* One value-returning overload, two args. */
#define __RTE_BIT_OVERLOAD_V_2R(family, v, fun, qualifier, size, ret_type, arg1_type, arg1_name) \
static inline ret_type \
rte_bit_ ## family ## fun(qualifier uint ## size ## _t *addr, arg1_type arg1_name) \
{ \
	return __rte_bit_ ## family ## v ## fun ## size(addr, arg1_name); \
}

#define __RTE_BIT_OVERLOAD_SZ_2R(family, fun, qualifier, size, ret_type, arg1_type, arg1_name) \
	__RTE_BIT_OVERLOAD_V_2R(family,, fun, qualifier, size, ret_type, arg1_type, arg1_name) \
	__RTE_BIT_OVERLOAD_V_2R(family, v_, fun, qualifier volatile, size, ret_type, arg1_type, \
		arg1_name)

#define __RTE_BIT_OVERLOAD_2R(family, fun, qualifier, ret_type, arg1_type, arg1_name) \
	__RTE_BIT_OVERLOAD_SZ_2R(family, fun, qualifier, 32, ret_type, arg1_type, arg1_name) \
	__RTE_BIT_OVERLOAD_SZ_2R(family, fun, qualifier, 64, ret_type, arg1_type, arg1_name)

/* One void-returning overload, three args. */
#define __RTE_BIT_OVERLOAD_V_3(family, v, fun, qualifier, size, arg1_type, arg1_name, \
		arg2_type, arg2_name) \
static inline void \
rte_bit_ ## family ## fun(qualifier uint ## size ## _t *addr, arg1_type arg1_name, \
		arg2_type arg2_name) \
{ \
	__rte_bit_ ## family ## v ## fun ## size(addr, arg1_name, arg2_name); \
}

#define __RTE_BIT_OVERLOAD_SZ_3(family, fun, qualifier, size, arg1_type, arg1_name, \
		arg2_type, arg2_name) \
	__RTE_BIT_OVERLOAD_V_3(family,, fun, qualifier, size, arg1_type, arg1_name, \
		arg2_type, arg2_name) \
	__RTE_BIT_OVERLOAD_V_3(family, v_, fun, qualifier volatile, size, arg1_type, arg1_name, \
		arg2_type, arg2_name)

#define __RTE_BIT_OVERLOAD_3(family, fun, qualifier, arg1_type, arg1_name, arg2_type, arg2_name) \
	__RTE_BIT_OVERLOAD_SZ_3(family, fun, qualifier, 32, arg1_type, arg1_name, \
		arg2_type, arg2_name) \
	__RTE_BIT_OVERLOAD_SZ_3(family, fun, qualifier, 64, arg1_type, arg1_name, \
		arg2_type, arg2_name)

/* One value-returning overload, three args. */
#define __RTE_BIT_OVERLOAD_V_3R(family, v, fun, qualifier, size, ret_type, arg1_type, arg1_name, \
		arg2_type, arg2_name) \
static inline ret_type \
rte_bit_ ## family ## fun(qualifier uint ## size ## _t *addr, arg1_type arg1_name, \
		arg2_type arg2_name) \
{ \
	return __rte_bit_ ## family ## v ## fun ## size(addr, arg1_name, arg2_name); \
}

#define __RTE_BIT_OVERLOAD_SZ_3R(family, fun, qualifier, size, ret_type, arg1_type, arg1_name, \
		arg2_type, arg2_name) \
	__RTE_BIT_OVERLOAD_V_3R(family,, fun, qualifier, size, ret_type, arg1_type, arg1_name, \
		arg2_type, arg2_name) \
	__RTE_BIT_OVERLOAD_V_3R(family, v_, fun, qualifier volatile, size, ret_type, \
		arg1_type, arg1_name, arg2_type, arg2_name)

#define __RTE_BIT_OVERLOAD_3R(family, fun, qualifier, ret_type, arg1_type, arg1_name, \
		arg2_type, arg2_name) \
	__RTE_BIT_OVERLOAD_SZ_3R(family, fun, qualifier, 32, ret_type, arg1_type, arg1_name, \
		arg2_type, arg2_name) \
	__RTE_BIT_OVERLOAD_SZ_3R(family, fun, qualifier, 64, ret_type, arg1_type, arg1_name, \
		arg2_type, arg2_name)

/* One void-returning overload, four args. */
#define __RTE_BIT_OVERLOAD_V_4(family, v, fun, qualifier, size, arg1_type, arg1_name, \
		arg2_type, arg2_name, arg3_type, arg3_name) \
static inline void \
rte_bit_ ## family ## fun(qualifier uint ## size ## _t *addr, arg1_type arg1_name, \
		arg2_type arg2_name, arg3_type arg3_name) \
{ \
	__rte_bit_ ## family ## v ## fun ## size(addr, arg1_name, arg2_name, arg3_name); \
}

#define __RTE_BIT_OVERLOAD_SZ_4(family, fun, qualifier, size, arg1_type, arg1_name, \
		arg2_type, arg2_name, arg3_type, arg3_name) \
	__RTE_BIT_OVERLOAD_V_4(family,, fun, qualifier, size, arg1_type, arg1_name, \
		arg2_type, arg2_name, arg3_type, arg3_name) \
	__RTE_BIT_OVERLOAD_V_4(family, v_, fun, qualifier volatile, size, arg1_type, arg1_name, \
		arg2_type, arg2_name, arg3_type, arg3_name)

#define __RTE_BIT_OVERLOAD_4(family, fun, qualifier, arg1_type, arg1_name, arg2_type, arg2_name, \
		arg3_type, arg3_name) \
	__RTE_BIT_OVERLOAD_SZ_4(family, fun, qualifier, 32, arg1_type, arg1_name, \
		arg2_type, arg2_name, arg3_type, arg3_name) \
	__RTE_BIT_OVERLOAD_SZ_4(family, fun, qualifier, 64, arg1_type, arg1_name, \
		arg2_type, arg2_name, arg3_type, arg3_name)

/* One value-returning overload, four args. */
#define __RTE_BIT_OVERLOAD_V_4R(family, v, fun, qualifier, size, ret_type, arg1_type, arg1_name, \
		arg2_type, arg2_name, arg3_type, arg3_name) \
static inline ret_type \
rte_bit_ ## family ## fun(qualifier uint ## size ## _t *addr, arg1_type arg1_name, \
		arg2_type arg2_name, arg3_type arg3_name) \
{ \
	return __rte_bit_ ## family ## v ## fun ## size(addr, arg1_name, arg2_name, \
		arg3_name); \
}

#define __RTE_BIT_OVERLOAD_SZ_4R(family, fun, qualifier, size, ret_type, arg1_type, arg1_name, \
		arg2_type, arg2_name, arg3_type, arg3_name) \
	__RTE_BIT_OVERLOAD_V_4R(family,, fun, qualifier, size, ret_type, arg1_type, arg1_name, \
		arg2_type, arg2_name, arg3_type, arg3_name) \
	__RTE_BIT_OVERLOAD_V_4R(family, v_, fun, qualifier volatile, size, ret_type, \
		arg1_type, arg1_name, arg2_type, arg2_name, arg3_type, arg3_name)

#define __RTE_BIT_OVERLOAD_4R(family, fun, qualifier, ret_type, arg1_type, arg1_name, \
		arg2_type, arg2_name, arg3_type, arg3_name) \
	__RTE_BIT_OVERLOAD_SZ_4R(family, fun, qualifier, 32, ret_type, arg1_type, arg1_name, \
		arg2_type, arg2_name, arg3_type, arg3_name) \
	__RTE_BIT_OVERLOAD_SZ_4R(family, fun, qualifier, 64, ret_type, arg1_type, arg1_name, \
		arg2_type, arg2_name, arg3_type, arg3_name)

/* Instantiate the C++ overload set mirroring the C _Generic macros. */
#ifdef ALLOW_EXPERIMENTAL_API
__RTE_BIT_OVERLOAD_2R(, test, const, bool, unsigned int, nr)
__RTE_BIT_OVERLOAD_2(, set,, unsigned int, nr)
__RTE_BIT_OVERLOAD_2(, clear,, unsigned int, nr)
__RTE_BIT_OVERLOAD_3(, assign,, unsigned int, nr, bool, value)
__RTE_BIT_OVERLOAD_2(, flip,, unsigned int, nr)

__RTE_BIT_OVERLOAD_3R(atomic_, test, const, bool, unsigned int, nr, int, memory_order)
__RTE_BIT_OVERLOAD_3(atomic_, set,, unsigned int, nr, int, memory_order)
__RTE_BIT_OVERLOAD_3(atomic_, clear,, unsigned int, nr, int, memory_order)
__RTE_BIT_OVERLOAD_4(atomic_, assign,, unsigned int, nr, bool, value, int, memory_order)
__RTE_BIT_OVERLOAD_3(atomic_, flip,, unsigned int, nr, int, memory_order)
__RTE_BIT_OVERLOAD_3R(atomic_, test_and_set,, bool, unsigned int, nr, int, memory_order)
__RTE_BIT_OVERLOAD_3R(atomic_, test_and_clear,, bool, unsigned int, nr, int, memory_order)
__RTE_BIT_OVERLOAD_4R(atomic_, test_and_assign,, bool, unsigned int, nr, bool, value,
		int, memory_order)
#endif
1505
1506#endif
1507
1508#endif /* _RTE_BITOPS_H_ */
#define RTE_BIT64(nr)
Definition: rte_bitops.h:36
static void rte_bit_relaxed_set32(unsigned int nr, volatile uint32_t *addr)
Definition: rte_bitops.h:691
static uint32_t rte_log2_u32(uint32_t v)
Definition: rte_bitops.h:1312
static uint32_t rte_fls_u32(uint32_t x)
Definition: rte_bitops.h:1188
static void rte_bit_relaxed_clear64(unsigned int nr, volatile uint64_t *addr)
Definition: rte_bitops.h:807
static int rte_bsf32_safe(uint32_t v, uint32_t *pos)
Definition: rte_bitops.h:1126
#define RTE_BIT32(nr)
Definition: rte_bitops.h:44
static uint64_t rte_bit_relaxed_test_and_clear64(unsigned int nr, volatile uint64_t *addr)
Definition: rte_bitops.h:849
static uint32_t rte_bsf64(uint64_t v)
Definition: rte_bitops.h:1147
static unsigned int rte_clz32(uint32_t v)
Definition: rte_bitops.h:972
static uint32_t rte_bit_relaxed_test_and_set32(unsigned int nr, volatile uint32_t *addr)
Definition: rte_bitops.h:728
static uint32_t rte_align32pow2(uint32_t x)
Definition: rte_bitops.h:1240
static uint32_t rte_bit_relaxed_test_and_clear32(unsigned int nr, volatile uint32_t *addr)
Definition: rte_bitops.h:750
static unsigned int rte_popcount32(uint32_t v)
Definition: rte_bitops.h:1028
static uint32_t rte_align32prevpow2(uint32_t x)
Definition: rte_bitops.h:1258
static uint64_t rte_bit_relaxed_test_and_set64(unsigned int nr, volatile uint64_t *addr)
Definition: rte_bitops.h:827
static uint64_t rte_align64pow2(uint64_t v)
Definition: rte_bitops.h:1275
static unsigned int rte_ctz64(uint64_t v)
Definition: rte_bitops.h:1014
static uint32_t rte_bsf32(uint32_t v)
Definition: rte_bitops.h:1106
static int rte_bsf64_safe(uint64_t v, uint32_t *pos)
Definition: rte_bitops.h:1167
static uint64_t rte_bit_relaxed_get64(unsigned int nr, volatile uint64_t *addr)
Definition: rte_bitops.h:773
static unsigned int rte_popcount64(uint64_t v)
Definition: rte_bitops.h:1042
static unsigned int rte_ctz32(uint32_t v)
Definition: rte_bitops.h:1000
static uint64_t rte_align64prevpow2(uint64_t v)
Definition: rte_bitops.h:1293
static unsigned int rte_clz64(uint64_t v)
Definition: rte_bitops.h:986
static void rte_bit_relaxed_clear32(unsigned int nr, volatile uint32_t *addr)
Definition: rte_bitops.h:708
static uint32_t rte_bit_relaxed_get32(unsigned int nr, volatile uint32_t *addr)
Definition: rte_bitops.h:674
static void rte_bit_relaxed_set64(unsigned int nr, volatile uint64_t *addr)
Definition: rte_bitops.h:790
static int rte_is_power_of_2(uint32_t n)
Definition: rte_bitops.h:1225
static uint64_t rte_combine64ms1b(uint64_t v)
Definition: rte_bitops.h:1082
static uint32_t rte_combine32ms1b(uint32_t x)
Definition: rte_bitops.h:1060
static uint32_t rte_log2_u64(uint64_t v)
Definition: rte_bitops.h:1332
static uint32_t rte_fls_u64(uint64_t x)
Definition: rte_bitops.h:1206