#ifndef __INCLUDE_RTE_BITMAP_H__
#define __INCLUDE_RTE_BITMAP_H__

#include <stdint.h>
#include <string.h>

#include <rte_common.h>
#include <rte_branch_prediction.h>
#include <rte_prefetch.h>
#ifndef RTE_BITMAP_OPTIMIZATIONS
#define RTE_BITMAP_OPTIMIZATIONS 1
#endif

/* Slab: one 64-bit word of bitmap data */
#define RTE_BITMAP_SLAB_BIT_SIZE 64
#define RTE_BITMAP_SLAB_BIT_SIZE_LOG2 6
#define RTE_BITMAP_SLAB_BIT_MASK (RTE_BITMAP_SLAB_BIT_SIZE - 1)

/* Cache line (CL), expressed in bits and in slabs */
#define RTE_BITMAP_CL_BIT_SIZE (RTE_CACHE_LINE_SIZE * 8)
#define RTE_BITMAP_CL_BIT_SIZE_LOG2 (RTE_CACHE_LINE_SIZE_LOG2 + 3)
#define RTE_BITMAP_CL_BIT_MASK (RTE_BITMAP_CL_BIT_SIZE - 1)

#define RTE_BITMAP_CL_SLAB_SIZE (RTE_BITMAP_CL_BIT_SIZE / RTE_BITMAP_SLAB_BIT_SIZE)
#define RTE_BITMAP_CL_SLAB_SIZE_LOG2 (RTE_BITMAP_CL_BIT_SIZE_LOG2 - RTE_BITMAP_SLAB_BIT_SIZE_LOG2)
#define RTE_BITMAP_CL_SLAB_MASK (RTE_BITMAP_CL_SLAB_SIZE - 1)
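/*
 * Two-level bitmap: each bit of array1 summarizes one cache line of array2,
 * so a scan only touches array2 cache lines that are known to contain at
 * least one set bit. The structure below is a sketch that reconstructs the
 * fields referenced by the inline functions in this excerpt; field order and
 * padding may differ from the upstream definition.
 */
struct rte_bitmap {
	/* Context for array1 and array2 */
	uint64_t *array1;     /**< Bitmap array1: one bit per array2 cache line */
	uint64_t *array2;     /**< Bitmap array2: one bit per bitmap bit */
	uint32_t array1_size; /**< Number of 64-bit slabs in array1 (power of 2) */
	uint32_t array2_size; /**< Number of 64-bit slabs in array2 */

	/* Context for the "scan next" operation */
	uint32_t index1;      /**< Scan: current array1 slab */
	uint32_t offset1;     /**< Scan: current bit within the current array1 slab */
	uint32_t index2;      /**< Scan: current array2 slab */
	uint32_t go2;         /**< Scan: go/stop condition for the current array2 cache line */

	/* Storage space for array1 and array2 */
	uint8_t memory[0];
};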
static inline void
__rte_bitmap_index1_inc(struct rte_bitmap *bmp)
{
	/* Wrap-around increment; array1_size is a power of 2 */
	bmp->index1 = (bmp->index1 + 1) & (bmp->array1_size - 1);
}

static inline uint64_t
__rte_bitmap_mask1_get(struct rte_bitmap *bmp)
{
	/* Mask off the current scan offset and everything below it */
	return (~1lu) << bmp->offset1;
}
static inline void
__rte_bitmap_index2_set(struct rte_bitmap *bmp)
{
	/* Bit (index1, offset1) of array1 selects one cache line of array2 */
	bmp->index2 = (((bmp->index1 << RTE_BITMAP_SLAB_BIT_SIZE_LOG2) + bmp->offset1) << RTE_BITMAP_CL_SLAB_SIZE_LOG2);
}
#if RTE_BITMAP_OPTIMIZATIONS

/* Return 1 and write the position of the least significant set bit of slab
 * to *pos; return 0 if the slab is empty. */
static inline int
rte_bsf64(uint64_t slab, uint32_t *pos)
{
	if (likely(slab == 0)) {
		return 0;
	}

	*pos = __builtin_ctzll(slab);
	return 1;
}

#else

/* Portable fallback: linear search for the least significant set bit */
static inline int
rte_bsf64(uint64_t slab, uint32_t *pos)
{
	uint64_t mask;
	uint32_t i;

	if (likely(slab == 0)) {
		return 0;
	}

	for (i = 0, mask = 1; i < RTE_BITMAP_SLAB_BIT_SIZE; i ++, mask <<= 1) {
		if (slab & mask) {
			*pos = i;
			return 1;
		}
	}

	return 0;
}

#endif
static inline uint32_t
__rte_bitmap_get_memory_footprint(uint32_t n_bits,
	uint32_t *array1_byte_offset, uint32_t *array1_slabs,
	uint32_t *array2_byte_offset, uint32_t *array2_slabs)
{
	uint32_t n_slabs_context, n_slabs_array1, n_cache_lines_context_and_array1;
	uint32_t n_cache_lines_array2;
	uint32_t n_bytes_total;

	n_cache_lines_array2 = (n_bits + RTE_BITMAP_CL_BIT_SIZE - 1) / RTE_BITMAP_CL_BIT_SIZE;
	n_slabs_array1 = (n_cache_lines_array2 + RTE_BITMAP_SLAB_BIT_SIZE - 1) / RTE_BITMAP_SLAB_BIT_SIZE;
	/* Round array1 up to a power of 2, as required by the wrap-around mask
	 * in __rte_bitmap_index1_inc() */
	n_slabs_array1 = rte_align32pow2(n_slabs_array1);
	n_slabs_context = (sizeof(struct rte_bitmap) + (RTE_BITMAP_SLAB_BIT_SIZE / 8) - 1) / (RTE_BITMAP_SLAB_BIT_SIZE / 8);
	n_cache_lines_context_and_array1 = (n_slabs_context + n_slabs_array1 + RTE_BITMAP_CL_SLAB_SIZE - 1) / RTE_BITMAP_CL_SLAB_SIZE;
	n_bytes_total = (n_cache_lines_context_and_array1 + n_cache_lines_array2) * RTE_CACHE_LINE_SIZE;

	if (array1_byte_offset) {
		*array1_byte_offset = n_slabs_context * (RTE_BITMAP_SLAB_BIT_SIZE / 8);
	}
	if (array1_slabs) {
		*array1_slabs = n_slabs_array1;
	}

	if (array2_byte_offset) {
		*array2_byte_offset = n_cache_lines_context_and_array1 * RTE_CACHE_LINE_SIZE;
	}
	if (array2_slabs) {
		*array2_slabs = n_cache_lines_array2 * RTE_BITMAP_CL_SLAB_SIZE;
	}

	return n_bytes_total;
}
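/*
 * Worked example of the footprint computation (a sketch, assuming a 64-byte
 * cache line, a 64-bit target, and the 40-byte struct rte_bitmap declared
 * above): for n_bits = 1 << 20, array2 spans 1048576 / 512 = 2048 cache
 * lines, array1 needs ceil(2048 / 64) = 32 slabs (already a power of 2), the
 * context takes ceil(40 / 8) = 5 slabs, context plus array1 fit in
 * ceil((5 + 32) / 8) = 5 cache lines, so the total is
 * (5 + 2048) * 64 = 131392 bytes, only a few hundred bytes more than the
 * 131072 bytes of raw bitmap data.
 */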
static inline void
__rte_bitmap_scan_init(struct rte_bitmap *bmp)
{
	bmp->index1 = bmp->array1_size - 1;
	bmp->offset1 = RTE_BITMAP_SLAB_BIT_SIZE - 1;
	__rte_bitmap_index2_set(bmp);
	bmp->index2 += RTE_BITMAP_CL_SLAB_SIZE;

	bmp->go2 = 0;
}
/* Bitmap memory footprint, in bytes, for a bitmap of n_bits bits */
static inline uint32_t
rte_bitmap_get_memory_footprint(uint32_t n_bits)
{
	/* Check input arguments */
	if (n_bits == 0) {
		return 0;
	}

	return __rte_bitmap_get_memory_footprint(n_bits, NULL, NULL, NULL, NULL);
}
/* Bitmap initialization: mem must be cache line aligned and at least
 * rte_bitmap_get_memory_footprint(n_bits) bytes in size */
static inline struct rte_bitmap *
rte_bitmap_init(uint32_t n_bits, uint8_t *mem, uint32_t mem_size)
{
	struct rte_bitmap *bmp;
	uint32_t array1_byte_offset, array1_slabs, array2_byte_offset, array2_slabs;
	uint32_t size;

	/* Check input arguments */
	if (n_bits == 0) {
		return NULL;
	}
	if ((mem == NULL) || (((uintptr_t) mem) & RTE_CACHE_LINE_MASK)) {
		return NULL;
	}

	size = __rte_bitmap_get_memory_footprint(n_bits,
		&array1_byte_offset, &array1_slabs,
		&array2_byte_offset, &array2_slabs);
	/* Fail if the provided memory is smaller than the required footprint */
	if (size > mem_size) {
		return NULL;
	}

	/* Setup bitmap */
	memset(mem, 0, size);
	bmp = (struct rte_bitmap *) mem;

	bmp->array1 = (uint64_t *) &mem[array1_byte_offset];
	bmp->array1_size = array1_slabs;
	bmp->array2 = (uint64_t *) &mem[array2_byte_offset];
	bmp->array2_size = array2_slabs;

	__rte_bitmap_scan_init(bmp);

	return bmp;
}
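/*
 * Typical setup (a sketch; rte_zmalloc() from rte_malloc.h is used here for
 * the cache line aligned allocation, but any cache line aligned buffer of
 * sufficient size works):
 *
 *	uint32_t n_bits = 1 << 20;
 *	uint32_t size = rte_bitmap_get_memory_footprint(n_bits);
 *	uint8_t *mem = rte_zmalloc("bitmap", size, RTE_CACHE_LINE_SIZE);
 *	struct rte_bitmap *bmp = rte_bitmap_init(n_bits, mem, size);
 *
 *	if (bmp == NULL) {
 *		... handle the error ...
 *	}
 */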
/* Bitmap reset: clear all bits and restart the scan */
static inline void
rte_bitmap_reset(struct rte_bitmap *bmp)
{
	memset(bmp->array1, 0, bmp->array1_size * sizeof(uint64_t));
	memset(bmp->array2, 0, bmp->array2_size * sizeof(uint64_t));

	__rte_bitmap_scan_init(bmp);
}
/* Prefetch the array2 cache line that holds bit "pos" into the L1 cache */
static inline void
rte_bitmap_prefetch0(struct rte_bitmap *bmp, uint32_t pos)
{
	uint64_t *slab2;
	uint32_t index2;

	index2 = pos >> RTE_BITMAP_SLAB_BIT_SIZE_LOG2;
	slab2 = bmp->array2 + index2;
	rte_prefetch0((void *) slab2);
}
/* Bitmap bit get: return 0 if the bit at position "pos" is cleared,
 * non-zero otherwise */
static inline uint64_t
rte_bitmap_get(struct rte_bitmap *bmp, uint32_t pos)
{
	uint64_t *slab2;
	uint32_t index2, offset2;

	index2 = pos >> RTE_BITMAP_SLAB_BIT_SIZE_LOG2;
	offset2 = pos & RTE_BITMAP_SLAB_BIT_MASK;
	slab2 = bmp->array2 + index2;
	return (*slab2) & (1lu << offset2);
}
/* Bitmap bit set */
static inline void
rte_bitmap_set(struct rte_bitmap *bmp, uint32_t pos)
{
	uint64_t *slab1, *slab2;
	uint32_t index1, index2, offset1, offset2;

	/* Set bit in array2 slab and set bit in array1 slab */
	index2 = pos >> RTE_BITMAP_SLAB_BIT_SIZE_LOG2;
	offset2 = pos & RTE_BITMAP_SLAB_BIT_MASK;
	index1 = pos >> (RTE_BITMAP_SLAB_BIT_SIZE_LOG2 + RTE_BITMAP_CL_BIT_SIZE_LOG2);
	offset1 = (pos >> RTE_BITMAP_CL_BIT_SIZE_LOG2) & RTE_BITMAP_SLAB_BIT_MASK;
	slab2 = bmp->array2 + index2;
	slab1 = bmp->array1 + index1;

	*slab2 |= 1lu << offset2;
	*slab1 |= 1lu << offset1;
}
/* Bitmap slab set: OR the 64-bit array2 slab that contains bit "pos" with
 * "slab" */
static inline void
rte_bitmap_set_slab(struct rte_bitmap *bmp, uint32_t pos, uint64_t slab)
{
	uint64_t *slab1, *slab2;
	uint32_t index1, index2, offset1;

	/* Set bits in array2 slab and set bit in array1 slab */
	index2 = pos >> RTE_BITMAP_SLAB_BIT_SIZE_LOG2;
	index1 = pos >> (RTE_BITMAP_SLAB_BIT_SIZE_LOG2 + RTE_BITMAP_CL_BIT_SIZE_LOG2);
	offset1 = (pos >> RTE_BITMAP_CL_BIT_SIZE_LOG2) & RTE_BITMAP_SLAB_BIT_MASK;
	slab2 = bmp->array2 + index2;
	slab1 = bmp->array1 + index1;

	*slab2 |= slab;
	*slab1 |= 1lu << offset1;
}
/* Return non-zero if the array2 cache line starting at slab2 has at least
 * one bit set (assumes RTE_BITMAP_CL_SLAB_SIZE == 8) */
static inline uint64_t
__rte_bitmap_line_not_empty(uint64_t *slab2)
{
	uint64_t v1, v2, v3, v4;

	v1 = slab2[0] | slab2[1];
	v2 = slab2[2] | slab2[3];
	v3 = slab2[4] | slab2[5];
	v4 = slab2[6] | slab2[7];
	v1 |= v2;
	v3 |= v4;

	return v1 | v3;
}
/* Bitmap bit clear: the array1 summary bit is only cleared when the whole
 * array2 cache line it covers becomes empty */
static inline void
rte_bitmap_clear(struct rte_bitmap *bmp, uint32_t pos)
{
	uint64_t *slab1, *slab2;
	uint32_t index1, index2, offset1, offset2;

	/* Clear bit in array2 slab */
	index2 = pos >> RTE_BITMAP_SLAB_BIT_SIZE_LOG2;
	offset2 = pos & RTE_BITMAP_SLAB_BIT_MASK;
	slab2 = bmp->array2 + index2;

	/* Return if the array2 slab is not all-zeros */
	*slab2 &= ~(1lu << offset2);
	if (*slab2) {
		return;
	}

	/* Check the entire array2 cache line for all-zeros */
	index2 &= ~ RTE_BITMAP_CL_SLAB_MASK;
	slab2 = bmp->array2 + index2;
	if (__rte_bitmap_line_not_empty(slab2)) {
		return;
	}

	/* The array2 cache line is all-zeros, so clear the bit in array1 */
	index1 = pos >> (RTE_BITMAP_SLAB_BIT_SIZE_LOG2 + RTE_BITMAP_CL_BIT_SIZE_LOG2);
	offset1 = (pos >> RTE_BITMAP_CL_BIT_SIZE_LOG2) & RTE_BITMAP_SLAB_BIT_MASK;
	slab1 = bmp->array1 + index1;
	*slab1 &= ~(1lu << offset1);
}
/* Search array1 for the next non-empty array2 cache line, starting from the
 * current scan position; return 1 and update index1/offset1 on success */
static inline int
__rte_bitmap_scan_search(struct rte_bitmap *bmp)
{
	uint64_t value1;
	uint32_t i;

	/* Check the current array1 slab */
	value1 = bmp->array1[bmp->index1];
	value1 &= __rte_bitmap_mask1_get(bmp);

	if (rte_bsf64(value1, &bmp->offset1)) {
		return 1;
	}

	__rte_bitmap_index1_inc(bmp);
	bmp->offset1 = 0;

	/* Look for another non-empty array1 slab */
	for (i = 0; i < bmp->array1_size; i ++, __rte_bitmap_index1_inc(bmp)) {
		value1 = bmp->array1[bmp->index1];

		if (rte_bsf64(value1, &bmp->offset1)) {
			return 1;
		}
	}

	return 0;
}
static inline void
__rte_bitmap_scan_read_init(struct rte_bitmap *bmp)
{
	__rte_bitmap_index2_set(bmp);
	bmp->go2 = 1;
}
static inline int
__rte_bitmap_scan_read(struct rte_bitmap *bmp, uint32_t *pos, uint64_t *slab)
{
	uint64_t *slab2;

	slab2 = bmp->array2 + bmp->index2;
	for ( ; bmp->go2 ; bmp->index2 ++, slab2 ++, bmp->go2 = bmp->index2 & RTE_BITMAP_CL_SLAB_MASK) {
		if (*slab2) {
			*pos = bmp->index2 << RTE_BITMAP_SLAB_BIT_SIZE_LOG2;
			*slab = *slab2;

			bmp->index2 ++;
			slab2 ++;
			bmp->go2 = bmp->index2 & RTE_BITMAP_CL_SLAB_MASK;
			return 1;
		}
	}

	return 0;
}
/* Bitmap scan: return the next non-empty 64-bit slab together with the bit
 * position of its first bit. The scan is circular (it wraps around), and
 * subsequent calls resume after the last slab returned; 0 is returned only
 * when the bitmap is empty. */
static inline int
rte_bitmap_scan(struct rte_bitmap *bmp, uint32_t *pos, uint64_t *slab)
{
	/* Return data from the current array2 cache line if available */
	if (__rte_bitmap_scan_read(bmp, pos, slab)) {
		return 1;
	}

	/* Look for the next non-empty array2 cache line */
	if (__rte_bitmap_scan_search(bmp)) {
		__rte_bitmap_scan_read_init(bmp);
		__rte_bitmap_scan_read(bmp, pos, slab);
		return 1;
	}

	/* Empty bitmap */
	return 0;
}
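/*
 * Scan usage (a sketch): the scan returns whole 64-bit slabs, so the caller
 * walks the bits inside each returned slab. process_bit() below is a
 * hypothetical callback standing in for application code:
 *
 *	uint32_t pos, i;
 *	uint64_t slab;
 *
 *	while (rte_bitmap_scan(bmp, &pos, &slab)) {
 *		for (i = 0; i < RTE_BITMAP_SLAB_BIT_SIZE; i ++) {
 *			if (slab & (1lu << i)) {
 *				process_bit(pos + i);
 *				rte_bitmap_clear(bmp, pos + i);
 *			}
 *		}
 *	}
 *
 * The bits are cleared as they are processed; since the scan wraps around,
 * the loop above would otherwise never terminate on a non-empty bitmap.
 */

#endif /* __INCLUDE_RTE_BITMAP_H__ */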