#include <errno.h>
#include <sys/queue.h>
#include <stdint.h>
#include <limits.h>
#include <rte_branch_prediction.h>
#include <rte_memory.h>
#include <rte_common.h>
#include <rte_common_vect.h>
/** Max number of characters in LPM name. */
#define RTE_LPM_NAMESIZE                32

/** @deprecated Memory-location flag for the last parameter of
 * rte_lpm_create(); the table is now always allocated from a memzone,
 * so this value is ignored. */
#define RTE_LPM_HEAP                    0

/** @deprecated See RTE_LPM_HEAP; also ignored. */
#define RTE_LPM_MEMZONE                 1

/** Maximum depth value possible for an IPv4 LPM rule. */
#define RTE_LPM_MAX_DEPTH               32

/** @internal Total number of tbl24 entries (one per possible /24 prefix). */
#define RTE_LPM_TBL24_NUM_ENTRIES       (1 << 24)

/** @internal Number of entries in a tbl8 group (one per last-octet value). */
#define RTE_LPM_TBL8_GROUP_NUM_ENTRIES  256

/** @internal Number of tbl8 groups. */
#define RTE_LPM_TBL8_NUM_GROUPS         256

/** @internal Total number of tbl8 entries. */
#define RTE_LPM_TBL8_NUM_ENTRIES        (RTE_LPM_TBL8_NUM_GROUPS * \
					 RTE_LPM_TBL8_GROUP_NUM_ENTRIES)
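/*
 * How the two tables cooperate (illustrative sketch, not part of the API):
 * the LPM table is a DIR-24-8 trie. The top 24 bits of an IPv4 address in
 * host byte order index tbl24 directly; when a prefix longer than /24
 * exists under that /24, the tbl24 entry is marked "extended" and the last
 * octet indexes into one of the 256-entry tbl8 groups. The helpers and the
 * RTE_LPM_EXAMPLES guard below are hypothetical, shown only to make the
 * index arithmetic concrete.
 */
#ifdef RTE_LPM_EXAMPLES
static inline uint32_t
lpm_example_tbl24_index(uint32_t ip)
{
	/* Top 24 bits select the tbl24 entry. */
	return ip >> 8;
}

static inline uint32_t
lpm_example_tbl8_index(uint32_t ip, uint8_t tbl8_gindex)
{
	/* Group base plus the last octet of the address. */
	return (uint32_t)tbl8_gindex * RTE_LPM_TBL8_GROUP_NUM_ENTRIES +
			(uint8_t)ip;
}
#endif /* RTE_LPM_EXAMPLES */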
/** @internal Macro to enable/disable run-time argument checks. */
#if defined(RTE_LIBRTE_LPM_DEBUG)
#define RTE_LPM_RETURN_IF_TRUE(cond, retval) do { \
	if (cond) \
		return (retval); \
} while (0)
#else
#define RTE_LPM_RETURN_IF_TRUE(cond, retval)
#endif
/** @internal Bitmask with the valid and ext_entry/valid_group flag bits set.
 * Both tbl24 and tbl8 entries fit in 16 bits: the low byte holds the next
 * hop (or the tbl8 group index) and bits 8 and 9 hold the flags, so reading
 * an entry as a uint16_t lets both flags be tested in a single compare. */
#define RTE_LPM_VALID_EXT_ENTRY_BITMASK 0x0300

/** Bitmask used to indicate a successful lookup (the valid bit). */
#define RTE_LPM_LOOKUP_SUCCESS          0x0100
#if RTE_BYTE_ORDER == RTE_LITTLE_ENDIAN
/** @internal Tbl24 entry: next hop (or tbl8 group index) plus flags. */
struct rte_lpm_tbl24_entry {
	/* Stores the next hop or the group index (gindex) into tbl8. */
	union {
		uint8_t next_hop;    /**< Next hop if ext_entry == 0. */
		uint8_t tbl8_gindex; /**< tbl8 group index if ext_entry == 1. */
	};
	/* Using a single uint8_t to store 3 values. */
	uint8_t valid     :1; /**< Validation flag. */
	uint8_t ext_entry :1; /**< External entry (points into tbl8). */
	uint8_t depth     :6; /**< Rule depth. */
};

/** @internal Tbl8 entry structure. */
struct rte_lpm_tbl8_entry {
	uint8_t next_hop;       /**< Next hop. */
	/* Using a single uint8_t to store 3 values. */
	uint8_t valid       :1; /**< Validation flag. */
	uint8_t valid_group :1; /**< Group validation flag. */
	uint8_t depth       :6; /**< Rule depth. */
};
#else
/* Big-endian layouts: same fields with the bit order reversed. */
struct rte_lpm_tbl24_entry {
	uint8_t depth     :6;
	uint8_t ext_entry :1;
	uint8_t valid     :1;
	union {
		uint8_t tbl8_gindex;
		uint8_t next_hop;
	};
};

struct rte_lpm_tbl8_entry {
	uint8_t depth       :6;
	uint8_t valid_group :1;
	uint8_t valid       :1;
	uint8_t next_hop;
};
#endif
/** @internal Rule structure. */
struct rte_lpm_rule {
	uint32_t ip;       /**< Rule IP address. */
	uint8_t  next_hop; /**< Rule next hop. */
};
/** @internal Contains metadata about the rules table. */
struct rte_lpm_rule_info {
	uint32_t used_rules; /**< Used rules so far. */
	uint32_t first_rule; /**< Indexes the first rule of a given depth. */
};
/** @internal LPM structure. */
struct rte_lpm {
	/* LPM metadata. */
	char name[RTE_LPM_NAMESIZE]; /**< Name of the LPM. */
	uint32_t max_rules;          /**< Max. number of rules. */
	struct rte_lpm_rule_info rule_info[RTE_LPM_MAX_DEPTH]; /**< Rule info table. */

	/* LPM tables. */
	struct rte_lpm_tbl24_entry tbl24[RTE_LPM_TBL24_NUM_ENTRIES]
			__rte_cache_aligned; /**< LPM tbl24 table. */
	struct rte_lpm_tbl8_entry tbl8[RTE_LPM_TBL8_NUM_ENTRIES]
			__rte_cache_aligned; /**< LPM tbl8 table. */
	struct rte_lpm_rule rules_tbl[0]
			__rte_cache_aligned; /**< LPM rules (flexible tail array). */
};
/**
 * Create an LPM object.
 *
 * @param name       LPM object name.
 * @param socket_id  NUMA socket ID for memory allocation.
 * @param max_rules  Maximum number of rules that can be added.
 * @param flags      Currently unused.
 * @return Handle to the LPM object on success, NULL otherwise.
 */
struct rte_lpm *
rte_lpm_create(const char *name, int socket_id, int max_rules, int flags);
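/*
 * Usage sketch, assuming an EAL-initialized process: the name, socket id
 * and rule count are illustrative (guarded by the hypothetical
 * RTE_LPM_EXAMPLES macro, as above).
 */
#ifdef RTE_LPM_EXAMPLES
static inline struct rte_lpm *
lpm_example_create(void)
{
	/* Room for 1024 rules on NUMA socket 0; the flags argument is unused. */
	struct rte_lpm *lpm = rte_lpm_create("example_lpm", 0, 1024, 0);

	/* NULL on failure, e.g. if a table with this name already exists. */
	return lpm;
}
#endif /* RTE_LPM_EXAMPLES */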
/**
 * Add a rule to the LPM table.
 *
 * @param lpm       LPM object handle.
 * @param ip        IP of the rule to be added (host byte order).
 * @param depth     Prefix depth of the rule (1..RTE_LPM_MAX_DEPTH).
 * @param next_hop  Next hop of the rule.
 * @return 0 on success, negative value otherwise.
 */
int
rte_lpm_add(struct rte_lpm *lpm, uint32_t ip, uint8_t depth, uint8_t next_hop);
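/*
 * Usage sketch: installing a /16 route. The prefix and next-hop id are
 * illustrative; note that the address is passed in host byte order.
 */
#ifdef RTE_LPM_EXAMPLES
static inline int
lpm_example_add_route(struct rte_lpm *lpm)
{
	uint32_t ip = 0x0a010000; /* 10.1.0.0 in host byte order */

	/* All addresses in 10.1.0.0/16 will resolve to next hop 5. */
	return rte_lpm_add(lpm, ip, 16, 5);
}
#endif /* RTE_LPM_EXAMPLES */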
/**
 * Look up an IP in the LPM table.
 *
 * @param lpm       LPM object handle.
 * @param ip        IP to be looked up (host byte order).
 * @param next_hop  Next hop of the most specific rule found for ip.
 * @return -EINVAL for incorrect arguments, -ENOENT on lookup miss, 0 on hit.
 */
static inline int
rte_lpm_lookup(struct rte_lpm *lpm, uint32_t ip, uint8_t *next_hop)
{
	unsigned tbl24_index = (ip >> 8);
	uint16_t tbl_entry;

	/* DEBUG: Check user input arguments. */
	RTE_LPM_RETURN_IF_TRUE(((lpm == NULL) || (next_hop == NULL)), -EINVAL);

	/* Copy tbl24 entry; reading it as a uint16_t exposes the flag bits. */
	tbl_entry = *(const uint16_t *)&lpm->tbl24[tbl24_index];

	/* Copy tbl8 entry (only if the tbl24 entry is valid and extended). */
	if (unlikely((tbl_entry & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {

		unsigned tbl8_index = (uint8_t)ip +
				((uint8_t)tbl_entry *
				 RTE_LPM_TBL8_GROUP_NUM_ENTRIES);

		tbl_entry = *(const uint16_t *)&lpm->tbl8[tbl8_index];
	}

	*next_hop = (uint8_t)tbl_entry;
	return (tbl_entry & RTE_LPM_LOOKUP_SUCCESS) ? 0 : -ENOENT;
}
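/*
 * Usage sketch: next_hop is meaningful only when the call returns 0.
 * The address and the fallback value are illustrative.
 */
#ifdef RTE_LPM_EXAMPLES
static inline uint8_t
lpm_example_lookup(struct rte_lpm *lpm)
{
	uint8_t next_hop = 0;

	if (rte_lpm_lookup(lpm, 0x0a010203 /* 10.1.2.3 */, &next_hop) != 0)
		return 0; /* miss: fall back to an illustrative default */

	return next_hop;
}
#endif /* RTE_LPM_EXAMPLES */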
/**
 * Look up multiple IP addresses in the LPM table.
 *
 * Each next_hops element is a 16-bit value: the low byte is the next hop,
 * and RTE_LPM_LOOKUP_SUCCESS is set in the high byte on a hit.
 *
 * @param lpm        LPM object handle.
 * @param ips        Array of IPs to be looked up (host byte order).
 * @param next_hops  Output array of n 16-bit result values.
 * @param n          Number of elements in the ips array.
 * @return -EINVAL for incorrect arguments, otherwise 0.
 */
#define rte_lpm_lookup_bulk(lpm, ips, next_hops, n) \
		rte_lpm_lookup_bulk_func(lpm, ips, next_hops, n)
static inline int
rte_lpm_lookup_bulk_func(const struct rte_lpm *lpm, const uint32_t *ips,
		uint16_t *next_hops, const unsigned n)
{
	unsigned i;
	unsigned tbl24_indexes[n];

	/* DEBUG: Check user input arguments. */
	RTE_LPM_RETURN_IF_TRUE(((lpm == NULL) || (ips == NULL) ||
			(next_hops == NULL)), -EINVAL);

	for (i = 0; i < n; i++) {
		tbl24_indexes[i] = ips[i] >> 8;
	}

	for (i = 0; i < n; i++) {
		/* Simply copy tbl24 entry to output. */
		next_hops[i] = *(const uint16_t *)&lpm->tbl24[tbl24_indexes[i]];

		/* Overwrite output with tbl8 entry if needed. */
		if (unlikely((next_hops[i] & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
				RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {

			unsigned tbl8_index = (uint8_t)ips[i] +
					((uint8_t)next_hops[i] *
					 RTE_LPM_TBL8_GROUP_NUM_ENTRIES);

			next_hops[i] = *(const uint16_t *)&lpm->tbl8[tbl8_index];
		}
	}
	return 0;
}
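/*
 * Usage sketch: each 16-bit bulk result carries its own success flag, so
 * misses can be detected per address. The burst contents are illustrative.
 */
#ifdef RTE_LPM_EXAMPLES
static inline void
lpm_example_lookup_bulk(const struct rte_lpm *lpm, uint8_t hops[4])
{
	uint32_t ips[4] = { 0x0a010203, 0x0a020304, 0xc0a80001, 0x08080808 };
	uint16_t next_hops[4];
	unsigned i;

	rte_lpm_lookup_bulk(lpm, ips, next_hops, 4);

	for (i = 0; i < 4; i++)
		hops[i] = (next_hops[i] & RTE_LPM_LOOKUP_SUCCESS) ?
				(uint8_t)next_hops[i] : 0;
}
#endif /* RTE_LPM_EXAMPLES */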
/* Mask of the low (next hop) byte of each of the four 16-bit results. */
#define RTE_LPM_MASKX4_RES	UINT64_C(0x00ff00ff00ff00ff)

/**
 * Look up four IP addresses in the LPM table in parallel (SSE).
 *
 * @param lpm   LPM object handle.
 * @param ip    Four IPs (host byte order) packed into one xmm register.
 * @param hop   Output: next hop for each IP; defv is stored on a miss.
 * @param defv  Default value to store on lookup miss.
 */
static inline void
rte_lpm_lookupx4(const struct rte_lpm *lpm, __m128i ip, uint16_t hop[4],
	uint16_t defv)
{
	__m128i i24;
	rte_xmm_t i8;
	uint16_t tbl[4];
	uint64_t idx, pt;
	const __m128i mask8 =
		_mm_set_epi32(UINT8_MAX, UINT8_MAX, UINT8_MAX, UINT8_MAX);

	/* RTE_LPM_VALID_EXT_ENTRY_BITMASK for four LPM entries. */
	const uint64_t mask_xv =
		((uint64_t)RTE_LPM_VALID_EXT_ENTRY_BITMASK |
		(uint64_t)RTE_LPM_VALID_EXT_ENTRY_BITMASK << 16 |
		(uint64_t)RTE_LPM_VALID_EXT_ENTRY_BITMASK << 32 |
		(uint64_t)RTE_LPM_VALID_EXT_ENTRY_BITMASK << 48);

	/* RTE_LPM_LOOKUP_SUCCESS for four LPM entries. */
	const uint64_t mask_v =
		((uint64_t)RTE_LPM_LOOKUP_SUCCESS |
		(uint64_t)RTE_LPM_LOOKUP_SUCCESS << 16 |
		(uint64_t)RTE_LPM_LOOKUP_SUCCESS << 32 |
		(uint64_t)RTE_LPM_LOOKUP_SUCCESS << 48);
	/* Get the four indexes for tbl24[]: shift each IP right by one octet. */
	i24 = _mm_srli_epi32(ip, CHAR_BIT);

	/* Extract two indexes at a time from the low 64 bits. */
	idx = _mm_cvtsi128_si64(i24);
	i24 = _mm_srli_si128(i24, sizeof(uint64_t));
	tbl[0] = *(const uint16_t *)&lpm->tbl24[(uint32_t)idx];
	tbl[1] = *(const uint16_t *)&lpm->tbl24[idx >> 32];

	idx = _mm_cvtsi128_si64(i24);

	tbl[2] = *(const uint16_t *)&lpm->tbl24[(uint32_t)idx];
	tbl[3] = *(const uint16_t *)&lpm->tbl24[idx >> 32];
	/* Get the four candidate tbl8 offsets (the last octet of each IP). */
	i8.x = _mm_and_si128(ip, mask8);

	/* Pack the four 16-bit tbl24 entries into one 64-bit value. */
	pt = (uint64_t)tbl[0] |
		(uint64_t)tbl[1] << 16 |
		(uint64_t)tbl[2] << 32 |
		(uint64_t)tbl[3] << 48;
	/* Fast path: all four lookups finished in tbl24. */
	if (likely((pt & mask_xv) == mask_v)) {
		uintptr_t ph = (uintptr_t)hop;
		*(uint64_t *)ph = pt & RTE_LPM_MASKX4_RES;
		return;
	}
	/* Slow path: per-lane tbl8 lookups where the entry is extended. */
	if (unlikely((pt & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
		i8.u32[0] = i8.u32[0] +
			(uint8_t)tbl[0] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
		tbl[0] = *(const uint16_t *)&lpm->tbl8[i8.u32[0]];
	}
	if (unlikely((pt >> 16 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
		i8.u32[1] = i8.u32[1] +
			(uint8_t)tbl[1] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
		tbl[1] = *(const uint16_t *)&lpm->tbl8[i8.u32[1]];
	}
	if (unlikely((pt >> 32 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
		i8.u32[2] = i8.u32[2] +
			(uint8_t)tbl[2] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
		tbl[2] = *(const uint16_t *)&lpm->tbl8[i8.u32[2]];
	}
	if (unlikely((pt >> 48 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
		i8.u32[3] = i8.u32[3] +
			(uint8_t)tbl[3] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
		tbl[3] = *(const uint16_t *)&lpm->tbl8[i8.u32[3]];
	}

	/* Store the next hop on hit, the default value on miss. */
	hop[0] = (tbl[0] & RTE_LPM_LOOKUP_SUCCESS) ? (uint8_t)tbl[0] : defv;
	hop[1] = (tbl[1] & RTE_LPM_LOOKUP_SUCCESS) ? (uint8_t)tbl[1] : defv;
	hop[2] = (tbl[2] & RTE_LPM_LOOKUP_SUCCESS) ? (uint8_t)tbl[2] : defv;
	hop[3] = (tbl[3] & RTE_LPM_LOOKUP_SUCCESS) ? (uint8_t)tbl[3] : defv;
}
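/*
 * Usage sketch: four lookups per call; lanes that miss receive defv.
 * The addresses and the default hop value are illustrative.
 */
#ifdef RTE_LPM_EXAMPLES
static inline void
lpm_example_lookupx4(const struct rte_lpm *lpm, uint16_t hop[4])
{
	/* _mm_set_epi32 lists lanes from most significant down, so the
	 * last argument becomes hop[0]. */
	__m128i ip = _mm_set_epi32(0x08080808, 0xc0a80001,
			0x0a020304, 0x0a010203);

	rte_lpm_lookupx4(lpm, ip, hop, UINT16_MAX);
}
#endif /* RTE_LPM_EXAMPLES */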