#include <errno.h>
#include <limits.h>
#include <stdint.h>
#include <sys/queue.h>

#include <rte_branch_prediction.h>
#include <rte_byteorder.h>
#include <rte_common.h>
#include <rte_memory.h>
#include <rte_vect.h>
/** Max number of characters in LPM name. */
#define RTE_LPM_NAMESIZE                32

/** Maximum depth value possible for IPv4 LPM. */
#define RTE_LPM_MAX_DEPTH               32

/** @internal Total number of tbl24 entries. */
#define RTE_LPM_TBL24_NUM_ENTRIES       (1 << 24)

/** @internal Number of entries in a tbl8 group. */
#define RTE_LPM_TBL8_GROUP_NUM_ENTRIES  256

/** @internal Number of tbl8 groups. */
#define RTE_LPM_TBL8_NUM_GROUPS         256

/** @internal Total number of tbl8 entries. */
#define RTE_LPM_TBL8_NUM_ENTRIES        (RTE_LPM_TBL8_NUM_GROUPS * \
					 RTE_LPM_TBL8_GROUP_NUM_ENTRIES)
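/*
 * Editor's illustration (not part of the original header): how a host
 * byte order IPv4 address splits into the two indexes used by the
 * DIR-24-8 scheme above. The helper name "lpm_example_indexes" is
 * invented for demonstration; the arithmetic mirrors the lookup
 * routines further down in this file.
 */
static inline void
lpm_example_indexes(uint32_t ip, uint32_t *tbl24_index, uint32_t *tbl8_offset)
{
	/* The top 24 bits select one of the 2^24 tbl24 entries... */
	*tbl24_index = ip >> 8;
	/* ...and the low 8 bits select an entry within a tbl8 group. */
	*tbl8_offset = ip & 0xff;
}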
/** @internal Macro to enable/disable run-time checks. */
#if defined(RTE_LIBRTE_LPM_DEBUG)
#define RTE_LPM_RETURN_IF_TRUE(cond, retval) do { \
	if (cond) \
		return (retval); \
} while (0)
#else
#define RTE_LPM_RETURN_IF_TRUE(cond, retval)
#endif
/** @internal bitmask with valid and ext_entry/valid_group fields set */
#define RTE_LPM_VALID_EXT_ENTRY_BITMASK 0x0300

/** Bitmask used to indicate successful lookup */
#define RTE_LPM_LOOKUP_SUCCESS          0x0100
#if RTE_BYTE_ORDER == RTE_LITTLE_ENDIAN
/** @internal Tbl24 entry structure. */
struct rte_lpm_tbl24_entry {
	/* Stores next hop or group index (i.e. gindex) into tbl8. */
	union {
		uint8_t next_hop;
		uint8_t tbl8_gindex;
	};
	/* Using single uint8_t to store 3 values. */
	uint8_t valid     :1; /**< Validation flag. */
	uint8_t ext_entry :1; /**< External entry. */
	uint8_t depth     :6; /**< Rule depth. */
};

/** @internal Tbl8 entry structure. */
struct rte_lpm_tbl8_entry {
	uint8_t next_hop; /**< next hop. */
	/* Using single uint8_t to store 3 values. */
	uint8_t valid       :1; /**< Validation flag. */
	uint8_t valid_group :1; /**< Group validation flag. */
	uint8_t depth       :6; /**< Rule depth. */
};
#else
/* Big endian: same fields, laid out in reverse order. */
struct rte_lpm_tbl24_entry {
	uint8_t depth     :6;
	uint8_t ext_entry :1;
	uint8_t valid     :1;
	union {
		uint8_t tbl8_gindex;
		uint8_t next_hop;
	};
};

struct rte_lpm_tbl8_entry {
	uint8_t depth       :6;
	uint8_t valid_group :1;
	uint8_t valid       :1;
	uint8_t next_hop;
};
#endif
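/*
 * Editor's note (illustrative, not in the original header): the lookup
 * routines below read an entry as a raw uint16_t. With the little-endian
 * layout above, byte 0 holds next_hop/tbl8_gindex and byte 1 holds the
 * flag bits, so "valid" maps to bit 8 (0x0100, RTE_LPM_LOOKUP_SUCCESS)
 * and "valid" plus "ext_entry"/"valid_group" to bits 8-9 (0x0300,
 * RTE_LPM_VALID_EXT_ENTRY_BITMASK). A sketch of the decoding, with an
 * invented helper name:
 */
static inline int
lpm_example_entry_is_extended(uint16_t raw_entry)
{
	/* Extended entries have both the valid and ext_entry bits set. */
	return (raw_entry & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
			RTE_LPM_VALID_EXT_ENTRY_BITMASK;
}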
/** @internal Rule structure. */
struct rte_lpm_rule {
	uint32_t ip;      /**< Rule IP address. */
	uint8_t  next_hop; /**< Rule next hop. */
};
/** @internal Contains metadata about the rules table. */
struct rte_lpm_rule_info {
	uint32_t used_rules; /**< Used rules so far. */
	uint32_t first_rule; /**< Indexes the first rule of a given depth. */
};
/** @internal LPM structure. */
struct rte_lpm {
	/* LPM metadata. */
	char name[RTE_LPM_NAMESIZE]; /**< Name of the lpm. */
	uint32_t max_rules; /**< Max. balanced rules per lpm. */
	struct rte_lpm_rule_info rule_info[RTE_LPM_MAX_DEPTH]; /**< Rule info table. */

	/* LPM Tables. */
	struct rte_lpm_tbl24_entry tbl24[RTE_LPM_TBL24_NUM_ENTRIES]
			__rte_cache_aligned; /**< LPM tbl24 table. */
	struct rte_lpm_tbl8_entry tbl8[RTE_LPM_TBL8_NUM_ENTRIES]
			__rte_cache_aligned; /**< LPM tbl8 table. */
	struct rte_lpm_rule rules_tbl[0]
			__rte_cache_aligned; /**< LPM rules (flexible array). */
};
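/*
 * Editor's illustration (not part of the original header): the raw
 * 16-bit reads in the lookup routines below rely on each table entry
 * being exactly two bytes. A compile-time check with DPDK's
 * RTE_BUILD_BUG_ON (from rte_common.h) could make that explicit; the
 * helper name is invented for demonstration.
 */
static inline void
lpm_example_entry_size_check(void)
{
	RTE_BUILD_BUG_ON(sizeof(struct rte_lpm_tbl24_entry) != 2);
	RTE_BUILD_BUG_ON(sizeof(struct rte_lpm_tbl8_entry) != 2);
}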
/**
 * Create an LPM object.
 *
 * @param name       LPM object name.
 * @param socket_id  NUMA socket for LPM table memory allocation.
 * @param max_rules  Maximum number of LPM rules that can be added.
 * @param flags      This parameter is currently unused.
 * @return
 *   Handle to the LPM object on success, NULL otherwise.
 */
struct rte_lpm *
rte_lpm_create(const char *name, int socket_id, int max_rules, int flags);
/**
 * Add a rule to the LPM table.
 *
 * @param lpm       LPM object handle.
 * @param ip        IP of the rule to be added (host byte order).
 * @param depth     Depth of the rule to be added.
 * @param next_hop  Next hop of the rule to be added.
 * @return 0 on success, negative value otherwise.
 */
int
rte_lpm_add(struct rte_lpm *lpm, uint32_t ip, uint8_t depth, uint8_t next_hop);
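/*
 * Editor's usage sketch (not part of the original header): creating a
 * table and installing one route. The table name, rule count, and route
 * values are invented for illustration; SOCKET_ID_ANY comes from
 * rte_memory.h and rte_lpm_free() is declared elsewhere in this header.
 */
static inline struct rte_lpm *
lpm_example_build(void)
{
	struct rte_lpm *lpm = rte_lpm_create("example_lpm", SOCKET_ID_ANY,
			256 /* max rules */, 0 /* flags, currently unused */);
	if (lpm == NULL)
		return NULL;

	/* 10.0.0.0/8 -> next hop 1; the key is in host byte order. */
	if (rte_lpm_add(lpm, (uint32_t)10 << 24, 8, 1) < 0) {
		rte_lpm_free(lpm);
		return NULL;
	}
	return lpm;
}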
/**
 * Lookup an IP in the LPM table.
 *
 * @return -EINVAL for incorrect arguments, -ENOENT on lookup miss,
 *   0 on lookup hit (*next_hop is then valid).
 */
static inline int
rte_lpm_lookup(struct rte_lpm *lpm, uint32_t ip, uint8_t *next_hop)
{
	unsigned tbl24_index = (ip >> 8);
	uint16_t tbl_entry;

	/* DEBUG: Check user input arguments. */
	RTE_LPM_RETURN_IF_TRUE(((lpm == NULL) || (next_hop == NULL)), -EINVAL);

	/* Copy tbl24 entry. */
	tbl_entry = *(const uint16_t *)&lpm->tbl24[tbl24_index];

	/* Copy tbl8 entry (only if needed). */
	if (unlikely((tbl_entry & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {

		unsigned tbl8_index = (uint8_t)ip +
			((uint8_t)tbl_entry * RTE_LPM_TBL8_GROUP_NUM_ENTRIES);

		tbl_entry = *(const uint16_t *)&lpm->tbl8[tbl8_index];
	}

	*next_hop = (uint8_t)tbl_entry;
	return (tbl_entry & RTE_LPM_LOOKUP_SUCCESS) ? 0 : -ENOENT;
}
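/*
 * Editor's usage sketch (not part of the original header): a single
 * lookup with the miss case handled via the return code. The address
 * 10.1.2.3 matches the 10.0.0.0/8 rule from the sketch above; the
 * fallback value 0 is an invented "drop" marker.
 */
static inline uint16_t
lpm_example_route(struct rte_lpm *lpm)
{
	uint8_t next_hop;

	/* 10.1.2.3 in host byte order. */
	uint32_t dst = ((uint32_t)10 << 24) | (1 << 16) | (2 << 8) | 3;

	if (rte_lpm_lookup(lpm, dst, &next_hop) == 0)
		return next_hop;
	return 0; /* invented "drop" value for misses */
}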
#define rte_lpm_lookup_bulk(lpm, ips, next_hops, n) \
		rte_lpm_lookup_bulk_func(lpm, ips, next_hops, n)
/**
 * Lookup multiple IP addresses in an LPM table.
 *
 * On a miss the corresponding next_hops[] entry has
 * RTE_LPM_LOOKUP_SUCCESS cleared; on a hit its low byte is the next hop.
 */
static inline int
rte_lpm_lookup_bulk_func(const struct rte_lpm *lpm, const uint32_t *ips,
		uint16_t *next_hops, const unsigned n)
{
	unsigned i;
	unsigned tbl24_indexes[n];

	/* DEBUG: Check user input arguments. */
	RTE_LPM_RETURN_IF_TRUE(((lpm == NULL) || (ips == NULL) ||
			(next_hops == NULL)), -EINVAL);

	for (i = 0; i < n; i++) {
		tbl24_indexes[i] = ips[i] >> 8;
	}

	for (i = 0; i < n; i++) {
		/* Simply copy tbl24 entry to output. */
		next_hops[i] = *(const uint16_t *)&lpm->tbl24[tbl24_indexes[i]];

		/* Overwrite output with tbl8 entry if needed. */
		if (unlikely((next_hops[i] & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
				RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {

			unsigned tbl8_index = (uint8_t)ips[i] +
				((uint8_t)next_hops[i] *
				 RTE_LPM_TBL8_GROUP_NUM_ENTRIES);

			next_hops[i] = *(const uint16_t *)&lpm->tbl8[tbl8_index];
		}
	}
	return 0;
}
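/*
 * Editor's usage sketch (not part of the original header): a bulk lookup
 * over a small batch, checking RTE_LPM_LOOKUP_SUCCESS per result. The
 * batch size of 4 and the helper name are invented for illustration.
 */
static inline unsigned
lpm_example_bulk(const struct rte_lpm *lpm, const uint32_t ips[4])
{
	uint16_t next_hops[4];
	unsigned j, hits = 0;

	rte_lpm_lookup_bulk(lpm, ips, next_hops, 4);

	for (j = 0; j < 4; j++) {
		/* a miss leaves RTE_LPM_LOOKUP_SUCCESS cleared */
		if (next_hops[j] & RTE_LPM_LOOKUP_SUCCESS)
			hits++; /* (uint8_t)next_hops[j] is the next hop */
	}
	return hits;
}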
/* Mask four results. */
#define RTE_LPM_MASKX4_RES	UINT64_C(0x00ff00ff00ff00ff)
/**
 * Lookup four IP addresses in an LPM table in parallel.
 *
 * On a lookup miss, the corresponding hop[] position is set to the
 * default value defv.
 */
static inline void
rte_lpm_lookupx4(const struct rte_lpm *lpm, __m128i ip, uint16_t hop[4],
	uint16_t defv)
{
	__m128i i24;
	rte_xmm_t i8;
	uint16_t tbl[4];
	uint64_t idx, pt;

	const __m128i mask8 =
		_mm_set_epi32(UINT8_MAX, UINT8_MAX, UINT8_MAX, UINT8_MAX);

	/*
	 * RTE_LPM_VALID_EXT_ENTRY_BITMASK for 4 LPM entries
	 * as one 64-bit value (0x0300030003000300).
	 */
	const uint64_t mask_xv =
		((uint64_t)RTE_LPM_VALID_EXT_ENTRY_BITMASK |
		(uint64_t)RTE_LPM_VALID_EXT_ENTRY_BITMASK << 16 |
		(uint64_t)RTE_LPM_VALID_EXT_ENTRY_BITMASK << 32 |
		(uint64_t)RTE_LPM_VALID_EXT_ENTRY_BITMASK << 48);

	/*
	 * RTE_LPM_LOOKUP_SUCCESS for 4 LPM entries
	 * as one 64-bit value (0x0100010001000100).
	 */
	const uint64_t mask_v =
		((uint64_t)RTE_LPM_LOOKUP_SUCCESS |
		(uint64_t)RTE_LPM_LOOKUP_SUCCESS << 16 |
		(uint64_t)RTE_LPM_LOOKUP_SUCCESS << 32 |
		(uint64_t)RTE_LPM_LOOKUP_SUCCESS << 48);

	/* get 4 indexes for tbl24[]. */
	i24 = _mm_srli_epi32(ip, CHAR_BIT);

	/* extract values from tbl24[]. */
	idx = _mm_cvtsi128_si64(i24);
	i24 = _mm_srli_si128(i24, sizeof(uint64_t));

	tbl[0] = *(const uint16_t *)&lpm->tbl24[(uint32_t)idx];
	tbl[1] = *(const uint16_t *)&lpm->tbl24[idx >> 32];

	idx = _mm_cvtsi128_si64(i24);

	tbl[2] = *(const uint16_t *)&lpm->tbl24[(uint32_t)idx];
	tbl[3] = *(const uint16_t *)&lpm->tbl24[idx >> 32];

	/* get 4 indexes for tbl8[]. */
	i8.x = _mm_and_si128(ip, mask8);

	pt = (uint64_t)tbl[0] |
		(uint64_t)tbl[1] << 16 |
		(uint64_t)tbl[2] << 32 |
		(uint64_t)tbl[3] << 48;

	/* search successfully finished for all 4 IP addresses. */
	if (likely((pt & mask_xv) == mask_v)) {
		uintptr_t ph = (uintptr_t)hop;
		*(uint64_t *)ph = pt & RTE_LPM_MASKX4_RES;
		return;
	}

	/* resolve the entries that point into a tbl8 group. */
	if (unlikely((pt & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
		i8.u32[0] = i8.u32[0] +
			(uint8_t)tbl[0] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
		tbl[0] = *(const uint16_t *)&lpm->tbl8[i8.u32[0]];
	}
	if (unlikely((pt >> 16 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
		i8.u32[1] = i8.u32[1] +
			(uint8_t)tbl[1] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
		tbl[1] = *(const uint16_t *)&lpm->tbl8[i8.u32[1]];
	}
	if (unlikely((pt >> 32 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
		i8.u32[2] = i8.u32[2] +
			(uint8_t)tbl[2] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
		tbl[2] = *(const uint16_t *)&lpm->tbl8[i8.u32[2]];
	}
	if (unlikely((pt >> 48 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
		i8.u32[3] = i8.u32[3] +
			(uint8_t)tbl[3] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
		tbl[3] = *(const uint16_t *)&lpm->tbl8[i8.u32[3]];
	}

	hop[0] = (tbl[0] & RTE_LPM_LOOKUP_SUCCESS) ? (uint8_t)tbl[0] : defv;
	hop[1] = (tbl[1] & RTE_LPM_LOOKUP_SUCCESS) ? (uint8_t)tbl[1] : defv;
	hop[2] = (tbl[2] & RTE_LPM_LOOKUP_SUCCESS) ? (uint8_t)tbl[2] : defv;
	hop[3] = (tbl[3] & RTE_LPM_LOOKUP_SUCCESS) ? (uint8_t)tbl[3] : defv;
}
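/*
 * Editor's usage sketch (not part of the original header): feeding four
 * addresses to rte_lpm_lookupx4() via an unaligned SSE load. The default
 * value 0xffff is an invented "miss" marker; the helper name is likewise
 * illustrative.
 */
static inline void
lpm_example_x4(const struct rte_lpm *lpm, const uint32_t ips[4],
	uint16_t hop[4])
{
	/* pack the four IPv4 keys into one xmm register */
	__m128i ip = _mm_loadu_si128((const __m128i *)ips);

	/* misses come back as the default value instead of an error code */
	rte_lpm_lookupx4(lpm, ip, hop, 0xffff);
}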