#ifndef _RTE_LPM_SSE_H_
#define _RTE_LPM_SSE_H_

#include <rte_branch_prediction.h>
#include <rte_byteorder.h>
#include <rte_common.h>
#include <rte_vect.h>
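/*
 * Four IPv4 lookups in one call, the SSE flavour of rte_lpm_lookup().
 * A usage sketch (the lpm handle and the host-order addresses ip0..ip3
 * are placeholders, not part of this header):
 *
 *	uint32_t hop[4];
 *	xmm_t ip = _mm_set_epi32(ip3, ip2, ip1, ip0);
 *
 *	rte_lpm_lookupx4(lpm, ip, hop, UINT32_MAX);
 *	// hop[i] holds the next hop for lane i, or UINT32_MAX on a miss.
 */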
static inline void
rte_lpm_lookupx4(const struct rte_lpm *lpm, xmm_t ip, uint32_t hop[4],
	uint32_t defv)
{
	__m128i i24;
	rte_xmm_t i8;
	uint32_t tbl[4];
	uint64_t idx, pt, pt2;
	const uint32_t *ptbl;
	/* mask of the low byte (last IP byte) in each of the 4 lanes. */
	const __m128i mask8 =
		_mm_set_epi32(UINT8_MAX, UINT8_MAX, UINT8_MAX, UINT8_MAX);
	/*
	 * RTE_LPM_VALID_EXT_ENTRY_BITMASK for 2 LPM entries
	 * as one 64-bit value (0x0300000003000000).
	 */
	const uint64_t mask_xv =
		((uint64_t)RTE_LPM_VALID_EXT_ENTRY_BITMASK |
		(uint64_t)RTE_LPM_VALID_EXT_ENTRY_BITMASK << 32);
	/*
	 * RTE_LPM_LOOKUP_SUCCESS for 2 LPM entries
	 * as one 64-bit value (0x0100000001000000).
	 */
	const uint64_t mask_v =
		((uint64_t)RTE_LPM_LOOKUP_SUCCESS |
		(uint64_t)RTE_LPM_LOOKUP_SUCCESS << 32);
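	/*
	 * Each 32-bit entry carries a valid bit (RTE_LPM_LOOKUP_SUCCESS)
	 * and a valid_group bit; together they form
	 * RTE_LPM_VALID_EXT_ENTRY_BITMASK. Packing two entries per 64-bit
	 * word lets a single compare below validate two lanes at once.
	 */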
	/* get 4 indexes for tbl24[] (upper 24 bits of each IP). */
	i24 = _mm_srli_epi32(ip, CHAR_BIT);
	/* extract values from tbl24[]. */
	idx = _mm_cvtsi128_si64(i24);
	i24 = _mm_srli_si128(i24, 8);
	ptbl = (const uint32_t *)&lpm->tbl24[(uint32_t)idx];
	tbl[0] = *ptbl;
	ptbl = (const uint32_t *)&lpm->tbl24[idx >> 32];
	tbl[1] = *ptbl;
	idx = _mm_cvtsi128_si64(i24);
	ptbl = (const uint32_t *)&lpm->tbl24[(uint32_t)idx];
	tbl[2] = *ptbl;
	ptbl = (const uint32_t *)&lpm->tbl24[idx >> 32];
	tbl[3] = *ptbl;
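	/*
	 * SSE has no gather instruction, so the four tbl24[] reads above
	 * are done as scalar loads: two 32-bit indexes are pulled out of
	 * the low 64 bits of i24 at a time, then the register is shifted
	 * down by 8 bytes to expose the next pair.
	 */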
	/* get 4 indexes for tbl8[] (low byte of each IP). */
	i8.x = _mm_and_si128(ip, mask8);
	pt = (uint64_t)tbl[0] |
		(uint64_t)tbl[1] << 32;
	pt2 = (uint64_t)tbl[2] |
		(uint64_t)tbl[3] << 32;
	/* search successfully finished for all 4 IP addresses. */
	if (likely((pt & mask_xv) == mask_v) &&
			likely((pt2 & mask_xv) == mask_v)) {
		*(uint64_t *)hop = pt & RTE_LPM_MASKX4_RES;
		*(uint64_t *)(hop + 2) = pt2 & RTE_LPM_MASKX4_RES;
		return;
	}
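	/*
	 * Slow path: at least one lane is invalid or has valid_group set.
	 * Each such lane is resolved through tbl8[]: the group number from
	 * the low byte of the tbl24 entry, scaled by
	 * RTE_LPM_TBL8_GROUP_NUM_ENTRIES, plus the last byte of the IP.
	 */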
	if (unlikely((pt & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
		i8.u32[0] = i8.u32[0] +
			(uint8_t)tbl[0] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
		ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[0]];
		tbl[0] = *ptbl;
	}
	if (unlikely((pt >> 32 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
		i8.u32[1] = i8.u32[1] +
			(uint8_t)tbl[1] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
		ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[1]];
		tbl[1] = *ptbl;
	}
	if (unlikely((pt2 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
		i8.u32[2] = i8.u32[2] +
			(uint8_t)tbl[2] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
		ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[2]];
		tbl[2] = *ptbl;
	}
	if (unlikely((pt2 >> 32 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
		i8.u32[3] = i8.u32[3] +
			(uint8_t)tbl[3] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
		ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[3]];
		tbl[3] = *ptbl;
	}
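	/*
	 * The excerpt is truncated here; a completion sketch following the
	 * scalar rte_lpm_lookup() semantics: a lane whose entry carries
	 * RTE_LPM_LOOKUP_SUCCESS returns its 24-bit next hop, any other
	 * lane returns the caller-supplied default defv.
	 */
	hop[0] = (tbl[0] & RTE_LPM_LOOKUP_SUCCESS) ? tbl[0] & 0x00FFFFFF : defv;
	hop[1] = (tbl[1] & RTE_LPM_LOOKUP_SUCCESS) ? tbl[1] & 0x00FFFFFF : defv;
	hop[2] = (tbl[2] & RTE_LPM_LOOKUP_SUCCESS) ? tbl[2] & 0x00FFFFFF : defv;
	hop[3] = (tbl[3] & RTE_LPM_LOOKUP_SUCCESS) ? tbl[3] & 0x00FFFFFF : defv;
}

#endif /* _RTE_LPM_SSE_H_ */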