#ifndef _RTE_LPM_SSE_H_
#define _RTE_LPM_SSE_H_
#include <rte_branch_prediction.h>
#include <rte_byteorder.h>
#include <rte_common.h>
#include <rte_vect.h>

#ifdef __cplusplus
extern "C" {
#endif

static inline void
rte_lpm_lookupx4(const struct rte_lpm *lpm, xmm_t ip, uint32_t hop[4],
	uint32_t defv)
{
	xmm_t i24;
	rte_xmm_t i8;
	uint32_t tbl[4];
	uint64_t idx, pt, pt2;
	const uint32_t *ptbl;
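
	/* Mask to extract the low byte of each of the 4 IP addresses. */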
	const xmm_t mask8 =
		_mm_set_epi32(UINT8_MAX, UINT8_MAX, UINT8_MAX, UINT8_MAX);
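
	/*
	 * RTE_LPM_VALID_EXT_ENTRY_BITMASK replicated for two LPM entries
	 * packed into one 64-bit value.
	 */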
	const uint64_t mask_xv =
		((uint64_t)RTE_LPM_VALID_EXT_ENTRY_BITMASK |
		(uint64_t)RTE_LPM_VALID_EXT_ENTRY_BITMASK << 32);
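
	/* RTE_LPM_LOOKUP_SUCCESS replicated the same way for two entries. */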
	const uint64_t mask_v =
		((uint64_t)RTE_LPM_LOOKUP_SUCCESS |
		(uint64_t)RTE_LPM_LOOKUP_SUCCESS << 32);
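
	/* Drop the low byte of each IP to get the 4 tbl24[] indexes. */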
	i24 = _mm_srli_epi32(ip, CHAR_BIT);
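
	/* Extract the indexes two at a time and read the tbl24[] entries. */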
	idx = _mm_cvtsi128_si64(i24);
	i24 = _mm_srli_si128(i24, sizeof(uint64_t));

	ptbl = (const uint32_t *)&lpm->tbl24[(uint32_t)idx];
	tbl[0] = *ptbl;
	ptbl = (const uint32_t *)&lpm->tbl24[idx >> 32];
	tbl[1] = *ptbl;

	idx = _mm_cvtsi128_si64(i24);

	ptbl = (const uint32_t *)&lpm->tbl24[(uint32_t)idx];
	tbl[2] = *ptbl;
	ptbl = (const uint32_t *)&lpm->tbl24[idx >> 32];
	tbl[3] = *ptbl;
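
	/* Get the 4 tbl8[] offsets: the low byte of each IP. */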
	i8.x = _mm_and_si128(ip, mask8);
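
	/* Pack the 4 entries into two 64-bit words to test two flags at once. */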
	pt = (uint64_t)tbl[0] |
		(uint64_t)tbl[1] << 32;
	pt2 = (uint64_t)tbl[2] |
		(uint64_t)tbl[3] << 32;
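
	/* Fast path: all 4 lookups hit valid, non-extended tbl24 entries. */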
	if (likely((pt & mask_xv) == mask_v) &&
			likely((pt2 & mask_xv) == mask_v)) {
		*(uint64_t *)hop = pt & RTE_LPM_MASKX4_RES;
		*(uint64_t *)(hop + 2) = pt2 & RTE_LPM_MASKX4_RES;
		return;
	}
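
	/* Slow path: each entry marked as extended needs a tbl8[] lookup. */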
	if (unlikely((pt & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
		i8.u32[0] = i8.u32[0] +
			(uint8_t)tbl[0] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
		ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[0]];
		tbl[0] = *ptbl;
	}
	if (unlikely((pt >> 32 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
		i8.u32[1] = i8.u32[1] +
			(uint8_t)tbl[1] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
		ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[1]];
		tbl[1] = *ptbl;
	}
	if (unlikely((pt2 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
		i8.u32[2] = i8.u32[2] +
			(uint8_t)tbl[2] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
		ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[2]];
		tbl[2] = *ptbl;
	}
	if (unlikely((pt2 >> 32 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
		i8.u32[3] = i8.u32[3] +
			(uint8_t)tbl[3] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
		ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[3]];
		tbl[3] = *ptbl;
	}