#ifndef _RTE_LPM_SSE_H_
#define _RTE_LPM_SSE_H_

#include <rte_branch_prediction.h>
#include <rte_byteorder.h>
#include <rte_common.h>
#include <rte_vect.h>

#ifdef __cplusplus
extern "C" {
#endif
static inline void
rte_lpm_lookupx4(const struct rte_lpm *lpm, xmm_t ip, uint32_t hop[4],
	uint32_t defv)
{
	__m128i i24;
	rte_xmm_t i8;
	uint32_t tbl[4];
	uint64_t idx, pt, pt2;
	const uint32_t *ptbl;
	/* mask for the lowest byte (tbl8 offset) of each of the four IPs. */
	const __m128i mask8 =
		_mm_set_epi32(UINT8_MAX, UINT8_MAX, UINT8_MAX, UINT8_MAX);
	/*
	 * RTE_LPM_VALID_EXT_ENTRY_BITMASK for 2 LPM entries
	 * as one 64-bit value (0x0300000003000000).
	 */
	const uint64_t mask_xv =
		((uint64_t)RTE_LPM_VALID_EXT_ENTRY_BITMASK |
		(uint64_t)RTE_LPM_VALID_EXT_ENTRY_BITMASK << 32);

	/*
	 * RTE_LPM_LOOKUP_SUCCESS for 2 LPM entries
	 * as one 64-bit value (0x0100000001000000).
	 */
	const uint64_t mask_v =
		((uint64_t)RTE_LPM_LOOKUP_SUCCESS |
		(uint64_t)RTE_LPM_LOOKUP_SUCCESS << 32);
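	/*
	 * Worked example of the paired test (using the flag values noted
	 * above): a valid, non-extended entry satisfies
	 * (entry & 0x03000000) == 0x01000000, so for two such entries
	 * packed into one 64-bit word,
	 * (pt & 0x0300000003000000) == 0x0100000001000000, i.e.
	 * (pt & mask_xv) == mask_v, and both lookups finished in tbl24.
	 */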
	/* get 4 indexes for tbl24[]. */
	i24 = _mm_srli_epi32(ip, CHAR_BIT);
	/* extract values from tbl24[] */
	idx = _mm_cvtsi128_si64(i24);
	i24 = _mm_srli_si128(i24, sizeof(uint64_t));

	ptbl = (const uint32_t *)&lpm->tbl24[(uint32_t)idx];
	tbl[0] = *ptbl;
	ptbl = (const uint32_t *)&lpm->tbl24[idx >> 32];
	tbl[1] = *ptbl;

	idx = _mm_cvtsi128_si64(i24);

	ptbl = (const uint32_t *)&lpm->tbl24[(uint32_t)idx];
	tbl[2] = *ptbl;
	ptbl = (const uint32_t *)&lpm->tbl24[idx >> 32];
	tbl[3] = *ptbl;
	/* get 4 indexes for tbl8[]. */
	i8.x = _mm_and_si128(ip, mask8);
	pt = (uint64_t)tbl[0] |
		(uint64_t)tbl[1] << 32;
	pt2 = (uint64_t)tbl[2] |
		(uint64_t)tbl[3] << 32;
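	/*
	 * tbl24 entries are 32 bits; packing two of them into each of pt
	 * and pt2 lets a single 64-bit compare validate two lookup results
	 * at once, so the common case takes only two branches for all four
	 * addresses.
	 */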
	/* search successfully finished for all 4 IP addresses. */
	if (likely((pt & mask_xv) == mask_v) &&
			likely((pt2 & mask_xv) == mask_v)) {
		*(uint64_t *)hop = pt & RTE_LPM_MASKX4_RES;
		*(uint64_t *)(hop + 2) = pt2 & RTE_LPM_MASKX4_RES;
		return;
	}
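	/*
	 * Slow path: at least one of the four entries is invalid or
	 * extended (i.e. points to a tbl8 group). Resolve each lane
	 * individually: the entry's low byte selects the tbl8 group and
	 * the IP's low byte indexes within that group.
	 */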
	if (unlikely((pt & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
		i8.u32[0] = i8.u32[0] +
			(uint8_t)tbl[0] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
		ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[0]];
		tbl[0] = *ptbl;
	}
	if (unlikely((pt >> 32 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
		i8.u32[1] = i8.u32[1] +
			(uint8_t)tbl[1] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
		ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[1]];
		tbl[1] = *ptbl;
	}
	if (unlikely((pt2 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
		i8.u32[2] = i8.u32[2] +
			(uint8_t)tbl[2] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
		ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[2]];
		tbl[2] = *ptbl;
	}
	if (unlikely((pt2 >> 32 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
		i8.u32[3] = i8.u32[3] +
			(uint8_t)tbl[3] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
		ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[3]];
		tbl[3] = *ptbl;
	}

	/* fall back to the default next hop for lanes whose lookup failed. */
	hop[0] = (tbl[0] & RTE_LPM_LOOKUP_SUCCESS) ? tbl[0] & 0x00FFFFFF : defv;
	hop[1] = (tbl[1] & RTE_LPM_LOOKUP_SUCCESS) ? tbl[1] & 0x00FFFFFF : defv;
	hop[2] = (tbl[2] & RTE_LPM_LOOKUP_SUCCESS) ? tbl[2] & 0x00FFFFFF : defv;
	hop[3] = (tbl[3] & RTE_LPM_LOOKUP_SUCCESS) ? tbl[3] & 0x00FFFFFF : defv;
}

#ifdef __cplusplus
}
#endif

#endif /* _RTE_LPM_SSE_H_ */
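/*
 * Minimal usage sketch (editorial illustration, not part of the original
 * header). It assumes a table already populated with rte_lpm_create() and
 * rte_lpm_add(); the names "lpm" and "ips" and the default next hop
 * UINT32_MAX are hypothetical.
 *
 *	uint32_t ips[4];	// four destination IPs, host byte order
 *	uint32_t hops[4];
 *	xmm_t ipx = _mm_loadu_si128((const __m128i *)ips);
 *
 *	rte_lpm_lookupx4(lpm, ipx, hops, UINT32_MAX);
 *	// hops[i] now holds the 24-bit next hop for ips[i], or
 *	// UINT32_MAX (the default) when no route matched.
 */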