/*
 * Flag bits carried in the top byte of each tbl24/tbl8 entry:
 * bit 24 marks a successful lookup; bits 24-25 together mark a valid
 * entry that points into an external tbl8 group.
 */
#define RTE_LPM_LOOKUP_SUCCESS 0x01000000
#define RTE_LPM_VALID_EXT_ENTRY_BITMASK 0x03000000
18 const struct rte_lpm *lpm, xmm_t ip, uint32_t hop[4], uint32_t defv)
22 const uint32_t *tbl24_p = (
const uint32_t *)lpm->tbl24;
23 uint32_t tbl_entries[4] = {
24 tbl24_p[((uint32_t)ip[0]) >> 8],
25 tbl24_p[((uint32_t)ip[1]) >> 8],
26 tbl24_p[((uint32_t)ip[2]) >> 8],
27 tbl24_p[((uint32_t)ip[3]) >> 8],
29 vuint32m1_t vtbl_entry = __riscv_vle32_v_u32m1(tbl_entries, vl);
31 vbool32_t mask = __riscv_vmseq_vx_u32m1_b32(
32 __riscv_vand_vx_u32m1(vtbl_entry, RTE_LPM_VALID_EXT_ENTRY_BITMASK, vl),
33 RTE_LPM_VALID_EXT_ENTRY_BITMASK, vl);
35 vuint32m1_t vtbl8_index = __riscv_vsll_vx_u32m1(
36 __riscv_vadd_vv_u32m1(
37 __riscv_vsll_vx_u32m1(__riscv_vand_vx_u32m1(vtbl_entry, 0x00FFFFFF, vl), 8, vl),
38 __riscv_vand_vx_u32m1(
39 __riscv_vle32_v_u32m1((
const uint32_t *)&ip, vl), 0x000000FF, vl),
43 vtbl_entry = __riscv_vluxei32_v_u32m1_mu(
44 mask, vtbl_entry, (
const uint32_t *)(lpm->tbl8), vtbl8_index, vl);
46 vuint32m1_t vnext_hop = __riscv_vand_vx_u32m1(vtbl_entry, 0x00FFFFFF, vl);
47 mask = __riscv_vmseq_vx_u32m1_b32(
50 vnext_hop = __riscv_vmerge_vxm_u32m1(vnext_hop, defv, mask, vl);
52 __riscv_vse32_v_u32m1(hop, vnext_hop, vl);
static void rte_lpm_lookupx4(const struct rte_lpm *lpm, xmm_t ip, uint32_t hop[4], uint32_t defv)
#define RTE_LPM_LOOKUP_SUCCESS