/*
 * NOTE(review): extraction-garbled fragment of DPDK's rte_thash.h.  The
 * stray integers fused into the lines below ("28", "32", "40", ...) are
 * original source line numbers spliced into the text by the extraction
 * tool; intervening lines (e.g. the matching #endif) were lost.
 * TODO: restore this header from upstream DPDK.
 */
28 #include <rte_config.h> 32 #if defined(RTE_ARCH_X86) || defined(__ARM_NEON) 40 static const __m128i rte_thash_ipv6_bswap_mask = {
/* Shuffle control for _mm_shuffle_epi8: reverses the byte order inside
 * each 32-bit lane (a per-lane bswap32); used when loading IPv6 words. */
41 0x0405060700010203ULL, 0x0C0D0E0F08090A0BULL};
/* Subtuple lengths in 32-bit words for the soft-RSS helpers:
 * the *_L3_LEN variants are the tuple size minus the sctp_tag field,
 * the *_L4_LEN variants are the full tuple size.  The trailing
 * "union rte_thash_tuple {" opener lost its body to extraction.
 * (Stray numbers are extractor-spliced source line numbers.) */
48 #define RTE_THASH_V4_L3_LEN ((sizeof(struct rte_ipv4_tuple) - \ 49 sizeof(((struct rte_ipv4_tuple *)0)->sctp_tag)) / 4) 56 #define RTE_THASH_V4_L4_LEN ((sizeof(struct rte_ipv4_tuple)) / 4) 62 #define RTE_THASH_V6_L3_LEN ((sizeof(struct rte_ipv6_tuple) - \ 63 sizeof(((struct rte_ipv6_tuple *)0)->sctp_tag)) / 4) 70 #define RTE_THASH_V6_L4_LEN ((sizeof(struct rte_ipv6_tuple)) / 4) 107 union rte_thash_tuple {
/**
 * Convert an RSS key from network to host byte order, one 32-bit word
 * at a time, so it can be used with rte_softrss_be().
 *
 * NOTE(review): only the loop header survived extraction; the signature
 * and body statement were restored per upstream DPDK — verify against
 * the canonical rte_thash.h.
 *
 * @param orig
 *   Source key (network byte order).
 * @param targ
 *   Destination buffer, at least len bytes.
 * @param len
 *   Key length in bytes; processed as len/4 32-bit words.
 */
static inline void
rte_convert_rss_key(const uint32_t *orig, uint32_t *targ, int len)
{
	int i;

	for (i = 0; i < (len >> 2); i++)
		targ[i] = rte_be_to_cpu_32(orig[i]);
}
144 union rte_thash_tuple *targ)
147 __m128i ipv6 = _mm_loadu_si128((
const __m128i *)orig->
src_addr);
148 *(__m128i *)targ->v6.src_addr =
149 _mm_shuffle_epi8(ipv6, rte_thash_ipv6_bswap_mask);
150 ipv6 = _mm_loadu_si128((
const __m128i *)orig->
dst_addr);
151 *(__m128i *)targ->v6.dst_addr =
152 _mm_shuffle_epi8(ipv6, rte_thash_ipv6_bswap_mask);
153 #elif defined(__ARM_NEON) 154 uint8x16_t ipv6 = vld1q_u8((uint8_t
const *)orig->
src_addr);
155 vst1q_u8((uint8_t *)targ->v6.src_addr, vrev32q_u8(ipv6));
156 ipv6 = vld1q_u8((uint8_t
const *)orig->
dst_addr);
157 vst1q_u8((uint8_t *)targ->v6.dst_addr, vrev32q_u8(ipv6));
160 for (i = 0; i < 4; i++) {
161 *((uint32_t *)targ->v6.src_addr + i) =
163 *((uint32_t *)targ->v6.dst_addr + i) =
/**
 * Generic Toeplitz (software RSS) hash over a tuple of host-byte-order
 * 32-bit words, with the RSS key stored in network byte order (each key
 * word is converted with rte_cpu_to_be_32() before use).
 *
 * For every set bit i (LSB index via rte_bsf32()) of each input word j,
 * the 32-bit window of the converted key starting (31 - i) bits into
 * word j is XORed into the result.
 *
 * NOTE(review): reconstructed from a garbled extraction — the
 * `i = rte_bsf32(map);` line and the loop/function closings were missing
 * and were restored per upstream DPDK; verify against the canonical
 * header.
 *
 * @param input_tuple
 *   Array of input_len 32-bit words (host byte order).
 * @param input_len
 *   Number of 32-bit words in the tuple.
 * @param rss_key
 *   RSS key, at least (input_len + 1) * 4 bytes, network byte order.
 * @return
 *   Calculated Toeplitz hash value.
 */
static inline uint32_t
rte_softrss(uint32_t *input_tuple, uint32_t input_len,
		const uint8_t *rss_key)
{
	uint32_t i, j, map, ret = 0;

	for (j = 0; j < input_len; j++) {
		/* map &= (map - 1) clears the lowest set bit each pass. */
		for (map = input_tuple[j]; map; map &= (map - 1)) {
			i = rte_bsf32(map);
			ret ^= rte_cpu_to_be_32(((const uint32_t *)rss_key)[j]) << (31 - i) |
				(uint32_t)((uint64_t)(rte_cpu_to_be_32(((const uint32_t *)rss_key)[j + 1])) >>
				(i + 1));
		}
	}

	return ret;
}
/**
 * Optimized Toeplitz (software RSS) hash variant for an RSS key that is
 * already in host byte order (e.g. pre-converted once with
 * rte_convert_rss_key()), avoiding the per-access byte swap that
 * rte_softrss() performs.
 *
 * NOTE(review): reconstructed from a garbled extraction — the
 * `i = rte_bsf32(map);` line and the loop/function closings were missing
 * and were restored per upstream DPDK; verify against the canonical
 * header.
 *
 * @param input_tuple
 *   Array of input_len 32-bit words (host byte order).
 * @param input_len
 *   Number of 32-bit words in the tuple.
 * @param rss_key
 *   RSS key, at least (input_len + 1) * 4 bytes, host byte order.
 * @return
 *   Calculated Toeplitz hash value.
 */
static inline uint32_t
rte_softrss_be(uint32_t *input_tuple, uint32_t input_len,
		const uint8_t *rss_key)
{
	uint32_t i, j, map, ret = 0;

	for (j = 0; j < input_len; j++) {
		/* map &= (map - 1) clears the lowest set bit each pass. */
		for (map = input_tuple[j]; map; map &= (map - 1)) {
			i = rte_bsf32(map);
			ret ^= ((const uint32_t *)rss_key)[j] << (31 - i) |
				(uint32_t)((uint64_t)(((const uint32_t *)rss_key)[j + 1]) >> (i + 1));
		}
	}

	return ret;
}
/* Constants and opaque types for the predictable-RSS context API:
 * RETA_SZ_MIN/MAX bound the reta_sz argument of rte_thash_init_ctx()
 * (presumably log2 of the redirection-table size — TODO confirm against
 * upstream docs); the 0x1/0x2 values are ctx behavior flag bits.
 * rte_thash_ctx and rte_thash_subtuple_helper are forward-declared
 * opaque handles.  (Stray numbers are extractor-spliced line numbers.) */
227 #define RTE_THASH_RETA_SZ_MIN 2U 229 #define RTE_THASH_RETA_SZ_MAX 16U 235 #define RTE_THASH_IGNORE_PERIOD_OVERFLOW 0x1 240 #define RTE_THASH_MINIMAL_SEQ 0x2 243 struct rte_thash_ctx;
245 struct rte_thash_subtuple_helper;
/* NOTE(review): incomplete prototype fragments of the thash context API
 * (per the symbol index at the end of this chunk: rte_thash_init_ctx,
 * rte_thash_find_existing, rte_thash_get_helper,
 * rte_thash_get_complement, rte_thash_adjust_tuple and a
 * rte_thash_check_tuple_t callback type).  Most lines were lost in
 * extraction; restore the full declarations from upstream rte_thash.h. */
273 struct rte_thash_ctx *
275 uint8_t *key, uint32_t flags);
291 struct rte_thash_ctx *
350 struct rte_thash_subtuple_helper *
371 uint32_t hash, uint32_t desired_hash);
443 struct rte_thash_subtuple_helper *h,
444 uint8_t *tuple,
unsigned int tuple_len,
445 uint32_t desired_value,
unsigned int attempts,
int(* rte_thash_check_tuple_t)(void *userdata, uint8_t *tuple)
/* NOTE(review): the lines below are NOT source code — they are a
 * symbol index emitted by the extraction tool, one flattened signature
 * per symbol defined in or referenced by this header (the rte_eth_link
 * line belongs to an unrelated ethdev header).  Kept verbatim as a
 * reference; delete this index when the file is restored from upstream. */
static rte_be32_t rte_cpu_to_be_32(uint32_t x)
__rte_experimental int rte_thash_add_helper(struct rte_thash_ctx *ctx, const char *name, uint32_t len, uint32_t offset)
static void rte_convert_rss_key(const uint32_t *orig, uint32_t *targ, int len)
__rte_experimental int rte_thash_adjust_tuple(struct rte_thash_ctx *ctx, struct rte_thash_subtuple_helper *h, uint8_t *tuple, unsigned int tuple_len, uint32_t desired_value, unsigned int attempts, rte_thash_check_tuple_t fn, void *userdata)
static uint32_t rte_bsf32(uint32_t v)
static void rte_thash_load_v6_addrs(const struct rte_ipv6_hdr *orig, union rte_thash_tuple *targ)
static uint32_t rte_softrss(uint32_t *input_tuple, uint32_t input_len, const uint8_t *rss_key)
static uint32_t rte_be_to_cpu_32(rte_be32_t x)
__rte_experimental struct rte_thash_ctx * rte_thash_find_existing(const char *name)
static uint32_t rte_softrss_be(uint32_t *input_tuple, uint32_t input_len, const uint8_t *rss_key)
__extension__ struct rte_eth_link __rte_aligned(8)
__rte_experimental uint32_t rte_thash_get_complement(struct rte_thash_subtuple_helper *h, uint32_t hash, uint32_t desired_hash)
__rte_experimental const uint8_t * rte_thash_get_key(struct rte_thash_ctx *ctx)
__rte_experimental void rte_thash_free_ctx(struct rte_thash_ctx *ctx)
__rte_experimental struct rte_thash_ctx * rte_thash_init_ctx(const char *name, uint32_t key_len, uint32_t reta_sz, uint8_t *key, uint32_t flags)
__rte_experimental struct rte_thash_subtuple_helper * rte_thash_get_helper(struct rte_thash_ctx *ctx, const char *name)