/*
 * x86 GFNI/AVX-512 implementation of the Toeplitz (RSS) hash.
 *
 * NOTE(review): the leading numbers fused into each line look like leftover
 * line numbers from an extraction step, and several original lines appear to
 * be missing throughout this chunk — verify against the upstream file before
 * relying on this text.
 *
 * The RTE_THASH_*_MSK constants are 64-bit byte masks used with
 * _mm512_mask_permutexvar_epi8 below: the FIRST_ITER variants leave the
 * low-order lanes partially unselected (0x...0e0c08 / 0x...e0c080) while the
 * steady-state PERM variants select a uniform nibble pattern; the _2 variants
 * are the same patterns shifted into the high nibbles for the secondary
 * tuple.  REWIND_MSK selects the trailing bytes carried over between 64-byte
 * chunks.
 */
5 #ifndef _RTE_THASH_X86_GFNI_H_ 6 #define _RTE_THASH_X86_GFNI_H_ 16 #include <rte_compat.h> 23 #if defined(__GFNI__) && defined(__AVX512F__) 24 #define RTE_THASH_GFNI_DEFINED 26 #define RTE_THASH_FIRST_ITER_MSK 0x0f0f0f0f0f0e0c08 27 #define RTE_THASH_PERM_MSK 0x0f0f0f0f0f0f0f0f 28 #define RTE_THASH_FIRST_ITER_MSK_2 0xf0f0f0f0f0e0c080 29 #define RTE_THASH_PERM_MSK_2 0xf0f0f0f0f0f0f0f0 30 #define RTE_THASH_REWIND_MSK 0x0000000000113377 34 __rte_thash_xor_reduce(__m512i xor_acc, uint32_t *val_1, uint32_t *val_2)
/*
 * XOR-reduce a 512-bit accumulator down to two 32-bit hash values.
 * Folds 512 -> 256 -> 128 bits with vector XORs, then extracts and XORs the
 * remaining scalar words.  *val_1 receives the primary tuple's hash and
 * *val_2 the secondary tuple's hash.
 */
36 __m256i tmp_256_1, tmp_256_2;
37 __m128i tmp128_1, tmp128_2;
/* Fold the two 256-bit halves of the accumulator together. */
39 tmp_256_1 = _mm512_castsi512_si256(xor_acc);
40 tmp_256_2 = _mm512_extracti32x8_epi32(xor_acc, 1);
41 tmp_256_1 = _mm256_xor_si256(tmp_256_1, tmp_256_2);
/* Fold the two 128-bit halves of the 256-bit result together. */
43 tmp128_1 = _mm256_castsi256_si128(tmp_256_1);
44 tmp128_2 = _mm256_extracti32x4_epi32(tmp_256_1, 1);
45 tmp128_1 = _mm_xor_si128(tmp128_1, tmp128_2);
/* 64-bit build: extract two 64-bit lanes and fold them in scalar code. */
47 #ifdef RTE_ARCH_X86_64 48 uint64_t tmp_1, tmp_2;
49 tmp_1 = _mm_extract_epi64(tmp128_1, 0);
50 tmp_2 = _mm_extract_epi64(tmp128_1, 1);
/*
 * NOTE(review): a "tmp_1 ^= tmp_2;" statement appears to be missing here —
 * without it the high 64-bit lane extracted into tmp_2 is unused.  Confirm
 * against the upstream source.
 */
53 *val_1 = (uint32_t)tmp_1;
54 *val_2 = (uint32_t)(tmp_1 >> 32);
/*
 * 32-bit build: extract four 32-bit lanes and fold them pairwise.
 * NOTE(review): the "#else" that should precede this branch, and the final
 * "*val_1 = ...; *val_2 = ...;" stores plus "#endif", are not visible in this
 * chunk — presumably lost in extraction; verify upstream.
 */
56 uint32_t tmp_1, tmp_2;
57 tmp_1 = _mm_extract_epi32(tmp128_1, 0);
58 tmp_2 = _mm_extract_epi32(tmp128_1, 1);
59 tmp_1 ^= _mm_extract_epi32(tmp128_1, 2);
60 tmp_2 ^= _mm_extract_epi32(tmp128_1, 3);
/*
 * Core GFNI Toeplitz-hash kernel.
 *
 * Computes the hash of @tuple (and, optionally, @secondary_tuple in the odd
 * byte lanes of the same vectors) against the Toeplitz key expanded into the
 * GF(2) affine matrices pointed to by @mtrx.  Returns (in the original file)
 * the 512-bit XOR accumulator; callers reduce it with
 * __rte_thash_xor_reduce().
 *
 * NOTE(review): several original lines are missing from this chunk (the
 * function's return type/opening brace, the "chunk_len" assignment inside the
 * outer loop, the secondary-tuple load's second argument, closing braces and
 * the final return) — comments below describe only what is visible.
 */
69 __rte_thash_gfni(
const uint64_t *mtrx,
const uint8_t *tuple,
70 const uint8_t *secondary_tuple,
int len)
/*
 * permute_idx selects, for each output byte, a sliding 4-byte window of the
 * input tuple (each 32-bit lane holds four consecutive byte indices).  The
 * 0xFF/0xFE/0xFD entries in the low lanes reference bytes "before" the start
 * of the current chunk — the 3 prepended bytes required by the Toeplitz
 * sliding window (see "prepend" below).
 */
72 __m512i permute_idx = _mm512_set_epi32(0x07060504, 0x07060504,
73 0x06050403, 0x06050403,
74 0x05040302, 0x05040302,
75 0x04030201, 0x04030201,
76 0x03020100, 0x03020100,
77 0x020100FF, 0x020100FF,
78 0x0100FFFE, 0x0100FFFE,
79 0x00FFFEFD, 0x00FFFEFD);
/*
 * rewind_idx (with rewind_mask) pulls the last bytes of the previous 64-byte
 * chunk (byte indices 0x39..0x3B) into the "prepend" positions of the next
 * iteration, so the sliding window is continuous across chunk boundaries.
 */
80 const __m512i rewind_idx = _mm512_set_epi32(0x00000000, 0x00000000,
81 0x00000000, 0x00000000,
82 0x00000000, 0x00000000,
83 0x00000000, 0x00000000,
84 0x00000000, 0x00000000,
85 0x0000003B, 0x0000003B,
86 0x00003B3A, 0x00003B3A,
87 0x003B3A39, 0x003B3A39);
88 const __mmask64 rewind_mask = RTE_THASH_REWIND_MSK;
/* Added to permute_idx each iteration to advance the window by 8 bytes. */
89 const __m512i shift_8 = _mm512_set1_epi8(8);
90 __m512i xor_acc = _mm512_setzero_si512();
91 __m512i perm_bytes = _mm512_setzero_si512();
92 __m512i vals, matrixes, tuple_bytes_2;
93 __m512i tuple_bytes = _mm512_setzero_si512();
94 __mmask64 load_mask, permute_mask_2;
95 __mmask64 permute_mask = 0;
96 int chunk_len = 0, i = 0;
/* Toeplitz needs 3 bytes of look-back ahead of each chunk. */
98 const int prepend = 3;
/* Process the tuple in 64-byte chunks. */
100 for (; len > 0; len -= 64, tuple += 64) {
/* Carry the tail of the previous chunk into the prepend positions. */
102 perm_bytes = _mm512_maskz_permutexvar_epi8(rewind_mask,
103 rewind_idx, perm_bytes);
105 permute_mask = RTE_THASH_FIRST_ITER_MSK;
/* Masked load so a short final chunk reads only valid bytes. */
106 load_mask = (len >= 64) ? UINT64_MAX : ((1ULL << len) - 1);
107 tuple_bytes = _mm512_maskz_loadu_epi8(load_mask, tuple);
/* Secondary tuple (if any) occupies the complementary byte lanes. */
108 if (secondary_tuple) {
109 permute_mask_2 = RTE_THASH_FIRST_ITER_MSK_2;
110 tuple_bytes_2 = _mm512_maskz_loadu_epi8(load_mask,
/*
 * Inner loop: each iteration gathers the next 8 sliding windows and
 * multiplies them by 8 key matrices (one 512-bit GF(2) affine op).
 * NOTE(review): "chunk_len" is used below but its assignment is not
 * visible in this chunk — presumably set from len/load_mask upstream.
 */
115 for (i = 0; i < ((chunk_len + prepend) / 8); i++, mtrx += 8) {
116 perm_bytes = _mm512_mask_permutexvar_epi8(perm_bytes,
117 permute_mask, permute_idx, tuple_bytes);
121 _mm512_mask_permutexvar_epi8(perm_bytes,
122 permute_mask_2, permute_idx,
/* 8 x 64-bit rows of the expanded Toeplitz key. */
125 matrixes = _mm512_maskz_loadu_epi64(UINT8_MAX, mtrx);
/* GF(2) affine transform == 64 parallel bit-matrix multiplies. */
126 vals = _mm512_gf2p8affine_epi64_epi8(perm_bytes,
129 xor_acc = _mm512_xor_si512(xor_acc, vals);
/* Advance every window by 8 bytes for the next iteration. */
130 permute_idx = _mm512_add_epi8(permute_idx, shift_8);
131 permute_mask = RTE_THASH_PERM_MSK;
133 permute_mask_2 = RTE_THASH_PERM_MSK_2;
/* Tail: handle the remaining (chunk_len + prepend) % 8 matrix rows. */
137 int rest_len = (chunk_len + prepend) % 8;
139 mtrx_msk = (1 << (rest_len % 8)) - 1;
140 matrixes = _mm512_maskz_loadu_epi64(mtrx_msk, mtrx);
142 perm_bytes = _mm512_maskz_permutexvar_epi8(rewind_mask,
143 rewind_idx, perm_bytes);
145 perm_bytes = _mm512_mask_permutexvar_epi8(perm_bytes,
146 permute_mask, permute_idx, tuple_bytes);
150 _mm512_mask_permutexvar_epi8(
151 perm_bytes, permute_mask_2,
152 permute_idx, tuple_bytes_2);
155 vals = _mm512_gf2p8affine_epi64_epi8(perm_bytes, matrixes, 0);
156 xor_acc = _mm512_xor_si512(xor_acc, vals);
/*
 * Compute the Toeplitz hash of a single tuple.
 *
 * @m     expanded Toeplitz key matrices (see __rte_thash_gfni()).
 * @tuple input bytes to hash.
 * @len   tuple length in bytes.
 *
 * Runs the kernel with no secondary tuple; val_zero receives the unused
 * second lane's result and is discarded.
 * NOTE(review): the "return val;" and closing brace are past the end of the
 * visible chunk.
 */
176 static inline uint32_t
177 rte_thash_gfni(
const uint64_t *m,
const uint8_t *tuple,
int len)
179 uint32_t val, val_zero;
181 __m512i xor_acc = __rte_thash_gfni(m, tuple, NULL, len);
182 __rte_thash_xor_reduce(xor_acc, &val, &val_zero);
/*
 * Compute Toeplitz hashes for an array of tuples.
 *
 * @mtrx  expanded Toeplitz key matrices.
 * @len   length of each tuple in bytes (all tuples share one length).
 * @tuple array of @num tuple pointers.
 * @val   output array receiving @num 32-bit hashes.
 * @num   number of tuples.
 *
 * Tuples are processed two at a time — the kernel hashes a pair per pass by
 * packing them into complementary byte lanes.  An odd trailing tuple is
 * handled alone with the second result discarded.
 * NOTE(review): the function's return type, opening brace and the end of the
 * odd-tuple branch are not visible in this chunk.
 */
205 rte_thash_gfni_bulk(
const uint64_t *mtrx,
int len, uint8_t *tuple[],
206 uint32_t val[], uint32_t num)
/* Pairwise pass over the even count of tuples. */
212 for (i = 0; i != (num & ~1); i += 2) {
213 xor_acc = __rte_thash_gfni(mtrx, tuple[i], tuple[i + 1], len);
214 __rte_thash_xor_reduce(xor_acc, val + i, val + i + 1);
/* Odd remainder: hash the last tuple alone, discard the second lane. */
218 xor_acc = __rte_thash_gfni(mtrx, tuple[i], NULL, len);
219 __rte_thash_xor_reduce(xor_acc, val + i, &val_zero);
static unsigned int rte_popcount64(uint64_t v)