#define RTE_ACL_MAX_CATEGORIES 16

#define RTE_ACL_RESULTS_MULTIPLIER (XMM_SIZE / sizeof(uint32_t))

#define RTE_ACL_MAX_LEVELS 64
#define RTE_ACL_MAX_FIELDS 64
union rte_acl_field_types {
	uint8_t u8; uint16_t u16; uint32_t u32; uint64_t u64;
};

enum {
	RTE_ACL_FIELD_TYPE_MASK = 0,
	RTE_ACL_FIELD_TYPE_RANGE,
	RTE_ACL_FIELD_TYPE_BITMASK
};
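As a hedged illustration of how a rule's (value, mask_range) pair is filled in for each field type: the variable names below are purely illustrative, and the concrete numbers follow the conventions documented for struct rte_acl_field.

#include <rte_acl.h>

/* MASK field: 1.2.3.4/32 -> value = address, mask_range = prefix length. */
struct rte_acl_field ip_dst_match = {
	.value.u32 = 0x01020304, .mask_range.u32 = 32 };

/* RANGE field: any port, 0..65535 -> value = low bound, mask_range = high. */
struct rte_acl_field any_port = {
	.value.u16 = 0, .mask_range.u16 = 65535 };

/* BITMASK field: protocol 0x06/0xff -> value = 6, mask_range = 0xff. */
struct rte_acl_field tcp_proto = {
	.value.u8 = 6, .mask_range.u8 = 0xff };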
enum {
	RTE_ACL_TYPE_SHIFT = 29,
	RTE_ACL_MAX_INDEX = RTE_LEN2MASK(RTE_ACL_TYPE_SHIFT, uint32_t),
	RTE_ACL_MAX_PRIORITY = RTE_ACL_MAX_INDEX,
	RTE_ACL_MIN_PRIORITY = 1,
};
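Since RTE_LEN2MASK(ln, tp) yields a mask of the ln lowest bits, RTE_ACL_MAX_INDEX and RTE_ACL_MAX_PRIORITY both evaluate to 0x1fffffff (2^29 - 1), so rule priorities must lie in [1, 0x1fffffff]. A small compile-time check, assuming the usual DPDK include paths:

#include <limits.h>
#include <rte_acl.h>

/* 2^29 - 1: the highest rule index/priority the library accepts. */
_Static_assert(RTE_ACL_MAX_PRIORITY == 0x1fffffff,
	"RTE_ACL_MAX_PRIORITY is (1u << RTE_ACL_TYPE_SHIFT) - 1");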
#define RTE_ACL_INVALID_USERDATA 0

#define RTE_ACL_MASKLEN_TO_BITMASK(v, s) \
	((v) == 0 ? (v) : (typeof(v))((uint64_t)-1 << ((s) * CHAR_BIT - (v))))
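A worked example of the macro above, wrapped in a hypothetical helper: for a 4-byte field, a /24 prefix length expands to 0xffffff00, and a prefix length of 0 stays 0 (wildcard).

#include <stdint.h>
#include <rte_acl.h>

/* prefix_to_mask(24) -> 0xffffff00, prefix_to_mask(0) -> 0 (match anything). */
static inline uint32_t
prefix_to_mask(uint32_t masklen)
{
	return RTE_ACL_MASKLEN_TO_BITMASK(masklen, sizeof(uint32_t));
}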
#define RTE_ACL_RULE_DEF(name, fld_num) struct name {\
	struct rte_acl_rule_data data;               \
	struct rte_acl_field field[fld_num];         \
}

#define RTE_ACL_RULE_SZ(fld_num) \
	(sizeof(struct rte_acl_rule) + sizeof(struct rte_acl_field) * (fld_num))
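A minimal sketch of how an application typically instantiates these two macros. The rule layout name (acl_ipv4_rule) and the field count of 5 are assumptions for illustration, not fixed by the API.

#include <stdint.h>
#include <stdlib.h>
#include <rte_acl.h>

/* Hypothetical rule layout with 5 match fields. */
RTE_ACL_RULE_DEF(acl_ipv4_rule, 5);

/* RTE_ACL_RULE_SZ(5) is the byte size of one such rule; it is also the
 * value to pass as rte_acl_param.rule_size when creating the context. */
static struct acl_ipv4_rule *
alloc_rules(uint32_t n)
{
	return calloc(n, RTE_ACL_RULE_SZ(5));
}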
#define RTE_ACL_NAMESIZE 32
enum rte_acl_classify_alg {
	RTE_ACL_CLASSIFY_DEFAULT = 0,
	/* remaining algorithm values elided */
};

int rte_acl_classify(const struct rte_acl_ctx *ctx, const uint8_t **data,
	uint32_t *results, uint32_t num, uint32_t categories);

int rte_acl_classify_alg(const struct rte_acl_ctx *ctx, const uint8_t **data,
	uint32_t *results, uint32_t num, uint32_t categories,
	enum rte_acl_classify_alg alg);

int rte_acl_set_ctx_classify(struct rte_acl_ctx *ctx,
	enum rte_acl_classify_alg alg);
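A hedged usage sketch for the classification entry points above: classify_burst is an assumed helper name, ctx is presumed already built, and each data[i] must point at a search key laid out according to the field definitions supplied at build time.

#include <stdint.h>
#include <rte_acl.h>

/* Classify 'num' search keys in one call against a built context.
 * With a single category, results[i] receives the matching rule's
 * userdata, or RTE_ACL_INVALID_USERDATA (0) if no rule matched. */
static int
classify_burst(const struct rte_acl_ctx *ctx, const uint8_t *data[],
	       uint32_t results[], uint32_t num)
{
	return rte_acl_classify(ctx, data, results, num, 1);
}

rte_acl_set_ctx_classify() can be called beforehand to pin the context to a specific algorithm from enum rte_acl_classify_alg instead of the default selection.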
union rte_acl_field_types value
struct rte_acl_ctx * rte_acl_create(const struct rte_acl_param *param)
void rte_acl_dump(const struct rte_acl_ctx *ctx)
void rte_acl_reset(struct rte_acl_ctx *ctx)
int rte_acl_build(struct rte_acl_ctx *ctx, const struct rte_acl_config *cfg)
struct rte_acl_field_def defs[RTE_ACL_MAX_FIELDS]
#define RTE_LEN2MASK(ln, tp)
void rte_acl_list_dump(void)
void rte_acl_reset_rules(struct rte_acl_ctx *ctx)
void rte_acl_free(struct rte_acl_ctx *ctx)
struct rte_acl_ctx * rte_acl_find_existing(const char *name)
union rte_acl_field_types mask_range
#define RTE_ACL_RULE_DEF(name, fld_num)
int rte_acl_add_rules(struct rte_acl_ctx *ctx, const struct rte_acl_rule *rules, uint32_t num)
int rte_acl_classify(const struct rte_acl_ctx *ctx, const uint8_t **data, uint32_t *results, uint32_t num, uint32_t categories)
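Pulling the declarations together, here is a minimal end-to-end sketch of the usual call sequence: rte_acl_create, rte_acl_add_rules, rte_acl_build, rte_acl_classify, rte_acl_free. The key layout (struct acl_key), the field definitions, the context name and the rule values are illustrative assumptions, not requirements of the API, and error handling is abbreviated.

#include <stddef.h>
#include <stdint.h>
#include <string.h>
#include <rte_acl.h>
#include <rte_memory.h>		/* SOCKET_ID_ANY */

/* Illustrative search key: one leading byte, then a 4-byte group, matching
 * the library's expectation that input fields come as an initial byte plus
 * sets of 4 consecutive bytes. ip_dst is stored as it appears on the wire
 * (most significant byte first). */
struct acl_key {
	uint8_t  proto;
	uint8_t  pad[3];
	uint32_t ip_dst;
};

enum { PROTO_FIELD, DST_FIELD, NUM_FIELDS };

RTE_ACL_RULE_DEF(acl_rule, NUM_FIELDS);

static const struct rte_acl_field_def field_defs[NUM_FIELDS] = {
	{ .type = RTE_ACL_FIELD_TYPE_BITMASK, .size = sizeof(uint8_t),
	  .field_index = PROTO_FIELD, .input_index = 0,
	  .offset = offsetof(struct acl_key, proto), },
	{ .type = RTE_ACL_FIELD_TYPE_MASK, .size = sizeof(uint32_t),
	  .field_index = DST_FIELD, .input_index = 1,
	  .offset = offsetof(struct acl_key, ip_dst), },
};

static int
build_and_classify(const uint8_t *keys[], uint32_t results[], uint32_t num)
{
	const struct rte_acl_param param = {
		.name = "example_acl",			/* illustrative name */
		.socket_id = SOCKET_ID_ANY,
		.rule_size = RTE_ACL_RULE_SZ(NUM_FIELDS),
		.max_rule_num = 8,
	};
	struct rte_acl_ctx *ctx = rte_acl_create(&param);
	struct rte_acl_config cfg;
	struct acl_rule rule;
	int ret = -1;

	if (ctx == NULL)
		return -1;

	/* One rule: TCP traffic to 10.0.0.0/8 -> userdata 1 in category 0. */
	memset(&rule, 0, sizeof(rule));
	rule.data.category_mask = 1;
	rule.data.priority = RTE_ACL_MAX_PRIORITY;
	rule.data.userdata = 1;
	rule.field[PROTO_FIELD].value.u8 = 6;		/* TCP */
	rule.field[PROTO_FIELD].mask_range.u8 = 0xff;
	rule.field[DST_FIELD].value.u32 = 0x0a000000;	/* 10.0.0.0 */
	rule.field[DST_FIELD].mask_range.u32 = 8;	/* /8 prefix length */

	if (rte_acl_add_rules(ctx, (const struct rte_acl_rule *)&rule, 1) != 0)
		goto out;

	memset(&cfg, 0, sizeof(cfg));
	cfg.num_categories = 1;
	cfg.num_fields = NUM_FIELDS;
	memcpy(cfg.defs, field_defs, sizeof(field_defs));
	if (rte_acl_build(ctx, &cfg) != 0)
		goto out;

	/* results[i] is the matching rule's userdata, or
	 * RTE_ACL_INVALID_USERDATA (0) when nothing matches keys[i]. */
	ret = rte_acl_classify(ctx, keys, results, num, 1);
out:
	rte_acl_free(ctx);
	return ret;
}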