DPDK 25.03.0-rc0
rte_spinlock.h
/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2010-2014 Intel Corporation
 */

#ifndef _RTE_SPINLOCK_H_
#define _RTE_SPINLOCK_H_

#include <rte_lcore.h>
#ifdef RTE_FORCE_INTRINSICS
#include <rte_common.h>
#endif
#include <rte_lock_annotations.h>
#include <rte_pause.h>
#include <rte_stdatomic.h>

#ifdef __cplusplus
extern "C" {
#endif

typedef struct __rte_lockable {
	volatile RTE_ATOMIC(int) locked; /**< lock status: 0 = unlocked, 1 = locked */
} rte_spinlock_t;

#define RTE_SPINLOCK_INITIALIZER { 0 }

static inline void
rte_spinlock_init(rte_spinlock_t *sl)
{
	sl->locked = 0;
}

static inline void
rte_spinlock_lock(rte_spinlock_t *sl)
	__rte_exclusive_lock_function(sl);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_spinlock_lock(rte_spinlock_t *sl)
	__rte_no_thread_safety_analysis
{
	int exp = 0;

	while (!rte_atomic_compare_exchange_strong_explicit(&sl->locked, &exp, 1,
			rte_memory_order_acquire, rte_memory_order_relaxed)) {
		rte_wait_until_equal_32((volatile uint32_t *)(uintptr_t)&sl->locked,
			0, rte_memory_order_relaxed);
		exp = 0;
	}
}
#endif

static inline void
rte_spinlock_unlock(rte_spinlock_t *sl)
	__rte_unlock_function(sl);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_spinlock_unlock(rte_spinlock_t *sl)
	__rte_no_thread_safety_analysis
{
	rte_atomic_store_explicit(&sl->locked, 0, rte_memory_order_release);
}
#endif

__rte_warn_unused_result
static inline int
rte_spinlock_trylock(rte_spinlock_t *sl)
	__rte_exclusive_trylock_function(1, sl);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_spinlock_trylock(rte_spinlock_t *sl)
	__rte_no_thread_safety_analysis
{
	int exp = 0;
	return rte_atomic_compare_exchange_strong_explicit(&sl->locked, &exp, 1,
			rte_memory_order_acquire, rte_memory_order_relaxed);
}
#endif

static inline int
rte_spinlock_is_locked(rte_spinlock_t *sl)
{
	return rte_atomic_load_explicit(&sl->locked, rte_memory_order_acquire);
}

static inline int rte_tm_supported(void);

static inline void
rte_spinlock_lock_tm(rte_spinlock_t *sl)
	__rte_exclusive_lock_function(sl);

static inline void
rte_spinlock_unlock_tm(rte_spinlock_t *sl)
	__rte_unlock_function(sl);

__rte_warn_unused_result
static inline int
rte_spinlock_trylock_tm(rte_spinlock_t *sl)
	__rte_exclusive_trylock_function(1, sl);

typedef struct {
	rte_spinlock_t sl; /**< the actual spinlock */
	volatile int user; /**< core id using lock, -1 for unused */
	volatile int count; /**< count of times this lock has been taken */
} rte_spinlock_recursive_t;

#define RTE_SPINLOCK_RECURSIVE_INITIALIZER {RTE_SPINLOCK_INITIALIZER, -1, 0}

static inline void rte_spinlock_recursive_init(rte_spinlock_recursive_t *slr)
{
	rte_spinlock_init(&slr->sl);
	slr->user = -1;
	slr->count = 0;
}

static inline void rte_spinlock_recursive_lock(rte_spinlock_recursive_t *slr)
	__rte_no_thread_safety_analysis
{
	int id = rte_gettid();

	if (slr->user != id) {
		rte_spinlock_lock(&slr->sl);
		slr->user = id;
	}
	slr->count++;
}

static inline void rte_spinlock_recursive_unlock(rte_spinlock_recursive_t *slr)
	__rte_no_thread_safety_analysis
{
	if (--(slr->count) == 0) {
		slr->user = -1;
		rte_spinlock_unlock(&slr->sl);
	}
}

__rte_warn_unused_result
static inline int rte_spinlock_recursive_trylock(rte_spinlock_recursive_t *slr)
	__rte_no_thread_safety_analysis
{
	int id = rte_gettid();

	if (slr->user != id) {
		if (rte_spinlock_trylock(&slr->sl) == 0)
			return 0;
		slr->user = id;
	}
	slr->count++;
	return 1;
}

static inline void rte_spinlock_recursive_lock_tm(
	rte_spinlock_recursive_t *slr);

static inline void rte_spinlock_recursive_unlock_tm(
	rte_spinlock_recursive_t *slr);

__rte_warn_unused_result
static inline int rte_spinlock_recursive_trylock_tm(
	rte_spinlock_recursive_t *slr);

#ifdef __cplusplus
}
#endif

#endif /* _RTE_SPINLOCK_H_ */
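
A minimal usage sketch of the plain spinlock API above. The shared counter and the helper functions are illustrative only, not part of this header; they assume the callers run concurrently on different lcores or threads.

#include <stdint.h>
#include <rte_spinlock.h>

/* Illustrative shared state; the names below are examples, not DPDK APIs. */
static rte_spinlock_t counter_lock = RTE_SPINLOCK_INITIALIZER;
static uint64_t counter;

static void
counter_increment(void)
{
	rte_spinlock_lock(&counter_lock);   /* spins until the lock is acquired */
	counter++;
	rte_spinlock_unlock(&counter_lock);
}

static int
counter_try_increment(void)
{
	/* rte_spinlock_trylock() returns 1 on success, 0 if the lock is busy. */
	if (rte_spinlock_trylock(&counter_lock) == 0)
		return 0;
	counter++;
	rte_spinlock_unlock(&counter_lock);
	return 1;
}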
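
A similar sketch for the recursive spinlock: the owning thread, identified via rte_gettid(), may re-acquire the lock, and it is only released once the nesting count returns to zero. The function names are again illustrative.

#include <rte_spinlock.h>

static rte_spinlock_recursive_t rlock = RTE_SPINLOCK_RECURSIVE_INITIALIZER;

static void
leaf_update(void)
{
	rte_spinlock_recursive_lock(&rlock);   /* re-entry by the owner only bumps count */
	/* ... modify shared state ... */
	rte_spinlock_recursive_unlock(&rlock);
}

static void
composite_update(void)
{
	rte_spinlock_recursive_lock(&rlock);
	leaf_update();                         /* no deadlock: same thread already owns the lock */
	rte_spinlock_recursive_unlock(&rlock); /* actual release happens when count drops to 0 */
}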
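
Finally, a sketch of the transactional-memory variants. They are used exactly like the plain lock/unlock calls; on CPUs where rte_tm_supported() reports hardware transactional memory, the critical section may run as a hardware transaction, otherwise the regular spinlock is taken. The update function here is an assumption for illustration.

#include <stdint.h>
#include <rte_spinlock.h>

static rte_spinlock_t stats_lock = RTE_SPINLOCK_INITIALIZER;
static uint64_t stats_counter;

static void
stats_bump(void)
{
	/* Falls back to taking the spinlock when hardware transactional memory
	 * is unavailable or the transaction aborts. */
	rte_spinlock_lock_tm(&stats_lock);
	stats_counter++;
	rte_spinlock_unlock_tm(&stats_lock);
}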