DPDK  24.03.0-rc4
rte_spinlock.h
Go to the documentation of this file.
1 /* SPDX-License-Identifier: BSD-3-Clause
2  * Copyright(c) 2010-2014 Intel Corporation
3  */
4 
5 #ifndef _RTE_SPINLOCK_H_
6 #define _RTE_SPINLOCK_H_
7 
20 #include <rte_lcore.h>
21 #ifdef RTE_FORCE_INTRINSICS
22 #include <rte_common.h>
23 #endif
24 #include <rte_lock_annotations.h>
25 #include <rte_pause.h>
26 #include <rte_stdatomic.h>
27 
31 typedef struct __rte_lockable {
32  volatile RTE_ATOMIC(int) locked;
34 
/**
 * A static spinlock initializer.
 */
#define RTE_SPINLOCK_INITIALIZER { 0 }
39 
46 static inline void
48 {
49  sl->locked = 0;
50 }
51 
58 static inline void
60  __rte_exclusive_lock_function(sl);
61 
#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_spinlock_lock(rte_spinlock_t *sl)
	__rte_no_thread_safety_analysis
{
	int exp = 0;

	/*
	 * Spin until the 0 -> 1 compare-exchange succeeds. Between attempts,
	 * wait for the lock word to return to 0 instead of hammering the CAS.
	 */
	while (!rte_atomic_compare_exchange_strong_explicit(&sl->locked, &exp, 1,
			rte_memory_order_acquire, rte_memory_order_relaxed)) {
		rte_wait_until_equal_32((volatile uint32_t *)(uintptr_t)&sl->locked,
			       0, rte_memory_order_relaxed);
		exp = 0; /* CAS overwrote exp with the observed value; reset it */
	}
}
#endif
77 
84 static inline void
86  __rte_unlock_function(sl);
87 
#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_spinlock_unlock(rte_spinlock_t *sl)
	__rte_no_thread_safety_analysis
{
	/* Release store: publishes all writes in the critical section. */
	rte_atomic_store_explicit(&sl->locked, 0, rte_memory_order_release);
}
#endif
96 
106 static inline int
108  __rte_exclusive_trylock_function(1, sl);
109 
#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_spinlock_trylock(rte_spinlock_t *sl)
	__rte_no_thread_safety_analysis
{
	int exp = 0;

	/* Single CAS attempt; returns 1 on success, 0 if already locked. */
	return rte_atomic_compare_exchange_strong_explicit(&sl->locked, &exp, 1,
			rte_memory_order_acquire, rte_memory_order_relaxed);
}
#endif
120 
129 static inline int rte_spinlock_is_locked (rte_spinlock_t *sl)
130 {
131  return rte_atomic_load_explicit(&sl->locked, rte_memory_order_acquire);
132 }
133 
/**
 * Test if hardware transactional memory (lock elision) is supported.
 *
 * @return
 *   1 if the hardware transactional memory is supported; 0 otherwise.
 */
static inline int rte_tm_supported(void);
141 
155 static inline void
157  __rte_exclusive_lock_function(sl);
158 
166 static inline void
168  __rte_unlock_function(sl);
169 
187 static inline int
189  __rte_exclusive_trylock_function(1, sl);
190 
194 typedef struct {
196  volatile int user;
197  volatile int count;
199 
/**
 * A static recursive spinlock initializer.
 */
#define RTE_SPINLOCK_RECURSIVE_INITIALIZER {RTE_SPINLOCK_INITIALIZER, -1, 0}
204 
212 {
213  rte_spinlock_init(&slr->sl);
214  slr->user = -1;
215  slr->count = 0;
216 }
217 
225  __rte_no_thread_safety_analysis
226 {
227  int id = rte_gettid();
228 
229  if (slr->user != id) {
230  rte_spinlock_lock(&slr->sl);
231  slr->user = id;
232  }
233  slr->count++;
234 }
242  __rte_no_thread_safety_analysis
243 {
244  if (--(slr->count) == 0) {
245  slr->user = -1;
246  rte_spinlock_unlock(&slr->sl);
247  }
248 
249 }
250 
261  __rte_no_thread_safety_analysis
262 {
263  int id = rte_gettid();
264 
265  if (slr->user != id) {
266  if (rte_spinlock_trylock(&slr->sl) == 0)
267  return 0;
268  slr->user = id;
269  }
270  slr->count++;
271  return 1;
272 }
273 
274 
288 static inline void rte_spinlock_recursive_lock_tm(
290 
298 static inline void rte_spinlock_recursive_unlock_tm(
300 
318 static inline int rte_spinlock_recursive_trylock_tm(
320 
321 #endif /* _RTE_SPINLOCK_H_ */
static __rte_warn_unused_result int rte_spinlock_trylock_tm(rte_spinlock_t *sl)
static void rte_spinlock_recursive_lock_tm(rte_spinlock_recursive_t *slr)
static void rte_spinlock_lock(rte_spinlock_t *sl)
static __rte_always_inline void rte_wait_until_equal_32(volatile uint32_t *addr, uint32_t expected, rte_memory_order memorder)
Definition: rte_pause.h:91
static void rte_spinlock_recursive_unlock(rte_spinlock_recursive_t *slr) __rte_no_thread_safety_analysis
Definition: rte_spinlock.h:241
static __rte_warn_unused_result int rte_spinlock_recursive_trylock_tm(rte_spinlock_recursive_t *slr)
static void rte_spinlock_recursive_init(rte_spinlock_recursive_t *slr)
Definition: rte_spinlock.h:211
static void rte_spinlock_unlock(rte_spinlock_t *sl)
static void rte_spinlock_unlock_tm(rte_spinlock_t *sl)
static __rte_warn_unused_result int rte_spinlock_recursive_trylock(rte_spinlock_recursive_t *slr) __rte_no_thread_safety_analysis
Definition: rte_spinlock.h:260
static void rte_spinlock_init(rte_spinlock_t *sl)
Definition: rte_spinlock.h:47
static int rte_tm_supported(void)
static __rte_warn_unused_result int rte_spinlock_trylock(rte_spinlock_t *sl)
static void rte_spinlock_recursive_unlock_tm(rte_spinlock_recursive_t *slr)
static int rte_spinlock_is_locked(rte_spinlock_t *sl)
Definition: rte_spinlock.h:129
static void rte_spinlock_recursive_lock(rte_spinlock_recursive_t *slr) __rte_no_thread_safety_analysis
Definition: rte_spinlock.h:224
static void rte_spinlock_lock_tm(rte_spinlock_t *sl)
static int rte_gettid(void)
Definition: rte_eal.h:438
#define __rte_warn_unused_result
Definition: rte_common.h:346