DPDK  16.11.11
rte_atomic.h
/*-
 *   BSD LICENSE
 *
 *   Copyright(c) 2010-2014 Intel Corporation. All rights reserved.
 *   All rights reserved.
 *
 *   Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions
 *   are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in
 *       the documentation and/or other materials provided with the
 *       distribution.
 *     * Neither the name of Intel Corporation nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 *   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 *   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 *   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 *   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 *   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef _RTE_ATOMIC_H_
#define _RTE_ATOMIC_H_

#include <stdint.h>
#include <rte_common.h>

#ifdef __DOXYGEN__

/** General memory barrier: forces ordering of both loads and stores. */
static inline void rte_mb(void);

/** Write memory barrier: forces ordering of stores. */
static inline void rte_wmb(void);

/** Read memory barrier: forces ordering of loads. */
static inline void rte_rmb(void);

/** General memory barrier between lcores (SMP-scoped rte_mb). */
static inline void rte_smp_mb(void);

/** Write memory barrier between lcores. */
static inline void rte_smp_wmb(void);

/** Read memory barrier between lcores. */
static inline void rte_smp_rmb(void);

#endif /* __DOXYGEN__ */

/**
 * Compiler barrier: keeps the compiler from reordering memory accesses
 * across this point. It emits no CPU instruction.
 */
#define rte_compiler_barrier() do {		\
	asm volatile ("" : : : "memory");	\
} while(0)

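/*
 * Usage sketch (illustrative, not part of this header): publishing a
 * value from one lcore to another. The write barrier orders the data
 * store before the flag store; the matching read barrier orders the
 * flag load before the data load. All names here are hypothetical.
 *
 *	static uint32_t msg;
 *	static uint32_t msg_ready;
 *
 *	void producer(void)
 *	{
 *		msg = 42;
 *		rte_smp_wmb();	// msg must be visible before msg_ready
 *		msg_ready = 1;
 *	}
 *
 *	uint32_t consumer(void)
 *	{
 *		while (msg_ready == 0)
 *			rte_compiler_barrier();	// force a re-load of msg_ready
 *		rte_smp_rmb();	// flag seen; now safe to read msg
 *		return msg;
 *	}
 */
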
/*------------------------- 16 bit atomic operations -------------------------*/

/**
 * Atomic compare and set: if *dst == exp, atomically write src to *dst.
 * Return non-zero on success, 0 if another writer got there first.
 */
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

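/*
 * Usage sketch (illustrative): the classic compare-and-set retry loop,
 * here keeping a running maximum. rte_atomic16_cmpset() returns non-zero
 * on success, so the loop retries whenever another writer raced in
 * between the load and the swap. atomic_max16() is a hypothetical
 * helper, not a DPDK API.
 *
 *	static void
 *	atomic_max16(volatile uint16_t *dst, uint16_t val)
 *	{
 *		uint16_t cur;
 *
 *		do {
 *			cur = *dst;
 *			if (val <= cur)
 *				return;	// current maximum already >= val
 *		} while (rte_atomic16_cmpset(dst, cur, val) == 0);
 *	}
 */
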
typedef struct {
	volatile int16_t cnt; /**< An internal counter value. */
} rte_atomic16_t;

#define RTE_ATOMIC16_INIT(val) { (val) }

static inline void
rte_atomic16_init(rte_atomic16_t *v)
{
	v->cnt = 0;
}

static inline int16_t
rte_atomic16_read(const rte_atomic16_t *v)
{
	return v->cnt;
}

static inline void
rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
{
	v->cnt = new_value;
}

static inline void
rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}

static inline void
rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}

static inline void
rte_atomic16_inc(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_inc(rte_atomic16_t *v)
{
	rte_atomic16_add(v, 1);
}
#endif

static inline void
rte_atomic16_dec(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_dec(rte_atomic16_t *v)
{
	rte_atomic16_sub(v, 1);
}
#endif

static inline int16_t
rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}

static inline int16_t
rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}

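/*
 * Usage sketch (illustrative): the *_return variants are for when the
 * updated value itself is needed, e.g. handing out unique slot indexes.
 * next_slot and take_slot() are hypothetical names.
 *
 *	static rte_atomic16_t next_slot = RTE_ATOMIC16_INIT(-1);
 *
 *	static int16_t
 *	take_slot(void)
 *	{
 *		// value *after* the increment: the first caller gets 0,
 *		// each later caller a distinct higher index
 *		return rte_atomic16_add_return(&next_slot, 1);
 *	}
 */
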
/**
 * Atomically increment the counter; return true (non-zero) if the new
 * value is 0.
 */
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
{
	return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically decrement the counter; return true (non-zero) if the new
 * value is 0.
 */
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
{
	return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically set the counter to 1 if it is currently 0; return non-zero
 * on success, 0 if it was already set.
 */
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
{
	return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
}
#endif

static inline void rte_atomic16_clear(rte_atomic16_t *v)
{
	v->cnt = 0;
}

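/*
 * Usage sketch (illustrative): test_and_set() and clear() form a simple
 * run-once / busy-flag primitive. init_done and do_expensive_init() are
 * hypothetical names.
 *
 *	static rte_atomic16_t init_done = RTE_ATOMIC16_INIT(0);
 *
 *	static void
 *	init_once(void)
 *	{
 *		// only the caller that wins the 0 -> 1 transition runs init
 *		if (rte_atomic16_test_and_set(&init_done))
 *			do_expensive_init();
 *	}
 */
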
/*------------------------- 32 bit atomic operations -------------------------*/

static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

typedef struct {
	volatile int32_t cnt; /**< An internal counter value. */
} rte_atomic32_t;

#define RTE_ATOMIC32_INIT(val) { (val) }

static inline void
rte_atomic32_init(rte_atomic32_t *v)
{
	v->cnt = 0;
}

static inline int32_t
rte_atomic32_read(const rte_atomic32_t *v)
{
	return v->cnt;
}

static inline void
rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
{
	v->cnt = new_value;
}

static inline void
rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}

static inline void
rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}

static inline void
rte_atomic32_inc(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_inc(rte_atomic32_t *v)
{
	rte_atomic32_add(v, 1);
}
#endif

static inline void
rte_atomic32_dec(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_dec(rte_atomic32_t *v)
{
	rte_atomic32_sub(v, 1);
}
#endif

static inline int32_t
rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}

static inline int32_t
rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}

static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
{
	return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif

static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
{
	return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif

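/*
 * Usage sketch (illustrative): reference counting, the typical use of
 * dec_and_test(). Only the dropper that takes the count to 0 frees the
 * object. struct obj and obj_free() are hypothetical.
 *
 *	struct obj {
 *		rte_atomic32_t refcnt;
 *		// ... payload ...
 *	};
 *
 *	static void
 *	obj_put(struct obj *o)
 *	{
 *		if (rte_atomic32_dec_and_test(&o->refcnt))
 *			obj_free(o);	// hypothetical destructor
 *	}
 */
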
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
{
	return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
}
#endif

static inline void rte_atomic32_clear(rte_atomic32_t *v)
{
	v->cnt = 0;
}

/*------------------------- 64 bit atomic operations -------------------------*/

static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

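/*
 * Usage sketch (illustrative): a 64-bit atomic exchange built from the
 * cmpset primitive, useful on targets without a native 64-bit xchg.
 * atomic64_xchg() is a hypothetical helper, not a DPDK API.
 *
 *	static uint64_t
 *	atomic64_xchg(volatile uint64_t *dst, uint64_t val)
 *	{
 *		uint64_t old;
 *
 *		do {
 *			old = *dst;
 *		} while (rte_atomic64_cmpset(dst, old, val) == 0);
 *		return old;	// the value that was replaced
 *	}
 */
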
typedef struct {
	volatile int64_t cnt; /**< An internal counter value. */
} rte_atomic64_t;

#define RTE_ATOMIC64_INIT(val) { (val) }

static inline void
rte_atomic64_init(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_init(rte_atomic64_t *v)
{
#ifdef __LP64__
	v->cnt = 0;
#else
	/* On 32-bit targets a plain 64-bit store is not atomic, so
	 * emulate it with a compare-and-set retry loop. */
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
					      tmp, 0);
	}
#endif
}
#endif

static inline int64_t
rte_atomic64_read(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v)
{
#ifdef __LP64__
	return v->cnt;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		/* replace the value by itself */
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
					      tmp, tmp);
	}
	return tmp;
#endif
}
#endif

static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
{
#ifdef __LP64__
	v->cnt = new_value;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
					      tmp, new_value);
	}
#endif
}
#endif

static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}
#endif

static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}
#endif

static inline void
rte_atomic64_inc(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_inc(rte_atomic64_t *v)
{
	rte_atomic64_add(v, 1);
}
#endif

static inline void
rte_atomic64_dec(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_dec(rte_atomic64_t *v)
{
	rte_atomic64_sub(v, 1);
}
#endif

static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}
#endif

static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}
#endif

static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_add_return(v, 1) == 0;
}
#endif

static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_sub_return(v, 1) == 0;
}
#endif

static inline int rte_atomic64_test_and_set(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
{
	return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
}
#endif

static inline void rte_atomic64_clear(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void rte_atomic64_clear(rte_atomic64_t *v)
{
	rte_atomic64_set(v, 0);
}
#endif

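/*
 * Usage sketch (illustrative): 64-bit counters are a natural fit for
 * per-port statistics; on 32-bit targets the cmpset-based emulation
 * above keeps reads and updates atomic. rx_bytes and the two helpers
 * are hypothetical names.
 *
 *	static rte_atomic64_t rx_bytes = RTE_ATOMIC64_INIT(0);
 *
 *	static void
 *	account_rx(unsigned int pkt_len)
 *	{
 *		rte_atomic64_add(&rx_bytes, pkt_len);
 *	}
 *
 *	static int64_t
 *	rx_bytes_snapshot(void)
 *	{
 *		return rte_atomic64_read(&rx_bytes);
 *	}
 */
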
#endif /* _RTE_ATOMIC_H_ */