DPDK 2.1.0
rte_atomic.h
/*-
 *   BSD LICENSE
 *
 *   Copyright(c) 2010-2014 Intel Corporation. All rights reserved.
 *   All rights reserved.
 *
 *   Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions
 *   are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in
 *       the documentation and/or other materials provided with the
 *       distribution.
 *     * Neither the name of Intel Corporation nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 *   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 *   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 *   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 *   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 *   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef _RTE_ATOMIC_H_
#define _RTE_ATOMIC_H_

#include <stdint.h>

#ifdef __DOXYGEN__

/** General memory barrier: orders both loads and stores issued before the barrier against those issued after. */
static inline void rte_mb(void);

/** Write memory barrier: orders stores issued before the barrier against stores issued after. */
static inline void rte_wmb(void);

/** Read memory barrier: orders loads issued before the barrier against loads issued after. */
static inline void rte_rmb(void);

#endif /* __DOXYGEN__ */

/** Compiler barrier: prevents compile-time reordering across this point (no effect on CPU reordering). */
#define rte_compiler_barrier() do {             \
        asm volatile ("" : : : "memory");       \
} while(0)

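/*
 * Usage sketch (added for illustration; not part of the original header):
 * a single-producer handoff where the payload must become visible before
 * the index that publishes it. rte_wmb() orders the two stores on the CPU;
 * rte_compiler_barrier() alone only stops the compiler from reordering
 * them, which happens to suffice for stores on x86 but not in general.
 * RING_SIZE and the variable names here are hypothetical.
 *
 * @code
 * static uint32_t ring_data[RING_SIZE];   // hypothetical payload slots
 * static volatile uint32_t prod_idx;      // published producer index
 *
 * static void publish(uint32_t slot, uint32_t value)
 * {
 *         ring_data[slot] = value;   // 1: write the payload
 *         rte_wmb();                 // 2: payload store is visible first
 *         prod_idx = slot + 1;       // 3: publish the new index
 * }
 * @endcode
 */
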
/*------------------------- 16 bit atomic operations -------------------------*/

/** Atomic compare and set: if (*dst == exp) *dst = src; returns non-zero on success, 0 on failure. */
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
        return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

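/*
 * Usage sketch (added for illustration; not part of the original header):
 * the canonical compare-and-set retry loop. Any read-modify-write without
 * a dedicated helper can be built on rte_atomic16_cmpset(): re-read the
 * current value and retry until no other thread raced in between. The
 * function and variable names are hypothetical.
 *
 * @code
 * static volatile uint16_t flags;
 *
 * static void set_flag_bits(uint16_t bits)
 * {
 *         uint16_t old;
 *         do {
 *                 old = flags;
 *         } while (rte_atomic16_cmpset(&flags, old, old | bits) == 0);
 * }
 * @endcode
 */
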
/** The atomic counter structure. */
typedef struct {
        volatile int16_t cnt; /**< An internal counter value. */
} rte_atomic16_t;

/** Static initializer for an atomic counter. */
#define RTE_ATOMIC16_INIT(val) { (val) }

/** Initialize an atomic counter to 0. */
static inline void
rte_atomic16_init(rte_atomic16_t *v)
{
        v->cnt = 0;
}

/** Atomically read the 16-bit counter value. */
static inline int16_t
rte_atomic16_read(const rte_atomic16_t *v)
{
        return v->cnt;
}

/** Atomically set the counter to new_value. */
static inline void
rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
{
        v->cnt = new_value;
}

/** Atomically add inc to the counter. */
static inline void
rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
{
        __sync_fetch_and_add(&v->cnt, inc);
}

/** Atomically subtract dec from the counter. */
static inline void
rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
{
        __sync_fetch_and_sub(&v->cnt, dec);
}

/** Atomically increment the counter by one. */
static inline void
rte_atomic16_inc(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_inc(rte_atomic16_t *v)
{
        rte_atomic16_add(v, 1);
}
#endif

/** Atomically decrement the counter by one. */
static inline void
rte_atomic16_dec(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_dec(rte_atomic16_t *v)
{
        rte_atomic16_sub(v, 1);
}
#endif

/** Atomically add inc to the counter and return the new value. */
static inline int16_t
rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
{
        return __sync_add_and_fetch(&v->cnt, inc);
}

/** Atomically subtract dec from the counter and return the new value. */
static inline int16_t
rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
{
        return __sync_sub_and_fetch(&v->cnt, dec);
}

/** Atomically increment the counter; return non-zero if the result is 0. */
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
{
        return (__sync_add_and_fetch(&v->cnt, 1) == 0);
}
#endif

/** Atomically decrement the counter; return non-zero if the result is 0. */
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
{
        return (__sync_sub_and_fetch(&v->cnt, 1) == 0);
}
#endif

/** If the counter is 0, set it to 1 and return 1 (success); otherwise return 0. */
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
{
        return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
}
#endif

/** Set the counter to 0. */
static inline void rte_atomic16_clear(rte_atomic16_t *v)
{
        v->cnt = 0;
}

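/*
 * Usage sketch (added for illustration; not part of the original header):
 * a reference count built from the 16-bit counter API. The object type
 * and destructor are hypothetical. rte_atomic16_dec_and_test() returns
 * non-zero only for the caller that drops the last reference, so exactly
 * one thread frees the object.
 *
 * @code
 * struct my_obj {
 *         rte_atomic16_t refcnt;
 *         // ... payload ...
 * };
 *
 * static void my_obj_get(struct my_obj *o)
 * {
 *         rte_atomic16_inc(&o->refcnt);
 * }
 *
 * static void my_obj_put(struct my_obj *o)
 * {
 *         if (rte_atomic16_dec_and_test(&o->refcnt))
 *                 my_obj_free(o);   // hypothetical destructor
 * }
 * @endcode
 */
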
/*------------------------- 32 bit atomic operations -------------------------*/

/** Atomic compare and set: if (*dst == exp) *dst = src; returns non-zero on success, 0 on failure. */
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
        return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/** The atomic counter structure. */
typedef struct {
        volatile int32_t cnt; /**< An internal counter value. */
} rte_atomic32_t;

/** Static initializer for an atomic counter. */
#define RTE_ATOMIC32_INIT(val) { (val) }

/** Initialize an atomic counter to 0. */
static inline void
rte_atomic32_init(rte_atomic32_t *v)
{
        v->cnt = 0;
}

/** Atomically read the 32-bit counter value. */
static inline int32_t
rte_atomic32_read(const rte_atomic32_t *v)
{
        return v->cnt;
}

/** Atomically set the counter to new_value. */
static inline void
rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
{
        v->cnt = new_value;
}

/** Atomically add inc to the counter. */
static inline void
rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
{
        __sync_fetch_and_add(&v->cnt, inc);
}

/** Atomically subtract dec from the counter. */
static inline void
rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
{
        __sync_fetch_and_sub(&v->cnt, dec);
}

/** Atomically increment the counter by one. */
static inline void
rte_atomic32_inc(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_inc(rte_atomic32_t *v)
{
        rte_atomic32_add(v, 1);
}
#endif

/** Atomically decrement the counter by one. */
static inline void
rte_atomic32_dec(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_dec(rte_atomic32_t *v)
{
        rte_atomic32_sub(v, 1);
}
#endif

/** Atomically add inc to the counter and return the new value. */
static inline int32_t
rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
{
        return __sync_add_and_fetch(&v->cnt, inc);
}

/** Atomically subtract dec from the counter and return the new value. */
static inline int32_t
rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
{
        return __sync_sub_and_fetch(&v->cnt, dec);
}

/** Atomically increment the counter; return non-zero if the result is 0. */
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
{
        return (__sync_add_and_fetch(&v->cnt, 1) == 0);
}
#endif

/** Atomically decrement the counter; return non-zero if the result is 0. */
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
{
        return (__sync_sub_and_fetch(&v->cnt, 1) == 0);
}
#endif

/** If the counter is 0, set it to 1 and return 1 (success); otherwise return 0. */
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
{
        return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
}
#endif

/** Set the counter to 0. */
static inline void rte_atomic32_clear(rte_atomic32_t *v)
{
        v->cnt = 0;
}

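/*
 * Usage sketch (added for illustration; not part of the original header):
 * rte_atomic32_test_and_set() as a one-shot "claim" flag, so that a
 * once-only initialization runs on exactly one lcore; it returns 1 only
 * for the first caller that moves the counter from 0 to 1. Note that
 * callers which lose the race return immediately and must not assume the
 * winner has finished initializing. The names are hypothetical;
 * rte_atomic32_clear() would re-arm the flag.
 *
 * @code
 * static rte_atomic32_t init_once = RTE_ATOMIC32_INIT(0);
 *
 * static void ensure_initialized(void)
 * {
 *         if (rte_atomic32_test_and_set(&init_once))
 *                 do_global_init();   // hypothetical; runs exactly once
 * }
 * @endcode
 */
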
/*------------------------- 64 bit atomic operations -------------------------*/

/** Atomic compare and set: if (*dst == exp) *dst = src; returns non-zero on success, 0 on failure. */
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
        return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/** The atomic counter structure. */
typedef struct {
        volatile int64_t cnt; /**< An internal counter value. */
} rte_atomic64_t;

/** Static initializer for an atomic counter. */
#define RTE_ATOMIC64_INIT(val) { (val) }

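/*
 * Usage sketch (added for illustration; not part of the original header):
 * a lock-free "running maximum" built on rte_atomic64_cmpset(), following
 * the same cast-to-&v->cnt pattern the helpers below use. The loop gives
 * up once the observed value is already large enough, and otherwise
 * retries until its candidate wins the race. The names are hypothetical.
 *
 * @code
 * static rte_atomic64_t max_seen = RTE_ATOMIC64_INIT(0);
 *
 * static void update_max(int64_t sample)
 * {
 *         int64_t cur;
 *         do {
 *                 cur = rte_atomic64_read(&max_seen);
 *                 if (cur >= sample)
 *                         return;   // another thread saw a larger value
 *         } while (rte_atomic64_cmpset((volatile uint64_t *)&max_seen.cnt,
 *                                      (uint64_t)cur,
 *                                      (uint64_t)sample) == 0);
 * }
 * @endcode
 */
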
/** Initialize the atomic counter to 0. */
static inline void
rte_atomic64_init(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_init(rte_atomic64_t *v)
{
#ifdef __LP64__
        v->cnt = 0;
#else
        /* A 64-bit store is not atomic on 32-bit targets, so emulate it
         * with a compare-and-set retry loop. */
        int success = 0;
        uint64_t tmp;

        while (success == 0) {
                tmp = v->cnt;
                success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
                                              tmp, 0);
        }
#endif
}
#endif

/** Atomically read the 64-bit counter value. */
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v)
{
#ifdef __LP64__
        return v->cnt;
#else
        /* On 32-bit targets, read atomically by replacing the value with
         * itself: cmpset only succeeds once tmp holds a consistent
         * (untorn) copy of the 64-bit value. */
        int success = 0;
        uint64_t tmp;

        while (success == 0) {
                tmp = v->cnt;
                /* replace the value by itself */
                success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
                                              tmp, tmp);
        }
        return tmp;
#endif
}
#endif

/** Atomically set the counter to new_value. */
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
{
#ifdef __LP64__
        v->cnt = new_value;
#else
        /* Emulate the 64-bit store with a compare-and-set retry loop, as
         * in rte_atomic64_init(). */
        int success = 0;
        uint64_t tmp;

        while (success == 0) {
                tmp = v->cnt;
                success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
                                              tmp, new_value);
        }
#endif
}
#endif

/** Atomically add inc to the counter. */
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
{
        __sync_fetch_and_add(&v->cnt, inc);
}
#endif

/** Atomically subtract dec from the counter. */
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
{
        __sync_fetch_and_sub(&v->cnt, dec);
}
#endif

/** Atomically increment the counter by one. */
static inline void
rte_atomic64_inc(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_inc(rte_atomic64_t *v)
{
        rte_atomic64_add(v, 1);
}
#endif

/** Atomically decrement the counter by one. */
static inline void
rte_atomic64_dec(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_dec(rte_atomic64_t *v)
{
        rte_atomic64_sub(v, 1);
}
#endif

/** Atomically add inc to the counter and return the new value. */
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
{
        return __sync_add_and_fetch(&v->cnt, inc);
}
#endif

/** Atomically subtract dec from the counter and return the new value. */
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
{
        return __sync_sub_and_fetch(&v->cnt, dec);
}
#endif

/** Atomically increment the counter; return non-zero if the result is 0. */
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
{
        return rte_atomic64_add_return(v, 1) == 0;
}
#endif

/** Atomically decrement the counter; return non-zero if the result is 0. */
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
{
        return rte_atomic64_sub_return(v, 1) == 0;
}
#endif

/** If the counter is 0, set it to 1 and return 1 (success); otherwise return 0. */
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
{
        return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
}
#endif

/** Set the counter to 0. */
static inline void rte_atomic64_clear(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void rte_atomic64_clear(rte_atomic64_t *v)
{
        rte_atomic64_set(v, 0);
}
#endif

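/*
 * Usage sketch (added for illustration; not part of the original header):
 * a shared 64-bit statistics counter updated from several lcores and
 * drained from a control thread. On 64-bit targets read/add are plain or
 * single-instruction accesses; on 32-bit targets the header falls back to
 * cmpset loops, so the same code stays correct there. Subtracting the
 * snapshot, rather than clearing, preserves any increments that raced in
 * between the read and the reset. The names are hypothetical.
 *
 * @code
 * static rte_atomic64_t rx_bytes = RTE_ATOMIC64_INIT(0);
 *
 * static void account_packet(unsigned int len)   // worker lcores
 * {
 *         rte_atomic64_add(&rx_bytes, len);
 * }
 *
 * static int64_t snapshot_and_reset(void)        // control thread
 * {
 *         int64_t total = rte_atomic64_read(&rx_bytes);
 *         rte_atomic64_sub(&rx_bytes, total);
 *         return total;
 * }
 * @endcode
 */
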
#endif /* _RTE_ATOMIC_H_ */