/* DPDK 16.04.0: rte_atomic.h */
/*-
 *   BSD LICENSE
 *
 *   Copyright(c) 2010-2014 Intel Corporation. All rights reserved.
 *   All rights reserved.
 *
 *   Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions
 *   are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in
 *       the documentation and/or other materials provided with the
 *       distribution.
 *     * Neither the name of Intel Corporation nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 *   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 *   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 *   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 *   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 *   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef _RTE_ATOMIC_H_
#define _RTE_ATOMIC_H_

#include <stdint.h>

#ifdef __DOXYGEN__

/** General memory barrier: loads and stores before it complete before any after it. */
static inline void rte_mb(void);

/** Write memory barrier: stores before it complete before stores after it. */
static inline void rte_wmb(void);

/** Read memory barrier: loads before it complete before loads after it. */
static inline void rte_rmb(void);

/** General memory barrier between lcores. */
static inline void rte_smp_mb(void);

/** Write memory barrier between lcores. */
static inline void rte_smp_wmb(void);

/** Read memory barrier between lcores. */
static inline void rte_smp_rmb(void);

#endif /* __DOXYGEN__ */

/** Compiler barrier: keeps the compiler from reordering across it; no CPU fence. */
#define rte_compiler_barrier() do {		\
	asm volatile ("" : : : "memory");	\
} while(0)

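/*
 * Illustrative sketch (not part of the original header): the compiler
 * barrier only constrains compiler reordering, not the CPU. Variable
 * names below are hypothetical; on weakly ordered CPUs a real fence
 * such as rte_wmb() would still be required.
 *
 *	uint32_t payload;		// hypothetical shared data
 *	volatile uint32_t ready;	// hypothetical publish flag
 *
 *	payload = 42;
 *	rte_compiler_barrier();	// compiler may not reorder the two stores
 *	ready = 1;
 */
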
/*------------------------- 16 bit atomic operations -------------------------*/

/**
 * Atomic compare and set: if the 16-bit word at *dst equals exp, write
 * src to *dst. Returns non-zero on success, 0 on failure.
 */
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

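/*
 * Illustrative sketch (not part of the original header): cmpset succeeds
 * only if *dst still holds the expected value, so lock-free updates are
 * typically written as a read/compute/retry loop. saturating_inc16 is a
 * hypothetical helper:
 *
 *	static inline void
 *	saturating_inc16(volatile uint16_t *counter)
 *	{
 *		uint16_t old;
 *		do {
 *			old = *counter;
 *			if (old == UINT16_MAX)
 *				return;		// already saturated
 *		} while (!rte_atomic16_cmpset(counter, old, old + 1));
 *	}
 */
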
/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int16_t cnt; /**< An internal counter value. */
} rte_atomic16_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC16_INIT(val) { (val) }

/** Initialize an atomic counter to zero. */
static inline void
rte_atomic16_init(rte_atomic16_t *v)
{
	v->cnt = 0;
}

/** Atomically read the counter value. */
static inline int16_t
rte_atomic16_read(const rte_atomic16_t *v)
{
	return v->cnt;
}

/** Atomically set the counter to new_value. */
static inline void
rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
{
	v->cnt = new_value;
}

/** Atomically add inc to the counter. */
static inline void
rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}

/** Atomically subtract dec from the counter. */
static inline void
rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}

/** Atomically increment the counter by one. */
static inline void
rte_atomic16_inc(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_inc(rte_atomic16_t *v)
{
	rte_atomic16_add(v, 1);
}
#endif

/** Atomically decrement the counter by one. */
static inline void
rte_atomic16_dec(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_dec(rte_atomic16_t *v)
{
	rte_atomic16_sub(v, 1);
}
#endif

/** Atomically add inc to the counter and return the new value. */
static inline int16_t
rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}

/** Atomically subtract dec from the counter and return the new value. */
static inline int16_t
rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}

/** Increment by one; return true if the new value is zero. */
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
{
	return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif

/** Decrement by one; return true if the new value is zero. */
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
{
	return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif
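
/*
 * Illustrative sketch (not part of the original header): dec_and_test()
 * returns true only for the caller that drops the counter to zero, which
 * is the classic building block for reference counting. obj and obj_free
 * are hypothetical:
 *
 *	if (rte_atomic16_dec_and_test(&obj->refcnt))
 *		obj_free(obj);	// exactly one releasing thread sees zero
 */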

/** If the counter is zero, atomically set it to one and return true. */
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
{
	return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
}
#endif

/** Set the counter to zero. */
static inline void rte_atomic16_clear(rte_atomic16_t *v)
{
	v->cnt = 0;
}

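/*
 * Illustrative sketch (not part of the original header): basic lifecycle
 * of a 16-bit atomic counter. stats is a hypothetical variable:
 *
 *	static rte_atomic16_t stats = RTE_ATOMIC16_INIT(0);
 *
 *	rte_atomic16_add(&stats, 3);
 *	int16_t cur = rte_atomic16_add_return(&stats, 1);	// cur == 4
 *	rte_atomic16_clear(&stats);
 */
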
/*------------------------- 32 bit atomic operations -------------------------*/

/**
 * Atomic compare and set: if the 32-bit word at *dst equals exp, write
 * src to *dst. Returns non-zero on success, 0 on failure.
 */
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int32_t cnt; /**< An internal counter value. */
} rte_atomic32_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC32_INIT(val) { (val) }

/** Initialize an atomic counter to zero. */
static inline void
rte_atomic32_init(rte_atomic32_t *v)
{
	v->cnt = 0;
}

/** Atomically read the counter value. */
static inline int32_t
rte_atomic32_read(const rte_atomic32_t *v)
{
	return v->cnt;
}

/** Atomically set the counter to new_value. */
static inline void
rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
{
	v->cnt = new_value;
}

/** Atomically add inc to the counter. */
static inline void
rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}

/** Atomically subtract dec from the counter. */
static inline void
rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}

/** Atomically increment the counter by one. */
static inline void
rte_atomic32_inc(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_inc(rte_atomic32_t *v)
{
	rte_atomic32_add(v, 1);
}
#endif

/** Atomically decrement the counter by one. */
static inline void
rte_atomic32_dec(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_dec(rte_atomic32_t *v)
{
	rte_atomic32_sub(v, 1);
}
#endif

/** Atomically add inc to the counter and return the new value. */
static inline int32_t
rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}

/** Atomically subtract dec from the counter and return the new value. */
static inline int32_t
rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}

/** Increment by one; return true if the new value is zero. */
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
{
	return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif

/** Decrement by one; return true if the new value is zero. */
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
{
	return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif

/** If the counter is zero, atomically set it to one and return true. */
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
{
	return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
}
#endif

/** Set the counter to zero. */
static inline void rte_atomic32_clear(rte_atomic32_t *v)
{
	v->cnt = 0;
}

/*------------------------- 64 bit atomic operations -------------------------*/

/**
 * Atomic compare and set: if the 64-bit word at *dst equals exp, write
 * src to *dst. Returns non-zero on success, 0 on failure.
 */
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int64_t cnt; /**< An internal counter value. */
} rte_atomic64_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC64_INIT(val) { (val) }

/** Initialize the atomic counter to zero. */
static inline void
rte_atomic64_init(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_init(rte_atomic64_t *v)
{
#ifdef __LP64__
	v->cnt = 0;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, 0);
	}
#endif
}
#endif

/** Atomically read the 64-bit counter value. */
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v)
{
#ifdef __LP64__
	return v->cnt;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		/* replace the value by itself to get an atomic snapshot */
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, tmp);
	}
	return tmp;
#endif
}
#endif

/** Atomically set the 64-bit counter to new_value. */
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
{
#ifdef __LP64__
	v->cnt = new_value;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, new_value);
	}
#endif
}
#endif

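/*
 * Note: on non-LP64 targets a plain 64-bit load or store is not
 * single-copy atomic, so init(), read() and set() above fall back to a
 * cmpset retry loop; read() swaps the value for itself purely to obtain
 * an atomic snapshot in tmp.
 */
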
/** Atomically add inc to the counter. */
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}
#endif

/** Atomically subtract dec from the counter. */
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}
#endif

/** Atomically increment the counter by one. */
static inline void
rte_atomic64_inc(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_inc(rte_atomic64_t *v)
{
	rte_atomic64_add(v, 1);
}
#endif

/** Atomically decrement the counter by one. */
static inline void
rte_atomic64_dec(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_dec(rte_atomic64_t *v)
{
	rte_atomic64_sub(v, 1);
}
#endif

/** Atomically add inc to the counter and return the new value. */
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}
#endif

/** Atomically subtract dec from the counter and return the new value. */
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}
#endif

/** Increment by one; return true if the new value is zero. */
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_add_return(v, 1) == 0;
}
#endif

/** Decrement by one; return true if the new value is zero. */
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_sub_return(v, 1) == 0;
}
#endif

/** If the counter is zero, atomically set it to one and return true. */
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
{
	return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
}
#endif

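/*
 * Illustrative sketch (not part of the original header): test_and_set()
 * returns non-zero only for the first caller, so it can gate one-time
 * initialization. once and do_global_init are hypothetical:
 *
 *	static rte_atomic64_t once = RTE_ATOMIC64_INIT(0);
 *
 *	if (rte_atomic64_test_and_set(&once))
 *		do_global_init();	// exactly one thread runs this
 */
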
/** Set the counter to zero. */
static inline void rte_atomic64_clear(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void rte_atomic64_clear(rte_atomic64_t *v)
{
	rte_atomic64_set(v, 0);
}
#endif

#endif /* _RTE_ATOMIC_H_ */