atomic_utils.h
Go to the documentation of this file.
/*
 * Copyright (C) 2020 Otto-von-Guericke-Universität Magdeburg
 *
 * This file is subject to the terms and conditions of the GNU Lesser General
 * Public License v2.1. See the file LICENSE in the top level directory for more
 * details.
 */
8 
136 #ifndef ATOMIC_UTILS_H
137 #define ATOMIC_UTILS_H
138 
139 #include <stdint.h>
140 
141 #include "irq.h"
142 #include "atomic_utils_arch.h"
143 
144 #ifdef __cplusplus
145 extern "C" {
146 #endif
147 
148 /* Declarations and documentation: */
149 
150 #if !defined(HAS_ATOMIC_BIT) || defined(DOXYGEN)
151 
170 typedef struct {
171  uint8_t *dest;
172  uint8_t mask;
174 
180 typedef struct {
181  uint16_t *dest;
182  uint16_t mask;
184 
190 typedef struct {
191  uint32_t *dest;
192  uint32_t mask;
194 
200 typedef struct {
201  uint64_t *dest;
202  uint64_t mask;
205 #endif /* HAS_ATOMIC_BIT */
206 
217 static inline uint8_t atomic_load_u8(const uint8_t *var);
224 static inline uint16_t atomic_load_u16(const uint16_t *var);
231 static inline uint32_t atomic_load_u32(const uint32_t *var);
238 static inline uint64_t atomic_load_u64(const uint64_t *var);
250 static inline void atomic_store_u8(uint8_t *dest, uint8_t val);
256 static inline void atomic_store_u16(uint16_t *dest, uint16_t val);
262 static inline void atomic_store_u32(uint32_t *dest, uint32_t val);
268 static inline void atomic_store_u64(uint64_t *dest, uint64_t val);
280 static inline void atomic_fetch_add_u8(uint8_t *dest, uint8_t summand);
286 static inline void atomic_fetch_add_u16(uint16_t *dest, uint16_t summand);
292 static inline void atomic_fetch_add_u32(uint32_t *dest, uint32_t summand);
298 static inline void atomic_fetch_add_u64(uint64_t *dest, uint64_t summand);
311 static inline void atomic_fetch_sub_u8(uint8_t *dest, uint8_t subtrahend);
318 static inline void atomic_fetch_sub_u16(uint16_t *dest, uint16_t subtrahend);
325 static inline void atomic_fetch_sub_u32(uint32_t *dest, uint32_t subtrahend);
332 static inline void atomic_fetch_sub_u64(uint64_t *dest, uint64_t subtrahend);
345 static inline void atomic_fetch_or_u8(uint8_t *dest, uint8_t val);
352 static inline void atomic_fetch_or_u16(uint16_t *dest, uint16_t val);
359 static inline void atomic_fetch_or_u32(uint32_t *dest, uint32_t val);
366 static inline void atomic_fetch_or_u64(uint64_t *dest, uint64_t val);
379 static inline void atomic_fetch_xor_u8(uint8_t *dest, uint8_t val);
386 static inline void atomic_fetch_xor_u16(uint16_t *dest, uint16_t val);
393 static inline void atomic_fetch_xor_u32(uint32_t *dest, uint32_t val);
400 static inline void atomic_fetch_xor_u64(uint64_t *dest, uint64_t val);
413 static inline void atomic_fetch_and_u8(uint8_t *dest, uint8_t val);
420 static inline void atomic_fetch_and_u16(uint16_t *dest, uint16_t val);
427 static inline void atomic_fetch_and_u32(uint32_t *dest, uint32_t val);
434 static inline void atomic_fetch_and_u64(uint64_t *dest, uint64_t val);
446 static inline atomic_bit_u8_t atomic_bit_u8(uint8_t *dest, uint8_t bit);
447 
453 static inline atomic_bit_u16_t atomic_bit_u16(uint16_t *dest, uint8_t bit);
454 
460 static inline atomic_bit_u32_t atomic_bit_u32(uint32_t *dest, uint8_t bit);
461 
467 static inline atomic_bit_u64_t atomic_bit_u64(uint64_t *dest, uint8_t bit);
478 static inline void atomic_set_bit_u8(atomic_bit_u8_t bit);
483 static inline void atomic_set_bit_u16(atomic_bit_u16_t bit);
488 static inline void atomic_set_bit_u32(atomic_bit_u32_t bit);
493 static inline void atomic_set_bit_u64(atomic_bit_u64_t bit);
504 static inline void atomic_clear_bit_u8(atomic_bit_u8_t bit);
509 static inline void atomic_clear_bit_u16(atomic_bit_u16_t bit);
514 static inline void atomic_clear_bit_u32(atomic_bit_u32_t bit);
519 static inline void atomic_clear_bit_u64(atomic_bit_u64_t bit);
532 static inline void semi_atomic_fetch_add_u8(uint8_t *dest, uint8_t summand);
539 static inline void semi_atomic_fetch_add_u16(uint16_t *dest, uint16_t summand);
546 static inline void semi_atomic_fetch_add_u32(uint32_t *dest, uint32_t summand);
553 static inline void semi_atomic_fetch_add_u64(uint64_t *dest, uint64_t summand);
566 static inline void semi_atomic_fetch_sub_u8(uint8_t *dest, uint8_t subtrahend);
573 static inline void semi_atomic_fetch_sub_u16(uint16_t *dest,
574  uint16_t subtrahend);
581 static inline void semi_atomic_fetch_sub_u32(uint32_t *dest,
582  uint32_t subtrahend);
589 static inline void semi_atomic_fetch_sub_u64(uint64_t *dest,
590  uint64_t subtrahend);
603 static inline void semi_atomic_fetch_or_u8(uint8_t *dest, uint8_t val);
610 static inline void semi_atomic_fetch_or_u16(uint16_t *dest, uint16_t val);
617 static inline void semi_atomic_fetch_or_u32(uint32_t *dest, uint32_t val);
624 static inline void semi_atomic_fetch_or_u64(uint64_t *dest, uint64_t val);
637 static inline void semi_atomic_fetch_xor_u8(uint8_t *dest, uint8_t val);
644 static inline void semi_atomic_fetch_xor_u16(uint16_t *dest, uint16_t val);
651 static inline void semi_atomic_fetch_xor_u32(uint32_t *dest, uint32_t val);
658 static inline void semi_atomic_fetch_xor_u64(uint64_t *dest, uint64_t val);
671 static inline void semi_atomic_fetch_and_u8(uint8_t *dest, uint8_t val);
678 static inline void semi_atomic_fetch_and_u16(uint16_t *dest, uint16_t val);
685 static inline void semi_atomic_fetch_and_u32(uint32_t *dest, uint32_t val);
692 static inline void semi_atomic_fetch_and_u64(uint64_t *dest, uint64_t val);
/* Fallback implementations of atomic utility functions: */

/**
 * @brief   Concatenate two tokens (used to build function names like
 *          `atomic_load_u8` from `atomic_load_` and `u8`)
 */
#define CONCAT(a, b) a ## b

/**
 * @brief   Concatenate four tokens
 */
#define CONCAT4(a, b, c, d) a ## b ## c ## d
714 #define ATOMIC_LOAD_IMPL(name, type) \
715  static inline type CONCAT(atomic_load_, name)(const type *var) \
716  { \
717  unsigned state = irq_disable(); \
718  /* var can be register allocated, hence the memory barrier of \
719  * irq_disable() and irq_restore() may not apply here. Using volatile \
720  * ensures that the compiler allocates it in memory and that the \
721  * memory access is not optimized out. */ \
722  type result = *((const volatile type *)var); \
723  irq_restore(state); \
724  return result; \
725  }
726 
727 #ifndef HAS_ATOMIC_LOAD_U8
728 ATOMIC_LOAD_IMPL(u8, uint8_t)
729 #endif
730 #ifndef HAS_ATOMIC_LOAD_U16
731 ATOMIC_LOAD_IMPL(u16, uint16_t)
732 #endif
733 #ifndef HAS_ATOMIC_LOAD_U32
734 ATOMIC_LOAD_IMPL(u32, uint32_t)
735 #endif
736 #ifndef HAS_ATOMIC_LOAD_U64
737 ATOMIC_LOAD_IMPL(u64, uint64_t)
738 #endif
739 
747 #define ATOMIC_STORE_IMPL(name, type) \
748  static inline void CONCAT(atomic_store_, name)(type *dest, type val) \
749  { \
750  unsigned state = irq_disable(); \
751  /* dest can be register allocated, hence the memory barrier of \
752  * irq_disable() and irq_restore() may not apply here. Using volatile \
753  * ensures that the compiler allocates it in memory and that the \
754  * memory access is not optimized out. */ \
755  *((volatile type *)dest) = val; \
756  irq_restore(state); \
757  }
758 
759 #ifndef HAS_ATOMIC_STORE_U8
760 ATOMIC_STORE_IMPL(u8, uint8_t)
761 #endif
762 #ifndef HAS_ATOMIC_STORE_U16
763 ATOMIC_STORE_IMPL(u16, uint16_t)
764 #endif
765 #ifndef HAS_ATOMIC_STORE_U32
766 ATOMIC_STORE_IMPL(u32, uint32_t)
767 #endif
768 #ifndef HAS_ATOMIC_STORE_U64
769 ATOMIC_STORE_IMPL(u64, uint64_t)
770 #endif
771 
781 #define ATOMIC_FETCH_OP_IMPL(opname, op, name, type) \
782  static inline void CONCAT4(atomic_fetch_, opname, _, name)(type *dest, \
783  type val) \
784  { \
785  unsigned state = irq_disable(); \
786  /* dest can be register allocated, hence the memory barrier of \
787  * irq_disable() and irq_restore() may not apply here. Using volatile \
788  * ensures that the compiler allocates it in memory and that the \
789  * memory access is not optimized out. */ \
790  volatile type *tmp = dest; \
791  *tmp = *tmp op val; \
792  irq_restore(state); \
793  }
794 
795 #ifndef HAS_ATOMIC_FETCH_ADD_U8
796 ATOMIC_FETCH_OP_IMPL(add, +, u8, uint8_t)
797 #endif
798 #ifndef HAS_ATOMIC_FETCH_ADD_U16
799 ATOMIC_FETCH_OP_IMPL(add, +, u16, uint16_t)
800 #endif
801 #ifndef HAS_ATOMIC_FETCH_ADD_U32
802 ATOMIC_FETCH_OP_IMPL(add, +, u32, uint32_t)
803 #endif
804 #ifndef HAS_ATOMIC_FETCH_ADD_U64
805 ATOMIC_FETCH_OP_IMPL(add, +, u64, uint64_t)
806 #endif
807 
808 #ifndef HAS_ATOMIC_FETCH_SUB_U8
809 ATOMIC_FETCH_OP_IMPL(sub, -, u8, uint8_t)
810 #endif
811 #ifndef HAS_ATOMIC_FETCH_SUB_U16
812 ATOMIC_FETCH_OP_IMPL(sub, -, u16, uint16_t)
813 #endif
814 #ifndef HAS_ATOMIC_FETCH_SUB_U32
815 ATOMIC_FETCH_OP_IMPL(sub, -, u32, uint32_t)
816 #endif
817 #ifndef HAS_ATOMIC_FETCH_SUB_U64
818 ATOMIC_FETCH_OP_IMPL(sub, -, u64, uint64_t)
819 #endif
820 
821 #ifndef HAS_ATOMIC_FETCH_OR_U8
822 ATOMIC_FETCH_OP_IMPL(or, |, u8, uint8_t)
823 #endif
824 #ifndef HAS_ATOMIC_FETCH_OR_U16
825 ATOMIC_FETCH_OP_IMPL(or, |, u16, uint16_t)
826 #endif
827 #ifndef HAS_ATOMIC_FETCH_OR_U32
828 ATOMIC_FETCH_OP_IMPL(or, |, u32, uint32_t)
829 #endif
830 #ifndef HAS_ATOMIC_FETCH_OR_U64
831 ATOMIC_FETCH_OP_IMPL(or, |, u64, uint64_t)
832 #endif
833 
834 #ifndef HAS_ATOMIC_FETCH_XOR_U8
835 ATOMIC_FETCH_OP_IMPL(xor, ^, u8, uint8_t)
836 #endif
837 #ifndef HAS_ATOMIC_FETCH_XOR_U16
838 ATOMIC_FETCH_OP_IMPL(xor, ^, u16, uint16_t)
839 #endif
840 #ifndef HAS_ATOMIC_FETCH_XOR_U32
841 ATOMIC_FETCH_OP_IMPL(xor, ^, u32, uint32_t)
842 #endif
843 #ifndef HAS_ATOMIC_FETCH_XOR_U64
844 ATOMIC_FETCH_OP_IMPL(xor, ^, u64, uint64_t)
845 #endif
846 
847 #ifndef HAS_ATOMIC_FETCH_AND_U8
848 ATOMIC_FETCH_OP_IMPL(and, &, u8, uint8_t)
849 #endif
850 #ifndef HAS_ATOMIC_FETCH_AND_U16
851 ATOMIC_FETCH_OP_IMPL(and, &, u16, uint16_t)
852 #endif
853 #ifndef HAS_ATOMIC_FETCH_AND_U32
854 ATOMIC_FETCH_OP_IMPL(and, &, u32, uint32_t)
855 #endif
856 #ifndef HAS_ATOMIC_FETCH_AND_U64
857 ATOMIC_FETCH_OP_IMPL(and, &, u64, uint64_t)
858 #endif
859 
860 #ifndef HAS_ATOMIC_BIT
861 static inline atomic_bit_u8_t atomic_bit_u8(uint8_t *dest, uint8_t bit)
862 {
863  atomic_bit_u8_t result = { .dest = dest, .mask = 1U << bit };
864  return result;
865 }
866 static inline atomic_bit_u16_t atomic_bit_u16(uint16_t *dest, uint8_t bit)
867 {
868  atomic_bit_u16_t result = { .dest = dest, .mask = 1U << bit };
869  return result;
870 }
871 static inline atomic_bit_u32_t atomic_bit_u32(uint32_t *dest, uint8_t bit)
872 {
873  atomic_bit_u32_t result = { .dest = dest, .mask = 1UL << bit };
874  return result;
875 }
876 static inline atomic_bit_u64_t atomic_bit_u64(uint64_t *dest, uint8_t bit)
877 {
878  atomic_bit_u64_t result = { .dest = dest, .mask = 1ULL << bit };
879  return result;
880 }
881 static inline void atomic_set_bit_u8(atomic_bit_u8_t bit)
882 {
883  atomic_fetch_or_u8(bit.dest, bit.mask);
884 }
885 static inline void atomic_set_bit_u16(atomic_bit_u16_t bit)
886 {
887  atomic_fetch_or_u16(bit.dest, bit.mask);
888 }
889 static inline void atomic_set_bit_u32(atomic_bit_u32_t bit)
890 {
891  atomic_fetch_or_u32(bit.dest, bit.mask);
892 }
893 static inline void atomic_set_bit_u64(atomic_bit_u64_t bit)
894 {
895  atomic_fetch_or_u64(bit.dest, bit.mask);
896 }
897 static inline void atomic_clear_bit_u8(atomic_bit_u8_t bit)
898 {
899  atomic_fetch_and_u8(bit.dest, ~bit.mask);
900 }
901 static inline void atomic_clear_bit_u16(atomic_bit_u16_t bit)
902 {
903  atomic_fetch_and_u16(bit.dest, ~bit.mask);
904 }
905 static inline void atomic_clear_bit_u32(atomic_bit_u32_t bit)
906 {
907  atomic_fetch_and_u32(bit.dest, ~bit.mask);
908 }
909 static inline void atomic_clear_bit_u64(atomic_bit_u64_t bit)
910 {
911  atomic_fetch_and_u64(bit.dest, ~bit.mask);
912 }
913 #endif
914 
/* Provide semi_atomic_*() functions on top.
 *
 * - If atomic_<FOO>() is provided: Use this for semi_atomic_<FOO>() as well
 * - Else:
 *     - If matching `atomic_store_u<BITS>()` is provided: Only make the final
 *       store atomic, as we can avoid touching the IRQ state register that
 *       way
 *     - Else: We need to disable and re-enable IRQs anyway, so we just use the
 *       fallback implementation of `atomic_<FOO>()` for `semi_atomic_<FOO>()`
 *       as well
 */
926 
/* FETCH_ADD */
#if defined(HAS_ATOMIC_FETCH_ADD_U8) || !defined(HAS_ATOMIC_STORE_U8)
static inline void semi_atomic_fetch_add_u8(uint8_t *dest, uint8_t val) {
    atomic_fetch_add_u8(dest, val);
}
#else
static inline void semi_atomic_fetch_add_u8(uint8_t *dest, uint8_t val) {
    atomic_store_u8(dest, *dest + val);
}
#endif /* HAS_ATOMIC_FETCH_ADD_U8 || !HAS_ATOMIC_STORE_U8 */

#if defined(HAS_ATOMIC_FETCH_ADD_U16) || !defined(HAS_ATOMIC_STORE_U16)
static inline void semi_atomic_fetch_add_u16(uint16_t *dest, uint16_t val) {
    atomic_fetch_add_u16(dest, val);
}
#else
static inline void semi_atomic_fetch_add_u16(uint16_t *dest, uint16_t val) {
    atomic_store_u16(dest, *dest + val);
}
#endif /* HAS_ATOMIC_FETCH_ADD_U16 || !HAS_ATOMIC_STORE_U16 */

#if defined(HAS_ATOMIC_FETCH_ADD_U32) || !defined(HAS_ATOMIC_STORE_U32)
static inline void semi_atomic_fetch_add_u32(uint32_t *dest, uint32_t val) {
    atomic_fetch_add_u32(dest, val);
}
#else
static inline void semi_atomic_fetch_add_u32(uint32_t *dest, uint32_t val) {
    atomic_store_u32(dest, *dest + val);
}
#endif /* HAS_ATOMIC_FETCH_ADD_U32 || !HAS_ATOMIC_STORE_U32 */

#if defined(HAS_ATOMIC_FETCH_ADD_U64) || !defined(HAS_ATOMIC_STORE_U64)
static inline void semi_atomic_fetch_add_u64(uint64_t *dest, uint64_t val) {
    atomic_fetch_add_u64(dest, val);
}
#else
static inline void semi_atomic_fetch_add_u64(uint64_t *dest, uint64_t val) {
    atomic_store_u64(dest, *dest + val);
}
#endif /* HAS_ATOMIC_FETCH_ADD_U64 || !HAS_ATOMIC_STORE_U64 */
/* FETCH_SUB */
#if defined(HAS_ATOMIC_FETCH_SUB_U8) || !defined(HAS_ATOMIC_STORE_U8)
static inline void semi_atomic_fetch_sub_u8(uint8_t *dest, uint8_t val) {
    atomic_fetch_sub_u8(dest, val);
}
#else
static inline void semi_atomic_fetch_sub_u8(uint8_t *dest, uint8_t val) {
    atomic_store_u8(dest, *dest - val);
}
#endif /* HAS_ATOMIC_FETCH_SUB_U8 || !HAS_ATOMIC_STORE_U8 */

#if defined(HAS_ATOMIC_FETCH_SUB_U16) || !defined(HAS_ATOMIC_STORE_U16)
static inline void semi_atomic_fetch_sub_u16(uint16_t *dest, uint16_t val) {
    atomic_fetch_sub_u16(dest, val);
}
#else
static inline void semi_atomic_fetch_sub_u16(uint16_t *dest, uint16_t val) {
    atomic_store_u16(dest, *dest - val);
}
#endif /* HAS_ATOMIC_FETCH_SUB_U16 || !HAS_ATOMIC_STORE_U16 */

#if defined(HAS_ATOMIC_FETCH_SUB_U32) || !defined(HAS_ATOMIC_STORE_U32)
static inline void semi_atomic_fetch_sub_u32(uint32_t *dest, uint32_t val) {
    atomic_fetch_sub_u32(dest, val);
}
#else
static inline void semi_atomic_fetch_sub_u32(uint32_t *dest, uint32_t val) {
    atomic_store_u32(dest, *dest - val);
}
#endif /* HAS_ATOMIC_FETCH_SUB_U32 || !HAS_ATOMIC_STORE_U32 */

#if defined(HAS_ATOMIC_FETCH_SUB_U64) || !defined(HAS_ATOMIC_STORE_U64)
static inline void semi_atomic_fetch_sub_u64(uint64_t *dest, uint64_t val) {
    atomic_fetch_sub_u64(dest, val);
}
#else
static inline void semi_atomic_fetch_sub_u64(uint64_t *dest, uint64_t val) {
    atomic_store_u64(dest, *dest - val);
}
#endif /* HAS_ATOMIC_FETCH_SUB_U64 || !HAS_ATOMIC_STORE_U64 */
1008 
/* FETCH_OR */
#if defined(HAS_ATOMIC_FETCH_OR_U8) || !defined(HAS_ATOMIC_STORE_U8)
static inline void semi_atomic_fetch_or_u8(uint8_t *dest, uint8_t val) {
    atomic_fetch_or_u8(dest, val);
}
#else
static inline void semi_atomic_fetch_or_u8(uint8_t *dest, uint8_t val) {
    atomic_store_u8(dest, *dest | val);
}
#endif /* HAS_ATOMIC_FETCH_OR_U8 || !HAS_ATOMIC_STORE_U8 */

#if defined(HAS_ATOMIC_FETCH_OR_U16) || !defined(HAS_ATOMIC_STORE_U16)
static inline void semi_atomic_fetch_or_u16(uint16_t *dest, uint16_t val) {
    atomic_fetch_or_u16(dest, val);
}
#else
static inline void semi_atomic_fetch_or_u16(uint16_t *dest, uint16_t val) {
    atomic_store_u16(dest, *dest | val);
}
#endif /* HAS_ATOMIC_FETCH_OR_U16 || !HAS_ATOMIC_STORE_U16 */

#if defined(HAS_ATOMIC_FETCH_OR_U32) || !defined(HAS_ATOMIC_STORE_U32)
static inline void semi_atomic_fetch_or_u32(uint32_t *dest, uint32_t val) {
    atomic_fetch_or_u32(dest, val);
}
#else
static inline void semi_atomic_fetch_or_u32(uint32_t *dest, uint32_t val) {
    atomic_store_u32(dest, *dest | val);
}
#endif /* HAS_ATOMIC_FETCH_OR_U32 || !HAS_ATOMIC_STORE_U32 */

#if defined(HAS_ATOMIC_FETCH_OR_U64) || !defined(HAS_ATOMIC_STORE_U64)
static inline void semi_atomic_fetch_or_u64(uint64_t *dest, uint64_t val) {
    atomic_fetch_or_u64(dest, val);
}
#else
static inline void semi_atomic_fetch_or_u64(uint64_t *dest, uint64_t val) {
    atomic_store_u64(dest, *dest | val);
}
#endif /* HAS_ATOMIC_FETCH_OR_U64 || !HAS_ATOMIC_STORE_U64 */
1049 
/* FETCH_XOR */
#if defined(HAS_ATOMIC_FETCH_XOR_U8) || !defined(HAS_ATOMIC_STORE_U8)
static inline void semi_atomic_fetch_xor_u8(uint8_t *dest, uint8_t val) {
    atomic_fetch_xor_u8(dest, val);
}
#else
static inline void semi_atomic_fetch_xor_u8(uint8_t *dest, uint8_t val) {
    atomic_store_u8(dest, *dest ^ val);
}
#endif /* HAS_ATOMIC_FETCH_XOR_U8 || !HAS_ATOMIC_STORE_U8 */

#if defined(HAS_ATOMIC_FETCH_XOR_U16) || !defined(HAS_ATOMIC_STORE_U16)
static inline void semi_atomic_fetch_xor_u16(uint16_t *dest, uint16_t val) {
    atomic_fetch_xor_u16(dest, val);
}
#else
static inline void semi_atomic_fetch_xor_u16(uint16_t *dest, uint16_t val) {
    atomic_store_u16(dest, *dest ^ val);
}
#endif /* HAS_ATOMIC_FETCH_XOR_U16 || !HAS_ATOMIC_STORE_U16 */

#if defined(HAS_ATOMIC_FETCH_XOR_U32) || !defined(HAS_ATOMIC_STORE_U32)
static inline void semi_atomic_fetch_xor_u32(uint32_t *dest, uint32_t val) {
    atomic_fetch_xor_u32(dest, val);
}
#else
static inline void semi_atomic_fetch_xor_u32(uint32_t *dest, uint32_t val) {
    atomic_store_u32(dest, *dest ^ val);
}
#endif /* HAS_ATOMIC_FETCH_XOR_U32 || !HAS_ATOMIC_STORE_U32 */

#if defined(HAS_ATOMIC_FETCH_XOR_U64) || !defined(HAS_ATOMIC_STORE_U64)
static inline void semi_atomic_fetch_xor_u64(uint64_t *dest, uint64_t val) {
    atomic_fetch_xor_u64(dest, val);
}
#else
static inline void semi_atomic_fetch_xor_u64(uint64_t *dest, uint64_t val) {
    atomic_store_u64(dest, *dest ^ val);
}
#endif /* HAS_ATOMIC_FETCH_XOR_U64 || !HAS_ATOMIC_STORE_U64 */
1090 
1091 /* FETCH_AND */
1092 #if defined(HAS_ATOMIC_FETCH_AND_U8) || !defined(HAS_ATOMIC_STORE_U8)
1093 static inline void semi_atomic_fetch_and_u8(uint8_t *dest, uint8_t val) {
1095 }
1096 #else
1097 static inline void semi_atomic_fetch_and_u8(uint8_t *dest, uint8_t val) {
1098  atomic_store_u8(dest, *dest & val);
1099 }
1100 #endif /* HAS_ATOMIC_FETCH_AND_U8 || !HAS_ATOMIC_STORE_U8 */
1101 
1102 #if defined(HAS_ATOMIC_FETCH_AND_U16) || !defined(HAS_ATOMIC_STORE_U16)
1103 static inline void semi_atomic_fetch_and_u16(uint16_t *dest, uint16_t val) {
1105 }
1106 #else
1107 static inline void semi_atomic_fetch_and_u16(uint16_t *dest, uint16_t val) {
1108  atomic_store_u16(dest, *dest & val);
1109 }
1110 #endif /* HAS_ATOMIC_FETCH_AND_U16 || !HAS_ATOMIC_STORE_U16 */
1111 
1112 #if defined(HAS_ATOMIC_FETCH_AND_U32) || !defined(HAS_ATOMIC_STORE_U32)
1113 static inline void semi_atomic_fetch_and_u32(uint32_t *dest, uint32_t val) {
1115 }
1116 #else
1117 static inline void semi_atomic_fetch_and_u32(uint32_t *dest, uint32_t val) {
1118  atomic_store_u32(dest, *dest & val);
1119 }
1120 #endif /* HAS_ATOMIC_FETCH_AND_U32 || !HAS_ATOMIC_STORE_U32 */
1121 
1122 #if defined(HAS_ATOMIC_FETCH_AND_U64) || !defined(HAS_ATOMIC_STORE_U64)
1123 static inline void semi_atomic_fetch_and_u64(uint64_t *dest, uint64_t val) {
1124  atomic_fetch_and_u64(dest, val);
1125 }
1126 #else
1127 static inline void semi_atomic_fetch_and_u64(uint64_t *dest, uint64_t val) {
1128  atomic_store_u64(dest, *dest & val);
1129 }
1130 #endif /* HAS_ATOMIC_FETCH_AND_U64 || !HAS_ATOMIC_STORE_U64 */
1131 
1132 #ifdef __cplusplus
1133 }
1134 #endif
1135 
1136 #endif /* ATOMIC_UTILS_H */
1137 
atomic_bit_u32
static atomic_bit_u32_t atomic_bit_u32(uint32_t *dest, uint8_t bit)
Create a reference to a bit in an uint32_t
Definition: atomic_utils.h:862
atomic_fetch_and_u64
static void atomic_fetch_and_u64(uint64_t *dest, uint64_t val)
Atomic version of *dest &= val
atomic_fetch_or_u8
static void atomic_fetch_or_u8(uint8_t *dest, uint8_t val)
Atomic version of *dest |= val
atomic_fetch_or_u64
static void atomic_fetch_or_u64(uint64_t *dest, uint64_t val)
Atomic version of *dest |= val
atomic_fetch_and_u8
static void atomic_fetch_and_u8(uint8_t *dest, uint8_t val)
Atomic version of *dest &= val
atomic_bit_u64_t
Type specifying a bit in an uint64_t
Definition: atomic_utils.h:200
atomic_bit_u8
static atomic_bit_u8_t atomic_bit_u8(uint8_t *dest, uint8_t bit)
Create a reference to a bit in an uint8_t
Definition: atomic_utils.h:852
atomic_store_u32
static void atomic_store_u32(uint32_t *dest, uint32_t val)
Store an uint32_t atomically.
semi_atomic_fetch_sub_u32
static void semi_atomic_fetch_sub_u32(uint32_t *dest, uint32_t subtrahend)
Semi-atomically subtract a value from a given value.
Definition: atomic_utils.h:981
atomic_fetch_or_u16
static void atomic_fetch_or_u16(uint16_t *dest, uint16_t val)
Atomic version of *dest |= val
semi_atomic_fetch_xor_u8
static void semi_atomic_fetch_xor_u8(uint8_t *dest, uint8_t val)
Semi-atomic version of *dest ^= val
Definition: atomic_utils.h:1043
atomic_fetch_sub_u8
static void atomic_fetch_sub_u8(uint8_t *dest, uint8_t subtrahend)
Atomically subtract a value from a given value.
atomic_bit_u8_t::mask
uint8_t mask
Bitmask used for setting the bit.
Definition: atomic_utils.h:172
atomic_fetch_or_u32
static void atomic_fetch_or_u32(uint32_t *dest, uint32_t val)
Atomic version of *dest |= val
atomic_store_u64
static void atomic_store_u64(uint64_t *dest, uint64_t val)
Store an uint64_t atomically.
atomic_fetch_xor_u32
static void atomic_fetch_xor_u32(uint32_t *dest, uint32_t val)
Atomic version of *dest ^= val
atomic_clear_bit_u32
static void atomic_clear_bit_u32(atomic_bit_u32_t bit)
Atomic version of *dest &= ~(1 << bit)
Definition: atomic_utils.h:896
atomic_clear_bit_u16
static void atomic_clear_bit_u16(atomic_bit_u16_t bit)
Atomic version of *dest &= ~(1 << bit)
Definition: atomic_utils.h:892
atomic_fetch_and_u16
static void atomic_fetch_and_u16(uint16_t *dest, uint16_t val)
Atomic version of *dest &= val
atomic_set_bit_u16
static void atomic_set_bit_u16(atomic_bit_u16_t bit)
Atomic version of *dest |= (1 << bit)
Definition: atomic_utils.h:876
semi_atomic_fetch_sub_u8
static void semi_atomic_fetch_sub_u8(uint8_t *dest, uint8_t subtrahend)
Semi-atomically subtract a value from a given value.
Definition: atomic_utils.h:961
atomic_set_bit_u8
static void atomic_set_bit_u8(atomic_bit_u8_t bit)
Atomic version of *dest |= (1 << bit)
Definition: atomic_utils.h:872
atomic_clear_bit_u64
static void atomic_clear_bit_u64(atomic_bit_u64_t bit)
Atomic version of *dest &= ~(1 << bit)
Definition: atomic_utils.h:900
atomic_clear_bit_u8
static void atomic_clear_bit_u8(atomic_bit_u8_t bit)
Atomic version of *dest &= ~(1 << bit)
Definition: atomic_utils.h:888
semi_atomic_fetch_or_u32
static void semi_atomic_fetch_or_u32(uint32_t *dest, uint32_t val)
Semi-atomic version of *dest |= val
Definition: atomic_utils.h:1022
semi_atomic_fetch_and_u8
static void semi_atomic_fetch_and_u8(uint8_t *dest, uint8_t val)
Semi-atomic version of *dest &= val
Definition: atomic_utils.h:1084
atomic_fetch_add_u64
static void atomic_fetch_add_u64(uint64_t *dest, uint64_t summand)
Atomically add a value onto a given value.
atomic_bit_u32_t::mask
uint32_t mask
Bitmask used for setting the bit.
Definition: atomic_utils.h:192
atomic_store_u16
static void atomic_store_u16(uint16_t *dest, uint16_t val)
Store an uint16_t atomically.
atomic_bit_u32_t::dest
uint32_t * dest
Memory containing the bit to set/clear.
Definition: atomic_utils.h:191
ATOMIC_STORE_IMPL
#define ATOMIC_STORE_IMPL(name, type)
Generates a static inline function implementing atomic_store_u<width>()
Definition: atomic_utils.h:744
atomic_bit_u64_t::mask
uint64_t mask
Bitmask used for setting the bit.
Definition: atomic_utils.h:202
atomic_load_u16
static uint16_t atomic_load_u16(const uint16_t *var)
Load an uint16_t atomically.
atomic_fetch_add_u8
static void atomic_fetch_add_u8(uint8_t *dest, uint8_t summand)
Atomically add a value onto a given value.
atomic_store_u8
static void atomic_store_u8(uint8_t *dest, uint8_t val)
Store an uint8_t atomically.
atomic_bit_u64
static atomic_bit_u64_t atomic_bit_u64(uint64_t *dest, uint8_t bit)
Create a reference to a bit in an uint64_t
Definition: atomic_utils.h:867
ATOMIC_FETCH_OP_IMPL
#define ATOMIC_FETCH_OP_IMPL(opname, op, name, type)
Generates a static inline function implementing atomic_fetch_<op>_u<width>()
Definition: atomic_utils.h:775
atomic_bit_u16_t
Type specifying a bit in an uint16_t
Definition: atomic_utils.h:180
semi_atomic_fetch_and_u64
static void semi_atomic_fetch_and_u64(uint64_t *dest, uint64_t val)
Semi-atomic version of *dest &= val
Definition: atomic_utils.h:1114
atomic_bit_u16
static atomic_bit_u16_t atomic_bit_u16(uint16_t *dest, uint8_t bit)
Create a reference to a bit in an uint16_t
Definition: atomic_utils.h:857
semi_atomic_fetch_and_u32
static void semi_atomic_fetch_and_u32(uint32_t *dest, uint32_t val)
Semi-atomic version of *dest &= val
Definition: atomic_utils.h:1104
semi_atomic_fetch_sub_u16
static void semi_atomic_fetch_sub_u16(uint16_t *dest, uint16_t subtrahend)
Semi-atomically subtract a value from a given value.
Definition: atomic_utils.h:971
atomic_fetch_add_u32
static void atomic_fetch_add_u32(uint32_t *dest, uint32_t summand)
Atomically add a value onto a given value.
irq.h
IRQ driver interface.
atomic_bit_u8_t::dest
uint8_t * dest
Memory containing the bit to set/clear.
Definition: atomic_utils.h:171
semi_atomic_fetch_or_u64
static void semi_atomic_fetch_or_u64(uint64_t *dest, uint64_t val)
Semi-atomic version of *dest |= val
Definition: atomic_utils.h:1032
atomic_load_u32
static uint32_t atomic_load_u32(const uint32_t *var)
Load an uint32_t atomically.
semi_atomic_fetch_xor_u32
static void semi_atomic_fetch_xor_u32(uint32_t *dest, uint32_t val)
Semi-atomic version of *dest ^= val
Definition: atomic_utils.h:1063
semi_atomic_fetch_sub_u64
static void semi_atomic_fetch_sub_u64(uint64_t *dest, uint64_t subtrahend)
Semi-atomically subtract a value from a given value.
Definition: atomic_utils.h:991
atomic_fetch_xor_u8
static void atomic_fetch_xor_u8(uint8_t *dest, uint8_t val)
Atomic version of *dest ^= val
atomic_bit_u8_t
Type specifying a bit in an uint8_t
Definition: atomic_utils.h:170
atomic_fetch_sub_u16
static void atomic_fetch_sub_u16(uint16_t *dest, uint16_t subtrahend)
Atomically subtract a value from a given value.
atomic_fetch_sub_u64
static void atomic_fetch_sub_u64(uint64_t *dest, uint64_t subtrahend)
Atomically subtract a value from a given value.
atomic_fetch_xor_u64
static void atomic_fetch_xor_u64(uint64_t *dest, uint64_t val)
Atomic version of *dest ^= val
semi_atomic_fetch_add_u32
static void semi_atomic_fetch_add_u32(uint32_t *dest, uint32_t summand)
Semi-atomically add a value onto a given value.
Definition: atomic_utils.h:940
semi_atomic_fetch_or_u16
static void semi_atomic_fetch_or_u16(uint16_t *dest, uint16_t val)
Semi-atomic version of *dest |= val
Definition: atomic_utils.h:1012
semi_atomic_fetch_or_u8
static void semi_atomic_fetch_or_u8(uint8_t *dest, uint8_t val)
Semi-atomic version of *dest |= val
Definition: atomic_utils.h:1002
semi_atomic_fetch_add_u8
static void semi_atomic_fetch_add_u8(uint8_t *dest, uint8_t summand)
Semi-atomically add a value onto a given value.
Definition: atomic_utils.h:920
atomic_fetch_sub_u32
static void atomic_fetch_sub_u32(uint32_t *dest, uint32_t subtrahend)
Atomically subtract a value from a given value.
semi_atomic_fetch_xor_u64
static void semi_atomic_fetch_xor_u64(uint64_t *dest, uint64_t val)
Semi-atomic version of *dest ^= val
Definition: atomic_utils.h:1073
ATOMIC_LOAD_IMPL
#define ATOMIC_LOAD_IMPL(name, type)
Generates a static inline function implementing atomic_load_u<width>()
Definition: atomic_utils.h:714
atomic_load_u64
static uint64_t atomic_load_u64(const uint64_t *var)
Load an uint64_t atomically.
atomic_fetch_add_u16
static void atomic_fetch_add_u16(uint16_t *dest, uint16_t summand)
Atomically add a value onto a given value.
atomic_bit_u64_t::dest
uint64_t * dest
Memory containing the bit to set/clear.
Definition: atomic_utils.h:201
atomic_bit_u32_t
Type specifying a bit in an uint32_t
Definition: atomic_utils.h:190
semi_atomic_fetch_and_u16
static void semi_atomic_fetch_and_u16(uint16_t *dest, uint16_t val)
Semi-atomic version of *dest &= val
Definition: atomic_utils.h:1094
atomic_load_u8
static uint8_t atomic_load_u8(const uint8_t *var)
Load an uint8_t atomically.
semi_atomic_fetch_add_u16
static void semi_atomic_fetch_add_u16(uint16_t *dest, uint16_t summand)
Semi-atomically add a value onto a given value.
Definition: atomic_utils.h:930
semi_atomic_fetch_xor_u16
static void semi_atomic_fetch_xor_u16(uint16_t *dest, uint16_t val)
Semi-atomic version of *dest ^= val
Definition: atomic_utils.h:1053
atomic_fetch_xor_u16
static void atomic_fetch_xor_u16(uint16_t *dest, uint16_t val)
Atomic version of *dest ^= val
atomic_bit_u16_t::mask
uint16_t mask
Bitmask used for setting the bit.
Definition: atomic_utils.h:182
semi_atomic_fetch_add_u64
static void semi_atomic_fetch_add_u64(uint64_t *dest, uint64_t summand)
Semi-atomically add a value onto a given value.
Definition: atomic_utils.h:950
atomic_bit_u16_t::dest
uint16_t * dest
Memory containing the bit to set/clear.
Definition: atomic_utils.h:181
atomic_set_bit_u32
static void atomic_set_bit_u32(atomic_bit_u32_t bit)
Atomic version of *dest |= (1 << bit)
Definition: atomic_utils.h:880
atomic_set_bit_u64
static void atomic_set_bit_u64(atomic_bit_u64_t bit)
Atomic version of *dest |= (1 << bit)
Definition: atomic_utils.h:884
atomic_fetch_and_u32
static void atomic_fetch_and_u32(uint32_t *dest, uint32_t val)
Atomic version of *dest &= val