atomic_utils.h
/*
 * Copyright (C) 2020 Otto-von-Guericke-Universität Magdeburg
 *
 * This file is subject to the terms and conditions of the GNU Lesser General
 * Public License v2.1. See the file LICENSE in the top level directory for more
 * details.
 */

#ifndef ATOMIC_UTILS_H
#define ATOMIC_UTILS_H

#include <stdint.h>

#include "irq.h"
#include "macros/utils.h"
#include "sched.h"

#include "atomic_utils_arch.h" /* IWYU pragma: export */

#ifdef __cplusplus
extern "C" {
#endif

/* Declarations and documentation: */

#if !defined(HAS_ATOMIC_BIT) || defined(DOXYGEN)
/* Type specifying a bit in an uint8_t */
typedef struct {
    volatile uint8_t *dest;     /* Memory containing the bit to set/clear */
    uint8_t mask;               /* Bitmask used for setting the bit */
} atomic_bit_u8_t;

/* Type specifying a bit in an uint16_t */
typedef struct {
    volatile uint16_t *dest;    /* Memory containing the bit to set/clear */
    uint16_t mask;              /* Bitmask used for setting the bit */
} atomic_bit_u16_t;

/* Type specifying a bit in an uint32_t */
typedef struct {
    volatile uint32_t *dest;    /* Memory containing the bit to set/clear */
    uint32_t mask;              /* Bitmask used for setting the bit */
} atomic_bit_u32_t;

/* Type specifying a bit in an uint64_t */
typedef struct {
    volatile uint64_t *dest;    /* Memory containing the bit to set/clear */
    uint64_t mask;              /* Bitmask used for setting the bit */
} atomic_bit_u64_t;
#endif /* HAS_ATOMIC_BIT */

static inline uint8_t atomic_load_u8(const volatile uint8_t *var);
static inline uint16_t atomic_load_u16(const volatile uint16_t *var);
static inline uint32_t atomic_load_u32(const volatile uint32_t *var);
static inline uint64_t atomic_load_u64(const volatile uint64_t *var);

static inline uintptr_t atomic_load_uintptr(const volatile uintptr_t *var) {
    if (sizeof(uintptr_t) == 2) {
        return atomic_load_u16((const volatile uint16_t *)var);
    }
    else if (sizeof(uintptr_t) == 4) {
        return atomic_load_u32((const volatile uint32_t *)(uintptr_t)var);
    }
    else {
        return atomic_load_u64((const volatile uint64_t *)(uintptr_t)var);
    }
}

static inline void * atomic_load_ptr(void **ptr_addr) {
    return (void *)atomic_load_uintptr((const volatile uintptr_t *)ptr_addr);
}

static inline kernel_pid_t atomic_load_kernel_pid(const volatile kernel_pid_t *var)
{
    return atomic_load_u16((const volatile uint16_t *)var);
}
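
/* Usage sketch (illustrative, not part of this header; `shared_buf` and
 * `consume()` are hypothetical names): reading a pointer that an ISR may
 * update concurrently, without risking a torn read.
 *
 *     static void *shared_buf;
 *
 *     void consumer(void)
 *     {
 *         void *buf = atomic_load_ptr(&shared_buf);
 *         if (buf != NULL) {
 *             consume(buf);
 *         }
 *     }
 */
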
static inline void atomic_store_u8(volatile uint8_t *dest, uint8_t val);
static inline void atomic_store_u16(volatile uint16_t *dest, uint16_t val);
static inline void atomic_store_u32(volatile uint32_t *dest, uint32_t val);
static inline void atomic_store_u64(volatile uint64_t *dest, uint64_t val);

static inline void atomic_store_uintptr(volatile uintptr_t *dest, uintptr_t val)
{
    if (sizeof(uintptr_t) == 2) {
        atomic_store_u16((volatile uint16_t *)dest, (uint16_t)val);
    }
    else if (sizeof(uintptr_t) == 4) {
        atomic_store_u32((volatile uint32_t *)(uintptr_t)dest, (uint32_t)val);
    }
    else {
        atomic_store_u64((volatile uint64_t *)(uintptr_t)dest, (uint64_t)val);
    }
}

static inline void atomic_store_ptr(void **dest, const void *val) {
    atomic_store_uintptr((volatile uintptr_t *)dest, (uintptr_t)val);
}

static inline void atomic_store_kernel_pid(volatile kernel_pid_t *dest,
                                           kernel_pid_t val)
{
    atomic_store_u16((volatile uint16_t *)dest, (uint16_t)val);
}
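
/* Usage sketch (illustrative): the producer side matching the consumer
 * above. `shared_buf` and `alloc_buf()` are hypothetical names; an ISR
 * publishes a buffer for thread context to pick up.
 *
 *     void producer_isr(void)
 *     {
 *         atomic_store_ptr(&shared_buf, alloc_buf());
 *     }
 */
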
static inline uint8_t atomic_fetch_add_u8(volatile uint8_t *dest,
                                          uint8_t summand);
static inline uint16_t atomic_fetch_add_u16(volatile uint16_t *dest,
                                            uint16_t summand);
static inline uint32_t atomic_fetch_add_u32(volatile uint32_t *dest,
                                            uint32_t summand);
static inline uint64_t atomic_fetch_add_u64(volatile uint64_t *dest,
                                            uint64_t summand);

static inline uint8_t atomic_fetch_sub_u8(volatile uint8_t *dest,
                                          uint8_t subtrahend);
static inline uint16_t atomic_fetch_sub_u16(volatile uint16_t *dest,
                                            uint16_t subtrahend);
static inline uint32_t atomic_fetch_sub_u32(volatile uint32_t *dest,
                                            uint32_t subtrahend);
static inline uint64_t atomic_fetch_sub_u64(volatile uint64_t *dest,
                                            uint64_t subtrahend);

static inline uint8_t atomic_fetch_or_u8(volatile uint8_t *dest, uint8_t val);
static inline uint16_t atomic_fetch_or_u16(volatile uint16_t *dest,
                                           uint16_t val);
static inline uint32_t atomic_fetch_or_u32(volatile uint32_t *dest,
                                           uint32_t val);
static inline uint64_t atomic_fetch_or_u64(volatile uint64_t *dest,
                                           uint64_t val);

static inline uint8_t atomic_fetch_xor_u8(volatile uint8_t *dest, uint8_t val);
static inline uint16_t atomic_fetch_xor_u16(volatile uint16_t *dest,
                                            uint16_t val);
static inline uint32_t atomic_fetch_xor_u32(volatile uint32_t *dest,
                                            uint32_t val);
static inline uint64_t atomic_fetch_xor_u64(volatile uint64_t *dest,
                                            uint64_t val);

static inline uint8_t atomic_fetch_and_u8(volatile uint8_t *dest, uint8_t val);
static inline uint16_t atomic_fetch_and_u16(volatile uint16_t *dest,
                                            uint16_t val);
static inline uint32_t atomic_fetch_and_u32(volatile uint32_t *dest,
                                            uint32_t val);
static inline uint64_t atomic_fetch_and_u64(volatile uint64_t *dest,
                                            uint64_t val);
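
/* Usage sketch (illustrative; `events` is a hypothetical variable): a
 * counter bumped in ISR context and drained from thread context. Because
 * the drain subtracts exactly the number of events it observed, increments
 * that slip in between the load and the subtraction are preserved.
 *
 *     static uint16_t events;
 *
 *     void event_isr(void)
 *     {
 *         atomic_fetch_add_u16(&events, 1);
 *     }
 *
 *     uint16_t drain_events(void)
 *     {
 *         uint16_t seen = atomic_load_u16(&events);
 *         atomic_fetch_sub_u16(&events, seen);
 *         return seen;
 *     }
 */
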
static inline atomic_bit_u8_t atomic_bit_u8(volatile uint8_t *dest,
                                            uint8_t bit);

static inline atomic_bit_u16_t atomic_bit_u16(volatile uint16_t *dest,
                                              uint8_t bit);

static inline atomic_bit_u32_t atomic_bit_u32(volatile uint32_t *dest,
                                              uint8_t bit);

static inline atomic_bit_u64_t atomic_bit_u64(volatile uint64_t *dest,
                                              uint8_t bit);

static inline void atomic_set_bit_u8(atomic_bit_u8_t bit);
static inline void atomic_set_bit_u16(atomic_bit_u16_t bit);
static inline void atomic_set_bit_u32(atomic_bit_u32_t bit);
static inline void atomic_set_bit_u64(atomic_bit_u64_t bit);

static inline void atomic_clear_bit_u8(atomic_bit_u8_t bit);
static inline void atomic_clear_bit_u16(atomic_bit_u16_t bit);
static inline void atomic_clear_bit_u32(atomic_bit_u32_t bit);
static inline void atomic_clear_bit_u64(atomic_bit_u64_t bit);
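
/* Usage sketch (illustrative; `flags` and the bit position are hypothetical):
 * a bit reference is created once and can then be set or cleared atomically.
 *
 *     static uint32_t flags;
 *
 *     void mark_ready(int ready)
 *     {
 *         atomic_bit_u32_t ready_bit = atomic_bit_u32(&flags, 3);
 *         if (ready) {
 *             atomic_set_bit_u32(ready_bit);    // flags |= (1UL << 3)
 *         }
 *         else {
 *             atomic_clear_bit_u32(ready_bit);  // flags &= ~(1UL << 3)
 *         }
 *     }
 */
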
static inline uint8_t semi_atomic_fetch_add_u8(volatile uint8_t *dest,
                                               uint8_t summand);
static inline uint16_t semi_atomic_fetch_add_u16(volatile uint16_t *dest,
                                                 uint16_t summand);
static inline uint32_t semi_atomic_fetch_add_u32(volatile uint32_t *dest,
                                                 uint32_t summand);
static inline uint64_t semi_atomic_fetch_add_u64(volatile uint64_t *dest,
                                                 uint64_t summand);

static inline uint8_t semi_atomic_fetch_sub_u8(volatile uint8_t *dest,
                                               uint8_t subtrahend);
static inline uint16_t semi_atomic_fetch_sub_u16(volatile uint16_t *dest,
                                                 uint16_t subtrahend);
static inline uint32_t semi_atomic_fetch_sub_u32(volatile uint32_t *dest,
                                                 uint32_t subtrahend);
static inline uint64_t semi_atomic_fetch_sub_u64(volatile uint64_t *dest,
                                                 uint64_t subtrahend);

static inline uint8_t semi_atomic_fetch_or_u8(volatile uint8_t *dest,
                                              uint8_t val);
static inline uint16_t semi_atomic_fetch_or_u16(volatile uint16_t *dest,
                                                uint16_t val);
static inline uint32_t semi_atomic_fetch_or_u32(volatile uint32_t *dest,
                                                uint32_t val);
static inline uint64_t semi_atomic_fetch_or_u64(volatile uint64_t *dest,
                                                uint64_t val);

static inline uint8_t semi_atomic_fetch_xor_u8(volatile uint8_t *dest,
                                               uint8_t val);
static inline uint16_t semi_atomic_fetch_xor_u16(volatile uint16_t *dest,
                                                 uint16_t val);
static inline uint32_t semi_atomic_fetch_xor_u32(volatile uint32_t *dest,
                                                 uint32_t val);
static inline uint64_t semi_atomic_fetch_xor_u64(volatile uint64_t *dest,
                                                 uint64_t val);

static inline uint8_t semi_atomic_fetch_and_u8(volatile uint8_t *dest,
                                               uint8_t val);
static inline uint16_t semi_atomic_fetch_and_u16(volatile uint16_t *dest,
                                                 uint16_t val);
static inline uint32_t semi_atomic_fetch_and_u32(volatile uint32_t *dest,
                                                 uint32_t val);
static inline uint64_t semi_atomic_fetch_and_u64(volatile uint64_t *dest,
                                                 uint64_t val);

/* Fallback implementations of atomic utility functions: */

#define ATOMIC_LOAD_IMPL(name, type)                                        \
    static inline type CONCAT(atomic_load_, name)(const volatile type *var) \
    {                                                                       \
        unsigned state = irq_disable();                                     \
        type result = *var;                                                 \
        irq_restore(state);                                                 \
        return result;                                                      \
    }
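
/* For instance, ATOMIC_LOAD_IMPL(u8, uint8_t) expands (roughly) to:
 *
 *     static inline uint8_t atomic_load_u8(const volatile uint8_t *var)
 *     {
 *         unsigned state = irq_disable();
 *         uint8_t result = *var;
 *         irq_restore(state);
 *         return result;
 *     }
 *
 * i.e. the fallback guarantees atomicity by briefly masking interrupts.
 */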

#ifndef HAS_ATOMIC_LOAD_U8
ATOMIC_LOAD_IMPL(u8, uint8_t)
#endif
#ifndef HAS_ATOMIC_LOAD_U16
ATOMIC_LOAD_IMPL(u16, uint16_t)
#endif
#ifndef HAS_ATOMIC_LOAD_U32
ATOMIC_LOAD_IMPL(u32, uint32_t)
#endif
#ifndef HAS_ATOMIC_LOAD_U64
ATOMIC_LOAD_IMPL(u64, uint64_t)
#endif

#define ATOMIC_STORE_IMPL(name, type)                   \
    static inline void CONCAT(atomic_store_, name)      \
        (volatile type *dest, type val)                 \
    {                                                   \
        unsigned state = irq_disable();                 \
        *dest = val;                                    \
        irq_restore(state);                             \
    }

#ifndef HAS_ATOMIC_STORE_U8
ATOMIC_STORE_IMPL(u8, uint8_t)
#endif
#ifndef HAS_ATOMIC_STORE_U16
ATOMIC_STORE_IMPL(u16, uint16_t)
#endif
#ifndef HAS_ATOMIC_STORE_U32
ATOMIC_STORE_IMPL(u32, uint32_t)
#endif
#ifndef HAS_ATOMIC_STORE_U64
ATOMIC_STORE_IMPL(u64, uint64_t)
#endif

#define ATOMIC_FETCH_OP_IMPL(opname, op, name, type)            \
    static inline type CONCAT4(atomic_fetch_, opname, _, name)  \
        (volatile type *dest, type val)                         \
    {                                                           \
        unsigned state = irq_disable();                         \
        const type result = *dest;                              \
        *dest = result op val;                                  \
        irq_restore(state);                                     \
        return result;                                          \
    }
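
/* For instance, ATOMIC_FETCH_OP_IMPL(add, +, u8, uint8_t) expands (roughly)
 * to a read-modify-write performed with IRQs disabled that returns the
 * previous value:
 *
 *     static inline uint8_t atomic_fetch_add_u8(volatile uint8_t *dest,
 *                                               uint8_t val)
 *     {
 *         unsigned state = irq_disable();
 *         const uint8_t result = *dest;
 *         *dest = result + val;
 *         irq_restore(state);
 *         return result;
 *     }
 */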

#ifndef HAS_ATOMIC_FETCH_ADD_U8
ATOMIC_FETCH_OP_IMPL(add, +, u8, uint8_t)
#endif
#ifndef HAS_ATOMIC_FETCH_ADD_U16
ATOMIC_FETCH_OP_IMPL(add, +, u16, uint16_t)
#endif
#ifndef HAS_ATOMIC_FETCH_ADD_U32
ATOMIC_FETCH_OP_IMPL(add, +, u32, uint32_t)
#endif
#ifndef HAS_ATOMIC_FETCH_ADD_U64
ATOMIC_FETCH_OP_IMPL(add, +, u64, uint64_t)
#endif

#ifndef HAS_ATOMIC_FETCH_SUB_U8
ATOMIC_FETCH_OP_IMPL(sub, -, u8, uint8_t)
#endif
#ifndef HAS_ATOMIC_FETCH_SUB_U16
ATOMIC_FETCH_OP_IMPL(sub, -, u16, uint16_t)
#endif
#ifndef HAS_ATOMIC_FETCH_SUB_U32
ATOMIC_FETCH_OP_IMPL(sub, -, u32, uint32_t)
#endif
#ifndef HAS_ATOMIC_FETCH_SUB_U64
ATOMIC_FETCH_OP_IMPL(sub, -, u64, uint64_t)
#endif

#ifndef HAS_ATOMIC_FETCH_OR_U8
ATOMIC_FETCH_OP_IMPL(or, |, u8, uint8_t)
#endif
#ifndef HAS_ATOMIC_FETCH_OR_U16
ATOMIC_FETCH_OP_IMPL(or, |, u16, uint16_t)
#endif
#ifndef HAS_ATOMIC_FETCH_OR_U32
ATOMIC_FETCH_OP_IMPL(or, |, u32, uint32_t)
#endif
#ifndef HAS_ATOMIC_FETCH_OR_U64
ATOMIC_FETCH_OP_IMPL(or, |, u64, uint64_t)
#endif

#ifndef HAS_ATOMIC_FETCH_XOR_U8
ATOMIC_FETCH_OP_IMPL(xor, ^, u8, uint8_t)
#endif
#ifndef HAS_ATOMIC_FETCH_XOR_U16
ATOMIC_FETCH_OP_IMPL(xor, ^, u16, uint16_t)
#endif
#ifndef HAS_ATOMIC_FETCH_XOR_U32
ATOMIC_FETCH_OP_IMPL(xor, ^, u32, uint32_t)
#endif
#ifndef HAS_ATOMIC_FETCH_XOR_U64
ATOMIC_FETCH_OP_IMPL(xor, ^, u64, uint64_t)
#endif

#ifndef HAS_ATOMIC_FETCH_AND_U8
ATOMIC_FETCH_OP_IMPL(and, &, u8, uint8_t)
#endif
#ifndef HAS_ATOMIC_FETCH_AND_U16
ATOMIC_FETCH_OP_IMPL(and, &, u16, uint16_t)
#endif
#ifndef HAS_ATOMIC_FETCH_AND_U32
ATOMIC_FETCH_OP_IMPL(and, &, u32, uint32_t)
#endif
#ifndef HAS_ATOMIC_FETCH_AND_U64
ATOMIC_FETCH_OP_IMPL(and, &, u64, uint64_t)
#endif

#ifndef HAS_ATOMIC_BIT
static inline atomic_bit_u8_t atomic_bit_u8(volatile uint8_t *dest,
                                            uint8_t bit)
{
    atomic_bit_u8_t result = { .dest = dest, .mask = 1U << bit };
    return result;
}
static inline atomic_bit_u16_t atomic_bit_u16(volatile uint16_t *dest,
                                              uint8_t bit)
{
    atomic_bit_u16_t result = { .dest = dest, .mask = 1U << bit };
    return result;
}
static inline atomic_bit_u32_t atomic_bit_u32(volatile uint32_t *dest,
                                              uint8_t bit)
{
    atomic_bit_u32_t result = { .dest = dest, .mask = 1UL << bit };
    return result;
}
static inline atomic_bit_u64_t atomic_bit_u64(volatile uint64_t *dest,
                                              uint8_t bit)
{
    atomic_bit_u64_t result = { .dest = dest, .mask = 1ULL << bit };
    return result;
}
static inline void atomic_set_bit_u8(atomic_bit_u8_t bit)
{
    atomic_fetch_or_u8(bit.dest, bit.mask);
}
static inline void atomic_set_bit_u16(atomic_bit_u16_t bit)
{
    atomic_fetch_or_u16(bit.dest, bit.mask);
}
static inline void atomic_set_bit_u32(atomic_bit_u32_t bit)
{
    atomic_fetch_or_u32(bit.dest, bit.mask);
}
static inline void atomic_set_bit_u64(atomic_bit_u64_t bit)
{
    atomic_fetch_or_u64(bit.dest, bit.mask);
}
static inline void atomic_clear_bit_u8(atomic_bit_u8_t bit)
{
    atomic_fetch_and_u8(bit.dest, ~bit.mask);
}
static inline void atomic_clear_bit_u16(atomic_bit_u16_t bit)
{
    atomic_fetch_and_u16(bit.dest, ~bit.mask);
}
static inline void atomic_clear_bit_u32(atomic_bit_u32_t bit)
{
    atomic_fetch_and_u32(bit.dest, ~bit.mask);
}
static inline void atomic_clear_bit_u64(atomic_bit_u64_t bit)
{
    atomic_fetch_and_u64(bit.dest, ~bit.mask);
}
#endif /* HAS_ATOMIC_BIT */

/* Provide semi_atomic_*() functions on top.
 *
 * - If atomic_<FOO>() is provided: Use this for semi_atomic_<FOO>() as well
 * - Else:
 *     - If a matching `atomic_store_u<BITS>()` is provided: Only make the
 *       final store atomic, as we can avoid touching the IRQ state register
 *       that way
 *     - Else: We need to disable and re-enable IRQs anyway, so we just use
 *       the fallback implementation of `atomic_<FOO>()` for
 *       `semi_atomic_<FOO>()` as well
 */

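/* Illustrative sketch of the semi-atomic contract (names are hypothetical):
 * with a single writer, only the final store has to be atomic. Concurrent
 * readers may see the old or the new value, but never a torn one.
 *
 *     static uint32_t rx_count;
 *
 *     void writer_thread(void)
 *     {
 *         semi_atomic_fetch_add_u32(&rx_count, 1);
 *     }
 *
 *     uint32_t reader(void)
 *     {
 *         return atomic_load_u32(&rx_count);
 *     }
 */
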
/* FETCH_ADD */
#if defined(HAS_ATOMIC_FETCH_ADD_U8) || !defined(HAS_ATOMIC_STORE_U8)
static inline uint8_t semi_atomic_fetch_add_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    return atomic_fetch_add_u8(dest, val);
}
#else
static inline uint8_t semi_atomic_fetch_add_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    uint8_t result = atomic_load_u8(dest);
    atomic_store_u8(dest, result + val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_ADD_U8 || !HAS_ATOMIC_STORE_U8 */

#if defined(HAS_ATOMIC_FETCH_ADD_U16) || !defined(HAS_ATOMIC_STORE_U16)
static inline uint16_t semi_atomic_fetch_add_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    return atomic_fetch_add_u16(dest, val);
}
#else
static inline uint16_t semi_atomic_fetch_add_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    uint16_t result = atomic_load_u16(dest);
    atomic_store_u16(dest, result + val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_ADD_U16 || !HAS_ATOMIC_STORE_U16 */

#if defined(HAS_ATOMIC_FETCH_ADD_U32) || !defined(HAS_ATOMIC_STORE_U32)
static inline uint32_t semi_atomic_fetch_add_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    return atomic_fetch_add_u32(dest, val);
}
#else
static inline uint32_t semi_atomic_fetch_add_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    uint32_t result = atomic_load_u32(dest);
    atomic_store_u32(dest, result + val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_ADD_U32 || !HAS_ATOMIC_STORE_U32 */

#if defined(HAS_ATOMIC_FETCH_ADD_U64) || !defined(HAS_ATOMIC_STORE_U64)
static inline uint64_t semi_atomic_fetch_add_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    return atomic_fetch_add_u64(dest, val);
}
#else
static inline uint64_t semi_atomic_fetch_add_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    uint64_t result = atomic_load_u64(dest);
    atomic_store_u64(dest, result + val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_ADD_U64 || !HAS_ATOMIC_STORE_U64 */

/* FETCH_SUB */
#if defined(HAS_ATOMIC_FETCH_SUB_U8) || !defined(HAS_ATOMIC_STORE_U8)
static inline uint8_t semi_atomic_fetch_sub_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    return atomic_fetch_sub_u8(dest, val);
}
#else
static inline uint8_t semi_atomic_fetch_sub_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    uint8_t result = atomic_load_u8(dest);
    atomic_store_u8(dest, result - val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_SUB_U8 || !HAS_ATOMIC_STORE_U8 */

#if defined(HAS_ATOMIC_FETCH_SUB_U16) || !defined(HAS_ATOMIC_STORE_U16)
static inline uint16_t semi_atomic_fetch_sub_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    return atomic_fetch_sub_u16(dest, val);
}
#else
static inline uint16_t semi_atomic_fetch_sub_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    uint16_t result = atomic_load_u16(dest);
    atomic_store_u16(dest, result - val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_SUB_U16 || !HAS_ATOMIC_STORE_U16 */

#if defined(HAS_ATOMIC_FETCH_SUB_U32) || !defined(HAS_ATOMIC_STORE_U32)
static inline uint32_t semi_atomic_fetch_sub_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    return atomic_fetch_sub_u32(dest, val);
}
#else
static inline uint32_t semi_atomic_fetch_sub_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    uint32_t result = atomic_load_u32(dest);
    atomic_store_u32(dest, result - val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_SUB_U32 || !HAS_ATOMIC_STORE_U32 */

#if defined(HAS_ATOMIC_FETCH_SUB_U64) || !defined(HAS_ATOMIC_STORE_U64)
static inline uint64_t semi_atomic_fetch_sub_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    return atomic_fetch_sub_u64(dest, val);
}
#else
static inline uint64_t semi_atomic_fetch_sub_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    uint64_t result = atomic_load_u64(dest);
    atomic_store_u64(dest, result - val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_SUB_U64 || !HAS_ATOMIC_STORE_U64 */

/* FETCH_OR */
#if defined(HAS_ATOMIC_FETCH_OR_U8) || !defined(HAS_ATOMIC_STORE_U8)
static inline uint8_t semi_atomic_fetch_or_u8(volatile uint8_t *dest,
                                              uint8_t val)
{
    return atomic_fetch_or_u8(dest, val);
}
#else
static inline uint8_t semi_atomic_fetch_or_u8(volatile uint8_t *dest,
                                              uint8_t val)
{
    uint8_t result = atomic_load_u8(dest);
    atomic_store_u8(dest, result | val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_OR_U8 || !HAS_ATOMIC_STORE_U8 */

#if defined(HAS_ATOMIC_FETCH_OR_U16) || !defined(HAS_ATOMIC_STORE_U16)
static inline uint16_t semi_atomic_fetch_or_u16(volatile uint16_t *dest,
                                                uint16_t val)
{
    return atomic_fetch_or_u16(dest, val);
}
#else
static inline uint16_t semi_atomic_fetch_or_u16(volatile uint16_t *dest,
                                                uint16_t val)
{
    uint16_t result = atomic_load_u16(dest);
    atomic_store_u16(dest, result | val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_OR_U16 || !HAS_ATOMIC_STORE_U16 */

#if defined(HAS_ATOMIC_FETCH_OR_U32) || !defined(HAS_ATOMIC_STORE_U32)
static inline uint32_t semi_atomic_fetch_or_u32(volatile uint32_t *dest,
                                                uint32_t val)
{
    return atomic_fetch_or_u32(dest, val);
}
#else
static inline uint32_t semi_atomic_fetch_or_u32(volatile uint32_t *dest,
                                                uint32_t val)
{
    uint32_t result = atomic_load_u32(dest);
    atomic_store_u32(dest, result | val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_OR_U32 || !HAS_ATOMIC_STORE_U32 */

#if defined(HAS_ATOMIC_FETCH_OR_U64) || !defined(HAS_ATOMIC_STORE_U64)
static inline uint64_t semi_atomic_fetch_or_u64(volatile uint64_t *dest,
                                                uint64_t val)
{
    return atomic_fetch_or_u64(dest, val);
}
#else
static inline uint64_t semi_atomic_fetch_or_u64(volatile uint64_t *dest,
                                                uint64_t val)
{
    uint64_t result = atomic_load_u64(dest);
    atomic_store_u64(dest, result | val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_OR_U64 || !HAS_ATOMIC_STORE_U64 */

/* FETCH_XOR */
#if defined(HAS_ATOMIC_FETCH_XOR_U8) || !defined(HAS_ATOMIC_STORE_U8)
static inline uint8_t semi_atomic_fetch_xor_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    return atomic_fetch_xor_u8(dest, val);
}
#else
static inline uint8_t semi_atomic_fetch_xor_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    uint8_t result = atomic_load_u8(dest);
    atomic_store_u8(dest, result ^ val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_XOR_U8 || !HAS_ATOMIC_STORE_U8 */

#if defined(HAS_ATOMIC_FETCH_XOR_U16) || !defined(HAS_ATOMIC_STORE_U16)
static inline uint16_t semi_atomic_fetch_xor_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    return atomic_fetch_xor_u16(dest, val);
}
#else
static inline uint16_t semi_atomic_fetch_xor_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    uint16_t result = atomic_load_u16(dest);
    atomic_store_u16(dest, result ^ val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_XOR_U16 || !HAS_ATOMIC_STORE_U16 */

#if defined(HAS_ATOMIC_FETCH_XOR_U32) || !defined(HAS_ATOMIC_STORE_U32)
static inline uint32_t semi_atomic_fetch_xor_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    return atomic_fetch_xor_u32(dest, val);
}
#else
static inline uint32_t semi_atomic_fetch_xor_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    uint32_t result = atomic_load_u32(dest);
    atomic_store_u32(dest, result ^ val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_XOR_U32 || !HAS_ATOMIC_STORE_U32 */

#if defined(HAS_ATOMIC_FETCH_XOR_U64) || !defined(HAS_ATOMIC_STORE_U64)
static inline uint64_t semi_atomic_fetch_xor_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    return atomic_fetch_xor_u64(dest, val);
}
#else
static inline uint64_t semi_atomic_fetch_xor_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    uint64_t result = atomic_load_u64(dest);
    atomic_store_u64(dest, result ^ val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_XOR_U64 || !HAS_ATOMIC_STORE_U64 */

/* FETCH_AND */
#if defined(HAS_ATOMIC_FETCH_AND_U8) || !defined(HAS_ATOMIC_STORE_U8)
static inline uint8_t semi_atomic_fetch_and_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    return atomic_fetch_and_u8(dest, val);
}
#else
static inline uint8_t semi_atomic_fetch_and_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    uint8_t result = atomic_load_u8(dest);
    atomic_store_u8(dest, result & val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_AND_U8 || !HAS_ATOMIC_STORE_U8 */

#if defined(HAS_ATOMIC_FETCH_AND_U16) || !defined(HAS_ATOMIC_STORE_U16)
static inline uint16_t semi_atomic_fetch_and_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    return atomic_fetch_and_u16(dest, val);
}
#else
static inline uint16_t semi_atomic_fetch_and_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    uint16_t result = atomic_load_u16(dest);
    atomic_store_u16(dest, result & val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_AND_U16 || !HAS_ATOMIC_STORE_U16 */

#if defined(HAS_ATOMIC_FETCH_AND_U32) || !defined(HAS_ATOMIC_STORE_U32)
static inline uint32_t semi_atomic_fetch_and_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    return atomic_fetch_and_u32(dest, val);
}
#else
static inline uint32_t semi_atomic_fetch_and_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    uint32_t result = atomic_load_u32(dest);
    atomic_store_u32(dest, result & val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_AND_U32 || !HAS_ATOMIC_STORE_U32 */

#if defined(HAS_ATOMIC_FETCH_AND_U64) || !defined(HAS_ATOMIC_STORE_U64)
static inline uint64_t semi_atomic_fetch_and_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    return atomic_fetch_and_u64(dest, val);
}
#else
static inline uint64_t semi_atomic_fetch_and_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    uint64_t result = atomic_load_u64(dest);
    atomic_store_u64(dest, result & val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_AND_U64 || !HAS_ATOMIC_STORE_U64 */

#ifdef __cplusplus
}
#endif

#endif /* ATOMIC_UTILS_H */