atomic_utils.h
/*
 * Copyright (C) 2020 Otto-von-Guericke-Universität Magdeburg
 *
 * This file is subject to the terms and conditions of the GNU Lesser General
 * Public License v2.1. See the file LICENSE in the top level directory for more
 * details.
 */

#pragma once

#include <stdint.h>

#include "irq.h"
#include "macros/utils.h"
#include "sched.h"

#include "atomic_utils_arch.h" /* IWYU pragma: export */

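/*
 * The include of "atomic_utils_arch.h" above pulls in platform-specific
 * implementations. An architecture advertises them by defining feature
 * macros such as HAS_ATOMIC_BIT or HAS_ATOMIC_LOAD_U8; every operation not
 * advertised this way falls back to the generic, IRQ-disabling
 * implementations further down in this file.
 */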

#ifdef __cplusplus
extern "C" {
#endif

/* NOLINTBEGIN(bugprone-macro-parentheses, readability-inconsistent-declaration-parameter-name)
 *
 * The macros ATOMIC_LOAD_IMPL() and friends do not surround the argument used
 * to pass the type with parentheses. Suppressing the clang-tidy warning here,
 * as adding parentheses around a type would be a syntax error.
 *
 * The macro ATOMIC_FETCH_OP_IMPL() uses `val` as the argument name, but we
 * want the declarations to be more specific (e.g. summand instead of val).
 */

/* Declarations and documentation: */

#if !defined(HAS_ATOMIC_BIT) || defined(DOXYGEN)
/* Type specifying a bit in an uint8_t */
typedef struct {
    volatile uint8_t *dest;     /* Memory containing the bit to set/clear */
    uint8_t mask;               /* Bitmask used for setting the bit */
} atomic_bit_u8_t;

/* Type specifying a bit in an uint16_t */
typedef struct {
    volatile uint16_t *dest;    /* Memory containing the bit to set/clear */
    uint16_t mask;              /* Bitmask used for setting the bit */
} atomic_bit_u16_t;

/* Type specifying a bit in an uint32_t */
typedef struct {
    volatile uint32_t *dest;    /* Memory containing the bit to set/clear */
    uint32_t mask;              /* Bitmask used for setting the bit */
} atomic_bit_u32_t;

/* Type specifying a bit in an uint64_t */
typedef struct {
    volatile uint64_t *dest;    /* Memory containing the bit to set/clear */
    uint64_t mask;              /* Bitmask used for setting the bit */
} atomic_bit_u64_t;
#endif /* HAS_ATOMIC_BIT */

static inline uint8_t atomic_load_u8(const volatile uint8_t *var);
static inline uint16_t atomic_load_u16(const volatile uint16_t *var);
static inline uint32_t atomic_load_u32(const volatile uint32_t *var);
static inline uint64_t atomic_load_u64(const volatile uint64_t *var);

static inline unsigned atomic_load_unsigned(const volatile unsigned *var)
{
    if (sizeof(uint64_t) == sizeof(unsigned)) {
        return atomic_load_u64((volatile void *)var);
    }

    if (sizeof(uint32_t) == sizeof(unsigned)) {
        return atomic_load_u32((volatile void *)var);
    }

    return atomic_load_u16((volatile void *)var);
}
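
/*
 * Note: the sizeof() comparisons in atomic_load_unsigned() above (and in
 * the other *_unsigned helpers below) are compile-time constants, so an
 * optimizing compiler drops the dead branches and each helper collapses
 * into a single call to the matching fixed-width function.
 */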

static inline uintptr_t atomic_load_uintptr(const volatile uintptr_t *var)
{
    if (sizeof(uintptr_t) == 2) {
        return atomic_load_u16((const volatile uint16_t *)var);
    }

    if (sizeof(uintptr_t) == 4) {
        return atomic_load_u32((const volatile uint32_t *)(uintptr_t)var);
    }

    return atomic_load_u64((const volatile uint64_t *)(uintptr_t)var);
}

static inline void * atomic_load_ptr(void **ptr_addr)
{
    return (void *)atomic_load_uintptr((const volatile uintptr_t *)ptr_addr);
}

static inline kernel_pid_t atomic_load_kernel_pid(const volatile kernel_pid_t *var)
{
    return (kernel_pid_t)atomic_load_u16((const volatile uint16_t *)var);
}
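
/*
 * The cast in atomic_load_kernel_pid() above (and in
 * atomic_store_kernel_pid() below) relies on kernel_pid_t being a 16 bit
 * integer (it is defined as int16_t in sched.h).
 */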

static inline void atomic_store_u8(volatile uint8_t *dest, uint8_t val);
static inline void atomic_store_u16(volatile uint16_t *dest, uint16_t val);
static inline void atomic_store_u32(volatile uint32_t *dest, uint32_t val);
static inline void atomic_store_u64(volatile uint64_t *dest, uint64_t val);

static inline void atomic_store_unsigned(volatile unsigned *dest, unsigned val)
{
    if (sizeof(uint64_t) == sizeof(unsigned)) {
        atomic_store_u64((volatile void *)dest, val);
    }
    else if (sizeof(uint32_t) == sizeof(unsigned)) {
        atomic_store_u32((volatile void *)dest, val);
    }
    else {
        atomic_store_u16((volatile void *)dest, val);
    }
}

static inline void atomic_store_uintptr(volatile uintptr_t *dest, uintptr_t val)
{
    if (sizeof(uintptr_t) == 2) {
        atomic_store_u16((volatile uint16_t *)dest, (uint16_t)val);
    }
    else if (sizeof(uintptr_t) == 4) {
        atomic_store_u32((volatile uint32_t *)(uintptr_t)dest, (uint32_t)val);
    }
    else {
        atomic_store_u64((volatile uint64_t *)(uintptr_t)dest, (uint64_t)val);
    }
}

static inline void atomic_store_ptr(void **dest, const void *val)
{
    atomic_store_uintptr((volatile uintptr_t *)dest, (uintptr_t)val);
}
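
/*
 * Usage sketch (illustrative, not part of the original header): handing a
 * buffer pointer from thread context to an ISR without the reader ever
 * observing a torn (half-written) pointer; the names are hypothetical:
 *
 *     static void *shared_buf;
 *
 *     void publish(void *buf)
 *     {
 *         atomic_store_ptr(&shared_buf, buf);
 *     }
 *
 *     void *acquire(void)
 *     {
 *         return atomic_load_ptr(&shared_buf);
 *     }
 */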

static inline void atomic_store_kernel_pid(volatile kernel_pid_t *dest,
                                           kernel_pid_t val)
{
    atomic_store_u16((volatile uint16_t *)dest, (uint16_t)val);
}

static inline uint8_t atomic_fetch_add_u8(volatile uint8_t *dest,
                                          uint8_t summand);
static inline uint16_t atomic_fetch_add_u16(volatile uint16_t *dest,
                                            uint16_t summand);
static inline uint32_t atomic_fetch_add_u32(volatile uint32_t *dest,
                                            uint32_t summand);
static inline uint64_t atomic_fetch_add_u64(volatile uint64_t *dest,
                                            uint64_t summand);

static inline unsigned atomic_fetch_add_unsigned(volatile unsigned *dest,
                                                 unsigned summand)
{
    if (sizeof(unsigned) == sizeof(uint64_t)) {
        return atomic_fetch_add_u64((volatile void *)dest, summand);
    }

    if (sizeof(unsigned) == sizeof(uint32_t)) {
        return atomic_fetch_add_u32((volatile void *)dest, summand);
    }

    return atomic_fetch_add_u16((volatile void *)dest, summand);
}
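
/*
 * Usage sketch (illustrative, not part of the original header): a counter
 * bumped from interrupt context and read from thread context needs no
 * explicit locking when accessed through these helpers; rx_count is a
 * hypothetical variable:
 *
 *     static unsigned rx_count;
 *
 *     void rx_isr(void)
 *     {
 *         atomic_fetch_add_unsigned(&rx_count, 1);
 *     }
 *
 *     unsigned rx_total(void)
 *     {
 *         return atomic_load_unsigned(&rx_count);
 *     }
 */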

static inline uint8_t atomic_fetch_sub_u8(volatile uint8_t *dest,
                                          uint8_t subtrahend);
static inline uint16_t atomic_fetch_sub_u16(volatile uint16_t *dest,
                                            uint16_t subtrahend);
static inline uint32_t atomic_fetch_sub_u32(volatile uint32_t *dest,
                                            uint32_t subtrahend);
static inline uint64_t atomic_fetch_sub_u64(volatile uint64_t *dest,
                                            uint64_t subtrahend);

static inline unsigned atomic_fetch_sub_unsigned(volatile unsigned *dest,
                                                 unsigned subtrahend)
{
    if (sizeof(unsigned) == sizeof(uint64_t)) {
        return atomic_fetch_sub_u64((volatile void *)dest, subtrahend);
    }

    if (sizeof(unsigned) == sizeof(uint32_t)) {
        return atomic_fetch_sub_u32((volatile void *)dest, subtrahend);
    }

    return atomic_fetch_sub_u16((volatile void *)dest, subtrahend);
}

static inline uint8_t atomic_fetch_or_u8(volatile uint8_t *dest, uint8_t val);
static inline uint16_t atomic_fetch_or_u16(volatile uint16_t *dest,
                                           uint16_t val);
static inline uint32_t atomic_fetch_or_u32(volatile uint32_t *dest,
                                           uint32_t val);
static inline uint64_t atomic_fetch_or_u64(volatile uint64_t *dest,
                                           uint64_t val);

static inline unsigned atomic_fetch_or_unsigned(volatile unsigned *dest,
                                                unsigned val)
{
    if (sizeof(unsigned) == sizeof(uint64_t)) {
        return atomic_fetch_or_u64((volatile void *)dest, val);
    }

    if (sizeof(unsigned) == sizeof(uint32_t)) {
        return atomic_fetch_or_u32((volatile void *)dest, val);
    }

    return atomic_fetch_or_u16((volatile void *)dest, val);
}

static inline uint8_t atomic_fetch_xor_u8(volatile uint8_t *dest, uint8_t val);
static inline uint16_t atomic_fetch_xor_u16(volatile uint16_t *dest,
                                            uint16_t val);
static inline uint32_t atomic_fetch_xor_u32(volatile uint32_t *dest,
                                            uint32_t val);
static inline uint64_t atomic_fetch_xor_u64(volatile uint64_t *dest,
                                            uint64_t val);

static inline unsigned atomic_fetch_xor_unsigned(volatile unsigned *dest,
                                                 unsigned val)
{
    if (sizeof(unsigned) == sizeof(uint64_t)) {
        return atomic_fetch_xor_u64((volatile void *)dest, val);
    }

    if (sizeof(unsigned) == sizeof(uint32_t)) {
        return atomic_fetch_xor_u32((volatile void *)dest, val);
    }

    return atomic_fetch_xor_u16((volatile void *)dest, val);
}

static inline uint8_t atomic_fetch_and_u8(volatile uint8_t *dest, uint8_t val);
static inline uint16_t atomic_fetch_and_u16(volatile uint16_t *dest,
                                            uint16_t val);
static inline uint32_t atomic_fetch_and_u32(volatile uint32_t *dest,
                                            uint32_t val);
static inline uint64_t atomic_fetch_and_u64(volatile uint64_t *dest,
                                            uint64_t val);

static inline unsigned atomic_fetch_and_unsigned(volatile unsigned *dest,
                                                 unsigned val)
{
    if (sizeof(unsigned) == sizeof(uint64_t)) {
        return atomic_fetch_and_u64((volatile void *)dest, val);
    }

    if (sizeof(unsigned) == sizeof(uint32_t)) {
        return atomic_fetch_and_u32((volatile void *)dest, val);
    }

    return atomic_fetch_and_u16((volatile void *)dest, val);
}

static inline atomic_bit_u8_t atomic_bit_u8(volatile uint8_t *dest,
                                            uint8_t bit);

static inline atomic_bit_u16_t atomic_bit_u16(volatile uint16_t *dest,
                                              uint8_t bit);

static inline atomic_bit_u32_t atomic_bit_u32(volatile uint32_t *dest,
                                              uint8_t bit);

static inline atomic_bit_u64_t atomic_bit_u64(volatile uint64_t *dest,
                                              uint8_t bit);

static inline void atomic_set_bit_u8(atomic_bit_u8_t bit);
static inline void atomic_set_bit_u16(atomic_bit_u16_t bit);
static inline void atomic_set_bit_u32(atomic_bit_u32_t bit);
static inline void atomic_set_bit_u64(atomic_bit_u64_t bit);

static inline void atomic_clear_bit_u8(atomic_bit_u8_t bit);
static inline void atomic_clear_bit_u16(atomic_bit_u16_t bit);
static inline void atomic_clear_bit_u32(atomic_bit_u32_t bit);
static inline void atomic_clear_bit_u64(atomic_bit_u64_t bit);
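
/*
 * Usage sketch (illustrative, not part of the original header): an
 * atomic_bit_*() reference pre-computes the destination/mask pair once and
 * can then be set or cleared cheaply; on platforms defining HAS_ATOMIC_BIT
 * this may map to special hardware support such as bit-banding. The names
 * below are hypothetical:
 *
 *     static uint32_t event_flags;
 *
 *     void signal_rx_ready(void)
 *     {
 *         atomic_bit_u32_t bit = atomic_bit_u32(&event_flags, 3);
 *         atomic_set_bit_u32(bit);
 *     }
 */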

static inline uint8_t semi_atomic_fetch_add_u8(volatile uint8_t *dest,
                                               uint8_t summand);
static inline uint16_t semi_atomic_fetch_add_u16(volatile uint16_t *dest,
                                                 uint16_t summand);
static inline uint32_t semi_atomic_fetch_add_u32(volatile uint32_t *dest,
                                                 uint32_t summand);
static inline uint64_t semi_atomic_fetch_add_u64(volatile uint64_t *dest,
                                                 uint64_t summand);

static inline unsigned semi_atomic_fetch_add_unsigned(volatile unsigned *dest,
                                                      unsigned summand)
{
    if (sizeof(unsigned) == sizeof(uint64_t)) {
        return semi_atomic_fetch_add_u64((volatile void *)dest, summand);
    }

    if (sizeof(unsigned) == sizeof(uint32_t)) {
        return semi_atomic_fetch_add_u32((volatile void *)dest, summand);
    }

    return semi_atomic_fetch_add_u16((volatile void *)dest, summand);
}

static inline uint8_t semi_atomic_fetch_sub_u8(volatile uint8_t *dest,
                                               uint8_t subtrahend);
static inline uint16_t semi_atomic_fetch_sub_u16(volatile uint16_t *dest,
                                                 uint16_t subtrahend);
static inline uint32_t semi_atomic_fetch_sub_u32(volatile uint32_t *dest,
                                                 uint32_t subtrahend);
static inline uint64_t semi_atomic_fetch_sub_u64(volatile uint64_t *dest,
                                                 uint64_t subtrahend);

static inline unsigned semi_atomic_fetch_sub_unsigned(volatile unsigned *dest,
                                                      unsigned subtrahend)
{
    if (sizeof(unsigned) == sizeof(uint64_t)) {
        return semi_atomic_fetch_sub_u64((volatile void *)dest, subtrahend);
    }

    if (sizeof(unsigned) == sizeof(uint32_t)) {
        return semi_atomic_fetch_sub_u32((volatile void *)dest, subtrahend);
    }

    return semi_atomic_fetch_sub_u16((volatile void *)dest, subtrahend);
}

static inline uint8_t semi_atomic_fetch_or_u8(volatile uint8_t *dest,
                                              uint8_t val);
static inline uint16_t semi_atomic_fetch_or_u16(volatile uint16_t *dest,
                                                uint16_t val);
static inline uint32_t semi_atomic_fetch_or_u32(volatile uint32_t *dest,
                                                uint32_t val);
static inline uint64_t semi_atomic_fetch_or_u64(volatile uint64_t *dest,
                                                uint64_t val);

static inline unsigned semi_atomic_fetch_or_unsigned(volatile unsigned *dest,
                                                     unsigned val)
{
    if (sizeof(unsigned) == sizeof(uint64_t)) {
        return semi_atomic_fetch_or_u64((volatile void *)dest, val);
    }

    if (sizeof(unsigned) == sizeof(uint32_t)) {
        return semi_atomic_fetch_or_u32((volatile void *)dest, val);
    }

    return semi_atomic_fetch_or_u16((volatile void *)dest, val);
}

static inline uint8_t semi_atomic_fetch_xor_u8(volatile uint8_t *dest,
                                               uint8_t val);
static inline uint16_t semi_atomic_fetch_xor_u16(volatile uint16_t *dest,
                                                 uint16_t val);
static inline uint32_t semi_atomic_fetch_xor_u32(volatile uint32_t *dest,
                                                 uint32_t val);
static inline uint64_t semi_atomic_fetch_xor_u64(volatile uint64_t *dest,
                                                 uint64_t val);

static inline unsigned semi_atomic_fetch_xor_unsigned(volatile unsigned *dest,
                                                      unsigned val)
{
    if (sizeof(unsigned) == sizeof(uint64_t)) {
        return semi_atomic_fetch_xor_u64((volatile void *)dest, val);
    }

    if (sizeof(unsigned) == sizeof(uint32_t)) {
        return semi_atomic_fetch_xor_u32((volatile void *)dest, val);
    }

    return semi_atomic_fetch_xor_u16((volatile void *)dest, val);
}

static inline uint8_t semi_atomic_fetch_and_u8(volatile uint8_t *dest,
                                               uint8_t val);
static inline uint16_t semi_atomic_fetch_and_u16(volatile uint16_t *dest,
                                                 uint16_t val);
static inline uint32_t semi_atomic_fetch_and_u32(volatile uint32_t *dest,
                                                 uint32_t val);
static inline uint64_t semi_atomic_fetch_and_u64(volatile uint64_t *dest,
                                                 uint64_t val);

static inline unsigned semi_atomic_fetch_and_unsigned(volatile unsigned *dest,
                                                      unsigned val)
{
    if (sizeof(unsigned) == sizeof(uint64_t)) {
        return semi_atomic_fetch_and_u64((volatile void *)dest, val);
    }

    if (sizeof(unsigned) == sizeof(uint32_t)) {
        return semi_atomic_fetch_and_u32((volatile void *)dest, val);
    }

    return semi_atomic_fetch_and_u16((volatile void *)dest, val);
}

/* Fallback implementations of atomic utility functions: */

/* Generates a static inline function implementing atomic_load_u<width>() */
#define ATOMIC_LOAD_IMPL(name, type)                                          \
    static inline type CONCAT(atomic_load_, name)(const volatile type *var)   \
    {                                                                         \
        unsigned state = irq_disable();                                       \
        type result = *var;                                                   \
        irq_restore(state);                                                   \
        return result;                                                        \
    }
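
/*
 * For illustration, ATOMIC_LOAD_IMPL(u8, uint8_t) expands to:
 *
 *     static inline uint8_t atomic_load_u8(const volatile uint8_t *var)
 *     {
 *         unsigned state = irq_disable();
 *         uint8_t result = *var;
 *         irq_restore(state);
 *         return result;
 *     }
 */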

#ifndef HAS_ATOMIC_LOAD_U8
ATOMIC_LOAD_IMPL(u8, uint8_t)
#endif
#ifndef HAS_ATOMIC_LOAD_U16
ATOMIC_LOAD_IMPL(u16, uint16_t)
#endif
#ifndef HAS_ATOMIC_LOAD_U32
ATOMIC_LOAD_IMPL(u32, uint32_t)
#endif
#ifndef HAS_ATOMIC_LOAD_U64
ATOMIC_LOAD_IMPL(u64, uint64_t)
#endif

/* Generates a static inline function implementing atomic_store_u<width>() */
#define ATOMIC_STORE_IMPL(name, type)                                         \
    static inline void CONCAT(atomic_store_, name)                            \
        (volatile type *dest, type val)                                       \
    {                                                                         \
        unsigned state = irq_disable();                                       \
        *dest = val;                                                          \
        irq_restore(state);                                                   \
    }

#ifndef HAS_ATOMIC_STORE_U8
ATOMIC_STORE_IMPL(u8, uint8_t)
#endif
#ifndef HAS_ATOMIC_STORE_U16
ATOMIC_STORE_IMPL(u16, uint16_t)
#endif
#ifndef HAS_ATOMIC_STORE_U32
ATOMIC_STORE_IMPL(u32, uint32_t)
#endif
#ifndef HAS_ATOMIC_STORE_U64
ATOMIC_STORE_IMPL(u64, uint64_t)
#endif

/* Generates a static inline function implementing
 * atomic_fetch_<op>_u<width>() */
#define ATOMIC_FETCH_OP_IMPL(opname, op, name, type)                          \
    static inline type CONCAT4(atomic_fetch_, opname, _, name)                \
        (volatile type *dest, type val)                                       \
    {                                                                         \
        unsigned state = irq_disable();                                       \
        const type result = *dest;                                            \
        *dest = result op val;                                                \
        irq_restore(state);                                                   \
        return result;                                                        \
    }
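
/*
 * For illustration, ATOMIC_FETCH_OP_IMPL(add, +, u8, uint8_t) expands to
 * atomic_fetch_add_u8(), which returns the value *dest held before the
 * addition:
 *
 *     static inline uint8_t atomic_fetch_add_u8(volatile uint8_t *dest,
 *                                               uint8_t val)
 *     {
 *         unsigned state = irq_disable();
 *         const uint8_t result = *dest;
 *         *dest = result + val;
 *         irq_restore(state);
 *         return result;
 *     }
 */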

#ifndef HAS_ATOMIC_FETCH_ADD_U8
ATOMIC_FETCH_OP_IMPL(add, +, u8, uint8_t)
#endif
#ifndef HAS_ATOMIC_FETCH_ADD_U16
ATOMIC_FETCH_OP_IMPL(add, +, u16, uint16_t)
#endif
#ifndef HAS_ATOMIC_FETCH_ADD_U32
ATOMIC_FETCH_OP_IMPL(add, +, u32, uint32_t)
#endif
#ifndef HAS_ATOMIC_FETCH_ADD_U64
ATOMIC_FETCH_OP_IMPL(add, +, u64, uint64_t)
#endif

#ifndef HAS_ATOMIC_FETCH_SUB_U8
ATOMIC_FETCH_OP_IMPL(sub, -, u8, uint8_t)
#endif
#ifndef HAS_ATOMIC_FETCH_SUB_U16
ATOMIC_FETCH_OP_IMPL(sub, -, u16, uint16_t)
#endif
#ifndef HAS_ATOMIC_FETCH_SUB_U32
ATOMIC_FETCH_OP_IMPL(sub, -, u32, uint32_t)
#endif
#ifndef HAS_ATOMIC_FETCH_SUB_U64
ATOMIC_FETCH_OP_IMPL(sub, -, u64, uint64_t)
#endif

#ifndef HAS_ATOMIC_FETCH_OR_U8
ATOMIC_FETCH_OP_IMPL(or, |, u8, uint8_t)
#endif
#ifndef HAS_ATOMIC_FETCH_OR_U16
ATOMIC_FETCH_OP_IMPL(or, |, u16, uint16_t)
#endif
#ifndef HAS_ATOMIC_FETCH_OR_U32
ATOMIC_FETCH_OP_IMPL(or, |, u32, uint32_t)
#endif
#ifndef HAS_ATOMIC_FETCH_OR_U64
ATOMIC_FETCH_OP_IMPL(or, |, u64, uint64_t)
#endif

#ifndef HAS_ATOMIC_FETCH_XOR_U8
ATOMIC_FETCH_OP_IMPL(xor, ^, u8, uint8_t)
#endif
#ifndef HAS_ATOMIC_FETCH_XOR_U16
ATOMIC_FETCH_OP_IMPL(xor, ^, u16, uint16_t)
#endif
#ifndef HAS_ATOMIC_FETCH_XOR_U32
ATOMIC_FETCH_OP_IMPL(xor, ^, u32, uint32_t)
#endif
#ifndef HAS_ATOMIC_FETCH_XOR_U64
ATOMIC_FETCH_OP_IMPL(xor, ^, u64, uint64_t)
#endif

#ifndef HAS_ATOMIC_FETCH_AND_U8
ATOMIC_FETCH_OP_IMPL(and, &, u8, uint8_t)
#endif
#ifndef HAS_ATOMIC_FETCH_AND_U16
ATOMIC_FETCH_OP_IMPL(and, &, u16, uint16_t)
#endif
#ifndef HAS_ATOMIC_FETCH_AND_U32
ATOMIC_FETCH_OP_IMPL(and, &, u32, uint32_t)
#endif
#ifndef HAS_ATOMIC_FETCH_AND_U64
ATOMIC_FETCH_OP_IMPL(and, &, u64, uint64_t)
#endif

#ifndef HAS_ATOMIC_BIT
static inline atomic_bit_u8_t atomic_bit_u8(volatile uint8_t *dest,
                                            uint8_t bit)
{
    atomic_bit_u8_t result = { .dest = dest, .mask = 1U << bit };
    return result;
}
static inline atomic_bit_u16_t atomic_bit_u16(volatile uint16_t *dest,
                                              uint8_t bit)
{
    atomic_bit_u16_t result = { .dest = dest, .mask = 1U << bit };
    return result;
}
static inline atomic_bit_u32_t atomic_bit_u32(volatile uint32_t *dest,
                                              uint8_t bit)
{
    atomic_bit_u32_t result = { .dest = dest, .mask = 1UL << bit };
    return result;
}
static inline atomic_bit_u64_t atomic_bit_u64(volatile uint64_t *dest,
                                              uint8_t bit)
{
    atomic_bit_u64_t result = { .dest = dest, .mask = 1ULL << bit };
    return result;
}
static inline void atomic_set_bit_u8(atomic_bit_u8_t bit)
{
    atomic_fetch_or_u8(bit.dest, bit.mask);
}
static inline void atomic_set_bit_u16(atomic_bit_u16_t bit)
{
    atomic_fetch_or_u16(bit.dest, bit.mask);
}
static inline void atomic_set_bit_u32(atomic_bit_u32_t bit)
{
    atomic_fetch_or_u32(bit.dest, bit.mask);
}
static inline void atomic_set_bit_u64(atomic_bit_u64_t bit)
{
    atomic_fetch_or_u64(bit.dest, bit.mask);
}
static inline void atomic_clear_bit_u8(atomic_bit_u8_t bit)
{
    atomic_fetch_and_u8(bit.dest, ~bit.mask);
}
static inline void atomic_clear_bit_u16(atomic_bit_u16_t bit)
{
    atomic_fetch_and_u16(bit.dest, ~bit.mask);
}
static inline void atomic_clear_bit_u32(atomic_bit_u32_t bit)
{
    atomic_fetch_and_u32(bit.dest, ~bit.mask);
}
static inline void atomic_clear_bit_u64(atomic_bit_u64_t bit)
{
    atomic_fetch_and_u64(bit.dest, ~bit.mask);
}
#endif

/* Provide semi_atomic_*() functions on top.
 *
 * - If atomic_<FOO>() is provided: Use this for semi_atomic_<FOO>() as well.
 * - Else:
 *     - If a matching `atomic_store_u<BITS>()` is provided: Only make the
 *       final store atomic, as we can avoid touching the IRQ state register
 *       that way.
 *     - Else: Since we need to disable and re-enable IRQs anyway, just use
 *       the fallback implementation of `atomic_<FOO>()` for
 *       `semi_atomic_<FOO>()` as well.
 */
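
/*
 * Illustration of the resulting semantics: when only the store is atomic
 * (say, HAS_ATOMIC_STORE_U8 defined without HAS_ATOMIC_FETCH_ADD_U8), two
 * concurrent semi_atomic_fetch_add_u8() calls can lose an update, because
 * the load and the store are two separate atomic steps:
 *
 *     A: load  -> 5
 *     B: load  -> 5
 *     A: store 5 + 1 = 6
 *     B: store 5 + 1 = 6   (A's increment is lost)
 *
 * Readers still never observe a torn (half-written) value, which is what
 * the "semi" guarantee provides and is sufficient for single-writer use.
 */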

/* FETCH_ADD */
#if defined(HAS_ATOMIC_FETCH_ADD_U8) || !defined(HAS_ATOMIC_STORE_U8)
static inline uint8_t semi_atomic_fetch_add_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    return atomic_fetch_add_u8(dest, val);
}
#else
static inline uint8_t semi_atomic_fetch_add_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    uint8_t result = atomic_load_u8(dest);
    atomic_store_u8(dest, result + val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_ADD_U8 || !HAS_ATOMIC_STORE_U8 */

#if defined(HAS_ATOMIC_FETCH_ADD_U16) || !defined(HAS_ATOMIC_STORE_U16)
static inline uint16_t semi_atomic_fetch_add_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    return atomic_fetch_add_u16(dest, val);
}
#else
static inline uint16_t semi_atomic_fetch_add_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    uint16_t result = atomic_load_u16(dest);
    atomic_store_u16(dest, result + val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_ADD_U16 || !HAS_ATOMIC_STORE_U16 */

#if defined(HAS_ATOMIC_FETCH_ADD_U32) || !defined(HAS_ATOMIC_STORE_U32)
static inline uint32_t semi_atomic_fetch_add_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    return atomic_fetch_add_u32(dest, val);
}
#else
static inline uint32_t semi_atomic_fetch_add_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    uint32_t result = atomic_load_u32(dest);
    atomic_store_u32(dest, result + val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_ADD_U32 || !HAS_ATOMIC_STORE_U32 */

#if defined(HAS_ATOMIC_FETCH_ADD_U64) || !defined(HAS_ATOMIC_STORE_U64)
static inline uint64_t semi_atomic_fetch_add_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    return atomic_fetch_add_u64(dest, val);
}
#else
static inline uint64_t semi_atomic_fetch_add_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    uint64_t result = atomic_load_u64(dest);
    atomic_store_u64(dest, result + val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_ADD_U64 || !HAS_ATOMIC_STORE_U64 */

/* FETCH_SUB */
#if defined(HAS_ATOMIC_FETCH_SUB_U8) || !defined(HAS_ATOMIC_STORE_U8)
static inline uint8_t semi_atomic_fetch_sub_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    return atomic_fetch_sub_u8(dest, val);
}
#else
static inline uint8_t semi_atomic_fetch_sub_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    uint8_t result = atomic_load_u8(dest);
    atomic_store_u8(dest, result - val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_SUB_U8 || !HAS_ATOMIC_STORE_U8 */

#if defined(HAS_ATOMIC_FETCH_SUB_U16) || !defined(HAS_ATOMIC_STORE_U16)
static inline uint16_t semi_atomic_fetch_sub_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    return atomic_fetch_sub_u16(dest, val);
}
#else
static inline uint16_t semi_atomic_fetch_sub_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    uint16_t result = atomic_load_u16(dest);
    atomic_store_u16(dest, result - val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_SUB_U16 || !HAS_ATOMIC_STORE_U16 */

#if defined(HAS_ATOMIC_FETCH_SUB_U32) || !defined(HAS_ATOMIC_STORE_U32)
static inline uint32_t semi_atomic_fetch_sub_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    return atomic_fetch_sub_u32(dest, val);
}
#else
static inline uint32_t semi_atomic_fetch_sub_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    uint32_t result = atomic_load_u32(dest);
    atomic_store_u32(dest, result - val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_SUB_U32 || !HAS_ATOMIC_STORE_U32 */

#if defined(HAS_ATOMIC_FETCH_SUB_U64) || !defined(HAS_ATOMIC_STORE_U64)
static inline uint64_t semi_atomic_fetch_sub_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    return atomic_fetch_sub_u64(dest, val);
}
#else
static inline uint64_t semi_atomic_fetch_sub_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    uint64_t result = atomic_load_u64(dest);
    atomic_store_u64(dest, result - val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_SUB_U64 || !HAS_ATOMIC_STORE_U64 */

/* FETCH_OR */
#if defined(HAS_ATOMIC_FETCH_OR_U8) || !defined(HAS_ATOMIC_STORE_U8)
static inline uint8_t semi_atomic_fetch_or_u8(volatile uint8_t *dest,
                                              uint8_t val)
{
    return atomic_fetch_or_u8(dest, val);
}
#else
static inline uint8_t semi_atomic_fetch_or_u8(volatile uint8_t *dest,
                                              uint8_t val)
{
    uint8_t result = atomic_load_u8(dest);
    atomic_store_u8(dest, result | val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_OR_U8 || !HAS_ATOMIC_STORE_U8 */

#if defined(HAS_ATOMIC_FETCH_OR_U16) || !defined(HAS_ATOMIC_STORE_U16)
static inline uint16_t semi_atomic_fetch_or_u16(volatile uint16_t *dest,
                                                uint16_t val)
{
    return atomic_fetch_or_u16(dest, val);
}
#else
static inline uint16_t semi_atomic_fetch_or_u16(volatile uint16_t *dest,
                                                uint16_t val)
{
    uint16_t result = atomic_load_u16(dest);
    atomic_store_u16(dest, result | val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_OR_U16 || !HAS_ATOMIC_STORE_U16 */

#if defined(HAS_ATOMIC_FETCH_OR_U32) || !defined(HAS_ATOMIC_STORE_U32)
static inline uint32_t semi_atomic_fetch_or_u32(volatile uint32_t *dest,
                                                uint32_t val)
{
    return atomic_fetch_or_u32(dest, val);
}
#else
static inline uint32_t semi_atomic_fetch_or_u32(volatile uint32_t *dest,
                                                uint32_t val)
{
    uint32_t result = atomic_load_u32(dest);
    atomic_store_u32(dest, result | val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_OR_U32 || !HAS_ATOMIC_STORE_U32 */

#if defined(HAS_ATOMIC_FETCH_OR_U64) || !defined(HAS_ATOMIC_STORE_U64)
static inline uint64_t semi_atomic_fetch_or_u64(volatile uint64_t *dest,
                                                uint64_t val)
{
    return atomic_fetch_or_u64(dest, val);
}
#else
static inline uint64_t semi_atomic_fetch_or_u64(volatile uint64_t *dest,
                                                uint64_t val)
{
    uint64_t result = atomic_load_u64(dest);
    atomic_store_u64(dest, result | val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_OR_U64 || !HAS_ATOMIC_STORE_U64 */

/* FETCH_XOR */
#if defined(HAS_ATOMIC_FETCH_XOR_U8) || !defined(HAS_ATOMIC_STORE_U8)
static inline uint8_t semi_atomic_fetch_xor_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    return atomic_fetch_xor_u8(dest, val);
}
#else
static inline uint8_t semi_atomic_fetch_xor_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    uint8_t result = atomic_load_u8(dest);
    atomic_store_u8(dest, result ^ val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_XOR_U8 || !HAS_ATOMIC_STORE_U8 */

#if defined(HAS_ATOMIC_FETCH_XOR_U16) || !defined(HAS_ATOMIC_STORE_U16)
static inline uint16_t semi_atomic_fetch_xor_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    return atomic_fetch_xor_u16(dest, val);
}
#else
static inline uint16_t semi_atomic_fetch_xor_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    uint16_t result = atomic_load_u16(dest);
    atomic_store_u16(dest, result ^ val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_XOR_U16 || !HAS_ATOMIC_STORE_U16 */

#if defined(HAS_ATOMIC_FETCH_XOR_U32) || !defined(HAS_ATOMIC_STORE_U32)
static inline uint32_t semi_atomic_fetch_xor_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    return atomic_fetch_xor_u32(dest, val);
}
#else
static inline uint32_t semi_atomic_fetch_xor_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    uint32_t result = atomic_load_u32(dest);
    atomic_store_u32(dest, result ^ val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_XOR_U32 || !HAS_ATOMIC_STORE_U32 */

#if defined(HAS_ATOMIC_FETCH_XOR_U64) || !defined(HAS_ATOMIC_STORE_U64)
static inline uint64_t semi_atomic_fetch_xor_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    return atomic_fetch_xor_u64(dest, val);
}
#else
static inline uint64_t semi_atomic_fetch_xor_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    uint64_t result = atomic_load_u64(dest);
    atomic_store_u64(dest, result ^ val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_XOR_U64 || !HAS_ATOMIC_STORE_U64 */

/* FETCH_AND */
#if defined(HAS_ATOMIC_FETCH_AND_U8) || !defined(HAS_ATOMIC_STORE_U8)
static inline uint8_t semi_atomic_fetch_and_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    return atomic_fetch_and_u8(dest, val);
}
#else
static inline uint8_t semi_atomic_fetch_and_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    uint8_t result = atomic_load_u8(dest);
    atomic_store_u8(dest, result & val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_AND_U8 || !HAS_ATOMIC_STORE_U8 */

#if defined(HAS_ATOMIC_FETCH_AND_U16) || !defined(HAS_ATOMIC_STORE_U16)
static inline uint16_t semi_atomic_fetch_and_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    return atomic_fetch_and_u16(dest, val);
}
#else
static inline uint16_t semi_atomic_fetch_and_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    uint16_t result = atomic_load_u16(dest);
    atomic_store_u16(dest, result & val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_AND_U16 || !HAS_ATOMIC_STORE_U16 */

#if defined(HAS_ATOMIC_FETCH_AND_U32) || !defined(HAS_ATOMIC_STORE_U32)
static inline uint32_t semi_atomic_fetch_and_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    return atomic_fetch_and_u32(dest, val);
}
#else
static inline uint32_t semi_atomic_fetch_and_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    uint32_t result = atomic_load_u32(dest);
    atomic_store_u32(dest, result & val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_AND_U32 || !HAS_ATOMIC_STORE_U32 */

#if defined(HAS_ATOMIC_FETCH_AND_U64) || !defined(HAS_ATOMIC_STORE_U64)
static inline uint64_t semi_atomic_fetch_and_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    return atomic_fetch_and_u64(dest, val);
}
#else
static inline uint64_t semi_atomic_fetch_and_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    uint64_t result = atomic_load_u64(dest);
    atomic_store_u64(dest, result & val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_AND_U64 || !HAS_ATOMIC_STORE_U64 */

#ifdef __cplusplus
}
#endif

/* NOLINTEND(bugprone-macro-parentheses, readability-inconsistent-declaration-parameter-name) */