atomic_utils.h
Go to the documentation of this file.
1 /*
2  * SPDX-FileCopyrightText: 2020 Otto-von-Guericke-Universität Magdeburg
3  * SPDX-License-Identifier: LGPL-2.1-only
4  */
5 
6 #pragma once
7 
135 #include <limits.h>
136 #include <stdint.h>
137 
138 #include "irq.h"
139 #include "macros/utils.h"
140 #include "sched.h"
141 
142 #include "atomic_utils_arch.h" /* IWYU pragma: export */
143 
144 #ifdef __cplusplus
145 extern "C" {
146 #endif
147 
148 /* NOLINTBEGIN(bugprone-macro-parentheses, readability-inconsistent-declaration-parameter-name)
149  *
150  * The macros ATOMIC_LOAD_IMPL() and friends do not surround the argument used
151  * to pass the type with parenthesis. Suppressing the clang-tidy warning here,
 * as adding parentheses around a type would be a syntax error.
 *
 * The macro ATOMIC_FETCH_OP_IMPL() uses `val` as argument name. But we want
 * to allow the declaration to be more specific (e.g. summand instead of val).
 */
157 
158 /* Declarations and documentation: */
159 
#if !defined(HAS_ATOMIC_BIT) || defined(DOXYGEN)
/* NOTE(review): the closing `} <name>_t;` lines of these typedefs were lost
 * in extraction; the type names are reconstructed from their use in
 * atomic_bit_u8() ... atomic_bit_u64() further below. */

/**
 * @brief   Type specifying a bit in an uint8_t
 *
 * @warning This is an implementation specific type!
 */
typedef struct {
    volatile uint8_t *dest;     /**< Memory containing the bit to set/clear */
    uint8_t mask;               /**< Bitmask used for setting the bit */
} atomic_bit_u8_t;

/**
 * @brief   Type specifying a bit in an uint16_t
 *
 * @warning This is an implementation specific type!
 */
typedef struct {
    volatile uint16_t *dest;    /**< Memory containing the bit to set/clear */
    uint16_t mask;              /**< Bitmask used for setting the bit */
} atomic_bit_u16_t;

/**
 * @brief   Type specifying a bit in an uint32_t
 *
 * @warning This is an implementation specific type!
 */
typedef struct {
    volatile uint32_t *dest;    /**< Memory containing the bit to set/clear */
    uint32_t mask;              /**< Bitmask used for setting the bit */
} atomic_bit_u32_t;

/**
 * @brief   Type specifying a bit in an uint64_t
 *
 * @warning This is an implementation specific type!
 */
typedef struct {
    volatile uint64_t *dest;    /**< Memory containing the bit to set/clear */
    uint64_t mask;              /**< Bitmask used for setting the bit */
} atomic_bit_u64_t;

#endif /* HAS_ATOMIC_BIT */
217 
221 #if UINT_MAX == UINT16_MAX
223 #elif UINT_MAX == UINT32_MAX
225 #else
227 #endif
228 
239 static inline uint8_t atomic_load_u8(const volatile uint8_t *var);
246 static inline uint16_t atomic_load_u16(const volatile uint16_t *var);
253 static inline uint32_t atomic_load_u32(const volatile uint32_t *var);
260 static inline uint64_t atomic_load_u64(const volatile uint64_t *var);
/**
 * @brief   Load an `unsigned int` atomically
 *
 * @param[in] var   Variable to load atomically
 * @return    The value stored in @p var
 */
static inline unsigned atomic_load_unsigned(const volatile unsigned *var)
{
    /* The width of `unsigned` differs between platforms, so dispatch to the
     * matching fixed-width loader. The conditions are compile-time constant,
     * hence dead branches are eliminated by the optimizer. */
    if (sizeof(uint64_t) == sizeof(unsigned)) {
        return atomic_load_u64((volatile void *)var);
    }
    else if (sizeof(uint32_t) == sizeof(unsigned)) {
        return atomic_load_u32((volatile void *)var);
    }
    else {
        return atomic_load_u16((volatile void *)var);
    }
}
283 
/**
 * @brief   Load an `uintptr_t` atomically
 *
 * @param[in] var   Variable to load atomically
 * @return    The value stored in @p var
 */
static inline uintptr_t atomic_load_uintptr(const volatile uintptr_t *var)
{
    /* Dispatch on pointer width; `sizeof` is a compile-time constant, so
     * only one case survives optimization. */
    switch (sizeof(uintptr_t)) {
    case 2:
        return atomic_load_u16((const volatile uint16_t *)var);
    case 4:
        return atomic_load_u32((const volatile uint32_t *)(uintptr_t)var);
    default:
        return atomic_load_u64((const volatile uint64_t *)(uintptr_t)var);
    }
}
/**
 * @brief   Load a `void *` atomically
 *
 * @param[in] ptr_addr  Address of the pointer variable to load atomically
 * @return    The pointer stored at @p ptr_addr
 */
static inline void * atomic_load_ptr(void **ptr_addr)
{
    const volatile uintptr_t *addr = (const volatile uintptr_t *)ptr_addr;
    return (void *)atomic_load_uintptr(addr);
}
316 static inline kernel_pid_t atomic_load_kernel_pid(const volatile kernel_pid_t *var)
317 {
318  return (kernel_pid_t)atomic_load_u16((const volatile uint16_t *)var);
319 }
331 static inline void atomic_store_u8(volatile uint8_t *dest, uint8_t val);
337 static inline void atomic_store_u16(volatile uint16_t *dest, uint16_t val);
343 static inline void atomic_store_u32(volatile uint32_t *dest, uint32_t val);
349 static inline void atomic_store_u64(volatile uint64_t *dest, uint64_t val);
/**
 * @brief   Store an `unsigned int` atomically
 *
 * @param[out] dest Location to atomically write the new value to
 * @param[in]  val  Value to write
 */
static inline void atomic_store_unsigned(volatile unsigned *dest, unsigned val)
{
    /* Width dispatch mirrors atomic_load_unsigned(); all conditions are
     * compile-time constants. */
    if (sizeof(uint64_t) == sizeof(unsigned)) {
        atomic_store_u64((volatile void *)dest, val);
        return;
    }
    if (sizeof(uint32_t) == sizeof(unsigned)) {
        atomic_store_u32((volatile void *)dest, val);
        return;
    }
    atomic_store_u16((volatile void *)dest, val);
}
371 
/**
 * @brief   Store an `uintptr_t` atomically
 *
 * @param[out] dest Location to atomically write the new value to
 * @param[in]  val  Value to write
 */
static inline void atomic_store_uintptr(volatile uintptr_t *dest, uintptr_t val)
{
    /* Dispatch on pointer width; dead branches fold away at compile time. */
    switch (sizeof(uintptr_t)) {
    case 2:
        atomic_store_u16((volatile uint16_t *)dest, (uint16_t)val);
        break;
    case 4:
        atomic_store_u32((volatile uint32_t *)(uintptr_t)dest, (uint32_t)val);
        break;
    default:
        atomic_store_u64((volatile uint64_t *)(uintptr_t)dest, (uint64_t)val);
        break;
    }
}
/**
 * @brief   Store a `void *` atomically
 *
 * @param[out] dest Location to atomically write the new pointer to
 * @param[in]  val  Pointer value to write
 */
static inline void atomic_store_ptr(void **dest, const void *val)
{
    volatile uintptr_t *target = (volatile uintptr_t *)dest;
    atomic_store_uintptr(target, (uintptr_t)val);
}
405 static inline void atomic_store_kernel_pid(volatile kernel_pid_t *dest,
406  kernel_pid_t val)
407 {
408  atomic_store_u16((volatile uint16_t *)dest, (uint16_t)val);
409 }
422 static inline uint8_t atomic_fetch_add_u8(volatile uint8_t *dest,
423  uint8_t summand);
430 static inline uint16_t atomic_fetch_add_u16(volatile uint16_t *dest,
431  uint16_t summand);
438 static inline uint32_t atomic_fetch_add_u32(volatile uint32_t *dest,
439  uint32_t summand);
446 static inline uint64_t atomic_fetch_add_u64(volatile uint64_t *dest,
447  uint64_t summand);
/**
 * @brief   Atomically add a value onto a given value
 *
 * @param[in,out] dest      Add @p summand onto this value atomically in-place
 * @param[in]     summand   Value to add onto @p dest
 * @return  The value previously stored in @p dest
 */
static inline unsigned atomic_fetch_add_unsigned(volatile unsigned *dest,
                                                 unsigned summand)
{
    /* Compile-time width dispatch to the matching fixed-width helper. */
    if (sizeof(uint64_t) == sizeof(unsigned)) {
        return atomic_fetch_add_u64((volatile void *)dest, summand);
    }
    else if (sizeof(uint32_t) == sizeof(unsigned)) {
        return atomic_fetch_add_u32((volatile void *)dest, summand);
    }
    else {
        return atomic_fetch_add_u16((volatile void *)dest, summand);
    }
}
484 static inline uint8_t atomic_fetch_sub_u8(volatile uint8_t *dest,
485  uint8_t subtrahend);
493 static inline uint16_t atomic_fetch_sub_u16(volatile uint16_t *dest,
494  uint16_t subtrahend);
502 static inline uint32_t atomic_fetch_sub_u32(volatile uint32_t *dest,
503  uint32_t subtrahend);
511 static inline uint64_t atomic_fetch_sub_u64(volatile uint64_t *dest,
512  uint64_t subtrahend);
/**
 * @brief   Atomically subtract a value from a given value
 *
 * @param[in,out] dest          Subtract @p subtrahend from this value
 *                              atomically in-place
 * @param[in]     subtrahend    Value to subtract from @p dest
 * @return  The value previously stored in @p dest
 */
static inline unsigned atomic_fetch_sub_unsigned(volatile unsigned *dest,
                                                 unsigned subtrahend)
{
    /* Compile-time width dispatch to the matching fixed-width helper. */
    if (sizeof(uint64_t) == sizeof(unsigned)) {
        return atomic_fetch_sub_u64((volatile void *)dest, subtrahend);
    }
    else if (sizeof(uint32_t) == sizeof(unsigned)) {
        return atomic_fetch_sub_u32((volatile void *)dest, subtrahend);
    }
    else {
        return atomic_fetch_sub_u16((volatile void *)dest, subtrahend);
    }
}
550 static inline uint8_t atomic_fetch_or_u8(volatile uint8_t *dest, uint8_t val);
558 static inline uint16_t atomic_fetch_or_u16(volatile uint16_t *dest,
559  uint16_t val);
567 static inline uint32_t atomic_fetch_or_u32(volatile uint32_t *dest,
568  uint32_t val);
576 static inline uint64_t atomic_fetch_or_u64(volatile uint64_t *dest,
577  uint64_t val);
/**
 * @brief   Atomic version of `*dest |= val`
 *
 * @param[in,out] dest  Bitwise-OR @p val into this value atomically in-place
 * @param[in]     val   Bits to set in @p dest
 * @return  The value previously stored in @p dest
 */
static inline unsigned atomic_fetch_or_unsigned(volatile unsigned *dest,
                                                unsigned val)
{
    /* Compile-time width dispatch to the matching fixed-width helper. */
    if (sizeof(uint64_t) == sizeof(unsigned)) {
        return atomic_fetch_or_u64((volatile void *)dest, val);
    }
    else if (sizeof(uint32_t) == sizeof(unsigned)) {
        return atomic_fetch_or_u32((volatile void *)dest, val);
    }
    else {
        return atomic_fetch_or_u16((volatile void *)dest, val);
    }
}
615 static inline uint8_t atomic_fetch_xor_u8(volatile uint8_t *dest, uint8_t val);
623 static inline uint16_t atomic_fetch_xor_u16(volatile uint16_t *dest,
624  uint16_t val);
632 static inline uint32_t atomic_fetch_xor_u32(volatile uint32_t *dest,
633  uint32_t val);
641 static inline uint64_t atomic_fetch_xor_u64(volatile uint64_t *dest,
642  uint64_t val);
/**
 * @brief   Atomic version of `*dest ^= val`
 *
 * @param[in,out] dest  Bitwise-XOR @p val into this value atomically in-place
 * @param[in]     val   Bits to toggle in @p dest
 * @return  The value previously stored in @p dest
 */
static inline unsigned atomic_fetch_xor_unsigned(volatile unsigned *dest,
                                                 unsigned val)
{
    /* Compile-time width dispatch to the matching fixed-width helper. */
    if (sizeof(uint64_t) == sizeof(unsigned)) {
        return atomic_fetch_xor_u64((volatile void *)dest, val);
    }
    else if (sizeof(uint32_t) == sizeof(unsigned)) {
        return atomic_fetch_xor_u32((volatile void *)dest, val);
    }
    else {
        return atomic_fetch_xor_u16((volatile void *)dest, val);
    }
}
680 static inline uint8_t atomic_fetch_and_u8(volatile uint8_t *dest, uint8_t val);
688 static inline uint16_t atomic_fetch_and_u16(volatile uint16_t *dest,
689  uint16_t val);
697 static inline uint32_t atomic_fetch_and_u32(volatile uint32_t *dest,
698  uint32_t val);
706 static inline uint64_t atomic_fetch_and_u64(volatile uint64_t *dest,
707  uint64_t val);
/**
 * @brief   Atomic version of `*dest &= val`
 *
 * @param[in,out] dest  Bitwise-AND @p val into this value atomically in-place
 * @param[in]     val   Bitmask to apply to @p dest
 * @return  The value previously stored in @p dest
 */
static inline unsigned atomic_fetch_and_unsigned(volatile unsigned *dest,
                                                 unsigned val)
{
    /* Compile-time width dispatch to the matching fixed-width helper. */
    if (sizeof(uint64_t) == sizeof(unsigned)) {
        return atomic_fetch_and_u64((volatile void *)dest, val);
    }
    else if (sizeof(uint32_t) == sizeof(unsigned)) {
        return atomic_fetch_and_u32((volatile void *)dest, val);
    }
    else {
        return atomic_fetch_and_u16((volatile void *)dest, val);
    }
}
745 static inline atomic_bit_u8_t atomic_bit_u8(volatile uint8_t *dest,
746  uint8_t bit);
747 
755 static inline atomic_bit_u16_t atomic_bit_u16(volatile uint16_t *dest,
756  uint8_t bit);
757 
765 static inline atomic_bit_u32_t atomic_bit_u32(volatile uint32_t *dest,
766  uint8_t bit);
767 
775 static inline atomic_bit_u64_t atomic_bit_u64(volatile uint64_t *dest,
776  uint8_t bit);
777 
785 static inline atomic_bit_unsigned_t atomic_bit_unsigned(volatile unsigned *dest,
786  uint8_t bit)
787 {
788  /* Some archs define uint32_t as unsigned long, uint16_t as short etc.,
789  * we need to cast. */
790 #if UINT_MAX == UINT16_MAX
791  return atomic_bit_u16((uint16_t volatile *)dest, bit);
792 #elif UINT_MAX == UINT32_MAX
793  return atomic_bit_u32((uint32_t volatile *)dest, bit);
794 #else
795  return atomic_bit_u64((uint64_t volatile *)dest, bit);
796 #endif
797 }
808 static inline void atomic_set_bit_u8(atomic_bit_u8_t bit);
813 static inline void atomic_set_bit_u16(atomic_bit_u16_t bit);
818 static inline void atomic_set_bit_u32(atomic_bit_u32_t bit);
823 static inline void atomic_set_bit_u64(atomic_bit_u64_t bit);
829 {
830 #if UINT_MAX == UINT16_MAX
831  atomic_set_bit_u16(bit);
832 #elif UINT_MAX == UINT32_MAX
833  atomic_set_bit_u32(bit);
834 #else
835  atomic_set_bit_u64(bit);
836 #endif
837 }
848 static inline void atomic_clear_bit_u8(atomic_bit_u8_t bit);
853 static inline void atomic_clear_bit_u16(atomic_bit_u16_t bit);
858 static inline void atomic_clear_bit_u32(atomic_bit_u32_t bit);
863 static inline void atomic_clear_bit_u64(atomic_bit_u64_t bit);
869 {
870 #if UINT_MAX == UINT16_MAX
872 #elif UINT_MAX == UINT32_MAX
874 #else
876 #endif
877 }
891 static inline uint8_t semi_atomic_fetch_add_u8(volatile uint8_t *dest,
892  uint8_t summand);
900 static inline uint16_t semi_atomic_fetch_add_u16(volatile uint16_t *dest,
901  uint16_t summand);
909 static inline uint32_t semi_atomic_fetch_add_u32(volatile uint32_t *dest,
910  uint32_t summand);
918 static inline uint64_t semi_atomic_fetch_add_u64(volatile uint64_t *dest,
919  uint64_t summand);
931 static inline unsigned semi_atomic_fetch_add_unsigned(volatile unsigned *dest,
932  unsigned summand)
933 {
934  if (sizeof(unsigned) == sizeof(uint64_t)) {
935  return semi_atomic_fetch_add_u64((volatile void *)dest, summand);
936  }
937 
938  if (sizeof(unsigned) == sizeof(uint32_t)) {
939  return semi_atomic_fetch_add_u32((volatile void *)dest, summand);
940  }
941 
942  return semi_atomic_fetch_add_u16((volatile void *)dest, summand);
943 }
957 static inline uint8_t semi_atomic_fetch_sub_u8(volatile uint8_t *dest,
958  uint8_t subtrahend);
966 static inline uint16_t semi_atomic_fetch_sub_u16(volatile uint16_t *dest,
967  uint16_t subtrahend);
975 static inline uint32_t semi_atomic_fetch_sub_u32(volatile uint32_t *dest,
976  uint32_t subtrahend);
984 static inline uint64_t semi_atomic_fetch_sub_u64(volatile uint64_t *dest,
985  uint64_t subtrahend);
997 static inline unsigned semi_atomic_fetch_sub_unsigned(volatile unsigned *dest,
998  unsigned subtrahend)
999 {
1000  if (sizeof(unsigned) == sizeof(uint64_t)) {
1001  return semi_atomic_fetch_sub_u64((volatile void *)dest, subtrahend);
1002  }
1003 
1004  if (sizeof(unsigned) == sizeof(uint32_t)) {
1005  return semi_atomic_fetch_sub_u32((volatile void *)dest, subtrahend);
1006  }
1007 
1008  return semi_atomic_fetch_sub_u16((volatile void *)dest, subtrahend);
1009 }
1023 static inline uint8_t semi_atomic_fetch_or_u8(volatile uint8_t *dest, uint8_t val);
1031 static inline uint16_t semi_atomic_fetch_or_u16(volatile uint16_t *dest,
1032  uint16_t val);
1040 static inline uint32_t semi_atomic_fetch_or_u32(volatile uint32_t *dest,
1041  uint32_t val);
1049 static inline uint64_t semi_atomic_fetch_or_u64(volatile uint64_t *dest,
1050  uint64_t val);
1062 static inline unsigned semi_atomic_fetch_or_unsigned(volatile unsigned *dest,
1063  unsigned val)
1064 {
1065  if (sizeof(unsigned) == sizeof(uint64_t)) {
1066  return semi_atomic_fetch_or_u64((volatile void *)dest, val);
1067  }
1068 
1069  if (sizeof(unsigned) == sizeof(uint32_t)) {
1070  return semi_atomic_fetch_or_u32((volatile void *)dest, val);
1071  }
1072 
1073  return semi_atomic_fetch_or_u16((volatile void *)dest, val);
1074 }
1088 static inline uint8_t semi_atomic_fetch_xor_u8(volatile uint8_t *dest,
1089  uint8_t val);
1097 static inline uint16_t semi_atomic_fetch_xor_u16(volatile uint16_t *dest,
1098  uint16_t val);
1106 static inline uint32_t semi_atomic_fetch_xor_u32(volatile uint32_t *dest,
1107  uint32_t val);
1115 static inline uint64_t semi_atomic_fetch_xor_u64(volatile uint64_t *dest,
1116  uint64_t val);
1128 static inline unsigned semi_atomic_fetch_xor_unsigned(volatile unsigned *dest,
1129  unsigned val)
1130 {
1131  if (sizeof(unsigned) == sizeof(uint64_t)) {
1132  return semi_atomic_fetch_xor_u64((volatile void *)dest, val);
1133  }
1134 
1135  if (sizeof(unsigned) == sizeof(uint32_t)) {
1136  return semi_atomic_fetch_xor_u32((volatile void *)dest, val);
1137  }
1138 
1139  return semi_atomic_fetch_xor_u16((volatile void *)dest, val);
1140 }
1154 static inline uint8_t semi_atomic_fetch_and_u8(volatile uint8_t *dest,
1155  uint8_t val);
1163 static inline uint16_t semi_atomic_fetch_and_u16(volatile uint16_t *dest,
1164  uint16_t val);
1172 static inline uint32_t semi_atomic_fetch_and_u32(volatile uint32_t *dest,
1173  uint32_t val);
1181 static inline uint64_t semi_atomic_fetch_and_u64(volatile uint64_t *dest,
1182  uint64_t val);
1194 static inline unsigned semi_atomic_fetch_and_unsigned(volatile unsigned *dest,
1195  unsigned val)
1196 {
1197  if (sizeof(unsigned) == sizeof(uint64_t)) {
1198  return semi_atomic_fetch_and_u64((volatile void *)dest, val);
1199  }
1200 
1201  if (sizeof(unsigned) == sizeof(uint32_t)) {
1202  return semi_atomic_fetch_and_u32((volatile void *)dest, val);
1203  }
1204 
1205  return semi_atomic_fetch_and_u16((volatile void *)dest, val);
1206 }
1209 /* Fallback implementations of atomic utility functions: */
1210 
/**
 * @brief   Generates a static inline function implementing
 *          `atomic_load_u<width>()`
 *
 * @param   name    Function name suffix, e.g. `u8` for `atomic_load_u8()`
 * @param   type    Type to load, e.g. `uint8_t`
 *
 * The fallback achieves atomicity by disabling IRQs around a plain load.
 */
#define ATOMIC_LOAD_IMPL(name, type) \
    static inline type CONCAT(atomic_load_, name)(const volatile type *var) \
    { \
        unsigned state = irq_disable(); \
        type result = *var; \
        irq_restore(state); \
        return result; \
    }
1226 
1227 #ifndef HAS_ATOMIC_LOAD_U8
1228 ATOMIC_LOAD_IMPL(u8, uint8_t)
1229 #endif
1230 #ifndef HAS_ATOMIC_LOAD_U16
1231 ATOMIC_LOAD_IMPL(u16, uint16_t)
1232 #endif
1233 #ifndef HAS_ATOMIC_LOAD_U32
1234 ATOMIC_LOAD_IMPL(u32, uint32_t)
1235 #endif
1236 #ifndef HAS_ATOMIC_LOAD_U64
1237 ATOMIC_LOAD_IMPL(u64, uint64_t)
1238 #endif
1239 
/**
 * @brief   Generates a static inline function implementing
 *          `atomic_store_u<width>()`
 *
 * @param   name    Function name suffix, e.g. `u8` for `atomic_store_u8()`
 * @param   type    Type to store, e.g. `uint8_t`
 *
 * The fallback achieves atomicity by disabling IRQs around a plain store.
 */
#define ATOMIC_STORE_IMPL(name, type) \
    static inline void CONCAT(atomic_store_, name) \
        (volatile type *dest, type val) \
    { \
        unsigned state = irq_disable(); \
        *dest = val; \
        irq_restore(state); \
    }
1255 
1256 #ifndef HAS_ATOMIC_STORE_U8
1257 ATOMIC_STORE_IMPL(u8, uint8_t)
1258 #endif
1259 #ifndef HAS_ATOMIC_STORE_U16
1260 ATOMIC_STORE_IMPL(u16, uint16_t)
1261 #endif
1262 #ifndef HAS_ATOMIC_STORE_U32
1263 ATOMIC_STORE_IMPL(u32, uint32_t)
1264 #endif
1265 #ifndef HAS_ATOMIC_STORE_U64
1266 ATOMIC_STORE_IMPL(u64, uint64_t)
1267 #endif
1268 
/**
 * @brief   Generates a static inline function implementing
 *          `atomic_fetch_<op>_u<width>()`
 *
 * @param   opname  Name of the operation, e.g. `add` → `atomic_fetch_add_*`
 * @param   op      Operator to apply, e.g. `+`
 * @param   name    Function name suffix, e.g. `u8`
 * @param   type    Operand type, e.g. `uint8_t`
 *
 * The fallback performs read-modify-write with IRQs disabled and returns the
 * value held before the operation (fetch semantics).
 */
#define ATOMIC_FETCH_OP_IMPL(opname, op, name, type) \
    static inline type CONCAT4(atomic_fetch_, opname, _, name) \
        (volatile type *dest, type val) \
    { \
        unsigned state = irq_disable(); \
        const type result = *dest; \
        *dest = result op val; \
        irq_restore(state); \
        return result; \
    }
1288 
1289 #ifndef HAS_ATOMIC_FETCH_ADD_U8
1290 ATOMIC_FETCH_OP_IMPL(add, +, u8, uint8_t)
1291 #endif
1292 #ifndef HAS_ATOMIC_FETCH_ADD_U16
1293 ATOMIC_FETCH_OP_IMPL(add, +, u16, uint16_t)
1294 #endif
1295 #ifndef HAS_ATOMIC_FETCH_ADD_U32
1296 ATOMIC_FETCH_OP_IMPL(add, +, u32, uint32_t)
1297 #endif
1298 #ifndef HAS_ATOMIC_FETCH_ADD_U64
1299 ATOMIC_FETCH_OP_IMPL(add, +, u64, uint64_t)
1300 #endif
1301 
1302 #ifndef HAS_ATOMIC_FETCH_SUB_U8
1303 ATOMIC_FETCH_OP_IMPL(sub, -, u8, uint8_t)
1304 #endif
1305 #ifndef HAS_ATOMIC_FETCH_SUB_U16
1306 ATOMIC_FETCH_OP_IMPL(sub, -, u16, uint16_t)
1307 #endif
1308 #ifndef HAS_ATOMIC_FETCH_SUB_U32
1309 ATOMIC_FETCH_OP_IMPL(sub, -, u32, uint32_t)
1310 #endif
1311 #ifndef HAS_ATOMIC_FETCH_SUB_U64
1312 ATOMIC_FETCH_OP_IMPL(sub, -, u64, uint64_t)
1313 #endif
1314 
1315 #ifndef HAS_ATOMIC_FETCH_OR_U8
1316 ATOMIC_FETCH_OP_IMPL(or, |, u8, uint8_t)
1317 #endif
1318 #ifndef HAS_ATOMIC_FETCH_OR_U16
1319 ATOMIC_FETCH_OP_IMPL(or, |, u16, uint16_t)
1320 #endif
1321 #ifndef HAS_ATOMIC_FETCH_OR_U32
1322 ATOMIC_FETCH_OP_IMPL(or, |, u32, uint32_t)
1323 #endif
1324 #ifndef HAS_ATOMIC_FETCH_OR_U64
1325 ATOMIC_FETCH_OP_IMPL(or, |, u64, uint64_t)
1326 #endif
1327 
1328 #ifndef HAS_ATOMIC_FETCH_XOR_U8
1329 ATOMIC_FETCH_OP_IMPL(xor, ^, u8, uint8_t)
1330 #endif
1331 #ifndef HAS_ATOMIC_FETCH_XOR_U16
1332 ATOMIC_FETCH_OP_IMPL(xor, ^, u16, uint16_t)
1333 #endif
1334 #ifndef HAS_ATOMIC_FETCH_XOR_U32
1335 ATOMIC_FETCH_OP_IMPL(xor, ^, u32, uint32_t)
1336 #endif
1337 #ifndef HAS_ATOMIC_FETCH_XOR_U64
1338 ATOMIC_FETCH_OP_IMPL(xor, ^, u64, uint64_t)
1339 #endif
1340 
1341 #ifndef HAS_ATOMIC_FETCH_AND_U8
1342 ATOMIC_FETCH_OP_IMPL(and, &, u8, uint8_t)
1343 #endif
1344 #ifndef HAS_ATOMIC_FETCH_AND_U16
1345 ATOMIC_FETCH_OP_IMPL(and, &, u16, uint16_t)
1346 #endif
1347 #ifndef HAS_ATOMIC_FETCH_AND_U32
1348 ATOMIC_FETCH_OP_IMPL(and, &, u32, uint32_t)
1349 #endif
1350 #ifndef HAS_ATOMIC_FETCH_AND_U64
1351 ATOMIC_FETCH_OP_IMPL(and, &, u64, uint64_t)
1352 #endif
1353 
#ifndef HAS_ATOMIC_BIT
/* Fallback implementation of the bit helpers: a "bit reference" is simply
 * the destination address plus a precomputed mask; set reuses the atomic
 * fetch-or and clear the atomic fetch-and operations. */
static inline atomic_bit_u8_t atomic_bit_u8(volatile uint8_t *dest,
                                            uint8_t bit)
{
    atomic_bit_u8_t result = { .dest = dest, .mask = 1U << bit };
    return result;
}
static inline atomic_bit_u16_t atomic_bit_u16(volatile uint16_t *dest,
                                              uint8_t bit)
{
    atomic_bit_u16_t result = { .dest = dest, .mask = 1U << bit };
    return result;
}
static inline atomic_bit_u32_t atomic_bit_u32(volatile uint32_t *dest,
                                              uint8_t bit)
{
    /* 1UL: `unsigned int` may be only 16 bit wide on some platforms */
    atomic_bit_u32_t result = { .dest = dest, .mask = 1UL << bit };
    return result;
}
static inline atomic_bit_u64_t atomic_bit_u64(volatile uint64_t *dest,
                                              uint8_t bit)
{
    /* 1ULL: shift must be done in a 64 bit type */
    atomic_bit_u64_t result = { .dest = dest, .mask = 1ULL << bit };
    return result;
}
static inline void atomic_set_bit_u8(atomic_bit_u8_t bit)
{
    atomic_fetch_or_u8(bit.dest, bit.mask);
}
static inline void atomic_set_bit_u16(atomic_bit_u16_t bit)
{
    atomic_fetch_or_u16(bit.dest, bit.mask);
}
static inline void atomic_set_bit_u32(atomic_bit_u32_t bit)
{
    atomic_fetch_or_u32(bit.dest, bit.mask);
}
static inline void atomic_set_bit_u64(atomic_bit_u64_t bit)
{
    atomic_fetch_or_u64(bit.dest, bit.mask);
}
static inline void atomic_clear_bit_u8(atomic_bit_u8_t bit)
{
    atomic_fetch_and_u8(bit.dest, ~bit.mask);
}
1400 {
1401  atomic_fetch_and_u16(bit.dest, ~bit.mask);
1402 }
1404 {
1405  atomic_fetch_and_u32(bit.dest, ~bit.mask);
1406 }
1408 {
1409  atomic_fetch_and_u64(bit.dest, ~bit.mask);
1410 }
1411 #endif
1412 
1413 /* Provide semi_atomic_*() functions on top.
1414  *
1415  * - If atomic_<FOO>() is provided: Use this for semi_atomic_<FOO>() as well
1416  * - Else:
1417  * - If matching `atomic_store_u<BITS>()` is provided: Only make final
1418  * store atomic, as we can avoid touching the IRQ state register that
1419  * way
1420  * - Else: We need to disable and re-enable IRQs anyway, we just use the
1421  * fallback implementation of `atomic_<FOO>()` for `semi_atomic<FOO>()`
1422  * as well
1423  */
1424 
/* FETCH_ADD */
/* For each width: if the hardware provides an atomic fetch-add, or if there
 * is no atomic store to build on anyway, the fully atomic version is used
 * directly. Otherwise only the final store is made atomic ("semi-atomic"),
 * which avoids touching the IRQ state. */
#if defined(HAS_ATOMIC_FETCH_ADD_U8) || !defined(HAS_ATOMIC_STORE_U8)
static inline uint8_t semi_atomic_fetch_add_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    return atomic_fetch_add_u8(dest, val);
}
#else
static inline uint8_t semi_atomic_fetch_add_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    uint8_t result = atomic_load_u8(dest);
    atomic_store_u8(dest, result + val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_ADD_U8 || !HAS_ATOMIC_STORE_U8 */

#if defined(HAS_ATOMIC_FETCH_ADD_U16) || !defined(HAS_ATOMIC_STORE_U16)
static inline uint16_t semi_atomic_fetch_add_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    return atomic_fetch_add_u16(dest, val);
}
#else
static inline uint16_t semi_atomic_fetch_add_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    uint16_t result = atomic_load_u16(dest);
    atomic_store_u16(dest, result + val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_ADD_U16 || !HAS_ATOMIC_STORE_U16 */

#if defined(HAS_ATOMIC_FETCH_ADD_U32) || !defined(HAS_ATOMIC_STORE_U32)
static inline uint32_t semi_atomic_fetch_add_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    return atomic_fetch_add_u32(dest, val);
}
#else
static inline uint32_t semi_atomic_fetch_add_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    uint32_t result = atomic_load_u32(dest);
    atomic_store_u32(dest, result + val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_ADD_U32 || !HAS_ATOMIC_STORE_U32 */
1473 
#if defined(HAS_ATOMIC_FETCH_ADD_U64) || !defined(HAS_ATOMIC_STORE_U64)
static inline uint64_t semi_atomic_fetch_add_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    return atomic_fetch_add_u64(dest, val);
}
#else
static inline uint64_t semi_atomic_fetch_add_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    /* Bug fix: the previous fallback stored `*dest + val` without returning
     * a value (undefined behavior for a non-void function) and read *dest
     * with a plain, non-atomic load. Load atomically, store the sum, and
     * return the previous value as the fetch-add contract requires —
     * matching the u8/u16/u32 fallbacks above. */
    uint64_t result = atomic_load_u64(dest);
    atomic_store_u64(dest, result + val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_ADD_U64 || !HAS_ATOMIC_STORE_U64 */
1487 
/* FETCH_SUB */
/* Same scheme as FETCH_ADD above: fully atomic version where available (or
 * where nothing is gained by splitting), otherwise atomic-load + atomic-store
 * with a plain subtraction in between. */
#if defined(HAS_ATOMIC_FETCH_SUB_U8) || !defined(HAS_ATOMIC_STORE_U8)
static inline uint8_t semi_atomic_fetch_sub_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    return atomic_fetch_sub_u8(dest, val);
}
#else
static inline uint8_t semi_atomic_fetch_sub_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    uint8_t result = atomic_load_u8(dest);
    atomic_store_u8(dest, result - val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_SUB_U8 || !HAS_ATOMIC_STORE_U8 */

#if defined(HAS_ATOMIC_FETCH_SUB_U16) || !defined(HAS_ATOMIC_STORE_U16)
static inline uint16_t semi_atomic_fetch_sub_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    return atomic_fetch_sub_u16(dest, val);
}
#else
static inline uint16_t semi_atomic_fetch_sub_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    uint16_t result = atomic_load_u16(dest);
    atomic_store_u16(dest, result - val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_SUB_U16 || !HAS_ATOMIC_STORE_U16 */

#if defined(HAS_ATOMIC_FETCH_SUB_U32) || !defined(HAS_ATOMIC_STORE_U32)
static inline uint32_t semi_atomic_fetch_sub_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    return atomic_fetch_sub_u32(dest, val);
}
#else
static inline uint32_t semi_atomic_fetch_sub_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    uint32_t result = atomic_load_u32(dest);
    atomic_store_u32(dest, result - val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_SUB_U32 || !HAS_ATOMIC_STORE_U32 */

#if defined(HAS_ATOMIC_FETCH_SUB_U64) || !defined(HAS_ATOMIC_STORE_U64)
static inline uint64_t semi_atomic_fetch_sub_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    return atomic_fetch_sub_u64(dest, val);
}
#else
static inline uint64_t semi_atomic_fetch_sub_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    uint64_t result = atomic_load_u64(dest);
    atomic_store_u64(dest, result - val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_SUB_U64 || !HAS_ATOMIC_STORE_U64 */
1552 
/* FETCH_OR */
/* Same scheme as FETCH_ADD above, with bitwise OR as the operation. */
#if defined(HAS_ATOMIC_FETCH_OR_U8) || !defined(HAS_ATOMIC_STORE_U8)
static inline uint8_t semi_atomic_fetch_or_u8(volatile uint8_t *dest,
                                              uint8_t val)
{
    return atomic_fetch_or_u8(dest, val);
}
#else
static inline uint8_t semi_atomic_fetch_or_u8(volatile uint8_t *dest,
                                              uint8_t val)
{
    uint8_t result = atomic_load_u8(dest);
    atomic_store_u8(dest, result | val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_OR_U8 || !HAS_ATOMIC_STORE_U8 */

#if defined(HAS_ATOMIC_FETCH_OR_U16) || !defined(HAS_ATOMIC_STORE_U16)
static inline uint16_t semi_atomic_fetch_or_u16(volatile uint16_t *dest,
                                                uint16_t val)
{
    return atomic_fetch_or_u16(dest, val);
}
#else
static inline uint16_t semi_atomic_fetch_or_u16(volatile uint16_t *dest,
                                                uint16_t val)
{
    uint16_t result = atomic_load_u16(dest);
    atomic_store_u16(dest, result | val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_OR_U16 || !HAS_ATOMIC_STORE_U16 */

#if defined(HAS_ATOMIC_FETCH_OR_U32) || !defined(HAS_ATOMIC_STORE_U32)
static inline uint32_t semi_atomic_fetch_or_u32(volatile uint32_t *dest,
                                                uint32_t val)
{
    return atomic_fetch_or_u32(dest, val);
}
#else
static inline uint32_t semi_atomic_fetch_or_u32(volatile uint32_t *dest,
                                                uint32_t val)
{
    uint32_t result = atomic_load_u32(dest);
    atomic_store_u32(dest, result | val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_OR_U32 || !HAS_ATOMIC_STORE_U32 */

#if defined(HAS_ATOMIC_FETCH_OR_U64) || !defined(HAS_ATOMIC_STORE_U64)
static inline uint64_t semi_atomic_fetch_or_u64(volatile uint64_t *dest,
                                                uint64_t val)
{
    return atomic_fetch_or_u64(dest, val);
}
#else
static inline uint64_t semi_atomic_fetch_or_u64(volatile uint64_t *dest,
                                                uint64_t val)
{
    uint64_t result = atomic_load_u64(dest);
    atomic_store_u64(dest, result | val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_OR_U64 || !HAS_ATOMIC_STORE_U64 */
1617 
/* FETCH_XOR */
/* Same scheme as FETCH_ADD above, with bitwise XOR as the operation. */
#if defined(HAS_ATOMIC_FETCH_XOR_U8) || !defined(HAS_ATOMIC_STORE_U8)
static inline uint8_t semi_atomic_fetch_xor_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    return atomic_fetch_xor_u8(dest, val);
}
#else
static inline uint8_t semi_atomic_fetch_xor_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    uint8_t result = atomic_load_u8(dest);
    atomic_store_u8(dest, result ^ val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_XOR_U8 || !HAS_ATOMIC_STORE_U8 */

#if defined(HAS_ATOMIC_FETCH_XOR_U16) || !defined(HAS_ATOMIC_STORE_U16)
static inline uint16_t semi_atomic_fetch_xor_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    return atomic_fetch_xor_u16(dest, val);
}
#else
static inline uint16_t semi_atomic_fetch_xor_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    uint16_t result = atomic_load_u16(dest);
    atomic_store_u16(dest, result ^ val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_XOR_U16 || !HAS_ATOMIC_STORE_U16 */

#if defined(HAS_ATOMIC_FETCH_XOR_U32) || !defined(HAS_ATOMIC_STORE_U32)
static inline uint32_t semi_atomic_fetch_xor_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    return atomic_fetch_xor_u32(dest, val);
}
#else
static inline uint32_t semi_atomic_fetch_xor_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    uint32_t result = atomic_load_u32(dest);
    atomic_store_u32(dest, result ^ val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_XOR_U32 || !HAS_ATOMIC_STORE_U32 */

#if defined(HAS_ATOMIC_FETCH_XOR_U64) || !defined(HAS_ATOMIC_STORE_U64)
static inline uint64_t semi_atomic_fetch_xor_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    return atomic_fetch_xor_u64(dest, val);
}
#else
static inline uint64_t semi_atomic_fetch_xor_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    uint64_t result = atomic_load_u64(dest);
    atomic_store_u64(dest, result ^ val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_XOR_U64 || !HAS_ATOMIC_STORE_U64 */
1682 
/* FETCH_AND */
/* Same scheme as FETCH_ADD above, with bitwise AND as the operation. */
#if defined(HAS_ATOMIC_FETCH_AND_U8) || !defined(HAS_ATOMIC_STORE_U8)
static inline uint8_t semi_atomic_fetch_and_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    return atomic_fetch_and_u8(dest, val);
}
#else
static inline uint8_t semi_atomic_fetch_and_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    uint8_t result = atomic_load_u8(dest);
    atomic_store_u8(dest, result & val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_AND_U8 || !HAS_ATOMIC_STORE_U8 */

#if defined(HAS_ATOMIC_FETCH_AND_U16) || !defined(HAS_ATOMIC_STORE_U16)
static inline uint16_t semi_atomic_fetch_and_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    return atomic_fetch_and_u16(dest, val);
}
#else
static inline uint16_t semi_atomic_fetch_and_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    uint16_t result = atomic_load_u16(dest);
    atomic_store_u16(dest, result & val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_AND_U16 || !HAS_ATOMIC_STORE_U16 */

#if defined(HAS_ATOMIC_FETCH_AND_U32) || !defined(HAS_ATOMIC_STORE_U32)
static inline uint32_t semi_atomic_fetch_and_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    return atomic_fetch_and_u32(dest, val);
}
#else
static inline uint32_t semi_atomic_fetch_and_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    uint32_t result = atomic_load_u32(dest);
    atomic_store_u32(dest, result & val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_AND_U32 || !HAS_ATOMIC_STORE_U32 */

#if defined(HAS_ATOMIC_FETCH_AND_U64) || !defined(HAS_ATOMIC_STORE_U64)
static inline uint64_t semi_atomic_fetch_and_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    return atomic_fetch_and_u64(dest, val);
}
#else
static inline uint64_t semi_atomic_fetch_and_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    uint64_t result = atomic_load_u64(dest);
    atomic_store_u64(dest, result & val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_AND_U64 || !HAS_ATOMIC_STORE_U64 */
1747 
1748 #ifdef __cplusplus
1749 }
1750 #endif
1751 
1752 /* NOLINTEND(bugprone-macro-parentheses, readability-inconsistent-declaration-parameter-name) */
Various helper macros.
int16_t kernel_pid_t
Unique process identifier.
Definition: sched.h:135
static void atomic_store_uintptr(volatile uintptr_t *dest, uintptr_t val)
Store an uintptr_t atomically.
Definition: atomic_utils.h:378
static atomic_bit_u32_t atomic_bit_u32(volatile uint32_t *dest, uint8_t bit)
Create a reference to a bit in an uint32_t
static uint64_t atomic_fetch_sub_u64(volatile uint64_t *dest, uint64_t subtrahend)
Atomically subtract a value from a given value.
static void atomic_store_u8(volatile uint8_t *dest, uint8_t val)
Store an uint8_t atomically.
static uint32_t atomic_fetch_or_u32(volatile uint32_t *dest, uint32_t val)
Atomic version of *dest |= val
static uint32_t semi_atomic_fetch_and_u32(volatile uint32_t *dest, uint32_t val)
Semi-atomic version of *dest &= val
static void atomic_clear_bit_unsigned(atomic_bit_unsigned_t bit)
Atomic version of *dest &= ~(1 << bit)
Definition: atomic_utils.h:868
static void atomic_set_bit_u8(atomic_bit_u8_t bit)
Atomic version of *dest |= (1 << bit)
#define ATOMIC_FETCH_OP_IMPL(opname, op, name, type)
Generates a static inline function implementing atomic_fetch_<op>_u<width>()
static uint8_t semi_atomic_fetch_add_u8(volatile uint8_t *dest, uint8_t summand)
Semi-atomically add a value onto a given value.
static atomic_bit_u64_t atomic_bit_u64(volatile uint64_t *dest, uint8_t bit)
Create a reference to a bit in an uint64_t
static uint32_t atomic_fetch_xor_u32(volatile uint32_t *dest, uint32_t val)
Atomic version of *dest ^= val
#define ATOMIC_LOAD_IMPL(name, type)
Generates a static inline function implementing atomic_load_u<width>()
static void atomic_clear_bit_u32(atomic_bit_u32_t bit)
Atomic version of *dest &= ~(1 << bit)
static uint8_t semi_atomic_fetch_xor_u8(volatile uint8_t *dest, uint8_t val)
Semi-atomic version of *dest ^= val
static uint32_t atomic_load_u32(const volatile uint32_t *var)
Load an uint32_t atomically.
static uint32_t semi_atomic_fetch_add_u32(volatile uint32_t *dest, uint32_t summand)
Semi-atomically add a value onto a given value.
static kernel_pid_t atomic_load_kernel_pid(const volatile kernel_pid_t *var)
Load a kernel_pid_t atomically.
Definition: atomic_utils.h:316
static unsigned atomic_fetch_or_unsigned(volatile unsigned *dest, unsigned val)
Atomic version of *dest |= val
Definition: atomic_utils.h:589
static uint32_t atomic_fetch_and_u32(volatile uint32_t *dest, uint32_t val)
Atomic version of *dest &= val
static uint16_t atomic_fetch_and_u16(volatile uint16_t *dest, uint16_t val)
Atomic version of *dest &= val
static void atomic_store_ptr(void **dest, const void *val)
Store an void * atomically.
Definition: atomic_utils.h:396
static unsigned atomic_fetch_add_unsigned(volatile unsigned *dest, unsigned summand)
Atomically add a value onto a given value.
Definition: atomic_utils.h:458
atomic_bit_u16_t atomic_bit_unsigned_t
Type specifying a bit in an unsigned int
Definition: atomic_utils.h:222
static unsigned semi_atomic_fetch_sub_unsigned(volatile unsigned *dest, unsigned subtrahend)
Semi-atomically subtract a value from a given value.
Definition: atomic_utils.h:997
static uint8_t atomic_fetch_xor_u8(volatile uint8_t *dest, uint8_t val)
Atomic version of *dest ^= val
static void atomic_store_u64(volatile uint64_t *dest, uint64_t val)
Store an uint64_t atomically.
static void * atomic_load_ptr(void **ptr_addr)
Load an void * atomically.
Definition: atomic_utils.h:307
static void atomic_store_unsigned(volatile unsigned *dest, unsigned val)
Store an unsigned int atomically.
Definition: atomic_utils.h:359
static atomic_bit_unsigned_t atomic_bit_unsigned(volatile unsigned *dest, uint8_t bit)
Create a reference to a bit in an unsigned int
Definition: atomic_utils.h:785
static uint16_t atomic_load_u16(const volatile uint16_t *var)
Load an uint16_t atomically.
static unsigned semi_atomic_fetch_or_unsigned(volatile unsigned *dest, unsigned val)
Semi-atomic version of *dest |= val
static uint64_t atomic_fetch_and_u64(volatile uint64_t *dest, uint64_t val)
Atomic version of *dest &= val
static uint16_t atomic_fetch_xor_u16(volatile uint16_t *dest, uint16_t val)
Atomic version of *dest ^= val
static uint64_t atomic_fetch_xor_u64(volatile uint64_t *dest, uint64_t val)
Atomic version of *dest ^= val
static uint64_t semi_atomic_fetch_or_u64(volatile uint64_t *dest, uint64_t val)
Semi-atomic version of *dest |= val
static uint64_t semi_atomic_fetch_xor_u64(volatile uint64_t *dest, uint64_t val)
Semi-atomic version of *dest ^= val
static void atomic_set_bit_u64(atomic_bit_u64_t bit)
Atomic version of *dest |= (1 << bit)
static unsigned semi_atomic_fetch_xor_unsigned(volatile unsigned *dest, unsigned val)
Semi-atomic version of *dest ^= val
static void atomic_clear_bit_u8(atomic_bit_u8_t bit)
Atomic version of *dest &= ~(1 << bit)
static uint16_t semi_atomic_fetch_sub_u16(volatile uint16_t *dest, uint16_t subtrahend)
Semi-atomically subtract a value from a given value.
static uint8_t atomic_load_u8(const volatile uint8_t *var)
Load an uint8_t atomically.
static atomic_bit_u8_t atomic_bit_u8(volatile uint8_t *dest, uint8_t bit)
Create a reference to a bit in an uint8_t
static unsigned semi_atomic_fetch_and_unsigned(volatile unsigned *dest, unsigned val)
Semi-atomic version of *dest &= val
static uint32_t semi_atomic_fetch_or_u32(volatile uint32_t *dest, uint32_t val)
Semi-atomic version of *dest |= val
static uint16_t atomic_fetch_sub_u16(volatile uint16_t *dest, uint16_t subtrahend)
Atomically subtract a value from a given value.
static uint8_t atomic_fetch_sub_u8(volatile uint8_t *dest, uint8_t subtrahend)
Atomically subtract a value from a given value.
static uint32_t semi_atomic_fetch_sub_u32(volatile uint32_t *dest, uint32_t subtrahend)
Semi-atomically subtract a value from a given value.
static uint64_t semi_atomic_fetch_and_u64(volatile uint64_t *dest, uint64_t val)
Semi-atomic version of *dest &= val
static uint8_t atomic_fetch_add_u8(volatile uint8_t *dest, uint8_t summand)
Atomically add a value onto a given value.
static void atomic_clear_bit_u16(atomic_bit_u16_t bit)
Atomic version of *dest &= ~(1 << bit)
static uint64_t semi_atomic_fetch_sub_u64(volatile uint64_t *dest, uint64_t subtrahend)
Semi-atomically subtract a value from a given value.
static void atomic_store_kernel_pid(volatile kernel_pid_t *dest, kernel_pid_t val)
Store a kernel_pid_t atomically.
Definition: atomic_utils.h:405
static void atomic_store_u16(volatile uint16_t *dest, uint16_t val)
Store an uint16_t atomically.
static void atomic_store_u32(volatile uint32_t *dest, uint32_t val)
Store an uint32_t atomically.
static uintptr_t atomic_load_uintptr(const volatile uintptr_t *var)
Load an uintptr_t atomically.
Definition: atomic_utils.h:290
static atomic_bit_u16_t atomic_bit_u16(volatile uint16_t *dest, uint8_t bit)
Create a reference to a bit in an uint16_t
#define ATOMIC_STORE_IMPL(name, type)
Generates a static inline function implementing atomic_store_u<width>()
static uint32_t semi_atomic_fetch_xor_u32(volatile uint32_t *dest, uint32_t val)
Semi-atomic version of *dest ^= val
static uint16_t atomic_fetch_or_u16(volatile uint16_t *dest, uint16_t val)
Atomic version of *dest |= val
static void atomic_set_bit_u32(atomic_bit_u32_t bit)
Atomic version of *dest |= (1 << bit)
static uint16_t semi_atomic_fetch_and_u16(volatile uint16_t *dest, uint16_t val)
Semi-atomic version of *dest &= val
static uint32_t atomic_fetch_sub_u32(volatile uint32_t *dest, uint32_t subtrahend)
Atomically subtract a value from a given value.
static uint16_t semi_atomic_fetch_add_u16(volatile uint16_t *dest, uint16_t summand)
Semi-atomically add a value onto a given value.
static uint64_t semi_atomic_fetch_add_u64(volatile uint64_t *dest, uint64_t summand)
Semi-atomically add a value onto a given value.
static void atomic_clear_bit_u64(atomic_bit_u64_t bit)
Atomic version of *dest &= ~(1 << bit)
static unsigned atomic_fetch_sub_unsigned(volatile unsigned *dest, unsigned subtrahend)
Atomically subtract a value from a given value.
Definition: atomic_utils.h:524
static uint64_t atomic_fetch_add_u64(volatile uint64_t *dest, uint64_t summand)
Atomically add a value onto a given value.
static uint16_t atomic_fetch_add_u16(volatile uint16_t *dest, uint16_t summand)
Atomically add a value onto a given value.
static uint8_t semi_atomic_fetch_sub_u8(volatile uint8_t *dest, uint8_t subtrahend)
Semi-atomically subtract a value from a given value.
static void atomic_set_bit_u16(atomic_bit_u16_t bit)
Atomic version of *dest |= (1 << bit)
static uint16_t semi_atomic_fetch_or_u16(volatile uint16_t *dest, uint16_t val)
Semi-atomic version of *dest |= val
static uint8_t semi_atomic_fetch_or_u8(volatile uint8_t *dest, uint8_t val)
Semi-atomic version of *dest |= val
static uint64_t atomic_fetch_or_u64(volatile uint64_t *dest, uint64_t val)
Atomic version of *dest |= val
static uint64_t atomic_load_u64(const volatile uint64_t *var)
Load an uint64_t atomically.
static unsigned semi_atomic_fetch_add_unsigned(volatile unsigned *dest, unsigned summand)
Semi-atomically add a value onto a given value.
Definition: atomic_utils.h:931
static void atomic_set_bit_unsigned(atomic_bit_unsigned_t bit)
Atomic version of *dest |= (1 << bit)
Definition: atomic_utils.h:828
static uint32_t atomic_fetch_add_u32(volatile uint32_t *dest, uint32_t summand)
Atomically add a value onto a given value.
static uint16_t semi_atomic_fetch_xor_u16(volatile uint16_t *dest, uint16_t val)
Semi-atomic version of *dest ^= val
static unsigned atomic_fetch_xor_unsigned(volatile unsigned *dest, unsigned val)
Atomic version of *dest ^= val
Definition: atomic_utils.h:654
static unsigned atomic_load_unsigned(const volatile unsigned *var)
Load an unsigned int atomically.
Definition: atomic_utils.h:271
static uint8_t atomic_fetch_or_u8(volatile uint8_t *dest, uint8_t val)
Atomic version of *dest |= val
static uint8_t semi_atomic_fetch_and_u8(volatile uint8_t *dest, uint8_t val)
Semi-atomic version of *dest &= val
static unsigned atomic_fetch_and_unsigned(volatile unsigned *dest, unsigned val)
Atomic version of *dest &= val
Definition: atomic_utils.h:719
static uint8_t atomic_fetch_and_u8(volatile uint8_t *dest, uint8_t val)
Atomic version of *dest &= val
IRQ driver interface.
Scheduler API definition.
Type specifying a bit in an uint16_t
Definition: atomic_utils.h:190
uint16_t mask
Bitmask used for setting the bit.
Definition: atomic_utils.h:192
volatile uint16_t * dest
Memory containing the bit to set/clear.
Definition: atomic_utils.h:191
Type specifying a bit in an uint32_t
Definition: atomic_utils.h:200
volatile uint32_t * dest
Memory containing the bit to set/clear.
Definition: atomic_utils.h:201
uint32_t mask
Bitmask used for setting the bit.
Definition: atomic_utils.h:202
Type specifying a bit in an uint64_t
Definition: atomic_utils.h:210
volatile uint64_t * dest
Memory containing the bit to set/clear.
Definition: atomic_utils.h:211
uint64_t mask
Bitmask used for setting the bit.
Definition: atomic_utils.h:212
Type specifying a bit in an uint8_t
Definition: atomic_utils.h:180
uint8_t mask
Bitmask used for setting the bit.
Definition: atomic_utils.h:182
volatile uint8_t * dest
Memory containing the bit to set/clear.
Definition: atomic_utils.h:181