atomic_utils.h
/*
 * Copyright (C) 2020 Otto-von-Guericke-Universität Magdeburg
 *
 * This file is subject to the terms and conditions of the GNU Lesser General
 * Public License v2.1. See the file LICENSE in the top level directory for more
 * details.
 */

#pragma once

#include <limits.h>
#include <stdint.h>

#include "irq.h"
#include "macros/utils.h"
#include "sched.h"

#include "atomic_utils_arch.h" /* IWYU pragma: export */
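
/* Note: `atomic_utils_arch.h` is the platform hook for this API. A sketch of
 * what an architecture with a native 8-bit atomic load might provide there
 * (hypothetical example; the GCC/Clang `__atomic_load_n()` builtin is just
 * one possible backend):
 *
 *     #define HAS_ATOMIC_LOAD_U8
 *     static inline uint8_t atomic_load_u8(const volatile uint8_t *var)
 *     {
 *         return __atomic_load_n(var, __ATOMIC_SEQ_CST);
 *     }
 *
 * Each `HAS_ATOMIC_*` define suppresses the matching IRQ-based fallback
 * implementation further down in this file. */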

#ifdef __cplusplus
extern "C" {
#endif

/* NOLINTBEGIN(bugprone-macro-parentheses, readability-inconsistent-declaration-parameter-name)
 *
 * The macros ATOMIC_LOAD_IMPL() and friends do not surround the argument used
 * to pass the type with parentheses. The clang-tidy warning is suppressed
 * here, as adding parentheses around a type would be a syntax error.
 *
 * The macro ATOMIC_FETCH_OP_IMPL() uses `val` as the argument name, but the
 * declarations may use a more specific name (e.g. `summand` instead of `val`).
 */

/* Declarations and documentation: */

#if !defined(HAS_ATOMIC_BIT) || defined(DOXYGEN)
/**
 * @brief   Type specifying a bit in an `uint8_t`
 */
typedef struct {
    volatile uint8_t *dest;     /**< Memory containing the bit to set/clear */
    uint8_t mask;               /**< Bitmask used for setting the bit */
} atomic_bit_u8_t;

/**
 * @brief   Type specifying a bit in an `uint16_t`
 */
typedef struct {
    volatile uint16_t *dest;    /**< Memory containing the bit to set/clear */
    uint16_t mask;              /**< Bitmask used for setting the bit */
} atomic_bit_u16_t;

/**
 * @brief   Type specifying a bit in an `uint32_t`
 */
typedef struct {
    volatile uint32_t *dest;    /**< Memory containing the bit to set/clear */
    uint32_t mask;              /**< Bitmask used for setting the bit */
} atomic_bit_u32_t;

/**
 * @brief   Type specifying a bit in an `uint64_t`
 */
typedef struct {
    volatile uint64_t *dest;    /**< Memory containing the bit to set/clear */
    uint64_t mask;              /**< Bitmask used for setting the bit */
} atomic_bit_u64_t;

#endif /* HAS_ATOMIC_BIT */

/**
 * @brief   Type specifying a bit in an `unsigned int`
 */
#if UINT_MAX == UINT16_MAX
typedef atomic_bit_u16_t atomic_bit_unsigned_t;
#elif UINT_MAX == UINT32_MAX
typedef atomic_bit_u32_t atomic_bit_unsigned_t;
#else
typedef atomic_bit_u64_t atomic_bit_unsigned_t;
#endif

/** @brief  Load an `uint8_t` atomically */
static inline uint8_t atomic_load_u8(const volatile uint8_t *var);
/** @brief  Load an `uint16_t` atomically */
static inline uint16_t atomic_load_u16(const volatile uint16_t *var);
/** @brief  Load an `uint32_t` atomically */
static inline uint32_t atomic_load_u32(const volatile uint32_t *var);
/** @brief  Load an `uint64_t` atomically */
static inline uint64_t atomic_load_u64(const volatile uint64_t *var);

/** @brief  Load an `unsigned int` atomically */
static inline unsigned atomic_load_unsigned(const volatile unsigned *var)
{
    if (sizeof(uint64_t) == sizeof(unsigned)) {
        return atomic_load_u64((volatile void *)var);
    }

    if (sizeof(uint32_t) == sizeof(unsigned)) {
        return atomic_load_u32((volatile void *)var);
    }

    return atomic_load_u16((volatile void *)var);
}

/** @brief  Load an `uintptr_t` atomically */
static inline uintptr_t atomic_load_uintptr(const volatile uintptr_t *var) {
    if (sizeof(uintptr_t) == 2) {
        return atomic_load_u16((const volatile uint16_t *)var);
    }

    if (sizeof(uintptr_t) == 4) {
        return atomic_load_u32((const volatile uint32_t *)(uintptr_t)var);
    }

    return atomic_load_u64((const volatile uint64_t *)(uintptr_t)var);
}

/** @brief  Load a `void *` atomically */
static inline void * atomic_load_ptr(void **ptr_addr) {
    return (void *)atomic_load_uintptr((const volatile uintptr_t *)ptr_addr);
}

/** @brief  Load a `kernel_pid_t` atomically */
static inline kernel_pid_t atomic_load_kernel_pid(const volatile kernel_pid_t *var)
{
    return (kernel_pid_t)atomic_load_u16((const volatile uint16_t *)var);
}
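
/* Usage sketch (all names hypothetical): read a tick counter that an ISR
 * updates. On platforms where a plain 32-bit access is not a single
 * instruction (e.g. AVR), a non-atomic read could mix the halves of two
 * different values; the atomic load rules such torn reads out. */
static inline uint32_t example_read_ticks(const volatile uint32_t *ticks)
{
    return atomic_load_u32(ticks);
}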

/** @brief  Store an `uint8_t` atomically */
static inline void atomic_store_u8(volatile uint8_t *dest, uint8_t val);
/** @brief  Store an `uint16_t` atomically */
static inline void atomic_store_u16(volatile uint16_t *dest, uint16_t val);
/** @brief  Store an `uint32_t` atomically */
static inline void atomic_store_u32(volatile uint32_t *dest, uint32_t val);
/** @brief  Store an `uint64_t` atomically */
static inline void atomic_store_u64(volatile uint64_t *dest, uint64_t val);

/** @brief  Store an `unsigned int` atomically */
static inline void atomic_store_unsigned(volatile unsigned *dest, unsigned val)
{
    if (sizeof(uint64_t) == sizeof(unsigned)) {
        atomic_store_u64((volatile void *)dest, val);
    }
    else if (sizeof(uint32_t) == sizeof(unsigned)) {
        atomic_store_u32((volatile void *)dest, val);
    }
    else {
        atomic_store_u16((volatile void *)dest, val);
    }
}

/** @brief  Store an `uintptr_t` atomically */
static inline void atomic_store_uintptr(volatile uintptr_t *dest, uintptr_t val)
{
    if (sizeof(uintptr_t) == 2) {
        atomic_store_u16((volatile uint16_t *)dest, (uint16_t)val);
    }
    else if (sizeof(uintptr_t) == 4) {
        atomic_store_u32((volatile uint32_t *)(uintptr_t)dest, (uint32_t)val);
    }
    else {
        atomic_store_u64((volatile uint64_t *)(uintptr_t)dest, (uint64_t)val);
    }
}

/** @brief  Store a `void *` atomically */
static inline void atomic_store_ptr(void **dest, const void *val) {
    atomic_store_uintptr((volatile uintptr_t *)dest, (uintptr_t)val);
}

/** @brief  Store a `kernel_pid_t` atomically */
static inline void atomic_store_kernel_pid(volatile kernel_pid_t *dest,
                                           kernel_pid_t val)
{
    atomic_store_u16((volatile uint16_t *)dest, (uint16_t)val);
}
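
/* Usage sketch (all names hypothetical): publish a fully initialized object
 * by storing its address last. A reader using atomic_load_ptr() then sees
 * either NULL or a pointer to complete data, never a torn pointer value. */
static inline void example_publish(void **slot, void *ready_obj)
{
    atomic_store_ptr(slot, ready_obj);
}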

/** @brief  Atomically add a value onto a given value */
static inline uint8_t atomic_fetch_add_u8(volatile uint8_t *dest,
                                          uint8_t summand);
/** @brief  Atomically add a value onto a given value */
static inline uint16_t atomic_fetch_add_u16(volatile uint16_t *dest,
                                            uint16_t summand);
/** @brief  Atomically add a value onto a given value */
static inline uint32_t atomic_fetch_add_u32(volatile uint32_t *dest,
                                            uint32_t summand);
/** @brief  Atomically add a value onto a given value */
static inline uint64_t atomic_fetch_add_u64(volatile uint64_t *dest,
                                            uint64_t summand);

/** @brief  Atomically add a value onto a given value */
static inline unsigned atomic_fetch_add_unsigned(volatile unsigned *dest,
                                                 unsigned summand)
{
    if (sizeof(unsigned) == sizeof(uint64_t)) {
        return atomic_fetch_add_u64((volatile void *)dest, summand);
    }

    if (sizeof(unsigned) == sizeof(uint32_t)) {
        return atomic_fetch_add_u32((volatile void *)dest, summand);
    }

    return atomic_fetch_add_u16((volatile void *)dest, summand);
}

/** @brief  Atomically subtract a value from a given value */
static inline uint8_t atomic_fetch_sub_u8(volatile uint8_t *dest,
                                          uint8_t subtrahend);
/** @brief  Atomically subtract a value from a given value */
static inline uint16_t atomic_fetch_sub_u16(volatile uint16_t *dest,
                                            uint16_t subtrahend);
/** @brief  Atomically subtract a value from a given value */
static inline uint32_t atomic_fetch_sub_u32(volatile uint32_t *dest,
                                            uint32_t subtrahend);
/** @brief  Atomically subtract a value from a given value */
static inline uint64_t atomic_fetch_sub_u64(volatile uint64_t *dest,
                                            uint64_t subtrahend);

/** @brief  Atomically subtract a value from a given value */
static inline unsigned atomic_fetch_sub_unsigned(volatile unsigned *dest,
                                                 unsigned subtrahend)
{
    if (sizeof(unsigned) == sizeof(uint64_t)) {
        return atomic_fetch_sub_u64((volatile void *)dest, subtrahend);
    }

    if (sizeof(unsigned) == sizeof(uint32_t)) {
        return atomic_fetch_sub_u32((volatile void *)dest, subtrahend);
    }

    return atomic_fetch_sub_u16((volatile void *)dest, subtrahend);
}
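
/* Usage sketch (all names hypothetical): an event counter bumped from ISR
 * context and drained by a thread. Subtracting exactly the value just read
 * preserves any increments that happen between the load and the
 * subtraction. */
static inline uint16_t example_drain_counter(volatile uint16_t *counter)
{
    uint16_t seen = atomic_load_u16(counter);
    atomic_fetch_sub_u16(counter, seen);
    return seen;
}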

/** @brief  Atomic version of `*dest |= val` */
static inline uint8_t atomic_fetch_or_u8(volatile uint8_t *dest, uint8_t val);
/** @brief  Atomic version of `*dest |= val` */
static inline uint16_t atomic_fetch_or_u16(volatile uint16_t *dest,
                                           uint16_t val);
/** @brief  Atomic version of `*dest |= val` */
static inline uint32_t atomic_fetch_or_u32(volatile uint32_t *dest,
                                           uint32_t val);
/** @brief  Atomic version of `*dest |= val` */
static inline uint64_t atomic_fetch_or_u64(volatile uint64_t *dest,
                                           uint64_t val);

/** @brief  Atomic version of `*dest |= val` */
static inline unsigned atomic_fetch_or_unsigned(volatile unsigned *dest,
                                                unsigned val)
{
    if (sizeof(unsigned) == sizeof(uint64_t)) {
        return atomic_fetch_or_u64((volatile void *)dest, val);
    }

    if (sizeof(unsigned) == sizeof(uint32_t)) {
        return atomic_fetch_or_u32((volatile void *)dest, val);
    }

    return atomic_fetch_or_u16((volatile void *)dest, val);
}

/** @brief  Atomic version of `*dest ^= val` */
static inline uint8_t atomic_fetch_xor_u8(volatile uint8_t *dest, uint8_t val);
/** @brief  Atomic version of `*dest ^= val` */
static inline uint16_t atomic_fetch_xor_u16(volatile uint16_t *dest,
                                            uint16_t val);
/** @brief  Atomic version of `*dest ^= val` */
static inline uint32_t atomic_fetch_xor_u32(volatile uint32_t *dest,
                                            uint32_t val);
/** @brief  Atomic version of `*dest ^= val` */
static inline uint64_t atomic_fetch_xor_u64(volatile uint64_t *dest,
                                            uint64_t val);

/** @brief  Atomic version of `*dest ^= val` */
static inline unsigned atomic_fetch_xor_unsigned(volatile unsigned *dest,
                                                 unsigned val)
{
    if (sizeof(unsigned) == sizeof(uint64_t)) {
        return atomic_fetch_xor_u64((volatile void *)dest, val);
    }

    if (sizeof(unsigned) == sizeof(uint32_t)) {
        return atomic_fetch_xor_u32((volatile void *)dest, val);
    }

    return atomic_fetch_xor_u16((volatile void *)dest, val);
}

/** @brief  Atomic version of `*dest &= val` */
static inline uint8_t atomic_fetch_and_u8(volatile uint8_t *dest, uint8_t val);
/** @brief  Atomic version of `*dest &= val` */
static inline uint16_t atomic_fetch_and_u16(volatile uint16_t *dest,
                                            uint16_t val);
/** @brief  Atomic version of `*dest &= val` */
static inline uint32_t atomic_fetch_and_u32(volatile uint32_t *dest,
                                            uint32_t val);
/** @brief  Atomic version of `*dest &= val` */
static inline uint64_t atomic_fetch_and_u64(volatile uint64_t *dest,
                                            uint64_t val);

/** @brief  Atomic version of `*dest &= val` */
static inline unsigned atomic_fetch_and_unsigned(volatile unsigned *dest,
                                                 unsigned val)
{
    if (sizeof(unsigned) == sizeof(uint64_t)) {
        return atomic_fetch_and_u64((volatile void *)dest, val);
    }

    if (sizeof(unsigned) == sizeof(uint32_t)) {
        return atomic_fetch_and_u32((volatile void *)dest, val);
    }

    return atomic_fetch_and_u16((volatile void *)dest, val);
}
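
/* Usage sketch (all names hypothetical): a test-and-set on a shared flag
 * byte. The old value returned by the fetch operation tells the caller
 * whether the flag was already set before this call. */
static inline int example_test_and_set_flag(volatile uint8_t *flags,
                                            uint8_t mask)
{
    return (atomic_fetch_or_u8(flags, mask) & mask) != 0;
}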

/** @brief  Create a reference to a bit in an `uint8_t` */
static inline atomic_bit_u8_t atomic_bit_u8(volatile uint8_t *dest,
                                            uint8_t bit);

/** @brief  Create a reference to a bit in an `uint16_t` */
static inline atomic_bit_u16_t atomic_bit_u16(volatile uint16_t *dest,
                                              uint8_t bit);

/** @brief  Create a reference to a bit in an `uint32_t` */
static inline atomic_bit_u32_t atomic_bit_u32(volatile uint32_t *dest,
                                              uint8_t bit);

/** @brief  Create a reference to a bit in an `uint64_t` */
static inline atomic_bit_u64_t atomic_bit_u64(volatile uint64_t *dest,
                                              uint8_t bit);

/** @brief  Create a reference to a bit in an `unsigned int` */
static inline atomic_bit_unsigned_t atomic_bit_unsigned(volatile unsigned *dest,
                                                        uint8_t bit)
{
    /* Some archs define uint32_t as unsigned long, uint16_t as short, etc.,
     * so we need to cast. */
#if UINT_MAX == UINT16_MAX
    return atomic_bit_u16((uint16_t volatile *)dest, bit);
#elif UINT_MAX == UINT32_MAX
    return atomic_bit_u32((uint32_t volatile *)dest, bit);
#else
    return atomic_bit_u64((uint64_t volatile *)dest, bit);
#endif
}

/** @brief  Atomic version of `*dest |= (1 << bit)` */
static inline void atomic_set_bit_u8(atomic_bit_u8_t bit);
/** @brief  Atomic version of `*dest |= (1 << bit)` */
static inline void atomic_set_bit_u16(atomic_bit_u16_t bit);
/** @brief  Atomic version of `*dest |= (1 << bit)` */
static inline void atomic_set_bit_u32(atomic_bit_u32_t bit);
/** @brief  Atomic version of `*dest |= (1 << bit)` */
static inline void atomic_set_bit_u64(atomic_bit_u64_t bit);
/** @brief  Atomic version of `*dest |= (1 << bit)` */
static inline void atomic_set_bit_unsigned(atomic_bit_unsigned_t bit)
{
#if UINT_MAX == UINT16_MAX
    atomic_set_bit_u16(bit);
#elif UINT_MAX == UINT32_MAX
    atomic_set_bit_u32(bit);
#else
    atomic_set_bit_u64(bit);
#endif
}

/** @brief  Atomic version of `*dest &= ~(1 << bit)` */
static inline void atomic_clear_bit_u8(atomic_bit_u8_t bit);
/** @brief  Atomic version of `*dest &= ~(1 << bit)` */
static inline void atomic_clear_bit_u16(atomic_bit_u16_t bit);
/** @brief  Atomic version of `*dest &= ~(1 << bit)` */
static inline void atomic_clear_bit_u32(atomic_bit_u32_t bit);
/** @brief  Atomic version of `*dest &= ~(1 << bit)` */
static inline void atomic_clear_bit_u64(atomic_bit_u64_t bit);
/** @brief  Atomic version of `*dest &= ~(1 << bit)` */
static inline void atomic_clear_bit_unsigned(atomic_bit_unsigned_t bit)
{
#if UINT_MAX == UINT16_MAX
    atomic_clear_bit_u16(bit);
#elif UINT_MAX == UINT32_MAX
    atomic_clear_bit_u32(bit);
#else
    atomic_clear_bit_u64(bit);
#endif
}
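
/* Usage sketch (all names hypothetical): pre-compute a bit reference once,
 * then set and clear it in a hot path. Where the architecture provides
 * HAS_ATOMIC_BIT (e.g. via bit-banding), the set/clear calls can compile
 * down to a single store each. */
static inline void example_pulse_bit5(volatile uint32_t *reg)
{
    atomic_bit_u32_t bit = atomic_bit_u32(reg, 5);
    atomic_set_bit_u32(bit);
    atomic_clear_bit_u32(bit);
}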

/** @brief  Semi-atomically add a value onto a given value */
static inline uint8_t semi_atomic_fetch_add_u8(volatile uint8_t *dest,
                                               uint8_t summand);
/** @brief  Semi-atomically add a value onto a given value */
static inline uint16_t semi_atomic_fetch_add_u16(volatile uint16_t *dest,
                                                 uint16_t summand);
/** @brief  Semi-atomically add a value onto a given value */
static inline uint32_t semi_atomic_fetch_add_u32(volatile uint32_t *dest,
                                                 uint32_t summand);
/** @brief  Semi-atomically add a value onto a given value */
static inline uint64_t semi_atomic_fetch_add_u64(volatile uint64_t *dest,
                                                 uint64_t summand);

/** @brief  Semi-atomically add a value onto a given value */
static inline unsigned semi_atomic_fetch_add_unsigned(volatile unsigned *dest,
                                                      unsigned summand)
{
    if (sizeof(unsigned) == sizeof(uint64_t)) {
        return semi_atomic_fetch_add_u64((volatile void *)dest, summand);
    }

    if (sizeof(unsigned) == sizeof(uint32_t)) {
        return semi_atomic_fetch_add_u32((volatile void *)dest, summand);
    }

    return semi_atomic_fetch_add_u16((volatile void *)dest, summand);
}

/** @brief  Semi-atomically subtract a value from a given value */
static inline uint8_t semi_atomic_fetch_sub_u8(volatile uint8_t *dest,
                                               uint8_t subtrahend);
/** @brief  Semi-atomically subtract a value from a given value */
static inline uint16_t semi_atomic_fetch_sub_u16(volatile uint16_t *dest,
                                                 uint16_t subtrahend);
/** @brief  Semi-atomically subtract a value from a given value */
static inline uint32_t semi_atomic_fetch_sub_u32(volatile uint32_t *dest,
                                                 uint32_t subtrahend);
/** @brief  Semi-atomically subtract a value from a given value */
static inline uint64_t semi_atomic_fetch_sub_u64(volatile uint64_t *dest,
                                                 uint64_t subtrahend);

/** @brief  Semi-atomically subtract a value from a given value */
static inline unsigned semi_atomic_fetch_sub_unsigned(volatile unsigned *dest,
                                                      unsigned subtrahend)
{
    if (sizeof(unsigned) == sizeof(uint64_t)) {
        return semi_atomic_fetch_sub_u64((volatile void *)dest, subtrahend);
    }

    if (sizeof(unsigned) == sizeof(uint32_t)) {
        return semi_atomic_fetch_sub_u32((volatile void *)dest, subtrahend);
    }

    return semi_atomic_fetch_sub_u16((volatile void *)dest, subtrahend);
}

/** @brief  Semi-atomic version of `*dest |= val` */
static inline uint8_t semi_atomic_fetch_or_u8(volatile uint8_t *dest, uint8_t val);
/** @brief  Semi-atomic version of `*dest |= val` */
static inline uint16_t semi_atomic_fetch_or_u16(volatile uint16_t *dest,
                                                uint16_t val);
/** @brief  Semi-atomic version of `*dest |= val` */
static inline uint32_t semi_atomic_fetch_or_u32(volatile uint32_t *dest,
                                                uint32_t val);
/** @brief  Semi-atomic version of `*dest |= val` */
static inline uint64_t semi_atomic_fetch_or_u64(volatile uint64_t *dest,
                                                uint64_t val);

/** @brief  Semi-atomic version of `*dest |= val` */
static inline unsigned semi_atomic_fetch_or_unsigned(volatile unsigned *dest,
                                                     unsigned val)
{
    if (sizeof(unsigned) == sizeof(uint64_t)) {
        return semi_atomic_fetch_or_u64((volatile void *)dest, val);
    }

    if (sizeof(unsigned) == sizeof(uint32_t)) {
        return semi_atomic_fetch_or_u32((volatile void *)dest, val);
    }

    return semi_atomic_fetch_or_u16((volatile void *)dest, val);
}

/** @brief  Semi-atomic version of `*dest ^= val` */
static inline uint8_t semi_atomic_fetch_xor_u8(volatile uint8_t *dest,
                                               uint8_t val);
/** @brief  Semi-atomic version of `*dest ^= val` */
static inline uint16_t semi_atomic_fetch_xor_u16(volatile uint16_t *dest,
                                                 uint16_t val);
/** @brief  Semi-atomic version of `*dest ^= val` */
static inline uint32_t semi_atomic_fetch_xor_u32(volatile uint32_t *dest,
                                                 uint32_t val);
/** @brief  Semi-atomic version of `*dest ^= val` */
static inline uint64_t semi_atomic_fetch_xor_u64(volatile uint64_t *dest,
                                                 uint64_t val);

/** @brief  Semi-atomic version of `*dest ^= val` */
static inline unsigned semi_atomic_fetch_xor_unsigned(volatile unsigned *dest,
                                                      unsigned val)
{
    if (sizeof(unsigned) == sizeof(uint64_t)) {
        return semi_atomic_fetch_xor_u64((volatile void *)dest, val);
    }

    if (sizeof(unsigned) == sizeof(uint32_t)) {
        return semi_atomic_fetch_xor_u32((volatile void *)dest, val);
    }

    return semi_atomic_fetch_xor_u16((volatile void *)dest, val);
}

/** @brief  Semi-atomic version of `*dest &= val` */
static inline uint8_t semi_atomic_fetch_and_u8(volatile uint8_t *dest,
                                               uint8_t val);
/** @brief  Semi-atomic version of `*dest &= val` */
static inline uint16_t semi_atomic_fetch_and_u16(volatile uint16_t *dest,
                                                 uint16_t val);
/** @brief  Semi-atomic version of `*dest &= val` */
static inline uint32_t semi_atomic_fetch_and_u32(volatile uint32_t *dest,
                                                 uint32_t val);
/** @brief  Semi-atomic version of `*dest &= val` */
static inline uint64_t semi_atomic_fetch_and_u64(volatile uint64_t *dest,
                                                 uint64_t val);

/** @brief  Semi-atomic version of `*dest &= val` */
static inline unsigned semi_atomic_fetch_and_unsigned(volatile unsigned *dest,
                                                      unsigned val)
{
    if (sizeof(unsigned) == sizeof(uint64_t)) {
        return semi_atomic_fetch_and_u64((volatile void *)dest, val);
    }

    if (sizeof(unsigned) == sizeof(uint32_t)) {
        return semi_atomic_fetch_and_u32((volatile void *)dest, val);
    }

    return semi_atomic_fetch_and_u16((volatile void *)dest, val);
}
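
/* Usage sketch (all names hypothetical) of the intended semi-atomic pattern:
 * exactly one writer, any number of readers. Because no second writer can
 * interleave, only the final store needs to be atomic, which is the minimum
 * the semi_atomic_*() family guarantees. */
static inline void example_single_writer_add(volatile uint32_t *stat,
                                             uint32_t chunk)
{
    semi_atomic_fetch_add_u32(stat, chunk);
}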

/* Fallback implementations of atomic utility functions: */

/**
 * @brief   Generates a static inline function implementing
 *          `atomic_load_u<width>()`
 */
#define ATOMIC_LOAD_IMPL(name, type) \
    static inline type CONCAT(atomic_load_, name)(const volatile type *var) \
    { \
        unsigned state = irq_disable(); \
        type result = *var; \
        irq_restore(state); \
        return result; \
    }

#ifndef HAS_ATOMIC_LOAD_U8
ATOMIC_LOAD_IMPL(u8, uint8_t)
#endif
#ifndef HAS_ATOMIC_LOAD_U16
ATOMIC_LOAD_IMPL(u16, uint16_t)
#endif
#ifndef HAS_ATOMIC_LOAD_U32
ATOMIC_LOAD_IMPL(u32, uint32_t)
#endif
#ifndef HAS_ATOMIC_LOAD_U64
ATOMIC_LOAD_IMPL(u64, uint64_t)
#endif

/**
 * @brief   Generates a static inline function implementing
 *          `atomic_store_u<width>()`
 */
#define ATOMIC_STORE_IMPL(name, type) \
    static inline void CONCAT(atomic_store_, name) \
        (volatile type *dest, type val) \
    { \
        unsigned state = irq_disable(); \
        *dest = val; \
        irq_restore(state); \
    }

#ifndef HAS_ATOMIC_STORE_U8
ATOMIC_STORE_IMPL(u8, uint8_t)
#endif
#ifndef HAS_ATOMIC_STORE_U16
ATOMIC_STORE_IMPL(u16, uint16_t)
#endif
#ifndef HAS_ATOMIC_STORE_U32
ATOMIC_STORE_IMPL(u32, uint32_t)
#endif
#ifndef HAS_ATOMIC_STORE_U64
ATOMIC_STORE_IMPL(u64, uint64_t)
#endif

/**
 * @brief   Generates a static inline function implementing
 *          `atomic_fetch_<op>_u<width>()`
 */
#define ATOMIC_FETCH_OP_IMPL(opname, op, name, type) \
    static inline type CONCAT4(atomic_fetch_, opname, _, name) \
        (volatile type *dest, type val) \
    { \
        unsigned state = irq_disable(); \
        const type result = *dest; \
        *dest = result op val; \
        irq_restore(state); \
        return result; \
    }

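/* For reference, ATOMIC_FETCH_OP_IMPL(add, +, u8, uint8_t) below expands
 * (modulo whitespace) to:
 *
 *     static inline uint8_t atomic_fetch_add_u8(volatile uint8_t *dest,
 *                                               uint8_t val)
 *     {
 *         unsigned state = irq_disable();
 *         const uint8_t result = *dest;
 *         *dest = result + val;
 *         irq_restore(state);
 *         return result;
 *     }
 */
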
#ifndef HAS_ATOMIC_FETCH_ADD_U8
ATOMIC_FETCH_OP_IMPL(add, +, u8, uint8_t)
#endif
#ifndef HAS_ATOMIC_FETCH_ADD_U16
ATOMIC_FETCH_OP_IMPL(add, +, u16, uint16_t)
#endif
#ifndef HAS_ATOMIC_FETCH_ADD_U32
ATOMIC_FETCH_OP_IMPL(add, +, u32, uint32_t)
#endif
#ifndef HAS_ATOMIC_FETCH_ADD_U64
ATOMIC_FETCH_OP_IMPL(add, +, u64, uint64_t)
#endif

#ifndef HAS_ATOMIC_FETCH_SUB_U8
ATOMIC_FETCH_OP_IMPL(sub, -, u8, uint8_t)
#endif
#ifndef HAS_ATOMIC_FETCH_SUB_U16
ATOMIC_FETCH_OP_IMPL(sub, -, u16, uint16_t)
#endif
#ifndef HAS_ATOMIC_FETCH_SUB_U32
ATOMIC_FETCH_OP_IMPL(sub, -, u32, uint32_t)
#endif
#ifndef HAS_ATOMIC_FETCH_SUB_U64
ATOMIC_FETCH_OP_IMPL(sub, -, u64, uint64_t)
#endif

#ifndef HAS_ATOMIC_FETCH_OR_U8
ATOMIC_FETCH_OP_IMPL(or, |, u8, uint8_t)
#endif
#ifndef HAS_ATOMIC_FETCH_OR_U16
ATOMIC_FETCH_OP_IMPL(or, |, u16, uint16_t)
#endif
#ifndef HAS_ATOMIC_FETCH_OR_U32
ATOMIC_FETCH_OP_IMPL(or, |, u32, uint32_t)
#endif
#ifndef HAS_ATOMIC_FETCH_OR_U64
ATOMIC_FETCH_OP_IMPL(or, |, u64, uint64_t)
#endif

#ifndef HAS_ATOMIC_FETCH_XOR_U8
ATOMIC_FETCH_OP_IMPL(xor, ^, u8, uint8_t)
#endif
#ifndef HAS_ATOMIC_FETCH_XOR_U16
ATOMIC_FETCH_OP_IMPL(xor, ^, u16, uint16_t)
#endif
#ifndef HAS_ATOMIC_FETCH_XOR_U32
ATOMIC_FETCH_OP_IMPL(xor, ^, u32, uint32_t)
#endif
#ifndef HAS_ATOMIC_FETCH_XOR_U64
ATOMIC_FETCH_OP_IMPL(xor, ^, u64, uint64_t)
#endif

#ifndef HAS_ATOMIC_FETCH_AND_U8
ATOMIC_FETCH_OP_IMPL(and, &, u8, uint8_t)
#endif
#ifndef HAS_ATOMIC_FETCH_AND_U16
ATOMIC_FETCH_OP_IMPL(and, &, u16, uint16_t)
#endif
#ifndef HAS_ATOMIC_FETCH_AND_U32
ATOMIC_FETCH_OP_IMPL(and, &, u32, uint32_t)
#endif
#ifndef HAS_ATOMIC_FETCH_AND_U64
ATOMIC_FETCH_OP_IMPL(and, &, u64, uint64_t)
#endif

#ifndef HAS_ATOMIC_BIT
static inline atomic_bit_u8_t atomic_bit_u8(volatile uint8_t *dest,
                                            uint8_t bit)
{
    atomic_bit_u8_t result = { .dest = dest, .mask = 1U << bit };
    return result;
}
static inline atomic_bit_u16_t atomic_bit_u16(volatile uint16_t *dest,
                                              uint8_t bit)
{
    atomic_bit_u16_t result = { .dest = dest, .mask = 1U << bit };
    return result;
}
static inline atomic_bit_u32_t atomic_bit_u32(volatile uint32_t *dest,
                                              uint8_t bit)
{
    atomic_bit_u32_t result = { .dest = dest, .mask = 1UL << bit };
    return result;
}
static inline atomic_bit_u64_t atomic_bit_u64(volatile uint64_t *dest,
                                              uint8_t bit)
{
    atomic_bit_u64_t result = { .dest = dest, .mask = 1ULL << bit };
    return result;
}
static inline void atomic_set_bit_u8(atomic_bit_u8_t bit)
{
    atomic_fetch_or_u8(bit.dest, bit.mask);
}
static inline void atomic_set_bit_u16(atomic_bit_u16_t bit)
{
    atomic_fetch_or_u16(bit.dest, bit.mask);
}
static inline void atomic_set_bit_u32(atomic_bit_u32_t bit)
{
    atomic_fetch_or_u32(bit.dest, bit.mask);
}
static inline void atomic_set_bit_u64(atomic_bit_u64_t bit)
{
    atomic_fetch_or_u64(bit.dest, bit.mask);
}
static inline void atomic_clear_bit_u8(atomic_bit_u8_t bit)
{
    atomic_fetch_and_u8(bit.dest, ~bit.mask);
}
static inline void atomic_clear_bit_u16(atomic_bit_u16_t bit)
{
    atomic_fetch_and_u16(bit.dest, ~bit.mask);
}
static inline void atomic_clear_bit_u32(atomic_bit_u32_t bit)
{
    atomic_fetch_and_u32(bit.dest, ~bit.mask);
}
static inline void atomic_clear_bit_u64(atomic_bit_u64_t bit)
{
    atomic_fetch_and_u64(bit.dest, ~bit.mask);
}
#endif

/* Provide semi_atomic_*() functions on top:
 *
 * - If atomic_<FOO>() is provided: Use it for semi_atomic_<FOO>() as well.
 * - Else:
 *     - If a matching `atomic_store_u<BITS>()` is provided: Only make the
 *       final store atomic, as we can avoid touching the IRQ state register
 *       that way.
 *     - Else: Since we need to disable and re-enable IRQs anyway, just use
 *       the fallback implementation of `atomic_<FOO>()` for
 *       `semi_atomic_<FOO>()` as well.
 */
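
/* Why the store-only variant is limited to a single writer (sketch; assume
 * x starts at 5 and two writers increment concurrently):
 *
 *     writer A: old = atomic_load_u8(&x);      // reads 5
 *     writer B: old = atomic_load_u8(&x);      // also reads 5
 *     writer A: atomic_store_u8(&x, old + 1);  // x == 6
 *     writer B: atomic_store_u8(&x, old + 1);  // x == 6, A's update lost
 *
 * With one writer this interleaving cannot occur, so making only the final
 * store atomic is sufficient. */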

/* FETCH_ADD */
#if defined(HAS_ATOMIC_FETCH_ADD_U8) || !defined(HAS_ATOMIC_STORE_U8)
static inline uint8_t semi_atomic_fetch_add_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    return atomic_fetch_add_u8(dest, val);
}
#else
static inline uint8_t semi_atomic_fetch_add_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    uint8_t result = atomic_load_u8(dest);
    atomic_store_u8(dest, result + val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_ADD_U8 || !HAS_ATOMIC_STORE_U8 */

#if defined(HAS_ATOMIC_FETCH_ADD_U16) || !defined(HAS_ATOMIC_STORE_U16)
static inline uint16_t semi_atomic_fetch_add_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    return atomic_fetch_add_u16(dest, val);
}
#else
static inline uint16_t semi_atomic_fetch_add_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    uint16_t result = atomic_load_u16(dest);
    atomic_store_u16(dest, result + val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_ADD_U16 || !HAS_ATOMIC_STORE_U16 */

#if defined(HAS_ATOMIC_FETCH_ADD_U32) || !defined(HAS_ATOMIC_STORE_U32)
static inline uint32_t semi_atomic_fetch_add_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    return atomic_fetch_add_u32(dest, val);
}
#else
static inline uint32_t semi_atomic_fetch_add_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    uint32_t result = atomic_load_u32(dest);
    atomic_store_u32(dest, result + val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_ADD_U32 || !HAS_ATOMIC_STORE_U32 */

#if defined(HAS_ATOMIC_FETCH_ADD_U64) || !defined(HAS_ATOMIC_STORE_U64)
static inline uint64_t semi_atomic_fetch_add_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    return atomic_fetch_add_u64(dest, val);
}
#else
static inline uint64_t semi_atomic_fetch_add_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    uint64_t result = atomic_load_u64(dest);
    atomic_store_u64(dest, result + val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_ADD_U64 || !HAS_ATOMIC_STORE_U64 */

/* FETCH_SUB */
#if defined(HAS_ATOMIC_FETCH_SUB_U8) || !defined(HAS_ATOMIC_STORE_U8)
static inline uint8_t semi_atomic_fetch_sub_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    return atomic_fetch_sub_u8(dest, val);
}
#else
static inline uint8_t semi_atomic_fetch_sub_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    uint8_t result = atomic_load_u8(dest);
    atomic_store_u8(dest, result - val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_SUB_U8 || !HAS_ATOMIC_STORE_U8 */

#if defined(HAS_ATOMIC_FETCH_SUB_U16) || !defined(HAS_ATOMIC_STORE_U16)
static inline uint16_t semi_atomic_fetch_sub_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    return atomic_fetch_sub_u16(dest, val);
}
#else
static inline uint16_t semi_atomic_fetch_sub_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    uint16_t result = atomic_load_u16(dest);
    atomic_store_u16(dest, result - val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_SUB_U16 || !HAS_ATOMIC_STORE_U16 */

#if defined(HAS_ATOMIC_FETCH_SUB_U32) || !defined(HAS_ATOMIC_STORE_U32)
static inline uint32_t semi_atomic_fetch_sub_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    return atomic_fetch_sub_u32(dest, val);
}
#else
static inline uint32_t semi_atomic_fetch_sub_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    uint32_t result = atomic_load_u32(dest);
    atomic_store_u32(dest, result - val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_SUB_U32 || !HAS_ATOMIC_STORE_U32 */

#if defined(HAS_ATOMIC_FETCH_SUB_U64) || !defined(HAS_ATOMIC_STORE_U64)
static inline uint64_t semi_atomic_fetch_sub_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    return atomic_fetch_sub_u64(dest, val);
}
#else
static inline uint64_t semi_atomic_fetch_sub_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    uint64_t result = atomic_load_u64(dest);
    atomic_store_u64(dest, result - val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_SUB_U64 || !HAS_ATOMIC_STORE_U64 */

/* FETCH_OR */
#if defined(HAS_ATOMIC_FETCH_OR_U8) || !defined(HAS_ATOMIC_STORE_U8)
static inline uint8_t semi_atomic_fetch_or_u8(volatile uint8_t *dest,
                                              uint8_t val)
{
    return atomic_fetch_or_u8(dest, val);
}
#else
static inline uint8_t semi_atomic_fetch_or_u8(volatile uint8_t *dest,
                                              uint8_t val)
{
    uint8_t result = atomic_load_u8(dest);
    atomic_store_u8(dest, result | val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_OR_U8 || !HAS_ATOMIC_STORE_U8 */

#if defined(HAS_ATOMIC_FETCH_OR_U16) || !defined(HAS_ATOMIC_STORE_U16)
static inline uint16_t semi_atomic_fetch_or_u16(volatile uint16_t *dest,
                                                uint16_t val)
{
    return atomic_fetch_or_u16(dest, val);
}
#else
static inline uint16_t semi_atomic_fetch_or_u16(volatile uint16_t *dest,
                                                uint16_t val)
{
    uint16_t result = atomic_load_u16(dest);
    atomic_store_u16(dest, result | val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_OR_U16 || !HAS_ATOMIC_STORE_U16 */

#if defined(HAS_ATOMIC_FETCH_OR_U32) || !defined(HAS_ATOMIC_STORE_U32)
static inline uint32_t semi_atomic_fetch_or_u32(volatile uint32_t *dest,
                                                uint32_t val)
{
    return atomic_fetch_or_u32(dest, val);
}
#else
static inline uint32_t semi_atomic_fetch_or_u32(volatile uint32_t *dest,
                                                uint32_t val)
{
    uint32_t result = atomic_load_u32(dest);
    atomic_store_u32(dest, result | val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_OR_U32 || !HAS_ATOMIC_STORE_U32 */

#if defined(HAS_ATOMIC_FETCH_OR_U64) || !defined(HAS_ATOMIC_STORE_U64)
static inline uint64_t semi_atomic_fetch_or_u64(volatile uint64_t *dest,
                                                uint64_t val)
{
    return atomic_fetch_or_u64(dest, val);
}
#else
static inline uint64_t semi_atomic_fetch_or_u64(volatile uint64_t *dest,
                                                uint64_t val)
{
    uint64_t result = atomic_load_u64(dest);
    atomic_store_u64(dest, result | val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_OR_U64 || !HAS_ATOMIC_STORE_U64 */

/* FETCH_XOR */
#if defined(HAS_ATOMIC_FETCH_XOR_U8) || !defined(HAS_ATOMIC_STORE_U8)
static inline uint8_t semi_atomic_fetch_xor_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    return atomic_fetch_xor_u8(dest, val);
}
#else
static inline uint8_t semi_atomic_fetch_xor_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    uint8_t result = atomic_load_u8(dest);
    atomic_store_u8(dest, result ^ val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_XOR_U8 || !HAS_ATOMIC_STORE_U8 */

#if defined(HAS_ATOMIC_FETCH_XOR_U16) || !defined(HAS_ATOMIC_STORE_U16)
static inline uint16_t semi_atomic_fetch_xor_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    return atomic_fetch_xor_u16(dest, val);
}
#else
static inline uint16_t semi_atomic_fetch_xor_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    uint16_t result = atomic_load_u16(dest);
    atomic_store_u16(dest, result ^ val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_XOR_U16 || !HAS_ATOMIC_STORE_U16 */

#if defined(HAS_ATOMIC_FETCH_XOR_U32) || !defined(HAS_ATOMIC_STORE_U32)
static inline uint32_t semi_atomic_fetch_xor_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    return atomic_fetch_xor_u32(dest, val);
}
#else
static inline uint32_t semi_atomic_fetch_xor_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    uint32_t result = atomic_load_u32(dest);
    atomic_store_u32(dest, result ^ val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_XOR_U32 || !HAS_ATOMIC_STORE_U32 */

#if defined(HAS_ATOMIC_FETCH_XOR_U64) || !defined(HAS_ATOMIC_STORE_U64)
static inline uint64_t semi_atomic_fetch_xor_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    return atomic_fetch_xor_u64(dest, val);
}
#else
static inline uint64_t semi_atomic_fetch_xor_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    uint64_t result = atomic_load_u64(dest);
    atomic_store_u64(dest, result ^ val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_XOR_U64 || !HAS_ATOMIC_STORE_U64 */

/* FETCH_AND */
#if defined(HAS_ATOMIC_FETCH_AND_U8) || !defined(HAS_ATOMIC_STORE_U8)
static inline uint8_t semi_atomic_fetch_and_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    return atomic_fetch_and_u8(dest, val);
}
#else
static inline uint8_t semi_atomic_fetch_and_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    uint8_t result = atomic_load_u8(dest);
    atomic_store_u8(dest, result & val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_AND_U8 || !HAS_ATOMIC_STORE_U8 */

#if defined(HAS_ATOMIC_FETCH_AND_U16) || !defined(HAS_ATOMIC_STORE_U16)
static inline uint16_t semi_atomic_fetch_and_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    return atomic_fetch_and_u16(dest, val);
}
#else
static inline uint16_t semi_atomic_fetch_and_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    uint16_t result = atomic_load_u16(dest);
    atomic_store_u16(dest, result & val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_AND_U16 || !HAS_ATOMIC_STORE_U16 */

#if defined(HAS_ATOMIC_FETCH_AND_U32) || !defined(HAS_ATOMIC_STORE_U32)
static inline uint32_t semi_atomic_fetch_and_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    return atomic_fetch_and_u32(dest, val);
}
#else
static inline uint32_t semi_atomic_fetch_and_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    uint32_t result = atomic_load_u32(dest);
    atomic_store_u32(dest, result & val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_AND_U32 || !HAS_ATOMIC_STORE_U32 */

#if defined(HAS_ATOMIC_FETCH_AND_U64) || !defined(HAS_ATOMIC_STORE_U64)
static inline uint64_t semi_atomic_fetch_and_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    return atomic_fetch_and_u64(dest, val);
}
#else
static inline uint64_t semi_atomic_fetch_and_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    uint64_t result = atomic_load_u64(dest);
    atomic_store_u64(dest, result & val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_AND_U64 || !HAS_ATOMIC_STORE_U64 */

#ifdef __cplusplus
}
#endif

/* NOLINTEND(bugprone-macro-parentheses, readability-inconsistent-declaration-parameter-name) */