142 #include "atomic_utils_arch.h"
160 #if !defined(HAS_ATOMIC_BIT) || defined(DOXYGEN)
221 #if UINT_MAX == UINT16_MAX
223 #elif UINT_MAX == UINT32_MAX
273 if (
sizeof(uint64_t) ==
sizeof(
unsigned)) {
277 if (
sizeof(uint32_t) ==
sizeof(
unsigned)) {
291 if (
sizeof(uintptr_t) == 2) {
295 if (
sizeof(uintptr_t) == 4) {
361 if (
sizeof(uint64_t) ==
sizeof(
unsigned)) {
364 else if (
sizeof(uint32_t) ==
sizeof(
unsigned)) {
380 if (
sizeof(uintptr_t) == 2) {
383 else if (
sizeof(uintptr_t) == 4) {
461 if (
sizeof(
unsigned) ==
sizeof(uint64_t)) {
465 if (
sizeof(
unsigned) ==
sizeof(uint32_t)) {
494 uint16_t subtrahend);
503 uint32_t subtrahend);
512 uint64_t subtrahend);
527 if (
sizeof(
unsigned) ==
sizeof(uint64_t)) {
531 if (
sizeof(
unsigned) ==
sizeof(uint32_t)) {
592 if (
sizeof(
unsigned) ==
sizeof(uint64_t)) {
596 if (
sizeof(
unsigned) ==
sizeof(uint32_t)) {
657 if (
sizeof(
unsigned) ==
sizeof(uint64_t)) {
661 if (
sizeof(
unsigned) ==
sizeof(uint32_t)) {
722 if (
sizeof(
unsigned) ==
sizeof(uint64_t)) {
726 if (
sizeof(
unsigned) ==
sizeof(uint32_t)) {
790 #if UINT_MAX == UINT16_MAX
792 #elif UINT_MAX == UINT32_MAX
830 #if UINT_MAX == UINT16_MAX
832 #elif UINT_MAX == UINT32_MAX
870 #if UINT_MAX == UINT16_MAX
872 #elif UINT_MAX == UINT32_MAX
934 if (
sizeof(
unsigned) ==
sizeof(uint64_t)) {
938 if (
sizeof(
unsigned) ==
sizeof(uint32_t)) {
967 uint16_t subtrahend);
976 uint32_t subtrahend);
985 uint64_t subtrahend);
1000 if (
sizeof(
unsigned) ==
sizeof(uint64_t)) {
1004 if (
sizeof(
unsigned) ==
sizeof(uint32_t)) {
1065 if (
sizeof(
unsigned) ==
sizeof(uint64_t)) {
1069 if (
sizeof(
unsigned) ==
sizeof(uint32_t)) {
1131 if (
sizeof(
unsigned) ==
sizeof(uint64_t)) {
1135 if (
sizeof(
unsigned) ==
sizeof(uint32_t)) {
1197 if (
sizeof(
unsigned) ==
sizeof(uint64_t)) {
1201 if (
sizeof(
unsigned) ==
sizeof(uint32_t)) {
1218 #define ATOMIC_LOAD_IMPL(name, type) \
1219 static inline type CONCAT(atomic_load_, name)(const volatile type *var) \
1221 unsigned state = irq_disable(); \
1222 type result = *var; \
1223 irq_restore(state); \
1227 #ifndef HAS_ATOMIC_LOAD_U8
1230 #ifndef HAS_ATOMIC_LOAD_U16
1233 #ifndef HAS_ATOMIC_LOAD_U32
1236 #ifndef HAS_ATOMIC_LOAD_U64
1247 #define ATOMIC_STORE_IMPL(name, type) \
1248 static inline void CONCAT(atomic_store_, name) \
1249 (volatile type *dest, type val) \
1251 unsigned state = irq_disable(); \
1253 irq_restore(state); \
1256 #ifndef HAS_ATOMIC_STORE_U8
1259 #ifndef HAS_ATOMIC_STORE_U16
1262 #ifndef HAS_ATOMIC_STORE_U32
1265 #ifndef HAS_ATOMIC_STORE_U64
1278 #define ATOMIC_FETCH_OP_IMPL(opname, op, name, type) \
1279 static inline type CONCAT4(atomic_fetch_, opname, _, name) \
1280 (volatile type *dest, type val) \
1282 unsigned state = irq_disable(); \
1283 const type result = *dest; \
1284 *dest = result op val; \
1285 irq_restore(state); \
1289 #ifndef HAS_ATOMIC_FETCH_ADD_U8
1292 #ifndef HAS_ATOMIC_FETCH_ADD_U16
1295 #ifndef HAS_ATOMIC_FETCH_ADD_U32
1298 #ifndef HAS_ATOMIC_FETCH_ADD_U64
1302 #ifndef HAS_ATOMIC_FETCH_SUB_U8
1305 #ifndef HAS_ATOMIC_FETCH_SUB_U16
1308 #ifndef HAS_ATOMIC_FETCH_SUB_U32
1311 #ifndef HAS_ATOMIC_FETCH_SUB_U64
1315 #ifndef HAS_ATOMIC_FETCH_OR_U8
1318 #ifndef HAS_ATOMIC_FETCH_OR_U16
1321 #ifndef HAS_ATOMIC_FETCH_OR_U32
1324 #ifndef HAS_ATOMIC_FETCH_OR_U64
1328 #ifndef HAS_ATOMIC_FETCH_XOR_U8
1331 #ifndef HAS_ATOMIC_FETCH_XOR_U16
1334 #ifndef HAS_ATOMIC_FETCH_XOR_U32
1337 #ifndef HAS_ATOMIC_FETCH_XOR_U64
1341 #ifndef HAS_ATOMIC_FETCH_AND_U8
1344 #ifndef HAS_ATOMIC_FETCH_AND_U16
1347 #ifndef HAS_ATOMIC_FETCH_AND_U32
1350 #ifndef HAS_ATOMIC_FETCH_AND_U64
1354 #ifndef HAS_ATOMIC_BIT
1426 #if defined(HAS_ATOMIC_FETCH_ADD_U8) || !defined(HAS_ATOMIC_STORE_U8)
1442 #if defined(HAS_ATOMIC_FETCH_ADD_U16) || !defined(HAS_ATOMIC_STORE_U16)
1458 #if defined(HAS_ATOMIC_FETCH_ADD_U32) || !defined(HAS_ATOMIC_STORE_U32)
1474 #if defined(HAS_ATOMIC_FETCH_ADD_U64) || !defined(HAS_ATOMIC_STORE_U64)
1489 #if defined(HAS_ATOMIC_FETCH_SUB_U8) || !defined(HAS_ATOMIC_STORE_U8)
1505 #if defined(HAS_ATOMIC_FETCH_SUB_U16) || !defined(HAS_ATOMIC_STORE_U16)
1521 #if defined(HAS_ATOMIC_FETCH_SUB_U32) || !defined(HAS_ATOMIC_STORE_U32)
1537 #if defined(HAS_ATOMIC_FETCH_SUB_U64) || !defined(HAS_ATOMIC_STORE_U64)
1554 #if defined(HAS_ATOMIC_FETCH_OR_U8) || !defined(HAS_ATOMIC_STORE_U8)
1570 #if defined(HAS_ATOMIC_FETCH_OR_U16) || !defined(HAS_ATOMIC_STORE_U16)
1586 #if defined(HAS_ATOMIC_FETCH_OR_U32) || !defined(HAS_ATOMIC_STORE_U32)
1602 #if defined(HAS_ATOMIC_FETCH_OR_U64) || !defined(HAS_ATOMIC_STORE_U64)
1619 #if defined(HAS_ATOMIC_FETCH_XOR_U8) || !defined(HAS_ATOMIC_STORE_U8)
1635 #if defined(HAS_ATOMIC_FETCH_XOR_U16) || !defined(HAS_ATOMIC_STORE_U16)
1651 #if defined(HAS_ATOMIC_FETCH_XOR_U32) || !defined(HAS_ATOMIC_STORE_U32)
1667 #if defined(HAS_ATOMIC_FETCH_XOR_U64) || !defined(HAS_ATOMIC_STORE_U64)
1684 #if defined(HAS_ATOMIC_FETCH_AND_U8) || !defined(HAS_ATOMIC_STORE_U8)
1700 #if defined(HAS_ATOMIC_FETCH_AND_U16) || !defined(HAS_ATOMIC_STORE_U16)
1716 #if defined(HAS_ATOMIC_FETCH_AND_U32) || !defined(HAS_ATOMIC_STORE_U32)
1732 #if defined(HAS_ATOMIC_FETCH_AND_U64) || !defined(HAS_ATOMIC_STORE_U64)
int16_t kernel_pid_t
Unique process identifier.
static void atomic_store_uintptr(volatile uintptr_t *dest, uintptr_t val)
Store an uintptr_t atomically.
static atomic_bit_u32_t atomic_bit_u32(volatile uint32_t *dest, uint8_t bit)
Create a reference to a bit in an uint32_t
static uint64_t atomic_fetch_sub_u64(volatile uint64_t *dest, uint64_t subtrahend)
Atomically subtract a value from a given value.
static void atomic_store_u8(volatile uint8_t *dest, uint8_t val)
Store an uint8_t atomically.
static uint32_t atomic_fetch_or_u32(volatile uint32_t *dest, uint32_t val)
Atomic version of *dest |= val
static uint32_t semi_atomic_fetch_and_u32(volatile uint32_t *dest, uint32_t val)
Semi-atomic version of *dest &= val
static void atomic_clear_bit_unsigned(atomic_bit_unsigned_t bit)
Atomic version of *dest &= ~(1 << bit)
static void atomic_set_bit_u8(atomic_bit_u8_t bit)
Atomic version of *dest |= (1 << bit)
#define ATOMIC_FETCH_OP_IMPL(opname, op, name, type)
Generates a static inline function implementing atomic_fetch_<op>_u<width>()
static uint8_t semi_atomic_fetch_add_u8(volatile uint8_t *dest, uint8_t summand)
Semi-atomically add a value onto a given value.
static atomic_bit_u64_t atomic_bit_u64(volatile uint64_t *dest, uint8_t bit)
Create a reference to a bit in an uint64_t
static uint32_t atomic_fetch_xor_u32(volatile uint32_t *dest, uint32_t val)
Atomic version of *dest ^= val
#define ATOMIC_LOAD_IMPL(name, type)
Generates a static inline function implementing atomic_load_u<width>()
static void atomic_clear_bit_u32(atomic_bit_u32_t bit)
Atomic version of *dest &= ~(1 << bit)
static uint8_t semi_atomic_fetch_xor_u8(volatile uint8_t *dest, uint8_t val)
Semi-atomic version of *dest ^= val
static uint32_t atomic_load_u32(const volatile uint32_t *var)
Load an uint32_t atomically.
static uint32_t semi_atomic_fetch_add_u32(volatile uint32_t *dest, uint32_t summand)
Semi-atomically add a value onto a given value.
static kernel_pid_t atomic_load_kernel_pid(const volatile kernel_pid_t *var)
Load a kernel_pid_t atomically.
static unsigned atomic_fetch_or_unsigned(volatile unsigned *dest, unsigned val)
Atomic version of *dest |= val
static uint32_t atomic_fetch_and_u32(volatile uint32_t *dest, uint32_t val)
Atomic version of *dest &= val
static uint16_t atomic_fetch_and_u16(volatile uint16_t *dest, uint16_t val)
Atomic version of *dest &= val
static void atomic_store_ptr(void **dest, const void *val)
Store a void * atomically.
static unsigned atomic_fetch_add_unsigned(volatile unsigned *dest, unsigned summand)
Atomically add a value onto a given value.
atomic_bit_u16_t atomic_bit_unsigned_t
Type specifying a bit in an unsigned int
static unsigned semi_atomic_fetch_sub_unsigned(volatile unsigned *dest, unsigned subtrahend)
Semi-atomically subtract a value from a given value.
static uint8_t atomic_fetch_xor_u8(volatile uint8_t *dest, uint8_t val)
Atomic version of *dest ^= val
static void atomic_store_u64(volatile uint64_t *dest, uint64_t val)
Store an uint64_t atomically.
static void * atomic_load_ptr(void **ptr_addr)
Load a void * atomically.
static void atomic_store_unsigned(volatile unsigned *dest, unsigned val)
Store an unsigned int atomically.
static atomic_bit_unsigned_t atomic_bit_unsigned(volatile unsigned *dest, uint8_t bit)
Create a reference to a bit in an unsigned int
static uint16_t atomic_load_u16(const volatile uint16_t *var)
Load an uint16_t atomically.
static unsigned semi_atomic_fetch_or_unsigned(volatile unsigned *dest, unsigned val)
Semi-atomic version of *dest |= val
static uint64_t atomic_fetch_and_u64(volatile uint64_t *dest, uint64_t val)
Atomic version of *dest &= val
static uint16_t atomic_fetch_xor_u16(volatile uint16_t *dest, uint16_t val)
Atomic version of *dest ^= val
static uint64_t atomic_fetch_xor_u64(volatile uint64_t *dest, uint64_t val)
Atomic version of *dest ^= val
static uint64_t semi_atomic_fetch_or_u64(volatile uint64_t *dest, uint64_t val)
Semi-atomic version of *dest |= val
static uint64_t semi_atomic_fetch_xor_u64(volatile uint64_t *dest, uint64_t val)
Semi-atomic version of *dest ^= val
static void atomic_set_bit_u64(atomic_bit_u64_t bit)
Atomic version of *dest |= (1 << bit)
static unsigned semi_atomic_fetch_xor_unsigned(volatile unsigned *dest, unsigned val)
Semi-atomic version of *dest ^= val
static void atomic_clear_bit_u8(atomic_bit_u8_t bit)
Atomic version of *dest &= ~(1 << bit)
static uint16_t semi_atomic_fetch_sub_u16(volatile uint16_t *dest, uint16_t subtrahend)
Semi-atomically subtract a value from a given value.
static uint8_t atomic_load_u8(const volatile uint8_t *var)
Load an uint8_t atomically.
static atomic_bit_u8_t atomic_bit_u8(volatile uint8_t *dest, uint8_t bit)
Create a reference to a bit in an uint8_t
static unsigned semi_atomic_fetch_and_unsigned(volatile unsigned *dest, unsigned val)
Semi-atomic version of *dest &= val
static uint32_t semi_atomic_fetch_or_u32(volatile uint32_t *dest, uint32_t val)
Semi-atomic version of *dest |= val
static uint16_t atomic_fetch_sub_u16(volatile uint16_t *dest, uint16_t subtrahend)
Atomically subtract a value from a given value.
static uint8_t atomic_fetch_sub_u8(volatile uint8_t *dest, uint8_t subtrahend)
Atomically subtract a value from a given value.
static uint32_t semi_atomic_fetch_sub_u32(volatile uint32_t *dest, uint32_t subtrahend)
Semi-atomically subtract a value from a given value.
static uint64_t semi_atomic_fetch_and_u64(volatile uint64_t *dest, uint64_t val)
Semi-atomic version of *dest &= val
static uint8_t atomic_fetch_add_u8(volatile uint8_t *dest, uint8_t summand)
Atomically add a value onto a given value.
static void atomic_clear_bit_u16(atomic_bit_u16_t bit)
Atomic version of *dest &= ~(1 << bit)
static uint64_t semi_atomic_fetch_sub_u64(volatile uint64_t *dest, uint64_t subtrahend)
Semi-atomically subtract a value from a given value.
static void atomic_store_kernel_pid(volatile kernel_pid_t *dest, kernel_pid_t val)
Store a kernel_pid_t atomically.
static void atomic_store_u16(volatile uint16_t *dest, uint16_t val)
Store an uint16_t atomically.
static void atomic_store_u32(volatile uint32_t *dest, uint32_t val)
Store an uint32_t atomically.
static uintptr_t atomic_load_uintptr(const volatile uintptr_t *var)
Load an uintptr_t atomically.
static atomic_bit_u16_t atomic_bit_u16(volatile uint16_t *dest, uint8_t bit)
Create a reference to a bit in an uint16_t
#define ATOMIC_STORE_IMPL(name, type)
Generates a static inline function implementing atomic_store_u<width>()
static uint32_t semi_atomic_fetch_xor_u32(volatile uint32_t *dest, uint32_t val)
Semi-atomic version of *dest ^= val
static uint16_t atomic_fetch_or_u16(volatile uint16_t *dest, uint16_t val)
Atomic version of *dest |= val
static void atomic_set_bit_u32(atomic_bit_u32_t bit)
Atomic version of *dest |= (1 << bit)
static uint16_t semi_atomic_fetch_and_u16(volatile uint16_t *dest, uint16_t val)
Semi-atomic version of *dest &= val
static uint32_t atomic_fetch_sub_u32(volatile uint32_t *dest, uint32_t subtrahend)
Atomically subtract a value from a given value.
static uint16_t semi_atomic_fetch_add_u16(volatile uint16_t *dest, uint16_t summand)
Semi-atomically add a value onto a given value.
static uint64_t semi_atomic_fetch_add_u64(volatile uint64_t *dest, uint64_t summand)
Semi-atomically add a value onto a given value.
static void atomic_clear_bit_u64(atomic_bit_u64_t bit)
Atomic version of *dest &= ~(1 << bit)
static unsigned atomic_fetch_sub_unsigned(volatile unsigned *dest, unsigned subtrahend)
Atomically subtract a value from a given value.
static uint64_t atomic_fetch_add_u64(volatile uint64_t *dest, uint64_t summand)
Atomically add a value onto a given value.
static uint16_t atomic_fetch_add_u16(volatile uint16_t *dest, uint16_t summand)
Atomically add a value onto a given value.
static uint8_t semi_atomic_fetch_sub_u8(volatile uint8_t *dest, uint8_t subtrahend)
Semi-atomically subtract a value from a given value.
static void atomic_set_bit_u16(atomic_bit_u16_t bit)
Atomic version of *dest |= (1 << bit)
static uint16_t semi_atomic_fetch_or_u16(volatile uint16_t *dest, uint16_t val)
Semi-atomic version of *dest |= val
static uint8_t semi_atomic_fetch_or_u8(volatile uint8_t *dest, uint8_t val)
Semi-atomic version of *dest |= val
static uint64_t atomic_fetch_or_u64(volatile uint64_t *dest, uint64_t val)
Atomic version of *dest |= val
static uint64_t atomic_load_u64(const volatile uint64_t *var)
Load an uint64_t atomically.
static unsigned semi_atomic_fetch_add_unsigned(volatile unsigned *dest, unsigned summand)
Semi-atomically add a value onto a given value.
static void atomic_set_bit_unsigned(atomic_bit_unsigned_t bit)
Atomic version of *dest |= (1 << bit)
static uint32_t atomic_fetch_add_u32(volatile uint32_t *dest, uint32_t summand)
Atomically add a value onto a given value.
static uint16_t semi_atomic_fetch_xor_u16(volatile uint16_t *dest, uint16_t val)
Semi-atomic version of *dest ^= val
static unsigned atomic_fetch_xor_unsigned(volatile unsigned *dest, unsigned val)
Atomic version of *dest ^= val
static unsigned atomic_load_unsigned(const volatile unsigned *var)
Load an unsigned int atomically.
static uint8_t atomic_fetch_or_u8(volatile uint8_t *dest, uint8_t val)
Atomic version of *dest |= val
static uint8_t semi_atomic_fetch_and_u8(volatile uint8_t *dest, uint8_t val)
Semi-atomic version of *dest &= val
static unsigned atomic_fetch_and_unsigned(volatile unsigned *dest, unsigned val)
Atomic version of *dest &= val
static uint8_t atomic_fetch_and_u8(volatile uint8_t *dest, uint8_t val)
Atomic version of *dest &= val
Scheduler API definition.
Type specifying a bit in an uint16_t
uint16_t mask
Bitmask used for setting the bit.
volatile uint16_t * dest
Memory containing the bit to set/clear.
Type specifying a bit in an uint32_t
volatile uint32_t * dest
Memory containing the bit to set/clear.
uint32_t mask
Bitmask used for setting the bit.
Type specifying a bit in an uint64_t
volatile uint64_t * dest
Memory containing the bit to set/clear.
uint64_t mask
Bitmask used for setting the bit.
Type specifying a bit in an uint8_t
uint8_t mask
Bitmask used for setting the bit.
volatile uint8_t * dest
Memory containing the bit to set/clear.