145 #include "atomic_utils_arch.h"
163 #if !defined(HAS_ATOMIC_BIT) || defined(DOXYGEN)
224 #if UINT_MAX == UINT16_MAX
226 #elif UINT_MAX == UINT32_MAX
276 if (
sizeof(uint64_t) ==
sizeof(
unsigned)) {
280 if (
sizeof(uint32_t) ==
sizeof(
unsigned)) {
294 if (
sizeof(uintptr_t) == 2) {
298 if (
sizeof(uintptr_t) == 4) {
364 if (
sizeof(uint64_t) ==
sizeof(
unsigned)) {
367 else if (
sizeof(uint32_t) ==
sizeof(
unsigned)) {
383 if (
sizeof(uintptr_t) == 2) {
386 else if (
sizeof(uintptr_t) == 4) {
464 if (
sizeof(
unsigned) ==
sizeof(uint64_t)) {
468 if (
sizeof(
unsigned) ==
sizeof(uint32_t)) {
497 uint16_t subtrahend);
506 uint32_t subtrahend);
515 uint64_t subtrahend);
530 if (
sizeof(
unsigned) ==
sizeof(uint64_t)) {
534 if (
sizeof(
unsigned) ==
sizeof(uint32_t)) {
595 if (
sizeof(
unsigned) ==
sizeof(uint64_t)) {
599 if (
sizeof(
unsigned) ==
sizeof(uint32_t)) {
660 if (
sizeof(
unsigned) ==
sizeof(uint64_t)) {
664 if (
sizeof(
unsigned) ==
sizeof(uint32_t)) {
725 if (
sizeof(
unsigned) ==
sizeof(uint64_t)) {
729 if (
sizeof(
unsigned) ==
sizeof(uint32_t)) {
793 #if UINT_MAX == UINT16_MAX
795 #elif UINT_MAX == UINT32_MAX
833 #if UINT_MAX == UINT16_MAX
835 #elif UINT_MAX == UINT32_MAX
873 #if UINT_MAX == UINT16_MAX
875 #elif UINT_MAX == UINT32_MAX
937 if (
sizeof(
unsigned) ==
sizeof(uint64_t)) {
941 if (
sizeof(
unsigned) ==
sizeof(uint32_t)) {
970 uint16_t subtrahend);
979 uint32_t subtrahend);
988 uint64_t subtrahend);
1001 unsigned subtrahend)
1003 if (
sizeof(
unsigned) ==
sizeof(uint64_t)) {
1007 if (
sizeof(
unsigned) ==
sizeof(uint32_t)) {
1068 if (
sizeof(
unsigned) ==
sizeof(uint64_t)) {
1072 if (
sizeof(
unsigned) ==
sizeof(uint32_t)) {
1134 if (
sizeof(
unsigned) ==
sizeof(uint64_t)) {
1138 if (
sizeof(
unsigned) ==
sizeof(uint32_t)) {
1200 if (
sizeof(
unsigned) ==
sizeof(uint64_t)) {
1204 if (
sizeof(
unsigned) ==
sizeof(uint32_t)) {
1221 #define ATOMIC_LOAD_IMPL(name, type) \
1222 static inline type CONCAT(atomic_load_, name)(const volatile type *var) \
1224 unsigned state = irq_disable(); \
1225 type result = *var; \
1226 irq_restore(state); \
1230 #ifndef HAS_ATOMIC_LOAD_U8
1233 #ifndef HAS_ATOMIC_LOAD_U16
1236 #ifndef HAS_ATOMIC_LOAD_U32
1239 #ifndef HAS_ATOMIC_LOAD_U64
1250 #define ATOMIC_STORE_IMPL(name, type) \
1251 static inline void CONCAT(atomic_store_, name) \
1252 (volatile type *dest, type val) \
1254 unsigned state = irq_disable(); \
1256 irq_restore(state); \
1259 #ifndef HAS_ATOMIC_STORE_U8
1262 #ifndef HAS_ATOMIC_STORE_U16
1265 #ifndef HAS_ATOMIC_STORE_U32
1268 #ifndef HAS_ATOMIC_STORE_U64
1281 #define ATOMIC_FETCH_OP_IMPL(opname, op, name, type) \
1282 static inline type CONCAT4(atomic_fetch_, opname, _, name) \
1283 (volatile type *dest, type val) \
1285 unsigned state = irq_disable(); \
1286 const type result = *dest; \
1287 *dest = result op val; \
1288 irq_restore(state); \
1292 #ifndef HAS_ATOMIC_FETCH_ADD_U8
1295 #ifndef HAS_ATOMIC_FETCH_ADD_U16
1298 #ifndef HAS_ATOMIC_FETCH_ADD_U32
1301 #ifndef HAS_ATOMIC_FETCH_ADD_U64
1305 #ifndef HAS_ATOMIC_FETCH_SUB_U8
1308 #ifndef HAS_ATOMIC_FETCH_SUB_U16
1311 #ifndef HAS_ATOMIC_FETCH_SUB_U32
1314 #ifndef HAS_ATOMIC_FETCH_SUB_U64
1318 #ifndef HAS_ATOMIC_FETCH_OR_U8
1321 #ifndef HAS_ATOMIC_FETCH_OR_U16
1324 #ifndef HAS_ATOMIC_FETCH_OR_U32
1327 #ifndef HAS_ATOMIC_FETCH_OR_U64
1331 #ifndef HAS_ATOMIC_FETCH_XOR_U8
1334 #ifndef HAS_ATOMIC_FETCH_XOR_U16
1337 #ifndef HAS_ATOMIC_FETCH_XOR_U32
1340 #ifndef HAS_ATOMIC_FETCH_XOR_U64
1344 #ifndef HAS_ATOMIC_FETCH_AND_U8
1347 #ifndef HAS_ATOMIC_FETCH_AND_U16
1350 #ifndef HAS_ATOMIC_FETCH_AND_U32
1353 #ifndef HAS_ATOMIC_FETCH_AND_U64
1357 #ifndef HAS_ATOMIC_BIT
1429 #if defined(HAS_ATOMIC_FETCH_ADD_U8) || !defined(HAS_ATOMIC_STORE_U8)
1445 #if defined(HAS_ATOMIC_FETCH_ADD_U16) || !defined(HAS_ATOMIC_STORE_U16)
1461 #if defined(HAS_ATOMIC_FETCH_ADD_U32) || !defined(HAS_ATOMIC_STORE_U32)
1477 #if defined(HAS_ATOMIC_FETCH_ADD_U64) || !defined(HAS_ATOMIC_STORE_U64)
1492 #if defined(HAS_ATOMIC_FETCH_SUB_U8) || !defined(HAS_ATOMIC_STORE_U8)
1508 #if defined(HAS_ATOMIC_FETCH_SUB_U16) || !defined(HAS_ATOMIC_STORE_U16)
1524 #if defined(HAS_ATOMIC_FETCH_SUB_U32) || !defined(HAS_ATOMIC_STORE_U32)
1540 #if defined(HAS_ATOMIC_FETCH_SUB_U64) || !defined(HAS_ATOMIC_STORE_U64)
1557 #if defined(HAS_ATOMIC_FETCH_OR_U8) || !defined(HAS_ATOMIC_STORE_U8)
1573 #if defined(HAS_ATOMIC_FETCH_OR_U16) || !defined(HAS_ATOMIC_STORE_U16)
1589 #if defined(HAS_ATOMIC_FETCH_OR_U32) || !defined(HAS_ATOMIC_STORE_U32)
1605 #if defined(HAS_ATOMIC_FETCH_OR_U64) || !defined(HAS_ATOMIC_STORE_U64)
1622 #if defined(HAS_ATOMIC_FETCH_XOR_U8) || !defined(HAS_ATOMIC_STORE_U8)
1638 #if defined(HAS_ATOMIC_FETCH_XOR_U16) || !defined(HAS_ATOMIC_STORE_U16)
1654 #if defined(HAS_ATOMIC_FETCH_XOR_U32) || !defined(HAS_ATOMIC_STORE_U32)
1670 #if defined(HAS_ATOMIC_FETCH_XOR_U64) || !defined(HAS_ATOMIC_STORE_U64)
1687 #if defined(HAS_ATOMIC_FETCH_AND_U8) || !defined(HAS_ATOMIC_STORE_U8)
1703 #if defined(HAS_ATOMIC_FETCH_AND_U16) || !defined(HAS_ATOMIC_STORE_U16)
1719 #if defined(HAS_ATOMIC_FETCH_AND_U32) || !defined(HAS_ATOMIC_STORE_U32)
1735 #if defined(HAS_ATOMIC_FETCH_AND_U64) || !defined(HAS_ATOMIC_STORE_U64)
int16_t kernel_pid_t
Unique process identifier.
static void atomic_store_uintptr(volatile uintptr_t *dest, uintptr_t val)
Store a uintptr_t atomically.
static atomic_bit_u32_t atomic_bit_u32(volatile uint32_t *dest, uint8_t bit)
Create a reference to a bit in a uint32_t
static uint64_t atomic_fetch_sub_u64(volatile uint64_t *dest, uint64_t subtrahend)
Atomically subtract a value from a given value.
static void atomic_store_u8(volatile uint8_t *dest, uint8_t val)
Store a uint8_t atomically.
static uint32_t atomic_fetch_or_u32(volatile uint32_t *dest, uint32_t val)
Atomic version of *dest |= val
static uint32_t semi_atomic_fetch_and_u32(volatile uint32_t *dest, uint32_t val)
Semi-atomic version of *dest &= val
static void atomic_clear_bit_unsigned(atomic_bit_unsigned_t bit)
Atomic version of *dest &= ~(1 << bit)
static void atomic_set_bit_u8(atomic_bit_u8_t bit)
Atomic version of *dest |= (1 << bit)
#define ATOMIC_FETCH_OP_IMPL(opname, op, name, type)
Generates a static inline function implementing atomic_fetch_<op>_u<width>()
static uint8_t semi_atomic_fetch_add_u8(volatile uint8_t *dest, uint8_t summand)
Semi-atomically add a value onto a given value.
static atomic_bit_u64_t atomic_bit_u64(volatile uint64_t *dest, uint8_t bit)
Create a reference to a bit in a uint64_t
static uint32_t atomic_fetch_xor_u32(volatile uint32_t *dest, uint32_t val)
Atomic version of *dest ^= val
#define ATOMIC_LOAD_IMPL(name, type)
Generates a static inline function implementing atomic_load_u<width>()
static void atomic_clear_bit_u32(atomic_bit_u32_t bit)
Atomic version of *dest &= ~(1 << bit)
static uint8_t semi_atomic_fetch_xor_u8(volatile uint8_t *dest, uint8_t val)
Semi-atomic version of *dest ^= val
static uint32_t atomic_load_u32(const volatile uint32_t *var)
Load a uint32_t atomically.
static uint32_t semi_atomic_fetch_add_u32(volatile uint32_t *dest, uint32_t summand)
Semi-atomically add a value onto a given value.
static kernel_pid_t atomic_load_kernel_pid(const volatile kernel_pid_t *var)
Load a kernel_pid_t atomically.
static unsigned atomic_fetch_or_unsigned(volatile unsigned *dest, unsigned val)
Atomic version of *dest |= val
static uint32_t atomic_fetch_and_u32(volatile uint32_t *dest, uint32_t val)
Atomic version of *dest &= val
static uint16_t atomic_fetch_and_u16(volatile uint16_t *dest, uint16_t val)
Atomic version of *dest &= val
static void atomic_store_ptr(void **dest, const void *val)
Store a void * atomically.
static unsigned atomic_fetch_add_unsigned(volatile unsigned *dest, unsigned summand)
Atomically add a value onto a given value.
atomic_bit_u16_t atomic_bit_unsigned_t
Type specifying a bit in an unsigned int
static unsigned semi_atomic_fetch_sub_unsigned(volatile unsigned *dest, unsigned subtrahend)
Semi-atomically subtract a value from a given value.
static uint8_t atomic_fetch_xor_u8(volatile uint8_t *dest, uint8_t val)
Atomic version of *dest ^= val
static void atomic_store_u64(volatile uint64_t *dest, uint64_t val)
Store a uint64_t atomically.
static void * atomic_load_ptr(void **ptr_addr)
Load a void * atomically.
static void atomic_store_unsigned(volatile unsigned *dest, unsigned val)
Store an unsigned int atomically.
static atomic_bit_unsigned_t atomic_bit_unsigned(volatile unsigned *dest, uint8_t bit)
Create a reference to a bit in an unsigned int
static uint16_t atomic_load_u16(const volatile uint16_t *var)
Load a uint16_t atomically.
static unsigned semi_atomic_fetch_or_unsigned(volatile unsigned *dest, unsigned val)
Semi-atomic version of *dest |= val
static uint64_t atomic_fetch_and_u64(volatile uint64_t *dest, uint64_t val)
Atomic version of *dest &= val
static uint16_t atomic_fetch_xor_u16(volatile uint16_t *dest, uint16_t val)
Atomic version of *dest ^= val
static uint64_t atomic_fetch_xor_u64(volatile uint64_t *dest, uint64_t val)
Atomic version of *dest ^= val
static uint64_t semi_atomic_fetch_or_u64(volatile uint64_t *dest, uint64_t val)
Semi-atomic version of *dest |= val
static uint64_t semi_atomic_fetch_xor_u64(volatile uint64_t *dest, uint64_t val)
Semi-atomic version of *dest ^= val
static void atomic_set_bit_u64(atomic_bit_u64_t bit)
Atomic version of *dest |= (1 << bit)
static unsigned semi_atomic_fetch_xor_unsigned(volatile unsigned *dest, unsigned val)
Semi-atomic version of *dest ^= val
static void atomic_clear_bit_u8(atomic_bit_u8_t bit)
Atomic version of *dest &= ~(1 << bit)
static uint16_t semi_atomic_fetch_sub_u16(volatile uint16_t *dest, uint16_t subtrahend)
Semi-atomically subtract a value from a given value.
static uint8_t atomic_load_u8(const volatile uint8_t *var)
Load a uint8_t atomically.
static atomic_bit_u8_t atomic_bit_u8(volatile uint8_t *dest, uint8_t bit)
Create a reference to a bit in a uint8_t
static unsigned semi_atomic_fetch_and_unsigned(volatile unsigned *dest, unsigned val)
Semi-atomic version of *dest &= val
static uint32_t semi_atomic_fetch_or_u32(volatile uint32_t *dest, uint32_t val)
Semi-atomic version of *dest |= val
static uint16_t atomic_fetch_sub_u16(volatile uint16_t *dest, uint16_t subtrahend)
Atomically subtract a value from a given value.
static uint8_t atomic_fetch_sub_u8(volatile uint8_t *dest, uint8_t subtrahend)
Atomically subtract a value from a given value.
static uint32_t semi_atomic_fetch_sub_u32(volatile uint32_t *dest, uint32_t subtrahend)
Semi-atomically subtract a value from a given value.
static uint64_t semi_atomic_fetch_and_u64(volatile uint64_t *dest, uint64_t val)
Semi-atomic version of *dest &= val
static uint8_t atomic_fetch_add_u8(volatile uint8_t *dest, uint8_t summand)
Atomically add a value onto a given value.
static void atomic_clear_bit_u16(atomic_bit_u16_t bit)
Atomic version of *dest &= ~(1 << bit)
static uint64_t semi_atomic_fetch_sub_u64(volatile uint64_t *dest, uint64_t subtrahend)
Semi-atomically subtract a value from a given value.
static void atomic_store_kernel_pid(volatile kernel_pid_t *dest, kernel_pid_t val)
Store a kernel_pid_t atomically.
static void atomic_store_u16(volatile uint16_t *dest, uint16_t val)
Store a uint16_t atomically.
static void atomic_store_u32(volatile uint32_t *dest, uint32_t val)
Store a uint32_t atomically.
static uintptr_t atomic_load_uintptr(const volatile uintptr_t *var)
Load a uintptr_t atomically.
static atomic_bit_u16_t atomic_bit_u16(volatile uint16_t *dest, uint8_t bit)
Create a reference to a bit in a uint16_t
#define ATOMIC_STORE_IMPL(name, type)
Generates a static inline function implementing atomic_store_u<width>()
static uint32_t semi_atomic_fetch_xor_u32(volatile uint32_t *dest, uint32_t val)
Semi-atomic version of *dest ^= val
static uint16_t atomic_fetch_or_u16(volatile uint16_t *dest, uint16_t val)
Atomic version of *dest |= val
static void atomic_set_bit_u32(atomic_bit_u32_t bit)
Atomic version of *dest |= (1 << bit)
static uint16_t semi_atomic_fetch_and_u16(volatile uint16_t *dest, uint16_t val)
Semi-atomic version of *dest &= val
static uint32_t atomic_fetch_sub_u32(volatile uint32_t *dest, uint32_t subtrahend)
Atomically subtract a value from a given value.
static uint16_t semi_atomic_fetch_add_u16(volatile uint16_t *dest, uint16_t summand)
Semi-atomically add a value onto a given value.
static uint64_t semi_atomic_fetch_add_u64(volatile uint64_t *dest, uint64_t summand)
Semi-atomically add a value onto a given value.
static void atomic_clear_bit_u64(atomic_bit_u64_t bit)
Atomic version of *dest &= ~(1 << bit)
static unsigned atomic_fetch_sub_unsigned(volatile unsigned *dest, unsigned subtrahend)
Atomically subtract a value from a given value.
static uint64_t atomic_fetch_add_u64(volatile uint64_t *dest, uint64_t summand)
Atomically add a value onto a given value.
static uint16_t atomic_fetch_add_u16(volatile uint16_t *dest, uint16_t summand)
Atomically add a value onto a given value.
static uint8_t semi_atomic_fetch_sub_u8(volatile uint8_t *dest, uint8_t subtrahend)
Semi-atomically subtract a value from a given value.
static void atomic_set_bit_u16(atomic_bit_u16_t bit)
Atomic version of *dest |= (1 << bit)
static uint16_t semi_atomic_fetch_or_u16(volatile uint16_t *dest, uint16_t val)
Semi-atomic version of *dest |= val
static uint8_t semi_atomic_fetch_or_u8(volatile uint8_t *dest, uint8_t val)
Semi-atomic version of *dest |= val
static uint64_t atomic_fetch_or_u64(volatile uint64_t *dest, uint64_t val)
Atomic version of *dest |= val
static uint64_t atomic_load_u64(const volatile uint64_t *var)
Load a uint64_t atomically.
static unsigned semi_atomic_fetch_add_unsigned(volatile unsigned *dest, unsigned summand)
Semi-atomically add a value onto a given value.
static void atomic_set_bit_unsigned(atomic_bit_unsigned_t bit)
Atomic version of *dest |= (1 << bit)
static uint32_t atomic_fetch_add_u32(volatile uint32_t *dest, uint32_t summand)
Atomically add a value onto a given value.
static uint16_t semi_atomic_fetch_xor_u16(volatile uint16_t *dest, uint16_t val)
Semi-atomic version of *dest ^= val
static unsigned atomic_fetch_xor_unsigned(volatile unsigned *dest, unsigned val)
Atomic version of *dest ^= val
static unsigned atomic_load_unsigned(const volatile unsigned *var)
Load an unsigned int atomically.
static uint8_t atomic_fetch_or_u8(volatile uint8_t *dest, uint8_t val)
Atomic version of *dest |= val
static uint8_t semi_atomic_fetch_and_u8(volatile uint8_t *dest, uint8_t val)
Semi-atomic version of *dest &= val
static unsigned atomic_fetch_and_unsigned(volatile unsigned *dest, unsigned val)
Atomic version of *dest &= val
static uint8_t atomic_fetch_and_u8(volatile uint8_t *dest, uint8_t val)
Atomic version of *dest &= val
Scheduler API definition.
Type specifying a bit in a uint16_t
uint16_t mask
Bitmask used for setting the bit.
volatile uint16_t * dest
Memory containing the bit to set/clear.
Type specifying a bit in a uint32_t
volatile uint32_t * dest
Memory containing the bit to set/clear.
uint32_t mask
Bitmask used for setting the bit.
Type specifying a bit in a uint64_t
volatile uint64_t * dest
Memory containing the bit to set/clear.
uint64_t mask
Bitmask used for setting the bit.
Type specifying a bit in a uint8_t
uint8_t mask
Bitmask used for setting the bit.
volatile uint8_t * dest
Memory containing the bit to set/clear.