volatile_utils.h
Go to the documentation of this file.
1 /*
2  * SPDX-FileCopyrightText: 2020 Otto-von-Guericke-Universität Magdeburg
3  * SPDX-License-Identifier: LGPL-2.1-only
4  */
5 
6 #pragma once
7 
27 #include <stdint.h>
28 
29 #ifdef __cplusplus
30 extern "C" {
31 #endif
32 
/**
 * @brief   Load an 8 bit value completely unoptimized
 *
 * @param   var     Address to read from (single volatile access)
 * @return  The value stored at @p var
 */
static inline uint8_t volatile_load_u8(const volatile uint8_t *var)
{
    uint8_t value = var[0];
    return value;
}
/**
 * @brief   Load a 16 bit value completely unoptimized
 *
 * @param   var     Address to read from (single volatile access)
 * @return  The value stored at @p var
 */
static inline uint16_t volatile_load_u16(const volatile uint16_t *var)
{
    uint16_t value = var[0];
    return value;
}
/**
 * @brief   Load a 32 bit value completely unoptimized
 *
 * @param   var     Address to read from (single volatile access)
 * @return  The value stored at @p var
 */
static inline uint32_t volatile_load_u32(const volatile uint32_t *var)
{
    uint32_t value = var[0];
    return value;
}
/**
 * @brief   Load a 64 bit value completely unoptimized
 *
 * @param   var     Address to read from (single volatile access)
 * @return  The value stored at @p var
 */
static inline uint64_t volatile_load_u64(const volatile uint64_t *var)
{
    uint64_t value = var[0];
    return value;
}
69 
/**
 * @brief   Store an 8 bit value completely unoptimized
 *
 * @param   dest    Address to write to (single volatile access)
 * @param   val     Value to store
 */
static inline void volatile_store_u8(volatile uint8_t *dest, uint8_t val)
{
    dest[0] = val;
}
/**
 * @brief   Store a 16 bit value completely unoptimized
 *
 * @param   dest    Address to write to (single volatile access)
 * @param   val     Value to store
 */
static inline void volatile_store_u16(volatile uint16_t *dest, uint16_t val)
{
    dest[0] = val;
}
/**
 * @brief   Store a 32 bit value completely unoptimized
 *
 * @param   dest    Address to write to (single volatile access)
 * @param   val     Value to store
 */
static inline void volatile_store_u32(volatile uint32_t *dest, uint32_t val)
{
    dest[0] = val;
}
/**
 * @brief   Store a 64 bit value completely unoptimized
 *
 * @param   dest    Address to write to (single volatile access)
 * @param   val     Value to store
 */
static inline void volatile_store_u64(volatile uint64_t *dest, uint64_t val)
{
    dest[0] = val;
}
106 
/**
 * @brief   Unoptimized version of `*dest += val`
 *
 * @param   dest    Target to add @p val to
 * @param   val     Value to add
 * @return  Value of @p dest before the addition
 *
 * @note    Load and store are two distinct volatile accesses; this
 *          read-modify-write sequence is not atomic.
 */
static inline uint8_t volatile_fetch_add_u8(volatile uint8_t *dest, uint8_t val)
{
    uint8_t old = *dest;
    *dest = (uint8_t)(old + val);
    return old;
}
/**
 * @brief   Unoptimized version of `*dest -= val`
 *
 * @param   dest    Target to subtract @p val from
 * @param   val     Value to subtract
 * @return  Value of @p dest before the subtraction
 *
 * @note    Load and store are two distinct volatile accesses; this
 *          read-modify-write sequence is not atomic.
 */
static inline uint8_t volatile_fetch_sub_u8(volatile uint8_t *dest, uint8_t val)
{
    uint8_t old = *dest;
    *dest = (uint8_t)(old - val);
    return old;
}
/**
 * @brief   Unoptimized version of `*dest |= val`
 *
 * @param   dest    Target to bitwise-or @p val into
 * @param   val     Value to or in
 * @return  Value of @p dest before the operation
 *
 * @note    Load and store are two distinct volatile accesses; this
 *          read-modify-write sequence is not atomic.
 */
static inline uint8_t volatile_fetch_or_u8(volatile uint8_t *dest, uint8_t val)
{
    uint8_t old = *dest;
    *dest = (uint8_t)(old | val);
    return old;
}
/**
 * @brief   Unoptimized version of `*dest ^= val`
 *
 * @param   dest    Target to bitwise-xor @p val into
 * @param   val     Value to xor in
 * @return  Value of @p dest before the operation
 *
 * @note    Load and store are two distinct volatile accesses; this
 *          read-modify-write sequence is not atomic.
 */
static inline uint8_t volatile_fetch_xor_u8(volatile uint8_t *dest, uint8_t val)
{
    uint8_t old = *dest;
    *dest = (uint8_t)(old ^ val);
    return old;
}
/**
 * @brief   Unoptimized version of `*dest &= val`
 *
 * @param   dest    Target to bitwise-and @p val into
 * @param   val     Value to and in
 * @return  Value of @p dest before the operation
 *
 * @note    Load and store are two distinct volatile accesses; this
 *          read-modify-write sequence is not atomic.
 */
static inline uint8_t volatile_fetch_and_u8(volatile uint8_t *dest, uint8_t val)
{
    uint8_t old = *dest;
    *dest = (uint8_t)(old & val);
    return old;
}
162 
/**
 * @brief   Unoptimized version of `*dest += val`
 *
 * @param   dest    Target to add @p val to
 * @param   val     Value to add
 * @return  Value of @p dest before the addition
 *
 * @note    Load and store are two distinct volatile accesses; this
 *          read-modify-write sequence is not atomic.
 */
static inline uint16_t volatile_fetch_add_u16(volatile uint16_t *dest,
                                              uint16_t val)
{
    uint16_t old = *dest;
    *dest = (uint16_t)(old + val);
    return old;
}
/**
 * @brief   Unoptimized version of `*dest -= val`
 *
 * @param   dest    Target to subtract @p val from
 * @param   val     Value to subtract
 * @return  Value of @p dest before the subtraction
 *
 * @note    Load and store are two distinct volatile accesses; this
 *          read-modify-write sequence is not atomic.
 */
static inline uint16_t volatile_fetch_sub_u16(volatile uint16_t *dest,
                                              uint16_t val)
{
    uint16_t old = *dest;
    *dest = (uint16_t)(old - val);
    return old;
}
/**
 * @brief   Unoptimized version of `*dest |= val`
 *
 * @param   dest    Target to bitwise-or @p val into
 * @param   val     Value to or in
 * @return  Value of @p dest before the operation
 *
 * @note    Load and store are two distinct volatile accesses; this
 *          read-modify-write sequence is not atomic.
 */
static inline uint16_t volatile_fetch_or_u16(volatile uint16_t *dest,
                                             uint16_t val)
{
    uint16_t old = *dest;
    *dest = (uint16_t)(old | val);
    return old;
}
/**
 * @brief   Unoptimized version of `*dest ^= val`
 *
 * @param   dest    Target to bitwise-xor @p val into
 * @param   val     Value to xor in
 * @return  Value of @p dest before the operation
 *
 * @note    Load and store are two distinct volatile accesses; this
 *          read-modify-write sequence is not atomic.
 */
static inline uint16_t volatile_fetch_xor_u16(volatile uint16_t *dest,
                                              uint16_t val)
{
    uint16_t old = *dest;
    *dest = (uint16_t)(old ^ val);
    return old;
}
/**
 * @brief   Unoptimized version of `*dest &= val`
 *
 * @param   dest    Target to bitwise-and @p val into
 * @param   val     Value to and in
 * @return  Value of @p dest before the operation
 *
 * @note    Load and store are two distinct volatile accesses; this
 *          read-modify-write sequence is not atomic.
 */
static inline uint16_t volatile_fetch_and_u16(volatile uint16_t *dest,
                                              uint16_t val)
{
    uint16_t old = *dest;
    *dest = (uint16_t)(old & val);
    return old;
}
223 
/**
 * @brief   Unoptimized version of `*dest += val`
 *
 * @param   dest    Target to add @p val to
 * @param   val     Value to add
 * @return  Value of @p dest before the addition
 *
 * @note    Load and store are two distinct volatile accesses; this
 *          read-modify-write sequence is not atomic.
 */
static inline uint32_t volatile_fetch_add_u32(volatile uint32_t *dest,
                                              uint32_t val)
{
    uint32_t old = *dest;
    *dest = old + val;
    return old;
}
/**
 * @brief   Unoptimized version of `*dest -= val`
 *
 * @param   dest    Target to subtract @p val from
 * @param   val     Value to subtract
 * @return  Value of @p dest before the subtraction
 *
 * @note    Load and store are two distinct volatile accesses; this
 *          read-modify-write sequence is not atomic.
 */
static inline uint32_t volatile_fetch_sub_u32(volatile uint32_t *dest,
                                              uint32_t val)
{
    uint32_t old = *dest;
    *dest = old - val;
    return old;
}
/**
 * @brief   Unoptimized version of `*dest |= val`
 *
 * @param   dest    Target to bitwise-or @p val into
 * @param   val     Value to or in
 * @return  Value of @p dest before the operation
 *
 * @note    Load and store are two distinct volatile accesses; this
 *          read-modify-write sequence is not atomic.
 */
static inline uint32_t volatile_fetch_or_u32(volatile uint32_t *dest,
                                             uint32_t val)
{
    uint32_t old = *dest;
    *dest = old | val;
    return old;
}
/**
 * @brief   Unoptimized version of `*dest ^= val`
 *
 * @param   dest    Target to bitwise-xor @p val into
 * @param   val     Value to xor in
 * @return  Value of @p dest before the operation
 *
 * @note    Load and store are two distinct volatile accesses; this
 *          read-modify-write sequence is not atomic.
 */
static inline uint32_t volatile_fetch_xor_u32(volatile uint32_t *dest,
                                              uint32_t val)
{
    uint32_t old = *dest;
    *dest = old ^ val;
    return old;
}
/**
 * @brief   Unoptimized version of `*dest &= val`
 *
 * @param   dest    Target to bitwise-and @p val into
 * @param   val     Value to and in
 * @return  Value of @p dest before the operation
 *
 * @note    Load and store are two distinct volatile accesses; this
 *          read-modify-write sequence is not atomic.
 */
static inline uint32_t volatile_fetch_and_u32(volatile uint32_t *dest,
                                              uint32_t val)
{
    uint32_t old = *dest;
    *dest = old & val;
    return old;
}
284 
/**
 * @brief   Unoptimized version of `*dest += val`
 *
 * @param   dest    Target to add @p val to
 * @param   val     Value to add
 * @return  Value of @p dest before the addition
 *
 * @note    Load and store are two distinct volatile accesses; this
 *          read-modify-write sequence is not atomic.
 */
static inline uint64_t volatile_fetch_add_u64(volatile uint64_t *dest,
                                              uint64_t val)
{
    uint64_t old = *dest;
    *dest = old + val;
    return old;
}
/**
 * @brief   Unoptimized version of `*dest -= val`
 *
 * @param   dest    Target to subtract @p val from
 * @param   val     Value to subtract
 * @return  Value of @p dest before the subtraction
 *
 * @note    Load and store are two distinct volatile accesses; this
 *          read-modify-write sequence is not atomic.
 */
static inline uint64_t volatile_fetch_sub_u64(volatile uint64_t *dest,
                                              uint64_t val)
{
    uint64_t old = *dest;
    *dest = old - val;
    return old;
}
/**
 * @brief   Unoptimized version of `*dest |= val`
 *
 * @param   dest    Target to bitwise-or @p val into
 * @param   val     Value to or in
 * @return  Value of @p dest before the operation
 *
 * @note    Load and store are two distinct volatile accesses; this
 *          read-modify-write sequence is not atomic.
 */
static inline uint64_t volatile_fetch_or_u64(volatile uint64_t *dest,
                                             uint64_t val)
{
    uint64_t old = *dest;
    *dest = old | val;
    return old;
}
/**
 * @brief   Unoptimized version of `*dest ^= val`
 *
 * @param   dest    Target to bitwise-xor @p val into
 * @param   val     Value to xor in
 * @return  Value of @p dest before the operation
 *
 * @note    Load and store are two distinct volatile accesses; this
 *          read-modify-write sequence is not atomic.
 */
static inline uint64_t volatile_fetch_xor_u64(volatile uint64_t *dest,
                                              uint64_t val)
{
    uint64_t old = *dest;
    *dest = old ^ val;
    return old;
}
/**
 * @brief   Unoptimized version of `*dest &= val`
 *
 * @param   dest    Target to bitwise-and @p val into
 * @param   val     Value to and in
 * @return  Value of @p dest before the operation
 *
 * @note    Load and store are two distinct volatile accesses; this
 *          read-modify-write sequence is not atomic.
 */
static inline uint64_t volatile_fetch_and_u64(volatile uint64_t *dest,
                                              uint64_t val)
{
    uint64_t old = *dest;
    *dest = old & val;
    return old;
}
345 
346 #ifdef __cplusplus
347 }
348 #endif
349 
static uint16_t volatile_fetch_or_u16(volatile uint16_t *dest, uint16_t val)
Unoptimized version of *dest |= val
static uint16_t volatile_fetch_xor_u16(volatile uint16_t *dest, uint16_t val)
Unoptimized version of *dest ^= val
static uint16_t volatile_fetch_and_u16(volatile uint16_t *dest, uint16_t val)
Unoptimized version of *dest &= val
static uint16_t volatile_load_u16(const volatile uint16_t *var)
Load a 16 bit value completely unoptimized.
static uint32_t volatile_fetch_sub_u32(volatile uint32_t *dest, uint32_t val)
Unoptimized version of *dest -= val
static uint8_t volatile_load_u8(const volatile uint8_t *var)
Load an 8 bit value completely unoptimized.
static uint32_t volatile_load_u32(const volatile uint32_t *var)
Load a 32 bit value completely unoptimized.
static uint32_t volatile_fetch_xor_u32(volatile uint32_t *dest, uint32_t val)
Unoptimized version of *dest ^= val
static uint64_t volatile_fetch_sub_u64(volatile uint64_t *dest, uint64_t val)
Unoptimized version of *dest -= val
static uint64_t volatile_fetch_or_u64(volatile uint64_t *dest, uint64_t val)
Unoptimized version of *dest |= val
static uint32_t volatile_fetch_add_u32(volatile uint32_t *dest, uint32_t val)
Unoptimized version of *dest += val
static void volatile_store_u64(volatile uint64_t *dest, uint64_t val)
Store a 64 bit value completely unoptimized.
static uint8_t volatile_fetch_and_u8(volatile uint8_t *dest, uint8_t val)
Unoptimized version of *dest &= val
static uint8_t volatile_fetch_add_u8(volatile uint8_t *dest, uint8_t val)
Unoptimized version of *dest += val
static uint8_t volatile_fetch_sub_u8(volatile uint8_t *dest, uint8_t val)
Unoptimized version of *dest -= val
static uint32_t volatile_fetch_or_u32(volatile uint32_t *dest, uint32_t val)
Unoptimized version of *dest |= val
static void volatile_store_u32(volatile uint32_t *dest, uint32_t val)
Store a 32 bit value completely unoptimized.
static void volatile_store_u16(volatile uint16_t *dest, uint16_t val)
Store a 16 bit value completely unoptimized.
static uint8_t volatile_fetch_xor_u8(volatile uint8_t *dest, uint8_t val)
Unoptimized version of *dest ^= val
static uint64_t volatile_fetch_xor_u64(volatile uint64_t *dest, uint64_t val)
Unoptimized version of *dest ^= val
static uint16_t volatile_fetch_add_u16(volatile uint16_t *dest, uint16_t val)
Unoptimized version of *dest += val
static uint8_t volatile_fetch_or_u8(volatile uint8_t *dest, uint8_t val)
Unoptimized version of *dest |= val
static uint16_t volatile_fetch_sub_u16(volatile uint16_t *dest, uint16_t val)
Unoptimized version of *dest -= val
static uint64_t volatile_fetch_and_u64(volatile uint64_t *dest, uint64_t val)
Unoptimized version of *dest &= val
static uint32_t volatile_fetch_and_u32(volatile uint32_t *dest, uint32_t val)
Unoptimized version of *dest &= val
static void volatile_store_u8(volatile uint8_t *dest, uint8_t val)
Store an 8 bit value completely unoptimized.
static uint64_t volatile_load_u64(const volatile uint64_t *var)
Load a 64 bit value completely unoptimized.
static uint64_t volatile_fetch_add_u64(volatile uint64_t *dest, uint64_t val)
Unoptimized version of *dest += val