volatile_utils.h
Go to the documentation of this file.
1 /*
2  * Copyright (C) 2020 Otto-von-Guericke-Universität Magdeburg
3  *
4  * This file is subject to the terms and conditions of the GNU Lesser General
5  * Public License v2.1. See the file LICENSE in the top level directory for more
6  * details.
7  */
8 
28 #ifndef VOLATILE_UTILS_H
29 #define VOLATILE_UTILS_H
30 
31 #include <stdint.h>
32 
33 #ifdef __cplusplus
34 extern "C" {
35 #endif
36 
/**
 * @brief   Load an 8 bit value completely unoptimized
 * @param   var     address to load from
 * @return  the value read via a single volatile read
 */
static inline uint8_t volatile_load_u8(const volatile uint8_t *var)
{
    uint8_t value = *var;   /* exactly one volatile read */
    return value;
}
/**
 * @brief   Load a 16 bit value completely unoptimized
 * @param   var     address to load from
 * @return  the value read via a single volatile read
 */
static inline uint16_t volatile_load_u16(const volatile uint16_t *var)
{
    uint16_t value = *var;  /* exactly one volatile read */
    return value;
}
/**
 * @brief   Load a 32 bit value completely unoptimized
 * @param   var     address to load from
 * @return  the value read via a single volatile read
 */
static inline uint32_t volatile_load_u32(const volatile uint32_t *var)
{
    uint32_t value = *var;  /* exactly one volatile read */
    return value;
}
/**
 * @brief   Load a 64 bit value completely unoptimized
 * @param   var     address to load from
 * @return  the value read via a single volatile read
 */
static inline uint64_t volatile_load_u64(const volatile uint64_t *var)
{
    uint64_t value = *var;  /* exactly one volatile read */
    return value;
}
73 
/**
 * @brief   Store an 8 bit value completely unoptimized
 * @param   dest    address to write to
 * @param   val     value to write
 */
static inline void volatile_store_u8(volatile uint8_t *dest, uint8_t val)
{
    /* exactly one volatile write */
    *dest = val;
}
/**
 * @brief   Store a 16 bit value completely unoptimized
 * @param   dest    address to write to
 * @param   val     value to write
 */
static inline void volatile_store_u16(volatile uint16_t *dest, uint16_t val)
{
    /* exactly one volatile write */
    *dest = val;
}
/**
 * @brief   Store a 32 bit value completely unoptimized
 * @param   dest    address to write to
 * @param   val     value to write
 */
static inline void volatile_store_u32(volatile uint32_t *dest, uint32_t val)
{
    /* exactly one volatile write */
    *dest = val;
}
/**
 * @brief   Store a 64 bit value completely unoptimized
 * @param   dest    address to write to
 * @param   val     value to write
 */
static inline void volatile_store_u64(volatile uint64_t *dest, uint64_t val)
{
    /* exactly one volatile write */
    *dest = val;
}
110 
/**
 * @brief   Unoptimized version of `*dest += val`
 * @param   dest    address of the value to add to
 * @param   val     amount to add
 * @return  the value of `*dest` before the addition
 */
static inline uint8_t volatile_fetch_add_u8(volatile uint8_t *dest, uint8_t val)
{
    uint8_t old = *dest;    /* volatile read */
    *dest = old + val;      /* volatile write */
    return old;
}
/**
 * @brief   Unoptimized version of `*dest -= val`
 * @param   dest    address of the value to subtract from
 * @param   val     amount to subtract
 * @return  the value of `*dest` before the subtraction
 */
static inline uint8_t volatile_fetch_sub_u8(volatile uint8_t *dest, uint8_t val)
{
    uint8_t old = *dest;    /* volatile read */
    *dest = old - val;      /* volatile write */
    return old;
}
/**
 * @brief   Unoptimized version of `*dest |= val`
 * @param   dest    address of the value to OR into
 * @param   val     bits to set
 * @return  the value of `*dest` before the OR
 */
static inline uint8_t volatile_fetch_or_u8(volatile uint8_t *dest, uint8_t val)
{
    uint8_t old = *dest;    /* volatile read */
    *dest = old | val;      /* volatile write */
    return old;
}
/**
 * @brief   Unoptimized version of `*dest ^= val`
 * @param   dest    address of the value to XOR into
 * @param   val     bits to toggle
 * @return  the value of `*dest` before the XOR
 */
static inline uint8_t volatile_fetch_xor_u8(volatile uint8_t *dest, uint8_t val)
{
    uint8_t old = *dest;    /* volatile read */
    *dest = old ^ val;      /* volatile write */
    return old;
}
/**
 * @brief   Unoptimized version of `*dest &= val`
 * @param   dest    address of the value to AND into
 * @param   val     bit mask to apply
 * @return  the value of `*dest` before the AND
 */
static inline uint8_t volatile_fetch_and_u8(volatile uint8_t *dest, uint8_t val)
{
    uint8_t old = *dest;    /* volatile read */
    *dest = old & val;      /* volatile write */
    return old;
}
166 
/**
 * @brief   Unoptimized version of `*dest += val`
 * @param   dest    address of the value to add to
 * @param   val     amount to add
 * @return  the value of `*dest` before the addition
 */
static inline uint16_t volatile_fetch_add_u16(volatile uint16_t *dest,
                                              uint16_t val)
{
    uint16_t old = *dest;   /* volatile read */
    *dest = old + val;      /* volatile write */
    return old;
}
/**
 * @brief   Unoptimized version of `*dest -= val`
 * @param   dest    address of the value to subtract from
 * @param   val     amount to subtract
 * @return  the value of `*dest` before the subtraction
 */
static inline uint16_t volatile_fetch_sub_u16(volatile uint16_t *dest,
                                              uint16_t val)
{
    uint16_t old = *dest;   /* volatile read */
    *dest = old - val;      /* volatile write */
    return old;
}
/**
 * @brief   Unoptimized version of `*dest |= val`
 * @param   dest    address of the value to OR into
 * @param   val     bits to set
 * @return  the value of `*dest` before the OR
 */
static inline uint16_t volatile_fetch_or_u16(volatile uint16_t *dest,
                                             uint16_t val)
{
    uint16_t old = *dest;   /* volatile read */
    *dest = old | val;      /* volatile write */
    return old;
}
/**
 * @brief   Unoptimized version of `*dest ^= val`
 * @param   dest    address of the value to XOR into
 * @param   val     bits to toggle
 * @return  the value of `*dest` before the XOR
 */
static inline uint16_t volatile_fetch_xor_u16(volatile uint16_t *dest,
                                              uint16_t val)
{
    uint16_t old = *dest;   /* volatile read */
    *dest = old ^ val;      /* volatile write */
    return old;
}
/**
 * @brief   Unoptimized version of `*dest &= val`
 * @param   dest    address of the value to AND into
 * @param   val     bit mask to apply
 * @return  the value of `*dest` before the AND
 */
static inline uint16_t volatile_fetch_and_u16(volatile uint16_t *dest,
                                              uint16_t val)
{
    uint16_t old = *dest;   /* volatile read */
    *dest = old & val;      /* volatile write */
    return old;
}
227 
/**
 * @brief   Unoptimized version of `*dest += val`
 * @param   dest    address of the value to add to
 * @param   val     amount to add
 * @return  the value of `*dest` before the addition
 */
static inline uint32_t volatile_fetch_add_u32(volatile uint32_t *dest,
                                              uint32_t val)
{
    uint32_t old = *dest;   /* volatile read */
    *dest = old + val;      /* volatile write */
    return old;
}
/**
 * @brief   Unoptimized version of `*dest -= val`
 * @param   dest    address of the value to subtract from
 * @param   val     amount to subtract
 * @return  the value of `*dest` before the subtraction
 */
static inline uint32_t volatile_fetch_sub_u32(volatile uint32_t *dest,
                                              uint32_t val)
{
    uint32_t old = *dest;   /* volatile read */
    *dest = old - val;      /* volatile write */
    return old;
}
/**
 * @brief   Unoptimized version of `*dest |= val`
 * @param   dest    address of the value to OR into
 * @param   val     bits to set
 * @return  the value of `*dest` before the OR
 */
static inline uint32_t volatile_fetch_or_u32(volatile uint32_t *dest,
                                             uint32_t val)
{
    uint32_t old = *dest;   /* volatile read */
    *dest = old | val;      /* volatile write */
    return old;
}
/**
 * @brief   Unoptimized version of `*dest ^= val`
 * @param   dest    address of the value to XOR into
 * @param   val     bits to toggle
 * @return  the value of `*dest` before the XOR
 */
static inline uint32_t volatile_fetch_xor_u32(volatile uint32_t *dest,
                                              uint32_t val)
{
    uint32_t old = *dest;   /* volatile read */
    *dest = old ^ val;      /* volatile write */
    return old;
}
/**
 * @brief   Unoptimized version of `*dest &= val`
 * @param   dest    address of the value to AND into
 * @param   val     bit mask to apply
 * @return  the value of `*dest` before the AND
 */
static inline uint32_t volatile_fetch_and_u32(volatile uint32_t *dest,
                                              uint32_t val)
{
    uint32_t old = *dest;   /* volatile read */
    *dest = old & val;      /* volatile write */
    return old;
}
288 
/**
 * @brief   Unoptimized version of `*dest += val`
 * @param   dest    address of the value to add to
 * @param   val     amount to add
 * @return  the value of `*dest` before the addition
 */
static inline uint64_t volatile_fetch_add_u64(volatile uint64_t *dest,
                                              uint64_t val)
{
    uint64_t old = *dest;   /* volatile read */
    *dest = old + val;      /* volatile write */
    return old;
}
/**
 * @brief   Unoptimized version of `*dest -= val`
 * @param   dest    address of the value to subtract from
 * @param   val     amount to subtract
 * @return  the value of `*dest` before the subtraction
 */
static inline uint64_t volatile_fetch_sub_u64(volatile uint64_t *dest,
                                              uint64_t val)
{
    uint64_t old = *dest;   /* volatile read */
    *dest = old - val;      /* volatile write */
    return old;
}
/**
 * @brief   Unoptimized version of `*dest |= val`
 * @param   dest    address of the value to OR into
 * @param   val     bits to set
 * @return  the value of `*dest` before the OR
 */
static inline uint64_t volatile_fetch_or_u64(volatile uint64_t *dest,
                                             uint64_t val)
{
    uint64_t old = *dest;   /* volatile read */
    *dest = old | val;      /* volatile write */
    return old;
}
/**
 * @brief   Unoptimized version of `*dest ^= val`
 * @param   dest    address of the value to XOR into
 * @param   val     bits to toggle
 * @return  the value of `*dest` before the XOR
 */
static inline uint64_t volatile_fetch_xor_u64(volatile uint64_t *dest,
                                              uint64_t val)
{
    uint64_t old = *dest;   /* volatile read */
    *dest = old ^ val;      /* volatile write */
    return old;
}
/**
 * @brief   Unoptimized version of `*dest &= val`
 * @param   dest    address of the value to AND into
 * @param   val     bit mask to apply
 * @return  the value of `*dest` before the AND
 */
static inline uint64_t volatile_fetch_and_u64(volatile uint64_t *dest,
                                              uint64_t val)
{
    uint64_t old = *dest;   /* volatile read */
    *dest = old & val;      /* volatile write */
    return old;
}
349 
350 #ifdef __cplusplus
351 }
352 #endif
353 
354 #endif /* VOLATILE_UTILS_H */
static uint16_t volatile_fetch_or_u16(volatile uint16_t *dest, uint16_t val)
Unoptimized version of *dest |= val
static uint16_t volatile_fetch_xor_u16(volatile uint16_t *dest, uint16_t val)
Unoptimized version of *dest ^= val
static uint16_t volatile_fetch_and_u16(volatile uint16_t *dest, uint16_t val)
Unoptimized version of *dest &= val
static uint16_t volatile_load_u16(const volatile uint16_t *var)
Load a 16 bit value completely unoptimized.
static uint32_t volatile_fetch_sub_u32(volatile uint32_t *dest, uint32_t val)
Unoptimized version of *dest -= val
static uint8_t volatile_load_u8(const volatile uint8_t *var)
Load an 8 bit value completely unoptimized.
static uint32_t volatile_load_u32(const volatile uint32_t *var)
Load a 32 bit value completely unoptimized.
static uint32_t volatile_fetch_xor_u32(volatile uint32_t *dest, uint32_t val)
Unoptimized version of *dest ^= val
static uint64_t volatile_fetch_sub_u64(volatile uint64_t *dest, uint64_t val)
Unoptimized version of *dest -= val
static uint64_t volatile_fetch_or_u64(volatile uint64_t *dest, uint64_t val)
Unoptimized version of *dest |= val
static uint32_t volatile_fetch_add_u32(volatile uint32_t *dest, uint32_t val)
Unoptimized version of *dest += val
static void volatile_store_u64(volatile uint64_t *dest, uint64_t val)
Store a 64 bit value completely unoptimized.
static uint8_t volatile_fetch_and_u8(volatile uint8_t *dest, uint8_t val)
Unoptimized version of *dest &= val
static uint8_t volatile_fetch_add_u8(volatile uint8_t *dest, uint8_t val)
Unoptimized version of *dest += val
static uint8_t volatile_fetch_sub_u8(volatile uint8_t *dest, uint8_t val)
Unoptimized version of *dest -= val
static uint32_t volatile_fetch_or_u32(volatile uint32_t *dest, uint32_t val)
Unoptimized version of *dest |= val
static void volatile_store_u32(volatile uint32_t *dest, uint32_t val)
Store a 32 bit value completely unoptimized.
static void volatile_store_u16(volatile uint16_t *dest, uint16_t val)
Store a 16 bit value completely unoptimized.
static uint8_t volatile_fetch_xor_u8(volatile uint8_t *dest, uint8_t val)
Unoptimized version of *dest ^= val
static uint64_t volatile_fetch_xor_u64(volatile uint64_t *dest, uint64_t val)
Unoptimized version of *dest ^= val
static uint16_t volatile_fetch_add_u16(volatile uint16_t *dest, uint16_t val)
Unoptimized version of *dest += val
static uint8_t volatile_fetch_or_u8(volatile uint8_t *dest, uint8_t val)
Unoptimized version of *dest |= val
static uint16_t volatile_fetch_sub_u16(volatile uint16_t *dest, uint16_t val)
Unoptimized version of *dest -= val
static uint64_t volatile_fetch_and_u64(volatile uint64_t *dest, uint64_t val)
Unoptimized version of *dest &= val
static uint32_t volatile_fetch_and_u32(volatile uint32_t *dest, uint32_t val)
Unoptimized version of *dest &= val
static void volatile_store_u8(volatile uint8_t *dest, uint8_t val)
Store an 8 bit value completely unoptimized.
static uint64_t volatile_load_u64(const volatile uint64_t *var)
Load a 64 bit value completely unoptimized.
static uint64_t volatile_fetch_add_u64(volatile uint64_t *dest, uint64_t val)
Unoptimized version of *dest += val