volatile_utils.h
Go to the documentation of this file.
1 /*
2  * Copyright (C) 2020 Otto-von-Guericke-Universität Magdeburg
3  *
4  * This file is subject to the terms and conditions of the GNU Lesser General
5  * Public License v2.1. See the file LICENSE in the top level directory for more
6  * details.
7  */
8 
9 #pragma once
10 
30 #include <stdint.h>
31 
32 #ifdef __cplusplus
33 extern "C" {
34 #endif
35 
/**
 * @brief   Load an 8 bit value completely unoptimized
 * @param   var     Address to load from (exactly one volatile read)
 * @return  The value read from @p var
 */
static inline uint8_t volatile_load_u8(const volatile uint8_t *var)
{
    return *var;
}
/**
 * @brief   Load a 16 bit value completely unoptimized
 * @param   var     Address to load from (exactly one volatile read)
 * @return  The value read from @p var
 */
static inline uint16_t volatile_load_u16(const volatile uint16_t *var)
{
    return *var;
}
/**
 * @brief   Load a 32 bit value completely unoptimized
 * @param   var     Address to load from (exactly one volatile read)
 * @return  The value read from @p var
 */
static inline uint32_t volatile_load_u32(const volatile uint32_t *var)
{
    return *var;
}
/**
 * @brief   Load a 64 bit value completely unoptimized
 * @param   var     Address to load from (exactly one volatile read)
 * @return  The value read from @p var
 */
static inline uint64_t volatile_load_u64(const volatile uint64_t *var)
{
    return *var;
}
72 
/**
 * @brief   Store an 8 bit value completely unoptimized
 * @param   dest    Address to write to (exactly one volatile write)
 * @param   val     Value to store
 */
static inline void volatile_store_u8(volatile uint8_t *dest, uint8_t val)
{
    *dest = val;
}
/**
 * @brief   Store a 16 bit value completely unoptimized
 * @param   dest    Address to write to (exactly one volatile write)
 * @param   val     Value to store
 */
static inline void volatile_store_u16(volatile uint16_t *dest, uint16_t val)
{
    *dest = val;
}
/**
 * @brief   Store a 32 bit value completely unoptimized
 * @param   dest    Address to write to (exactly one volatile write)
 * @param   val     Value to store
 */
static inline void volatile_store_u32(volatile uint32_t *dest, uint32_t val)
{
    *dest = val;
}
/**
 * @brief   Store a 64 bit value completely unoptimized
 * @param   dest    Address to write to (exactly one volatile write)
 * @param   val     Value to store
 */
static inline void volatile_store_u64(volatile uint64_t *dest, uint64_t val)
{
    *dest = val;
}
109 
/**
 * @brief   Unoptimized version of `*dest += val`
 * @param   dest    Address of the value to add to (one volatile read,
 *                  one volatile write)
 * @param   val     Value to add onto @p dest
 * @return  Old value of `*dest` before the addition
 */
static inline uint8_t volatile_fetch_add_u8(volatile uint8_t *dest, uint8_t val)
{
    uint8_t old = *dest;
    *dest = (uint8_t)(old + val);
    return old;
}
/**
 * @brief   Unoptimized version of `*dest -= val`
 * @param   dest    Address of the value to subtract from (one volatile read,
 *                  one volatile write)
 * @param   val     Value to subtract from @p dest
 * @return  Old value of `*dest` before the subtraction
 */
static inline uint8_t volatile_fetch_sub_u8(volatile uint8_t *dest, uint8_t val)
{
    uint8_t old = *dest;
    *dest = (uint8_t)(old - val);
    return old;
}
/**
 * @brief   Unoptimized version of `*dest |= val`
 * @param   dest    Address of the value to OR into (one volatile read,
 *                  one volatile write)
 * @param   val     Bits to set in @p dest
 * @return  Old value of `*dest` before the OR
 */
static inline uint8_t volatile_fetch_or_u8(volatile uint8_t *dest, uint8_t val)
{
    uint8_t old = *dest;
    *dest = (uint8_t)(old | val);
    return old;
}
/**
 * @brief   Unoptimized version of `*dest ^= val`
 * @param   dest    Address of the value to XOR into (one volatile read,
 *                  one volatile write)
 * @param   val     Bits to toggle in @p dest
 * @return  Old value of `*dest` before the XOR
 */
static inline uint8_t volatile_fetch_xor_u8(volatile uint8_t *dest, uint8_t val)
{
    uint8_t old = *dest;
    *dest = (uint8_t)(old ^ val);
    return old;
}
/**
 * @brief   Unoptimized version of `*dest &= val`
 * @param   dest    Address of the value to AND into (one volatile read,
 *                  one volatile write)
 * @param   val     Bit mask to apply to @p dest
 * @return  Old value of `*dest` before the AND
 */
static inline uint8_t volatile_fetch_and_u8(volatile uint8_t *dest, uint8_t val)
{
    uint8_t old = *dest;
    *dest = (uint8_t)(old & val);
    return old;
}
165 
/**
 * @brief   Unoptimized version of `*dest += val`
 * @param   dest    Address of the value to add to (one volatile read,
 *                  one volatile write)
 * @param   val     Value to add onto @p dest
 * @return  Old value of `*dest` before the addition
 */
static inline uint16_t volatile_fetch_add_u16(volatile uint16_t *dest,
                                              uint16_t val)
{
    uint16_t old = *dest;
    *dest = (uint16_t)(old + val);
    return old;
}
/**
 * @brief   Unoptimized version of `*dest -= val`
 * @param   dest    Address of the value to subtract from (one volatile read,
 *                  one volatile write)
 * @param   val     Value to subtract from @p dest
 * @return  Old value of `*dest` before the subtraction
 */
static inline uint16_t volatile_fetch_sub_u16(volatile uint16_t *dest,
                                              uint16_t val)
{
    uint16_t old = *dest;
    *dest = (uint16_t)(old - val);
    return old;
}
/**
 * @brief   Unoptimized version of `*dest |= val`
 * @param   dest    Address of the value to OR into (one volatile read,
 *                  one volatile write)
 * @param   val     Bits to set in @p dest
 * @return  Old value of `*dest` before the OR
 */
static inline uint16_t volatile_fetch_or_u16(volatile uint16_t *dest,
                                             uint16_t val)
{
    uint16_t old = *dest;
    *dest = (uint16_t)(old | val);
    return old;
}
/**
 * @brief   Unoptimized version of `*dest ^= val`
 * @param   dest    Address of the value to XOR into (one volatile read,
 *                  one volatile write)
 * @param   val     Bits to toggle in @p dest
 * @return  Old value of `*dest` before the XOR
 */
static inline uint16_t volatile_fetch_xor_u16(volatile uint16_t *dest,
                                              uint16_t val)
{
    uint16_t old = *dest;
    *dest = (uint16_t)(old ^ val);
    return old;
}
/**
 * @brief   Unoptimized version of `*dest &= val`
 * @param   dest    Address of the value to AND into (one volatile read,
 *                  one volatile write)
 * @param   val     Bit mask to apply to @p dest
 * @return  Old value of `*dest` before the AND
 */
static inline uint16_t volatile_fetch_and_u16(volatile uint16_t *dest,
                                              uint16_t val)
{
    uint16_t old = *dest;
    *dest = (uint16_t)(old & val);
    return old;
}
226 
/**
 * @brief   Unoptimized version of `*dest += val`
 * @param   dest    Address of the value to add to (one volatile read,
 *                  one volatile write)
 * @param   val     Value to add onto @p dest
 * @return  Old value of `*dest` before the addition
 */
static inline uint32_t volatile_fetch_add_u32(volatile uint32_t *dest,
                                              uint32_t val)
{
    uint32_t old = *dest;
    *dest = old + val;
    return old;
}
/**
 * @brief   Unoptimized version of `*dest -= val`
 * @param   dest    Address of the value to subtract from (one volatile read,
 *                  one volatile write)
 * @param   val     Value to subtract from @p dest
 * @return  Old value of `*dest` before the subtraction
 */
static inline uint32_t volatile_fetch_sub_u32(volatile uint32_t *dest,
                                              uint32_t val)
{
    uint32_t old = *dest;
    *dest = old - val;
    return old;
}
/**
 * @brief   Unoptimized version of `*dest |= val`
 * @param   dest    Address of the value to OR into (one volatile read,
 *                  one volatile write)
 * @param   val     Bits to set in @p dest
 * @return  Old value of `*dest` before the OR
 */
static inline uint32_t volatile_fetch_or_u32(volatile uint32_t *dest,
                                             uint32_t val)
{
    uint32_t old = *dest;
    *dest = old | val;
    return old;
}
/**
 * @brief   Unoptimized version of `*dest ^= val`
 * @param   dest    Address of the value to XOR into (one volatile read,
 *                  one volatile write)
 * @param   val     Bits to toggle in @p dest
 * @return  Old value of `*dest` before the XOR
 */
static inline uint32_t volatile_fetch_xor_u32(volatile uint32_t *dest,
                                              uint32_t val)
{
    uint32_t old = *dest;
    *dest = old ^ val;
    return old;
}
/**
 * @brief   Unoptimized version of `*dest &= val`
 * @param   dest    Address of the value to AND into (one volatile read,
 *                  one volatile write)
 * @param   val     Bit mask to apply to @p dest
 * @return  Old value of `*dest` before the AND
 */
static inline uint32_t volatile_fetch_and_u32(volatile uint32_t *dest,
                                              uint32_t val)
{
    uint32_t old = *dest;
    *dest = old & val;
    return old;
}
287 
/**
 * @brief   Unoptimized version of `*dest += val`
 * @param   dest    Address of the value to add to (one volatile read,
 *                  one volatile write)
 * @param   val     Value to add onto @p dest
 * @return  Old value of `*dest` before the addition
 */
static inline uint64_t volatile_fetch_add_u64(volatile uint64_t *dest,
                                              uint64_t val)
{
    uint64_t old = *dest;
    *dest = old + val;
    return old;
}
/**
 * @brief   Unoptimized version of `*dest -= val`
 * @param   dest    Address of the value to subtract from (one volatile read,
 *                  one volatile write)
 * @param   val     Value to subtract from @p dest
 * @return  Old value of `*dest` before the subtraction
 */
static inline uint64_t volatile_fetch_sub_u64(volatile uint64_t *dest,
                                              uint64_t val)
{
    uint64_t old = *dest;
    *dest = old - val;
    return old;
}
/**
 * @brief   Unoptimized version of `*dest |= val`
 * @param   dest    Address of the value to OR into (one volatile read,
 *                  one volatile write)
 * @param   val     Bits to set in @p dest
 * @return  Old value of `*dest` before the OR
 */
static inline uint64_t volatile_fetch_or_u64(volatile uint64_t *dest,
                                             uint64_t val)
{
    uint64_t old = *dest;
    *dest = old | val;
    return old;
}
/**
 * @brief   Unoptimized version of `*dest ^= val`
 * @param   dest    Address of the value to XOR into (one volatile read,
 *                  one volatile write)
 * @param   val     Bits to toggle in @p dest
 * @return  Old value of `*dest` before the XOR
 */
static inline uint64_t volatile_fetch_xor_u64(volatile uint64_t *dest,
                                              uint64_t val)
{
    uint64_t old = *dest;
    *dest = old ^ val;
    return old;
}
/**
 * @brief   Unoptimized version of `*dest &= val`
 * @param   dest    Address of the value to AND into (one volatile read,
 *                  one volatile write)
 * @param   val     Bit mask to apply to @p dest
 * @return  Old value of `*dest` before the AND
 */
static inline uint64_t volatile_fetch_and_u64(volatile uint64_t *dest,
                                              uint64_t val)
{
    uint64_t old = *dest;
    *dest = old & val;
    return old;
}
348 
349 #ifdef __cplusplus
350 }
351 #endif
352 
static uint16_t volatile_fetch_or_u16(volatile uint16_t *dest, uint16_t val)
Unoptimized version of *dest |= val
static uint16_t volatile_fetch_xor_u16(volatile uint16_t *dest, uint16_t val)
Unoptimized version of *dest ^= val
static uint16_t volatile_fetch_and_u16(volatile uint16_t *dest, uint16_t val)
Unoptimized version of *dest &= val
static uint16_t volatile_load_u16(const volatile uint16_t *var)
Load a 16 bit value completely unoptimized.
static uint32_t volatile_fetch_sub_u32(volatile uint32_t *dest, uint32_t val)
Unoptimized version of *dest -= val
static uint8_t volatile_load_u8(const volatile uint8_t *var)
Load an 8 bit value completely unoptimized.
static uint32_t volatile_load_u32(const volatile uint32_t *var)
Load a 32 bit value completely unoptimized.
static uint32_t volatile_fetch_xor_u32(volatile uint32_t *dest, uint32_t val)
Unoptimized version of *dest ^= val
static uint64_t volatile_fetch_sub_u64(volatile uint64_t *dest, uint64_t val)
Unoptimized version of *dest -= val
static uint64_t volatile_fetch_or_u64(volatile uint64_t *dest, uint64_t val)
Unoptimized version of *dest |= val
static uint32_t volatile_fetch_add_u32(volatile uint32_t *dest, uint32_t val)
Unoptimized version of *dest += val
static void volatile_store_u64(volatile uint64_t *dest, uint64_t val)
Store a 64 bit value completely unoptimized.
static uint8_t volatile_fetch_and_u8(volatile uint8_t *dest, uint8_t val)
Unoptimized version of *dest &= val
static uint8_t volatile_fetch_add_u8(volatile uint8_t *dest, uint8_t val)
Unoptimized version of *dest += val
static uint8_t volatile_fetch_sub_u8(volatile uint8_t *dest, uint8_t val)
Unoptimized version of *dest -= val
static uint32_t volatile_fetch_or_u32(volatile uint32_t *dest, uint32_t val)
Unoptimized version of *dest |= val
static void volatile_store_u32(volatile uint32_t *dest, uint32_t val)
Store a 32 bit value completely unoptimized.
static void volatile_store_u16(volatile uint16_t *dest, uint16_t val)
Store a 16 bit value completely unoptimized.
static uint8_t volatile_fetch_xor_u8(volatile uint8_t *dest, uint8_t val)
Unoptimized version of *dest ^= val
static uint64_t volatile_fetch_xor_u64(volatile uint64_t *dest, uint64_t val)
Unoptimized version of *dest ^= val
static uint16_t volatile_fetch_add_u16(volatile uint16_t *dest, uint16_t val)
Unoptimized version of *dest += val
static uint8_t volatile_fetch_or_u8(volatile uint8_t *dest, uint8_t val)
Unoptimized version of *dest |= val
static uint16_t volatile_fetch_sub_u16(volatile uint16_t *dest, uint16_t val)
Unoptimized version of *dest -= val
static uint64_t volatile_fetch_and_u64(volatile uint64_t *dest, uint64_t val)
Unoptimized version of *dest &= val
static uint32_t volatile_fetch_and_u32(volatile uint32_t *dest, uint32_t val)
Unoptimized version of *dest &= val
static void volatile_store_u8(volatile uint8_t *dest, uint8_t val)
Store an 8 bit value completely unoptimized.
static uint64_t volatile_load_u64(const volatile uint64_t *var)
Load a 64 bit value completely unoptimized.
static uint64_t volatile_fetch_add_u64(volatile uint64_t *dest, uint64_t val)
Unoptimized version of *dest += val