/* Include guard: ARM-specific LL/SC atomic primitives for p99_atomic. */
22 #ifndef P99_ATOMIC_ARM_H
23 #define P99_ATOMIC_ARM_H 1
/* Internal header: must only be reached through p99_atomic.h. */
26 # warning "never include this file directly, use p99_atomic.h, instead"
/* ARM (non-Thumb) branch: ldrex/strex address operands use register-indirect [%n] syntax. */
29 #if !defined(__thumb__) && !defined(__thumb2__)
/*
** Load-exclusive of a byte: reads *p00_ptr and arms the exclusive
** monitor for a following p00_arm_strexb.
** NOTE(review): the asm output/clobber operand lines and the return
** statement are missing from this fragment of the file.
*/
33 uint8_t p00_arm_ldrexb(uint8_t
volatile*p00_ptr) {
35 __asm__
volatile (
"ldrexb %0,[%1]\t@ load exclusive\n"
/*
** Store-exclusive of a byte: attempts to store p00_val to *p00_ptr.
** Presumably returns 0 on success and 1 if the exclusive monitor was
** lost (standard strex status) — confirm against the full source;
** the asm output operand and clobber lines are missing here.
*/
44 _Bool p00_arm_strexb(uint8_t
volatile*p00_ptr, uint8_t p00_val) {
46 __asm__
volatile (
"strexb %0,%1,[%2]\t@ store exclusive\n"
48 :
"r" (p00_val),
"r" (p00_ptr)
/*
** Load-exclusive of a halfword (16 bit); same contract as
** p00_arm_ldrexb.  Asm operand lists are missing from this fragment.
*/
54 uint16_t p00_arm_ldrexh(uint16_t
volatile*p00_ptr) {
56 __asm__
volatile (
"ldrexh %0,[%1]\t@ load exclusive\n"
/*
** Store-exclusive of a halfword (16 bit); same contract as
** p00_arm_strexb.  Output operand/clobber lines are missing here.
*/
65 _Bool p00_arm_strexh(uint16_t
volatile*p00_ptr, uint16_t p00_val) {
67 __asm__
volatile (
"strexh %0,%1,[%2]\t@ store exclusive\n"
69 :
"r" (p00_val),
"r" (p00_ptr)
/*
** Load-exclusive of a 32 bit word; the workhorse used by the
** word-sized atomics below.  Asm operand lists are missing from
** this fragment.
*/
75 uint32_t p00_arm_ldrex(uint32_t
volatile*p00_ptr) {
77 __asm__
volatile (
"ldrex %0,[%1]\t@ load exclusive\n"
/*
** Store-exclusive of a 32 bit word; same contract as p00_arm_strexb.
** Output operand/clobber lines are missing from this fragment.
*/
86 _Bool p00_arm_strex(uint32_t
volatile*p00_ptr, uint32_t p00_val) {
88 __asm__
volatile (
"strex %0,%1,[%2]\t@ store exclusive\n"
90 :
"r" (p00_val),
"r" (p00_ptr)
/* 64 bit exclusive pair only when the target advertises 8-byte CAS. */
95 # if defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_8)
/*
** Load-exclusive of a doubleword: %0/%H0 name the low/high registers
** of the 64 bit register pair.  Asm operand lists are missing from
** this fragment.
*/
97 uint64_t p00_arm_ldrexd(uint64_t
volatile*p00_ptr) {
99 __asm__
volatile (
"ldrexd %0, %H0, [%1]\t@ load exclusive\n"
/*
** Store-exclusive of a doubleword (ARM mode).
**
** Bug fixed here: this function was named p00_arm_strex, which
** collides with the 32 bit variant above (a redefinition error), and
** it emitted the 32 bit "strex" mnemonic for a 64 bit operand pair.
** The doubleword exclusive store is "strexd", matching
** p00_arm_ldrexd and the Thumb-mode p00_arm_strexd, and matching the
** caller p00_atomic_exchange_8.
**
** Returns 0 when the store succeeded, 1 when the exclusive monitor
** was lost and the LL/SC sequence must be retried.
*/
_Bool p00_arm_strexd(uint64_t volatile*p00_ptr, uint64_t p00_val) {
  uint32_t p00_ret;
  __asm__ volatile ("strexd %0, %1, %H1, [%2]\t@ store exclusive\n"
                    : "=&r" (p00_ret)   /* status: 0 success, 1 failure */
                    : "r" (p00_val),    /* %1/%H1: low/high of the 64 bit pair */
                      "r" (p00_ptr)
                    : "cc", "memory");  /* compiler barrier around the store */
  return p00_ret;
}
/*
** Thumb-mode load-exclusive of a byte: the address is a memory
** operand (%1) rather than a register-indirect [%1].  Asm operand
** lists are missing from this fragment.
*/
122 uint8_t p00_arm_ldrexb(uint8_t
volatile*p00_ptr) {
124 __asm__
volatile (
"ldrexb %0,%1\t@ load exclusive\n"
/*
** Thumb-mode store-exclusive of a byte.
** NOTE(review): the input constraint is "m" (p00_ptr) — this passes
** the memory slot holding the POINTER, not the pointee; verify
** against the full source whether "Q" (*p00_ptr) was intended.
*/
133 _Bool p00_arm_strexb(uint8_t
volatile*p00_ptr, uint8_t p00_val) {
135 __asm__
volatile (
"strexb %0,%1,%2\t@ store exclusive\n"
137 :
"r" (p00_val),
"m" (p00_ptr)
/*
** Thumb-mode load-exclusive of a halfword; see p00_arm_ldrexb above.
** Asm operand lists are missing from this fragment.
*/
143 uint16_t p00_arm_ldrexh(uint16_t
volatile*p00_ptr) {
145 __asm__
volatile (
"ldrexh %0,%1\t@ load exclusive\n"
/*
** Thumb-mode store-exclusive of a halfword; same "m" (p00_ptr)
** constraint caveat as p00_arm_strexb above — confirm against the
** full source.
*/
154 _Bool p00_arm_strexh(uint16_t
volatile*p00_ptr, uint16_t p00_val) {
156 __asm__
volatile (
"strexh %0,%1,%2\t@ store exclusive\n"
158 :
"r" (p00_val),
"m" (p00_ptr)
/*
** Thumb-mode load-exclusive of a 32 bit word.  Asm operand lists are
** missing from this fragment.
*/
164 uint32_t p00_arm_ldrex(uint32_t
volatile*p00_ptr) {
166 __asm__
volatile (
"ldrex %0,%1\t@ load exclusive\n"
/*
** Thumb-mode store-exclusive of a 32 bit word; same "m" (p00_ptr)
** constraint caveat as the byte/halfword variants above.
*/
175 _Bool p00_arm_strex(uint32_t
volatile*p00_ptr, uint32_t p00_val) {
177 __asm__
volatile (
"strex %0,%1,%2\t@ store exclusive\n"
179 :
"r" (p00_val),
"m" (p00_ptr)
/* 64 bit exclusive pair only when the target advertises 8-byte CAS. */
184 # if defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_8)
/*
** Thumb-mode load-exclusive of a doubleword: %0/%H0 are the low/high
** registers of the pair.  Asm operand lists are missing from this
** fragment.
*/
186 uint64_t p00_arm_ldrexd(uint64_t
volatile*p00_ptr) {
188 __asm__
volatile (
"ldrexd %0, %H0, %1\t@ load exclusive\n"
/*
** Thumb-mode store-exclusive of a doubleword (%1/%H1 are the 64 bit
** value's register pair).  Correctly named p00_arm_strexd here —
** note the ARM-mode counterpart earlier in the file misspells the
** name and mnemonic.  Same "m" (p00_ptr) constraint caveat applies.
*/
197 _Bool p00_arm_strexd(uint64_t
volatile*p00_ptr, uint64_t p00_val) {
199 __asm__
volatile (
"strexd %0, %1, %H1, %2\t@ store exclusive\n"
201 :
"r" (p00_val),
"m" (p00_ptr)
/*
** Atomically replace *p00_objp with p00_des and return the previous
** value (8 bit).
**
** Fixes two defects visible in this fragment: the body referenced an
** undeclared identifier "object" instead of the parameter p00_objp,
** and there was no retry loop, so a failed store-exclusive fell off
** the end of a value-returning function (undefined behavior).  The
** canonical LL/SC pattern retries until strexb reports success (0).
*/
uint8_t p00_atomic_exchange_1(uint8_t volatile* p00_objp, uint8_t p00_des) {
  for (;;) {
    uint8_t p00_ret = p00_arm_ldrexb(p00_objp);
    if (!p00_arm_strexb(p00_objp, p00_des)) return p00_ret;
  }
}
/*
** Atomically replace *p00_objp with p00_des and return the previous
** value (16 bit).
**
** Fixes the undeclared identifier "object" (the parameter is
** p00_objp) and adds the missing LL/SC retry loop so a failed
** store-exclusive does not fall off the end of the function.
*/
uint16_t p00_atomic_exchange_2(uint16_t volatile* p00_objp, uint16_t p00_des) {
  for (;;) {
    uint16_t p00_ret = p00_arm_ldrexh(p00_objp);
    if (!p00_arm_strexh(p00_objp, p00_des)) return p00_ret;
  }
}
/*
** Atomically replace *p00_objp with p00_des and return the previous
** value (32 bit).
**
** Fixes the undeclared identifier "object" (the parameter is
** p00_objp) and adds the missing LL/SC retry loop so a failed
** store-exclusive does not fall off the end of the function.
*/
uint32_t p00_atomic_exchange_4(uint32_t volatile* p00_objp, uint32_t p00_des) {
  for (;;) {
    uint32_t p00_ret = p00_arm_ldrex(p00_objp);
    if (!p00_arm_strex(p00_objp, p00_des)) return p00_ret;
  }
}
233 #if defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_8) || defined(P00_DOXYGEN)
235 uint64_t p00_atomic_exchange_8(uint64_t
volatile* p00_objp, uint64_t p00_des) {
237 uint64_t p00_ret = p00_arm_ldrexd(
object);
238 if (!p00_arm_strexd(
object, p00_des))
return p00_ret;
/* Prefer the compiler's __sync builtins when 4-byte CAS is available. */
243 #if defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4) && !defined(P00_DOXYGEN)
/*
** Acquire a spinlock word: atomically set *p00_objp to 1 and return
** the previous value (0 means the lock was obtained).
*/
246 uint32_t p00_sync_lock_test_and_set(uint32_t
volatile *p00_objp) {
247 return __sync_lock_test_and_set(p00_objp, 1);
/* Release a spinlock word previously taken by p00_sync_lock_test_and_set. */
251 void p00_sync_lock_release(uint32_t
volatile *p00_objp) {
252 __sync_lock_release(p00_objp);
/* Full memory fence via the compiler builtin. */
256 void p00_mfence(
void) {
257 __sync_synchronize();
/*
** Hand-rolled test-and-set for targets without the __sync builtins:
** LL/SC attempt to store 1, returning the previous value.
** NOTE(review): p00_ord is accepted but not visibly used in this
** fragment, and the retry-loop wrapper around the LL/SC pair appears
** to have been dropped — confirm against the full source.
*/
290 uint32_t p00_sync_lock_test_and_set_internal(uint32_t
volatile *
object, memory_order p00_ord) {
292 uint32_t p00_ret = p00_arm_ldrex(
object);
298 if (!p00_arm_strex(
object, 1) || p00_ret)
return p00_ret;
/*
** Release the lock word; the memory_order argument is accepted for
** interface symmetry but the release semantics come from the
** __sync_lock_release builtin itself.
*/
303 void p00_sync_lock_release_internal(uint32_t
volatile *
object, memory_order p00_ord) {
304 __sync_lock_release(
object);
/*
** Full hardware barrier via the ARM "dmb" instruction; the "memory"
** clobber also stops compiler reordering.  NOTE(review): p00_ord is
** ignored — every ordering gets a full dmb.
*/
308 void p00_mfence_internal(memory_order p00_ord) {
309 __asm__ __volatile__(
"dmb":::
"memory");
/* Dispatch macro: with no argument default to memory_order_seq_cst,
** otherwise forward the given ordering to p00_mfence_internal. */
312 #define p00_mfence(...) \
313 P99_IF_EMPTY(__VA_ARGS__) \
314 (p00_mfence_internal(memory_order_seq_cst)) \
315 (p00_mfence_internal(__VA_ARGS__))
/* Dispatch macro: drop a trailing extra argument (if two or more are
** given) before calling the internal release. */
317 #define p00_sync_lock_release(...) \
318 P99_IF_LT(P99_NARG(__VA_ARGS__), 2) \
319 (p00_sync_lock_release_internal(__VA_ARGS__)) \
320 (p00_sync_lock_release_internal(P99_ALLBUTLAST(__VA_ARGS__)))
/* Dispatch macro: same trailing-argument handling for test-and-set. */
321 #define p00_sync_lock_test_and_set(...) \
322 P99_IF_LT(P99_NARG(__VA_ARGS__), 2) \
323 (p00_sync_lock_test_and_set_internal(__VA_ARGS__)) \
324 (p00_sync_lock_test_and_set_internal(P99_ALLBUTLAST(__VA_ARGS__)))
/*
** Fragment of a compare-exchange LL/SC loop (the enclosing function
** signature and loop header are missing from this chunk): load
** exclusively; on mismatch with the expected value p00_pre, store
** back the observed value to clear the monitor; otherwise attempt to
** store the desired value p00_des and leave the loop on success.
*/
328 uint32_t p00_ret = 0;
330 p00_ret = p00_arm_ldrex(
object);
331 if (p00_pre != p00_ret) {
/* Mismatch: a plain strex of the observed value releases the
** exclusive monitor without changing the stored value. */
333 p00_arm_strex(
object, p00_ret);
336 if (!p00_arm_strex(
object, p00_des))
break;
/*
** Fragment of a fetch-and-add LL/SC loop (enclosing signature and
** loop header missing from this chunk): retry ldrex/strex until the
** incremented value is stored; p00_ret holds the previous value.
*/
345 uint32_t p00_ret = 0;
347 p00_ret = p00_arm_ldrex(
object);
348 uint32_t p00_des = p00_ret + p00_val;
349 if (!p00_arm_strex(
object, p00_des))
break;
/*
** Fragment of a fetch-and-subtract LL/SC loop (enclosing signature
** and loop header missing from this chunk).
*/
357 uint32_t p00_ret = 0;
359 p00_ret = p00_arm_ldrex(
object);
360 uint32_t p00_des = p00_ret - p00_val;
361 if (!p00_arm_strex(
object, p00_des))
break;
/*
** Fragment of a fetch-and-or LL/SC loop (enclosing signature and
** loop header missing from this chunk).
*/
369 uint32_t p00_ret = 0;
371 p00_ret = p00_arm_ldrex(
object);
372 uint32_t p00_des = p00_ret | p00_val;
373 if (!p00_arm_strex(
object, p00_des))
break;
/*
** Fragment of a fetch-and-and LL/SC loop (enclosing signature and
** loop header missing from this chunk).
*/
381 uint32_t p00_ret = 0;
383 p00_ret = p00_arm_ldrex(
object);
384 uint32_t p00_des = p00_ret & p00_val;
385 if (!p00_arm_strex(
object, p00_des))
break;
/*
** Fragment of a fetch-and-xor LL/SC loop (enclosing signature and
** loop header missing from this chunk).
*/
393 uint32_t p00_ret = 0;
395 p00_ret = p00_arm_ldrex(
object);
396 uint32_t p00_des = p00_ret ^ p00_val;
397 if (!p00_arm_strex(
object, p00_des))
break;
/* Advertise 4-byte CAS availability now that this file provides the
** LL/SC implementations.  NOTE(review): redefining a double-underscore
** compiler macro is reserved-namespace territory — deliberate here,
** but keep it isolated to this internal header. */
403 #undef __GCC_HAVE_SYNC_COMPARE_AND_SWAP_4
404 #define __GCC_HAVE_SYNC_COMPARE_AND_SWAP_4 1