#ifndef P99_ATOMIC_X86_H
#define P99_ATOMIC_X86_H 1
#ifndef P99_ATOMIC_H
# warning "never include this file directly, use p99_atomic.h, instead"
#endif
p99_inline
uint8_t p00_atomic_exchange_1(uint8_t volatile* p00_objp, uint8_t p00_ret) {
  /* The "q" constraint picks a register with a byte subregister,
     which the %b0 operand modifier requires on i386. */
  __asm__ __volatile__("xchgb %1, %b0"
                       : "=q"(p00_ret)
                       : "m"(*p00_objp), "0"(p00_ret)
                       : "memory");
  return p00_ret;
}
p99_inline
uint16_t p00_atomic_exchange_2(uint16_t volatile* p00_objp, uint16_t p00_ret) {
  __asm__ __volatile__("xchgw %1, %w0"
                       : "=r"(p00_ret)
                       : "m"(*p00_objp), "0"(p00_ret)
                       : "memory");
  return p00_ret;
}
p99_inline
uint32_t p00_atomic_exchange_4(uint32_t volatile* p00_objp, uint32_t p00_ret) {
  __asm__ __volatile__("xchgl %1, %k0"
                       : "=r"(p00_ret)
                       : "m"(*p00_objp), "0"(p00_ret)
                       : "memory");
  return p00_ret;
}
#if defined(__x86_64__) || defined(P00_DOXYGEN)
# if defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_8) || defined(P00_DOXYGEN)
p99_inline
uint64_t p00_atomic_exchange_8(uint64_t volatile* p00_objp, uint64_t p00_ret) {
  __asm__ __volatile__("xchgq %1, %0"
                       : "=r"(p00_ret)
                       : "m"(*p00_objp), "0"(p00_ret)
                       : "memory");
  return p00_ret;
}
# endif
#else
#undef __GCC_HAVE_SYNC_COMPARE_AND_SWAP_8
#endif
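/* Inferred rather than stated in the original: plain i386 has no
 * xchgq, so the 8-byte exchange cannot be provided there; undefining
 * the feature-test macro lets the rest of p99 fall back to
 * non-lock-free handling of 8-byte objects. */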
p99_inline
void p00_sync_lock_release_internal(uint32_t volatile *p00_objp) {
  /* On x86 a plain store is never reordered with earlier loads or
     stores, so a simple mov of 0 already has release semantics. */
  __asm__ __volatile__("movl $0, %0"
                       : "=m"(*p00_objp)
                       :
                       : "memory");
}
p99_inline
void p00_mfence_internal(memory_order p00_ord) {
  /* The ordering argument is accepted for interface symmetry; a full
     mfence is issued unconditionally. */
  __asm__ __volatile__("mfence":::"memory");
}
#define p00_mfence(...)                                        \
P99_IF_EMPTY(__VA_ARGS__)                                      \
(p00_mfence_internal(memory_order_seq_cst))                    \
(p00_mfence_internal(__VA_ARGS__))
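/**
 ** Call sketch for the two branches of the dispatch above: an empty
 ** argument list defaults to sequential consistency.
 ** @code
 ** p00_mfence();                      // fence with memory_order_seq_cst
 ** p00_mfence(memory_order_acq_rel);  // explicit ordering argument
 ** @endcode
 **/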
#define p00_sync_lock_release(...)                                 \
P99_IF_LT(P99_NARG(__VA_ARGS__), 2)                                \
(p00_sync_lock_release_internal(__VA_ARGS__))                      \
(p00_sync_lock_release_internal(P99_ALLBUTLAST(__VA_ARGS__)))
#define p00_sync_lock_test_and_set(...)                            \
P99_IF_LT(P99_NARG(__VA_ARGS__), 2)                                \
(p00_atomic_exchange_4(__VA_ARGS__, 1))                            \
(p00_atomic_exchange_4(P99_ALLBUTLAST(__VA_ARGS__), 1))
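/**
 ** Minimal spinlock sketch combining the two macros above; the lock
 ** variable is hypothetical. Both macros accept an optional trailing
 ** memory_order argument, which P99_ALLBUTLAST strips before the call
 ** reaches the 4-byte exchange or the release store.
 ** @code
 ** uint32_t volatile lock = 0;
 **
 ** // Acquire: swap in 1; the previous value tells us if it was free.
 ** while (p00_sync_lock_test_and_set(&lock, memory_order_acquire)) {
 **   // spin
 ** }
 ** // ... critical section ...
 ** p00_sync_lock_release(&lock, memory_order_release);
 ** @endcode
 **/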