OF_ASSUME_NONNULL_BEGIN
static OF_INLINE int
of_atomic_int_add(volatile int *_Nonnull p, int i)
{
    if (sizeof(int) == 4)
        __asm__ __volatile__ (
            /* 32-bit "lock xadd" body elided in source; see sketch below */
        );
#ifdef OF_X86_64_ASM
    else if (sizeof(int) == 8)
        __asm__ __volatile__ (
            /* 64-bit body elided in source */
        );
#endif
    else
        abort();
    return i;
}
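/*
 * Hedged sketch of the elided add bodies: the standard x86 fetch-and-add
 * idiom built on "lock xadd". This is a reconstruction under that
 * assumption, not the verbatim body; sketch_atomic_int_add32 is a
 * hypothetical name. After the lock'd xadd the register holds the old
 * value of *p, so the addend is added back to return the new value.
 */
static OF_INLINE int
sketch_atomic_int_add32(volatile int *_Nonnull p, int i)
{
    int addend = i;

    __asm__ __volatile__ (
        "lock\n\t"
        "xaddl %0, %1"      /* %0 gets old *p; *p gets old *p + i */
        : "+&r"(i), "+m"(*p)
        :
        : "cc"
    );

    return i + addend;      /* old value + addend == new value */
}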
static OF_INLINE int32_t
of_atomic_int32_add(volatile int32_t *_Nonnull p, int32_t i)
{
    __asm__ __volatile__ (/* "lock xadd" body elided in source */);
    return i;
}
static OF_INLINE void *_Nullable
of_atomic_ptr_add(void *volatile _Nullable *_Nonnull p, intptr_t i)
{
#if defined(OF_X86_64_ASM)
    __asm__ __volatile__ (
        /* 64-bit body elided in source; see sketch below */
    );
    return (void *)i;
#elif defined(OF_X86_ASM)
    __asm__ __volatile__ (
        /* 32-bit body elided in source */
    );
    return (void *)i;
#endif
}
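/*
 * Hedged sketch of the elided pointer-add bodies: the same "lock xadd"
 * idiom, split between 64-bit and 32-bit forms exactly as the #if/#elif
 * above does. A reconstruction under assumptions; sketch_atomic_ptr_add
 * is a hypothetical name.
 */
static OF_INLINE void *_Nullable
sketch_atomic_ptr_add(void *volatile _Nullable *_Nonnull p, intptr_t i)
{
    intptr_t addend = i;

#if defined(OF_X86_64_ASM)
    __asm__ __volatile__ (
        "lock\n\t"
        "xaddq %0, %1"      /* 64-bit exchange-and-add */
        : "+&r"(i), "+m"(*p)
        :
        : "cc"
    );
#elif defined(OF_X86_ASM)
    __asm__ __volatile__ (
        "lock\n\t"
        "xaddl %0, %1"      /* 32-bit exchange-and-add */
        : "+&r"(i), "+m"(*p)
        :
        : "cc"
    );
#endif

    return (void *)(i + addend);    /* new value of *p */
}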
static OF_INLINE int
of_atomic_int_sub(volatile int *_Nonnull p, int i)
{
    if (sizeof(int) == 4)
        __asm__ __volatile__ (
            /* 32-bit body elided; negated-operand form of the add sketch */
        );
#ifdef OF_X86_64_ASM
    else if (sizeof(int) == 8)
        __asm__ __volatile__ (
            /* 64-bit body elided in source */
        );
#endif
    else
        abort();
    return i;
}
static OF_INLINE int32_t
of_atomic_int32_sub(volatile int32_t *_Nonnull p, int32_t i)
{
    __asm__ __volatile__ (/* body elided in source */);
    return i;
}
static OF_INLINE void *_Nullable
of_atomic_ptr_sub(void *volatile _Nullable *_Nonnull p, intptr_t i)
{
#if defined(OF_X86_64_ASM)
    __asm__ __volatile__ (
        /* 64-bit body elided in source */
    );
    return (void *)i;
#elif defined(OF_X86_ASM)
    __asm__ __volatile__ (
        /* 32-bit body elided in source */
    );
    return (void *)i;
#endif
}
static OF_INLINE int
of_atomic_int_inc(volatile int *_Nonnull p)
{
    int i;

    if (sizeof(int) == 4)
        __asm__ __volatile__ (
            /* 32-bit body elided in source; see sketch below */
        );
#ifdef OF_X86_64_ASM
    else if (sizeof(int) == 8)
        __asm__ __volatile__ (
            /* 64-bit body elided in source */
        );
#endif
    else
        abort();
    return i;
}
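/*
 * Hedged sketch of the elided increment body: with no addend argument,
 * the idiom loads the constant 1 into a register, performs the lock'd
 * xadd, and bumps the returned old value by one to get the new value.
 * A reconstruction under assumptions; sketch_atomic_int_inc32 is a
 * hypothetical name (the decrement variants mirror it with -1).
 */
static OF_INLINE int
sketch_atomic_int_inc32(volatile int *_Nonnull p)
{
    int i = 1;

    __asm__ __volatile__ (
        "lock\n\t"
        "xaddl %0, %1"      /* %0 gets old *p; *p gets old *p + 1 */
        : "+&r"(i), "+m"(*p)
        :
        : "cc"
    );

    return i + 1;       /* new value */
}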
static OF_INLINE int32_t
of_atomic_int32_inc(volatile int32_t *_Nonnull p)
{
    int32_t i;

    __asm__ __volatile__ (/* body elided in source */);
    return i;
}
static OF_INLINE int
of_atomic_int_dec(volatile int *_Nonnull p)
{
    int i;

    if (sizeof(int) == 4)
        __asm__ __volatile__ (
            /* 32-bit body elided; mirrors the inc sketch above with -1 */
        );
#ifdef OF_X86_64_ASM
    else if (sizeof(int) == 8)
        __asm__ __volatile__ (
            /* 64-bit body elided in source */
        );
#endif
    else
        abort();
    return i;
}
static OF_INLINE int32_t
of_atomic_int32_dec(volatile int32_t *_Nonnull p)
{
    int32_t i;

    __asm__ __volatile__ (/* body elided in source */);
    return i;
}
static OF_INLINE unsigned int
of_atomic_int_or(volatile unsigned int *_Nonnull p, unsigned int i)
{
    if (sizeof(int) == 4)
        __asm__ __volatile__ (
            /* 32-bit cmpxchg retry loop elided in source; see sketch below */
        );
#ifdef OF_X86_64_ASM
    else if (sizeof(int) == 8)
        __asm__ __volatile__ (
            /* 64-bit cmpxchg retry loop elided in source */
        );
#endif
    else
        abort();
    return i;
}
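/*
 * Hedged sketch of the elided read-modify-write bodies used by the
 * or/and/xor family: x86 has no "lock or" that also returns the result,
 * so the usual idiom (and the one the surviving "cmpxchgl %0, %2" line
 * in of_atomic_int32_xor below points at) is a compare-exchange retry
 * loop: read *p, compute the new value, "lock cmpxchg" it in, and retry
 * if another thread raced. A reconstruction under assumptions;
 * sketch_atomic_int_or32 is a hypothetical name (and/xor substitute
 * "andl"/"xorl" for "orl").
 */
static OF_INLINE unsigned int
sketch_atomic_int_or32(volatile unsigned int *_Nonnull p, unsigned int i)
{
    unsigned int result;

    __asm__ __volatile__ (
        "0:\n\t"
        "movl %2, %%eax\n\t"    /* eax = old *p */
        "movl %%eax, %0\n\t"
        "orl %1, %0\n\t"        /* result = old | i */
        "lock\n\t"
        "cmpxchgl %0, %2\n\t"   /* if (*p == eax) *p = result */
        "jne 0b"                /* otherwise another thread raced: retry */
        : "=&r"(result)
        : "r"(i), "m"(*p)
        : "eax", "cc", "memory"
    );

    return result;      /* the value actually stored */
}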
static OF_INLINE uint32_t
of_atomic_int32_or(volatile uint32_t *_Nonnull p, uint32_t i)
{
    __asm__ __volatile__ (/* cmpxchg retry loop elided in source */);
    return i;
}
static OF_INLINE unsigned int
of_atomic_int_and(volatile unsigned int *_Nonnull p, unsigned int i)
{
    if (sizeof(int) == 4)
        __asm__ __volatile__ (
            /* 32-bit loop elided; the or sketch above with "andl" */
        );
#ifdef OF_X86_64_ASM
    else if (sizeof(int) == 8)
        __asm__ __volatile__ (
            /* 64-bit cmpxchg retry loop elided in source */
        );
#endif
    else
        abort();
    return i;
}
static OF_INLINE uint32_t
of_atomic_int32_and(volatile uint32_t *_Nonnull p, uint32_t i)
{
    __asm__ __volatile__ (/* cmpxchg retry loop elided in source */);
    return i;
}
static OF_INLINE unsigned int
of_atomic_int_xor(volatile unsigned int *_Nonnull p, unsigned int i)
{
    if (sizeof(int) == 4)
        __asm__ __volatile__ (
            /* 32-bit loop elided; the or sketch above with "xorl" */
        );
#ifdef OF_X86_64_ASM
    else if (sizeof(int) == 8)
        __asm__ __volatile__ (
            /* 64-bit cmpxchg retry loop elided in source */
        );
#endif
    else
        abort();
    return i;
}
static OF_INLINE uint32_t
of_atomic_int32_xor(volatile uint32_t *_Nonnull p, uint32_t i)
{
    __asm__ __volatile__ (
        /* loop head elided in source */
        "cmpxchgl %0, %2\n\t"
        /* loop tail and constraints elided in source */
    );
    return i;
}

static OF_INLINE bool
of_atomic_int_cmpswap(volatile int *_Nonnull p, int o, int n)
{
    int r;

    __asm__ __volatile__ (/* body elided in source; see sketch below */);
    return r;
}
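/*
 * Hedged sketch of the elided compare-and-swap body: "lock cmpxchg"
 * compares *p with the expected value in eax, installs n on a match,
 * and sets ZF accordingly; "sete" turns that flag into the bool result.
 * A reconstruction under assumptions; sketch_atomic_int_cmpswap is a
 * hypothetical name.
 */
static OF_INLINE bool
sketch_atomic_int_cmpswap(volatile int *_Nonnull p, int o, int n)
{
    int r;

    __asm__ __volatile__ (
        "lock\n\t"
        "cmpxchgl %2, %3\n\t"   /* if (*p == eax) *p = n */
        "sete %b0\n\t"          /* r = ZF (1 on successful swap) */
        "movzbl %b0, %0"
        : "=&d"(r), "+a"(o)
        : "r"(n), "m"(*p)
        : "cc", "memory"
    );

    return r;
}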
static OF_INLINE bool
of_atomic_int32_cmpswap(volatile int32_t *_Nonnull p, int32_t o, int32_t n)
{
    int r;

    __asm__ __volatile__ (/* body elided in source */);
    return r;
}
static OF_INLINE bool
of_atomic_ptr_cmpswap(void *volatile _Nullable *_Nonnull p,
    void *_Nullable o, void *_Nullable n)
{
    int r;

    __asm__ __volatile__ (/* body elided in source */);
    return r;
}
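/*
 * Usage sketch: a typical lock-free update loop built on the
 * compare-and-swap above. sketch_atomic_int_double is a hypothetical
 * helper, shown only to illustrate the retry pattern the cmpswap
 * functions enable; it assumes the bool-on-success contract of
 * of_atomic_int_cmpswap.
 */
static OF_INLINE int
sketch_atomic_int_double(volatile int *_Nonnull p)
{
    int old, updated;

    do {
        old = *p;           /* snapshot the current value */
        updated = old * 2;  /* compute the desired new value */
    } while (!of_atomic_int_cmpswap(p, old, updated));  /* retry on race */

    return updated;
}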
static OF_INLINE void
of_memory_barrier(void)
{
    __asm__ __volatile__ (
        "mfence" ::: "memory"
    );
}

static OF_INLINE void
of_memory_barrier_acquire(void)
{
    /* x86 loads already have acquire semantics; a compiler-only
     * barrier suffices. */
    __asm__ __volatile__ ("" ::: "memory");
}
static OF_INLINE void
of_memory_barrier_release(void)
{
    /* x86 stores already have release semantics; a compiler-only
     * barrier suffices. */
    __asm__ __volatile__ ("" ::: "memory");
}
OF_ASSUME_NONNULL_END