/*
 * PowerPC memory-barrier primitives (GCC-style inline asm).
 *
 * pg_memory_barrier_impl: heavyweight "sync" — a full barrier ordering all
 * prior loads/stores against all later ones.
 * pg_read_barrier_impl / pg_write_barrier_impl: lightweight "lwsync", which
 * orders load/load, load/store and store/store, sufficient for both the
 * read- and write-barrier contracts.
 *
 * The "memory" clobber additionally stops the compiler itself from moving
 * memory accesses across the asm statement.
 */
#if defined(__GNUC__)
#define pg_memory_barrier_impl() __asm__ __volatile__ ("sync" : : : "memory")
#define pg_read_barrier_impl() __asm__ __volatile__ ("lwsync" : : : "memory")
#define pg_write_barrier_impl() __asm__ __volatile__ ("lwsync" : : : "memory")
#endif
/* 32-bit atomics are provided unconditionally on this architecture. */
#define PG_HAVE_ATOMIC_U32_SUPPORT
typedef struct pg_atomic_uint32
{
volatile uint32 value;	/* accessed only through the *_impl functions below */
} pg_atomic_uint32;
/*
 * 64-bit atomics only when pointers are 8 bytes (64-bit mode), matching the
 * ldarx/stdcx. doubleword instructions used by the 64-bit _impl functions
 * below.  The value is forced to 8-byte alignment, which aligned doubleword
 * accesses require for single-copy atomicity.
 */
#if SIZEOF_VOID_P >= 8
#define PG_HAVE_ATOMIC_U64_SUPPORT
typedef struct pg_atomic_uint64
{
volatile uint64 value pg_attribute_aligned(8);
} pg_atomic_uint64;
#endif
#define PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U32
/*
 * pg_atomic_compare_exchange_u32_impl
 *
 * Atomic 32-bit compare-and-exchange: if ptr->value equals *expected, store
 * newval and return true; otherwise leave the value alone, write the value
 * actually observed into *expected, and return false.
 *
 * Built on a lwarx/stwcx. (load-reserve / store-conditional) loop, with
 * "sync" before it as the leading full barrier and "isync" after it as the
 * trailing barrier.  Asm operands: %0 = found, %1 = condition_register,
 * %2 = ptr->value ("+m", tells the compiler memory is read and written),
 * %3 = *expected, %4 = newval, %5 = &ptr->value.
 */
static inline bool
pg_atomic_compare_exchange_u32_impl(volatile pg_atomic_uint32 *ptr,
uint32 *expected, uint32 newval)
{
uint32 found;	/* value read by lwarx */
uint32 condition_register;	/* raw 32-bit CR image captured by mfcr */
bool ret;
#ifdef HAVE_I_CONSTRAINT__BUILTIN_CONSTANT_P
/*
 * Fast path: when *expected is a compile-time constant that fits cmpwi's
 * signed 16-bit immediate field, embed it with the "i" constraint and save
 * a register.  configure-guarded because not every compiler accepts
 * __builtin_constant_p values used with "i" asm constraints.
 */
if (__builtin_constant_p(*expected) &&
(int32) *expected <= PG_INT16_MAX &&
(int32) *expected >= PG_INT16_MIN)
/*
 * "bne $+12" branches 12 bytes forward (past stwcx. and the second bne)
 * to isync when the compare fails; "bne $-16" branches back to lwarx to
 * retry when stwcx. loses the reservation.
 */
__asm__ __volatile__(
" sync \n"
" lwarx %0,0,%5 \n"
" cmpwi %0,%3 \n"
" bne $+12 \n"
" stwcx. %4,0,%5 \n"
" bne $-16 \n"
" isync \n"
" mfcr %1 \n"
: "=&r"(found), "=r"(condition_register), "+m"(ptr->value)
: "i"(*expected), "r"(newval), "r"(&ptr->value)
: "memory", "cc");
else
#endif
/* General case: compare against a register with cmpw. */
__asm__ __volatile__(
" sync \n"
" lwarx %0,0,%5 \n"
" cmpw %0,%3 \n"
" bne $+12 \n"
" stwcx. %4,0,%5 \n"
" bne $-16 \n"
" isync \n"
" mfcr %1 \n"
: "=&r"(found), "=r"(condition_register), "+m"(ptr->value)
: "r"(*expected), "r"(newval), "r"(&ptr->value)
: "memory", "cc");
/*
 * Extract CR0's EQ bit (bit 29 counting from the LSB of the mfcr image):
 * it is 1 only when stwcx. succeeded, 0 when the compare failed and the
 * store was skipped.
 */
ret = (condition_register >> 29) & 1;
if (!ret)
*expected = found;	/* report the conflicting value back to the caller */
return ret;
}
#define PG_HAVE_ATOMIC_FETCH_ADD_U32
/*
 * pg_atomic_fetch_add_u32_impl
 *
 * Atomically add add_ to ptr->value and return the PREVIOUS value
 * (fetch-then-add), with "sync" before and "isync" after the
 * lwarx/stwcx. loop as the barriers.
 *
 * Asm operands: %0 = _t (new value to store), %1 = res (old value,
 * loaded by lwarx), %2 = ptr->value ("+m"), %3 = add_, %4 = &ptr->value.
 * "bne $-12" branches back to lwarx when stwcx. loses the reservation.
 */
static inline uint32
pg_atomic_fetch_add_u32_impl(volatile pg_atomic_uint32 *ptr, int32 add_)
{
uint32 _t;	/* new value written by stwcx. */
uint32 res;	/* old value, returned to the caller */
#ifdef HAVE_I_CONSTRAINT__BUILTIN_CONSTANT_P
/*
 * Fast path: addi with an immediate when add_ is a compile-time constant
 * in the signed 16-bit range.  "=&b" pins res to a non-r0 register,
 * because addi interprets an r0 source operand as the literal 0.
 */
if (__builtin_constant_p(add_) &&
add_ <= PG_INT16_MAX && add_ >= PG_INT16_MIN)
__asm__ __volatile__(
" sync \n"
" lwarx %1,0,%4 \n"
" addi %0,%1,%3 \n"
" stwcx. %0,0,%4 \n"
" bne $-12 \n"
" isync \n"
: "=&r"(_t), "=&b"(res), "+m"(ptr->value)
: "i"(add_), "r"(&ptr->value)
: "memory", "cc");
else
#endif
/* General case: addend in a register, plain add. */
__asm__ __volatile__(
" sync \n"
" lwarx %1,0,%4 \n"
" add %0,%1,%3 \n"
" stwcx. %0,0,%4 \n"
" bne $-12 \n"
" isync \n"
: "=&r"(_t), "=&r"(res), "+m"(ptr->value)
: "r"(add_), "r"(&ptr->value)
: "memory", "cc");
return res;
}
#ifdef PG_HAVE_ATOMIC_U64_SUPPORT
#define PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U64
/*
 * pg_atomic_compare_exchange_u64_impl
 *
 * 64-bit analog of pg_atomic_compare_exchange_u32_impl: the same
 * load-reserve / store-conditional loop using the doubleword forms
 * ldarx/cmpd(i)/stdcx., bracketed by "sync" and "isync".  Returns true and
 * stores newval on match; on mismatch returns false with the observed value
 * written into *expected.
 */
static inline bool
pg_atomic_compare_exchange_u64_impl(volatile pg_atomic_uint64 *ptr,
uint64 *expected, uint64 newval)
{
uint64 found;	/* value read by ldarx */
uint32 condition_register;	/* raw CR image captured by mfcr */
bool ret;
#ifdef HAVE_I_CONSTRAINT__BUILTIN_CONSTANT_P
/*
 * Fast path: cmpdi immediate when *expected is a compile-time constant
 * in the signed 16-bit range (see the u32 variant for why this is
 * configure-guarded).
 */
if (__builtin_constant_p(*expected) &&
(int64) *expected <= PG_INT16_MAX &&
(int64) *expected >= PG_INT16_MIN)
/*
 * "bne $+12" skips forward past stdcx. to isync when the compare fails;
 * "bne $-16" retries from ldarx when stdcx. loses the reservation.
 */
__asm__ __volatile__(
" sync \n"
" ldarx %0,0,%5 \n"
" cmpdi %0,%3 \n"
" bne $+12 \n"
" stdcx. %4,0,%5 \n"
" bne $-16 \n"
" isync \n"
" mfcr %1 \n"
: "=&r"(found), "=r"(condition_register), "+m"(ptr->value)
: "i"(*expected), "r"(newval), "r"(&ptr->value)
: "memory", "cc");
else
#endif
/* General case: compare against a register with cmpd. */
__asm__ __volatile__(
" sync \n"
" ldarx %0,0,%5 \n"
" cmpd %0,%3 \n"
" bne $+12 \n"
" stdcx. %4,0,%5 \n"
" bne $-16 \n"
" isync \n"
" mfcr %1 \n"
: "=&r"(found), "=r"(condition_register), "+m"(ptr->value)
: "r"(*expected), "r"(newval), "r"(&ptr->value)
: "memory", "cc");
/* CR0's EQ bit: 1 iff stdcx. succeeded (see u32 variant). */
ret = (condition_register >> 29) & 1;
if (!ret)
*expected = found;	/* report the conflicting value back to the caller */
return ret;
}
#define PG_HAVE_ATOMIC_FETCH_ADD_U64
/*
 * pg_atomic_fetch_add_u64_impl
 *
 * 64-bit analog of pg_atomic_fetch_add_u32_impl: atomically add add_ to
 * ptr->value and return the PREVIOUS value, using the doubleword
 * ldarx/stdcx. loop bracketed by "sync" and "isync".
 */
static inline uint64
pg_atomic_fetch_add_u64_impl(volatile pg_atomic_uint64 *ptr, int64 add_)
{
uint64 _t;	/* new value written by stdcx. */
uint64 res;	/* old value, returned to the caller */
#ifdef HAVE_I_CONSTRAINT__BUILTIN_CONSTANT_P
/*
 * Fast path: addi immediate for compile-time constants in the signed
 * 16-bit range; "=&b" keeps res out of r0, which addi reads as literal 0.
 */
if (__builtin_constant_p(add_) &&
add_ <= PG_INT16_MAX && add_ >= PG_INT16_MIN)
__asm__ __volatile__(
" sync \n"
" ldarx %1,0,%4 \n"
" addi %0,%1,%3 \n"
" stdcx. %0,0,%4 \n"
" bne $-12 \n"
" isync \n"
: "=&r"(_t), "=&b"(res), "+m"(ptr->value)
: "i"(add_), "r"(&ptr->value)
: "memory", "cc");
else
#endif
/* General case: addend in a register, plain add. */
__asm__ __volatile__(
" sync \n"
" ldarx %1,0,%4 \n"
" add %0,%1,%3 \n"
" stdcx. %0,0,%4 \n"
" bne $-12 \n"
" isync \n"
: "=&r"(_t), "=&r"(res), "+m"(ptr->value)
: "r"(add_), "r"(&ptr->value)
: "memory", "cc");
return res;
}
#endif	/* PG_HAVE_ATOMIC_U64_SUPPORT */
/*
 * Aligned doubleword accesses have single-copy atomicity per the Power
 * architecture.  NOTE(review): this is defined even when
 * PG_HAVE_ATOMIC_U64_SUPPORT is not (SIZEOF_VOID_P < 8) — confirm that
 * 32-bit builds are meant to rely on it too.
 */
#define PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY