/*
 * atomics.h
 *	  Facade for platform-specific atomic operations and memory barriers.
 *
 * Selects an architecture-specific header, then a compiler-specific one,
 * and finally fills in any operations still missing with fallback/generic
 * implementations.  Backend-only infrastructure.
 */
#ifndef ATOMICS_H
#define ATOMICS_H

/* These primitives are backend-only; refuse inclusion from frontend code. */
#ifdef FRONTEND
#error "atomics.h may not be included from frontend code"
#endif

/* Tell the sub-headers they are being included from here, not directly. */
#define INSIDE_ATOMICS_H

#include <limits.h>

/*
 * First pick up any architecture-specific definitions (alignment
 * requirements, inline-assembly implementations, spin-delay hints).
 */
#if defined(__arm__) || defined(__arm) || defined(__aarch64__)
#include "port/atomics/arch-arm.h"
#elif defined(__i386__) || defined(__i386) || defined(__x86_64__)
#include "port/atomics/arch-x86.h"
#elif defined(__ppc__) || defined(__powerpc__) || defined(__ppc64__) || defined(__powerpc64__)
#include "port/atomics/arch-ppc.h"
#elif defined(__hppa) || defined(__hppa__)
#include "port/atomics/arch-hppa.h"
#endif

/*
 * Then add compiler-specific intrinsics for whatever the architecture
 * header did not already provide.
 */
#if defined(__GNUC__) || defined(__INTEL_COMPILER)
#include "port/atomics/generic-gcc.h"
#elif defined(_MSC_VER)
#include "port/atomics/generic-msvc.h"
#elif defined(__SUNPRO_C) && !defined(__GNUC__)
#include "port/atomics/generic-sunpro.h"
#else
/* Unknown compiler: rely entirely on the fallback implementations below. */
#endif

/*
 * Provide emulation for anything still missing, and derive the remaining
 * operations from the primitives defined so far.
 */
#include "port/atomics/fallback.h"
#include "port/atomics/generic.h"
/*
 * pg_compiler_barrier - prevent the compiler from moving code across the
 * barrier.  Has no effect on CPU-level reordering.
 */
#define pg_compiler_barrier() pg_compiler_barrier_impl()

/*
 * pg_memory_barrier - full barrier: neither loads nor stores may be
 * reordered across it, by compiler or CPU.
 */
#define pg_memory_barrier() pg_memory_barrier_impl()

/*
 * pg_read_barrier - loads may not be reordered across the barrier.
 */
#define pg_read_barrier() pg_read_barrier_impl()

/*
 * pg_write_barrier - stores may not be reordered across the barrier.
 */
#define pg_write_barrier() pg_write_barrier_impl()

/*
 * pg_spin_delay - hint that we are busy-waiting in a spin loop
 * (presumably a CPU pause/yield hint; see the per-arch implementation).
 */
#define pg_spin_delay() pg_spin_delay_impl()
/*
 * pg_atomic_init_flag - initialize the atomic flag to "unset".
 *
 * Must be called before any other operation on the flag.
 */
static inline void
pg_atomic_init_flag(volatile pg_atomic_flag *ptr)
{
pg_atomic_init_flag_impl(ptr);
}
/*
 * pg_atomic_test_set_flag - atomically test-and-set the flag.
 *
 * Returns true if the flag was successfully set (it was previously unset),
 * false otherwise.  Acquire-style ordering (per the _impl contract).
 */
static inline bool
pg_atomic_test_set_flag(volatile pg_atomic_flag *ptr)
{
return pg_atomic_test_set_flag_impl(ptr);
}
/*
 * pg_atomic_unlocked_test_flag - check whether the flag is currently set,
 * without locking the cacheline.
 *
 * Returns true if the flag is set.  No barrier semantics.
 */
static inline bool
pg_atomic_unlocked_test_flag(volatile pg_atomic_flag *ptr)
{
return pg_atomic_unlocked_test_flag_impl(ptr);
}
/*
 * pg_atomic_clear_flag - release/unset the flag.
 *
 * Release-style ordering (per the _impl contract).
 */
static inline void
pg_atomic_clear_flag(volatile pg_atomic_flag *ptr)
{
pg_atomic_clear_flag_impl(ptr);
}
/*
 * pg_atomic_init_u32 - initialize the variable to val.
 *
 * Must be called before any other operation on the variable; not safe
 * against concurrent use.  No barrier semantics.
 */
static inline void
pg_atomic_init_u32(volatile pg_atomic_uint32 *ptr, uint32 val)
{
/* atomic u32 variables must be 4-byte aligned on all platforms */
AssertPointerAlignment(ptr, 4);
pg_atomic_init_u32_impl(ptr, val);
}
/*
 * pg_atomic_read_u32 - atomically read the current value.
 *
 * Returns some value that was written previously or concurrently; the read
 * is atomic but carries no barrier semantics.
 */
static inline uint32
pg_atomic_read_u32(volatile pg_atomic_uint32 *ptr)
{
AssertPointerAlignment(ptr, 4);
return pg_atomic_read_u32_impl(ptr);
}
/*
 * pg_atomic_read_membarrier_u32 - atomically read the current value, with
 * full barrier semantics.
 */
static inline uint32
pg_atomic_read_membarrier_u32(volatile pg_atomic_uint32 *ptr)
{
AssertPointerAlignment(ptr, 4);
return pg_atomic_read_membarrier_u32_impl(ptr);
}
/*
 * pg_atomic_write_u32 - atomically write val to the variable.
 *
 * No barrier semantics.
 */
static inline void
pg_atomic_write_u32(volatile pg_atomic_uint32 *ptr, uint32 val)
{
AssertPointerAlignment(ptr, 4);
pg_atomic_write_u32_impl(ptr, val);
}
/*
 * pg_atomic_unlocked_write_u32 - write val without the cost of a locked
 * instruction.
 *
 * Only safe when there can be no concurrent atomic (read-modify-write)
 * updates to the variable.  No barrier semantics.
 */
static inline void
pg_atomic_unlocked_write_u32(volatile pg_atomic_uint32 *ptr, uint32 val)
{
AssertPointerAlignment(ptr, 4);
pg_atomic_unlocked_write_u32_impl(ptr, val);
}
/*
 * pg_atomic_write_membarrier_u32 - atomically write val, with full barrier
 * semantics.
 */
static inline void
pg_atomic_write_membarrier_u32(volatile pg_atomic_uint32 *ptr, uint32 val)
{
AssertPointerAlignment(ptr, 4);
pg_atomic_write_membarrier_u32_impl(ptr, val);
}
/*
 * pg_atomic_exchange_u32 - atomically replace the value with newval.
 *
 * Returns the previous value.  Full barrier semantics.
 */
static inline uint32
pg_atomic_exchange_u32(volatile pg_atomic_uint32 *ptr, uint32 newval)
{
AssertPointerAlignment(ptr, 4);
return pg_atomic_exchange_u32_impl(ptr, newval);
}
/*
 * pg_atomic_compare_exchange_u32 - CAS operation.
 *
 * Atomically: if *ptr equals *expected, store newval into *ptr and return
 * true; otherwise store the value actually found into *expected and return
 * false.  Full barrier semantics.
 */
static inline bool
pg_atomic_compare_exchange_u32(volatile pg_atomic_uint32 *ptr,
uint32 *expected, uint32 newval)
{
AssertPointerAlignment(ptr, 4);
AssertPointerAlignment(expected, 4);
return pg_atomic_compare_exchange_u32_impl(ptr, expected, newval);
}
/*
 * pg_atomic_fetch_add_u32 - atomically add add_ (which may be negative).
 *
 * Returns the value of ptr before the addition.  Full barrier semantics.
 */
static inline uint32
pg_atomic_fetch_add_u32(volatile pg_atomic_uint32 *ptr, int32 add_)
{
AssertPointerAlignment(ptr, 4);
return pg_atomic_fetch_add_u32_impl(ptr, add_);
}
/*
 * pg_atomic_fetch_sub_u32 - atomically subtract sub_.
 *
 * Returns the value of ptr before the subtraction.  sub_ must not be
 * INT_MIN (its negation would overflow).  Full barrier semantics.
 */
static inline uint32
pg_atomic_fetch_sub_u32(volatile pg_atomic_uint32 *ptr, int32 sub_)
{
AssertPointerAlignment(ptr, 4);
Assert(sub_ != INT_MIN);
return pg_atomic_fetch_sub_u32_impl(ptr, sub_);
}
/*
 * pg_atomic_fetch_and_u32 - atomically bitwise-AND and_ into the variable.
 *
 * Returns the value of ptr before the operation.  Full barrier semantics.
 */
static inline uint32
pg_atomic_fetch_and_u32(volatile pg_atomic_uint32 *ptr, uint32 and_)
{
AssertPointerAlignment(ptr, 4);
return pg_atomic_fetch_and_u32_impl(ptr, and_);
}
/*
 * pg_atomic_fetch_or_u32 - atomically bitwise-OR or_ into the variable.
 *
 * Returns the value of ptr before the operation.  Full barrier semantics.
 */
static inline uint32
pg_atomic_fetch_or_u32(volatile pg_atomic_uint32 *ptr, uint32 or_)
{
AssertPointerAlignment(ptr, 4);
return pg_atomic_fetch_or_u32_impl(ptr, or_);
}
/*
 * pg_atomic_add_fetch_u32 - atomically add add_ (which may be negative).
 *
 * Returns the value of ptr AFTER the addition (contrast fetch_add).
 * Full barrier semantics.
 */
static inline uint32
pg_atomic_add_fetch_u32(volatile pg_atomic_uint32 *ptr, int32 add_)
{
AssertPointerAlignment(ptr, 4);
return pg_atomic_add_fetch_u32_impl(ptr, add_);
}
/*
 * pg_atomic_sub_fetch_u32 - atomically subtract sub_.
 *
 * Returns the value of ptr AFTER the subtraction.  sub_ must not be
 * INT_MIN (its negation would overflow).  Full barrier semantics.
 */
static inline uint32
pg_atomic_sub_fetch_u32(volatile pg_atomic_uint32 *ptr, int32 sub_)
{
AssertPointerAlignment(ptr, 4);
Assert(sub_ != INT_MIN);
return pg_atomic_sub_fetch_u32_impl(ptr, sub_);
}
/*
 * pg_atomic_init_u64 - 64-bit analog of pg_atomic_init_u32.
 *
 * The 8-byte alignment check is skipped when 64-bit atomics are emulated
 * (PG_HAVE_ATOMIC_U64_SIMULATION), since the emulation does not need it.
 */
static inline void
pg_atomic_init_u64(volatile pg_atomic_uint64 *ptr, uint64 val)
{
#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
AssertPointerAlignment(ptr, 8);
#endif
pg_atomic_init_u64_impl(ptr, val);
}
/*
 * pg_atomic_read_u64 - 64-bit analog of pg_atomic_read_u32.
 *
 * No barrier semantics.
 */
static inline uint64
pg_atomic_read_u64(volatile pg_atomic_uint64 *ptr)
{
#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
AssertPointerAlignment(ptr, 8);
#endif
return pg_atomic_read_u64_impl(ptr);
}
/*
 * pg_atomic_read_membarrier_u64 - 64-bit analog of
 * pg_atomic_read_membarrier_u32.  Full barrier semantics.
 */
static inline uint64
pg_atomic_read_membarrier_u64(volatile pg_atomic_uint64 *ptr)
{
#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
AssertPointerAlignment(ptr, 8);
#endif
return pg_atomic_read_membarrier_u64_impl(ptr);
}
/*
 * pg_atomic_write_u64 - 64-bit analog of pg_atomic_write_u32.
 *
 * No barrier semantics.
 */
static inline void
pg_atomic_write_u64(volatile pg_atomic_uint64 *ptr, uint64 val)
{
#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
AssertPointerAlignment(ptr, 8);
#endif
pg_atomic_write_u64_impl(ptr, val);
}
/*
 * pg_atomic_write_membarrier_u64 - 64-bit analog of
 * pg_atomic_write_membarrier_u32.  Full barrier semantics.
 */
static inline void
pg_atomic_write_membarrier_u64(volatile pg_atomic_uint64 *ptr, uint64 val)
{
#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
AssertPointerAlignment(ptr, 8);
#endif
pg_atomic_write_membarrier_u64_impl(ptr, val);
}
/*
 * pg_atomic_exchange_u64 - 64-bit analog of pg_atomic_exchange_u32.
 *
 * Returns the previous value.  Full barrier semantics.
 */
static inline uint64
pg_atomic_exchange_u64(volatile pg_atomic_uint64 *ptr, uint64 newval)
{
#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
AssertPointerAlignment(ptr, 8);
#endif
return pg_atomic_exchange_u64_impl(ptr, newval);
}
/*
 * pg_atomic_compare_exchange_u64 - 64-bit analog of
 * pg_atomic_compare_exchange_u32.
 *
 * On failure, *expected is updated with the value actually found.
 * Full barrier semantics.
 */
static inline bool
pg_atomic_compare_exchange_u64(volatile pg_atomic_uint64 *ptr,
uint64 *expected, uint64 newval)
{
#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
AssertPointerAlignment(ptr, 8);
#endif
return pg_atomic_compare_exchange_u64_impl(ptr, expected, newval);
}
/*
 * pg_atomic_fetch_add_u64 - 64-bit analog of pg_atomic_fetch_add_u32.
 *
 * Returns the value of ptr before the addition.  Full barrier semantics.
 */
static inline uint64
pg_atomic_fetch_add_u64(volatile pg_atomic_uint64 *ptr, int64 add_)
{
#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
AssertPointerAlignment(ptr, 8);
#endif
return pg_atomic_fetch_add_u64_impl(ptr, add_);
}
/*
 * pg_atomic_fetch_sub_u64 - 64-bit analog of pg_atomic_fetch_sub_u32.
 *
 * Returns the value of ptr before the subtraction.  sub_ must not be
 * PG_INT64_MIN (its negation would overflow).  Full barrier semantics.
 */
static inline uint64
pg_atomic_fetch_sub_u64(volatile pg_atomic_uint64 *ptr, int64 sub_)
{
#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
AssertPointerAlignment(ptr, 8);
#endif
Assert(sub_ != PG_INT64_MIN);
return pg_atomic_fetch_sub_u64_impl(ptr, sub_);
}
/*
 * pg_atomic_fetch_and_u64 - 64-bit analog of pg_atomic_fetch_and_u32.
 *
 * Returns the value of ptr before the operation.  Full barrier semantics.
 */
static inline uint64
pg_atomic_fetch_and_u64(volatile pg_atomic_uint64 *ptr, uint64 and_)
{
#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
AssertPointerAlignment(ptr, 8);
#endif
return pg_atomic_fetch_and_u64_impl(ptr, and_);
}
/*
 * pg_atomic_fetch_or_u64 - 64-bit analog of pg_atomic_fetch_or_u32.
 *
 * Returns the value of ptr before the operation.  Full barrier semantics.
 */
static inline uint64
pg_atomic_fetch_or_u64(volatile pg_atomic_uint64 *ptr, uint64 or_)
{
#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
AssertPointerAlignment(ptr, 8);
#endif
return pg_atomic_fetch_or_u64_impl(ptr, or_);
}
/*
 * pg_atomic_add_fetch_u64 - 64-bit analog of pg_atomic_add_fetch_u32.
 *
 * Returns the value of ptr AFTER the addition.  Full barrier semantics.
 */
static inline uint64
pg_atomic_add_fetch_u64(volatile pg_atomic_uint64 *ptr, int64 add_)
{
#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
AssertPointerAlignment(ptr, 8);
#endif
return pg_atomic_add_fetch_u64_impl(ptr, add_);
}
/*
 * pg_atomic_sub_fetch_u64 - 64-bit analog of pg_atomic_sub_fetch_u32.
 *
 * Returns the value of ptr AFTER the subtraction.  sub_ must not be
 * PG_INT64_MIN (its negation would overflow).  Full barrier semantics.
 */
static inline uint64
pg_atomic_sub_fetch_u64(volatile pg_atomic_uint64 *ptr, int64 sub_)
{
#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
AssertPointerAlignment(ptr, 8);
#endif
Assert(sub_ != PG_INT64_MIN);
return pg_atomic_sub_fetch_u64_impl(ptr, sub_);
}
/*
 * pg_atomic_monotonic_advance_u64 - advance the variable to target, unless
 * it already holds a value >= target.
 *
 * Returns the resulting value of the variable: target if we advanced it,
 * otherwise the larger value that was already (or concurrently became)
 * stored there.  A memory barrier is issued on every path, including the
 * no-op case, so callers get consistent ordering guarantees.
 */
static inline uint64
pg_atomic_monotonic_advance_u64(volatile pg_atomic_uint64 *ptr, uint64 target)
{
uint64 currval;
#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
AssertPointerAlignment(ptr, 8);
#endif
currval = pg_atomic_read_u64_impl(ptr);
if (currval >= target)
{
/* Already far enough; emit a barrier to match the CAS path's ordering. */
pg_memory_barrier();
return currval;
}
/*
 * CAS loop: on failure, pg_atomic_compare_exchange_u64 refreshes currval
 * with the value actually found, so the loop condition re-checks whether a
 * concurrent updater has already advanced the variable past target.
 */
while (currval < target)
{
if (pg_atomic_compare_exchange_u64(ptr, &currval, target))
return target;
}
/* Someone else advanced the variable to >= target for us. */
return currval;
}
#undef INSIDE_ATOMICS_H
#endif