#![no_std]
#![feature(asm_experimental_arch)]
#![cfg_attr(not(target_arch = "msp430"), feature(core_intrinsics))]
use core::arch::asm;
use core::cell::UnsafeCell;
use core::fmt;
/// A boolean type which can be safely shared between contexts (e.g. main
/// code and interrupt handlers): all access goes through the byte-wide
/// atomic operations defined below.
pub struct AtomicBool {
// Backing storage: 0 = false, non-zero = true. Kept as `u8` so the
// byte-sized `AtomicOperations` primitives can operate on it directly.
v: UnsafeCell<u8>,
}
impl Default for AtomicBool {
fn default() -> Self {
Self::new(false)
}
}
// SAFETY: every shared-reference operation on `AtomicBool` goes through the
// atomic primitives of `AtomicOperations`, so concurrent access from other
// contexts (e.g. interrupt handlers) cannot observe torn values.
unsafe impl Sync for AtomicBool {}
/// A raw pointer type which can be safely shared between contexts: loads and
/// stores of the pointer value are performed atomically (via `usize`-wide
/// operations — see `load`/`store` below).
pub struct AtomicPtr<T> {
// The pointer itself; accessed through `usize` atomic ops, which assumes
// pointers and `usize` have the same size (true on MSP430).
p: UnsafeCell<*mut T>,
}
impl<T> Default for AtomicPtr<T> {
fn default() -> AtomicPtr<T> {
AtomicPtr::new(core::ptr::null_mut())
}
}
// SAFETY: only the pointer *value* is stored and it is read/written with
// atomic usize operations, so moving or sharing the container is sound.
// Note there is deliberately no `T: Send`/`T: Sync` bound — the same is true
// of `core::sync::atomic::AtomicPtr`; responsibility for the pointee lies
// with whoever dereferences the pointer.
unsafe impl<T> Send for AtomicPtr<T> {}
unsafe impl<T> Sync for AtomicPtr<T> {}
// Legacy-style initializer constant (same shape as the old
// `core::sync::atomic::ATOMIC_BOOL_INIT`); `AtomicBool::new(false)` is the
// preferred spelling in new code.
pub const ATOMIC_BOOL_INIT: AtomicBool = AtomicBool::new(false);
impl AtomicBool {
    /// Creates a new `AtomicBool` holding `v` (stored as 0 or 1).
    #[inline]
    pub const fn new(v: bool) -> AtomicBool {
        AtomicBool {
            v: UnsafeCell::new(v as u8),
        }
    }

    /// Returns a mutable reference to the underlying `bool`.
    #[inline]
    pub fn get_mut(&mut self) -> &mut bool {
        let slot = self.v.get() as *mut bool;
        // SAFETY: `&mut self` guarantees exclusive access, and the cell only
        // ever holds 0 or 1 — both valid `bool` bit patterns.
        unsafe { &mut *slot }
    }

    /// Consumes the atomic and returns the contained value.
    #[inline]
    pub fn into_inner(self) -> bool {
        let raw = self.v.into_inner();
        raw != 0
    }

    /// Atomically loads the current value.
    #[inline]
    pub fn load(&self) -> bool {
        let raw = unsafe { u8::atomic_load(self.v.get()) };
        raw != 0
    }

    /// Atomically stores `val`.
    #[inline]
    pub fn store(&self, val: bool) {
        unsafe { u8::atomic_store(self.v.get(), val as u8) }
    }

    /// Atomically replaces the value with `self & val`.
    #[inline]
    pub fn and(&self, val: bool) {
        unsafe { u8::atomic_and(self.v.get(), val as u8) }
    }

    /// Atomically replaces the value with `!(self & val)`, built from the
    /// existing atomic primitives.
    #[inline]
    pub fn nand(&self, val: bool) {
        match val {
            // !(v & true) == !v — flip the stored value.
            true => self.xor(true),
            // !(v & false) == true regardless of the stored value.
            false => self.store(true),
        }
    }

    /// Atomically replaces the value with `self | val`.
    #[inline]
    pub fn or(&self, val: bool) {
        unsafe { u8::atomic_or(self.v.get(), val as u8) }
    }

    /// Atomically replaces the value with `self ^ val`.
    #[inline]
    pub fn xor(&self, val: bool) {
        unsafe { u8::atomic_xor(self.v.get(), val as u8) }
    }
}
impl<T> AtomicPtr<T> {
    /// Creates a new `AtomicPtr` holding `p`.
    #[inline]
    pub const fn new(p: *mut T) -> AtomicPtr<T> {
        AtomicPtr { p: UnsafeCell::new(p) }
    }

    /// Returns a mutable reference to the underlying pointer.
    #[inline]
    pub fn get_mut(&mut self) -> &mut *mut T {
        // SAFETY: `&mut self` guarantees exclusive access to the cell.
        unsafe { &mut *self.p.get() }
    }

    /// Consumes the atomic and returns the contained pointer.
    #[inline]
    pub fn into_inner(self) -> *mut T {
        self.p.into_inner()
    }

    /// Atomically loads the pointer. The slot is read as a `usize` and the
    /// bits are cast back to a pointer (pointer-sized == usize-sized here).
    #[inline]
    pub fn load(&self) -> *mut T {
        let slot = self.p.get() as *mut usize;
        let bits = unsafe { usize::atomic_load(slot) };
        bits as *mut T
    }

    /// Atomically stores `ptr`, via the same `usize` reinterpretation.
    #[inline]
    pub fn store(&self, ptr: *mut T) {
        let slot = self.p.get() as *mut usize;
        unsafe { usize::atomic_store(slot, ptr as usize) }
    }
}
// Generates one atomic integer wrapper type (`$atomic_type` around
// `$int_type`) together with the `AtomicOperations` impl that backs it.
//
// On `target_arch = "msp430"` every operation is a single instruction
// (`mov`/`add`/`sub`/`and`/`bis`/`bic`/`xor` with the given size suffix:
// ".b" for bytes, ".w" for words). MSP430 is single-core, so a single
// read-modify-write instruction cannot be torn by an interrupt — that is
// what makes these operations atomic on that target.
//
// On every other architecture the core intrinsics (SeqCst variants) are
// used so the crate still builds and can be exercised on a host.
macro_rules! atomic_int {
    ($int_type:ident $atomic_type:ident $atomic_init:ident $asm_suffix:expr) => {
        /// An integer type which can be safely shared between contexts
        /// (e.g. main code and interrupt handlers).
        pub struct $atomic_type {
            v: UnsafeCell<$int_type>,
        }

        /// Legacy-style zero initializer; `$atomic_type::new(0)` is the
        /// preferred spelling in new code.
        pub const $atomic_init: $atomic_type = $atomic_type::new(0);

        impl Default for $atomic_type {
            /// The default value is zero.
            fn default() -> Self {
                Self::new(Default::default())
            }
        }

        impl fmt::Debug for $atomic_type {
            fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
                f.debug_tuple(stringify!($atomic_type))
                    .field(&self.load())
                    .finish()
            }
        }

        // SAFETY: all access through a shared reference goes via the atomic
        // operations below, so concurrent readers cannot see torn values.
        unsafe impl Sync for $atomic_type {}

        impl $atomic_type {
            /// Creates a new atomic integer with the given initial value.
            #[inline]
            pub const fn new(v: $int_type) -> Self {
                $atomic_type { v: UnsafeCell::new(v) }
            }

            /// Returns a mutable reference to the underlying integer.
            /// Safe because `&mut self` guarantees exclusive access.
            #[inline]
            pub fn get_mut(&mut self) -> &mut $int_type {
                unsafe { &mut *self.v.get() }
            }

            /// Consumes the atomic and returns the contained value.
            #[inline]
            pub fn into_inner(self) -> $int_type {
                self.v.into_inner()
            }

            /// Atomically loads the current value.
            #[inline]
            pub fn load(&self) -> $int_type {
                unsafe { $int_type::atomic_load(self.v.get()) }
            }

            /// Atomically stores `val`.
            #[inline]
            pub fn store(&self, val: $int_type) {
                unsafe { $int_type::atomic_store(self.v.get(), val); }
            }

            /// Atomically adds `val` to the current value (wrapping).
            #[inline]
            pub fn add(&self, val: $int_type) {
                unsafe { $int_type::atomic_add(self.v.get(), val) }
            }

            /// Atomically subtracts `val` from the current value (wrapping).
            #[inline]
            pub fn sub(&self, val: $int_type) {
                unsafe { $int_type::atomic_sub(self.v.get(), val) }
            }

            /// Atomically replaces the value with `self & val`.
            #[inline]
            pub fn and(&self, val: $int_type) {
                unsafe { $int_type::atomic_and(self.v.get(), val) }
            }

            /// Atomically clears every bit that is set in `val`
            /// (`*self &= !val`); a single `bic` instruction on MSP430.
            /// Previously `atomic_clear` existed in the backend but was
            /// never exposed — this makes it usable.
            #[inline]
            pub fn clear(&self, val: $int_type) {
                unsafe { $int_type::atomic_clear(self.v.get(), val) }
            }

            /// Atomically replaces the value with `self | val`.
            #[inline]
            pub fn or(&self, val: $int_type) {
                unsafe { $int_type::atomic_or(self.v.get(), val) }
            }

            /// Atomically replaces the value with `self ^ val`.
            #[inline]
            pub fn xor(&self, val: $int_type) {
                unsafe { $int_type::atomic_xor(self.v.get(), val) }
            }
        }

        // MSP430 backend: each operation is exactly one instruction, hence
        // atomic with respect to interrupts on this single-core target.
        // `0({0})` is indexed addressing with offset 0; `@{0}` is indirect
        // register mode.
        #[cfg(target_arch = "msp430")]
        impl AtomicOperations for $int_type {
            #[inline(always)]
            unsafe fn atomic_store(dst: *mut Self, val: Self) {
                asm!(concat!("mov", $asm_suffix, " {1}, 0({0})"), in(reg) dst, in(reg) val);
            }
            #[inline(always)]
            unsafe fn atomic_load(dst: *const Self) -> Self {
                let out;
                asm!(concat!("mov", $asm_suffix, " @{0}, {1}"), in(reg) dst, lateout(reg) out);
                out
            }
            #[inline(always)]
            unsafe fn atomic_add(dst: *mut Self, val: Self) {
                asm!(concat!("add", $asm_suffix, " {1}, 0({0})"), in(reg) dst, in(reg) val);
            }
            #[inline(always)]
            unsafe fn atomic_sub(dst: *mut Self, val: Self) {
                asm!(concat!("sub", $asm_suffix, " {1}, 0({0})"), in(reg) dst, in(reg) val);
            }
            #[inline(always)]
            unsafe fn atomic_and(dst: *mut Self, val: Self) {
                asm!(concat!("and", $asm_suffix, " {1}, 0({0})"), in(reg) dst, in(reg) val);
            }
            // `bic` (bit clear): *dst &= !val.
            #[inline(always)]
            unsafe fn atomic_clear(dst: *mut Self, val: Self) {
                asm!(concat!("bic", $asm_suffix, " {1}, 0({0})"), in(reg) dst, in(reg) val);
            }
            // `bis` (bit set): *dst |= val.
            #[inline(always)]
            unsafe fn atomic_or(dst: *mut Self, val: Self) {
                asm!(concat!("bis", $asm_suffix, " {1}, 0({0})"), in(reg) dst, in(reg) val);
            }
            #[inline(always)]
            unsafe fn atomic_xor(dst: *mut Self, val: Self) {
                asm!(concat!("xor", $asm_suffix, " {1}, 0({0})"), in(reg) dst, in(reg) val);
            }
        }

        // Host/fallback backend: delegate to the core atomic intrinsics
        // (SeqCst variants) so non-msp430 builds behave equivalently.
        #[cfg(not(target_arch = "msp430"))]
        impl AtomicOperations for $int_type {
            #[inline(always)]
            unsafe fn atomic_store(dst: *mut Self, val: Self) {
                ::core::intrinsics::atomic_store(dst, val);
            }
            #[inline(always)]
            unsafe fn atomic_load(dst: *const Self) -> Self {
                ::core::intrinsics::atomic_load(dst)
            }
            #[inline(always)]
            unsafe fn atomic_add(dst: *mut Self, val: Self) {
                ::core::intrinsics::atomic_xadd(dst, val);
            }
            #[inline(always)]
            unsafe fn atomic_sub(dst: *mut Self, val: Self) {
                ::core::intrinsics::atomic_xsub(dst, val);
            }
            #[inline(always)]
            unsafe fn atomic_and(dst: *mut Self, val: Self) {
                ::core::intrinsics::atomic_and(dst, val);
            }
            // Mirrors MSP430 `bic`: *dst &= !val.
            #[inline(always)]
            unsafe fn atomic_clear(dst: *mut Self, val: Self) {
                ::core::intrinsics::atomic_and(dst, !val);
            }
            #[inline(always)]
            unsafe fn atomic_or(dst: *mut Self, val: Self) {
                ::core::intrinsics::atomic_or(dst, val);
            }
            #[inline(always)]
            unsafe fn atomic_xor(dst: *mut Self, val: Self) {
                ::core::intrinsics::atomic_xor(dst, val);
            }
        }
    }
}
// Instantiate the atomic wrappers. Byte-sized types use the ".b" instruction
// suffix; 16-bit types use ".w". `isize`/`usize` also get ".w" — this code
// assumes pointer-sized integers are 16 bits wide on the msp430 target
// (NOTE(review): holds for MSP430; the suffix would be wrong elsewhere, but
// other targets take the intrinsics path instead of the asm path).
atomic_int! {
i8 AtomicI8 ATOMIC_I8_INIT ".b"
}
atomic_int! {
u8 AtomicU8 ATOMIC_U8_INIT ".b"
}
atomic_int! {
i16 AtomicI16 ATOMIC_I16_INIT ".w"
}
atomic_int! {
u16 AtomicU16 ATOMIC_U16_INIT ".w"
}
atomic_int! {
isize AtomicIsize ATOMIC_ISIZE_INIT ".w"
}
atomic_int! {
usize AtomicUsize ATOMIC_USIZE_INIT ".w"
}
/// Low-level atomic primitives, implemented per integer width by the
/// `atomic_int!` macro (inline asm on MSP430, core intrinsics elsewhere).
///
/// # Safety
///
/// For every method, `dst` must be non-null, properly aligned for `Self`,
/// and valid for reads and writes for the duration of the call.
pub trait AtomicOperations {
/// Atomically writes `val` to `*dst`.
unsafe fn atomic_store(dst: *mut Self, val: Self);
/// Atomically reads and returns `*dst`.
unsafe fn atomic_load(dst: *const Self) -> Self;
/// Atomically performs `*dst += val` (wrapping).
unsafe fn atomic_add(dst: *mut Self, val: Self);
/// Atomically performs `*dst -= val` (wrapping).
unsafe fn atomic_sub(dst: *mut Self, val: Self);
/// Atomically performs `*dst &= val`.
unsafe fn atomic_and(dst: *mut Self, val: Self);
/// Atomically performs `*dst &= !val` (bit clear, `bic` on MSP430).
unsafe fn atomic_clear(dst: *mut Self, val: Self);
/// Atomically performs `*dst |= val` (`bis` on MSP430).
unsafe fn atomic_or(dst: *mut Self, val: Self);
/// Atomically performs `*dst ^= val`.
unsafe fn atomic_xor(dst: *mut Self, val: Self);
}
impl fmt::Debug for AtomicBool {
    /// Formats as `AtomicBool(<value>)`, reading the value atomically.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let value = self.load();
        let mut tuple = f.debug_tuple("AtomicBool");
        tuple.field(&value);
        tuple.finish()
    }
}
impl<T> fmt::Debug for AtomicPtr<T> {
    /// Formats as `AtomicPtr(<pointer>)`, reading the pointer atomically.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let ptr = self.load();
        let mut tuple = f.debug_tuple("AtomicPtr");
        tuple.field(&ptr);
        tuple.finish()
    }
}