use core::convert::From;
use core::marker::PhantomData;
#[cfg(feature = "tracing")]
use crate::tracing;
/// Marker type: register permits both read and write access.
#[derive(Copy, Clone, PartialEq, Eq)]
pub struct RW;
/// Marker type: register permits read-only access.
#[derive(Copy, Clone, PartialEq, Eq)]
pub struct R;
/// Marker type: register permits write-only access.
#[derive(Copy, Clone, PartialEq, Eq)]
pub struct W;
pub(crate) mod sealed {
    use super::*;
    use core::ops::{BitAnd, BitAndAssign, BitOrAssign, Not, Shl, Shr};

    /// Crate-private half of the access-mode trait: keeps outside crates from
    /// implementing `Access` on their own types.
    pub trait Access {}
    impl Access for R {}
    impl Access for W {}
    impl Access for RW {}

    /// Infallible, truncating conversion used to narrow `u64` field masks and
    /// values down to a register's native integer width.
    pub trait CastFrom<A> {
        fn cast_from(val: A) -> Self;
    }

    /// Generates the truncating `CastFrom<u64>` impl for each primitive
    /// register width (all four bodies are the identical `as` cast).
    macro_rules! impl_cast_from_u64 {
        ($($ty:ty),* $(,)?) => {
            $(
                impl CastFrom<u64> for $ty {
                    #[inline(always)]
                    fn cast_from(val: u64) -> Self {
                        val as Self
                    }
                }
            )*
        };
    }
    impl_cast_from_u64!(u8, u16, u32, u64);

    /// Bound bundle collecting every integer capability the register
    /// plumbing relies on: copying, widening to `u64`, narrowing from `u64`,
    /// shifting by a `usize`, and bitwise mask arithmetic.
    pub trait RegNumberT:
        Copy
        + From<u8>
        + Into<u64>
        + CastFrom<u64>
        + Shr<usize, Output = Self>
        + Shl<usize, Output = Self>
        + BitAndAssign
        + BitAnd<Output = Self>
        + Not<Output = Self>
        + BitOrAssign
    {
    }
    impl RegNumberT for u8 {}
    impl RegNumberT for u16 {}
    impl RegNumberT for u32 {}
    impl RegNumberT for u64 {}

    /// Ties a register marker type to the raw integer type backing it.
    pub trait RegSpec {
        type DataType: RegNumberT;
    }
}
/// Public access-mode trait; sealed via `sealed::Access` so only `R`, `W`
/// and `RW` can ever implement it.
pub trait Access: sealed::Access + Copy {}
impl Access for R {}
impl Access for W {}
impl Access for RW {}
/// Access modes that allow reading (`R` and `RW`).
pub trait Read: Access {}
impl Read for RW {}
impl Read for R {}
/// Access modes that allow writing (`W` and `RW`).
pub trait Write: Access {}
impl Write for RW {}
impl Write for W {}
/// Zero-sized handle to a memory-mapped register. `T` is the register spec
/// (fixing the data width), `A` the permitted access mode. The register's
/// address is the handle's own address (`ptr`/`addr` cast `self`), so these
/// handles are obtained by pointer casts — see `from_ptr`.
#[derive(Copy, Clone, PartialEq, Eq)]
pub struct Reg<T, A: Access> {
    // The raw pointer in PhantomData suppresses auto Send/Sync; those are
    // re-added manually below. It is never dereferenced.
    phantom: PhantomData<*mut (T, A)>,
}
// SAFETY: `Reg` is zero-sized and holds no state; the PhantomData pointer
// exists only for variance/auto-trait control and is never dereferenced.
unsafe impl<T, A: Access> Send for Reg<T, A> {}
// SAFETY: same reasoning as the `Send` impl — there is no interior state to
// race on.
unsafe impl<T, A: Access> Sync for Reg<T, A> {}
use sealed::CastFrom;
use sealed::{RegNumberT, RegSpec};
/// In-flight register value: `data` holds the raw word, `mask` tracks which
/// bit positions have been explicitly modified through field setters.
#[doc(hidden)]
#[derive(Copy, Clone)]
pub struct RegValueT<Reg: sealed::RegSpec> {
    // Raw register word.
    pub(crate) data: Reg::DataType,
    // Bitmask of positions written since construction (see `set_raw` and the
    // field `set` implementations, which OR into it).
    pub(crate) mask: Reg::DataType,
}
/// Constructor and raw-word accessors implemented by register value types.
pub trait RegisterValue<T: RegSpec> {
    /// Wraps an already-read raw word.
    #[must_use]
    fn new(data: T::DataType) -> Self;
    /// Returns the raw register word.
    #[must_use]
    fn get_raw(&self) -> T::DataType;
    /// Replaces the raw register word, consuming and returning `self`.
    #[must_use]
    fn set_raw(self, value: T::DataType) -> Self;
}
impl<T: RegSpec> RegisterValue<T> for RegValueT<T> {
    /// Wraps `data` with an empty modification mask (no bits marked touched).
    #[inline(always)]
    fn new(data: T::DataType) -> RegValueT<T> {
        let untouched: T::DataType = 0x0u8.into();
        Self {
            data,
            mask: untouched,
        }
    }
    /// Returns the raw register word.
    #[inline(always)]
    fn get_raw(&self) -> T::DataType {
        self.data
    }
    /// Replaces the whole word and flags every bit as modified.
    #[inline(always)]
    fn set_raw(mut self, value: T::DataType) -> Self {
        let zero: T::DataType = 0x0u8.into();
        self.data = value;
        // All-ones mask: a full-word write touches every bit.
        self.mask = !zero;
        self
    }
}
/// Convenience accessors for registers without bitfields: `get`/`set`
/// operate on the whole raw word by delegating to `RegisterValue`.
pub trait NoBitfieldReg<Reg: RegSpec>: RegisterValue<Reg>
where
    Self: Sized,
{
    /// Returns the whole register word (delegates to `get_raw`).
    #[inline(always)]
    #[must_use]
    fn get(&self) -> Reg::DataType {
        self.get_raw()
    }
    /// Replaces the whole register word (delegates to `set_raw`).
    #[inline(always)]
    #[must_use]
    fn set(self, value: Reg::DataType) -> Self {
        self.set_raw(value)
    }
}
impl<T, A> Reg<T, A>
where
    T: RegSpec,
    A: Access,
{
    /// Reinterprets `ptr` as a `'static` reference to this zero-sized
    /// register handle.
    ///
    /// NOTE(review): unlike `ClusterRegisterArray::from_ptr`, this is not
    /// `unsafe` even though it dereferences an arbitrary pointer; consider
    /// aligning the two signatures in a breaking release.
    #[allow(dead_code)]
    #[inline(always)]
    #[must_use]
    pub(crate) const fn from_ptr(ptr: *mut u8) -> &'static Self {
        // SAFETY: callers are expected to pass the memory-mapped address of
        // this register, which stays valid for the program lifetime; `Reg`
        // is zero-sized, so no bytes are actually read through the reference.
        unsafe { &*(ptr as *const Self) }
    }
    /// The register's MMIO address as a typed raw pointer (the handle itself
    /// lives at that address).
    #[inline(always)]
    #[must_use]
    pub const fn ptr(&self) -> *mut T::DataType {
        self as *const _ as *mut T::DataType
    }
    /// The register's MMIO address as an integer.
    #[inline(always)]
    #[must_use]
    pub fn addr(&self) -> usize {
        (self as *const _) as usize
    }
}
impl<T, A> Reg<T, A>
where
    T: RegSpec,
    A: Read,
{
    /// Reads the register and wraps the value in a [`RegValueT`].
    ///
    /// With the `tracing` feature the value comes from the user-registered
    /// `tracing::READ_FN` callback instead of hardware; otherwise it is a
    /// volatile load through `self.ptr()`.
    ///
    /// # Panics
    /// Under `tracing` (without `tracing_dummy`), panics if no read callback
    /// has been registered.
    ///
    /// # Safety
    /// The caller must ensure this register is valid to read in the current
    /// device state.
    #[inline(always)]
    #[must_use]
    pub unsafe fn read(&self) -> RegValueT<T> {
        unsafe {
            // Tracing build: route the read through the registered callback.
            #[cfg(feature = "tracing")]
            let val = {
                let mut buf: u64 = 0x0;
                tracing::READ_FN.with(|rf| {
                    if let Some(rf) = rf.get() {
                        buf = rf(self.addr(), std::mem::size_of::<T::DataType>());
                    } else {
                        // With `tracing_dummy` a missing callback is tolerated
                        // and the read yields 0 (buf keeps its initial value).
                        #[cfg(not(feature = "tracing_dummy"))]
                        panic!(
                            "Please, provide an handler for read with tracing::set_read_fn(callback);"
                        );
                    }
                });
                T::DataType::cast_from(buf)
            };
            // Hardware build: plain volatile load from the MMIO address.
            #[cfg(not(feature = "tracing"))]
            let val = self.ptr().read_volatile();
            RegValueT::<T>::new(val)
        }
    }
}
impl<T, A> Reg<T, A>
where
    T: RegSpec,
    A: Write,
{
    /// Writes `reg_value.data` to the register.
    ///
    /// With the `tracing` feature the write is forwarded to the registered
    /// `tracing::WRITE_FN` callback instead of touching memory; otherwise it
    /// is a volatile store through `self.ptr()`.
    ///
    /// # Panics
    /// Under `tracing` (without `tracing_dummy`), panics if no write callback
    /// has been registered.
    ///
    /// # Safety
    /// The caller must ensure this register is valid to write in the current
    /// device state.
    #[inline(always)]
    pub unsafe fn write(&self, reg_value: RegValueT<T>) {
        unsafe {
            #[cfg(feature = "tracing")]
            tracing::WRITE_FN.with(|wf| {
                if let Some(wf) = wf.get() {
                    wf(
                        self.addr(),
                        std::mem::size_of::<T::DataType>(),
                        reg_value.data.into(),
                    )
                } else {
                    #[cfg(not(feature = "tracing_dummy"))]
                    // Fixed copy-paste: this is the write path, so point the
                    // user at the write hook, not the read hook.
                    panic!(
                        "Please, provide an handler for write with tracing::set_write_fn(callback);"
                    );
                }
            });
            #[cfg(not(feature = "tracing"))]
            self.ptr().write_volatile(reg_value.data);
        }
    }
    /// Writes a raw integer `value` to the register, bypassing [`RegValueT`].
    ///
    /// # Panics
    /// Same conditions as [`Self::write`].
    ///
    /// # Safety
    /// Same contract as [`Self::write`].
    #[inline(always)]
    pub unsafe fn write_raw(&self, value: T::DataType) {
        unsafe {
            #[cfg(feature = "tracing")]
            tracing::WRITE_FN.with(|wf| {
                if let Some(wf) = wf.get() {
                    wf(
                        self.addr(),
                        std::mem::size_of::<T::DataType>(),
                        value.into(),
                    )
                } else {
                    #[cfg(not(feature = "tracing_dummy"))]
                    // Fixed copy-paste: see `write` above.
                    panic!(
                        "Please, provide an handler for write with tracing::set_write_fn(callback);"
                    );
                }
            });
            #[cfg(not(feature = "tracing"))]
            self.ptr().write_volatile(value);
        }
    }
}
impl<T, A> Reg<T, A>
where
    T: RegSpec,
    A: Write,
    RegValueT<T>: Default,
{
    /// Writes the register starting from its `Default` value, after letting
    /// `f` adjust the fields.
    ///
    /// # Safety
    /// Same contract as `write`: the register must be valid to write in the
    /// current device state.
    #[inline(always)]
    pub unsafe fn init(&self, f: impl FnOnce(RegValueT<T>) -> RegValueT<T>) {
        unsafe {
            self.write(f(RegValueT::<T>::default()));
        }
    }
}
impl<T, A> Reg<T, A>
where
    T: RegSpec,
    A: Read + Write,
{
    /// Read-modify-write: reads the register, passes the value through `f`,
    /// and writes the result back.
    ///
    /// # Safety
    /// Same contract as `read` and `write`: the register must be valid to
    /// access in the current device state.
    #[inline(always)]
    pub unsafe fn modify(&self, f: impl FnOnce(RegValueT<T>) -> RegValueT<T>) {
        unsafe {
            self.write(f(self.read()));
        }
    }
}
/// Transparent wrapper for enumerated bitfield values: `Q` is the raw
/// integer type, `T` a marker type identifying the field's enumeration.
#[repr(transparent)]
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
pub struct EnumBitfieldStruct<Q: RegNumberT, T>(pub Q, PhantomData<T>);
impl<Q: RegNumberT, T> EnumBitfieldStruct<Q, T> {
    /// Wraps a raw value; no check that it is a declared enumerator.
    pub const fn new(value: Q) -> Self {
        Self(value, PhantomData)
    }
}
/// Widens the wrapped raw value to `u64` (via `Q: Into<u64>`).
impl<Q: RegNumberT, T> From<EnumBitfieldStruct<Q, T>> for u64 {
    #[inline(always)]
    fn from(value: EnumBitfieldStruct<Q, T>) -> Self {
        value.0.into()
    }
}
/// Truncating conversion from `u64`, mirroring the primitive `CastFrom`
/// impls; no check that the result is a declared enumerator.
impl<Q: RegNumberT, T> CastFrom<u64> for EnumBitfieldStruct<Q, T> {
    #[inline(always)]
    fn cast_from(val: u64) -> Self {
        Self(Q::cast_from(val), PhantomData)
    }
}
/// Wraps a raw value without validation (same as `new`).
impl<Q: RegNumberT, T> From<Q> for EnumBitfieldStruct<Q, T> {
    #[inline(always)]
    fn from(value: Q) -> Self {
        Self(value, PhantomData)
    }
}
/// Accessor for one (possibly dimensioned) bitfield of a register.
///
/// Const parameters:
/// - `START_OFFSET`: bit offset of element 0 within the register,
/// - `MASK`: unshifted field mask (applied after shifting — see `get`),
/// - `DIM`: number of elements of a dimensioned field,
/// - `DIM_INCREMENT`: per-index step added to the offset (in bits).
///
/// `ValueTypeRead`/`ValueTypeWrite` are the typed views used by `get`/`set`.
pub struct RegisterField<
    const START_OFFSET: usize,
    const MASK: u64,
    const DIM: u8,
    const DIM_INCREMENT: u8,
    ValueTypeRead,
    ValueTypeWrite,
    T,
    A,
> where
    T: RegSpec,
    A: Access,
{
    // Register snapshot this field accessor operates on.
    data: RegValueT<T>,
    // Element index used to compute the effective bit offset.
    index: u8,
    marker: PhantomData<(ValueTypeRead, ValueTypeWrite, A)>,
}
impl<
    const START_OFFSET: usize,
    const MASK: u64,
    const DIM: u8,
    const DIM_INCREMENT: u8,
    ValueTypeRead,
    ValueTypeWrite,
    T,
    A,
> RegisterField<START_OFFSET, MASK, DIM, DIM_INCREMENT, ValueTypeRead, ValueTypeWrite, T, A>
where
    T: RegSpec,
    A: Access,
{
    /// Builds a field accessor over a register snapshot; `index` selects the
    /// element of a dimensioned field.
    #[allow(dead_code)]
    #[inline(always)]
    pub(crate) fn from_register(data: RegValueT<T>, index: u8) -> Self {
        Self {
            data,
            index,
            marker: PhantomData,
        }
    }
    /// Unshifted field mask, narrowed to the register's data type.
    #[inline(always)]
    #[must_use]
    pub fn mask(&self) -> T::DataType {
        T::DataType::cast_from(MASK)
    }
    /// Bit offset of this field instance within the register.
    #[inline(always)]
    #[must_use]
    pub const fn offset(&self) -> usize {
        // Widen before multiplying: the previous `u8` product
        // `index * DIM_INCREMENT` could wrap above 255.
        START_OFFSET + self.index as usize * DIM_INCREMENT as usize
    }
}
impl<
    const START_OFFSET: usize,
    const MASK: u64,
    const DIM: u8,
    const DIM_INCREMENT: u8,
    ValueTypeRead,
    ValueTypeWrite,
    T,
    A,
> RegisterField<START_OFFSET, MASK, DIM, DIM_INCREMENT, ValueTypeRead, ValueTypeWrite, T, A>
where
    T: RegSpec,
    A: Read,
    ValueTypeRead: CastFrom<u64>,
{
    /// Extracts this field's value from the cached register word: shift the
    /// word down to the field's offset, apply the unshifted mask, then
    /// convert to the typed read view.
    #[inline(always)]
    pub fn get(&self) -> ValueTypeRead {
        // Widen before multiplying so `index * DIM_INCREMENT` cannot wrap
        // in u8 arithmetic.
        let offset = START_OFFSET + self.index as usize * DIM_INCREMENT as usize;
        let filtered: T::DataType = (self.data.data >> offset) & T::DataType::cast_from(MASK);
        ValueTypeRead::cast_from(filtered.into())
    }
}
impl<
    const START_OFFSET: usize,
    const MASK: u64,
    const DIM: u8,
    const DIM_INCREMENT: u8,
    ValueTypeRead,
    ValueTypeWrite,
    T,
    A,
> RegisterField<START_OFFSET, MASK, DIM, DIM_INCREMENT, ValueTypeRead, ValueTypeWrite, T, A>
where
    T: RegSpec,
    A: Write,
    u64: From<ValueTypeWrite>,
{
    /// Writes `value` into this field's bit positions, records the touched
    /// bits in the snapshot's modification mask, and returns the updated
    /// snapshot.
    #[inline(always)]
    #[must_use]
    pub fn set(mut self, value: ValueTypeWrite) -> RegValueT<T> {
        let mask = T::DataType::cast_from(MASK);
        // Narrow the caller's value and clip it to the field width.
        let value: T::DataType = T::DataType::cast_from(Into::<u64>::into(value)) & mask;
        // Widen before multiplying to avoid u8 wrap-around on large offsets.
        let offset = START_OFFSET + self.index as usize * DIM_INCREMENT as usize;
        let masked_offset: T::DataType = mask << offset;
        self.data.mask |= masked_offset;
        // Clear the field's old bits, then OR in the new value.
        self.data.data &= !masked_offset;
        self.data.data |= value << offset;
        self.data
    }
}
/// Accessor for a single-bit (boolean) register field; const parameters
/// mirror [`RegisterField`] with the mask fixed to one bit.
pub struct RegisterFieldBool<
    const START_OFFSET: usize,
    const DIM: u8,
    const DIM_INCREMENT: u8,
    T,
    A,
> where
    T: RegSpec,
    A: Access,
{
    // Register snapshot this field accessor operates on.
    data: RegValueT<T>,
    // Element index used to compute the effective bit offset.
    index: u8,
    marker: PhantomData<A>,
}
impl<const START_OFFSET: usize, const DIM: u8, const DIM_INCREMENT: u8, T, A>
    RegisterFieldBool<START_OFFSET, DIM, DIM_INCREMENT, T, A>
where
    T: RegSpec,
    A: Read,
{
    /// Returns `true` when this field's bit is set in the cached word.
    #[inline(always)]
    pub fn get(&self) -> bool {
        // Widen before multiplying so `index * DIM_INCREMENT` cannot wrap
        // in u8 arithmetic.
        let offset = START_OFFSET + self.index as usize * DIM_INCREMENT as usize;
        let filtered = (self.data.data.into() >> offset) & 1;
        filtered == 1
    }
}
impl<const START_OFFSET: usize, const DIM: u8, const DIM_INCREMENT: u8, T, A>
    RegisterFieldBool<START_OFFSET, DIM, DIM_INCREMENT, T, A>
where
    T: RegSpec,
    A: Write,
{
    /// Sets or clears this field's bit, records it in the snapshot's
    /// modification mask, and returns the updated snapshot.
    #[inline(always)]
    #[must_use]
    pub fn set(mut self, value: bool) -> RegValueT<T> {
        let value: T::DataType = if value {
            T::DataType::cast_from(1u64)
        } else {
            T::DataType::cast_from(0u64)
        };
        // Widen before multiplying to avoid u8 wrap-around on large offsets.
        let offset = START_OFFSET + self.index as usize * DIM_INCREMENT as usize;
        let masked_offset = T::DataType::cast_from(0x1u64) << offset;
        self.data.mask |= masked_offset;
        // Clear the bit, then OR in the new value.
        self.data.data &= !masked_offset;
        self.data.data |= value << offset;
        self.data
    }
}
impl<const START_OFFSET: usize, const DIM: u8, const DIM_INCREMENT: u8, T, A>
    RegisterFieldBool<START_OFFSET, DIM, DIM_INCREMENT, T, A>
where
    T: RegSpec,
    A: Access,
{
    /// Builds a boolean field accessor over a register snapshot; `index`
    /// selects the element of a dimensioned field.
    #[inline(always)]
    #[allow(dead_code)]
    pub(crate) fn from_register(data: RegValueT<T>, index: u8) -> Self {
        Self {
            data,
            index,
            marker: PhantomData,
        }
    }
    /// Unshifted field mask — always a single bit.
    #[inline(always)]
    #[must_use]
    pub fn mask(&self) -> T::DataType {
        T::DataType::cast_from(1)
    }
    /// Bit offset of this field instance within the register.
    #[inline(always)]
    #[must_use]
    pub const fn offset(&self) -> usize {
        // Widen before multiplying: the previous `u8` product
        // `index * DIM_INCREMENT` could wrap above 255.
        START_OFFSET + self.index as usize * DIM_INCREMENT as usize
    }
}
/// Zero-sized view over an array of `DIM` register clusters of type `T`,
/// laid out `DIM_INCREMENT` bytes apart (see `get_unchecked`, which scales
/// the index by `DIM_INCREMENT` on a byte pointer).
pub struct ClusterRegisterArray<T: Sized, const DIM: usize, const DIM_INCREMENT: usize> {
    _t: ::core::marker::PhantomData<T>,
}
impl<T: Sized, const DIM: usize, const DIM_INCREMENT: usize>
    ClusterRegisterArray<T, DIM, DIM_INCREMENT>
{
    /// Number of elements in the array (the `DIM` const parameter).
    #[inline(always)]
    pub const fn len(&self) -> usize {
        DIM
    }
    /// `true` when the array has zero elements.
    #[inline(always)]
    pub const fn is_empty(&self) -> bool {
        DIM == 0
    }
    /// Iterates over the elements by reference.
    #[inline(always)]
    pub fn iter(&self) -> impl ::core::iter::ExactSizeIterator<Item = &T> {
        self.into_iter()
    }
    /// Returns the `index`-th element.
    ///
    /// # Panics
    /// Panics if `index >= DIM`.
    #[inline]
    pub const fn get(&self, index: usize) -> &T {
        assert!(index < DIM);
        // SAFETY: `index` was bounds-checked by the assert above.
        unsafe { self.get_unchecked(index) }
    }
    /// Returns the `index`-th element without a bounds check.
    ///
    /// # Safety
    /// `index` must be `< DIM`; otherwise the computed address lies outside
    /// the array.
    #[inline(always)]
    pub const unsafe fn get_unchecked(&self, index: usize) -> &T {
        // Elements are DIM_INCREMENT bytes apart, hence the byte-pointer math.
        unsafe { &*(self.as_ptr().add(index * DIM_INCREMENT) as *const _) }
    }
    /// Reinterprets `ptr` as a `'static` reference to this array.
    #[allow(dead_code)]
    #[inline(always)]
    pub(crate) const unsafe fn from_ptr(ptr: *mut u8) -> &'static Self {
        // SAFETY: the caller guarantees `ptr` is the base address of the
        // cluster and stays valid for the program lifetime.
        unsafe { &*(ptr as *const Self) }
    }
    /// Base address as a byte pointer; the array is zero-sized (only
    /// PhantomData), so its own location is the MMIO base.
    #[inline(always)]
    const fn as_ptr(&self) -> *mut u8 {
        self as *const _ as *mut _
    }
}
/// `array[index]` sugar; delegates to `get` and therefore panics when
/// `index >= DIM`.
impl<T: Sized, const DIM: usize, const DIM_INCREMENT: usize> ::core::ops::Index<usize>
    for ClusterRegisterArray<T, DIM, DIM_INCREMENT>
{
    type Output = T;
    #[inline(always)]
    fn index(&self, index: usize) -> &T {
        self.get(index)
    }
}
/// Enables `for x in &array`, yielding `&T` references in index order.
impl<'a, T: Sized, const DIM: usize, const DIM_INCREMENT: usize> IntoIterator
    for &'a ClusterRegisterArray<T, DIM, DIM_INCREMENT>
{
    type Item = &'a T;
    type IntoIter = ClusterRegisterArrayIterator<'a, T, DIM, DIM_INCREMENT>;
    #[inline(always)]
    fn into_iter(self) -> Self::IntoIter {
        // Start iteration at the first element.
        ClusterRegisterArrayIterator {
            array: self,
            index: 0,
        }
    }
}
/// By-reference iterator over a [`ClusterRegisterArray`].
pub struct ClusterRegisterArrayIterator<'a, T: Sized, const DIM: usize, const DIM_INCREMENT: usize>
{
    // Array being iterated.
    array: &'a ClusterRegisterArray<T, DIM, DIM_INCREMENT>,
    // Index of the next element to yield.
    index: usize,
}
impl<'a, T: Sized, const DIM: usize, const DIM_INCREMENT: usize> Iterator
    for ClusterRegisterArrayIterator<'a, T, DIM, DIM_INCREMENT>
{
    type Item = &'a T;
    /// Yields a reference to the next element, or `None` once all `DIM`
    /// elements have been produced.
    #[inline(always)]
    fn next(&mut self) -> Option<&'a T> {
        let current = self.index;
        if current >= self.array.len() {
            return None;
        }
        self.index = current + 1;
        Some(self.array.get(current))
    }
    /// Exact remaining count — keeps `ExactSizeIterator` honest.
    #[inline(always)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let remaining = self.array.len() - self.index;
        (remaining, Some(remaining))
    }
}
// `size_hint` above returns the exact remaining length, so the default
// `ExactSizeIterator::len` implementation is correct.
impl<T: Sized, const DIM: usize, const DIM_INCREMENT: usize> ExactSizeIterator
    for ClusterRegisterArrayIterator<'_, T, DIM, DIM_INCREMENT>
{
}