#![no_std]

use core::cell::UnsafeCell;
use core::mem::MaybeUninit;
use core::sync::atomic::{AtomicUsize, Ordering};
use core::{ptr, slice};
use util::*;

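/// A fixed-capacity, array-backed ring buffer with atomic `start`/`end`
/// cursors and one `AtomicMemoryRing` state word per slot, tracking each
/// cell's cycle and whether it currently holds an initialized `T`.
///
/// A minimal usage sketch (assumes the `util` crate supplies
/// `AtomicMemoryRing`, `MemoryRing`, and `MemoryState` with the semantics
/// relied on below):
///
/// ```ignore
/// let ring: Ring<u32, 8> = Ring::new();
/// assert!(ring.push(1).is_ok());
/// assert_eq!(ring.pop().ok(), Some(1));
/// ```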
pub struct Ring<T, const N: usize> {
    // `UnsafeCell` lets `push`/`pop` write through a pointer derived from
    // `&self`; the per-slot `ops` protocol provides the synchronization.
    buf: UnsafeCell<MaybeUninit<[T; N]>>,
    start: AtomicUsize,
    end: AtomicUsize,
    pub ops: [AtomicMemoryRing; N],
}

// Slot handoff is synchronized through `ops`, so sharing the ring across
// threads is sound whenever `T` itself may cross threads.
unsafe impl<T: Send, const N: usize> Sync for Ring<T, N> {}
impl<T, const N: usize> Default for Ring<T, N> {
    fn default() -> Self {
        Self::new()
    }
}
impl<T, const N: usize> Ring<T, N> {
    const CAPACITY: usize = N;
    const WRAP_MAX: usize = MemoryRing::max_idx(Self::CAPACITY);
    #[allow(clippy::declare_interior_mutable_const)]
    const INIT_STATE: AtomicMemoryRing = AtomicMemoryRing::new(MemoryRing::INIT);
    pub const fn new() -> Self {
        Ring {
            buf: UnsafeCell::new(MaybeUninit::uninit()),
            start: AtomicUsize::new(0),
            end: AtomicUsize::new(0),
            ops: [Self::INIT_STATE; N],
        }
    }
    fn ptr(&self) -> *mut T {
        self.buf.get() as *mut T
    }
    pub const fn capacity(&self) -> usize {
        Self::CAPACITY
    }

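    /// Distance from `start` to `end` in the extended index space: the
    /// cursors advance modulo `WRAP_MAX` (`MemoryRing::max_idx(N)`) rather
    /// than modulo `N`, which keeps a full ring distinguishable from an
    /// empty one.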
    fn wrap_len(&self, start: usize, end: usize) -> usize {
        if end >= start {
            end - start
        } else {
            Self::WRAP_MAX - start + end
        }
    }
    pub fn len(&self) -> usize {
        let start = self.start.load(Ordering::Relaxed);
        let end = self.end.load(Ordering::Relaxed);
        self.wrap_len(start, end)
    }
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
    pub fn is_full(&self) -> bool {
        self.len() >= Self::CAPACITY
    }
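    // Raw slot accessors. Safety: `off` must be in bounds (`off < N`), and
    // the slot's `ops` state must show the cell initialized (for
    // `buffer_read`) or free to overwrite (for `buffer_write`).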
    #[inline]
    unsafe fn buffer_read(&self, off: usize) -> T {
        ptr::read(self.ptr().add(off))
    }
    #[inline]
    unsafe fn buffer_write(&self, off: usize, value: T) {
        ptr::write(self.ptr().add(off), value);
    }
    #[inline]
    fn index(&self, idx: usize) -> usize {
        idx % Self::CAPACITY
    }
    #[inline]
    fn next_idx(&self, old: usize) -> usize {
        if old == Self::WRAP_MAX - 1 {
            0
        } else {
            old + 1
        }
    }
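    // Cursor CAS helpers: try to advance `end`/`start` from `old` to the
    // next index, returning the cursor value actually observed on failure so
    // the caller can retry from it.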
    fn add_ptr_end(&self, old: usize) -> Result<usize, usize> {
        let new = self.next_idx(old);
        self.end
            .compare_exchange_weak(old, new, Ordering::SeqCst, Ordering::Relaxed)
    }
    fn add_ptr_start(&self, old: usize) -> Result<usize, usize> {
        let new = self.next_idx(old);
        self.start
            .compare_exchange_weak(old, new, Ordering::SeqCst, Ordering::Relaxed)
    }
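    /// Returns the live elements as a pair of slices in FIFO order; the
    /// second slice holds the wrapped-around tail. Taking `&mut self` rules
    /// out concurrent `push`/`pop` calls aliasing the returned borrows.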
    pub fn as_mut_slices(&mut self) -> (&mut [T], &mut [T]) {
        let ptr = self.ptr();
        let start = self.start.load(Ordering::Relaxed);
        let end = self.end.load(Ordering::Relaxed);
        if start == end {
            return (&mut [], &mut []);
        }
        let start = self.index(start);
        let end = self.index(end);
        if end > start {
            (
                unsafe { slice::from_raw_parts_mut(ptr.add(start), end - start) },
                &mut [],
            )
        } else {
            (
                unsafe { slice::from_raw_parts_mut(ptr.add(start), N - start) },
                unsafe { slice::from_raw_parts_mut(ptr, end) },
            )
        }
    }
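    /// Drops every live element in place and resets the cursors and per-slot
    /// states to their freshly-constructed values.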
    pub fn clear(&mut self) {
        let (a, b) = self.as_mut_slices();
        unsafe { ptr::drop_in_place(a) };
        unsafe { ptr::drop_in_place(b) };
        self.end.store(0, Ordering::Relaxed);
        self.start.store(0, Ordering::Relaxed);
        self.ops = [Self::INIT_STATE; N];
    }

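    /// Pops the oldest element.
    ///
    /// Once the slot under the `start` cursor reads `Initialized` for the
    /// current cycle, the cursor is advanced with a CAS (retrying on
    /// contention), the value is read out, and the slot is marked
    /// `Uninitialized` for the next cycle. Otherwise the observed slot state
    /// is returned in the error.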
    pub fn pop(&self) -> Result<T, Error<()>> {
        #[cfg(target_os = "none")]
        let _cs = interrupt::CriticalSection::new();
        let mut start = self.start.load(Ordering::Relaxed);
        loop {
            let cycle = MemoryRing::cycle_of_idx(start, Self::CAPACITY);
            let index = self.index(start);
            let expect = MemoryRing::new(cycle, MemoryState::Initialized);
            let op = self.ops[index].load(Ordering::Acquire);
            let state = op.state();
            if op >= expect {
                if let Err(i) = self.add_ptr_start(start) {
                    start = i;
                    continue;
                } else {
                    let ret = unsafe { self.buffer_read(index) };
                    let op = MemoryRing::new(cycle + 1, MemoryState::Uninitialized);
                    self.ops[index].store(op, Ordering::Release);
                    return Ok(ret);
                }
            } else {
                return Err(Error {
                    state,
                    input: (),
                    retry: false,
                });
            }
        }
    }

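    /// Pushes `value` onto the ring.
    ///
    /// Mirrors `pop`: once the slot under the `end` cursor reads
    /// `Uninitialized` for the current cycle, the cursor is advanced with a
    /// CAS (retrying on contention), the value is written, and the slot is
    /// marked `Initialized`. On failure, `value` is handed back in the error.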
    pub fn push(&self, value: T) -> Result<(), Error<T>> {
        #[cfg(target_os = "none")]
        let _cs = interrupt::CriticalSection::new();
        let mut end = self.end.load(Ordering::Relaxed);
        loop {
            let cycle = MemoryRing::cycle_of_idx(end, Self::CAPACITY);
            let index = self.index(end);
            let expect = MemoryRing::new(cycle, MemoryState::Uninitialized);
            let op = self.ops[index].load(Ordering::Acquire);
            let state = op.state();
            if op >= expect {
                if let Err(i) = self.add_ptr_end(end) {
                    end = i;
                    continue;
                } else {
                    unsafe { self.buffer_write(index, value) };
                    let op = MemoryRing::new(cycle, MemoryState::Initialized);
                    self.ops[index].store(op, Ordering::Release);
                    return Ok(());
                }
            } else {
                return Err(Error {
                    state,
                    input: value,
                    retry: false,
                });
            }
        }
    }
}
impl<T, const N: usize> Drop for Ring<T, N> {
    fn drop(&mut self) {
        self.clear()
    }
}
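
// A minimal smoke test of the push/pop protocol: a sketch that assumes the
// `util` crate's `MemoryRing::INIT` marks every slot uninitialized at cycle 0,
// and that tests run on a hosted target where libtest is available.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn push_pop_roundtrip() {
        let ring: Ring<u32, 4> = Ring::new();
        assert!(ring.is_empty());
        assert!(ring.push(1).is_ok());
        assert!(ring.push(2).is_ok());
        assert_eq!(ring.len(), 2);
        // Values come back in FIFO order; popping an empty ring reports the
        // observed slot state in the error.
        assert_eq!(ring.pop().ok(), Some(1));
        assert_eq!(ring.pop().ok(), Some(2));
        assert!(ring.pop().is_err());
    }
}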