1#![no_std]
2
3use core::mem::MaybeUninit;
4use core::sync::atomic::{AtomicUsize, Ordering};
5use core::{ptr, slice};
6use util::*;
7
/// Fixed-capacity ring buffer with inline storage, designed for use from
/// multiple contexts via `&self` CAS loops in `push`/`pop` (see the impl).
///
/// `start`/`end` are raw, monotonically wrapping counters that wrap at
/// `MemoryRing::max_idx(N)` rather than at `N`; each slot's lifecycle is
/// tracked by a per-slot atomic tag in `ops`.
pub struct Ring<T, const N: usize> {
    // Inline element storage; slots are only initialized by `push`.
    // NOTE(review): `push`/`pop` write through a pointer derived from
    // `&self` — without an `UnsafeCell` wrapper this violates Rust's
    // aliasing rules. Consider `UnsafeCell<MaybeUninit<[T; N]>>`.
    buf: MaybeUninit<[T; N]>,
    // Raw (wrapping) counter of the oldest element (consumer side).
    start: AtomicUsize,
    // Raw (wrapping) counter one past the newest element (producer side).
    end: AtomicUsize,
    // Per-slot (cycle, state) tags coordinating producers and consumers.
    pub ops: [AtomicMemoryRing; N],
}
impl<T, const N: usize> Ring<T, N> {
    // Number of element slots.
    const CAPACITY: usize = N;
    // Exclusive wrap bound for the raw `start`/`end` counters; supplied by
    // the project's `MemoryRing` (presumably a multiple of the capacity so
    // slot indices stay consistent across wraps — TODO confirm in `util`).
    const WRAP_MAX: usize = MemoryRing::max_idx(Self::CAPACITY);
    // Initial per-slot tag, used by `new` and `clear`.
    const INIT_STATE: AtomicMemoryRing = AtomicMemoryRing::new(MemoryRing::INIT);

    /// Creates an empty ring. No element slot is initialized until `push`.
    pub const fn new() -> Self {
        Ring {
            buf: MaybeUninit::uninit(),
            start: AtomicUsize::new(0),
            end: AtomicUsize::new(0),
            ops: [Self::INIT_STATE; N],
        }
    }

    /// Raw pointer to slot 0 of the element storage.
    ///
    /// NOTE(review): the const-to-mut cast means `push`/`pop` later write
    /// through a pointer derived from `&self`; without an `UnsafeCell`
    /// around `buf` this is unsound under Rust's aliasing rules — consider
    /// changing the field to `UnsafeCell<MaybeUninit<[T; N]>>`.
    fn ptr(&self) -> *mut T {
        self.buf.as_ptr() as *mut T
    }

    /// Maximum number of elements the ring can hold.
    pub const fn capacity(&self) -> usize {
        Self::CAPACITY
    }

    /// Distance from `start` to `end` on the wrapping counter circle.
    /// The raw counters wrap at `WRAP_MAX`, not at the capacity, which is
    /// why the wrapped branch adds `WRAP_MAX` rather than `N`.
    fn wrap_len(&self, start: usize, end: usize) -> usize {
        if end >= start {
            end - start
        } else {
            Self::WRAP_MAX - start + end
        }
    }

    /// Number of stored elements at the moment of the two `Relaxed` loads.
    /// Under concurrent pushes/pops this is only a best-effort snapshot.
    pub fn len(&self) -> usize {
        let start = self.start.load(Ordering::Relaxed);
        let end = self.end.load(Ordering::Relaxed);
        self.wrap_len(start, end)
    }

    /// `true` when the snapshot length is zero.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// `true` when the snapshot length has reached the capacity.
    pub fn is_full(&self) -> bool {
        self.len() >= Self::CAPACITY
    }

    /// Moves the value out of slot `off` without dropping it in place.
    ///
    /// # Safety
    /// `off` must be `< N`, the slot must contain an initialized value, and
    /// no other party may read or overwrite the slot until its tag is
    /// updated; the caller takes ownership of the returned `T`.
    #[inline]
    unsafe fn buffer_read(&self, off: usize) -> T {
        ptr::read(self.ptr().add(off))
    }

    /// Writes `value` into slot `off` without dropping previous contents.
    ///
    /// # Safety
    /// `off` must be `< N` and the slot must be logically uninitialized
    /// (any old value would be leaked, not dropped), with no concurrent
    /// access to the slot.
    #[inline]
    unsafe fn buffer_write(&self, off: usize, value: T) {
        ptr::write(self.ptr().add(off), value);
    }

    /// Maps a raw wrapping counter value to a slot index in `0..N`.
    #[inline]
    fn index(&self, idx: usize) -> usize {
        idx % Self::CAPACITY
    }

    /// Successor of a raw counter value, wrapping `WRAP_MAX - 1` to 0.
    #[inline]
    fn next_idx(&self, old: usize) -> usize {
        if old == Self::WRAP_MAX - 1 {
            0
        } else {
            old + 1
        }
    }

    /// CAS-advances the `end` (producer) counter from `old` to its
    /// successor. `Ok` means this caller reserved the slot; `Err` carries
    /// the value currently stored (which, because the CAS is the weak
    /// variant, may spuriously equal `old`) so the caller can retry.
    fn add_ptr_end(&self, old: usize) -> Result<usize, usize> {
        let new = self.next_idx(old);
        self.end
            .compare_exchange_weak(old, new, Ordering::SeqCst, Ordering::Relaxed)
    }

    /// CAS-advances the `start` (consumer) counter; see `add_ptr_end`.
    fn add_ptr_start(&self, old: usize) -> Result<usize, usize> {
        let new = self.next_idx(old);
        self.start
            .compare_exchange_weak(old, new, Ordering::SeqCst, Ordering::Relaxed)
    }

    /// Returns the live elements as `(front, back)` slices in queue order;
    /// the second slice is non-empty only when the data wraps around the
    /// end of the buffer. `&mut self` guarantees exclusive access, so the
    /// `Relaxed` loads observe the final counter values.
    pub fn as_mut_slices(&mut self) -> (&mut [T], &mut [T]) {
        let ptr = self.ptr();
        let start = self.start.load(Ordering::Relaxed);
        let end = self.end.load(Ordering::Relaxed);
        if start == end {
            return (&mut [], &mut []);
        }
        // From here on, work with slot indices in 0..N.
        let start = self.index(start);
        let end = self.index(end);
        if end > start {
            (
                // SAFETY: contiguous initialized elements in [start, end),
                // exclusively borrowed via `&mut self`.
                unsafe { slice::from_raw_parts_mut(ptr.add(start), end - start) },
                &mut [],
            )
        } else {
            // Wrapped case — this branch also covers the completely-full
            // ring, where both slot indices coincide: [start, N) then [0, end).
            (
                unsafe { slice::from_raw_parts_mut(ptr.add(start), N - start) },
                unsafe { slice::from_raw_parts_mut(ptr, end) },
            )
        }
    }

    /// Drops every stored element and resets the ring to its initial
    /// empty state (counters at 0, all slot tags back to `INIT_STATE`).
    pub fn clear(&mut self) {
        let (a, b) = self.as_mut_slices();
        // SAFETY: both slices cover initialized elements this `&mut self`
        // exclusively owns; after dropping, the tags below mark the slots
        // uninitialized again.
        unsafe { ptr::drop_in_place(a) };
        unsafe { ptr::drop_in_place(b) };
        self.end.store(0, Ordering::Relaxed);
        self.start.store(0, Ordering::Relaxed);
        self.ops = [Self::INIT_STATE; N];
    }

    /// Removes and returns the oldest element.
    ///
    /// On bare-metal targets (`target_os = "none"`) the whole operation
    /// runs inside a critical section so interrupt handlers cannot
    /// interleave with the CAS loop.
    ///
    /// # Errors
    /// When the slot at `start` is not yet `Initialized` for the current
    /// cycle (ring empty, or a concurrent push has not finished
    /// publishing), returns an `Error` carrying the observed slot state.
    pub fn pop(&self) -> Result<T, Error<()>> {
        #[cfg(target_os = "none")]
        let _cs = interrupt::CriticalSection::new();
        let mut start = self.start.load(Ordering::Relaxed);
        loop {
            // Lap number and slot index for the raw counter value.
            let cycle = MemoryRing::cycle_of_idx(start, Self::CAPACITY);
            let index = self.index(start);
            // A slot is consumable once its tag reaches (cycle, Initialized).
            let expect = MemoryRing::new(cycle, MemoryState::Initialized);
            let op = self.ops[index].load(Ordering::Acquire);
            let state = op.state();
            if op >= expect {
                if let Err(i) = self.add_ptr_start(start) {
                    // Lost the race (or a spurious weak-CAS failure):
                    // retry from the counter value currently stored.
                    start = i;
                    continue;
                } else {
                    // We own this slot: move the value out, then hand the
                    // slot to the *next* cycle's producer with a Release
                    // store.
                    let ret = unsafe { self.buffer_read(index) };
                    let op = MemoryRing::new(cycle + 1, MemoryState::Uninitialized);
                    self.ops[index].store(op, Ordering::Release);
                    return Ok(ret);
                }
            } else {
                return Err(Error {
                    state,
                    input: (),
                    retry: false,
                });
            }
        }
    }

    /// Appends `value` at the back of the ring.
    ///
    /// On bare-metal targets (`target_os = "none"`) the whole operation
    /// runs inside a critical section so interrupt handlers cannot
    /// interleave with the CAS loop.
    ///
    /// # Errors
    /// When the slot at `end` is not reusable for the current cycle (ring
    /// full, or a concurrent pop has not yet released the slot), returns
    /// an `Error` carrying the observed slot state and the rejected
    /// `value` so the caller can retry without losing it.
    pub fn push(&self, value: T) -> Result<(), Error<T>> {
        #[cfg(target_os = "none")]
        let _cs = interrupt::CriticalSection::new();
        let mut end = self.end.load(Ordering::Relaxed);
        loop {
            // Lap number and slot index for the raw counter value.
            let cycle = MemoryRing::cycle_of_idx(end, Self::CAPACITY);
            let index = self.index(end);
            // A slot is writable once its tag reaches (cycle, Uninitialized).
            let expect = MemoryRing::new(cycle, MemoryState::Uninitialized);
            let op = self.ops[index].load(Ordering::Acquire);
            let state = op.state();
            if op >= expect {
                if let Err(i) = self.add_ptr_end(end) {
                    // Lost the race (or a spurious weak-CAS failure):
                    // retry with the counter value currently stored.
                    end = i;
                    continue;
                } else {
                    // We reserved the slot: publish the value, then mark
                    // it Initialized with a Release store so a consumer's
                    // Acquire load sees the completed write.
                    unsafe { self.buffer_write(index, value) };
                    let op = MemoryRing::new(cycle, MemoryState::Initialized);
                    self.ops[index].store(op, Ordering::Release);
                    return Ok(());
                }
            } else {
                return Err(Error {
                    state,
                    input: value,
                    retry: false,
                });
            }
        }
    }
}
178impl<T, const N: usize> Drop for Ring<T, N> {
179 fn drop(&mut self) {
180 self.clear()
181 }
182}