// embedded_buffer_pool/lib.rs
1#![no_std]
2
3//! Fixed-size [`BufferPool`] for [`no_std`] firmware using [`embassy_sync`].
4//!
5//! The pool holds `N` values of type `T` (**1..=32** slots). A bitmask tracks which
6//! slots are free; [`BufferGuard`] and [`MappedBufferGuard`] return a slot to the pool on
7//! [`Drop`]. When the pool is empty, [`BufferPool::take`] registers a waker and completes
8//! when another task releases a buffer.
9//!
10//! # Requirements
11//!
12//! - Acquire APIs take `&'static self`: the pool is intended to live in a `static` item.
13//! - Choose a [`RawMutex`] (`M`) compatible with your executor (for example the
14//! critical-section mutex in `embassy_sync`).
15//! - To build the backing `[T; N]` in a `const` context, use `[expr; N]` when that is
16//! valid for your type, or [`array_new!`] when you need distinct elements or `T` is not
17//! [`Copy`] (repeat-array syntax requires [`Copy`] in the general case).
18//!
19//! # Example
20//!
21//! ```rust,ignore
22//! use embedded_buffer_pool::BufferPool;
23//! use embassy_sync::blocking_mutex::raw::CriticalSectionRawMutex;
24//!
25//! /// Shared pool: two 64-byte packet buffers.
26//! static POOL: BufferPool<CriticalSectionRawMutex, [u8; 64], 2> =
27//! BufferPool::new([[0u8; 64]; 2]);
28//!
29//! fn try_fill() -> Option<()> {
30//! let mut guard = POOL.try_take()?;
31//! guard[0] = 0xAA;
32//! Some(())
33//! }
34//!
35//! async fn wait_for_buffer() {
36//! let mut guard = POOL.take().await;
37//! guard[0] = 0xBB;
38//! }
39//! ```
40//!
41//! [`embassy_sync`]: https://docs.rs/embassy-sync
42//! [`no_std`]: https://doc.rust-lang.org/nomicon/no-std.html
43//!
44//! For arrays built by repeating an expression (constructors, `const` values, literals),
//! see [`array_new!`].
46
47mod macros;
48
49use core::cell::UnsafeCell;
50use core::future::poll_fn;
51use core::ops::{Deref, DerefMut};
52use core::task::Poll;
53
54use embassy_sync::{
55 blocking_mutex::{Mutex, raw::RawMutex},
56 waitqueue::WakerRegistration,
57};
58
/// Raw pointer into the pool's backing array.
///
/// Wrapping the pointer lets the guard types opt back into [`Send`]/[`Sync`]
/// with explicit bounds, instead of inheriting the blanket `!Send`/`!Sync` of
/// `*mut T`.
#[repr(transparent)]
#[derive(Debug)]
struct BufferPtr<T: ?Sized>(*mut T);

// SAFETY: a `BufferPtr` held by a guard is the unique handle to its pool slot
// (the slot's `available` bit stays cleared until the guard drops), so moving
// it to another thread moves exclusive access to a `T`; that is sound exactly
// when `T: Send`. An unconditional impl here would make `BufferGuard<M, T>`
// `Send` even for `T: !Send`, which is unsound because the guard derefs to
// `&mut T`.
unsafe impl<T: ?Sized + Send> Send for BufferPtr<T> {}
// SAFETY: sharing a `&BufferPtr` only ever yields `&T` (through the guards'
// `Deref`), which is sound exactly when `T: Sync`.
unsafe impl<T: ?Sized + Sync> Sync for BufferPtr<T> {}
65
/// Shared mutable pool bookkeeping, protected by the pool's blocking mutex.
struct State {
    /// Bitmask of free slots: bit `i` set means slot `i` is available.
    available: u32,
    /// Registration slot for a task blocked in [`BufferPool::take`].
    /// NOTE(review): this holds a single waker, so a later registration
    /// replaces an earlier one — confirm single-waiter use is intended.
    waker: WakerRegistration,
}
71
/// Pool of `N` buffers of type `T`, synchronized with mutex `M`.
///
/// Free slots are tracked with a `u32` bitmask, so **`N` must be in 1..=32**.
pub struct BufferPool<M: RawMutex, T, const N: usize> {
    /// Backing storage; element `i` is handed out exclusively while bit `i`
    /// of `State::available` is cleared.
    buffer: UnsafeCell<[T; N]>,
    /// Free-slot bitmask plus waker, guarded by the blocking mutex `M`.
    state: Mutex<M, State>,
}
79
// SAFETY: sending the pool moves its owned `[T; N]`, so `T: Send` is required;
// `M` travels with it, hence `M: Send`.
unsafe impl<M: RawMutex + Send, T: Send, const N: usize> Send for BufferPool<M, T, N> {}
// SAFETY: a shared pool hands out exclusive `&mut T` guards to other tasks via
// `take`/`try_take`, which requires `T: Send`; all access to `State` is
// serialized through the mutex, which needs `M: Sync` to be shared.
unsafe impl<M: RawMutex + Sync, T: Send, const N: usize> Sync for BufferPool<M, T, N> {}
82
83impl<M: RawMutex, T, const N: usize> BufferPool<M, T, N> {
84 /// Creates a pool with the given backing storage; all slots start available.
85 ///
86 /// # Valid `N`
87 ///
88 /// `N` must be in **1..=32**. Otherwise [`BufferPool::new`] panics when run, or fails
89 /// const evaluation if used in a `const` item.
90 pub const fn new(buffer: [T; N]) -> Self {
91 assert!(N > 0 && N <= 32);
92 Self {
93 buffer: UnsafeCell::new(buffer),
94 state: Mutex::new(State {
95 available: u32::MAX >> (32 - N),
96 waker: WakerRegistration::new(),
97 }),
98 }
99 }
100
101 /// Tries to take one buffer without blocking. Returns [`None`] if the pool is empty.
102 pub fn try_take(&'static self) -> Option<BufferGuard<M, T>> {
103 unsafe {
104 self.state.lock_mut(|state| {
105 if state.available == 0 {
106 return None;
107 }
108 let index = state.available.trailing_zeros() as usize;
109 state.available &= !(1 << index);
110 let buffer = &mut (*self.buffer.get())[index];
111 Some(BufferGuard {
112 store: &self.state,
113 ptr: BufferPtr(buffer),
114 index,
115 })
116 })
117 }
118 }
119
120 /// Waits until a buffer is available, then returns a guard.
121 ///
122 /// If the pool is empty, the current task’s waker is registered; when another task
123 /// drops a [`BufferGuard`] or [`MappedBufferGuard`], waiters are woken.
124 pub fn take(&'static self) -> impl Future<Output = BufferGuard<M, T>> {
125 poll_fn(|cx| unsafe {
126 self.state.lock_mut(|state| {
127 if state.available == 0 {
128 state.waker.register(cx.waker());
129 return Poll::Pending;
130 }
131 let index = state.available.trailing_zeros() as usize;
132 state.available &= !(1 << index);
133 let buffer = &mut (*self.buffer.get())[index];
134 Poll::Ready(BufferGuard {
135 store: &self.state,
136 ptr: BufferPtr(buffer),
137 index,
138 })
139 })
140 })
141 }
142}
143
/// Exclusive handle to one pooled `T`. Releases the slot back to the pool on [`Drop`].
///
/// Derefs to `T` via [`Deref`] / [`DerefMut`]. Use [`BufferGuard::map`] to borrow a
/// subfield or slice while keeping the same pool slot.
pub struct BufferGuard<M: RawMutex + 'static, T> {
    /// Pool state used by `Drop` to mark slot `index` free again.
    store: &'static Mutex<M, State>,
    /// Pointer to the pool slot this guard exclusively owns.
    ptr: BufferPtr<T>,
    /// Slot index: bit `index` in `State::available`.
    index: usize,
}
153
154impl<M: RawMutex + 'static, T> Drop for BufferGuard<M, T> {
155 fn drop(&mut self) {
156 unsafe {
157 self.store.lock_mut(|state| {
158 state.available |= 1 << self.index;
159 state.waker.wake();
160 });
161 }
162 }
163}
164
impl<M: RawMutex + 'static, T> Deref for BufferGuard<M, T> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        // SAFETY: the guard exclusively owns its pool slot (the slot's
        // `available` bit stays cleared until `Drop`), so the pointer is valid
        // and unaliased for the guard's lifetime.
        unsafe { &*self.ptr.0 }
    }
}
172
impl<M: RawMutex + 'static, T> DerefMut for BufferGuard<M, T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        // SAFETY: as in `Deref`, the guard exclusively owns the slot; `&mut
        // self` additionally guarantees no other borrow of the guard exists.
        unsafe { &mut *self.ptr.0 }
    }
}
178
impl<M: RawMutex + 'static, T> BufferGuard<M, T> {
    /// Narrows the guard to a `&mut U` derived from the buffer (for example a slice into
    /// a larger `[u8]`). The original guard is consumed without running its [`Drop`]; the
    /// returned [`MappedBufferGuard`] still returns the pool slot when dropped.
    ///
    /// This is an **associated function**, not a method: the guard is passed as `orig`,
    /// not `self`. You call it as `BufferGuard::map(guard, f)`. If the first parameter
    /// were `self`, `guard.map(...)` would always resolve to this routine and **shadow**
    /// a `map` method on `T`; with `orig: Self`, `guard.map(...)` can still go through
    /// [`Deref`] / [`DerefMut`] to `T::map` when you want the inner value’s API.
    pub fn map<U: ?Sized>(
        orig: Self,
        fun: impl FnOnce(&mut T) -> &mut U,
    ) -> MappedBufferGuard<M, U> {
        let store = orig.store;
        let index = orig.index;
        // SAFETY: `orig` exclusively owns its slot (its `available` bit is
        // cleared), so forming a unique reference from its pointer is sound.
        let value = fun(unsafe { &mut *orig.ptr.0 });
        // Ownership of the slot is moved to the mapped guard; do not run `BufferGuard::drop`.
        // (If `fun` panics above, `orig` is still live and drops normally, so
        // the slot is released rather than leaked.)
        core::mem::forget(orig);
        MappedBufferGuard {
            store,
            value,
            index,
        }
    }
}
205
/// Like [`BufferGuard`], but derefs to a borrowed `U` (often a slice or inner field).
///
/// Dropping this guard marks the same pool index free as the original [`BufferGuard`].
pub struct MappedBufferGuard<M: RawMutex + 'static, U: ?Sized> {
    /// Pool state used by `Drop` to mark slot `index` free again.
    store: &'static Mutex<M, State>,
    /// Slot index inherited from the originating [`BufferGuard`].
    index: usize,
    /// Pointer to the mapped view inside the still-owned pool slot.
    ///
    /// NOTE(review): a bare `*mut U` makes this guard `!Send`/`!Sync`, unlike
    /// [`BufferGuard`], whose pointer is wrapped — confirm the asymmetry is
    /// intended.
    value: *mut U,
}
214
215impl<M: RawMutex + 'static, U: ?Sized> Drop for MappedBufferGuard<M, U> {
216 fn drop(&mut self) {
217 unsafe {
218 self.store.lock_mut(|state| {
219 state.available |= 1 << self.index;
220 state.waker.wake();
221 });
222 }
223 }
224}
225
impl<M: RawMutex + 'static, U: ?Sized> Deref for MappedBufferGuard<M, U> {
    type Target = U;

    fn deref(&self) -> &Self::Target {
        // SAFETY: `value` was derived from a `&mut` borrow of the pool slot
        // this guard still owns (its `available` bit is cleared), so it is
        // valid and unaliased for the guard's lifetime.
        unsafe { &*self.value }
    }
}
233
impl<M: RawMutex + 'static, U: ?Sized> DerefMut for MappedBufferGuard<M, U> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        // SAFETY: as in `Deref`, the slot is still owned; `&mut self`
        // additionally guarantees no other borrow of the guard exists.
        unsafe { &mut *self.value }
    }
}
239
impl<M: RawMutex + 'static, U: ?Sized> MappedBufferGuard<M, U> {
    /// Chains [`BufferGuard::map`]: re-borrows `&mut U` into `&mut V` without releasing the
    /// pool slot until the final mapped guard is dropped.
    ///
    /// Same as [`BufferGuard::map`]: `orig: Self` instead of `self`, so call
    /// `MappedBufferGuard::map(guard, f)` and keep `guard.map(...)` available for `U::map`
    /// via [`Deref`] / [`DerefMut`].
    pub fn map<V: ?Sized>(
        orig: Self,
        fun: impl FnOnce(&mut U) -> &mut V,
    ) -> MappedBufferGuard<M, V> {
        let store = orig.store;
        let index = orig.index;
        // SAFETY: `orig` still owns the slot its `value` points into, so
        // forming a unique reference from the pointer is sound.
        let value = fun(unsafe { &mut *orig.value });
        // Skip `Drop`: slot ownership transfers to the new mapped guard. (If
        // `fun` panics above, `orig` drops normally and releases the slot.)
        core::mem::forget(orig);
        MappedBufferGuard {
            store,
            value,
            index,
        }
    }
}
261}