musli_utils/context/
stack_context.rs

1use core::cell::{Cell, UnsafeCell};
2use core::fmt::{self, Write};
3use core::marker::PhantomData;
4use core::ops::Range;
5
6use musli::{Allocator, Context};
7
8use crate::buf::{self, BufString};
9use crate::fixed::FixedVec;
10
11use super::access::{Access, Shared};
12use super::rich_error::{RichError, Step};
13use super::ErrorMarker;
14
/// A recorded diagnostic: the formatted error message paired with the byte
/// range of input it covers (see `push_error`).
type BufPair<'a, A> = (Range<usize>, BufString<<A as Allocator>::Buf<'a>>);
16
/// A rich context which uses allocations and tracks the exact location of
/// errors.
///
/// This will only store 4 errors by default, and support a path up to 16. To
/// control this, use the [`new_with`][StackContext::new_with] constructor.
pub struct StackContext<'a, const E: usize, const P: usize, A, M>
where
    A: ?Sized + Allocator,
{
    // Allocator backing the formatted error and path strings.
    alloc: &'a A,
    // Current byte position in the input, moved by `Context::advance`.
    mark: Cell<usize>,
    // Collected `(range, message)` errors, capped at `E`; errors past
    // capacity are silently dropped (see `push_error`).
    errors: UnsafeCell<FixedVec<BufPair<'a, A>, E>>,
    // Current path into the value being processed, up to `P` steps.
    path: UnsafeCell<FixedVec<Step<BufString<A::Buf<'a>>>, P>>,
    // How many elements of `path` we've gone over capacity.
    path_cap: Cell<usize>,
    // Whether type-level steps (struct/enum names) are recorded too.
    include_type: bool,
    // Runtime borrow tracking guarding the `UnsafeCell` fields.
    access: Access,
    _marker: PhantomData<M>,
}
36
37impl<'a, A, M> StackContext<'a, 16, 4, A, M>
38where
39    A: ?Sized + Allocator,
40{
41    /// Construct a new context which uses allocations to a fixed number of
42    /// diagnostics.
43    ///
44    /// This uses the default values of:
45    /// * The first 4 errors.
46    /// * 16 path elements stored when tracing.
47    pub fn new(alloc: &'a A) -> Self {
48        Self::new_with(alloc)
49    }
50}
51
impl<'a, const E: usize, const P: usize, A, M> StackContext<'a, E, P, A, M>
where
    A: ?Sized + Allocator,
{
    /// Construct a new context which uses allocations to a fixed but
    /// configurable number of diagnostics.
    pub fn new_with(alloc: &'a A) -> Self {
        Self {
            alloc,
            mark: Cell::new(0),
            errors: UnsafeCell::new(FixedVec::new()),
            path: UnsafeCell::new(FixedVec::new()),
            path_cap: Cell::new(0),
            include_type: false,
            access: Access::new(),
            _marker: PhantomData,
        }
    }

    /// Configure the context to visualize type information, and not just
    /// variant and fields.
    pub fn include_type(&mut self) -> &mut Self {
        self.include_type = true;
        self
    }

    /// Generate a line-separated report of all collected errors.
    pub fn report(&self) -> Report<'_, 'a, A> {
        Report {
            errors: self.errors(),
        }
    }

    /// Iterate over all collected errors.
    ///
    /// The returned iterator holds a shared access guard, so no exclusive
    /// access (and thus no mutation of errors or path) can be taken while it
    /// is alive.
    pub fn errors(&self) -> Errors<'_, 'a, A> {
        let access = self.access.shared();

        // SAFETY: the shared guard acquired above prevents any exclusive
        // access for as long as these borrows are carried by `Errors`.
        Errors {
            path: unsafe { &*self.path.get() },
            errors: unsafe { &*self.errors.get() },
            index: 0,
            path_cap: self.path_cap.get(),
            _access: access,
        }
    }

    /// Push an error into the collection.
    ///
    /// Errors beyond the fixed capacity `E` are deliberately discarded.
    fn push_error(&self, range: Range<usize>, error: BufString<A::Buf<'a>>) {
        let _access = self.access.exclusive();

        // SAFETY: We've checked that we have exclusive access just above.
        unsafe {
            // Capacity overflow is intentionally ignored (`_ =`).
            _ = (*self.errors.get()).try_push((range, error));
        }
    }

    /// Push a path.
    ///
    /// If the path storage is full, only the overflow counter `path_cap` is
    /// bumped so that `pop_path` stays balanced with this call.
    fn push_path(&self, step: Step<BufString<A::Buf<'a>>>) {
        let _access = self.access.exclusive();

        // SAFETY: We've checked that we have exclusive access just above.
        let path = unsafe { &mut (*self.path.get()) };

        if path.try_push(step).is_err() {
            self.path_cap.set(self.path_cap.get() + 1);
        }
    }

    /// Pop the last path.
    fn pop_path(&self) {
        let cap = self.path_cap.get();

        // Unwind any overflow recorded by `push_path` before touching the
        // actual path storage.
        if cap > 0 {
            self.path_cap.set(cap - 1);
            return;
        }

        let _access = self.access.exclusive();

        // SAFETY: We've checked that we have exclusive access just above.
        unsafe {
            (*self.path.get()).pop();
        }
    }

    /// Format `value` into a freshly allocated string.
    ///
    /// Returns `None` if allocation or formatting fails; callers then skip
    /// recording the diagnostic.
    fn format_string<T>(&self, value: T) -> Option<BufString<A::Buf<'a>>>
    where
        T: fmt::Display,
    {
        let buf = self.alloc.alloc()?;
        let mut string = BufString::new(buf);
        write!(string, "{value}").ok()?;
        Some(string)
    }
}
147
impl<'a, const E: usize, const P: usize, A, M> Context for StackContext<'a, E, P, A, M>
where
    A: ?Sized + Allocator,
{
    type Mode = M;
    type Error = ErrorMarker;
    // Marks are plain byte offsets into the input.
    type Mark = usize;
    type Buf<'this> = A::Buf<'this> where Self: 'this;
    type BufString<'this> = BufString<A::Buf<'this>> where Self: 'this;

    #[inline]
    fn clear(&self) {
        // Reset the position first; then wipe diagnostics under exclusive
        // access.
        self.mark.set(0);
        let _access = self.access.exclusive();

        // SAFETY: We have acquired exclusive access just above.
        unsafe {
            (*self.errors.get()).clear();
            (*self.path.get()).clear();
        }
    }

    #[inline]
    fn alloc(&self) -> Option<Self::Buf<'_>> {
        self.alloc.alloc()
    }

    #[inline]
    fn collect_string<T>(&self, value: &T) -> Result<Self::BufString<'_>, Self::Error>
    where
        T: ?Sized + fmt::Display,
    {
        buf::collect_string(self, value)
    }

    #[inline]
    fn custom<T>(&self, message: T) -> Self::Error
    where
        T: 'static + Send + Sync + fmt::Display + fmt::Debug,
    {
        // Record with an empty range at the current position. If formatting
        // fails the diagnostic is dropped, but an error is still signalled.
        if let Some(string) = self.format_string(message) {
            self.push_error(self.mark.get()..self.mark.get(), string);
        }

        ErrorMarker
    }

    #[inline]
    fn message<T>(&self, message: T) -> Self::Error
    where
        T: fmt::Display,
    {
        // Same recording strategy as `custom`: empty range at current mark.
        if let Some(string) = self.format_string(message) {
            self.push_error(self.mark.get()..self.mark.get(), string);
        }

        ErrorMarker
    }

    #[inline]
    fn marked_message<T>(&self, mark: Self::Mark, message: T) -> Self::Error
    where
        T: fmt::Display,
    {
        // The range spans from the caller-provided mark up to the current
        // position.
        if let Some(string) = self.format_string(message) {
            self.push_error(mark..self.mark.get(), string);
        }

        ErrorMarker
    }

    #[inline]
    fn marked_custom<T>(&self, mark: Self::Mark, message: T) -> Self::Error
    where
        T: 'static + Send + Sync + fmt::Display + fmt::Debug,
    {
        if let Some(string) = self.format_string(message) {
            self.push_error(mark..self.mark.get(), string);
        }

        ErrorMarker
    }

    #[inline]
    fn mark(&self) -> Self::Mark {
        self.mark.get()
    }

    #[inline]
    fn advance(&self, n: usize) {
        // Wrapping add: overflow of the position is tolerated rather than
        // panicking.
        self.mark.set(self.mark.get().wrapping_add(n));
    }

    #[inline]
    fn enter_named_field<T>(&self, name: &'static str, _: &T)
    where
        T: ?Sized + fmt::Display,
    {
        self.push_path(Step::Named(name));
    }

    #[inline]
    fn enter_unnamed_field<T>(&self, index: u32, _: &T)
    where
        T: ?Sized + fmt::Display,
    {
        self.push_path(Step::Unnamed(index));
    }

    #[inline]
    fn leave_field(&self) {
        self.pop_path();
    }

    // Type-level steps (struct/enum names) are only recorded when
    // `include_type` has been enabled.
    #[inline]
    fn enter_struct(&self, name: &'static str) {
        if self.include_type {
            self.push_path(Step::Struct(name));
        }
    }

    #[inline]
    fn leave_struct(&self) {
        if self.include_type {
            self.pop_path();
        }
    }

    #[inline]
    fn enter_enum(&self, name: &'static str) {
        if self.include_type {
            self.push_path(Step::Enum(name));
        }
    }

    #[inline]
    fn leave_enum(&self) {
        if self.include_type {
            self.pop_path();
        }
    }

    #[inline]
    fn enter_variant<T>(&self, name: &'static str, _: T) {
        self.push_path(Step::Variant(name));
    }

    #[inline]
    fn leave_variant(&self) {
        self.pop_path();
    }

    #[inline]
    fn enter_sequence_index(&self, index: usize) {
        self.push_path(Step::Index(index));
    }

    #[inline]
    fn leave_sequence_index(&self) {
        self.pop_path();
    }

    #[inline]
    fn enter_map_key<T>(&self, field: T)
    where
        T: fmt::Display,
    {
        // NOTE(review): if allocation/formatting fails nothing is pushed, but
        // `leave_map_key` will still pop — the path could become unbalanced
        // on allocation failure. Confirm whether that is acceptable here.
        if let Some(string) = self.format_string(field) {
            self.push_path(Step::Key(string));
        }
    }

    #[inline]
    fn leave_map_key(&self) {
        self.pop_path();
    }
}
325
/// A line-separated report of all errors.
pub struct Report<'a, 'buf, A>
where
    A: 'buf + ?Sized + Allocator,
{
    // Iterator over the collected errors; cloned for each `Display` pass.
    errors: Errors<'a, 'buf, A>,
}
333
334impl<'a, 'buf, A> fmt::Display for Report<'a, 'buf, A>
335where
336    A: 'buf + ?Sized + Allocator,
337{
338    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
339        for error in self.errors.clone() {
340            writeln!(f, "{error}")?;
341        }
342
343        Ok(())
344    }
345}
346
/// An iterator over available errors.
pub struct Errors<'a, 'buf, A>
where
    A: 'buf + ?Sized + Allocator,
{
    // Recorded path, shared by every yielded `RichError`.
    path: &'a [Step<BufString<A::Buf<'buf>>>],
    // Collected `(range, message)` pairs being iterated.
    errors: &'a [(Range<usize>, BufString<A::Buf<'buf>>)],
    // Next index into `errors`.
    index: usize,
    // Number of path elements that went over capacity in the context.
    path_cap: usize,
    // Keeps the context's shared-access guard alive while borrowing.
    _access: Shared<'a>,
}
358
359impl<'a, 'buf, A> Iterator for Errors<'a, 'buf, A>
360where
361    A: ?Sized + Allocator,
362{
363    type Item = RichError<'a, BufString<A::Buf<'buf>>, BufString<A::Buf<'buf>>>;
364
365    #[inline]
366    fn next(&mut self) -> Option<Self::Item> {
367        let (range, error) = self.errors.get(self.index)?;
368        self.index += 1;
369
370        Some(RichError::new(
371            self.path,
372            self.path_cap,
373            range.clone(),
374            error,
375        ))
376    }
377}
378
379impl<'a, 'buf, A> Clone for Errors<'a, 'buf, A>
380where
381    A: ?Sized + Allocator,
382{
383    fn clone(&self) -> Self {
384        Self {
385            path: self.path,
386            errors: self.errors,
387            index: self.index,
388            path_cap: self.path_cap,
389            _access: self._access.clone(),
390        }
391    }
392}