musli_common/context/system_context.rs

use core::cell::{Cell, UnsafeCell};
use core::fmt;
use core::marker::PhantomData;
use core::ops::Range;

use alloc::string::{String, ToString};
use alloc::vec::Vec;

use musli::{Allocator, Context};

use super::access::{self, Access};
use super::rich_error::{RichError, Step};
use super::ErrorMarker;
use crate::buf::{self, BufString};

type BufTriplet<E> = (Vec<Step<String>>, Range<usize>, E);

/// A rich context dynamically allocating space using the system allocator.
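///
/// # Examples
///
/// A minimal sketch of the intended flow. It is marked `ignore` because it
/// assumes an `alloc` value implementing [`Allocator`] and a `decode` helper
/// that reports its diagnostics through this context; neither is defined in
/// this module:
///
/// ```ignore
/// let mut cx = SystemContext::new(alloc);
/// cx.include_type();
///
/// if decode(&cx, input).is_err() {
///     for error in cx.errors() {
///         // Each item is a `RichError` carrying the recorded path and the
///         // byte range the error applies to.
///     }
/// }
/// ```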
pub struct SystemContext<A, M> {
    access: Access,
    mark: Cell<usize>,
    alloc: A,
    errors: UnsafeCell<Vec<BufTriplet<String>>>,
    path: UnsafeCell<Vec<Step<String>>>,
    include_type: bool,
    _marker: PhantomData<M>,
}

impl<A, M> SystemContext<A, M> {
    /// Construct a new context which uses allocations to store arbitrary
    /// amounts of diagnostics about decoding.
    ///
    /// Or at least until we run out of memory.
    pub fn new(alloc: A) -> Self {
        Self {
            access: Access::new(),
            mark: Cell::new(0),
            alloc,
            errors: UnsafeCell::new(Vec::new()),
            path: UnsafeCell::new(Vec::new()),
            include_type: false,
            _marker: PhantomData,
        }
    }

    /// Configure the context to visualize type information, and not just
    /// variants and fields.
    pub fn include_type(&mut self) -> &mut Self {
        self.include_type = true;
        self
    }

    /// Iterate over all collected errors.
    pub fn errors(&self) -> Errors<'_, String> {
        let access = self.access.shared();

        // SAFETY: We've checked above that we have shared access.
        Errors {
            errors: unsafe { &*self.errors.get() },
            index: 0,
            _access: access,
        }
    }
}

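// The context is handed out by shared reference while encoding or decoding,
// so its mutable state lives in `UnsafeCell`s. The `Access` guards are what
// make the unsafe dereferences in the helpers below sound: each helper takes
// the exclusive guard before touching `errors` or `path`.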
impl<A, M> SystemContext<A, M>
where
    A: Allocator,
{
    fn push_error(&self, range: Range<usize>, message: String) {
        let _access = self.access.exclusive();

        // SAFETY: We've restricted access to the context, so this is safe.
        let path = unsafe { (*self.path.get()).clone() };
        let errors = unsafe { &mut (*self.errors.get()) };

        errors.push((path, range, message));
    }

    fn push_path(&self, step: Step<String>) {
        let _access = self.access.exclusive();

        // SAFETY: We've checked that we have exclusive access just above.
        let path = unsafe { &mut (*self.path.get()) };

        path.push(step);
    }

    fn pop_path(&self) {
        let _access = self.access.exclusive();

        // SAFETY: We've checked that we have exclusive access just above.
        let path = unsafe { &mut (*self.path.get()) };

        path.pop();
    }
}

impl<A, M> Context for SystemContext<A, M>
where
    A: Allocator,
{
    type Mode = M;
    type Error = ErrorMarker;
    type Mark = usize;
    type Buf<'this> = A::Buf<'this> where Self: 'this;
    type BufString<'this> = BufString<A::Buf<'this>> where Self: 'this;

    #[inline]
    fn alloc(&self) -> Option<Self::Buf<'_>> {
        self.alloc.alloc()
    }

    #[inline]
    fn collect_string<T>(&self, value: &T) -> Result<Self::BufString<'_>, Self::Error>
    where
        T: ?Sized + fmt::Display,
    {
        buf::collect_string(self, value)
    }

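    // Error reporting: the un-marked variants below record a zero-width range
    // at the current position, while the `marked_*` variants record the span
    // from the given mark up to the current position.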
    #[inline]
    fn custom<T>(&self, message: T) -> Self::Error
    where
        T: 'static + Send + Sync + fmt::Display + fmt::Debug,
    {
        self.push_error(self.mark.get()..self.mark.get(), message.to_string());
        ErrorMarker
    }

    #[inline]
    fn message<T>(&self, message: T) -> Self::Error
    where
        T: fmt::Display,
    {
        self.push_error(self.mark.get()..self.mark.get(), message.to_string());
        ErrorMarker
    }

    #[inline]
    fn marked_message<T>(&self, mark: Self::Mark, message: T) -> Self::Error
    where
        T: fmt::Display,
    {
        self.push_error(mark..self.mark.get(), message.to_string());
        ErrorMarker
    }

    #[inline]
    fn marked_custom<T>(&self, mark: Self::Mark, message: T) -> Self::Error
    where
        T: 'static + Send + Sync + fmt::Display + fmt::Debug,
    {
        self.push_error(mark..self.mark.get(), message.to_string());
        ErrorMarker
    }

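    // The mark is a plain byte offset. `advance` moves it forward as input is
    // consumed, and the offsets are what end up as the ranges attached to
    // reported errors.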
    #[inline]
    fn mark(&self) -> Self::Mark {
        self.mark.get()
    }

    #[inline]
    fn advance(&self, n: usize) {
        self.mark.set(self.mark.get().wrapping_add(n));
    }

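    // Path tracking: each `enter_*` call below pushes a step onto the current
    // path and the matching `leave_*` call pops it again. Struct and enum
    // names are only recorded when `include_type()` has been enabled.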
    #[inline]
    fn enter_named_field<T>(&self, name: &'static str, _: &T)
    where
        T: ?Sized + fmt::Display,
    {
        self.push_path(Step::Named(name));
    }

    #[inline]
    fn enter_unnamed_field<T>(&self, index: u32, _: &T)
    where
        T: ?Sized + fmt::Display,
    {
        self.push_path(Step::Unnamed(index));
    }

    #[inline]
    fn leave_field(&self) {
        self.pop_path();
    }

    #[inline]
    fn enter_struct(&self, name: &'static str) {
        if self.include_type {
            self.push_path(Step::Struct(name));
        }
    }

    #[inline]
    fn leave_struct(&self) {
        if self.include_type {
            self.pop_path();
        }
    }

    #[inline]
    fn enter_enum(&self, name: &'static str) {
        if self.include_type {
            self.push_path(Step::Enum(name));
        }
    }

    #[inline]
    fn leave_enum(&self) {
        if self.include_type {
            self.pop_path();
        }
    }

    #[inline]
    fn enter_variant<T>(&self, name: &'static str, _: T) {
        self.push_path(Step::Variant(name));
    }

    #[inline]
    fn leave_variant(&self) {
        self.pop_path();
    }

    #[inline]
    fn enter_sequence_index(&self, index: usize) {
        self.push_path(Step::Index(index));
    }

    #[inline]
    fn leave_sequence_index(&self) {
        self.pop_path();
    }

    #[inline]
    fn enter_map_key<T>(&self, field: T)
    where
        T: fmt::Display,
    {
        self.push_path(Step::Key(field.to_string()));
    }

    #[inline]
    fn leave_map_key(&self) {
        self.pop_path();
    }
}

/// An iterator over collected errors.
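///
/// The iterator holds shared access to the originating [`SystemContext`] for
/// as long as it is alive, which is what keeps the borrowed error buffer
/// valid while iterating.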
pub struct Errors<'a, E> {
    errors: &'a [BufTriplet<E>],
    index: usize,
    // NB: Drop order is significant, drop the shared access last.
    _access: access::Shared<'a>,
}

impl<'a, E> Iterator for Errors<'a, E> {
    type Item = RichError<'a, String, E>;

    fn next(&mut self) -> Option<Self::Item> {
        let (path, range, error) = self.errors.get(self.index)?;
        self.index += 1;
        Some(RichError::new(path, 0, range.clone(), error))
    }
}