facet_reflect/partial/partial_api/option.rs
use super::*;

////////////////////////////////////////////////////////////////////////////////////////////////////
// Option / inner
////////////////////////////////////////////////////////////////////////////////////////////////////
impl<const BORROW: bool> Partial<'_, BORROW> {
    /// Begin building the Some variant of an Option
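    ///
    /// A minimal usage sketch, assuming a chaining entry point and a `set`-style
    /// setter with the same builder signature as `begin_some` (both are assumptions;
    /// only `begin_some` and `end` are part of this module's API):
    ///
    /// ```ignore
    /// // Hypothetical: `partial` is a Partial for Option<u32> obtained elsewhere.
    /// let partial = partial
    ///     .begin_some()? // push a frame for the inner u32
    ///     .set(42u32)?   // hypothetical setter for the inner frame
    ///     .end()?;       // pop the frame and store Some(42) in the Option
    /// ```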
    pub fn begin_some(mut self) -> Result<Self, ReflectError> {
        // Verify we're working with an Option and get the def
        let option_def = {
            let frame = self.frames().last().unwrap();
            match frame.shape.def {
                Def::Option(def) => def,
                _ => {
                    return Err(ReflectError::WasNotA {
                        expected: "Option",
                        actual: frame.shape,
                    });
                }
            }
        };

        // Check if we need to handle re-initialization.
        // For Options, also check if tracker is Option{building_inner:false} which means
        // a previous begin_some/end cycle completed.
        //
        // IMPORTANT: For Option<Vec<T>> and similar accumulator types, we do NOT want to
        // reinitialize when re-entering, as this would destroy the existing Vec.
        // This can happen with TOML array-of-tables which emit multiple FieldKey events
        // for the same field.
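        //
        // Illustrative call sequence (assumed driver, not code from this crate): for a
        // field of type Option<Vec<T>>, each array-of-tables entry triggers
        // begin_some -> begin_list -> push elements -> end -> end, so the second
        // begin_some must re-enter the existing Vec instead of reallocating it.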
        let needs_reinit = {
            let frame = self.frames().last().unwrap();

            // Check if this is a re-entry into an already-initialized Option.
            // After end() completes, the tracker is reset to Scalar, not Option{building_inner: false}.
            // So we check for Scalar tracker + is_init flag.
            if matches!(frame.tracker, Tracker::Scalar) && frame.is_init {
                // The Option was previously built and completed.
                // Check if the inner type can accumulate more values (like List, Map, DynamicValue)
                let inner_shape = option_def.t;
                let is_accumulator = matches!(
                    inner_shape.def,
                    Def::List(_) | Def::Map(_) | Def::Set(_) | Def::DynamicValue(_)
                );

                if is_accumulator {
                    // Don't reinitialize - we'll re-enter the existing inner value below
                    false
                } else {
                    // For scalars and other types, reinitialize as before
                    true
                }
            } else {
                frame.is_init
            }
        };

        // Check if we're re-entering an existing accumulator (like Option<Vec<T>>)
        let is_reentry = {
            let frame = self.frames().last().unwrap();
            matches!(frame.tracker, Tracker::Scalar) && frame.is_init && !needs_reinit
        };

        if needs_reinit {
            self.prepare_for_reinitialization();
        }

        // In deferred mode, push "Some" onto the path to distinguish
        // Option<T> (path ends before "Some") from the inner T (path includes "Some").
        // This treats Option like an enum with Some/None variants for path tracking.
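        // For example (illustrative path shapes): a struct field `f: Option<T>` would be
        // tracked as ["f"] for the Option itself and ["f", "Some"] for the inner T.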
        if let FrameMode::Deferred {
            stack,
            start_depth,
            current_path,
            stored_frames,
            ..
        } = &mut self.mode
        {
            let relative_depth = stack.len() - *start_depth;
            let should_track = current_path.len() == relative_depth;

            if should_track {
                current_path.push("Some");

                // Check if we have a stored frame for this path (re-entry case)
                if let Some(stored_frame) = stored_frames.remove(current_path) {
                    trace!("begin_some: Restoring stored frame for path {current_path:?}");

                    // Update tracker to indicate we're building the inner value
                    let frame = stack.last_mut().unwrap();
                    frame.tracker = Tracker::Option {
                        building_inner: true,
                    };

                    stack.push(stored_frame);
                    return Ok(self);
                }
            }
        }

        // Set tracker to indicate we're building the inner value
        let frame = self.frames_mut().last_mut().unwrap();
        frame.tracker = Tracker::Option {
            building_inner: true,
        };

        // Get the inner type shape
        let inner_shape = option_def.t;

        // If we're re-entering an existing accumulator, get a pointer to the existing inner value
        // instead of allocating new memory
        let inner_data = if is_reentry {
            // The Option is already initialized with Some(inner), so we need to get a pointer
            // to the existing inner value using the Option vtable's get_value function.
            let frame = self.frames().last().unwrap();

            // Get the Option's vtable which has a get_value function
            let option_vtable = match &frame.shape.def {
                Def::Option(opt_def) => opt_def.vtable,
                _ => unreachable!("Expected Option def"),
            };

            unsafe {
                // Use the vtable's get_value function to get a pointer to the inner T
                // get_value takes PtrConst and returns Option<PtrConst>
                let option_ptr = PtrConst::new(frame.data.as_byte_ptr());
                let inner_ptr_opt = (option_vtable.get_value)(option_ptr);
                let inner_ptr = inner_ptr_opt.expect("Option should be Some when re-entering");
                // Convert PtrConst to *mut for PtrUninit::new
                PtrUninit::new(inner_ptr.as_byte_ptr() as *mut u8)
            }
        } else {
            // Allocate memory for the inner value
            let inner_layout =
                inner_shape
                    .layout
                    .sized_layout()
                    .map_err(|_| ReflectError::Unsized {
                        shape: inner_shape,
                        operation: "begin_some, allocating Option inner value",
                    })?;

            if inner_layout.size() == 0 {
                // For ZST, use a non-null but unallocated pointer
                PtrUninit::new(NonNull::<u8>::dangling().as_ptr())
            } else {
                // Allocate memory for the inner value
                let ptr = unsafe { ::alloc::alloc::alloc(inner_layout) };
                let Some(ptr) = NonNull::new(ptr) else {
                    ::alloc::alloc::handle_alloc_error(inner_layout);
                };
                PtrUninit::new(ptr.as_ptr())
            }
        };

        // Create a new frame for the inner value
        // For re-entry, we use ManagedElsewhere ownership since the Option frame owns the memory
        let mut inner_frame = Frame::new(
            inner_data,
            inner_shape,
            if is_reentry {
                FrameOwnership::ManagedElsewhere
            } else {
                FrameOwnership::Owned
            },
        );

        // CRITICAL: For re-entry, mark the frame as already initialized so that begin_list()
        // doesn't reinitialize the Vec (which would clear it)
        if is_reentry {
            inner_frame.is_init = true;
        }

        self.frames_mut().push(inner_frame);

        Ok(self)
    }

    /// Begin building the inner value of a wrapper type
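    ///
    /// A minimal usage sketch for a newtype wrapper, assuming an entry point and a
    /// `set`-style setter alongside `begin_inner`/`end` (the setter name is an
    /// assumption; falling back to `begin_nth_field(0)` is what this method does
    /// when the wrapper has no `try_from`):
    ///
    /// ```ignore
    /// // Hypothetical: `partial` is a Partial for a newtype `struct UserId(u64)`.
    /// let partial = partial
    ///     .begin_inner()? // frame for the wrapped u64 (or field 0 without try_from)
    ///     .set(7u64)?     // hypothetical setter for the inner frame
    ///     .end()?;        // converts/writes the value back into the wrapper
    /// ```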
    pub fn begin_inner(mut self) -> Result<Self, ReflectError> {
        // Get the inner shape and check for try_from
        // Priority: builder_shape (for immutable collections) > inner (for variance/transparent wrappers)
        let (inner_shape, has_try_from, parent_shape, is_option) = {
            let frame = self.frames().last().unwrap();
            // Check builder_shape first (immutable collections like Bytes, Arc<[T]>)
            if let Some(builder_shape) = frame.shape.builder_shape {
                let has_try_from = frame.shape.vtable.has_try_from();
                let is_option = matches!(frame.shape.def, Def::Option(_));
                (Some(builder_shape), has_try_from, frame.shape, is_option)
            } else if let Some(inner_shape) = frame.shape.inner {
                let has_try_from = frame.shape.vtable.has_try_from();
                let is_option = matches!(frame.shape.def, Def::Option(_));
                (Some(inner_shape), has_try_from, frame.shape, is_option)
            } else {
                (None, false, frame.shape, false)
            }
        };

        // Handle re-initialization if needed
        self.prepare_for_reinitialization();

        if let Some(inner_shape) = inner_shape {
            if has_try_from {
                // For Option types, use begin_some behavior to properly track building_inner
                // This ensures end() knows how to handle the popped frame
                if is_option {
                    return self.begin_some();
                }

                // Create a conversion frame with the inner shape
                // For non-Option types with try_from, we leave the parent tracker unchanged
                // and the conversion will happen in end()

                // Allocate memory for the inner value (conversion source)
                let inner_layout =
                    inner_shape
                        .layout
                        .sized_layout()
                        .map_err(|_| ReflectError::Unsized {
                            shape: inner_shape,
                            operation: "begin_inner, getting inner layout",
                        })?;

                let inner_data = if inner_layout.size() == 0 {
                    // For ZST, use a non-null but unallocated pointer
                    PtrUninit::new(NonNull::<u8>::dangling().as_ptr())
                } else {
                    // Allocate memory for the inner value
                    let ptr = unsafe { ::alloc::alloc::alloc(inner_layout) };
                    let Some(ptr) = NonNull::new(ptr) else {
                        ::alloc::alloc::handle_alloc_error(inner_layout);
                    };
                    PtrUninit::new(ptr.as_ptr())
                };

                // For conversion frames, we create a frame directly with the inner shape
                // This allows setting values of the inner type which will be converted
                // The automatic conversion detection in end() will handle the conversion
                trace!(
                    "begin_inner: Creating frame for inner type {inner_shape} (parent is {parent_shape})"
                );
                self.frames_mut()
                    .push(Frame::new(inner_data, inner_shape, FrameOwnership::Owned));

                Ok(self)
            } else {
                // For wrapper types without try_from, navigate to the first field
                // This is a common pattern for newtype wrappers
                trace!("begin_inner: No try_from for {parent_shape}, using field navigation");
                self.begin_nth_field(0)
            }
        } else {
            Err(ReflectError::OperationFailed {
                shape: parent_shape,
                operation: "type does not have an inner value",
            })
        }
    }

    /// Begin building the source shape for custom deserialization. Calling `end()` on this
    /// frame will invoke the `deserialize_with` function provided by the field and set the
    /// field from the result.
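    ///
    /// A minimal usage sketch (the field-selection and setter calls are assumptions;
    /// the proxy source type comes from the field's proxy definition):
    ///
    /// ```ignore
    /// // Hypothetical: the current frame is a field whose proxy source shape is String.
    /// let partial = partial
    ///     .begin_custom_deserialization()? // frame for the proxy source type
    ///     .set(String::from("raw input"))? // hypothetical setter for the source value
    ///     .end()?;                         // runs deserialize_with and stores the field
    /// ```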
    pub fn begin_custom_deserialization(mut self) -> Result<Self, ReflectError> {
        let current_frame = self.frames().last().unwrap();
        let target_shape = current_frame.shape;
        trace!("begin_custom_deserialization: target_shape={target_shape}");
        if let Some(field) = self.parent_field() {
            trace!("begin_custom_deserialization: field name={}", field.name);
            if let Some(proxy_def) = field.proxy() {
                // Get the source shape from the proxy definition
                let source_shape = proxy_def.shape;
                let source_data = source_shape.allocate().map_err(|_| ReflectError::Unsized {
                    shape: target_shape,
                    operation: "Not a Sized type",
                })?;

                trace!(
                    "begin_custom_deserialization: Creating frame for deserialization type {source_shape}"
                );
                let mut new_frame = Frame::new(source_data, source_shape, FrameOwnership::Owned);
                new_frame.using_custom_deserialization = true;
                self.frames_mut().push(new_frame);

                Ok(self)
            } else {
                Err(ReflectError::OperationFailed {
                    shape: target_shape,
                    operation: "field does not have a proxy definition",
                })
            }
        } else {
            Err(ReflectError::OperationFailed {
                shape: target_shape,
                operation: "not currently processing a field",
            })
        }
    }

    /// Begin building the source shape for custom deserialization using a container-level proxy.
    ///
    /// Unlike `begin_custom_deserialization`, which uses field-level proxy info, this method
    /// uses the shape's own proxy definition (from `#[facet(proxy = ...)]` at container level).
    ///
    /// Returns `Ok((self, true))` if the shape has a container-level proxy and we've begun
    /// custom deserialization, `Ok((self, false))` if not (self is returned unchanged).
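    ///
    /// A minimal usage sketch (the caller-side handling is an assumption; the `bool`
    /// in the return value is what this method reports):
    ///
    /// ```ignore
    /// let (partial, started) = partial.begin_custom_deserialization_from_shape()?;
    /// if started {
    ///     // Hypothetical: fill the proxy source frame, then `end()` converts it
    ///     // into the target type via the container-level proxy.
    /// } else {
    ///     // No container-level proxy: build the value directly.
    /// }
    /// ```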
    pub fn begin_custom_deserialization_from_shape(mut self) -> Result<(Self, bool), ReflectError> {
        let current_frame = self.frames().last().unwrap();
        let target_shape = current_frame.shape;
        trace!("begin_custom_deserialization_from_shape: target_shape={target_shape}");

        let Some(proxy_def) = target_shape.proxy else {
            return Ok((self, false));
        };

        let source_shape = proxy_def.shape;
        let source_data = source_shape.allocate().map_err(|_| ReflectError::Unsized {
            shape: target_shape,
            operation: "Not a Sized type",
        })?;

        trace!(
            "begin_custom_deserialization_from_shape: Creating frame for deserialization type {source_shape}"
        );
        let mut new_frame = Frame::new(source_data, source_shape, FrameOwnership::Owned);
        new_frame.using_custom_deserialization = true;
        // Store the target shape's proxy in the frame so end() can use it for conversion
        new_frame.shape_level_proxy = Some(proxy_def);
        self.frames_mut().push(new_frame);

        Ok((self, true))
    }
}