// facet_reflect/partial/partial_api/option.rs
1use super::*;
2use crate::AllocatedShape;
3use facet_path::PathStep;
4
5////////////////////////////////////////////////////////////////////////////////////////////////////
6// Option / inner
7////////////////////////////////////////////////////////////////////////////////////////////////////
8impl<const BORROW: bool> Partial<'_, BORROW> {
    /// Begin building the Some variant of an Option
    ///
    /// Pushes a new frame for the Option's inner value onto the frame stack;
    /// a later `end()` on that frame wraps the built value into `Some(..)`.
    ///
    /// Re-entry behavior: if the Option already completed a previous
    /// begin_some/end cycle, the existing inner value is preserved and
    /// re-entered in place when it can meaningfully accept more data
    /// (accumulators like lists/maps/sets/dynamic values, or structs/enums in
    /// deferred mode); otherwise the old inner value is dropped and rebuilt.
    ///
    /// # Errors
    ///
    /// - `ReflectErrorKind::WasNotA` if the current frame is not an Option
    /// - `ReflectErrorKind::Unsized` if the inner type has no sized layout
    pub fn begin_some(mut self) -> Result<Self, ReflectError> {
        // Verify we're working with an Option and get the def
        let option_def = {
            let frame = self.frames().last().unwrap();
            match frame.allocated.shape().def {
                Def::Option(def) => def,
                _ => {
                    return Err(self.err(ReflectErrorKind::WasNotA {
                        expected: "Option",
                        actual: frame.allocated.shape(),
                    }));
                }
            }
        };

        // Check if we need to handle re-initialization.
        // For Options, also check if tracker is Option{building_inner:false} which means
        // a previous begin_some/end cycle completed.
        //
        // IMPORTANT: For certain types, we do NOT want to reinitialize when re-entering,
        // as this would destroy the existing values:
        // - Accumulators (Vec, Map, Set, DynamicValue) - can accumulate more values
        // - Structs/enums in deferred mode - can have more fields set on re-entry
        let needs_reinit = {
            let frame = self.frames().last().unwrap();

            // Check if this is a re-entry into an already-initialized Option.
            // After end() completes, the tracker is reset to Scalar, not Option{building_inner: false}.
            // So we check for Scalar tracker + is_init flag.
            if matches!(frame.tracker, Tracker::Scalar) && frame.is_init {
                // The Option was previously built and completed.
                // Check if the inner type can accumulate more values or be re-entered
                let inner_shape = option_def.t;
                let is_accumulator = matches!(
                    inner_shape.def,
                    Def::List(_) | Def::Map(_) | Def::Set(_) | Def::DynamicValue(_)
                );

                // In deferred mode, structs and enums are also reentrant - we can set
                // more fields on them without reinitializing the whole struct
                let is_reentrant_in_deferred = self.is_deferred()
                    && matches!(
                        inner_shape.ty,
                        Type::User(UserType::Struct(_)) | Type::User(UserType::Enum(_))
                    );

                if is_accumulator || is_reentrant_in_deferred {
                    // Don't reinitialize - we'll re-enter the existing inner value below
                    false
                } else {
                    // For scalars and other types, reinitialize as before
                    true
                }
            } else {
                // Not a completed Option cycle: reinit exactly when already initialized.
                frame.is_init
            }
        };

        // Check if we're re-entering an existing accumulator (like Option<Vec<T>>):
        // the Option is initialized but we decided above NOT to reinitialize it.
        let is_reentry = {
            let frame = self.frames().last().unwrap();
            matches!(frame.tracker, Tracker::Scalar) && frame.is_init && !needs_reinit
        };

        if needs_reinit {
            self.prepare_for_reinitialization();
        }

        // In deferred mode, check if we have a stored frame for this Option's inner value.
        // The path for the inner value includes OptionSome to distinguish it from the Option itself.
        if self.is_deferred() {
            // Derive the current path and construct what the path WOULD be after entering Some
            let mut check_path = self.derive_path();
            check_path.push(PathStep::OptionSome);

            if let FrameMode::Deferred {
                stack,
                stored_frames,
                ..
            } = &mut self.mode
            {
                // Check if we have a stored frame for this path (re-entry case)
                if let Some(mut stored_frame) = stored_frames.remove(&check_path) {
                    trace!("begin_some: Restoring stored frame for path {check_path:?}");

                    // Update tracker to indicate we're building the inner value
                    let frame = stack.last_mut().unwrap();
                    frame.tracker = Tracker::Option {
                        building_inner: true,
                        pending_inner: None,
                    };

                    // Clear the restored frame's current_child - we haven't entered any of its
                    // children yet in this new traversal. Without this, derive_path() would
                    // include stale navigation state and compute incorrect paths.
                    stored_frame.tracker.clear_current_child();

                    stack.push(stored_frame);
                    return Ok(self);
                }
            }
        }

        // Set tracker to indicate we're building the inner value
        // Copy the type_plan (Copy) before dropping the mutable borrow
        let parent_type_plan = {
            let frame = self.mode.stack_mut().last_mut().unwrap();
            frame.tracker = Tracker::Option {
                building_inner: true,
                pending_inner: None,
            };
            frame.type_plan
        };

        // Get the inner type shape
        let inner_shape = option_def.t;

        // Get the inner layout (needed for AllocatedShape later)
        let inner_layout = inner_shape.layout.sized_layout().map_err(|_| {
            self.err(ReflectErrorKind::Unsized {
                shape: inner_shape,
                operation: "begin_some, getting inner layout",
            })
        })?;

        // If we're re-entering an existing accumulator, get a pointer to the existing inner value
        // instead of allocating new memory
        let inner_data = if is_reentry {
            // The Option is already initialized with Some(inner), so we need to get a pointer
            // to the existing inner value using the Option vtable's get_value function.
            let frame = self.frames().last().unwrap();

            // Get the Option's vtable which has a get_value function
            let option_vtable = match &frame.allocated.shape().def {
                Def::Option(opt_def) => opt_def.vtable,
                _ => unreachable!("Expected Option def"),
            };

            // SAFETY: `frame.data` points at an initialized Option value
            // (is_reentry implies frame.is_init), so calling get_value on it is
            // sound; the assert checks the Some invariant before the pointer is
            // reused as a writable location.
            unsafe {
                // Use the vtable's get_value function to get a pointer to the inner T
                // get_value returns null when the option is None.
                let option_ptr = PtrConst::new(frame.data.as_byte_ptr());
                let inner_ptr = (option_vtable.get_value)(option_ptr);
                assert!(
                    !inner_ptr.is_null(),
                    "Option should be Some when re-entering"
                );
                // Convert PtrConst to *mut for PtrUninit::new
                PtrUninit::new(inner_ptr as *mut u8)
            }
        } else {
            // Allocate memory for the inner value
            if inner_layout.size() == 0 {
                // For ZST, use a non-null but unallocated pointer
                PtrUninit::new(NonNull::<u8>::dangling().as_ptr())
            } else {
                // Allocate memory for the inner value
                let ptr = unsafe { ::alloc::alloc::alloc(inner_layout) };
                let Some(ptr) = NonNull::new(ptr) else {
                    ::alloc::alloc::handle_alloc_error(inner_layout);
                };
                PtrUninit::new(ptr.as_ptr())
            }
        };

        // Create a new frame for the inner value
        // For re-entry, we use BorrowedInPlace ownership since the Option frame owns the memory
        // Get child type plan NodeId for Option inner
        let child_plan_id = self
            .root_plan
            .option_some_node_id(parent_type_plan)
            .expect("TypePlan must have option inner node");
        let mut inner_frame = Frame::new(
            inner_data,
            AllocatedShape::new(inner_shape, inner_layout.size()),
            if is_reentry {
                FrameOwnership::BorrowedInPlace
            } else {
                FrameOwnership::Owned
            },
            child_plan_id,
        );

        // CRITICAL: For re-entry, mark the frame as already initialized so that init_list()
        // doesn't reinitialize the Vec (which would clear it)
        if is_reentry {
            inner_frame.is_init = true;
        }

        self.mode.stack_mut().push(inner_frame);

        Ok(self)
    }
203
204 /// Begin building the inner value of a wrapper type
205 pub fn begin_inner(mut self) -> Result<Self, ReflectError> {
206 // Get the inner shape and check for try_from
207 // Priority: builder_shape (for immutable collections) > inner (for variance/transparent wrappers)
208 let (inner_shape, has_try_from, parent_shape, is_option, parent_type_plan) = {
209 let frame = self.frames().last().unwrap();
210 let type_plan = frame.type_plan;
211 // Check builder_shape first (immutable collections like Bytes, Arc<[T]>)
212 if let Some(builder_shape) = frame.allocated.shape().builder_shape {
213 let has_try_from = frame.allocated.shape().vtable.has_try_from();
214 let is_option = matches!(frame.allocated.shape().def, Def::Option(_));
215 (
216 Some(builder_shape),
217 has_try_from,
218 frame.allocated.shape(),
219 is_option,
220 type_plan,
221 )
222 } else if let Some(inner_shape) = frame.allocated.shape().inner {
223 let has_try_from = frame.allocated.shape().vtable.has_try_from();
224 let is_option = matches!(frame.allocated.shape().def, Def::Option(_));
225 (
226 Some(inner_shape),
227 has_try_from,
228 frame.allocated.shape(),
229 is_option,
230 type_plan,
231 )
232 } else {
233 (None, false, frame.allocated.shape(), false, type_plan)
234 }
235 };
236
237 // Handle re-initialization if needed
238 self.prepare_for_reinitialization();
239
240 if let Some(inner_shape) = inner_shape {
241 if has_try_from {
242 // For Option types, use begin_some behavior to properly track building_inner
243 // This ensures end() knows how to handle the popped frame
244 if is_option {
245 return self.begin_some();
246 }
247
248 // Create a conversion frame with the inner shape
249 // For non-Option types with try_from, we leave the parent tracker unchanged
250 // and the conversion will happen in end()
251
252 // Allocate memory for the inner value (conversion source)
253 let inner_layout = inner_shape.layout.sized_layout().map_err(|_| {
254 self.err(ReflectErrorKind::Unsized {
255 shape: inner_shape,
256 operation: "begin_inner, getting inner layout",
257 })
258 })?;
259
260 let inner_data = if inner_layout.size() == 0 {
261 // For ZST, use a non-null but unallocated pointer
262 PtrUninit::new(NonNull::<u8>::dangling().as_ptr())
263 } else {
264 // Allocate memory for the inner value
265 let ptr = unsafe { ::alloc::alloc::alloc(inner_layout) };
266 let Some(ptr) = NonNull::new(ptr) else {
267 ::alloc::alloc::handle_alloc_error(inner_layout);
268 };
269 PtrUninit::new(ptr.as_ptr())
270 };
271
272 // For conversion frames, we create a frame directly with the inner shape
273 // This allows setting values of the inner type which will be converted
274 // The automatic conversion detection in end() will handle the conversion
275 trace!(
276 "begin_inner: Creating frame for inner type {inner_shape} (parent is {parent_shape})"
277 );
278
279 // Mark the parent frame as building its inner value.
280 // This is needed for derive_path() to add an Inner step so that
281 // inner and parent frames have distinct paths in deferred mode.
282 self.mode.stack_mut().last_mut().unwrap().tracker = Tracker::Inner {
283 building_inner: true,
284 };
285
286 // Navigate to the inner type's TypePlan node for correct strategy lookup.
287 // If the TypePlan has a child node for the inner type, use it; otherwise
288 // fall back to the parent's node (which may result in incorrect strategy).
289 let inner_type_plan_id = self
290 .root_plan
291 .inner_node_id(parent_type_plan)
292 .unwrap_or(parent_type_plan);
293 self.mode.stack_mut().push(Frame::new(
294 inner_data,
295 AllocatedShape::new(inner_shape, inner_layout.size()),
296 FrameOwnership::Owned,
297 inner_type_plan_id,
298 ));
299
300 Ok(self)
301 } else {
302 // For wrapper types without try_from, navigate to the first field
303 // This is a common pattern for newtype wrappers
304 trace!("begin_inner: No try_from for {parent_shape}, using field navigation");
305 self.begin_nth_field(0)
306 }
307 } else {
308 Err(self.err(ReflectErrorKind::OperationFailed {
309 shape: parent_shape,
310 operation: "type does not have an inner value",
311 }))
312 }
313 }
314
315 /// Begin bulding the source shape for custom deserialization, calling end() for this frame will
316 /// call the deserialize_with function provided by the field and set the field using the result.
317 ///
318 /// This uses the format-agnostic proxy. For format-specific proxies, use
319 /// `begin_custom_deserialization_with_format`.
320 pub fn begin_custom_deserialization(self) -> Result<Self, ReflectError> {
321 self.begin_custom_deserialization_with_format(None)
322 }
323
324 /// Begin building the source shape for custom deserialization using container-level proxy.
325 ///
326 /// Unlike `begin_custom_deserialization` which uses field-level proxy info, this method
327 /// uses the shape's own proxy definition (from `#[facet(proxy = ...)]` at container level).
328 ///
329 /// Returns `Ok((self, true))` if the shape has a container-level proxy and we've begun
330 /// custom deserialization, `Ok((self, false))` if not (self is returned unchanged).
331 pub fn begin_custom_deserialization_from_shape(self) -> Result<(Self, bool), ReflectError> {
332 // Delegate to the format-aware version with no format namespace
333 self.begin_custom_deserialization_from_shape_with_format(None)
334 }
335
336 /// Begin building the source shape for custom deserialization using container-level proxy,
337 /// with support for format-specific proxy resolution.
338 ///
339 /// If `format_namespace` is provided (e.g., `Some("xml")`), looks for a format-specific
340 /// proxy first (e.g., `#[facet(xml::proxy = XmlProxy)]`), falling back to the format-agnostic
341 /// proxy if no format-specific one is found.
342 ///
343 /// Returns `Ok((self, true))` if a proxy was found and we've begun custom deserialization,
344 /// `Ok((self, false))` if not (self is returned unchanged).
345 pub fn begin_custom_deserialization_from_shape_with_format(
346 mut self,
347 format_namespace: Option<&str>,
348 ) -> Result<(Self, bool), ReflectError> {
349 use crate::typeplan::DeserStrategy;
350
351 let current_frame = self.frames().last().unwrap();
352 let target_shape = current_frame.allocated.shape();
353 trace!(
354 "begin_custom_deserialization_from_shape_with_format: target_shape={target_shape}, format={format_namespace:?}"
355 );
356
357 // Check that we have a ContainerProxy strategy
358 if !matches!(self.deser_strategy(), Some(DeserStrategy::ContainerProxy)) {
359 return Ok((self, false));
360 }
361
362 // Get the proxy_node from the precomputed proxy nodes, selecting by format
363 let Some(proxy_node) = self
364 .proxy_nodes()
365 .and_then(|p| p.node_for(format_namespace))
366 else {
367 return Ok((self, false));
368 };
369
370 // Use effective_proxy for format-aware resolution of the actual ProxyDef
371 let Some(proxy_def) = target_shape.effective_proxy(format_namespace) else {
372 return Ok((self, false));
373 };
374
375 let source_shape = proxy_def.shape;
376 let source_data = source_shape.allocate().map_err(|_| {
377 self.err(ReflectErrorKind::Unsized {
378 shape: target_shape,
379 operation: "Not a Sized type",
380 })
381 })?;
382 let source_size = source_shape
383 .layout
384 .sized_layout()
385 .expect("must be sized")
386 .size();
387
388 trace!(
389 "begin_custom_deserialization_from_shape_with_format: Creating frame for deserialization type {source_shape}"
390 );
391 // Use proxy_node - the TypePlan child node for the proxy type's structure.
392 // This is critical: using parent_type_plan would cause deser_strategy() to return
393 // ContainerProxy again, causing infinite recursion.
394 let mut new_frame = Frame::new(
395 source_data,
396 AllocatedShape::new(source_shape, source_size),
397 FrameOwnership::Owned,
398 proxy_node,
399 );
400 new_frame.using_custom_deserialization = true;
401 // Store the target shape's proxy in the frame so end() can use it for conversion
402 new_frame.shape_level_proxy = Some(proxy_def);
403 self.mode.stack_mut().push(new_frame);
404
405 Ok((self, true))
406 }
407
    /// Begin building the source shape for custom deserialization using field-level proxy,
    /// with support for format-specific proxy resolution.
    ///
    /// If `format_namespace` is provided (e.g., `Some("xml")`), looks for a format-specific
    /// proxy first (e.g., `#[facet(xml::proxy = XmlProxy)]`), falling back to the format-agnostic
    /// proxy if no format-specific one is found.
    ///
    /// This is the format-aware version of `begin_custom_deserialization`.
    ///
    /// On success, a frame for the proxy's source shape is pushed; `end()` on
    /// that frame runs the proxy conversion and stores the result into the field.
    ///
    /// # Errors
    ///
    /// Returns `OperationFailed` when no field is being processed, the field has
    /// no (format-matching) proxy, or no proxy node exists in the TypePlan, and
    /// `Unsized` when the proxy source shape cannot be allocated.
    pub fn begin_custom_deserialization_with_format(
        mut self,
        format_namespace: Option<&str>,
    ) -> Result<Self, ReflectError> {
        use crate::typeplan::DeserStrategy;

        let current_frame = self.frames().last().unwrap();
        let target_shape = current_frame.allocated.shape();
        trace!(
            "begin_custom_deserialization_with_format: target_shape={target_shape}, format={format_namespace:?}"
        );

        // Check that we have a FieldProxy strategy.
        // NOTE: when the strategy is NOT FieldProxy but the field DOES carry a
        // proxy, we deliberately fall through (no early return) and continue
        // below; the proxy_node lookup then determines whether it is usable.
        if !matches!(self.deser_strategy(), Some(DeserStrategy::FieldProxy)) {
            // No field proxy strategy - check the field directly for error message
            let Some(field) = self.parent_field() else {
                return Err(self.err(ReflectErrorKind::OperationFailed {
                    shape: target_shape,
                    operation: "not currently processing a field",
                }));
            };
            if field.effective_proxy(format_namespace).is_none() {
                return Err(self.err(ReflectErrorKind::OperationFailed {
                    shape: target_shape,
                    operation: "field does not have a proxy",
                }));
            }
        }

        // Get the proxy_node from the precomputed proxy nodes, selecting by format
        let Some(proxy_node) = self
            .proxy_nodes()
            .and_then(|p| p.node_for(format_namespace))
        else {
            return Err(self.err(ReflectErrorKind::OperationFailed {
                shape: target_shape,
                operation: "no proxy node found for format",
            }));
        };

        // Fetch the field again: the earlier lookup only ran on the
        // non-FieldProxy path above, so it cannot be reused here.
        let Some(field) = self.parent_field() else {
            return Err(self.err(ReflectErrorKind::OperationFailed {
                shape: target_shape,
                operation: "not currently processing a field",
            }));
        };

        trace!(
            "begin_custom_deserialization_with_format: field name={}",
            field.name
        );
        // Use effective_proxy for format-aware resolution
        let Some(proxy_def) = field.effective_proxy(format_namespace) else {
            return Err(self.err(ReflectErrorKind::OperationFailed {
                shape: target_shape,
                operation: "field does not have a proxy",
            }));
        };

        // Get the source shape from the proxy definition
        let source_shape = proxy_def.shape;
        let source_data = source_shape.allocate().map_err(|_| {
            self.err(ReflectErrorKind::Unsized {
                shape: target_shape,
                operation: "Not a Sized type",
            })
        })?;
        let source_size = source_shape
            .layout
            .sized_layout()
            .expect("must be sized")
            .size();

        trace!(
            "begin_custom_deserialization_with_format: Creating frame for deserialization type {source_shape}"
        );
        // Use proxy_node - the TypePlan child node for the proxy type's structure.
        // This is critical: using the parent's type_plan would cause deser_strategy()
        // to return FieldProxy again, causing infinite recursion.
        let mut new_frame = Frame::new(
            source_data,
            AllocatedShape::new(source_shape, source_size),
            FrameOwnership::Owned,
            proxy_node,
        );
        new_frame.using_custom_deserialization = true;
        // Store the proxy def so end() can use the correct convert_in function
        // This is important for format-specific proxies where field.proxy() would
        // return the wrong proxy.
        new_frame.shape_level_proxy = Some(proxy_def);
        self.mode.stack_mut().push(new_frame);

        Ok(self)
    }
510}