facet_reflect/partial/partial_api/option.rs
use super::*;
use crate::AllocatedShape;

////////////////////////////////////////////////////////////////////////////////////////////////////
// Option / inner
////////////////////////////////////////////////////////////////////////////////////////////////////
impl<const BORROW: bool> Partial<'_, BORROW> {
    /// Begin building the Some variant of an Option.
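    ///
    /// A minimal usage sketch (assumed caller code: `alloc` and `set` are used here as
    /// generic Partial-builder calls and may differ from the crate's exact API):
    ///
    /// ```ignore
    /// // Build Some(42u32) into an Option<u32>.
    /// let partial = Partial::alloc::<Option<u32>>()?
    ///     .begin_some()? // push a frame for the inner u32
    ///     .set(42u32)?   // initialize the inner value
    ///     .end()?;       // pop the frame; the Option now holds Some(42)
    /// ```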
    pub fn begin_some(mut self) -> Result<Self, ReflectError> {
        // Verify we're working with an Option and get the def
        let option_def = {
            let frame = self.frames().last().unwrap();
            match frame.allocated.shape().def {
                Def::Option(def) => def,
                _ => {
                    return Err(ReflectError::WasNotA {
                        expected: "Option",
                        actual: frame.allocated.shape(),
                    });
                }
            }
        };

        // Check if we need to handle re-initialization.
        // For Options, also check if tracker is Option{building_inner:false} which means
        // a previous begin_some/end cycle completed.
        //
        // IMPORTANT: For Option<Vec<T>> and similar accumulator types, we do NOT want to
        // reinitialize when re-entering, as this would destroy the existing Vec.
        // This can happen with TOML array-of-tables which emit multiple FieldKey events
        // for the same field.
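        //
        // For example (assumed input), a TOML document with two `[[items]]` tables targeting a
        // field of type Option<Vec<Item>> reaches begin_some() a second time after the first
        // table has been built; that second call must re-enter the existing Vec rather than
        // reinitialize it.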
        let needs_reinit = {
            let frame = self.frames().last().unwrap();

            // Check if this is a re-entry into an already-initialized Option.
            // After end() completes, the tracker is reset to Scalar, not Option{building_inner: false}.
            // So we check for Scalar tracker + is_init flag.
            if matches!(frame.tracker, Tracker::Scalar) && frame.is_init {
                // The Option was previously built and completed.
                // Check if the inner type can accumulate more values (like List, Map, DynamicValue)
                let inner_shape = option_def.t;
                let is_accumulator = matches!(
                    inner_shape.def,
                    Def::List(_) | Def::Map(_) | Def::Set(_) | Def::DynamicValue(_)
                );

                if is_accumulator {
                    // Don't reinitialize - we'll re-enter the existing inner value below
                    false
                } else {
                    // For scalars and other types, reinitialize as before
                    true
                }
            } else {
                frame.is_init
            }
        };

        // Check if we're re-entering an existing accumulator (like Option<Vec<T>>)
        let is_reentry = {
            let frame = self.frames().last().unwrap();
            matches!(frame.tracker, Tracker::Scalar) && frame.is_init && !needs_reinit
        };

        if needs_reinit {
            self.prepare_for_reinitialization();
        }

        // In deferred mode, push "Some" onto the path to distinguish
        // Option<T> (path ends before "Some") from the inner T (path includes "Some").
        // This treats Option like an enum with Some/None variants for path tracking.
        if let FrameMode::Deferred {
            stack,
            start_depth,
            current_path,
            stored_frames,
            ..
        } = &mut self.mode
        {
            let relative_depth = stack.len() - *start_depth;
            let should_track = current_path.len() == relative_depth;

            if should_track {
                current_path.push("Some");

                // Check if we have a stored frame for this path (re-entry case)
                if let Some(stored_frame) = stored_frames.remove(current_path) {
                    trace!("begin_some: Restoring stored frame for path {current_path:?}");

                    // Update tracker to indicate we're building the inner value
                    let frame = stack.last_mut().unwrap();
                    frame.tracker = Tracker::Option {
                        building_inner: true,
                    };

                    stack.push(stored_frame);
                    return Ok(self);
                }
            }
        }

        // Set tracker to indicate we're building the inner value
        let frame = self.frames_mut().last_mut().unwrap();
        frame.tracker = Tracker::Option {
            building_inner: true,
        };

        // Get the inner type shape
        let inner_shape = option_def.t;

        // Get the inner layout (needed for AllocatedShape later)
        let inner_layout =
            inner_shape
                .layout
                .sized_layout()
                .map_err(|_| ReflectError::Unsized {
                    shape: inner_shape,
                    operation: "begin_some, getting inner layout",
                })?;

        // If we're re-entering an existing accumulator, get a pointer to the existing inner value
        // instead of allocating new memory
        let inner_data = if is_reentry {
            // The Option is already initialized with Some(inner), so we need to get a pointer
            // to the existing inner value using the Option vtable's get_value function.
            let frame = self.frames().last().unwrap();

            // Get the Option's vtable which has a get_value function
            let option_vtable = match &frame.allocated.shape().def {
                Def::Option(opt_def) => opt_def.vtable,
                _ => unreachable!("Expected Option def"),
            };

            unsafe {
                // Use the vtable's get_value function to get a pointer to the inner T
                // get_value takes PtrConst and returns Option<PtrConst>
                let option_ptr = PtrConst::new(frame.data.as_byte_ptr());
                let inner_ptr_opt = (option_vtable.get_value)(option_ptr);
                let inner_ptr = inner_ptr_opt.expect("Option should be Some when re-entering");
                // Convert PtrConst to *mut for PtrUninit::new
                PtrUninit::new(inner_ptr.as_byte_ptr() as *mut u8)
            }
        } else {
            // Allocate memory for the inner value
            if inner_layout.size() == 0 {
                // For ZST, use a non-null but unallocated pointer
                PtrUninit::new(NonNull::<u8>::dangling().as_ptr())
            } else {
                // Allocate memory for the inner value
                let ptr = unsafe { ::alloc::alloc::alloc(inner_layout) };
                let Some(ptr) = NonNull::new(ptr) else {
                    ::alloc::alloc::handle_alloc_error(inner_layout);
                };
                PtrUninit::new(ptr.as_ptr())
            }
        };

        // Create a new frame for the inner value.
        // For re-entry, use BorrowedInPlace ownership, since the Option frame owns the memory.
        let mut inner_frame = Frame::new(
            inner_data,
            AllocatedShape::new(inner_shape, inner_layout.size()),
            if is_reentry {
                FrameOwnership::BorrowedInPlace
            } else {
                FrameOwnership::Owned
            },
        );

        // CRITICAL: For re-entry, mark the frame as already initialized so that begin_list()
        // doesn't reinitialize the Vec (which would clear it)
        if is_reentry {
            inner_frame.is_init = true;
        }

        self.frames_mut().push(inner_frame);

        Ok(self)
    }

    /// Begin building the inner value of a wrapper type.
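    ///
    /// A minimal usage sketch (assumed caller code: `UserId` is a hypothetical newtype
    /// wrapper, and `alloc`/`set` stand in for the crate's construction and setter calls):
    ///
    /// ```ignore
    /// // struct UserId(u64);
    /// let partial = Partial::alloc::<UserId>()?
    ///     .begin_inner()? // push a frame for the inner u64 (or the builder shape)
    ///     .set(42u64)?    // initialize the inner value
    ///     .end()?;        // convert/wrap the inner value back into UserId
    /// ```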
    pub fn begin_inner(mut self) -> Result<Self, ReflectError> {
        // Get the inner shape and check for try_from
        // Priority: builder_shape (for immutable collections) > inner (for variance/transparent wrappers)
        let (inner_shape, has_try_from, parent_shape, is_option) = {
            let frame = self.frames().last().unwrap();
            // Check builder_shape first (immutable collections like Bytes, Arc<[T]>)
            if let Some(builder_shape) = frame.allocated.shape().builder_shape {
                let has_try_from = frame.allocated.shape().vtable.has_try_from();
                let is_option = matches!(frame.allocated.shape().def, Def::Option(_));
                (
                    Some(builder_shape),
                    has_try_from,
                    frame.allocated.shape(),
                    is_option,
                )
            } else if let Some(inner_shape) = frame.allocated.shape().inner {
                let has_try_from = frame.allocated.shape().vtable.has_try_from();
                let is_option = matches!(frame.allocated.shape().def, Def::Option(_));
                (
                    Some(inner_shape),
                    has_try_from,
                    frame.allocated.shape(),
                    is_option,
                )
            } else {
                (None, false, frame.allocated.shape(), false)
            }
        };

        // Handle re-initialization if needed
        self.prepare_for_reinitialization();

        if let Some(inner_shape) = inner_shape {
            if has_try_from {
                // For Option types, use begin_some behavior to properly track building_inner
                // This ensures end() knows how to handle the popped frame
                if is_option {
                    return self.begin_some();
                }

                // Create a conversion frame with the inner shape
                // For non-Option types with try_from, we leave the parent tracker unchanged
                // and the conversion will happen in end()

                // Allocate memory for the inner value (conversion source)
                let inner_layout =
                    inner_shape
                        .layout
                        .sized_layout()
                        .map_err(|_| ReflectError::Unsized {
                            shape: inner_shape,
                            operation: "begin_inner, getting inner layout",
                        })?;

                let inner_data = if inner_layout.size() == 0 {
                    // For ZST, use a non-null but unallocated pointer
                    PtrUninit::new(NonNull::<u8>::dangling().as_ptr())
                } else {
                    // Allocate memory for the inner value
                    let ptr = unsafe { ::alloc::alloc::alloc(inner_layout) };
                    let Some(ptr) = NonNull::new(ptr) else {
                        ::alloc::alloc::handle_alloc_error(inner_layout);
                    };
                    PtrUninit::new(ptr.as_ptr())
                };

                // For conversion frames, we create a frame directly with the inner shape
                // This allows setting values of the inner type which will be converted
                // The automatic conversion detection in end() will handle the conversion
                trace!(
                    "begin_inner: Creating frame for inner type {inner_shape} (parent is {parent_shape})"
                );
                self.frames_mut().push(Frame::new(
                    inner_data,
                    AllocatedShape::new(inner_shape, inner_layout.size()),
                    FrameOwnership::Owned,
                ));

                Ok(self)
            } else {
                // For wrapper types without try_from, navigate to the first field
                // This is a common pattern for newtype wrappers
                trace!("begin_inner: No try_from for {parent_shape}, using field navigation");
                self.begin_nth_field(0)
            }
        } else {
            Err(ReflectError::OperationFailed {
                shape: parent_shape,
                operation: "type does not have an inner value",
            })
        }
    }

    /// Begin building the source shape for custom deserialization. Calling `end()` for this
    /// frame will call the `deserialize_with` function provided by the field and set the field
    /// using the result.
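    ///
    /// A minimal usage sketch (assumed caller code: the `Config` struct, its proxied `port`
    /// field, and the `alloc`/`set` calls are illustrative only):
    ///
    /// ```ignore
    /// // struct Config { port: Port } // `port` declared with a field-level proxy
    /// let partial = Partial::alloc::<Config>()?
    ///     .begin_nth_field(0)?             // enter the `port` field
    ///     .begin_custom_deserialization()? // push a frame typed as the proxy's source shape
    ///     .set("8080".to_string())?        // fill in the source value
    ///     .end()?                          // runs deserialize_with and stores the result in `port`
    ///     .end()?;
    /// ```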
    pub fn begin_custom_deserialization(mut self) -> Result<Self, ReflectError> {
        let current_frame = self.frames().last().unwrap();
        let target_shape = current_frame.allocated.shape();
        trace!("begin_custom_deserialization: target_shape={target_shape}");
        if let Some(field) = self.parent_field() {
            trace!("begin_custom_deserialization: field name={}", field.name);
            if let Some(proxy_def) = field.proxy() {
                // Get the source shape from the proxy definition
                let source_shape = proxy_def.shape;
                let source_data = source_shape.allocate().map_err(|_| ReflectError::Unsized {
                    shape: target_shape,
                    operation: "Not a Sized type",
                })?;
                let source_size = source_shape
                    .layout
                    .sized_layout()
                    .expect("must be sized")
                    .size();

                trace!(
                    "begin_custom_deserialization: Creating frame for deserialization type {source_shape}"
                );
                let mut new_frame = Frame::new(
                    source_data,
                    AllocatedShape::new(source_shape, source_size),
                    FrameOwnership::Owned,
                );
                new_frame.using_custom_deserialization = true;
                self.frames_mut().push(new_frame);

                Ok(self)
            } else {
                Err(ReflectError::OperationFailed {
                    shape: target_shape,
                    operation: "field does not have a proxy definition",
                })
            }
        } else {
            Err(ReflectError::OperationFailed {
                shape: target_shape,
                operation: "not currently processing a field",
            })
        }
    }

    /// Begin building the source shape for custom deserialization using a container-level proxy.
    ///
    /// Unlike `begin_custom_deserialization`, which uses field-level proxy info, this method
    /// uses the shape's own proxy definition (from `#[facet(proxy = ...)]` at the container level).
    ///
    /// Returns `Ok((self, true))` if the shape has a container-level proxy and we've begun
    /// custom deserialization, or `Ok((self, false))` if not (self is returned unchanged).
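    ///
    /// A minimal usage sketch (assumed caller code: `Timestamp` and the `alloc`/`set` calls
    /// are illustrative only):
    ///
    /// ```ignore
    /// // Timestamp carries a container-level #[facet(proxy = ...)] attribute.
    /// let partial = Partial::alloc::<Timestamp>()?;
    /// let (partial, started) = partial.begin_custom_deserialization_from_shape()?;
    /// let partial = if started {
    ///     // A frame typed as the proxy's source shape is now on top of the stack.
    ///     partial.set("2024-01-01T00:00:00Z".to_string())?.end()?
    /// } else {
    ///     // No container-level proxy; build the value normally.
    ///     partial
    /// };
    /// ```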
    pub fn begin_custom_deserialization_from_shape(mut self) -> Result<(Self, bool), ReflectError> {
        let current_frame = self.frames().last().unwrap();
        let target_shape = current_frame.allocated.shape();
        trace!("begin_custom_deserialization_from_shape: target_shape={target_shape}");

        let Some(proxy_def) = target_shape.proxy else {
            return Ok((self, false));
        };

        let source_shape = proxy_def.shape;
        let source_data = source_shape.allocate().map_err(|_| ReflectError::Unsized {
            shape: target_shape,
            operation: "Not a Sized type",
        })?;
        let source_size = source_shape
            .layout
            .sized_layout()
            .expect("must be sized")
            .size();

        trace!(
            "begin_custom_deserialization_from_shape: Creating frame for deserialization type {source_shape}"
        );
        let mut new_frame = Frame::new(
            source_data,
            AllocatedShape::new(source_shape, source_size),
            FrameOwnership::Owned,
        );
        new_frame.using_custom_deserialization = true;
        // Store the target shape's proxy in the frame so end() can use it for conversion
        new_frame.shape_level_proxy = Some(proxy_def);
        self.frames_mut().push(new_frame);

        Ok((self, true))
    }
}