// shape_runtime/state_diff.rs
1//! Content-addressed state diffing for distributed Shape.
2//!
3//! Provides `diff(old, new)` and `patch(base, delta)` operations that
4//! compare values using content-hash trees. Only changed subtrees are
5//! included in the delta, enabling efficient state synchronization.
6
7use crate::hashing::HashDigest;
8use crate::type_schema::TypeSchemaRegistry;
9use sha2::{Digest, Sha256};
10use shape_value::NanTag;
11use shape_value::ValueWord;
12use std::collections::HashMap;
13use std::sync::Arc;
14
15// ---------------------------------------------------------------------------
16// Delta representation
17// ---------------------------------------------------------------------------
18
/// A delta between two values, keyed by content path.
///
/// Paths use dot-separated notation:
/// - `"field_name"` for top-level fields of a TypedObject
/// - `"field_name.nested"` for nested fields
/// - `"[0]"`, `"[1]"` for array indices
/// - `"frames.[0].locals.[2]"` for deeply nested paths
/// - `"."` for the root value itself (whole-value replacement; see
///   `patch_value`, which short-circuits on this key)
#[derive(Debug, Clone)]
pub struct Delta {
    /// Fields/paths that changed, mapped to their new values.
    pub changed: HashMap<String, ValueWord>,
    /// Paths that were removed (present in old, absent in new).
    pub removed: Vec<String>,
}
33
34impl Delta {
35    /// Create an empty delta (no changes).
36    pub fn empty() -> Self {
37        Self {
38            changed: HashMap::new(),
39            removed: Vec::new(),
40        }
41    }
42
43    /// True if this delta represents no change.
44    pub fn is_empty(&self) -> bool {
45        self.changed.is_empty() && self.removed.is_empty()
46    }
47
48    /// Number of changes (additions + modifications + removals).
49    pub fn change_count(&self) -> usize {
50        self.changed.len() + self.removed.len()
51    }
52}
53
54// ---------------------------------------------------------------------------
55// Value hashing
56// ---------------------------------------------------------------------------
57
58/// Compute a content hash for a ValueWord value.
59///
60/// Provides structural hashing that is deterministic across runs.
61/// For TypedObjects, fields are hashed in slot order. For arrays, each
62/// element is hashed. Primitives are hashed by their binary representation.
63pub fn content_hash_value(value: &ValueWord, schemas: &TypeSchemaRegistry) -> HashDigest {
64    let mut hasher = Sha256::new();
65    hash_value_into(&mut hasher, value, schemas);
66    let result = hasher.finalize();
67    let hex_str = result.iter().fold(String::with_capacity(64), |mut acc, b| {
68        use std::fmt::Write;
69        let _ = write!(acc, "{:02x}", b);
70        acc
71    });
72    HashDigest::from_hex(&hex_str)
73}
74
/// Fold `value`'s structural content into `hasher`.
///
/// Every variant is prefixed with a short ASCII tag (`b"f64:"`, `b"arr:"`,
/// ...) so values of different kinds cannot collide byte-for-byte.
/// Strings, arrays, and TypedObjects are hashed by content; the remaining
/// heap subtypes fall back to their raw bit pattern.
///
/// NOTE(review): the `Function`/`ModuleFunction`/`Ref` arms and the final
/// heap fallback hash `raw_bits()`, which looks identity/pointer-based
/// rather than content-based — two equal-content values at different
/// addresses would hash differently. Presumably intentional; confirm.
fn hash_value_into(hasher: &mut Sha256, value: &ValueWord, schemas: &TypeSchemaRegistry) {
    match value.tag() {
        NanTag::F64 => {
            hasher.update(b"f64:");
            // If extraction fails, only the tag prefix is hashed.
            if let Some(f) = value.as_f64() {
                hasher.update(f.to_le_bytes());
            }
        }
        NanTag::I48 => {
            hasher.update(b"i48:");
            if let Some(i) = value.as_i64() {
                hasher.update(i.to_le_bytes());
            }
        }
        NanTag::Bool => {
            hasher.update(b"bool:");
            if let Some(b) = value.as_bool() {
                hasher.update(if b { &[1u8] } else { &[0u8] });
            }
        }
        NanTag::None => {
            hasher.update(b"none");
        }
        NanTag::Unit => {
            hasher.update(b"unit");
        }
        NanTag::Function => {
            hasher.update(b"fn:");
            hasher.update(value.raw_bits().to_le_bytes());
        }
        NanTag::ModuleFunction => {
            hasher.update(b"modfn:");
            hasher.update(value.raw_bits().to_le_bytes());
        }
        NanTag::Ref => {
            hasher.update(b"ref:");
            hasher.update(value.raw_bits().to_le_bytes());
        }
        NanTag::Heap => {
            // Heap values: differentiate by content
            if let Some(s) = value.as_str() {
                // Length-prefixing prevents "ab"+"c" / "a"+"bc" ambiguity.
                hasher.update(b"str:");
                hasher.update((s.len() as u64).to_le_bytes());
                hasher.update(s.as_bytes());
            } else if let Some(view) = value.as_any_array() {
                hasher.update(b"arr:");
                hasher.update((view.len() as u64).to_le_bytes());
                let arr = view.to_generic();
                for elem in arr.iter() {
                    hash_value_into(hasher, elem, schemas);
                }
            } else if let Some((schema_id, slots, heap_mask)) = value.as_typed_object() {
                hasher.update(b"obj:");
                hasher.update(schema_id.to_le_bytes());
                // Slots are hashed in declaration order; heap_mask bit i
                // says whether slot i holds an owned heap value.
                for (i, slot) in slots.iter().enumerate() {
                    let is_heap = (heap_mask >> i) & 1 == 1;
                    if is_heap {
                        let nb = slot.as_heap_nb();
                        hash_value_into(hasher, &nb, schemas);
                    } else {
                        hasher.update(b"slot:");
                        hasher.update(slot.raw().to_le_bytes());
                    }
                }
            } else {
                // Other heap types (BigInt, Decimal, Closure, etc.)
                hasher.update(b"heap:");
                hasher.update(value.raw_bits().to_le_bytes());
            }
        }
    }
}
147
148// ---------------------------------------------------------------------------
149// Diffing
150// ---------------------------------------------------------------------------
151
152/// Compute the delta between two values.
153///
154/// For TypedObjects of the same schema, produces per-field diffs.
155/// For arrays of the same length, produces per-element diffs.
156/// For all other cases, treats the entire value as changed if different.
157pub fn diff_values(old: &ValueWord, new: &ValueWord, schemas: &TypeSchemaRegistry) -> Delta {
158    let mut delta = Delta::empty();
159    diff_recursive(old, new, "", schemas, &mut delta);
160    delta
161}
162
/// Join `prefix` and `suffix` with a dot; an empty prefix yields `suffix` alone.
fn make_path(prefix: &str, suffix: &str) -> String {
    if prefix.is_empty() {
        return suffix.to_owned();
    }
    let mut path = String::with_capacity(prefix.len() + 1 + suffix.len());
    path.push_str(prefix);
    path.push('.');
    path.push_str(suffix);
    path
}
170
/// Path used when an entire value changed: `"."` at the root, otherwise
/// the prefix itself.
fn root_path(prefix: &str) -> String {
    match prefix {
        "" => ".".to_string(),
        other => other.to_string(),
    }
}
178
179fn diff_recursive(
180    old: &ValueWord,
181    new: &ValueWord,
182    prefix: &str,
183    schemas: &TypeSchemaRegistry,
184    delta: &mut Delta,
185) {
186    // Fast path: identical raw bits means identical value
187    if old.raw_bits() == new.raw_bits() {
188        return;
189    }
190
191    // If tags differ, the whole subtree changed
192    if old.tag() != new.tag() {
193        delta.changed.insert(root_path(prefix), new.clone());
194        return;
195    }
196
197    match old.tag() {
198        NanTag::Heap => {
199            // Try typed object diff
200            if let (Some((old_sid, old_slots, old_hm)), Some((new_sid, new_slots, new_hm))) =
201                (old.as_typed_object(), new.as_typed_object())
202            {
203                if old_sid == new_sid {
204                    let schema = schemas.get_by_id(old_sid as u32);
205                    let min_len = old_slots.len().min(new_slots.len());
206
207                    for i in 0..min_len {
208                        let field_name = schema
209                            .and_then(|s| s.fields.get(i).map(|f| f.name.as_str()))
210                            .unwrap_or("?");
211                        let field_path = make_path(prefix, field_name);
212
213                        let old_is_heap = (old_hm >> i) & 1 == 1;
214                        let new_is_heap = (new_hm >> i) & 1 == 1;
215
216                        if old_is_heap && new_is_heap {
217                            let old_nb = old_slots[i].as_heap_nb();
218                            let new_nb = new_slots[i].as_heap_nb();
219                            diff_recursive(&old_nb, &new_nb, &field_path, schemas, delta);
220                        } else if old_slots[i].raw() != new_slots[i].raw()
221                            || old_is_heap != new_is_heap
222                        {
223                            // Slot raw bits differ or heap-ness changed
224                            if new_is_heap {
225                                delta.changed.insert(field_path, new_slots[i].as_heap_nb());
226                            } else {
227                                delta.changed.insert(field_path, unsafe {
228                                    ValueWord::clone_from_bits(new_slots[i].raw())
229                                });
230                            }
231                        }
232                    }
233
234                    // Extra new slots
235                    for i in old_slots.len()..new_slots.len() {
236                        let field_name = schema
237                            .and_then(|s| s.fields.get(i).map(|f| f.name.as_str()))
238                            .unwrap_or("?");
239                        let field_path = make_path(prefix, field_name);
240                        let is_heap = (new_hm >> i) & 1 == 1;
241                        if is_heap {
242                            delta.changed.insert(field_path, new_slots[i].as_heap_nb());
243                        } else {
244                            delta.changed.insert(field_path, unsafe {
245                                ValueWord::clone_from_bits(new_slots[i].raw())
246                            });
247                        }
248                    }
249
250                    // Removed slots
251                    for i in new_slots.len()..old_slots.len() {
252                        let field_name = schema
253                            .and_then(|s| s.fields.get(i).map(|f| f.name.as_str()))
254                            .unwrap_or("?");
255                        delta.removed.push(make_path(prefix, field_name));
256                    }
257                    return;
258                }
259                // Different schemas: whole value changed
260                delta.changed.insert(root_path(prefix), new.clone());
261                return;
262            }
263
264            // Try array diff
265            if let (Some(old_view), Some(new_view)) = (old.as_any_array(), new.as_any_array()) {
266                let old_arr = old_view.to_generic();
267                let new_arr = new_view.to_generic();
268                let min_len = old_arr.len().min(new_arr.len());
269
270                for i in 0..min_len {
271                    let idx_path = if prefix.is_empty() {
272                        format!("[{}]", i)
273                    } else {
274                        format!("{}.[{}]", prefix, i)
275                    };
276                    diff_recursive(&old_arr[i], &new_arr[i], &idx_path, schemas, delta);
277                }
278
279                for i in min_len..new_arr.len() {
280                    let idx_path = if prefix.is_empty() {
281                        format!("[{}]", i)
282                    } else {
283                        format!("{}.[{}]", prefix, i)
284                    };
285                    delta.changed.insert(idx_path, new_arr[i].clone());
286                }
287
288                for i in min_len..old_arr.len() {
289                    let idx_path = if prefix.is_empty() {
290                        format!("[{}]", i)
291                    } else {
292                        format!("{}.[{}]", prefix, i)
293                    };
294                    delta.removed.push(idx_path);
295                }
296                return;
297            }
298
299            // Try string diff
300            if let (Some(old_s), Some(new_s)) = (old.as_str(), new.as_str()) {
301                if old_s != new_s {
302                    delta.changed.insert(root_path(prefix), new.clone());
303                }
304                return;
305            }
306
307            // Different heap subtypes: whole value changed
308            delta.changed.insert(root_path(prefix), new.clone());
309        }
310
311        _ => {
312            // Primitive types: already checked raw bits above, so they differ
313            delta.changed.insert(root_path(prefix), new.clone());
314        }
315    }
316}
317
318// ---------------------------------------------------------------------------
319// Patching
320// ---------------------------------------------------------------------------
321
/// Apply a delta to a base value, producing the updated value.
///
/// For TypedObjects, patches individual fields by path.
/// For arrays, patches individual elements by index.
/// For root-level changes (path "."), replaces the entire value.
///
/// Paths that do not resolve (unknown field names, out-of-range checks
/// failing) are silently ignored. Direct removals on TypedObject fields
/// are no-ops: fields cannot be removed from a fixed schema.
pub fn patch_value(base: &ValueWord, delta: &Delta, schemas: &TypeSchemaRegistry) -> ValueWord {
    if delta.is_empty() {
        return base.clone();
    }

    // Root-level replacement short-circuits everything else.
    if let Some(root_val) = delta.changed.get(".") {
        return root_val.clone();
    }

    // Try to patch TypedObject fields
    if let Some((schema_id, slots, heap_mask)) = base.as_typed_object() {
        let schema = schemas.get_by_id(schema_id as u32);
        if let Some(schema) = schema {
            // Partition changed entries into direct (no dot) and nested
            // (dotted) groups; nested entries are re-rooted at the first
            // path segment and applied recursively below.
            let mut direct_changes: HashMap<String, ValueWord> = HashMap::new();
            let mut nested_changes: HashMap<String, Delta> = HashMap::new();

            for (path, value) in &delta.changed {
                if let Some(dot_pos) = path.find('.') {
                    let top = &path[..dot_pos];
                    let rest = &path[dot_pos + 1..];
                    nested_changes
                        .entry(top.to_string())
                        .or_insert_with(Delta::empty)
                        .changed
                        .insert(rest.to_string(), value.clone());
                } else {
                    direct_changes.insert(path.clone(), value.clone());
                }
            }

            // Similarly partition removed entries into direct and nested.
            // Note: direct removals for TypedObject fields are not currently
            // applied (fields can't be removed from a fixed schema), but we
            // still partition so nested removals are forwarded recursively.
            let mut _direct_removals: Vec<String> = Vec::new();
            let mut nested_removals: HashMap<String, Delta> = HashMap::new();

            for path in &delta.removed {
                if let Some(dot_pos) = path.find('.') {
                    let top = &path[..dot_pos];
                    let rest = &path[dot_pos + 1..];
                    nested_removals
                        .entry(top.to_string())
                        .or_insert_with(Delta::empty)
                        .removed
                        .push(rest.to_string());
                } else {
                    _direct_removals.push(path.clone());
                }
            }

            // Merge nested removals into nested_changes map so each top-level
            // field gets a single sub-delta carrying both kinds of edits.
            for (top, mut removal_delta) in nested_removals {
                let entry = nested_changes.entry(top).or_insert_with(Delta::empty);
                entry.removed.append(&mut removal_delta.removed);
            }

            // Clone all slots carefully: heap-backed slots need an owning
            // clone, plain slots are copied bit-for-bit.
            let mut new_slots: Vec<shape_value::ValueSlot> = Vec::with_capacity(slots.len());
            for (i, slot) in slots.iter().enumerate() {
                let is_heap = (heap_mask >> i) & 1 == 1;
                if is_heap {
                    new_slots.push(unsafe { slot.clone_heap() });
                } else {
                    new_slots.push(shape_value::ValueSlot::from_raw(slot.raw()));
                }
            }
            let mut new_heap_mask = heap_mask;

            // Apply direct field changes (paths with no '.' separator)
            for (path, new_val) in &direct_changes {
                if let Some(field_idx_u16) = schema.field_index(path) {
                    let field_idx = field_idx_u16 as usize;
                    if field_idx < new_slots.len() {
                        // Drop old heap slot if needed
                        if (new_heap_mask >> field_idx) & 1 == 1 {
                            unsafe {
                                new_slots[field_idx].drop_heap();
                            }
                        }

                        // NOTE(review): if none of the branches below match
                        // (e.g. new_val is None/Unit, or as_heap_ref returns
                        // None), the slot was already dropped above but the
                        // heap-mask bit stays set — looks like a potential
                        // dangling-slot hazard; confirm the value invariants.
                        if new_val.is_heap() {
                            if let Some(hv) = new_val.as_heap_ref() {
                                new_slots[field_idx] =
                                    shape_value::ValueSlot::from_heap(hv.clone());
                                new_heap_mask |= 1u64 << field_idx;
                            }
                        } else if let Some(f) = new_val.as_f64() {
                            new_slots[field_idx] = shape_value::ValueSlot::from_number(f);
                            new_heap_mask &= !(1u64 << field_idx);
                        } else if let Some(i) = new_val.as_i64() {
                            new_slots[field_idx] = shape_value::ValueSlot::from_int(i);
                            new_heap_mask &= !(1u64 << field_idx);
                        } else if let Some(b) = new_val.as_bool() {
                            new_slots[field_idx] = shape_value::ValueSlot::from_bool(b);
                            new_heap_mask &= !(1u64 << field_idx);
                        }
                    }
                }
            }

            // Apply nested field changes (dotted paths like "inner.field").
            // Only heap-backed slots can hold a nested structure; nested
            // paths targeting a non-heap slot are ignored.
            for (top_field, sub_delta) in &nested_changes {
                if let Some(field_idx_u16) = schema.field_index(top_field) {
                    let field_idx = field_idx_u16 as usize;
                    if field_idx < new_slots.len() {
                        // Extract the current value from the slot
                        let is_heap = (new_heap_mask >> field_idx) & 1 == 1;
                        if is_heap {
                            let current_val = new_slots[field_idx].as_heap_nb();
                            // Recursively patch the nested value
                            let patched = patch_value(&current_val, sub_delta, schemas);

                            // Drop the old heap slot
                            unsafe {
                                new_slots[field_idx].drop_heap();
                            }

                            // Write back the patched value (same tag-dispatch
                            // and mask bookkeeping as the direct case above).
                            if patched.is_heap() {
                                if let Some(hv) = patched.as_heap_ref() {
                                    new_slots[field_idx] =
                                        shape_value::ValueSlot::from_heap(hv.clone());
                                    new_heap_mask |= 1u64 << field_idx;
                                }
                            } else if let Some(f) = patched.as_f64() {
                                new_slots[field_idx] = shape_value::ValueSlot::from_number(f);
                                new_heap_mask &= !(1u64 << field_idx);
                            } else if let Some(i) = patched.as_i64() {
                                new_slots[field_idx] = shape_value::ValueSlot::from_int(i);
                                new_heap_mask &= !(1u64 << field_idx);
                            } else if let Some(b) = patched.as_bool() {
                                new_slots[field_idx] = shape_value::ValueSlot::from_bool(b);
                                new_heap_mask &= !(1u64 << field_idx);
                            }
                        }
                    }
                }
            }

            use shape_value::HeapValue;
            return ValueWord::from_heap_value(HeapValue::TypedObject {
                schema_id,
                slots: new_slots.into_boxed_slice(),
                heap_mask: new_heap_mask,
            });
        }
    }

    // Try to patch Array elements
    if let Some(view) = base.as_any_array() {
        let arr = view.to_generic();
        let mut new_arr: Vec<ValueWord> = arr.to_vec();

        // Process removals first (high to low to preserve indices)
        let mut removal_indices: Vec<usize> = delta
            .removed
            .iter()
            .filter_map(|path| parse_array_index(path))
            .collect();
        removal_indices.sort_unstable();
        removal_indices.reverse();
        for idx in removal_indices {
            if idx < new_arr.len() {
                new_arr.remove(idx);
            }
        }

        // Process changes; an index past the end extends the array,
        // back-filling any gap with `none` values.
        for (path, new_val) in &delta.changed {
            if let Some(idx) = parse_array_index(path) {
                if idx < new_arr.len() {
                    new_arr[idx] = new_val.clone();
                } else {
                    while new_arr.len() < idx {
                        new_arr.push(ValueWord::none());
                    }
                    new_arr.push(new_val.clone());
                }
            }
        }

        return ValueWord::from_array(Arc::new(new_arr));
    }

    // Cannot patch — return base unchanged
    base.clone()
}
517
/// Parse an array index from a path like "[3]" or "prefix.[3]".
///
/// Returns `None` when the final dot-separated segment is not of the
/// form `[N]` or `N` does not parse as a `usize`.
fn parse_array_index(path: &str) -> Option<usize> {
    // Only the last dot-separated segment can carry an index.
    let part = path.rsplit('.').next().unwrap_or(path);
    // strip_prefix/strip_suffix replace the manual starts_with/ends_with
    // checks plus slicing (clippy::manual_strip).
    part.strip_prefix('[')?
        .strip_suffix(']')?
        .parse()
        .ok()
}
527
528// ---------------------------------------------------------------------------
529// Tests
530// ---------------------------------------------------------------------------
531
532#[cfg(test)]
533mod tests {
534    use super::*;
535
536    #[test]
537    fn test_empty_delta() {
538        let delta = Delta::empty();
539        assert!(delta.is_empty());
540        assert_eq!(delta.change_count(), 0);
541    }
542
543    #[test]
544    fn test_diff_identical_primitives() {
545        let schemas = TypeSchemaRegistry::new();
546        let a = ValueWord::from_f64(42.0);
547        let b = ValueWord::from_f64(42.0);
548        let delta = diff_values(&a, &b, &schemas);
549        assert!(delta.is_empty());
550    }
551
552    #[test]
553    fn test_diff_different_primitives() {
554        let schemas = TypeSchemaRegistry::new();
555        let a = ValueWord::from_f64(42.0);
556        let b = ValueWord::from_f64(99.0);
557        let delta = diff_values(&a, &b, &schemas);
558        assert!(!delta.is_empty());
559        assert_eq!(delta.change_count(), 1);
560        assert!(delta.changed.contains_key("."));
561    }
562
563    #[test]
564    fn test_diff_arrays_same() {
565        let schemas = TypeSchemaRegistry::new();
566        let a = ValueWord::from_array(Arc::new(vec![
567            ValueWord::from_f64(1.0),
568            ValueWord::from_f64(2.0),
569        ]));
570        let b = ValueWord::from_array(Arc::new(vec![
571            ValueWord::from_f64(1.0),
572            ValueWord::from_f64(2.0),
573        ]));
574        let delta = diff_values(&a, &b, &schemas);
575        // Different Arc pointers so raw bits differ, but elements match
576        assert!(delta.is_empty());
577    }
578
579    #[test]
580    fn test_diff_arrays_element_changed() {
581        let schemas = TypeSchemaRegistry::new();
582        let a = ValueWord::from_array(Arc::new(vec![
583            ValueWord::from_f64(1.0),
584            ValueWord::from_f64(2.0),
585        ]));
586        let b = ValueWord::from_array(Arc::new(vec![
587            ValueWord::from_f64(1.0),
588            ValueWord::from_f64(99.0),
589        ]));
590        let delta = diff_values(&a, &b, &schemas);
591        assert_eq!(delta.change_count(), 1);
592        assert!(delta.changed.contains_key("[1]"));
593    }
594
595    #[test]
596    fn test_diff_arrays_element_added() {
597        let schemas = TypeSchemaRegistry::new();
598        let a = ValueWord::from_array(Arc::new(vec![ValueWord::from_f64(1.0)]));
599        let b = ValueWord::from_array(Arc::new(vec![
600            ValueWord::from_f64(1.0),
601            ValueWord::from_f64(2.0),
602        ]));
603        let delta = diff_values(&a, &b, &schemas);
604        assert_eq!(delta.changed.len(), 1);
605        assert!(delta.changed.contains_key("[1]"));
606    }
607
608    #[test]
609    fn test_diff_arrays_element_removed() {
610        let schemas = TypeSchemaRegistry::new();
611        let a = ValueWord::from_array(Arc::new(vec![
612            ValueWord::from_f64(1.0),
613            ValueWord::from_f64(2.0),
614        ]));
615        let b = ValueWord::from_array(Arc::new(vec![ValueWord::from_f64(1.0)]));
616        let delta = diff_values(&a, &b, &schemas);
617        assert_eq!(delta.removed.len(), 1);
618        assert!(delta.removed.contains(&"[1]".to_string()));
619    }
620
621    #[test]
622    fn test_patch_root_replacement() {
623        let schemas = TypeSchemaRegistry::new();
624        let base = ValueWord::from_f64(42.0);
625        let mut delta = Delta::empty();
626        delta
627            .changed
628            .insert(".".to_string(), ValueWord::from_f64(99.0));
629
630        let result = patch_value(&base, &delta, &schemas);
631        assert_eq!(result.as_f64(), Some(99.0));
632    }
633
634    #[test]
635    fn test_patch_array_element() {
636        let schemas = TypeSchemaRegistry::new();
637        let base = ValueWord::from_array(Arc::new(vec![
638            ValueWord::from_f64(1.0),
639            ValueWord::from_f64(2.0),
640        ]));
641        let mut delta = Delta::empty();
642        delta
643            .changed
644            .insert("[1]".to_string(), ValueWord::from_f64(99.0));
645
646        let result = patch_value(&base, &delta, &schemas);
647        let arr = result.as_any_array().unwrap().to_generic();
648        assert_eq!(arr[0].as_f64(), Some(1.0));
649        assert_eq!(arr[1].as_f64(), Some(99.0));
650    }
651
652    #[test]
653    fn test_parse_array_index() {
654        assert_eq!(parse_array_index("[0]"), Some(0));
655        assert_eq!(parse_array_index("[42]"), Some(42));
656        assert_eq!(parse_array_index("prefix.[3]"), Some(3));
657        assert_eq!(parse_array_index("notindex"), None);
658    }
659
660    #[test]
661    fn test_content_hash_deterministic() {
662        let schemas = TypeSchemaRegistry::new();
663        let v1 = ValueWord::from_f64(42.0);
664        let v2 = ValueWord::from_f64(42.0);
665        assert_eq!(
666            content_hash_value(&v1, &schemas),
667            content_hash_value(&v2, &schemas)
668        );
669    }
670
671    #[test]
672    fn test_content_hash_different() {
673        let schemas = TypeSchemaRegistry::new();
674        let v1 = ValueWord::from_f64(42.0);
675        let v2 = ValueWord::from_f64(99.0);
676        assert_ne!(
677            content_hash_value(&v1, &schemas),
678            content_hash_value(&v2, &schemas)
679        );
680    }
681
    // End-to-end round trip: diff two nested TypedObjects (only inner.y
    // differs), then patch the old one with the delta and verify every
    // field — changed and unchanged — in the result.
    #[test]
    fn test_nested_typed_object_diff_and_patch() {
        use crate::type_schema::TypeSchemaBuilder;
        use shape_value::{HeapValue, ValueSlot};

        let mut schemas = TypeSchemaRegistry::new();

        // Register inner type: Inner { x: f64, y: f64 }
        let inner_id = TypeSchemaBuilder::new("Inner")
            .f64_field("x")
            .f64_field("y")
            .register(&mut schemas);

        // Register outer type: Outer { name: string, inner: Inner, score: f64 }
        let outer_id = TypeSchemaBuilder::new("Outer")
            .string_field("name")
            .object_field("inner", "Inner")
            .f64_field("score")
            .register(&mut schemas);

        // Build inner objects
        let inner_old = ValueWord::from_heap_value(HeapValue::TypedObject {
            schema_id: inner_id as u64,
            slots: vec![
                ValueSlot::from_number(1.0), // x = 1.0
                ValueSlot::from_number(2.0), // y = 2.0
            ]
            .into_boxed_slice(),
            heap_mask: 0, // both slots are plain numbers
        });

        let inner_new = ValueWord::from_heap_value(HeapValue::TypedObject {
            schema_id: inner_id as u64,
            slots: vec![
                ValueSlot::from_number(1.0),  // x = 1.0 (unchanged)
                ValueSlot::from_number(99.0), // y = 99.0 (changed)
            ]
            .into_boxed_slice(),
            heap_mask: 0,
        });

        // Build outer objects
        let name_val = Arc::new("test".to_string());
        let old_outer = ValueWord::from_heap_value(HeapValue::TypedObject {
            schema_id: outer_id as u64,
            slots: vec![
                ValueSlot::from_heap(HeapValue::String(name_val.clone())), // name
                ValueSlot::from_heap(inner_old.as_heap_ref().unwrap().clone()), // inner
                ValueSlot::from_number(10.0),                              // score
            ]
            .into_boxed_slice(),
            heap_mask: 0b011, // slots 0 and 1 are heap
        });

        let new_outer = ValueWord::from_heap_value(HeapValue::TypedObject {
            schema_id: outer_id as u64,
            slots: vec![
                ValueSlot::from_heap(HeapValue::String(name_val.clone())), // name (same)
                ValueSlot::from_heap(inner_new.as_heap_ref().unwrap().clone()), // inner (y changed)
                ValueSlot::from_number(10.0),                              // score (same)
            ]
            .into_boxed_slice(),
            heap_mask: 0b011,
        });

        // Diff should produce a dotted path "inner.y"
        let delta = diff_values(&old_outer, &new_outer, &schemas);
        assert!(!delta.is_empty(), "delta should not be empty");
        assert!(
            delta.changed.contains_key("inner.y"),
            "delta should contain 'inner.y', got keys: {:?}",
            delta.changed.keys().collect::<Vec<_>>()
        );
        assert_eq!(delta.change_count(), 1, "only inner.y should have changed");

        // Patch should correctly apply the nested change
        let patched = patch_value(&old_outer, &delta, &schemas);

        // Verify the patched outer object
        let (patched_sid, patched_slots, patched_hm) = patched
            .as_typed_object()
            .expect("patched should be a TypedObject");
        assert_eq!(patched_sid, outer_id as u64);

        // name should be unchanged
        assert_eq!(patched_hm & 1, 1, "slot 0 should be heap");
        let patched_name = patched_slots[0].as_heap_nb();
        assert_eq!(patched_name.as_str().unwrap(), "test");

        // score should be unchanged (non-heap slot: raw bits are the f64 bits)
        assert_eq!(
            f64::from_bits(patched_slots[2].raw()),
            10.0,
            "score should be 10.0"
        );

        // inner should have y=99.0 and x=1.0
        assert_eq!((patched_hm >> 1) & 1, 1, "slot 1 should be heap");
        let patched_inner = patched_slots[1].as_heap_nb();
        let (inner_sid, inner_slots, _inner_hm) = patched_inner
            .as_typed_object()
            .expect("inner should be a TypedObject");
        assert_eq!(inner_sid, inner_id as u64);
        assert_eq!(
            f64::from_bits(inner_slots[0].raw()),
            1.0,
            "inner.x should be 1.0"
        );
        assert_eq!(
            f64::from_bits(inner_slots[1].raw()),
            99.0,
            "inner.y should be 99.0"
        );
    }
796
797    #[test]
798    fn test_patch_direct_fields_still_work() {
799        use crate::type_schema::TypeSchemaBuilder;
800        use shape_value::{HeapValue, ValueSlot};
801
802        let mut schemas = TypeSchemaRegistry::new();
803
804        let schema_id = TypeSchemaBuilder::new("Simple")
805            .f64_field("a")
806            .f64_field("b")
807            .register(&mut schemas);
808
809        let base = ValueWord::from_heap_value(HeapValue::TypedObject {
810            schema_id: schema_id as u64,
811            slots: vec![ValueSlot::from_number(1.0), ValueSlot::from_number(2.0)]
812                .into_boxed_slice(),
813            heap_mask: 0,
814        });
815
816        // Direct field patch (no dots)
817        let mut delta = Delta::empty();
818        delta
819            .changed
820            .insert("b".to_string(), ValueWord::from_f64(42.0));
821
822        let patched = patch_value(&base, &delta, &schemas);
823        let (_sid, slots, _hm) = patched.as_typed_object().unwrap();
824        assert_eq!(f64::from_bits(slots[0].raw()), 1.0, "a unchanged");
825        assert_eq!(f64::from_bits(slots[1].raw()), 42.0, "b patched to 42.0");
826    }
827
828    #[test]
829    fn test_nested_patch_mixed_direct_and_dotted() {
830        use crate::type_schema::TypeSchemaBuilder;
831        use shape_value::{HeapValue, ValueSlot};
832
833        let mut schemas = TypeSchemaRegistry::new();
834
835        let inner_id = TypeSchemaBuilder::new("MixedInner")
836            .f64_field("val")
837            .register(&mut schemas);
838
839        let outer_id = TypeSchemaBuilder::new("MixedOuter")
840            .f64_field("score")
841            .object_field("nested", "MixedInner")
842            .register(&mut schemas);
843
844        let inner_obj = ValueWord::from_heap_value(HeapValue::TypedObject {
845            schema_id: inner_id as u64,
846            slots: vec![ValueSlot::from_number(5.0)].into_boxed_slice(),
847            heap_mask: 0,
848        });
849
850        let base = ValueWord::from_heap_value(HeapValue::TypedObject {
851            schema_id: outer_id as u64,
852            slots: vec![
853                ValueSlot::from_number(100.0),
854                ValueSlot::from_heap(inner_obj.as_heap_ref().unwrap().clone()),
855            ]
856            .into_boxed_slice(),
857            heap_mask: 0b10, // slot 1 is heap
858        });
859
860        // Delta with both a direct change and a nested dotted change
861        let mut delta = Delta::empty();
862        delta
863            .changed
864            .insert("score".to_string(), ValueWord::from_f64(200.0));
865        delta
866            .changed
867            .insert("nested.val".to_string(), ValueWord::from_f64(77.0));
868
869        let patched = patch_value(&base, &delta, &schemas);
870        let (_sid, slots, hm) = patched.as_typed_object().unwrap();
871
872        // Direct field should be patched
873        assert_eq!(
874            f64::from_bits(slots[0].raw()),
875            200.0,
876            "score should be 200.0"
877        );
878
879        // Nested field should be patched
880        assert_eq!((hm >> 1) & 1, 1, "slot 1 should be heap");
881        let patched_inner = slots[1].as_heap_nb();
882        let (_inner_sid, inner_slots, _) = patched_inner.as_typed_object().unwrap();
883        assert_eq!(
884            f64::from_bits(inner_slots[0].raw()),
885            77.0,
886            "nested.val should be 77.0"
887        );
888    }
889}