radix_engine/kernel/call_frame.rs

use crate::internal_prelude::*;
use crate::kernel::kernel_api::DroppedNode;
use crate::kernel::kernel_callback_api::CallFrameReferences;
use crate::kernel::substate_io::{
    IOAccessHandler, SubstateDevice, SubstateIO, SubstateReadHandler,
};
use crate::track::interface::{CallbackError, CommitableSubstateStore, IOAccess, NodeSubstates};
use radix_engine_interface::api::field_api::LockFlags;
use radix_engine_interface::types::{NodeId, SubstateHandle, SubstateKey};
use radix_substate_store_interface::db_key_mapper::SubstateKeyContent;

use super::heap::{Heap, HeapRemovePartitionError};

/// A message used for communication between call frames.
///
/// Note that this is just an intent; it has not yet been checked/allowed by the kernel.
#[derive(Default, Debug, Clone, PartialEq, Eq)]
pub struct CallFrameMessage {
    /// Nodes to be moved from src to dest
    pub move_nodes: Vec<NodeId>,

    /// Global references to be copied from src to dest
    pub copy_global_references: Vec<NodeId>,

    /// Direct access references to be copied from src to dest
    pub copy_direct_access_references: Vec<NodeId>,

    /// Nodes for which a "stable" transient reference is to be created in dest from src.
    /// The src node may have global or borrowed visibility.
    /// TODO: Cleanup abstraction (perhaps by adding another type of visibility)
    pub copy_stable_transient_references: Vec<NodeId>,
}

impl CallFrameMessage {
    pub fn from_input<C: CallFrameReferences>(value: &IndexedScryptoValue, references: &C) -> Self {
        let mut copy_global_references = Vec::new();
        let mut copy_direct_access_references = Vec::new();

        for arg_ref in value.references().clone() {
            if arg_ref.is_global() {
                copy_global_references.push(arg_ref);
            } else {
                copy_direct_access_references.push(arg_ref);
            }
        }

        copy_global_references.extend(references.global_references());
        copy_direct_access_references.extend(references.direct_access_references());

        Self {
            move_nodes: value.owned_nodes().clone(),
            copy_global_references,
            copy_direct_access_references,
            copy_stable_transient_references: references.stable_transient_references(),
        }
    }

    pub fn from_output(value: &IndexedScryptoValue) -> Self {
        let mut copy_global_references = Vec::new();
        let mut copy_direct_access_references = Vec::new();

        for arg_ref in value.references().clone() {
            if arg_ref.is_global() {
                copy_global_references.push(arg_ref);
            } else {
                copy_direct_access_references.push(arg_ref);
            }
        }

        Self {
            move_nodes: value.owned_nodes().clone(),
            copy_global_references,
            copy_direct_access_references,
            copy_stable_transient_references: vec![],
        }
    }
}
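
// Illustrative sketch (not engine code): a message for a call that moves one
// owned node and copies one global reference could be assembled by hand as
// below. `bucket_node_id` and `resource_address` are hypothetical placeholders.
//
//     let message = CallFrameMessage {
//         move_nodes: vec![bucket_node_id],
//         copy_global_references: vec![resource_address.into_node_id()],
//         copy_direct_access_references: vec![],
//         copy_stable_transient_references: vec![],
//     };
//
// In practice the kernel derives messages from invocation input/output via
// `CallFrameMessage::from_input` and `CallFrameMessage::from_output` above.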

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct OpenedSubstate<L> {
    pub references: IndexSet<NodeId>,
    pub owned_nodes: IndexSet<NodeId>,
    pub ref_origin: ReferenceOrigin,
    pub global_substate_handle: u32,
    pub device: SubstateDevice,
    pub data: L,
}

impl<L> OpenedSubstate<L> {
    fn diff_on_close(&self) -> SubstateDiff {
        SubstateDiff {
            added_owns: index_set_new(),
            added_refs: index_set_new(),
            removed_owns: self.owned_nodes.clone(),
            removed_refs: self.references.clone(),
        }
    }

    fn diff(&self, updated_value: &IndexedScryptoValue) -> Result<SubstateDiff, SubstateDiffError> {
        // Process owned nodes
        let (added_owned_nodes, removed_owned_nodes) = {
            let mut added_owned_nodes: IndexSet<NodeId> = index_set_new();
            let mut new_owned_nodes: IndexSet<NodeId> = index_set_new();
            for own in updated_value.owned_nodes() {
                if !new_owned_nodes.insert(*own) {
                    return Err(SubstateDiffError::ContainsDuplicateOwns);
                }

                if !self.owned_nodes.contains(own) {
                    added_owned_nodes.insert(*own);
                }
            }

            let mut removed_owned_nodes: IndexSet<NodeId> = index_set_new();
            for own in &self.owned_nodes {
                if !new_owned_nodes.contains(own) {
                    removed_owned_nodes.insert(*own);
                }
            }

            (added_owned_nodes, removed_owned_nodes)
        };

        //====================
        // Process references
        //====================
        let (added_references, removed_references) = {
            // De-duplicate
            let updated_references: IndexSet<NodeId> =
                updated_value.references().clone().into_iter().collect();

            let mut added_references: IndexSet<NodeId> = index_set_new();
            for reference in &updated_references {
                let reference_is_new = !self.references.contains(reference);

                if reference_is_new {
                    added_references.insert(*reference);
                }
            }

            let mut removed_references: IndexSet<NodeId> = index_set_new();
            for old_ref in &self.references {
                if !updated_references.contains(old_ref) {
                    removed_references.insert(*old_ref);
                }
            }

            (added_references, removed_references)
        };

        Ok(SubstateDiff {
            added_owns: added_owned_nodes,
            removed_owns: removed_owned_nodes,
            added_refs: added_references,
            removed_refs: removed_references,
        })
    }
}
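
// Worked example (illustrative, not engine code): if an `OpenedSubstate`
// currently has owned_nodes = {A, B} and references = {X}, and the updated
// value owns {B, C} and references {Y}, then `diff` yields
// added_owns = {C}, removed_owns = {A}, added_refs = {Y}, removed_refs = {X}.
// A duplicate own in the updated value (e.g. {B, B}) is rejected with
// `SubstateDiffError::ContainsDuplicateOwns`.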

#[derive(Debug, Clone, PartialEq, Eq)]
struct SubstateDiff {
    added_owns: IndexSet<NodeId>,
    removed_owns: IndexSet<NodeId>,
    added_refs: IndexSet<NodeId>,
    removed_refs: IndexSet<NodeId>,
}

impl SubstateDiff {
    pub fn from_new_substate(
        substate_value: &IndexedScryptoValue,
    ) -> Result<Self, SubstateDiffError> {
        let mut added_owns = index_set_new();
        let mut added_refs = index_set_new();

        for own in substate_value.owned_nodes() {
            if !added_owns.insert(*own) {
                return Err(SubstateDiffError::ContainsDuplicateOwns);
            }
        }

        for reference in substate_value.references() {
            added_refs.insert(*reference);
        }

        Ok(Self {
            added_owns,
            added_refs,
            removed_owns: index_set_new(),
            removed_refs: index_set_new(),
        })
    }

    pub fn from_drop_substate(substate_value: &IndexedScryptoValue) -> Self {
        let mut removed_owns = index_set_new();
        let mut removed_refs = index_set_new();

        for own in substate_value.owned_nodes() {
            if !removed_owns.insert(*own) {
                panic!("Should never have been able to create duplicate owns");
            }
        }

        for reference in substate_value.references() {
            removed_refs.insert(*reference);
        }

        Self {
            added_owns: index_set_new(),
            added_refs: index_set_new(),
            removed_owns,
            removed_refs,
        }
    }
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum StableReferenceType {
    Global,
    DirectAccess,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub struct TransientReference {
    ref_count: usize,
    ref_origin: ReferenceOrigin,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum ReferenceOrigin {
    FrameOwned,
    Global(GlobalAddress),
    DirectlyAccessed,
    SubstateNonGlobalReference(SubstateDevice),
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum Visibility {
    StableReference(StableReferenceType),
    FrameOwned,
    Borrowed(ReferenceOrigin),
}

impl Visibility {
    pub fn is_direct_access(&self) -> bool {
        matches!(
            self,
            Self::StableReference(StableReferenceType::DirectAccess)
        )
    }

    pub fn is_normal(&self) -> bool {
        !self.is_direct_access()
    }
}

pub struct NodeVisibility(pub BTreeSet<Visibility>);

impl NodeVisibility {
    /// Note that the system may enforce further constraints on this.
    /// For instance, the system currently only allows access to substates of the actor,
    /// the actor's outer object, and any visible key value store.
    pub fn is_visible(&self) -> bool {
        !self.0.is_empty()
    }

    pub fn can_be_invoked(&self, direct_access: bool) -> bool {
        if direct_access {
            self.0.iter().any(|x| x.is_direct_access())
        } else {
            self.0.iter().any(|x| x.is_normal())
        }
    }

    pub fn can_be_referenced_in_substate(&self) -> bool {
        self.0.iter().any(|x| x.is_normal())
    }

    pub fn is_global(&self) -> bool {
        for v in &self.0 {
            if let Visibility::StableReference(StableReferenceType::Global) = v {
                return true;
            }
        }
        false
    }

    // TODO: Should we return Vec<ReferenceOrigin> rather than superseding global with a direct access reference?
    pub fn reference_origin(&self, node_id: NodeId) -> Option<ReferenceOrigin> {
        let mut found_direct_access = false;
        for v in &self.0 {
            match v {
                Visibility::StableReference(StableReferenceType::Global) => {
                    return Some(ReferenceOrigin::Global(GlobalAddress::new_or_panic(
                        node_id.0,
                    )));
                }
                Visibility::StableReference(StableReferenceType::DirectAccess) => {
                    found_direct_access = true
                }
                Visibility::Borrowed(ref_origin) => return Some(*ref_origin),
                Visibility::FrameOwned => {
                    return Some(ReferenceOrigin::FrameOwned);
                }
            }
        }

        if found_direct_access {
            return Some(ReferenceOrigin::DirectlyAccessed);
        }

        None
    }
}
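
// Illustrative summary of `reference_origin` (derived from the match above and
// the derived `Ord` on `Visibility`): a stable Global reference always wins and
// resolves to `ReferenceOrigin::Global`; frame ownership and borrows resolve to
// `FrameOwned` / the borrow's origin; `DirectlyAccessed` is only reported when
// the direct access reference is the node's sole visibility. For example:
//
//     // visibilities = {StableReference(DirectAccess), FrameOwned}
//     // => reference_origin(..) == Some(ReferenceOrigin::FrameOwned)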

/// Callback for IO access, from call frame
pub trait CallFrameIOAccessHandler<C, L, E> {
    fn on_io_access(
        &mut self,
        current_frame: &CallFrame<C, L>,
        heap: &Heap,
        io_access: IOAccess,
    ) -> Result<(), E>;
}

/// Callback for substate read, from call frame
pub trait CallFrameSubstateReadHandler<C, L> {
    type Error;

    fn on_read_substate(
        &mut self,
        current_frame: &CallFrame<C, L>,
        heap: &Heap,
        handle: SubstateHandle,
        value: &IndexedScryptoValue,
        device: SubstateDevice,
    ) -> Result<(), Self::Error>;
}

struct CallFrameToIOAccessAdapter<'g, C, L, E, H: CallFrameIOAccessHandler<C, L, E>> {
    handler: &'g mut H,
    call_frame: &'g mut CallFrame<C, L>,
    phantom: PhantomData<E>,
}

impl<'g, C, L, E, H: CallFrameIOAccessHandler<C, L, E>> IOAccessHandler<E>
    for CallFrameToIOAccessAdapter<'g, C, L, E, H>
{
    fn on_io_access(&mut self, heap: &Heap, io_access: IOAccess) -> Result<(), E> {
        self.handler.on_io_access(self.call_frame, heap, io_access)
    }
}

struct CallFrameToIOSubstateReadAdapter<'g, C, L, H: CallFrameSubstateReadHandler<C, L>> {
    handler: &'g mut H,
    call_frame: &'g CallFrame<C, L>,
    handle: SubstateHandle,
}

impl<'g, C, L, H: CallFrameSubstateReadHandler<C, L>> SubstateReadHandler
    for CallFrameToIOSubstateReadAdapter<'g, C, L, H>
{
    type Error = H::Error;

    fn on_read_substate(
        &mut self,
        heap: &Heap,
        value: &IndexedScryptoValue,
        location: SubstateDevice,
    ) -> Result<(), Self::Error> {
        self.handler
            .on_read_substate(self.call_frame, heap, self.handle, value, location)
    }
}

/// A call frame is the basic unit that forms a transaction call stack. It keeps track of the
/// nodes owned by, and the references held by, the currently executing function.
pub struct CallFrame<C, L> {
    /// The stack id.
    stack_id: usize,

    /// The frame depth
    depth: usize,

    /// Call frame system-layer data
    call_frame_data: C,

    /// Owned nodes, which by definition must live on the heap
    owned_root_nodes: IndexSet<NodeId>,

    /// References to non-GLOBAL nodes, obtained from substate loading and ref-counted.
    /// These references may NOT be passed between call frames as arguments.
    transient_references: NonIterMap<NodeId, TransientReference>,

    /// Stable references point to nodes in track, which can't be moved/deleted.
    /// There are currently two types: `GLOBAL` (root, stored) and `DirectAccess`.
    /// These references MAY be passed between call frames.
    stable_references: BTreeMap<NodeId, StableReferenceType>,

    next_handle: SubstateHandle,
    open_substates: IndexMap<SubstateHandle, OpenedSubstate<L>>,

    /// The set of nodes that are always globally visible.
    always_visible_global_nodes: &'static IndexSet<NodeId>,
}
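
// Typical lifecycle, as a rough sketch (not engine code; `substate_io`,
// `references`, `args`, `output` and the system data `data` are assumed to be
// supplied by the kernel driving the frames):
//
//     let mut root = CallFrame::new_root(init);
//     let message = CallFrameMessage::from_input(&args, &references);
//     let mut child =
//         CallFrame::new_child_from_parent(&substate_io, &mut root, data, message)?;
//     // ... child executes, opening/closing substates ...
//     let return_message = CallFrameMessage::from_output(&output);
//     CallFrame::pass_message(&substate_io, &mut child, &mut root, return_message)?;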

/// Represents an error when creating a new frame.
#[derive(Debug, Clone, PartialEq, Eq, ScryptoSbor)]
pub enum CreateFrameError {
    PassMessageError(PassMessageError),
}

/// Represents an error when passing message between frames.
#[derive(Debug, Clone, PartialEq, Eq, ScryptoSbor)]
pub enum PassMessageError {
    TakeNodeError(TakeNodeError),
    GlobalRefNotFound(error_models::ReferencedNodeId),
    DirectRefNotFound(error_models::ReferencedNodeId),
    TransientRefNotFound(error_models::ReferencedNodeId),
}

/// Represents an error when creating a node.
#[derive(Debug, Clone, PartialEq, Eq, ScryptoSbor)]
pub enum CreateNodeError {
    ProcessSubstateError(ProcessSubstateError),
    ProcessSubstateKeyError(ProcessSubstateKeyError),
    SubstateDiffError(SubstateDiffError),
}

/// Represents an error when dropping a node.
#[derive(Debug, Clone, PartialEq, Eq, ScryptoSbor)]
pub enum DropNodeError {
    TakeNodeError(TakeNodeError),
    NodeBorrowed(error_models::ReferencedNodeId),
    SubstateBorrowed(error_models::ReferencedNodeId),
    ProcessSubstateError(ProcessSubstateError),
}

/// Represents an error when persisting a node into store.
#[derive(Debug, Clone, PartialEq, Eq, ScryptoSbor)]
pub enum PersistNodeError {
    ContainsNonGlobalRef(error_models::ReferencedNodeId),
    NodeBorrowed(error_models::ReferencedNodeId),
    CannotPersistPinnedNode(error_models::OwnedNodeId),
}

/// Represents an error when taking a node from current frame.
#[derive(Debug, Clone, PartialEq, Eq, ScryptoSbor)]
pub enum TakeNodeError {
    OwnNotFound(error_models::OwnedNodeId),
    SubstateBorrowed(error_models::ReferencedNodeId),
}

/// Represents an error when moving modules from one node to another.
#[derive(Debug, Clone, PartialEq, Eq, ScryptoSbor)]
pub enum MovePartitionError {
    NodeNotAvailable(error_models::ReferencedNodeId),
    HeapRemovePartitionError(HeapRemovePartitionError),
    NonGlobalRefNotAllowed(error_models::ReferencedNodeId),
    PersistNodeError(PersistNodeError),
    SubstateBorrowed(error_models::ReferencedNodeId),
    MoveFromStoreNotPermitted,
}

#[derive(Debug, Clone, PartialEq, Eq, ScryptoSbor)]
pub enum PinNodeError {
    NodeNotVisible(error_models::ReferencedNodeId),
}

#[derive(Debug, Clone, PartialEq, Eq, ScryptoSbor)]
pub enum MarkTransientSubstateError {
    NodeNotVisible(error_models::ReferencedNodeId),
}

/// Represents an error when attempting to lock a substate.
#[derive(Debug, Clone, PartialEq, Eq, ScryptoSbor)]
pub enum OpenSubstateError {
    NodeNotVisible(error_models::ReferencedNodeId),
    SubstateFault,
    InvalidDefaultValue,
    ProcessSubstateKeyError(ProcessSubstateKeyError),
    SubstateLocked(error_models::OwnedNodeId, PartitionNumber, SubstateKey),
    LockUnmodifiedBaseOnHeapNode,
    LockUnmodifiedBaseOnNewSubstate(error_models::OwnedNodeId, PartitionNumber, SubstateKey),
    LockUnmodifiedBaseOnOnUpdatedSubstate(error_models::OwnedNodeId, PartitionNumber, SubstateKey),
}

/// Represents an error when reading substates.
#[derive(Debug, Clone, PartialEq, Eq, ScryptoSbor)]
pub enum ReadSubstateError {
    HandleNotFound(SubstateHandle),
}

/// Represents an error when writing substates.
#[derive(Debug, Clone, PartialEq, Eq, ScryptoSbor)]
pub enum WriteSubstateError {
    HandleNotFound(SubstateHandle),
    ProcessSubstateError(ProcessSubstateError),
    NoWritePermission,
    SubstateDiffError(SubstateDiffError),
}

/// Represents an error when dropping a substate lock.
#[derive(Debug, Clone, PartialEq, Eq, ScryptoSbor)]
pub enum CloseSubstateError {
    HandleNotFound(SubstateHandle),
    SubstateBorrowed(error_models::ReferencedNodeId),
}

#[derive(Debug, Clone, PartialEq, Eq, ScryptoSbor)]
pub enum CallFrameSetSubstateError {
    NodeNotVisible(error_models::ReferencedNodeId),
    SubstateLocked(error_models::OwnedNodeId, PartitionNumber, SubstateKey),
    ProcessSubstateKeyError(ProcessSubstateKeyError),
}

#[derive(Debug, Clone, PartialEq, Eq, ScryptoSbor)]
pub enum CallFrameRemoveSubstateError {
    NodeNotVisible(error_models::ReferencedNodeId),
    SubstateLocked(error_models::OwnedNodeId, PartitionNumber, SubstateKey),
    ProcessSubstateKeyError(ProcessSubstateKeyError),
}

#[derive(Debug, Clone, PartialEq, Eq, ScryptoSbor)]
pub enum CallFrameScanKeysError {
    NodeNotVisible(error_models::ReferencedNodeId),
    ProcessSubstateKeyError(ProcessSubstateKeyError),
}

#[derive(Debug, Clone, PartialEq, Eq, ScryptoSbor)]
pub enum CallFrameDrainSubstatesError {
    NodeNotVisible(error_models::ReferencedNodeId),
    NonGlobalRefNotSupported(error_models::ReferencedNodeId),
    ProcessSubstateKeyError(ProcessSubstateKeyError),
}

#[derive(Debug, Clone, PartialEq, Eq, ScryptoSbor)]
pub enum CallFrameScanSortedSubstatesError {
    NodeNotVisible(error_models::ReferencedNodeId),
    OwnedNodeNotSupported(error_models::OwnedNodeId),
    ProcessSubstateKeyError(ProcessSubstateKeyError),
}

#[derive(Debug, Clone, PartialEq, Eq, ScryptoSbor)]
pub enum ProcessSubstateKeyError {
    NodeNotVisible(error_models::ReferencedNodeId),
    DecodeError(DecodeError),
    OwnedNodeNotSupported,
    NonGlobalRefNotSupported,
}

#[derive(Debug, Clone, PartialEq, Eq, ScryptoSbor)]
pub enum ProcessSubstateError {
    TakeNodeError(TakeNodeError),
    CantDropNodeInStore(error_models::ReferencedNodeId),
    RefNotFound(error_models::ReferencedNodeId),
    RefCantBeAddedToSubstate(error_models::ReferencedNodeId),
    NonGlobalRefNotAllowed(error_models::ReferencedNodeId),
    PersistNodeError(PersistNodeError),
}

#[derive(Debug, Clone, PartialEq, Eq, ScryptoSbor)]
pub enum SubstateDiffError {
    ContainsDuplicateOwns,
}

#[derive(Debug)]
pub struct CallFrameInit<C> {
    pub data: C,
    pub global_addresses: IndexSet<GlobalAddress>,
    pub direct_accesses: IndexSet<InternalAddress>,
    pub always_visible_global_nodes: &'static IndexSet<NodeId>,
    pub stack_id: usize,
}

impl<C, L: Clone> CallFrame<C, L> {
    pub fn new_root(init: CallFrameInit<C>) -> Self {
        let mut call_frame = Self {
            stack_id: init.stack_id,
            depth: 0,
            call_frame_data: init.data,
            stable_references: Default::default(),
            transient_references: NonIterMap::new(),
            owned_root_nodes: index_set_new(),
            next_handle: 0u32,
            open_substates: index_map_new(),
            always_visible_global_nodes: init.always_visible_global_nodes,
        };

        for global_ref in init.global_addresses {
            call_frame.add_global_reference(global_ref);
        }
        for direct_access in init.direct_accesses {
            call_frame.add_direct_access_reference(direct_access);
        }

        call_frame
    }

    pub fn new_child_from_parent<S: CommitableSubstateStore>(
        substate_io: &SubstateIO<S>,
        parent: &mut CallFrame<C, L>,
        call_frame_data: C,
        message: CallFrameMessage,
    ) -> Result<Self, CreateFrameError> {
        let mut frame = Self {
            stack_id: parent.stack_id,
            depth: parent.depth + 1,
            call_frame_data,
            stable_references: Default::default(),
            transient_references: NonIterMap::new(),
            owned_root_nodes: index_set_new(),
            next_handle: 0u32,
            open_substates: index_map_new(),
            always_visible_global_nodes: parent.always_visible_global_nodes,
        };

        // Copy references and move nodes
        Self::pass_message(substate_io, parent, &mut frame, message)
            .map_err(CreateFrameError::PassMessageError)?;

        Ok(frame)
    }

    pub fn pass_message<S: CommitableSubstateStore>(
        substate_io: &SubstateIO<S>,
        from: &mut CallFrame<C, L>,
        to: &mut CallFrame<C, L>,
        message: CallFrameMessage,
    ) -> Result<(), PassMessageError> {
        for node_id in message.move_nodes {
            // Note that this has no impact on the `transient_references` because
            // we don't allow move of "locked nodes".
            from.take_node_internal(substate_io, &node_id)
                .map_err(PassMessageError::TakeNodeError)?;
            to.owned_root_nodes.insert(node_id);
        }

        // Only allow copy of `Global` and `DirectAccess` references
        for node_id in message.copy_global_references {
            if from.get_node_visibility(&node_id).is_global() {
                // Note that GLOBAL and DirectAccess references are mutually exclusive,
                // so okay to overwrite
                to.stable_references
                    .insert(node_id, StableReferenceType::Global);
            } else {
                return Err(PassMessageError::GlobalRefNotFound(node_id.into()));
            }
        }

        for node_id in message.copy_direct_access_references {
            if from.get_node_visibility(&node_id).can_be_invoked(true) {
                to.stable_references
                    .insert(node_id, StableReferenceType::DirectAccess);
            } else {
                return Err(PassMessageError::DirectRefNotFound(node_id.into()));
            }
        }

        for node_id in message.copy_stable_transient_references {
            if let Some(ref_origin) = from.get_node_visibility(&node_id).reference_origin(node_id) {
                to.transient_references
                    .entry(node_id)
                    .or_insert(TransientReference {
                        ref_count: 0usize,
                        ref_origin,
                    })
                    .ref_count
                    .add_assign(1);

                if let ReferenceOrigin::Global(global_address) = ref_origin {
                    to.stable_references
                        .insert(global_address.into_node_id(), StableReferenceType::Global);
                }
            } else {
                return Err(PassMessageError::TransientRefNotFound(node_id.into()));
            }
        }

        Ok(())
    }

    pub fn stack_id(&self) -> usize {
        self.stack_id
    }

    pub fn depth(&self) -> usize {
        self.depth
    }

    pub fn data(&self) -> &C {
        &self.call_frame_data
    }

    pub fn data_mut(&mut self) -> &mut C {
        &mut self.call_frame_data
    }

    pub fn pin_node<S: CommitableSubstateStore>(
        &mut self,
        substate_io: &mut SubstateIO<S>,
        node_id: NodeId,
    ) -> Result<(), PinNodeError> {
        // Get device
        let (_ref_origin, device) = self
            .get_node_ref(&node_id)
            .ok_or_else(|| PinNodeError::NodeNotVisible(node_id.into()))?;

        match device {
            SubstateDevice::Heap => {
                substate_io.pinned_to_heap.insert(node_id);
            }
            SubstateDevice::Store => {
                // Nodes in store are always pinned
            }
        }

        Ok(())
    }

    pub fn mark_substate_as_transient<S: CommitableSubstateStore>(
        &mut self,
        substate_io: &mut SubstateIO<S>,
        node_id: NodeId,
        partition_num: PartitionNumber,
        substate_key: SubstateKey,
    ) -> Result<(), MarkTransientSubstateError> {
        // Get device
        let (_ref_origin, device) = self
            .get_node_ref(&node_id)
            .ok_or_else(|| MarkTransientSubstateError::NodeNotVisible(node_id.into()))?;

        match device {
            SubstateDevice::Heap => {
                substate_io.heap_transient_substates.mark_as_transient(
                    node_id,
                    partition_num,
                    substate_key,
                );
            }
            SubstateDevice::Store => {
                substate_io
                    .store
                    .mark_as_transient(node_id, partition_num, substate_key);
            }
        }

        Ok(())
    }

    pub fn create_node<S: CommitableSubstateStore, E>(
        &mut self,
        substate_io: &mut SubstateIO<S>,
        handler: &mut impl CallFrameIOAccessHandler<C, L, E>,
        node_id: NodeId,
        node_substates: NodeSubstates,
    ) -> Result<(), CallbackError<CreateNodeError, E>> {
        // TODO: We need to protect transient blueprints from being globalized directly
        // into store. This isn't a problem for now since only native objects are allowed
        // to be transient.

        let destination_device = if node_id.is_global() {
            SubstateDevice::Store
        } else {
            SubstateDevice::Heap
        };

        for module in node_substates.values() {
            for (substate_key, substate_value) in module {
                self.process_input_substate_key(substate_key).map_err(|e| {
                    CallbackError::Error(CreateNodeError::ProcessSubstateKeyError(e))
                })?;
                let diff = SubstateDiff::from_new_substate(substate_value)
                    .map_err(|e| CallbackError::Error(CreateNodeError::SubstateDiffError(e)))?;

                self.process_substate_diff(substate_io, handler, destination_device, &diff)
                    .map_err(|e| e.map(CreateNodeError::ProcessSubstateError))?;
            }
        }

        match destination_device {
            SubstateDevice::Store => {
                self.stable_references
                    .insert(node_id, StableReferenceType::Global);
            }
            SubstateDevice::Heap => {
                self.owned_root_nodes.insert(node_id);
            }
        }

        let mut adapter = CallFrameToIOAccessAdapter {
            call_frame: self,
            handler,
            phantom: PhantomData,
        };

        substate_io.create_node(destination_device, node_id, node_substates, &mut adapter)?;

        Ok(())
    }

    /// Removes a node from the call frame; any nodes owned by the dropped node are
    /// taken over (possessed) by this call frame.
    pub fn drop_node<E, S: CommitableSubstateStore>(
        &mut self,
        substate_io: &mut SubstateIO<S>,
        node_id: &NodeId,
        handler: &mut impl CallFrameIOAccessHandler<C, L, E>,
    ) -> Result<DroppedNode, CallbackError<DropNodeError, E>> {
        self.take_node_internal(substate_io, node_id)
            .map_err(|e| CallbackError::Error(DropNodeError::TakeNodeError(e)))?;

        let mut adapter = CallFrameToIOAccessAdapter {
            call_frame: self,
            handler,
            phantom: PhantomData,
        };
        let substates = substate_io
            .drop_node(SubstateDevice::Heap, node_id, &mut adapter)
            .map_err(|e| match e {
                CallbackError::Error(e) => CallbackError::Error(e),
                CallbackError::CallbackError(e) => CallbackError::CallbackError(e),
            })?;
        for module in substates.values() {
            for substate_value in module.values() {
                let diff = SubstateDiff::from_drop_substate(substate_value);
                adapter
                    .call_frame
                    .process_substate_diff(
                        substate_io,
                        adapter.handler,
                        SubstateDevice::Heap,
                        &diff,
                    )
                    .map_err(|e| match e {
                        CallbackError::Error(e) => {
                            CallbackError::Error(DropNodeError::ProcessSubstateError(e))
                        }
                        CallbackError::CallbackError(e) => CallbackError::CallbackError(e),
                    })?;
            }
        }

        let pinned_to_heap = substate_io.pinned_to_heap.remove(node_id);

        Ok(DroppedNode {
            substates,
            pinned_to_heap,
        })
    }

    pub fn move_partition<S: CommitableSubstateStore, E>(
        &mut self,
        substate_io: &mut SubstateIO<S>,
        handler: &mut impl CallFrameIOAccessHandler<C, L, E>,
        src_node_id: &NodeId,
        src_partition_number: PartitionNumber,
        dest_node_id: &NodeId,
        dest_partition_number: PartitionNumber,
    ) -> Result<(), CallbackError<MovePartitionError, E>> {
        // Check src visibility
        let (_ref_origin, src_device) = self.get_node_ref(src_node_id).ok_or_else(|| {
            CallbackError::Error(MovePartitionError::NodeNotAvailable((*src_node_id).into()))
        })?;

        // Check dest visibility
        let (_ref_origin, dest_device) = self.get_node_ref(dest_node_id).ok_or_else(|| {
            CallbackError::Error(MovePartitionError::NodeNotAvailable((*dest_node_id).into()))
        })?;

        let mut adapter = CallFrameToIOAccessAdapter {
            call_frame: self,
            handler,
            phantom: PhantomData,
        };
        substate_io.move_partition(
            src_device,
            src_node_id,
            src_partition_number,
            dest_device,
            dest_node_id,
            dest_partition_number,
            &mut adapter,
        )?;

        Ok(())
    }

    fn process_input_substate_key(
        &self,
        substate_key: &SubstateKey,
    ) -> Result<(), ProcessSubstateKeyError> {
        match substate_key {
            SubstateKey::Sorted((_, map_key)) | SubstateKey::Map(map_key) => {
                let key_value = IndexedScryptoValue::from_slice(map_key)
                    .map_err(ProcessSubstateKeyError::DecodeError)?;

                // Check owns
                if !key_value.owned_nodes().is_empty() {
                    return Err(ProcessSubstateKeyError::OwnedNodeNotSupported);
                }

                // Check references
                for reference in key_value.references() {
                    if !reference.is_global() {
                        return Err(ProcessSubstateKeyError::NonGlobalRefNotSupported);
                    }

                    if !self.get_node_visibility(reference).is_visible() {
                        return Err(ProcessSubstateKeyError::NodeNotVisible((*reference).into()));
                    }
                }
            }
            _ => {}
        }

        Ok(())
    }

    fn process_output_substate_key(
        &mut self,
        substate_key: &SubstateKey,
    ) -> Result<(), ProcessSubstateKeyError> {
        match substate_key {
            SubstateKey::Sorted((_, map_key)) | SubstateKey::Map(map_key) => {
                let key = IndexedScryptoValue::from_slice(map_key).unwrap();

                // Check owns
                if !key.owned_nodes().is_empty() {
                    panic!("Unexpected owns in substate key")
                }

                // Check references
                for reference in key.references() {
                    if reference.is_global() {
                        self.stable_references
                            .insert(*reference, StableReferenceType::Global);
                    } else {
                        panic!("Unexpected non-global refs in substate key")
                    }
                }
            }
            _ => {}
        }

        Ok(())
    }

    #[allow(clippy::too_many_arguments)]
    pub fn open_substate<S: CommitableSubstateStore, E, F: FnOnce() -> IndexedScryptoValue>(
        &mut self,
        substate_io: &mut SubstateIO<S>,
        node_id: &NodeId,
        partition_num: PartitionNumber,
        substate_key: &SubstateKey,
        flags: LockFlags,
        default: Option<F>,
        data: L,
        handler: &mut impl CallFrameIOAccessHandler<C, L, E>,
    ) -> Result<(SubstateHandle, usize), CallbackError<OpenSubstateError, E>> {
        let (ref_origin, device) = self.get_node_ref(node_id).ok_or_else(|| {
            CallbackError::Error(OpenSubstateError::NodeNotVisible((*node_id).into()))
        })?;

        self.process_input_substate_key(substate_key)
            .map_err(|e| CallbackError::Error(OpenSubstateError::ProcessSubstateKeyError(e)))?;

        let mut adapter = CallFrameToIOAccessAdapter {
            call_frame: self,
            handler,
            phantom: PhantomData,
        };

        let (global_substate_handle, substate_value) = substate_io.open_substate(
            device,
            node_id,
            partition_num,
            substate_key,
            flags,
            default,
            &mut adapter,
        )?;

        let value_len = substate_value.len();
        for node_id in substate_value.references() {
            if node_id.is_global() {
                // Again, safe to overwrite because Global and DirectAccess are exclusive.
                self.stable_references
                    .insert(*node_id, StableReferenceType::Global);
            }
        }

        let mut open_substate = OpenedSubstate {
            references: index_set_new(),
            owned_nodes: index_set_new(),
            ref_origin,
            global_substate_handle,
            device,
            data,
        };

        let diff = SubstateDiff::from_new_substate(substate_value)
            .expect("There should be no issues with already stored substate value");

        Self::apply_diff_to_open_substate(
            &mut self.transient_references,
            substate_io,
            &mut open_substate,
            &diff,
        );

        // Issue lock handle
        let substate_handle = self.next_handle;
        self.open_substates.insert(substate_handle, open_substate);
        self.next_handle += 1;

        Ok((substate_handle, value_len))
    }

    pub fn read_substate<'f, S: CommitableSubstateStore, H: CallFrameSubstateReadHandler<C, L>>(
        &mut self,
        substate_io: &'f mut SubstateIO<S>,
        lock_handle: SubstateHandle,
        handler: &mut H,
    ) -> Result<&'f IndexedScryptoValue, CallbackError<ReadSubstateError, H::Error>> {
        let OpenedSubstate {
            global_substate_handle,
            ..
        } = self
            .open_substates
            .get(&lock_handle)
            .ok_or(CallbackError::Error(ReadSubstateError::HandleNotFound(
                lock_handle,
            )))?;

        let mut adapter = CallFrameToIOSubstateReadAdapter {
            call_frame: self,
            handler,
            handle: *global_substate_handle,
        };

        let substate = substate_io
            .read_substate(*global_substate_handle, &mut adapter)
            .map_err(CallbackError::CallbackError)?;

        Ok(substate)
    }

    pub fn write_substate<S: CommitableSubstateStore, E>(
        &mut self,
        substate_io: &mut SubstateIO<S>,
        lock_handle: SubstateHandle,
        substate: IndexedScryptoValue,
        handler: &mut impl CallFrameIOAccessHandler<C, L, E>,
    ) -> Result<(), CallbackError<WriteSubstateError, E>> {
        let mut opened_substate =
            self.open_substates
                .swap_remove(&lock_handle)
                .ok_or(CallbackError::Error(WriteSubstateError::HandleNotFound(
                    lock_handle,
                )))?;

        let (.., data) = substate_io
            .substate_locks
            .get(opened_substate.global_substate_handle);
        if !data.flags.contains(LockFlags::MUTABLE) {
            return Err(CallbackError::Error(WriteSubstateError::NoWritePermission));
        }

        let diff = opened_substate
            .diff(&substate)
            .map_err(|e| CallbackError::Error(WriteSubstateError::SubstateDiffError(e)))?;

        self.process_substate_diff(substate_io, handler, opened_substate.device, &diff)
            .map_err(|e| e.map(WriteSubstateError::ProcessSubstateError))?;

        Self::apply_diff_to_open_substate(
            &mut self.transient_references,
            substate_io,
            &mut opened_substate,
            &diff,
        );

        let mut adapter = CallFrameToIOAccessAdapter {
            call_frame: self,
            handler,
            phantom: PhantomData,
        };

        substate_io.write_substate(
            opened_substate.global_substate_handle,
            substate,
            &mut adapter,
        )?;

        self.open_substates.insert(lock_handle, opened_substate);

        Ok(())
    }

    pub fn close_substate<S: CommitableSubstateStore>(
        &mut self,
        substate_io: &mut SubstateIO<S>,
        lock_handle: SubstateHandle,
    ) -> Result<(), CloseSubstateError> {
        let mut open_substate = self
            .open_substates
            .swap_remove(&lock_handle)
            .ok_or(CloseSubstateError::HandleNotFound(lock_handle))?;

        for node_id in open_substate.owned_nodes.iter() {
            // We must maintain the invariant that opened substates must always
            // be from a visible node. Thus, we cannot close a substate if there is a
            // child opened substate.
            if substate_io.substate_locks.node_is_locked(node_id) {
                return Err(CloseSubstateError::SubstateBorrowed((*node_id).into()));
            }
        }

        substate_io.close_substate(open_substate.global_substate_handle);

        let diff = open_substate.diff_on_close();
        Self::apply_diff_to_open_substate(
            &mut self.transient_references,
            substate_io,
            &mut open_substate,
            &diff,
        );

        Ok(())
    }
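
    // Sketch of the expected open/read/write/close handle lifecycle from a
    // caller's point of view (illustrative only; `frame`, `io`, `handler`,
    // `read_handler`, ids, keys and values are assumed to be provided by the
    // kernel, and error handling is elided):
    //
    //     let (handle, _len) = frame.open_substate(
    //         io, &node_id, partition_num, &key, LockFlags::MUTABLE,
    //         None::<fn() -> IndexedScryptoValue>, data, handler,
    //     )?;
    //     let value = frame.read_substate(io, handle, read_handler)?;
    //     frame.write_substate(io, handle, new_value, handler)?;
    //     frame.close_substate(io, handle)?;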

    pub fn open_substates(&self) -> Vec<u32> {
        self.open_substates.keys().cloned().collect()
    }

    pub fn close_all_substates<S: CommitableSubstateStore>(
        &mut self,
        substate_io: &mut SubstateIO<S>,
    ) {
        // Closing of all substates should always be possible as no invariant needs to be maintained
        for (_lock_handle, mut open_substate) in self.open_substates.drain(..) {
            substate_io.close_substate(open_substate.global_substate_handle);
            let diff = open_substate.diff_on_close();
            Self::apply_diff_to_open_substate(
                &mut self.transient_references,
                substate_io,
                &mut open_substate,
                &diff,
            );
        }
    }

    pub fn get_handle_info(&self, lock_handle: SubstateHandle) -> Option<L> {
        self.open_substates
            .get(&lock_handle)
            .map(|substate_lock| substate_lock.data.clone())
    }

    pub fn add_global_reference(&mut self, address: GlobalAddress) {
        self.stable_references
            .insert(address.into_node_id(), StableReferenceType::Global);
    }

    pub fn add_direct_access_reference(&mut self, address: InternalAddress) {
        self.stable_references
            .insert(address.into_node_id(), StableReferenceType::DirectAccess);
    }

    //====================================================================================
    // Note that the reference model isn't fully implemented for the set/remove/scan/take
    // APIs. They're intended for internal use only, and extra caution must be taken.
    //====================================================================================

    // Substate Virtualization does not apply to this call
    // Should this be prevented at this layer?
    pub fn set_substate<S: CommitableSubstateStore, E>(
        &mut self,
        substate_io: &mut SubstateIO<S>,
        node_id: &NodeId,
        partition_num: PartitionNumber,
        key: SubstateKey,
        value: IndexedScryptoValue,
        handler: &mut impl CallFrameIOAccessHandler<C, L, E>,
    ) -> Result<(), CallbackError<CallFrameSetSubstateError, E>> {
        let (_ref_origin, device) = self.get_node_ref(node_id).ok_or_else(|| {
            CallbackError::Error(CallFrameSetSubstateError::NodeNotVisible((*node_id).into()))
        })?;

        self.process_input_substate_key(&key).map_err(|e| {
            CallbackError::Error(CallFrameSetSubstateError::ProcessSubstateKeyError(e))
        })?;

        // TODO: Should process value here (For example, not allow owned objects or references) but
        // this isn't a problem for now since only native objects are allowed to use set_substate

        let mut adapter = CallFrameToIOAccessAdapter {
            call_frame: self,
            handler,
            phantom: PhantomData,
        };

        substate_io.set_substate(device, node_id, partition_num, key, value, &mut adapter)?;

        Ok(())
    }

    pub fn remove_substate<S: CommitableSubstateStore, E>(
        &mut self,
        substate_io: &mut SubstateIO<S>,
        node_id: &NodeId,
        partition_num: PartitionNumber,
        key: &SubstateKey,
        handler: &mut impl CallFrameIOAccessHandler<C, L, E>,
    ) -> Result<Option<IndexedScryptoValue>, CallbackError<CallFrameRemoveSubstateError, E>> {
        let (_ref_origin, device) = self.get_node_ref(node_id).ok_or_else(|| {
            CallbackError::Error(CallFrameRemoveSubstateError::NodeNotVisible(
                (*node_id).into(),
            ))
        })?;

        self.process_input_substate_key(key).map_err(|e| {
            CallbackError::Error(CallFrameRemoveSubstateError::ProcessSubstateKeyError(e))
        })?;

        let mut adapter = CallFrameToIOAccessAdapter {
            call_frame: self,
            handler,
            phantom: PhantomData,
        };

        let removed =
            substate_io.remove_substate(device, node_id, partition_num, key, &mut adapter)?;

        Ok(removed)
    }

    pub fn scan_keys<K: SubstateKeyContent, S: CommitableSubstateStore, E>(
        &mut self,
        substate_io: &mut SubstateIO<S>,
        node_id: &NodeId,
        partition_num: PartitionNumber,
        limit: u32,
        handler: &mut impl CallFrameIOAccessHandler<C, L, E>,
    ) -> Result<Vec<SubstateKey>, CallbackError<CallFrameScanKeysError, E>> {
        // Check node visibility
        let (_ref_origin, device) = self.get_node_ref(node_id).ok_or_else(|| {
            CallbackError::Error(CallFrameScanKeysError::NodeNotVisible((*node_id).into()))
        })?;

        let mut adapter = CallFrameToIOAccessAdapter {
            call_frame: self,
            handler,
            phantom: PhantomData,
        };

        let keys =
            substate_io.scan_keys::<K, E>(device, node_id, partition_num, limit, &mut adapter)?;

        for key in &keys {
            self.process_output_substate_key(key).map_err(|e| {
                CallbackError::Error(CallFrameScanKeysError::ProcessSubstateKeyError(e))
            })?;
        }

        Ok(keys)
    }

    pub fn drain_substates<K: SubstateKeyContent, S: CommitableSubstateStore, E>(
        &mut self,
        substate_io: &mut SubstateIO<S>,
        node_id: &NodeId,
        partition_num: PartitionNumber,
        limit: u32,
        handler: &mut impl CallFrameIOAccessHandler<C, L, E>,
    ) -> Result<
        Vec<(SubstateKey, IndexedScryptoValue)>,
        CallbackError<CallFrameDrainSubstatesError, E>,
    > {
        // Check node visibility
        let (_ref_origin, device) = self.get_node_ref(node_id).ok_or_else(|| {
            CallbackError::Error(CallFrameDrainSubstatesError::NodeNotVisible(
                (*node_id).into(),
            ))
        })?;

        let mut adapter = CallFrameToIOAccessAdapter {
            call_frame: self,
            handler,
            phantom: PhantomData,
        };

        let substates = substate_io.drain_substates::<K, E>(
            device,
            node_id,
            partition_num,
            limit,
            &mut adapter,
        )?;

        for (key, substate) in &substates {
            self.process_output_substate_key(key).map_err(|e| {
                CallbackError::Error(CallFrameDrainSubstatesError::ProcessSubstateKeyError(e))
            })?;

            if !substate.owned_nodes().is_empty() {
                panic!("Unexpected owns from drain_substates");
            }

            for reference in substate.references() {
                if reference.is_global() {
                    self.stable_references
                        .insert(*reference, StableReferenceType::Global);
                } else {
                    panic!("Unexpected non-global ref from drain_substates");
                }
            }
        }

        Ok(substates)
    }

    // Substate Virtualization does not apply to this call
    // Should this be prevented at this layer?
    pub fn scan_sorted<S: CommitableSubstateStore, E>(
        &mut self,
        substate_io: &mut SubstateIO<S>,
        node_id: &NodeId,
        partition_num: PartitionNumber,
        count: u32,
        handler: &mut impl CallFrameIOAccessHandler<C, L, E>,
    ) -> Result<
        Vec<(SortedKey, IndexedScryptoValue)>,
        CallbackError<CallFrameScanSortedSubstatesError, E>,
    > {
        // Check node visibility
        let (_ref_origin, device) = self.get_node_ref(node_id).ok_or_else(|| {
            CallbackError::Error(CallFrameScanSortedSubstatesError::NodeNotVisible(
                (*node_id).into(),
            ))
        })?;

        let mut adapter = CallFrameToIOAccessAdapter {
            call_frame: self,
            handler,
            phantom: PhantomData,
        };

        let substates =
            substate_io.scan_sorted(device, node_id, partition_num, count, &mut adapter)?;

        for (key, substate) in &substates {
            self.process_output_substate_key(&SubstateKey::Sorted(key.clone()))
                .map_err(|e| {
                    CallbackError::Error(
                        CallFrameScanSortedSubstatesError::ProcessSubstateKeyError(e),
                    )
                })?;

            if !substate.owned_nodes().is_empty() {
                panic!("Unexpected owns from scan_substates");
            }

            for reference in substate.references() {
                if reference.is_global() {
                    self.stable_references
                        .insert(*reference, StableReferenceType::Global);
                } else {
                    panic!("Unexpected non-global ref from scan_substates");
                }
            }
        }

        Ok(substates)
    }

    pub fn owned_nodes(&self) -> Vec<NodeId> {
        self.owned_root_nodes.clone().into_iter().collect()
    }

    fn get_node_ref(&self, node_id: &NodeId) -> Option<(ReferenceOrigin, SubstateDevice)> {
        let node_visibility = self.get_node_visibility(node_id);
        let ref_origin = node_visibility.reference_origin(*node_id)?;
        let device = match ref_origin {
            ReferenceOrigin::FrameOwned => SubstateDevice::Heap,
            ReferenceOrigin::Global(..) | ReferenceOrigin::DirectlyAccessed => {
                SubstateDevice::Store
            }
            ReferenceOrigin::SubstateNonGlobalReference(device) => device,
        };

        Some((ref_origin, device))
    }

    pub fn get_node_visibility(&self, node_id: &NodeId) -> NodeVisibility {
        let mut visibilities = BTreeSet::<Visibility>::new();

        // Stable references
        if let Some(reference_type) = self.stable_references.get(node_id) {
            visibilities.insert(Visibility::StableReference(*reference_type));
        }
        if self.always_visible_global_nodes.contains(node_id) {
            visibilities.insert(Visibility::StableReference(StableReferenceType::Global));
        }

        // Frame owned nodes
        if self.owned_root_nodes.contains(node_id) {
            visibilities.insert(Visibility::FrameOwned);
        }

        // Borrowed from substate loading
        if let Some(transient_ref) = self.transient_references.get(node_id) {
            visibilities.insert(Visibility::Borrowed(transient_ref.ref_origin));
        }

        NodeVisibility(visibilities)
    }

    fn process_substate_diff<S: CommitableSubstateStore, E>(
        &mut self,
        substate_io: &mut SubstateIO<S>,
        handler: &mut impl CallFrameIOAccessHandler<C, L, E>,
        device: SubstateDevice,
        diff: &SubstateDiff,
    ) -> Result<(), CallbackError<ProcessSubstateError, E>> {
        // Verify and Update call frame state based on diff
        {
            for added_own in &diff.added_owns {
                // Node no longer owned by frame
                self.take_node_internal(substate_io, added_own)
                    .map_err(|e| CallbackError::Error(ProcessSubstateError::TakeNodeError(e)))?;
            }

            for removed_own in &diff.removed_owns {
                // Owned nodes discarded by the substate go back to the call frame,
                // and must be explicitly dropped.
                self.owned_root_nodes.insert(*removed_own);
            }

            for added_ref in &diff.added_refs {
                let node_visibility = self.get_node_visibility(added_ref);
                if !node_visibility.is_visible() {
                    return Err(CallbackError::Error(ProcessSubstateError::RefNotFound(
                        (*added_ref).into(),
                    )));
                }
                if !node_visibility.can_be_referenced_in_substate() {
                    return Err(CallbackError::Error(
                        ProcessSubstateError::RefCantBeAddedToSubstate((*added_ref).into()),
                    ));
                }
            }

            for removed_ref in &diff.removed_refs {
                if removed_ref.is_global() {
                    self.stable_references
                        .insert(*removed_ref, StableReferenceType::Global);
                }
            }
        }

        // Update global state
        match device {
            SubstateDevice::Heap => {
                for added_ref in &diff.added_refs {
                    if !added_ref.is_global() {
                        let (_, device) = self.get_node_ref(added_ref).unwrap();
                        substate_io
                            .non_global_node_refs
                            .increment_ref_count(*added_ref, device);
                    }
                }
                for removed_ref in &diff.removed_refs {
                    if !removed_ref.is_global() {
                        substate_io
                            .non_global_node_refs
                            .decrement_ref_count(removed_ref);
                    }
                }
            }
            SubstateDevice::Store => {
                let mut adapter = CallFrameToIOAccessAdapter {
                    call_frame: self,
                    handler,
                    phantom: PhantomData,
                };

                for added_own in &diff.added_owns {
                    substate_io
                        .move_node_from_heap_to_store(added_own, &mut adapter)
                        .map_err(|e| e.map(ProcessSubstateError::PersistNodeError))?;
                }

                if let Some(removed_own) = diff.removed_owns.iter().next() {
                    return Err(CallbackError::Error(
                        ProcessSubstateError::CantDropNodeInStore((*removed_own).into()),
                    ));
                }

                if let Some(non_global_ref) = diff.added_refs.iter().find(|r| !r.is_global()) {
                    return Err(CallbackError::Error(
                        ProcessSubstateError::NonGlobalRefNotAllowed((*non_global_ref).into()),
                    ));
                }

                if let Some(non_global_ref) = diff.removed_refs.iter().find(|r| !r.is_global()) {
                    panic!(
                        "Should never have contained a non global reference: {:?}",
                        non_global_ref
                    );
                }
            }
        }

        Ok(())
    }

    fn apply_diff_to_open_substate<S: CommitableSubstateStore>(
        transient_references: &mut NonIterMap<NodeId, TransientReference>,
        substate_io: &SubstateIO<S>,
        open_substate: &mut OpenedSubstate<L>,
        diff: &SubstateDiff,
    ) {
        for added_own in &diff.added_owns {
            open_substate.owned_nodes.insert(*added_own);
            transient_references
                .entry(*added_own)
                .or_insert(TransientReference {
                    ref_count: 0usize,
                    ref_origin: open_substate.ref_origin, // Child inherits reference origin
                })
                .ref_count
                .add_assign(1);
        }

        for removed_own in &diff.removed_owns {
            open_substate.owned_nodes.swap_remove(removed_own);
            let mut transient_ref = transient_references.remove(removed_own).unwrap();
            if transient_ref.ref_count > 1 {
                transient_ref.ref_count -= 1;
                transient_references.insert(*removed_own, transient_ref);
            }
        }

        for added_ref in &diff.added_refs {
            open_substate.references.insert(*added_ref);

            if !added_ref.is_global() {
                let device = substate_io.non_global_node_refs.get_ref_device(added_ref);

                transient_references
                    .entry(*added_ref)
                    .or_insert(TransientReference {
                        ref_count: 0usize,
                        ref_origin: ReferenceOrigin::SubstateNonGlobalReference(device),
                    })
                    .ref_count
                    .add_assign(1);
            }
        }

        for removed_ref in &diff.removed_refs {
            open_substate.references.swap_remove(removed_ref);

            if !removed_ref.is_global() {
                let mut transient_ref = transient_references.remove(removed_ref).unwrap();
                if transient_ref.ref_count > 1 {
                    transient_ref.ref_count -= 1;
                    transient_references.insert(*removed_ref, transient_ref);
                }
            }
        }
    }

    fn take_node_internal<S: CommitableSubstateStore>(
        &mut self,
        substate_io: &SubstateIO<S>,
        node_id: &NodeId,
    ) -> Result<(), TakeNodeError> {
        // If there exists a non-global node-ref, we still allow the node to be
        // taken. We do, however, prevent substate-locked nodes from being taken.
        // We do not need to check the node's children, as a node must be
        // substate locked in order to access any of its children.
        if substate_io.substate_locks.node_is_locked(node_id) {
            return Err(TakeNodeError::SubstateBorrowed((*node_id).into()));
        }

        if self.owned_root_nodes.swap_remove(node_id) {
            Ok(())
        } else {
            Err(TakeNodeError::OwnNotFound((*node_id).into()))
        }
    }

    #[cfg(feature = "radix_engine_tests")]
    pub fn stable_references(&self) -> &BTreeMap<NodeId, StableReferenceType> {
        &self.stable_references
    }
}

/// Non-global node references.
/// This struct should be maintained alongside CallFrame, as the call frame should be its only
/// manipulator. Substate I/O, though the "owner", only has read access to this structure.
pub struct NonGlobalNodeRefs {
    node_refs: NonIterMap<NodeId, (SubstateDevice, usize)>,
}

impl NonGlobalNodeRefs {
    pub fn new() -> Self {
        Self {
            node_refs: NonIterMap::new(),
        }
    }

    pub fn node_is_referenced(&self, node_id: &NodeId) -> bool {
        self.node_refs
            .get(node_id)
            .map(|(_, ref_count)| ref_count.gt(&0))
            .unwrap_or(false)
    }

    fn get_ref_device(&self, node_id: &NodeId) -> SubstateDevice {
        let (device, ref_count) = self.node_refs.get(node_id).unwrap();

        if ref_count.eq(&0) {
            panic!("Reference no longer exists");
        }

        *device
    }

    fn increment_ref_count(&mut self, node_id: NodeId, device: SubstateDevice) {
        let (_, ref_count) = self.node_refs.entry(node_id).or_insert((device, 0));
        ref_count.add_assign(1);
    }

    fn decrement_ref_count(&mut self, node_id: &NodeId) {
        let (_, ref_count) = self
            .node_refs
            .get_mut(node_id)
            .unwrap_or_else(|| panic!("Node {:?} not found", node_id));
        ref_count.sub_assign(1);
    }
}
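
// Sketch of the ref-counting contract (illustrative only; `node_id` is a
// placeholder, and `increment_ref_count`/`decrement_ref_count` are private to
// this module, so this sequence is only reachable from call frame code):
//
//     let mut refs = NonGlobalNodeRefs::new();
//     refs.increment_ref_count(node_id, SubstateDevice::Heap);
//     assert!(refs.node_is_referenced(&node_id));
//     refs.decrement_ref_count(&node_id);
//     assert!(!refs.node_is_referenced(&node_id));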

impl Default for NonGlobalNodeRefs {
    fn default() -> Self {
        Self::new()
    }
}

/// Structure which keeps track of all transient substates, i.e. substates which are
/// never committed but which can hold transaction runtime state.
pub struct TransientSubstates {
    pub transient_substates: BTreeMap<NodeId, BTreeSet<(PartitionNumber, SubstateKey)>>,
}

impl TransientSubstates {
    pub fn new() -> Self {
        Self {
            transient_substates: BTreeMap::new(),
        }
    }

    pub fn mark_as_transient(
        &mut self,
        node_id: NodeId,
        partition_num: PartitionNumber,
        substate_key: SubstateKey,
    ) {
        self.transient_substates
            .entry(node_id)
            .or_default()
            .insert((partition_num, substate_key));
    }

    pub fn is_transient(
        &self,
        node_id: &NodeId,
        partition_num: PartitionNumber,
        substate_key: &SubstateKey,
    ) -> bool {
        match self.transient_substates.get(node_id) {
            Some(transient) => transient.contains(&(partition_num, substate_key.clone())),
            None => false,
        }
    }
}
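
// Sketch of the expected pairing between the two methods above (illustrative
// only; `node_id`, `partition_num` and `substate_key` are placeholders assumed
// to be constructed elsewhere):
//
//     let mut transient = TransientSubstates::new();
//     transient.mark_as_transient(node_id, partition_num, substate_key.clone());
//     assert!(transient.is_transient(&node_id, partition_num, &substate_key));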

impl Default for TransientSubstates {
    fn default() -> Self {
        Self::new()
    }
}