// shape_jit/mixed_table.rs
1//! Mixed function table supporting both JIT-compiled and interpreter-fallback entries.
2//!
3//! When per-blob JIT preflight determines that some functions cannot be JIT-compiled
4//! (e.g. they use async opcodes or unsupported builtins), we still want to JIT-compile
5//! the functions that *can* be compiled. The `MixedFunctionTable` maps each function
6//! to either a native code pointer or a marker indicating VM interpretation.
7
8use shape_vm::bytecode::FunctionHash;
9use std::collections::HashMap;
10
/// Entry in the mixed function table supporting both JIT and interpreted functions.
#[derive(Debug, Clone)]
pub enum FunctionEntry {
    /// JIT-compiled native function pointer.
    ///
    /// Only valid while the `JITModule` that produced it is alive; the table
    /// never dereferences it itself.
    Native(*const u8),
    /// Falls back to VM interpreter for this function.
    /// The `u16` is the function index in the linked program.
    Interpreted(u16),
    /// Awaiting background compilation.
    /// The `u16` is the function index in the linked program.
    Pending(u16),
}
23
24// SAFETY: Function pointers from JIT are valid for the lifetime of the JITModule
25// that produced them. The caller must ensure the JITModule outlives the table.
26unsafe impl Send for FunctionEntry {}
27
28/// Mixed function table mapping function IDs to either native or interpreted entries.
29///
30/// Supports lookup by both numeric index (for the flat instruction array) and
31/// content hash (for the content-addressed blob store).
32pub struct MixedFunctionTable {
33    entries: Vec<FunctionEntry>,
34    hash_to_entry: HashMap<FunctionHash, usize>,
35}
36
37impl MixedFunctionTable {
38    pub fn new() -> Self {
39        Self {
40            entries: Vec::new(),
41            hash_to_entry: HashMap::new(),
42        }
43    }
44
45    /// Pre-allocate space for `capacity` function entries.
46    pub fn with_capacity(capacity: usize) -> Self {
47        Self {
48            entries: Vec::with_capacity(capacity),
49            hash_to_entry: HashMap::with_capacity(capacity),
50        }
51    }
52
53    /// Insert or replace an entry at the given index.
54    ///
55    /// If `id` is beyond the current length, intermediate slots are filled
56    /// with `Interpreted(0)` placeholders.
57    pub fn insert(&mut self, id: usize, entry: FunctionEntry) {
58        if id >= self.entries.len() {
59            self.entries.resize(id + 1, FunctionEntry::Interpreted(0));
60        }
61        self.entries[id] = entry;
62    }
63
64    /// Look up an entry by numeric function index.
65    pub fn get(&self, id: usize) -> Option<&FunctionEntry> {
66        self.entries.get(id)
67    }
68
69    /// Insert an entry keyed by content hash, also storing it at the given index.
70    pub fn insert_by_hash(&mut self, hash: FunctionHash, entry: FunctionEntry) {
71        let id = self.entries.len();
72        self.entries.push(entry);
73        self.hash_to_entry.insert(hash, id);
74    }
75
76    /// Look up an entry by content hash.
77    pub fn get_by_hash(&self, hash: &FunctionHash) -> Option<&FunctionEntry> {
78        self.hash_to_entry
79            .get(hash)
80            .and_then(|&id| self.entries.get(id))
81    }
82
83    /// Total number of entries in the table.
84    pub fn len(&self) -> usize {
85        self.entries.len()
86    }
87
88    /// Returns `true` if the table contains no entries.
89    pub fn is_empty(&self) -> bool {
90        self.entries.is_empty()
91    }
92
93    /// Count of entries that are JIT-compiled native code.
94    pub fn native_count(&self) -> usize {
95        self.entries
96            .iter()
97            .filter(|e| matches!(e, FunctionEntry::Native(_)))
98            .count()
99    }
100
101    /// Count of entries that fall back to the VM interpreter.
102    pub fn interpreted_count(&self) -> usize {
103        self.entries
104            .iter()
105            .filter(|e| matches!(e, FunctionEntry::Interpreted(_)))
106            .count()
107    }
108
109    /// Count of entries that are awaiting background compilation.
110    pub fn pending_count(&self) -> usize {
111        self.entries
112            .iter()
113            .filter(|e| matches!(e, FunctionEntry::Pending(_)))
114            .count()
115    }
116
117    /// Iterate over all entries with their index.
118    pub fn iter(&self) -> impl Iterator<Item = (usize, &FunctionEntry)> {
119        self.entries.iter().enumerate()
120    }
121
122    /// Promote a pending entry to native after background compilation completes.
123    pub fn promote_to_native(&mut self, id: usize, ptr: *const u8) {
124        if id < self.entries.len() {
125            self.entries[id] = FunctionEntry::Native(ptr);
126        }
127    }
128}
129
130impl Default for MixedFunctionTable {
131    fn default() -> Self {
132        Self::new()
133    }
134}
135
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn empty_table() {
        let table = MixedFunctionTable::new();
        assert!(table.is_empty());
        assert_eq!(table.len(), 0);
        assert_eq!(table.native_count(), 0);
        assert_eq!(table.interpreted_count(), 0);
    }

    #[test]
    fn insert_and_get() {
        let mut table = MixedFunctionTable::new();
        let fake_ptr = 0xDEAD_BEEF as *const u8;
        table.insert(0, FunctionEntry::Native(fake_ptr));
        table.insert(1, FunctionEntry::Interpreted(1));
        table.insert(2, FunctionEntry::Pending(2));

        assert_eq!(table.len(), 3);
        assert_eq!(table.native_count(), 1);
        assert_eq!(table.interpreted_count(), 1);
        assert_eq!(table.pending_count(), 1);

        assert!(matches!(table.get(0), Some(FunctionEntry::Native(_))));
        assert!(matches!(table.get(1), Some(FunctionEntry::Interpreted(1))));
        assert!(matches!(table.get(2), Some(FunctionEntry::Pending(2))));
        assert!(table.get(3).is_none());
    }

    #[test]
    fn insert_by_hash_and_lookup() {
        let mut table = MixedFunctionTable::new();
        let hash = FunctionHash([42u8; 32]);
        let fake_ptr = 0xCAFE as *const u8;
        table.insert_by_hash(hash, FunctionEntry::Native(fake_ptr));

        assert!(matches!(
            table.get_by_hash(&hash),
            Some(FunctionEntry::Native(_))
        ));
        assert!(table.get_by_hash(&FunctionHash::ZERO).is_none());
    }

    #[test]
    fn promote_pending_to_native() {
        let mut table = MixedFunctionTable::new();
        table.insert(0, FunctionEntry::Pending(0));
        assert_eq!(table.pending_count(), 1);

        let fake_ptr = 0xBEEF as *const u8;
        table.promote_to_native(0, fake_ptr);
        assert_eq!(table.pending_count(), 0);
        assert_eq!(table.native_count(), 1);
    }

    #[test]
    fn sparse_insert_fills_gaps() {
        let mut table = MixedFunctionTable::new();
        table.insert(5, FunctionEntry::Native(0x1 as *const u8));
        assert_eq!(table.len(), 6);
        // Slots 0-4 should be Interpreted(0) placeholders.
        assert!(matches!(table.get(0), Some(FunctionEntry::Interpreted(0))));
        assert!(matches!(table.get(4), Some(FunctionEntry::Interpreted(0))));
        assert!(matches!(table.get(5), Some(FunctionEntry::Native(_))));
    }
}
204}