shape_vm/executor/vm_impl/
init.rs1use super::super::*;
2
impl VirtualMachine {
    /// Builds a fresh VM from `config`.
    ///
    /// Allocates the value stack up to `DEFAULT_STACK_CAPACITY`, installs the
    /// stdlib type-schema registry into a new `BytecodeProgram`, registers all
    /// built-in stdlib modules, and (optionally) enables metrics and the
    /// tracing GC heap based on `config`.
    pub fn new(config: VMConfig) -> Self {
        // The debugger is only constructed when explicitly requested.
        let debugger = if config.debug_mode {
            Some(VMDebugger::new())
        } else {
            None
        };

        let gc = GarbageCollector::new(config.gc_config.clone());

        // Registry arrives pre-populated with the stdlib types; the ids of the
        // builtin schemas are kept separately on the VM for direct lookup.
        let (registry, builtin_schemas) =
            shape_runtime::type_schema::TypeSchemaRegistry::with_stdlib_types_and_builtin_ids();

        let mut program = BytecodeProgram::new();
        program.type_schema_registry = registry;

        let mut vm = Self {
            config,
            program,
            ip: 0,
            // Pre-fill the stack with `none` words so `sp` can advance up to
            // the default capacity without reallocating per push.
            stack: (0..crate::constants::DEFAULT_STACK_CAPACITY)
                .map(|_| ValueWord::none())
                .collect(),
            sp: 0,
            module_bindings: Vec::new(),
            call_stack: Vec::with_capacity(crate::constants::DEFAULT_CALL_STACK_CAPACITY),
            loop_stack: Vec::new(),
            timeframe_stack: Vec::new(),
            debugger,
            gc,
            instruction_count: 0,
            exception_handlers: Vec::new(),
            builtin_schemas,
            last_error_line: None,
            last_error_file: None,
            last_uncaught_exception: None,
            module_init_done: false,
            output_buffer: None,
            module_registry: shape_runtime::module_exports::ModuleExportRegistry::new(),
            module_fn_table: Vec::new(),
            function_name_index: HashMap::new(),
            extension_methods: HashMap::new(),
            merged_schema_cache: HashMap::new(),
            // Shared interrupt flag; a real flag can be injected later via
            // `set_interrupt`.
            interrupt: Arc::new(AtomicU8::new(0)),
            future_id_counter: 0,
            async_scope_stack: Vec::new(),
            task_scheduler: task_scheduler::TaskScheduler::new(),
            foreign_fn_handles: Vec::new(),
            function_hashes: Vec::new(),
            function_hash_raw: Vec::new(),
            function_id_by_hash: HashMap::new(),
            function_entry_points: Vec::new(),
            program_entry_ip: 0,
            resource_usage: None,
            time_travel: None,
            #[cfg(feature = "gc")]
            gc_heap: None,
            #[cfg(feature = "jit")]
            jit_compiled: false,
            #[cfg(feature = "jit")]
            jit_dispatch_table: std::collections::HashMap::new(),
            tier_manager: None,
            pending_resume: None,
            pending_frame_resume: None,
            metrics: None,
            feedback_vectors: Vec::new(),
            megamorphic_cache: crate::megamorphic_cache::MegamorphicCache::new(),
        };

        // Register every built-in stdlib module so programs can import them.
        vm.register_stdlib_module(state_builtins::create_state_module());
        vm.register_stdlib_module(create_transport_module_exports());
        vm.register_stdlib_module(shape_runtime::stdlib::regex::create_regex_module());
        vm.register_stdlib_module(shape_runtime::stdlib::http::create_http_module());
        vm.register_stdlib_module(shape_runtime::stdlib::crypto::create_crypto_module());
        vm.register_stdlib_module(shape_runtime::stdlib::env::create_env_module());
        vm.register_stdlib_module(shape_runtime::stdlib::json::create_json_module());
        vm.register_stdlib_module(shape_runtime::stdlib::toml_module::create_toml_module());
        vm.register_stdlib_module(shape_runtime::stdlib::yaml::create_yaml_module());
        vm.register_stdlib_module(shape_runtime::stdlib::xml::create_xml_module());
        vm.register_stdlib_module(shape_runtime::stdlib::compress::create_compress_module());
        vm.register_stdlib_module(shape_runtime::stdlib::archive::create_archive_module());
        vm.register_stdlib_module(shape_runtime::stdlib::parallel::create_parallel_module());
        vm.register_stdlib_module(shape_runtime::stdlib::unicode::create_unicode_module());
        vm.register_stdlib_module(shape_runtime::stdlib::csv_module::create_csv_module());
        vm.register_stdlib_module(shape_runtime::stdlib::msgpack_module::create_msgpack_module());
        vm.register_stdlib_module(shape_runtime::stdlib::set_module::create_set_module());

        if vm.config.metrics_enabled {
            vm.metrics = Some(crate::metrics::VmMetrics::new());
        }

        #[cfg(feature = "gc")]
        if vm.config.use_tracing_gc {
            vm.init_gc_heap();
        }

        vm
    }

    /// Builder-style: installs `limits` and starts resource accounting
    /// immediately. Consumes and returns `self` so it chains after `new`.
    pub fn with_resource_limits(mut self, limits: crate::resource_limits::ResourceLimits) -> Self {
        let mut usage = crate::resource_limits::ResourceUsage::new(limits);
        usage.start();
        self.resource_usage = Some(usage);
        self
    }

    /// Creates the tracing GC heap and publishes it as the thread-local heap.
    #[cfg(feature = "gc")]
    pub fn init_gc_heap(&mut self) {
        let heap = shape_gc::GcHeap::new();
        self.gc_heap = Some(heap);
        if let Some(ref mut heap) = self.gc_heap {
            // SAFETY: hands shape_gc a raw pointer into `self.gc_heap`.
            // NOTE(review): this pointer is valid only while the VM (and thus
            // `gc_heap`) is neither moved nor dropped — confirm callers keep
            // the VM pinned for the lifetime of the thread-local registration.
            unsafe { shape_gc::set_thread_gc_heap(heap as *mut _) };
        }
    }

    /// Replaces the VM's interrupt flag with an externally shared one, e.g. so
    /// another thread can request cancellation.
    pub fn set_interrupt(&mut self, flag: Arc<AtomicU8>) {
        self.interrupt = flag;
    }

    /// Turns on time-travel recording with the given capture `mode` and a cap
    /// of `max_entries` recorded entries. Replaces any existing recorder.
    pub fn enable_time_travel(&mut self, mode: time_travel::CaptureMode, max_entries: usize) {
        self.time_travel = Some(time_travel::TimeTravel::new(mode, max_entries));
    }

    /// Turns off time-travel recording and drops any captured history.
    pub fn disable_time_travel(&mut self) {
        self.time_travel = None;
    }

    /// Marks the program as having been JIT-compiled.
    #[cfg(feature = "jit")]
    pub fn set_jit_compiled(&mut self) {
        self.jit_compiled = true;
    }

    /// Whether any JIT-compiled code has been registered.
    #[cfg(feature = "jit")]
    pub fn is_jit_compiled(&self) -> bool {
        self.jit_compiled
    }

    /// Registers a native entry point for `function_id`; also flips the
    /// `jit_compiled` flag so dispatch will consult the table.
    #[cfg(feature = "jit")]
    pub fn register_jit_function(&mut self, function_id: u16, ptr: JitFnPtr) {
        self.jit_dispatch_table.insert(function_id, ptr);
        self.jit_compiled = true;
    }

    /// Read-only view of the function-id -> native-pointer dispatch table.
    #[cfg(feature = "jit")]
    pub fn jit_dispatch_table(&self) -> &std::collections::HashMap<u16, JitFnPtr> {
        &self.jit_dispatch_table
    }

    /// Installs a `TierManager` sized to the current program and wires up the
    /// compilation request/result channels.
    ///
    /// Returns the *other* ends of those channels — the receiver of
    /// compilation requests and the sender of compilation results — intended
    /// for a background compiler thread.
    pub fn enable_tiered_compilation(
        &mut self,
    ) -> (
        std::sync::mpsc::Receiver<crate::tier::CompilationRequest>,
        std::sync::mpsc::Sender<crate::tier::CompilationResult>,
    ) {
        let function_count = self.program.functions.len();
        let mut mgr = crate::tier::TierManager::new(function_count, true);

        let (req_tx, req_rx) = std::sync::mpsc::channel();
        let (res_tx, res_rx) = std::sync::mpsc::channel();
        mgr.set_channels(req_tx, res_rx);

        self.tier_manager = Some(mgr);
        (req_rx, res_tx)
    }

    /// The tier manager, if tiered compilation has been enabled.
    pub fn tier_manager(&self) -> Option<&crate::tier::TierManager> {
        self.tier_manager.as_ref()
    }

    /// Drains finished compilations from the tier manager and, when metrics
    /// are enabled, records a tier-transition event per successful result.
    pub(crate) fn poll_tier_completions(&mut self) {
        if let Some(ref mut tier_mgr) = self.tier_manager {
            let completions = tier_mgr.poll_completions();

            if let Some(ref mut metrics) = self.metrics {
                for result in &completions {
                    // Only results that actually produced native code count.
                    if result.native_code.is_some() {
                        // Tier numbering: a BaselineJit result means the
                        // function moved 0 -> 1; an OptimizingJit result means
                        // 1 -> 2. `Interpreted` results carry no transition.
                        let from_tier = match result.compiled_tier {
                            crate::tier::Tier::BaselineJit => 0,
                            crate::tier::Tier::OptimizingJit => 1,
                            crate::tier::Tier::Interpreted => continue,
                        };
                        let to_tier = match result.compiled_tier {
                            crate::tier::Tier::BaselineJit => 1,
                            crate::tier::Tier::OptimizingJit => 2,
                            crate::tier::Tier::Interpreted => continue,
                        };
                        metrics.record_tier_event(crate::metrics::TierEvent {
                            function_id: result.function_id,
                            from_tier,
                            to_tier,
                            call_count: tier_mgr.get_call_count(result.function_id),
                            timestamp_us: metrics.elapsed_us(),
                        });
                    }
                }
            }
        }
    }

    /// Feedback vector for the function on top of the call stack.
    ///
    /// Returns `None` when there is no frame, the frame has no function id,
    /// the id is out of range, or (for a not-yet-created vector) tiered
    /// compilation is disabled. Lazily allocates the vector otherwise, so
    /// feedback is only collected once a tier manager exists.
    #[inline]
    pub(crate) fn current_feedback_vector(
        &mut self,
    ) -> Option<&mut crate::feedback::FeedbackVector> {
        let func_id = self.call_stack.last()?.function_id? as usize;
        if func_id >= self.feedback_vectors.len() {
            return None;
        }
        if self.feedback_vectors[func_id].is_none() {
            if self.tier_manager.is_none() {
                return None;
            }
            self.feedback_vectors[func_id] =
                Some(crate::feedback::FeedbackVector::new(func_id as u16));
        }
        self.feedback_vectors[func_id].as_mut()
    }

    /// All feedback vectors, indexed by function id (`None` = not collected).
    pub fn feedback_vectors(&self) -> &[Option<crate::feedback::FeedbackVector>] {
        &self.feedback_vectors
    }

    /// The currently loaded bytecode program.
    pub fn program(&self) -> &BytecodeProgram {
        &self.program
    }

    /// The time-travel recorder, if enabled.
    pub fn time_travel(&self) -> Option<&time_travel::TimeTravel> {
        self.time_travel.as_ref()
    }

    /// Mutable access to the time-travel recorder, if enabled.
    pub fn time_travel_mut(&mut self) -> Option<&mut time_travel::TimeTravel> {
        self.time_travel.as_mut()
    }

    /// The registry of module exports (stdlib and user modules).
    pub fn module_registry(&self) -> &shape_runtime::module_exports::ModuleExportRegistry {
        &self.module_registry
    }

    /// Hands out the next unique future id (monotonic, starting at 1).
    pub(crate) fn next_future_id(&mut self) -> u64 {
        self.future_id_counter += 1;
        self.future_id_counter
    }

    /// Looks up a function id by exact name via a linear scan over the
    /// program's function table.
    // NOTE(review): the VM also carries a `function_name_index` HashMap —
    // if it is kept in sync with `program.functions`, this scan could use it;
    // confirm before switching.
    pub fn get_function_id(&self, name: &str) -> Option<u16> {
        self.program
            .functions
            .iter()
            .position(|f| f.name == name)
            .map(|id| id as u16)
    }
}