// shape_vm/executor/vm_impl/init.rs

use super::super::*;
3impl VirtualMachine {
    /// Construct a fully-initialized VM from `config`.
    ///
    /// Setup happens in phases:
    /// 1. optional debugger (only when `config.debug_mode` is set),
    /// 2. garbage collector built from `config.gc_config`,
    /// 3. a fresh `BytecodeProgram` seeded with the stdlib type-schema
    ///    registry (the matching builtin schema ids are kept on the VM),
    /// 4. the `Self` literal with every runtime field at its empty default
    ///    and the value stack pre-filled with `DEFAULT_STACK_CAPACITY`
    ///    none-words,
    /// 5. registration of the bundled stdlib modules,
    /// 6. config-driven extras: metrics, and (feature `"gc"`) the tracing
    ///    heap.
    pub fn new(config: VMConfig) -> Self {
        // Debugger is only attached when explicitly requested.
        let debugger = if config.debug_mode {
            Some(VMDebugger::new())
        } else {
            None
        };

        let gc = GarbageCollector::new(config.gc_config.clone());

        // Builtin type schemas plus their pre-assigned ids; the registry
        // lives on the program, the ids stay on the VM itself.
        let (registry, builtin_schemas) =
            shape_runtime::type_schema::TypeSchemaRegistry::with_stdlib_types_and_builtin_ids();

        let mut program = BytecodeProgram::new();
        program.type_schema_registry = registry;

        let mut vm = Self {
            config,
            program,
            ip: 0,
            // Pre-size the value stack so execution never starts with a
            // reallocation; every slot begins as a none-word.
            stack: (0..crate::constants::DEFAULT_STACK_CAPACITY)
                .map(|_| ValueWord::none())
                .collect(),
            sp: 0,
            module_bindings: Vec::new(),
            call_stack: Vec::with_capacity(crate::constants::DEFAULT_CALL_STACK_CAPACITY),
            loop_stack: Vec::new(),
            timeframe_stack: Vec::new(),
            debugger,
            gc,
            instruction_count: 0,
            exception_handlers: Vec::new(),
            builtin_schemas,
            last_error_line: None,
            last_error_file: None,
            last_uncaught_exception: None,
            module_init_done: false,
            output_buffer: None,
            module_registry: shape_runtime::module_exports::ModuleExportRegistry::new(),
            module_fn_table: Vec::new(),
            function_name_index: HashMap::new(),
            extension_methods: HashMap::new(),
            merged_schema_cache: HashMap::new(),
            // Interrupt flag defaults to a private, unset atomic; callers
            // can swap in a shared one via `set_interrupt`.
            interrupt: Arc::new(AtomicU8::new(0)),
            future_id_counter: 0,
            async_scope_stack: Vec::new(),
            task_scheduler: task_scheduler::TaskScheduler::new(),
            foreign_fn_handles: Vec::new(),
            function_hashes: Vec::new(),
            function_hash_raw: Vec::new(),
            function_id_by_hash: HashMap::new(),
            function_entry_points: Vec::new(),
            program_entry_ip: 0,
            resource_usage: None,
            time_travel: None,
            #[cfg(feature = "gc")]
            gc_heap: None,
            #[cfg(feature = "jit")]
            jit_compiled: false,
            #[cfg(feature = "jit")]
            jit_dispatch_table: std::collections::HashMap::new(),
            tier_manager: None,
            pending_resume: None,
            pending_frame_resume: None,
            metrics: None,
            feedback_vectors: Vec::new(),
            megamorphic_cache: crate::megamorphic_cache::MegamorphicCache::new(),
        };

        // Built-in stdlib modules shipped with every VM instance.
        vm.register_stdlib_module(state_builtins::create_state_module());
        vm.register_stdlib_module(create_transport_module_exports());
        vm.register_stdlib_module(create_remote_module_exports());
        vm.register_stdlib_module(shape_runtime::stdlib::regex::create_regex_module());
        vm.register_stdlib_module(shape_runtime::stdlib::http::create_http_module());
        vm.register_stdlib_module(shape_runtime::stdlib::crypto::create_crypto_module());
        vm.register_stdlib_module(shape_runtime::stdlib::env::create_env_module());
        vm.register_stdlib_module(shape_runtime::stdlib::json::create_json_module());
        vm.register_stdlib_module(shape_runtime::stdlib::toml_module::create_toml_module());
        vm.register_stdlib_module(shape_runtime::stdlib::yaml::create_yaml_module());
        vm.register_stdlib_module(shape_runtime::stdlib::xml::create_xml_module());
        vm.register_stdlib_module(shape_runtime::stdlib::compress::create_compress_module());
        vm.register_stdlib_module(shape_runtime::stdlib::archive::create_archive_module());
        vm.register_stdlib_module(shape_runtime::stdlib::parallel::create_parallel_module());
        vm.register_stdlib_module(shape_runtime::stdlib::unicode::create_unicode_module());
        vm.register_stdlib_module(shape_runtime::stdlib::csv_module::create_csv_module());
        vm.register_stdlib_module(shape_runtime::stdlib::msgpack_module::create_msgpack_module());
        vm.register_stdlib_module(shape_runtime::stdlib::set_module::create_set_module());

        if vm.config.metrics_enabled {
            vm.metrics = Some(crate::metrics::VmMetrics::new());
        }

        // Tracing heap is opt-in via both the cargo feature and the config flag.
        #[cfg(feature = "gc")]
        if vm.config.use_tracing_gc {
            vm.init_gc_heap();
        }

        vm
    }
108
109 pub fn with_resource_limits(mut self, limits: crate::resource_limits::ResourceLimits) -> Self {
111 let mut usage = crate::resource_limits::ResourceUsage::new(limits);
112 usage.start();
113 self.resource_usage = Some(usage);
114 self
115 }
116
    /// Allocate the tracing GC heap and register it as this thread's heap.
    #[cfg(feature = "gc")]
    pub fn init_gc_heap(&mut self) {
        let heap = shape_gc::GcHeap::new();
        self.gc_heap = Some(heap);
        // Publish a raw pointer to the heap now stored inside `self`.
        // NOTE(review): the pointer is only valid while the heap stays at
        // this address — presumably the VM is not moved after this call;
        // confirm, since builder methods (e.g. `with_resource_limits`)
        // take and return `self` by value, which moves the Option payload.
        if let Some(ref mut heap) = self.gc_heap {
            unsafe { shape_gc::set_thread_gc_heap(heap as *mut _) };
        }
    }
132
133 pub fn set_interrupt(&mut self, flag: Arc<AtomicU8>) {
135 self.interrupt = flag;
136 }
137
138 pub fn enable_time_travel(&mut self, mode: time_travel::CaptureMode, max_entries: usize) {
140 self.time_travel = Some(time_travel::TimeTravel::new(mode, max_entries));
141 }
142
143 pub fn disable_time_travel(&mut self) {
145 self.time_travel = None;
146 }
147
148 #[cfg(feature = "jit")]
163 pub fn set_jit_compiled(&mut self) {
164 self.jit_compiled = true;
165 }
166
167 #[cfg(feature = "jit")]
169 pub fn is_jit_compiled(&self) -> bool {
170 self.jit_compiled
171 }
172
173 #[cfg(feature = "jit")]
178 pub fn register_jit_function(&mut self, function_id: u16, ptr: JitFnPtr) {
179 self.jit_dispatch_table.insert(function_id, ptr);
180 self.jit_compiled = true;
181 }
182
183 #[cfg(feature = "jit")]
185 pub fn jit_dispatch_table(&self) -> &std::collections::HashMap<u16, JitFnPtr> {
186 &self.jit_dispatch_table
187 }
188
189 pub fn enable_tiered_compilation(
197 &mut self,
198 ) -> (
199 std::sync::mpsc::Receiver<crate::tier::CompilationRequest>,
200 std::sync::mpsc::Sender<crate::tier::CompilationResult>,
201 ) {
202 let function_count = self.program.functions.len();
203 let mut mgr = crate::tier::TierManager::new(function_count, true);
204
205 let (req_tx, req_rx) = std::sync::mpsc::channel();
206 let (res_tx, res_rx) = std::sync::mpsc::channel();
207 mgr.set_channels(req_tx, res_rx);
208
209 self.tier_manager = Some(mgr);
210 (req_rx, res_tx)
211 }
212
213 pub fn tier_manager(&self) -> Option<&crate::tier::TierManager> {
215 self.tier_manager.as_ref()
216 }
217
218 pub(crate) fn poll_tier_completions(&mut self) {
227 if let Some(ref mut tier_mgr) = self.tier_manager {
228 let completions = tier_mgr.poll_completions();
231
232 if let Some(ref mut metrics) = self.metrics {
234 for result in &completions {
235 if result.native_code.is_some() {
236 let from_tier = match result.compiled_tier {
237 crate::tier::Tier::BaselineJit => 0, crate::tier::Tier::OptimizingJit => 1, crate::tier::Tier::Interpreted => continue,
240 };
241 let to_tier = match result.compiled_tier {
242 crate::tier::Tier::BaselineJit => 1,
243 crate::tier::Tier::OptimizingJit => 2,
244 crate::tier::Tier::Interpreted => continue,
245 };
246 metrics.record_tier_event(crate::metrics::TierEvent {
247 function_id: result.function_id,
248 from_tier,
249 to_tier,
250 call_count: tier_mgr.get_call_count(result.function_id),
251 timestamp_us: metrics.elapsed_us(),
252 });
253 }
254 }
255 }
256 }
257 }
258
259 #[inline]
262 pub(crate) fn current_feedback_vector(
263 &mut self,
264 ) -> Option<&mut crate::feedback::FeedbackVector> {
265 let func_id = self.call_stack.last()?.function_id? as usize;
266 if func_id >= self.feedback_vectors.len() {
267 return None;
268 }
269 if self.feedback_vectors[func_id].is_none() {
270 if self.tier_manager.is_none() {
271 return None;
272 }
273 self.feedback_vectors[func_id] =
274 Some(crate::feedback::FeedbackVector::new(func_id as u16));
275 }
276 self.feedback_vectors[func_id].as_mut()
277 }
278
279 pub fn feedback_vectors(&self) -> &[Option<crate::feedback::FeedbackVector>] {
281 &self.feedback_vectors
282 }
283
284 pub fn program(&self) -> &BytecodeProgram {
286 &self.program
287 }
288
289 pub fn time_travel(&self) -> Option<&time_travel::TimeTravel> {
291 self.time_travel.as_ref()
292 }
293
294 pub fn time_travel_mut(&mut self) -> Option<&mut time_travel::TimeTravel> {
296 self.time_travel.as_mut()
297 }
298
299 pub fn module_registry(&self) -> &shape_runtime::module_exports::ModuleExportRegistry {
301 &self.module_registry
302 }
303
304 pub(crate) fn next_future_id(&mut self) -> u64 {
306 self.future_id_counter += 1;
307 self.future_id_counter
308 }
309
310 pub fn get_function_id(&self, name: &str) -> Option<u16> {
312 self.program
313 .functions
314 .iter()
315 .position(|f| f.name == name)
316 .map(|id| id as u16)
317 }
318}