1use std::collections::{HashMap, HashSet};
11use std::sync::{Arc, mpsc};
12
13use crate::bytecode::BytecodeProgram;
14use crate::deopt::DeoptTracker;
15use crate::feedback::FeedbackVector;
16
/// Execution tier of a single function.
///
/// The derived `Ord` follows declaration order, so
/// `Interpreted < BaselineJit < OptimizingJit` — callers rely on this
/// for tier comparisons.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum Tier {
    /// Executed by the bytecode interpreter; no native code installed.
    Interpreted,
    /// Quickly-produced, lightly-optimized native code.
    BaselineJit,
    /// Feedback-driven, heavily-optimized native code.
    OptimizingJit,
}
27
28impl Tier {
29 pub fn threshold(&self) -> u32 {
31 match self {
32 Self::Interpreted => 0,
33 Self::BaselineJit => 100,
34 Self::OptimizingJit => 10_000,
35 }
36 }
37}
38
/// Per-function tier-up bookkeeping tracked by `TierManager`.
#[derive(Debug)]
pub struct FunctionTierState {
    // Tier whose code is currently installed for this function.
    pub tier: Tier,
    // Number of observed calls (saturating; see `record_call`).
    pub call_count: u32,
    // True while a compilation request is in flight, preventing
    // duplicate requests for the same function.
    pub compilation_pending: bool,
}
49
50impl Default for FunctionTierState {
51 fn default() -> Self {
52 Self {
53 tier: Tier::Interpreted,
54 call_count: 0,
55 compilation_pending: false,
56 }
57 }
58}
59
/// Work item sent to the JIT worker thread.
#[derive(Debug)]
pub struct CompilationRequest {
    // Function to compile, indexing the program's function table.
    pub function_id: u16,
    // Tier the backend should produce code for.
    pub target_tier: Tier,
    // Optional content hash of the bytecode blob — presumably for code
    // caching; TODO confirm against the backend implementation.
    pub blob_hash: Option<[u8; 32]>,
    // True for on-stack-replacement (loop-entry) compilations.
    pub osr: bool,
    // Bytecode ip of the loop header when `osr` is set.
    pub loop_header_ip: Option<usize>,
    // Type feedback for the function itself; populated for
    // optimizing-tier requests (see `record_call`).
    pub feedback: Option<FeedbackVector>,
    // Feedback for callees — presumably to drive inlining decisions;
    // confirm with the optimizing backend.
    pub callee_feedback: HashMap<u16, FeedbackVector>,
}
88
/// Outcome of one compilation job, sent back from the JIT worker.
#[derive(Debug)]
pub struct CompilationResult {
    // Function this result belongs to.
    pub function_id: u16,
    // Tier that was compiled.
    pub compiled_tier: Tier,
    // Entry point of the emitted machine code; `None` on failure.
    // Raw pointer — the code buffer itself is owned elsewhere
    // (presumably the backend's code cache; confirm lifetime rules).
    pub native_code: Option<*const u8>,
    // Human-readable failure description; `Some` exactly on failure.
    pub error: Option<String>,
    // Present when this was an OSR compile; `poll_completions` installs
    // such code in the OSR table instead of the main-entry table.
    pub osr_entry: Option<crate::bytecode::OsrEntryPoint>,
    // Deoptimization metadata, indexed by deopt id.
    pub deopt_points: Vec<crate::bytecode::DeoptInfo>,
    // Loop header ip for OSR jobs; used to blacklist the loop when the
    // compile failed.
    pub loop_header_ip: Option<usize>,
    // Shape guards baked into the code; registered with the deopt
    // tracker so shape transitions invalidate this function.
    pub shape_guards: Vec<shape_value::shape_graph::ShapeId>,
}
113
/// A compiler that turns bytecode into native code.
///
/// Implementations are moved onto the JIT worker thread (see
/// `TierManager::set_backend`), hence the `Send + 'static` bound.
pub trait CompilationBackend: Send + 'static {
    /// Compile `request.function_id` from `program`, reporting either
    /// native code or an error string in the returned result.
    fn compile(
        &mut self,
        request: &CompilationRequest,
        program: &BytecodeProgram,
    ) -> CompilationResult;
}
126
// SAFETY: `CompilationResult` is only `!Send` because of the raw
// `native_code` pointer. The result merely transports the pointer from
// the worker thread to the manager and never dereferences it;
// presumably the code buffer is owned by the backend's code cache with
// a stable address — TODO(review) confirm no aliasing mutable access
// remains on the worker side after the result is sent.
unsafe impl Send for CompilationResult {}
130
/// Default number of iterations a single loop must run before it
/// becomes a candidate for on-stack-replacement (OSR) compilation.
const DEFAULT_OSR_THRESHOLD: u32 = 1000;
133
/// Orchestrates tier-up decisions, OSR bookkeeping, and communication
/// with the JIT worker thread.
pub struct TierManager {
    // Per-function state, indexed by function id.
    function_states: Vec<FunctionTierState>,
    // Requests to the JIT worker; `None` until channels are attached.
    compilation_tx: Option<mpsc::Sender<CompilationRequest>>,
    // Finished compilations from the JIT worker.
    compilation_rx: Option<mpsc::Receiver<CompilationResult>>,
    // Main-entry native code per function.
    native_code_table: HashMap<u16, *const u8>,
    // Global on/off switch for all tiering activity.
    enabled: bool,
    // Back-edge counters keyed by (function id, loop header ip).
    loop_counters: HashMap<(u16, usize), u32>,
    // OSR entry points keyed by (function id, loop header ip).
    osr_table: HashMap<(u16, usize), *const u8>,
    // Loop-iteration count that triggers an OSR compile.
    osr_threshold: u32,
    // Loops whose OSR compilation failed; they are never retried.
    osr_blacklist: HashSet<(u16, usize)>,
    // Deopt metadata per function, indexed by deopt id.
    deopt_tables: HashMap<u16, Vec<crate::bytecode::DeoptInfo>>,
    // Maps shape guards embedded in compiled code back to functions so
    // shape transitions can invalidate the affected code.
    deopt_tracker: DeoptTracker,
}
167
// SAFETY: `TierManager` is only `!Send` because its tables store raw
// code pointers (`native_code_table`, `osr_table`). The manager never
// dereferences them; it hands them out for the execution engine to
// call into. Presumably the manager is owned by a single VM thread —
// TODO(review) confirm the engine guarantees the pointed-to code
// outlives the tables.
unsafe impl Send for TierManager {}
171
172impl TierManager {
173 pub fn new(function_count: usize, enabled: bool) -> Self {
175 let mut function_states = Vec::with_capacity(function_count);
176 function_states.resize_with(function_count, FunctionTierState::default);
177
178 Self {
179 function_states,
180 compilation_tx: None,
181 compilation_rx: None,
182 native_code_table: HashMap::new(),
183 enabled,
184 loop_counters: HashMap::new(),
185 osr_table: HashMap::new(),
186 osr_threshold: DEFAULT_OSR_THRESHOLD,
187 osr_blacklist: HashSet::new(),
188 deopt_tables: HashMap::new(),
189 deopt_tracker: DeoptTracker::new(),
190 }
191 }
192
193 pub fn set_channels(
198 &mut self,
199 compilation_tx: mpsc::Sender<CompilationRequest>,
200 compilation_rx: mpsc::Receiver<CompilationResult>,
201 ) {
202 self.compilation_tx = Some(compilation_tx);
203 self.compilation_rx = Some(compilation_rx);
204 }
205
206 #[inline]
214 pub fn record_call(&mut self, function_id: u16, feedback: Option<&FeedbackVector>) -> bool {
215 if !self.enabled {
216 return false;
217 }
218
219 let idx = function_id as usize;
220 if idx >= self.function_states.len() {
221 return false;
222 }
223
224 let state = &mut self.function_states[idx];
225 state.call_count = state.call_count.saturating_add(1);
226
227 let next_tier = match state.tier {
229 Tier::Interpreted if state.call_count >= Tier::BaselineJit.threshold() => {
230 Some(Tier::BaselineJit)
231 }
232 Tier::BaselineJit if state.call_count >= Tier::OptimizingJit.threshold() => {
233 Some(Tier::OptimizingJit)
234 }
235 _ => None,
236 };
237
238 if let Some(target) = next_tier {
239 if !state.compilation_pending {
240 state.compilation_pending = true;
241 if target == Tier::OptimizingJit {
244 if let Some(fv) = feedback {
245 self.request_compilation_with_feedback(function_id, target, fv.clone());
246 return true;
247 }
248 }
249 self.request_compilation(function_id, target);
250 return true;
251 }
252 }
253
254 false
255 }
256
257 fn request_compilation(&self, function_id: u16, target_tier: Tier) {
259 if let Some(ref tx) = self.compilation_tx {
260 let _ = tx.send(CompilationRequest {
261 function_id,
262 target_tier,
263 blob_hash: None, osr: false,
265 loop_header_ip: None,
266 callee_feedback: HashMap::new(),
267 feedback: None, });
269 }
270 }
271
272 pub fn request_compilation_with_feedback(
277 &self,
278 function_id: u16,
279 target_tier: Tier,
280 feedback: FeedbackVector,
281 ) {
282 if let Some(ref tx) = self.compilation_tx {
283 let _ = tx.send(CompilationRequest {
284 function_id,
285 target_tier,
286 blob_hash: None,
287 osr: false,
288 loop_header_ip: None,
289 feedback: Some(feedback),
290 callee_feedback: HashMap::new(),
291 });
292 }
293 }
294
    /// Drain all finished compilations from the worker (non-blocking)
    /// and install their results, then process any pending shape-graph
    /// invalidations. Returns the drained results for the caller to
    /// inspect (e.g. for logging).
    pub fn poll_completions(&mut self) -> Vec<CompilationResult> {
        let mut results = Vec::new();

        if let Some(ref rx) = self.compilation_rx {
            while let Ok(result) = rx.try_recv() {
                let idx = result.function_id as usize;
                if idx < self.function_states.len() {
                    let state = &mut self.function_states[idx];
                    // The request is no longer in flight, whether it
                    // succeeded or failed.
                    state.compilation_pending = false;

                    if let Some(code_ptr) = result.native_code {
                        if let Some(ref osr_entry) = result.osr_entry {
                            // OSR code is entered mid-loop; it goes in the
                            // OSR table and does NOT change the function's
                            // tier or main entry point.
                            self.osr_table
                                .insert((result.function_id, osr_entry.bytecode_ip), code_ptr);
                        } else {
                            // Regular compile: promote the tier and install
                            // the new main entry point.
                            state.tier = result.compiled_tier;
                            self.native_code_table.insert(result.function_id, code_ptr);
                        }

                        // Empty deopt tables are not stored (see
                        // test_deopt_table_empty_not_stored).
                        if !result.deopt_points.is_empty() {
                            self.deopt_tables
                                .insert(result.function_id, result.deopt_points.clone());
                        }

                        // Register shape guards so future shape transitions
                        // can invalidate this code.
                        if !result.shape_guards.is_empty() {
                            self.deopt_tracker
                                .register(result.function_id, &result.shape_guards);
                        }
                    }
                    // A failed compile that names a loop header blacklists
                    // that loop from further OSR attempts.
                    if result.error.is_some() {
                        if let Some(loop_ip) = result.loop_header_ip {
                            self.osr_blacklist.insert((result.function_id, loop_ip));
                        }
                    }
                }
                results.push(result);
            }
        }

        // Deferred shape-transition handling piggybacks on the poll.
        self.check_shape_invalidations();

        results
    }
350
351 fn check_shape_invalidations(&mut self) {
358 let transitions = shape_value::shape_graph::drain_shape_transitions();
359 for (parent_id, _child_id) in transitions {
360 let invalidated = self.deopt_tracker.invalidate_shape(parent_id);
361 for func_id in invalidated {
362 self.invalidate_all(func_id);
363 }
364 }
365 }
366
    /// Main-entry native code for `function_id`, if a non-OSR compile has
    /// landed. The pointed-to buffer is owned by the JIT code cache, not
    /// this table — TODO(review) confirm invalidation keeps them in sync.
    #[inline]
    pub fn get_native_code(&self, function_id: u16) -> Option<*const u8> {
        self.native_code_table.get(&function_id).copied()
    }
372
373 pub fn get_deopt_info(
379 &self,
380 function_id: u16,
381 deopt_id: usize,
382 ) -> Option<&crate::bytecode::DeoptInfo> {
383 self.deopt_tables
384 .get(&function_id)
385 .and_then(|points| points.get(deopt_id))
386 }
387
388 pub fn get_tier(&self, function_id: u16) -> Tier {
390 self.function_states
391 .get(function_id as usize)
392 .map(|s| s.tier)
393 .unwrap_or(Tier::Interpreted)
394 }
395
396 pub fn get_call_count(&self, function_id: u16) -> u32 {
398 self.function_states
399 .get(function_id as usize)
400 .map(|s| s.call_count)
401 .unwrap_or(0)
402 }
403
    /// Whether tiering is active at all (set once at construction).
    pub fn is_enabled(&self) -> bool {
        self.enabled
    }
408
    /// Number of functions with installed main-entry native code
    /// (OSR-only code is not counted; see `osr_compiled_count`).
    pub fn jit_compiled_count(&self) -> usize {
        self.native_code_table.len()
    }
413
414 #[inline]
425 pub fn record_loop_iteration(&mut self, func_id: u16, loop_ip: usize) -> bool {
426 if !self.enabled {
427 return false;
428 }
429 if self.osr_blacklist.contains(&(func_id, loop_ip)) {
431 return false;
432 }
433 let counter = self.loop_counters.entry((func_id, loop_ip)).or_insert(0);
434 *counter += 1;
435 *counter == self.osr_threshold && !self.osr_table.contains_key(&(func_id, loop_ip))
438 }
439
    /// Install OSR entry code for the loop at (`func_id`, `loop_ip`),
    /// replacing any previous entry for that loop.
    pub fn register_osr_code(&mut self, func_id: u16, loop_ip: usize, code: *const u8) {
        self.osr_table.insert((func_id, loop_ip), code);
    }
444
    /// OSR entry code for the loop at (`func_id`, `loop_ip`), if any.
    #[inline]
    pub fn get_osr_code(&self, func_id: u16, loop_ip: usize) -> Option<*const u8> {
        self.osr_table.get(&(func_id, loop_ip)).copied()
    }
450
    /// Current loop-iteration threshold that triggers OSR compilation.
    pub fn osr_threshold(&self) -> u32 {
        self.osr_threshold
    }
455
    /// Override the OSR trigger threshold (primarily for tests).
    ///
    /// NOTE(review): a threshold of 0 can never fire, because the
    /// equality check in `record_loop_iteration` runs after the counter
    /// increment — confirm 0 is not meant as an "always on" value.
    pub fn set_osr_threshold(&mut self, threshold: u32) {
        self.osr_threshold = threshold;
    }
460
461 pub fn get_loop_count(&self, func_id: u16, loop_ip: usize) -> u32 {
463 self.loop_counters
464 .get(&(func_id, loop_ip))
465 .copied()
466 .unwrap_or(0)
467 }
468
    /// Number of loops with installed OSR entry code.
    pub fn osr_compiled_count(&self) -> usize {
        self.osr_table.len()
    }
473
    /// Borrow the request channel to the JIT worker, if attached —
    /// lets callers enqueue custom requests (e.g. OSR compiles).
    pub fn compilation_sender(&self) -> Option<&mpsc::Sender<CompilationRequest>> {
        self.compilation_tx.as_ref()
    }
478
479 pub fn set_backend(
485 &mut self,
486 backend: Box<dyn CompilationBackend>,
487 program: Arc<BytecodeProgram>,
488 ) {
489 let (req_tx, req_rx) = mpsc::channel();
490 let (res_tx, res_rx) = mpsc::channel();
491 self.compilation_tx = Some(req_tx);
492 self.compilation_rx = Some(res_rx);
493 std::thread::Builder::new()
494 .name("shape-jit-worker".into())
495 .spawn(move || {
496 let mut backend = backend;
497 while let Ok(request) = req_rx.recv() {
498 let result = backend.compile(&request, &program);
499 if res_tx.send(result).is_err() {
500 break;
501 }
502 }
503 })
504 .expect("Failed to spawn JIT worker thread");
505 }
506
    /// Whether OSR has been permanently disabled for the loop at
    /// (`func_id`, `loop_ip`) after a failed compile.
    pub fn is_osr_blacklisted(&self, func_id: u16, loop_ip: usize) -> bool {
        self.osr_blacklist.contains(&(func_id, loop_ip))
    }
511
512 pub fn invalidate_function(&mut self, func_id: u16) {
521 self.native_code_table.remove(&func_id);
522 self.deopt_tables.remove(&func_id);
523 self.deopt_tracker.clear_function(func_id);
524 if let Some(state) = self.function_states.get_mut(func_id as usize) {
526 state.tier = Tier::Interpreted;
527 state.compilation_pending = false;
528 }
529 }
530
531 pub fn invalidate_osr(&mut self, func_id: u16) {
536 self.osr_table.retain(|&(fid, _), _| fid != func_id);
537 self.loop_counters.retain(|&(fid, _), _| fid != func_id);
538 }
539
540 pub fn invalidate_all(&mut self, func_id: u16) {
542 self.invalidate_function(func_id);
543 self.invalidate_osr(func_id);
544 }
545
546 pub fn stats(&self) -> TierStats {
548 let mut interpreted = 0usize;
549 let mut baseline = 0usize;
550 let mut optimizing = 0usize;
551 let mut pending = 0usize;
552
553 for state in &self.function_states {
554 match state.tier {
555 Tier::Interpreted => interpreted += 1,
556 Tier::BaselineJit => baseline += 1,
557 Tier::OptimizingJit => optimizing += 1,
558 }
559 if state.compilation_pending {
560 pending += 1;
561 }
562 }
563
564 TierStats {
565 interpreted,
566 baseline_jit: baseline,
567 optimizing_jit: optimizing,
568 pending_compilations: pending,
569 total_functions: self.function_states.len(),
570 }
571 }
572}
573
/// Aggregate tier distribution returned by `TierManager::stats`.
#[derive(Debug, Clone)]
pub struct TierStats {
    // Functions still running in the interpreter.
    pub interpreted: usize,
    // Functions with installed baseline code.
    pub baseline_jit: usize,
    // Functions with installed optimizing code.
    pub optimizing_jit: usize,
    // Functions with a compilation request currently in flight.
    pub pending_compilations: usize,
    // Total tracked functions (sum of the three tier counts).
    pub total_functions: usize,
}
583
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_default_tier_is_interpreted() {
        let mgr = TierManager::new(10, true);
        assert_eq!(mgr.get_tier(0), Tier::Interpreted);
        assert_eq!(mgr.get_tier(5), Tier::Interpreted);
    }

    #[test]
    fn test_call_count_tracking() {
        let mut mgr = TierManager::new(5, true);
        for _ in 0..50 {
            mgr.record_call(0, None);
        }
        assert_eq!(mgr.get_call_count(0), 50);
        assert_eq!(mgr.get_call_count(1), 0);
    }

    #[test]
    fn test_promotion_threshold() {
        let mut mgr = TierManager::new(5, true);

        for _ in 0..99 {
            mgr.record_call(0, None);
        }
        // The 100th call crosses the baseline threshold and requests a
        // compile...
        let promoted = mgr.record_call(0, None);
        assert!(promoted);

        // ...but the tier only changes once a result is polled in.
        assert_eq!(mgr.get_tier(0), Tier::Interpreted);
    }

    #[test]
    fn test_disabled_manager_no_promotion() {
        let mut mgr = TierManager::new(5, false);
        for _ in 0..200 {
            assert!(!mgr.record_call(0, None));
        }
    }

    #[test]
    fn test_out_of_bounds_function_id() {
        let mut mgr = TierManager::new(5, true);
        // Unknown ids are ignored and read back as defaults.
        assert!(!mgr.record_call(100, None));
        assert_eq!(mgr.get_tier(100), Tier::Interpreted);
        assert_eq!(mgr.get_call_count(100), 0);
    }

    #[test]
    fn test_stats() {
        let mgr = TierManager::new(10, true);
        let stats = mgr.stats();
        assert_eq!(stats.total_functions, 10);
        assert_eq!(stats.interpreted, 10);
        assert_eq!(stats.baseline_jit, 0);
        assert_eq!(stats.pending_compilations, 0);
    }

    // Full round trip: promotion request -> worker result -> install.
    #[test]
    fn test_channel_compilation_flow() {
        let mut mgr = TierManager::new(5, true);

        let (req_tx, req_rx) = mpsc::channel();
        let (res_tx, res_rx) = mpsc::channel();
        mgr.set_channels(req_tx, res_rx);

        for _ in 0..100 {
            mgr.record_call(0, None);
        }

        let request = req_rx.try_recv().unwrap();
        assert_eq!(request.function_id, 0);
        assert_eq!(request.target_tier, Tier::BaselineJit);

        res_tx
            .send(CompilationResult {
                function_id: 0,
                compiled_tier: Tier::BaselineJit,
                native_code: Some(0x1000 as *const u8),
                error: None,
                osr_entry: None,
                deopt_points: Vec::new(),
                loop_header_ip: None,
                shape_guards: Vec::new(),
            })
            .unwrap();

        let results = mgr.poll_completions();
        assert_eq!(results.len(), 1);
        assert_eq!(mgr.get_tier(0), Tier::BaselineJit);
        assert!(mgr.get_native_code(0).is_some());
    }

    #[test]
    fn test_tier_ordering() {
        assert!(Tier::Interpreted < Tier::BaselineJit);
        assert!(Tier::BaselineJit < Tier::OptimizingJit);
    }

    #[test]
    fn test_get_native_code_before_and_after_promotion() {
        let mut mgr = TierManager::new(5, true);

        assert!(mgr.get_native_code(0).is_none());

        let (req_tx, req_rx) = mpsc::channel();
        let (res_tx, res_rx) = mpsc::channel();
        mgr.set_channels(req_tx, res_rx);

        for _ in 0..100 {
            mgr.record_call(0, None);
        }

        let request = req_rx.try_recv().unwrap();
        assert_eq!(request.function_id, 0);

        // Requested but not yet compiled: still no native code.
        assert!(mgr.get_native_code(0).is_none());

        let fake_ptr = 0xDEAD_BEEF as *const u8;
        res_tx
            .send(CompilationResult {
                function_id: 0,
                compiled_tier: Tier::BaselineJit,
                native_code: Some(fake_ptr),
                error: None,
                osr_entry: None,
                deopt_points: Vec::new(),
                loop_header_ip: None,
                shape_guards: Vec::new(),
            })
            .unwrap();

        mgr.poll_completions();

        assert_eq!(mgr.get_native_code(0), Some(fake_ptr));
        assert_eq!(mgr.get_tier(0), Tier::BaselineJit);
    }

    #[test]
    fn test_compilation_failure_no_native_code() {
        let mut mgr = TierManager::new(5, true);

        let (req_tx, _req_rx) = mpsc::channel();
        let (res_tx, res_rx) = mpsc::channel();
        mgr.set_channels(req_tx, res_rx);

        for _ in 0..100 {
            mgr.record_call(0, None);
        }

        res_tx
            .send(CompilationResult {
                function_id: 0,
                compiled_tier: Tier::BaselineJit,
                native_code: None,
                error: Some("compilation failed".to_string()),
                osr_entry: None,
                deopt_points: Vec::new(),
                loop_header_ip: None,
                shape_guards: Vec::new(),
            })
            .unwrap();

        mgr.poll_completions();

        // Failure leaves the function fully interpreted.
        assert!(mgr.get_native_code(0).is_none());
        assert_eq!(mgr.get_tier(0), Tier::Interpreted);
    }

    #[test]
    fn test_no_duplicate_compilation_requests() {
        let mut mgr = TierManager::new(5, true);

        let (req_tx, req_rx) = mpsc::channel();
        let (_res_tx, res_rx) = mpsc::channel();
        mgr.set_channels(req_tx, res_rx);

        for _ in 0..200 {
            mgr.record_call(0, None);
        }

        // Only one request while compilation_pending stays set.
        let first = req_rx.try_recv();
        assert!(first.is_ok());
        let second = req_rx.try_recv();
        assert!(second.is_err());
    }

    #[test]
    fn test_optimizing_tier_promotion() {
        let mut mgr = TierManager::new(5, true);

        let (req_tx, req_rx) = mpsc::channel();
        let (res_tx, res_rx) = mpsc::channel();
        mgr.set_channels(req_tx, res_rx);

        for _ in 0..100 {
            mgr.record_call(0, None);
        }
        let request = req_rx.try_recv().unwrap();
        assert_eq!(request.target_tier, Tier::BaselineJit);

        res_tx
            .send(CompilationResult {
                function_id: 0,
                compiled_tier: Tier::BaselineJit,
                native_code: Some(0x1000 as *const u8),
                error: None,
                osr_entry: None,
                deopt_points: Vec::new(),
                loop_header_ip: None,
                shape_guards: Vec::new(),
            })
            .unwrap();
        mgr.poll_completions();
        assert_eq!(mgr.get_tier(0), Tier::BaselineJit);

        // Reaching 10_000 total calls triggers the optimizing request.
        for _ in 100..10_000 {
            mgr.record_call(0, None);
        }
        let request = req_rx.try_recv().unwrap();
        assert_eq!(request.target_tier, Tier::OptimizingJit);
    }

    #[test]
    fn test_loop_counter_threshold() {
        let mut mgr = TierManager::new(5, true);

        for _ in 0..999 {
            assert!(!mgr.record_loop_iteration(0, 42));
        }
        assert_eq!(mgr.get_loop_count(0, 42), 999);

        // Fires exactly on the default threshold (1000)...
        assert!(mgr.record_loop_iteration(0, 42));
        assert_eq!(mgr.get_loop_count(0, 42), 1000);

        // ...and never again afterwards.
        assert!(!mgr.record_loop_iteration(0, 42));
        assert_eq!(mgr.get_loop_count(0, 42), 1001);
    }

    #[test]
    fn test_loop_counter_different_loops() {
        let mut mgr = TierManager::new(5, true);
        mgr.set_osr_threshold(10);

        for _ in 0..10 {
            mgr.record_loop_iteration(0, 100);
        }
        assert_eq!(mgr.get_loop_count(0, 100), 10);
        assert_eq!(mgr.get_loop_count(0, 200), 0);

        // Counters are keyed by (function, loop) independently.
        for _ in 0..5 {
            mgr.record_loop_iteration(1, 100);
        }
        assert_eq!(mgr.get_loop_count(1, 100), 5);
        assert_eq!(mgr.get_loop_count(0, 100), 10);
    }

    #[test]
    fn test_osr_table_registration() {
        let mut mgr = TierManager::new(5, true);

        assert!(mgr.get_osr_code(0, 42).is_none());
        assert_eq!(mgr.osr_compiled_count(), 0);

        let fake_code = 0xBEEF as *const u8;
        mgr.register_osr_code(0, 42, fake_code);

        assert_eq!(mgr.get_osr_code(0, 42), Some(fake_code));
        assert_eq!(mgr.osr_compiled_count(), 1);

        assert!(mgr.get_osr_code(0, 100).is_none());
    }

    #[test]
    fn test_osr_threshold_prevents_duplicate_request() {
        let mut mgr = TierManager::new(5, true);
        mgr.set_osr_threshold(10);

        for _ in 0..9 {
            mgr.record_loop_iteration(0, 42);
        }
        // Trigger fires once; after OSR code is registered the equality
        // check can never signal again.
        assert!(mgr.record_loop_iteration(0, 42));
        mgr.register_osr_code(0, 42, 0x1000 as *const u8);

        for _ in 0..100 {
            assert!(!mgr.record_loop_iteration(0, 42));
        }
    }

    #[test]
    fn test_invalidate_function_clears_native_code() {
        let mut mgr = TierManager::new(5, true);

        let (req_tx, _req_rx) = mpsc::channel();
        let (res_tx, res_rx) = mpsc::channel();
        mgr.set_channels(req_tx, res_rx);

        for _ in 0..100 {
            mgr.record_call(0, None);
        }
        res_tx
            .send(CompilationResult {
                function_id: 0,
                compiled_tier: Tier::BaselineJit,
                native_code: Some(0x1000 as *const u8),
                error: None,
                osr_entry: None,
                deopt_points: Vec::new(),
                loop_header_ip: None,
                shape_guards: Vec::new(),
            })
            .unwrap();
        mgr.poll_completions();
        assert!(mgr.get_native_code(0).is_some());
        assert_eq!(mgr.get_tier(0), Tier::BaselineJit);

        mgr.invalidate_function(0);
        assert!(mgr.get_native_code(0).is_none());
        assert_eq!(mgr.get_tier(0), Tier::Interpreted);
        assert!(!mgr.function_states[0].compilation_pending);
    }

    #[test]
    fn test_invalidate_osr_clears_loop_entries() {
        let mut mgr = TierManager::new(5, true);
        mgr.set_osr_threshold(10);

        mgr.register_osr_code(0, 42, 0x1000 as *const u8);
        mgr.register_osr_code(0, 100, 0x2000 as *const u8);
        mgr.register_osr_code(1, 42, 0x3000 as *const u8);

        for _ in 0..50 {
            mgr.record_loop_iteration(0, 42);
            mgr.record_loop_iteration(0, 100);
            mgr.record_loop_iteration(1, 42);
        }

        mgr.invalidate_osr(0);

        // All of function 0's loops are cleared...
        assert!(mgr.get_osr_code(0, 42).is_none());
        assert!(mgr.get_osr_code(0, 100).is_none());
        assert_eq!(mgr.get_loop_count(0, 42), 0);
        assert_eq!(mgr.get_loop_count(0, 100), 0);

        // ...while function 1 is untouched.
        assert!(mgr.get_osr_code(1, 42).is_some());
        assert_eq!(mgr.get_loop_count(1, 42), 50);
    }

    #[test]
    fn test_invalidate_all() {
        let mut mgr = TierManager::new(5, true);

        let (req_tx, _req_rx) = mpsc::channel();
        let (res_tx, res_rx) = mpsc::channel();
        mgr.set_channels(req_tx, res_rx);

        for _ in 0..100 {
            mgr.record_call(0, None);
        }
        res_tx
            .send(CompilationResult {
                function_id: 0,
                compiled_tier: Tier::BaselineJit,
                native_code: Some(0x1000 as *const u8),
                error: None,
                osr_entry: None,
                deopt_points: Vec::new(),
                loop_header_ip: None,
                shape_guards: Vec::new(),
            })
            .unwrap();
        mgr.poll_completions();

        mgr.register_osr_code(0, 42, 0x2000 as *const u8);
        for _ in 0..50 {
            mgr.record_loop_iteration(0, 42);
        }

        mgr.invalidate_all(0);

        assert!(mgr.get_native_code(0).is_none());
        assert_eq!(mgr.get_tier(0), Tier::Interpreted);
        assert!(mgr.get_osr_code(0, 42).is_none());
        assert_eq!(mgr.get_loop_count(0, 42), 0);
    }

    #[test]
    fn test_loop_counter_disabled_manager() {
        let mut mgr = TierManager::new(5, false);
        for _ in 0..2000 {
            assert!(!mgr.record_loop_iteration(0, 42));
        }
    }

    #[test]
    fn test_custom_osr_threshold() {
        let mut mgr = TierManager::new(5, true);
        assert_eq!(mgr.osr_threshold(), DEFAULT_OSR_THRESHOLD);

        mgr.set_osr_threshold(50);
        assert_eq!(mgr.osr_threshold(), 50);

        for _ in 0..49 {
            assert!(!mgr.record_loop_iteration(0, 10));
        }
        assert!(mgr.record_loop_iteration(0, 10));
    }

    #[test]
    fn test_poll_completions_handles_osr_result() {
        let mut mgr = TierManager::new(5, true);

        let (req_tx, _req_rx) = mpsc::channel();
        let (res_tx, res_rx) = mpsc::channel();
        mgr.set_channels(req_tx, res_rx);

        for _ in 0..100 {
            mgr.record_call(0, None);
        }

        let osr_entry = crate::bytecode::OsrEntryPoint {
            bytecode_ip: 42,
            live_locals: vec![0, 1],
            local_kinds: vec![
                crate::type_tracking::SlotKind::Int64,
                crate::type_tracking::SlotKind::Float64,
            ],
            exit_ip: 100,
        };

        res_tx
            .send(CompilationResult {
                function_id: 0,
                compiled_tier: Tier::BaselineJit,
                native_code: Some(0xCAFE as *const u8),
                error: None,
                osr_entry: Some(osr_entry),
                deopt_points: Vec::new(),
                loop_header_ip: None,
                shape_guards: Vec::new(),
            })
            .unwrap();

        mgr.poll_completions();

        // OSR code lands in the OSR table, not the main-entry table,
        // and does not promote the function's tier.
        assert!(mgr.get_native_code(0).is_none());
        assert_eq!(mgr.get_osr_code(0, 42), Some(0xCAFE as *const u8));
        assert_eq!(mgr.get_tier(0), Tier::Interpreted);
    }

    #[test]
    fn test_osr_blacklist_on_compilation_failure() {
        let mut mgr = TierManager::new(5, true);
        mgr.set_osr_threshold(10);

        let (req_tx, _req_rx) = mpsc::channel();
        let (res_tx, res_rx) = mpsc::channel();
        mgr.set_channels(req_tx, res_rx);

        for _ in 0..100 {
            mgr.record_call(0, None);
        }

        res_tx
            .send(CompilationResult {
                function_id: 0,
                compiled_tier: Tier::BaselineJit,
                native_code: None,
                error: Some("unsupported opcode CallMethod".to_string()),
                osr_entry: None,
                deopt_points: Vec::new(),
                loop_header_ip: Some(42),
                shape_guards: Vec::new(),
            })
            .unwrap();

        mgr.poll_completions();

        // The failed loop is blacklisted and never re-triggers OSR.
        assert!(mgr.is_osr_blacklisted(0, 42));
        for _ in 0..2000 {
            assert!(!mgr.record_loop_iteration(0, 42));
        }
        assert!(!mgr.is_osr_blacklisted(0, 100));
    }

    #[test]
    fn test_compilation_result_loop_header_ip_roundtrip() {
        let mut mgr = TierManager::new(5, true);

        let (req_tx, _req_rx) = mpsc::channel();
        let (res_tx, res_rx) = mpsc::channel();
        mgr.set_channels(req_tx, res_rx);

        for _ in 0..100 {
            mgr.record_call(0, None);
        }

        res_tx
            .send(CompilationResult {
                function_id: 0,
                compiled_tier: Tier::BaselineJit,
                native_code: Some(0xABCD as *const u8),
                error: None,
                osr_entry: Some(crate::bytecode::OsrEntryPoint {
                    bytecode_ip: 55,
                    live_locals: vec![0],
                    local_kinds: vec![crate::type_tracking::SlotKind::Int64],
                    exit_ip: 80,
                }),
                deopt_points: Vec::new(),
                loop_header_ip: Some(55),
                shape_guards: Vec::new(),
            })
            .unwrap();

        let results = mgr.poll_completions();
        assert_eq!(results.len(), 1);
        assert_eq!(results[0].loop_header_ip, Some(55));
        assert_eq!(mgr.get_osr_code(0, 55), Some(0xABCD as *const u8));
    }

    #[test]
    fn test_deopt_table_stored_on_compilation() {
        let mut mgr = TierManager::new(5, true);

        let (req_tx, _req_rx) = mpsc::channel();
        let (res_tx, res_rx) = mpsc::channel();
        mgr.set_channels(req_tx, res_rx);

        for _ in 0..100 {
            mgr.record_call(0, None);
        }

        let deopt_info = crate::bytecode::DeoptInfo {
            resume_ip: 42,
            local_mapping: vec![(0, 0), (1, 2)],
            local_kinds: vec![
                crate::type_tracking::SlotKind::Int64,
                crate::type_tracking::SlotKind::Float64,
            ],
            stack_depth: 1,
            innermost_function_id: None,
            inline_frames: Vec::new(),
        };

        res_tx
            .send(CompilationResult {
                function_id: 0,
                compiled_tier: Tier::BaselineJit,
                native_code: Some(0xBEEF as *const u8),
                error: None,
                osr_entry: None,
                deopt_points: vec![deopt_info.clone()],
                loop_header_ip: None,
                shape_guards: Vec::new(),
            })
            .unwrap();

        mgr.poll_completions();

        let retrieved = mgr.get_deopt_info(0, 0);
        assert!(retrieved.is_some());
        assert_eq!(retrieved.unwrap().resume_ip, 42);
        assert_eq!(retrieved.unwrap().local_mapping.len(), 2);
        assert_eq!(retrieved.unwrap().stack_depth, 1);

        // Out-of-range deopt id and unknown function both miss.
        assert!(mgr.get_deopt_info(0, 1).is_none());

        assert!(mgr.get_deopt_info(1, 0).is_none());
    }

    #[test]
    fn test_deopt_table_cleared_on_invalidation() {
        let mut mgr = TierManager::new(5, true);

        let (req_tx, _req_rx) = mpsc::channel();
        let (res_tx, res_rx) = mpsc::channel();
        mgr.set_channels(req_tx, res_rx);

        for _ in 0..100 {
            mgr.record_call(0, None);
        }

        let deopt_info = crate::bytecode::DeoptInfo {
            resume_ip: 10,
            local_mapping: vec![(0, 0)],
            local_kinds: vec![crate::type_tracking::SlotKind::Int64],
            stack_depth: 0,
            innermost_function_id: None,
            inline_frames: Vec::new(),
        };

        res_tx
            .send(CompilationResult {
                function_id: 0,
                compiled_tier: Tier::BaselineJit,
                native_code: Some(0xCAFE as *const u8),
                error: None,
                osr_entry: None,
                deopt_points: vec![deopt_info],
                loop_header_ip: None,
                shape_guards: Vec::new(),
            })
            .unwrap();

        mgr.poll_completions();
        assert!(mgr.get_deopt_info(0, 0).is_some());

        mgr.invalidate_function(0);
        assert!(mgr.get_deopt_info(0, 0).is_none());
    }

    #[test]
    fn test_deopt_table_empty_not_stored() {
        let mut mgr = TierManager::new(5, true);

        let (req_tx, _req_rx) = mpsc::channel();
        let (res_tx, res_rx) = mpsc::channel();
        mgr.set_channels(req_tx, res_rx);

        for _ in 0..100 {
            mgr.record_call(0, None);
        }

        res_tx
            .send(CompilationResult {
                function_id: 0,
                compiled_tier: Tier::BaselineJit,
                native_code: Some(0x1234 as *const u8),
                error: None,
                osr_entry: None,
                deopt_points: Vec::new(),
                loop_header_ip: None,
                shape_guards: Vec::new(),
            })
            .unwrap();

        mgr.poll_completions();

        assert!(mgr.get_deopt_info(0, 0).is_none());
    }
}