1pub mod core_tracker;
10pub mod core_types;
11pub mod export_options;
12
13pub mod lockfree_tracker;
15pub mod lockfree_types;
16
17pub mod async_tracker;
19pub mod async_types;
20
21pub mod task_profile;
23
24pub mod efficiency_scoring;
26
27pub mod bottleneck_analysis;
29
30pub mod hotspot_analysis;
32
33pub mod resource_ranking;
35
36pub mod unsafe_tracking;
38
39pub mod unified_tracker;
41
42pub mod global_tracking;
44
45use crate::event_store::{MemoryEvent, MemoryEventType};
46
47pub use core_tracker::{
49 collect_all_trackers_local, configure_tracking_strategy, get_registry_stats_local, get_tracker,
50 MemoryTracker,
51};
52
53pub use export_options::{ExportMode, ExportOptions};
55
56pub use async_tracker::{
58 create_tracked, get_memory_snapshot, initialize, is_tracking_active, shutdown, spawn_tracked,
59 track_current_allocation, track_current_deallocation, AsyncTracker,
60};
61pub use async_types::{
62 AsyncAllocation, AsyncError, AsyncMemorySnapshot, AsyncResult, AsyncSnapshot, AsyncStats,
63 ExtendedTaskInfo, TaskId, TaskInfo, TrackedFuture,
64};
65
66pub use task_profile::{AggregatedTaskStats, TaskMemoryProfile, TaskProfileManager, TaskType};
68
69pub use efficiency_scoring::{
71 ComponentScores, EfficiencyConfig, EfficiencyScorer, EfficiencyWeights,
72};
73
74pub use bottleneck_analysis::{
76 BottleneckAnalyzer, BottleneckConfig, BottleneckKind, BottleneckMetrics, PerformanceIssue,
77 TaskMetrics,
78};
79
80pub use hotspot_analysis::{
82 AllocationFrequencyPattern, CallStackHotspot, FrequencyAnalysis, HotspotAnalyzer,
83 HotspotConfig, HotspotStatistics, MemoryUsagePeak,
84};
85
86pub use resource_ranking::{
88 EfficiencyScores, RankingConfig, RankingStatistics, ResourceRanking, ResourceRankingAnalyzer,
89 TaskResourceMetrics,
90};
91
92pub use unsafe_tracking::{
94 AllocationInfo, AllocationOrigin, AllocationSource, MemoryPassport, OwnershipInfo,
95 PassportStamp, SafetyViolation, SecurityClearance, UnsafeTracker, UnsafeTrackingConfig,
96 UnsafeTrackingStats, ValidityStatus, ViolationSeverity,
97};
98
99pub use lockfree_tracker::{
101 finalize_thread_tracker, get_current_tracker, init_thread_tracker, is_tracking,
102 memory_snapshot, quick_trace, stop_tracing, trace_all, trace_thread, track_allocation_lockfree,
103 track_deallocation_lockfree, ThreadLocalTracker,
104};
105pub use lockfree_types::{
106 AllocationCategory, AnalysisSummary, Event, EventType, FrequencyData, FrequencyPattern,
107 InteractionType, LockfreeAnalysis, MemorySnapshot, MemoryStats, SamplingConfig, SystemMetrics,
108 ThreadInteraction, ThreadStats,
109};
110
111pub use unified_tracker::{
113 detect_environment, get_backend, initialize as initialize_unified, AsyncRuntimeType,
114 BackendConfig, DetectionConfig, DispatcherConfig, DispatcherMetrics, EnvironmentDetection,
115 EnvironmentDetector, MemoryAnalysisData, MemoryStatistics,
116 MemoryTracker as UnifiedMemoryTracker, RuntimeEnvironment, SessionMetadata, TrackerConfig,
117 TrackerStatistics, TrackerType, TrackingDispatcher, TrackingOperation, TrackingSession,
118 TrackingStrategy, UnifiedBackend,
119};
120
121pub use global_tracking::{
123 global_tracker, init_global_tracking, init_global_tracking_with_config, is_initialized,
124 GlobalTracker, GlobalTrackerConfig, GlobalTrackerStats,
125};
126
/// Common interface for pluggable memory-event capture backends.
///
/// Each backend turns a raw allocator operation (alloc / dealloc / realloc /
/// move) into a [`MemoryEvent`] record. Implementations must be `Send + Sync`
/// so one backend instance can be shared across threads behind a `Box<dyn>`.
pub trait CaptureBackend: Send + Sync {
    /// Record an allocation of `size` bytes at `ptr` on thread `thread_id`.
    fn capture_alloc(&self, ptr: usize, size: usize, thread_id: u64) -> MemoryEvent;

    /// Record a deallocation of `size` bytes at `ptr` on thread `thread_id`.
    fn capture_dealloc(&self, ptr: usize, size: usize, thread_id: u64) -> MemoryEvent;

    /// Record a reallocation at `ptr` from `old_size` to `new_size` bytes.
    fn capture_realloc(
        &self,
        ptr: usize,
        old_size: usize,
        new_size: usize,
        thread_id: u64,
    ) -> MemoryEvent;

    /// Record a move of `size` bytes to `to_ptr`.
    ///
    /// NOTE(review): the source pointer is declared `_from_ptr` and every
    /// in-file implementation ignores it — the emitted event only carries the
    /// destination. Confirm whether the origin should also be recorded.
    fn capture_move(
        &self,
        _from_ptr: usize,
        to_ptr: usize,
        size: usize,
        thread_id: u64,
    ) -> MemoryEvent;
}
156
/// Identifies which concrete [`CaptureBackend`] implementation to construct.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum CaptureBackendType {
    /// Baseline backend emitting plain events (`CoreBackend`).
    Core,
    /// Backend that tags every event with a call-stack hash (`LockfreeBackend`).
    Lockfree,
    /// Backend used in async contexts (`AsyncBackend`).
    Async,
    /// Auto-selecting backend that picks Core or Lockfree at runtime
    /// (`UnifiedCaptureBackend`).
    Unified,
}
169
170impl CaptureBackendType {
171 pub fn create_backend(&self) -> Box<dyn CaptureBackend> {
173 match self {
174 CaptureBackendType::Core => Box::new(CoreBackend),
175 CaptureBackendType::Lockfree => Box::new(LockfreeBackend),
176 CaptureBackendType::Async => Box::new(AsyncBackend),
177 CaptureBackendType::Unified => Box::new(UnifiedCaptureBackend::new()),
178 }
179 }
180}
181
/// Minimal capture backend: emits plain events with no extra metadata.
///
/// Stateless unit struct, so the full set of cheap derives applies
/// (`Copy`/`Default` let callers create and pass it freely).
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
pub struct CoreBackend;
187
188impl CaptureBackend for CoreBackend {
189 fn capture_alloc(&self, ptr: usize, size: usize, thread_id: u64) -> MemoryEvent {
190 MemoryEvent::allocate(ptr, size, thread_id)
191 }
192
193 fn capture_dealloc(&self, ptr: usize, size: usize, thread_id: u64) -> MemoryEvent {
194 MemoryEvent::deallocate(ptr, size, thread_id)
195 }
196
197 fn capture_realloc(
198 &self,
199 ptr: usize,
200 old_size: usize,
201 new_size: usize,
202 thread_id: u64,
203 ) -> MemoryEvent {
204 MemoryEvent::reallocate(ptr, old_size, new_size, thread_id)
205 }
206
207 fn capture_move(
208 &self,
209 _from_ptr: usize,
210 to_ptr: usize,
211 size: usize,
212 thread_id: u64,
213 ) -> MemoryEvent {
214 MemoryEvent {
215 timestamp: MemoryEvent::now(),
216 event_type: MemoryEventType::Move,
217 ptr: to_ptr,
218 size,
219 old_size: None,
220 thread_id,
221 var_name: None,
222 type_name: None,
223 call_stack_hash: None,
224 thread_name: None,
225 source_file: None,
226 source_line: None,
227 }
228 }
229}
230
/// Lock-free capture backend: every event it emits carries a call-stack hash
/// (see `hash_call_stack`).
///
/// Stateless unit struct, so the full set of cheap derives applies.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
pub struct LockfreeBackend;
237
238impl CaptureBackend for LockfreeBackend {
239 fn capture_alloc(&self, ptr: usize, size: usize, thread_id: u64) -> MemoryEvent {
240 MemoryEvent::allocate(ptr, size, thread_id).with_call_stack_hash(self.hash_call_stack())
241 }
242
243 fn capture_dealloc(&self, ptr: usize, size: usize, thread_id: u64) -> MemoryEvent {
244 MemoryEvent::deallocate(ptr, size, thread_id).with_call_stack_hash(self.hash_call_stack())
245 }
246
247 fn capture_realloc(
248 &self,
249 ptr: usize,
250 old_size: usize,
251 new_size: usize,
252 thread_id: u64,
253 ) -> MemoryEvent {
254 MemoryEvent::reallocate(ptr, old_size, new_size, thread_id)
255 .with_call_stack_hash(self.hash_call_stack())
256 }
257
258 fn capture_move(
259 &self,
260 _from_ptr: usize,
261 to_ptr: usize,
262 size: usize,
263 thread_id: u64,
264 ) -> MemoryEvent {
265 MemoryEvent {
266 timestamp: MemoryEvent::now(),
267 event_type: MemoryEventType::Move,
268 ptr: to_ptr,
269 size,
270 old_size: None,
271 thread_id,
272 var_name: None,
273 type_name: None,
274 call_stack_hash: Some(self.hash_call_stack()),
275 thread_name: None,
276 source_file: None,
277 source_line: None,
278 }
279 }
280}
281
282impl LockfreeBackend {
283 #[inline]
289 fn hash_call_stack(&self) -> u64 {
290 use std::collections::hash_map::DefaultHasher;
291 use std::hash::{Hash, Hasher};
292
293 let mut hasher = DefaultHasher::new();
294
295 std::thread::current().id().hash(&mut hasher);
297
298 static COUNTER: std::sync::atomic::AtomicU64 = std::sync::atomic::AtomicU64::new(0);
301 let count = COUNTER.fetch_add(1, std::sync::atomic::Ordering::Relaxed);
302 count.hash(&mut hasher);
303
304 hasher.finish()
305 }
306}
307
/// Capture backend used in async contexts; currently emits the same plain
/// events as `CoreBackend`.
///
/// Stateless unit struct, so the full set of cheap derives applies.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
pub struct AsyncBackend;
313
314impl CaptureBackend for AsyncBackend {
315 fn capture_alloc(&self, ptr: usize, size: usize, thread_id: u64) -> MemoryEvent {
316 MemoryEvent::allocate(ptr, size, thread_id)
317 }
318
319 fn capture_dealloc(&self, ptr: usize, size: usize, thread_id: u64) -> MemoryEvent {
320 MemoryEvent::deallocate(ptr, size, thread_id)
321 }
322
323 fn capture_realloc(
324 &self,
325 ptr: usize,
326 old_size: usize,
327 new_size: usize,
328 thread_id: u64,
329 ) -> MemoryEvent {
330 MemoryEvent::reallocate(ptr, old_size, new_size, thread_id)
331 }
332
333 fn capture_move(
334 &self,
335 _from_ptr: usize,
336 to_ptr: usize,
337 size: usize,
338 thread_id: u64,
339 ) -> MemoryEvent {
340 MemoryEvent {
341 timestamp: MemoryEvent::now(),
342 event_type: MemoryEventType::Move,
343 ptr: to_ptr,
344 size,
345 old_size: None,
346 thread_id,
347 var_name: None,
348 type_name: None,
349 call_stack_hash: None,
350 thread_name: None,
351 source_file: None,
352 source_line: None,
353 }
354 }
355}
356
357pub struct UnifiedCaptureBackend {
362 inner: Box<dyn CaptureBackend>,
364 backend_type: CaptureBackendType,
366}
367
368impl UnifiedCaptureBackend {
369 fn detect_best_backend() -> (Box<dyn CaptureBackend>, CaptureBackendType) {
379 let thread_count = std::thread::available_parallelism()
380 .map(|p| p.get())
381 .unwrap_or(1);
382
383 if thread_count <= 1 {
384 (Box::new(CoreBackend), CaptureBackendType::Core)
385 } else {
386 (Box::new(LockfreeBackend), CaptureBackendType::Lockfree)
387 }
388 }
389
390 pub fn new() -> Self {
392 let (inner, backend_type) = Self::detect_best_backend();
393 Self {
394 inner,
395 backend_type,
396 }
397 }
398
399 pub fn backend_type(&self) -> CaptureBackendType {
401 self.backend_type
402 }
403
404 pub fn refresh_backend(&mut self) {
413 let (new_inner, new_type) = Self::detect_best_backend();
414 self.inner = new_inner;
415 self.backend_type = new_type;
416 }
417}
418
impl Default for UnifiedCaptureBackend {
    /// Equivalent to [`UnifiedCaptureBackend::new`]: auto-detects the backend.
    fn default() -> Self {
        Self::new()
    }
}
424
425impl CaptureBackend for UnifiedCaptureBackend {
426 fn capture_alloc(&self, ptr: usize, size: usize, thread_id: u64) -> MemoryEvent {
427 self.inner.capture_alloc(ptr, size, thread_id)
428 }
429
430 fn capture_dealloc(&self, ptr: usize, size: usize, thread_id: u64) -> MemoryEvent {
431 self.inner.capture_dealloc(ptr, size, thread_id)
432 }
433
434 fn capture_realloc(
435 &self,
436 ptr: usize,
437 old_size: usize,
438 new_size: usize,
439 thread_id: u64,
440 ) -> MemoryEvent {
441 self.inner
442 .capture_realloc(ptr, old_size, new_size, thread_id)
443 }
444
445 fn capture_move(
446 &self,
447 from_ptr: usize,
448 to_ptr: usize,
449 size: usize,
450 thread_id: u64,
451 ) -> MemoryEvent {
452 self.inner.capture_move(from_ptr, to_ptr, size, thread_id)
453 }
454}
455
/// Unit tests: each backend must emit events that carry the inputs it was
/// given; the lockfree backend must additionally attach a call-stack hash.
#[cfg(test)]
mod tests {
    use super::*;

    // CoreBackend: allocation fields are passed through verbatim and the
    // event is classified as an allocation.
    #[test]
    fn test_core_backend() {
        let backend = CoreBackend;
        let event = backend.capture_alloc(0x1000, 1024, 1);
        assert_eq!(event.ptr, 0x1000);
        assert_eq!(event.size, 1024);
        assert_eq!(event.thread_id, 1);
        assert!(event.is_allocation());
    }

    // LockfreeBackend: same pass-through, plus a call-stack hash is present.
    #[test]
    fn test_lockfree_backend() {
        let backend = LockfreeBackend;
        let event = backend.capture_alloc(0x1000, 1024, 1);
        assert_eq!(event.ptr, 0x1000);
        assert_eq!(event.size, 1024);
        assert!(event.call_stack_hash.is_some());
    }

    // AsyncBackend: plain pass-through, like CoreBackend.
    #[test]
    fn test_async_backend() {
        let backend = AsyncBackend;
        let event = backend.capture_alloc(0x1000, 1024, 1);
        assert_eq!(event.ptr, 0x1000);
        assert_eq!(event.size, 1024);
    }

    // UnifiedCaptureBackend: whichever inner backend detection picked,
    // the delegated event still carries the given fields.
    #[test]
    fn test_unified_backend() {
        let backend = UnifiedCaptureBackend::default();
        let event = backend.capture_alloc(0x1000, 1024, 1);
        assert_eq!(event.ptr, 0x1000);
        assert_eq!(event.size, 1024);
    }

    // Every CaptureBackendType variant yields a usable boxed backend.
    #[test]
    fn test_backend_type_creation() {
        let core_backend = CaptureBackendType::Core.create_backend();
        let lockfree_backend = CaptureBackendType::Lockfree.create_backend();
        let async_backend = CaptureBackendType::Async.create_backend();
        let unified_backend = CaptureBackendType::Unified.create_backend();

        let event1 = core_backend.capture_alloc(0x1000, 1024, 1);
        let event2 = lockfree_backend.capture_alloc(0x2000, 2048, 2);
        let event3 = async_backend.capture_alloc(0x3000, 3072, 3);
        let event4 = unified_backend.capture_alloc(0x4000, 4096, 4);

        assert_eq!(event1.ptr, 0x1000);
        assert_eq!(event2.ptr, 0x2000);
        assert_eq!(event3.ptr, 0x3000);
        assert_eq!(event4.ptr, 0x4000);
    }
}