1pub mod core_tracker;
10pub mod core_types;
11pub mod export_options;
12
13pub mod lockfree_tracker;
15pub mod lockfree_types;
16
17pub mod async_tracker;
19pub mod async_types;
20
21pub mod task_profile;
23
24pub mod efficiency_scoring;
26
27pub mod bottleneck_analysis;
29
30pub mod hotspot_analysis;
32
33pub mod resource_ranking;
35
36pub mod unsafe_tracking;
38
39pub mod unified_tracker;
41
42pub mod global_tracking;
44
45use crate::event_store::{MemoryEvent, MemoryEventType};
46
47pub use core_tracker::{
49 collect_all_trackers_local, configure_tracking_strategy, get_registry_stats_local, get_tracker,
50 MemoryTracker,
51};
52
53pub use export_options::{ExportMode, ExportOptions};
55
56pub use async_tracker::{
58 create_tracked, get_memory_snapshot, initialize, is_tracking_active, shutdown, spawn_tracked,
59 track_current_allocation, track_current_deallocation, AsyncTracker,
60};
61pub use async_types::{
62 AsyncAllocation, AsyncError, AsyncMemorySnapshot, AsyncResult, AsyncSnapshot, AsyncStats,
63 ExtendedTaskInfo, TaskId, TaskInfo, TrackedFuture,
64};
65
66pub use task_profile::{AggregatedTaskStats, TaskMemoryProfile, TaskProfileManager, TaskType};
68
69pub use efficiency_scoring::{
71 ComponentScores, EfficiencyConfig, EfficiencyScorer, EfficiencyWeights,
72};
73
74pub use bottleneck_analysis::{
76 BottleneckAnalyzer, BottleneckConfig, BottleneckKind, BottleneckMetrics, PerformanceIssue,
77 TaskMetrics,
78};
79
80pub use hotspot_analysis::{
82 AllocationFrequencyPattern, CallStackHotspot, FrequencyAnalysis, HotspotAnalyzer,
83 HotspotConfig, HotspotStatistics, MemoryUsagePeak,
84};
85
86pub use resource_ranking::{
88 EfficiencyScores, RankingConfig, RankingStatistics, ResourceRanking, ResourceRankingAnalyzer,
89 TaskResourceMetrics,
90};
91
92pub use unsafe_tracking::{
94 AllocationInfo, AllocationOrigin, AllocationSource, MemoryPassport, OwnershipInfo,
95 PassportStamp, SafetyViolation, SecurityClearance, UnsafeTracker, UnsafeTrackingConfig,
96 UnsafeTrackingStats, ValidityStatus, ViolationSeverity,
97};
98
99pub use lockfree_tracker::{
101 finalize_thread_tracker, get_current_tracker, init_thread_tracker, is_tracking,
102 memory_snapshot, quick_trace, stop_tracing, trace_all, trace_thread, track_allocation_lockfree,
103 track_deallocation_lockfree, ThreadLocalTracker,
104};
105pub use lockfree_types::{
106 AllocationCategory, AnalysisSummary, Event, EventType, FrequencyData, FrequencyPattern,
107 InteractionType, LockfreeAnalysis, MemorySnapshot, MemoryStats, SamplingConfig, SystemMetrics,
108 ThreadInteraction, ThreadStats,
109};
110
111pub use unified_tracker::{
113 detect_environment, get_backend, initialize as initialize_unified, AsyncRuntimeType,
114 BackendConfig, DetectionConfig, DispatcherConfig, DispatcherMetrics, EnvironmentDetection,
115 EnvironmentDetector, MemoryAnalysisData, MemoryStatistics,
116 MemoryTracker as UnifiedMemoryTracker, RuntimeEnvironment, SessionMetadata, TrackerConfig,
117 TrackerStatistics, TrackerType, TrackingDispatcher, TrackingOperation, TrackingSession,
118 TrackingStrategy, UnifiedBackend,
119};
120
121pub use global_tracking::{
123 global_tracker, init_global_tracking, init_global_tracking_with_config, is_initialized,
124 GlobalTracker, GlobalTrackerConfig, GlobalTrackerStats,
125};
126
/// Abstraction over the concrete event-capture strategies.
///
/// Each backend converts a raw allocator operation (alloc / dealloc /
/// realloc / move) into a [`MemoryEvent`]. Implementations must be
/// `Send + Sync` so one backend instance can be shared across threads.
pub trait CaptureBackend: Send + Sync {
    /// Record an allocation of `size` bytes at `ptr` on thread `thread_id`.
    fn capture_alloc(&self, ptr: usize, size: usize, thread_id: u64) -> MemoryEvent;

    /// Record a deallocation of `size` bytes at `ptr` on thread `thread_id`.
    fn capture_dealloc(&self, ptr: usize, size: usize, thread_id: u64) -> MemoryEvent;

    /// Record a reallocation at `ptr` from `old_size` to `new_size` bytes.
    fn capture_realloc(
        &self,
        ptr: usize,
        old_size: usize,
        new_size: usize,
        thread_id: u64,
    ) -> MemoryEvent;

    /// Record a move of `size` bytes to `to_ptr`.
    ///
    /// NOTE(review): `_from_ptr` is ignored by every implementation in this
    /// file — the emitted event only carries the destination pointer.
    /// Confirm whether the source address should be preserved somewhere.
    fn capture_move(
        &self,
        _from_ptr: usize,
        to_ptr: usize,
        size: usize,
        thread_id: u64,
    ) -> MemoryEvent;
}
156
/// Selector for the available [`CaptureBackend`] implementations.
///
/// `Hash` is derived (in addition to the existing `PartialEq`/`Eq`) so the
/// type can be used directly as a `HashMap`/`HashSet` key; the enum is
/// fieldless, so all derives are free.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum CaptureBackendType {
    /// Simple backend that emits plain events with no extra metadata.
    Core,
    /// Backend that tags each event with a per-event call-stack hash.
    Lockfree,
    /// Backend intended for async runtimes (currently mirrors `Core`).
    Async,
    /// Auto-detecting backend that delegates to one of the others.
    Unified,
}
169
170impl CaptureBackendType {
171 pub fn create_backend(&self) -> Box<dyn CaptureBackend> {
173 match self {
174 CaptureBackendType::Core => Box::new(CoreBackend),
175 CaptureBackendType::Lockfree => Box::new(LockfreeBackend),
176 CaptureBackendType::Async => Box::new(AsyncBackend),
177 CaptureBackendType::Unified => Box::new(UnifiedCaptureBackend::new()),
178 }
179 }
180}
181
/// Minimal capture backend: emits plain events with no extra metadata.
#[derive(Debug)]
pub struct CoreBackend;
187
188impl CaptureBackend for CoreBackend {
189 fn capture_alloc(&self, ptr: usize, size: usize, thread_id: u64) -> MemoryEvent {
190 MemoryEvent::allocate(ptr, size, thread_id)
191 }
192
193 fn capture_dealloc(&self, ptr: usize, size: usize, thread_id: u64) -> MemoryEvent {
194 MemoryEvent::deallocate(ptr, size, thread_id)
195 }
196
197 fn capture_realloc(
198 &self,
199 ptr: usize,
200 old_size: usize,
201 new_size: usize,
202 thread_id: u64,
203 ) -> MemoryEvent {
204 MemoryEvent::reallocate(ptr, old_size, new_size, thread_id)
205 }
206
207 fn capture_move(
208 &self,
209 _from_ptr: usize,
210 to_ptr: usize,
211 size: usize,
212 thread_id: u64,
213 ) -> MemoryEvent {
214 MemoryEvent {
215 timestamp: MemoryEvent::now(),
216 event_type: MemoryEventType::Move,
217 ptr: to_ptr,
218 size,
219 old_size: None,
220 thread_id,
221 var_name: None,
222 type_name: None,
223 call_stack_hash: None,
224 thread_name: None,
225 }
226 }
227}
228
/// Capture backend that tags every event with a per-event hash
/// (see `hash_call_stack`).
#[derive(Debug)]
pub struct LockfreeBackend;
235
236impl CaptureBackend for LockfreeBackend {
237 fn capture_alloc(&self, ptr: usize, size: usize, thread_id: u64) -> MemoryEvent {
238 MemoryEvent::allocate(ptr, size, thread_id).with_call_stack_hash(self.hash_call_stack())
239 }
240
241 fn capture_dealloc(&self, ptr: usize, size: usize, thread_id: u64) -> MemoryEvent {
242 MemoryEvent::deallocate(ptr, size, thread_id).with_call_stack_hash(self.hash_call_stack())
243 }
244
245 fn capture_realloc(
246 &self,
247 ptr: usize,
248 old_size: usize,
249 new_size: usize,
250 thread_id: u64,
251 ) -> MemoryEvent {
252 MemoryEvent::reallocate(ptr, old_size, new_size, thread_id)
253 .with_call_stack_hash(self.hash_call_stack())
254 }
255
256 fn capture_move(
257 &self,
258 _from_ptr: usize,
259 to_ptr: usize,
260 size: usize,
261 thread_id: u64,
262 ) -> MemoryEvent {
263 MemoryEvent {
264 timestamp: MemoryEvent::now(),
265 event_type: MemoryEventType::Move,
266 ptr: to_ptr,
267 size,
268 old_size: None,
269 thread_id,
270 var_name: None,
271 type_name: None,
272 call_stack_hash: Some(self.hash_call_stack()),
273 thread_name: None,
274 }
275 }
276}
277
278impl LockfreeBackend {
279 #[inline]
285 fn hash_call_stack(&self) -> u64 {
286 use std::collections::hash_map::DefaultHasher;
287 use std::hash::{Hash, Hasher};
288
289 let mut hasher = DefaultHasher::new();
290
291 std::thread::current().id().hash(&mut hasher);
293
294 static COUNTER: std::sync::atomic::AtomicU64 = std::sync::atomic::AtomicU64::new(0);
297 let count = COUNTER.fetch_add(1, std::sync::atomic::Ordering::Relaxed);
298 count.hash(&mut hasher);
299
300 hasher.finish()
301 }
302}
303
/// Capture backend intended for async runtimes.
/// NOTE(review): its current event output is identical to `CoreBackend`.
#[derive(Debug)]
pub struct AsyncBackend;
309
310impl CaptureBackend for AsyncBackend {
311 fn capture_alloc(&self, ptr: usize, size: usize, thread_id: u64) -> MemoryEvent {
312 MemoryEvent::allocate(ptr, size, thread_id)
313 }
314
315 fn capture_dealloc(&self, ptr: usize, size: usize, thread_id: u64) -> MemoryEvent {
316 MemoryEvent::deallocate(ptr, size, thread_id)
317 }
318
319 fn capture_realloc(
320 &self,
321 ptr: usize,
322 old_size: usize,
323 new_size: usize,
324 thread_id: u64,
325 ) -> MemoryEvent {
326 MemoryEvent::reallocate(ptr, old_size, new_size, thread_id)
327 }
328
329 fn capture_move(
330 &self,
331 _from_ptr: usize,
332 to_ptr: usize,
333 size: usize,
334 thread_id: u64,
335 ) -> MemoryEvent {
336 MemoryEvent {
337 timestamp: MemoryEvent::now(),
338 event_type: MemoryEventType::Move,
339 ptr: to_ptr,
340 size,
341 old_size: None,
342 thread_id,
343 var_name: None,
344 type_name: None,
345 call_stack_hash: None,
346 thread_name: None,
347 }
348 }
349}
350
/// Backend that delegates to an automatically selected concrete backend.
// NOTE: cannot derive `Debug` because `CaptureBackend` has no `Debug` bound.
pub struct UnifiedCaptureBackend {
    /// The delegate chosen by environment detection.
    inner: Box<dyn CaptureBackend>,
    /// Which variant `inner` currently is; exposed via `backend_type()`.
    backend_type: CaptureBackendType,
}
361
362impl UnifiedCaptureBackend {
363 fn detect_best_backend() -> (Box<dyn CaptureBackend>, CaptureBackendType) {
373 let thread_count = std::thread::available_parallelism()
374 .map(|p| p.get())
375 .unwrap_or(1);
376
377 if thread_count <= 1 {
378 (Box::new(CoreBackend), CaptureBackendType::Core)
379 } else {
380 (Box::new(LockfreeBackend), CaptureBackendType::Lockfree)
381 }
382 }
383
384 pub fn new() -> Self {
386 let (inner, backend_type) = Self::detect_best_backend();
387 Self {
388 inner,
389 backend_type,
390 }
391 }
392
393 pub fn backend_type(&self) -> CaptureBackendType {
395 self.backend_type
396 }
397
398 pub fn refresh_backend(&mut self) {
407 let (new_inner, new_type) = Self::detect_best_backend();
408 self.inner = new_inner;
409 self.backend_type = new_type;
410 }
411}
412
impl Default for UnifiedCaptureBackend {
    /// Equivalent to [`UnifiedCaptureBackend::new`]: auto-detects the backend.
    fn default() -> Self {
        Self::new()
    }
}
418
419impl CaptureBackend for UnifiedCaptureBackend {
420 fn capture_alloc(&self, ptr: usize, size: usize, thread_id: u64) -> MemoryEvent {
421 self.inner.capture_alloc(ptr, size, thread_id)
422 }
423
424 fn capture_dealloc(&self, ptr: usize, size: usize, thread_id: u64) -> MemoryEvent {
425 self.inner.capture_dealloc(ptr, size, thread_id)
426 }
427
428 fn capture_realloc(
429 &self,
430 ptr: usize,
431 old_size: usize,
432 new_size: usize,
433 thread_id: u64,
434 ) -> MemoryEvent {
435 self.inner
436 .capture_realloc(ptr, old_size, new_size, thread_id)
437 }
438
439 fn capture_move(
440 &self,
441 from_ptr: usize,
442 to_ptr: usize,
443 size: usize,
444 thread_id: u64,
445 ) -> MemoryEvent {
446 self.inner.capture_move(from_ptr, to_ptr, size, thread_id)
447 }
448}
449
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_core_backend() {
        let event = CoreBackend.capture_alloc(0x1000, 1024, 1);
        assert_eq!((event.ptr, event.size, event.thread_id), (0x1000, 1024, 1));
        assert!(event.is_allocation());
    }

    #[test]
    fn test_lockfree_backend() {
        let event = LockfreeBackend.capture_alloc(0x1000, 1024, 1);
        assert_eq!((event.ptr, event.size), (0x1000, 1024));
        // The lockfree backend must attach its per-event hash.
        assert!(event.call_stack_hash.is_some());
    }

    #[test]
    fn test_async_backend() {
        let event = AsyncBackend.capture_alloc(0x1000, 1024, 1);
        assert_eq!((event.ptr, event.size), (0x1000, 1024));
    }

    #[test]
    fn test_unified_backend() {
        let event = UnifiedCaptureBackend::default().capture_alloc(0x1000, 1024, 1);
        assert_eq!((event.ptr, event.size), (0x1000, 1024));
    }

    #[test]
    fn test_backend_type_creation() {
        // Every variant must hand back a backend that records the pointer.
        let cases = [
            (CaptureBackendType::Core, 0x1000_usize, 1024_usize, 1_u64),
            (CaptureBackendType::Lockfree, 0x2000, 2048, 2),
            (CaptureBackendType::Async, 0x3000, 3072, 3),
            (CaptureBackendType::Unified, 0x4000, 4096, 4),
        ];
        for (kind, ptr, size, tid) in cases {
            let event = kind.create_backend().capture_alloc(ptr, size, tid);
            assert_eq!(event.ptr, ptr);
        }
    }
}