1#[allow(dead_code)]
7use std::collections::{HashMap, VecDeque};
8use std::time::Instant;
9
/// Policy used to pick a free block when servicing an allocation request.
///
/// Dispatched by `AllocationStrategyManager::find_free_block`.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Default)]
pub enum AllocationStrategy {
    /// First bucket encountered that is large enough (see `find_first_fit`).
    FirstFit,
    /// Smallest bucket that still fits the request (minimizes waste).
    BestFit,
    /// Largest bucket that fits (leaves big remainders free).
    WorstFit,
    /// Exact size-class lookup only (see `find_buddy_block`).
    BuddySystem,
    /// Size-class buckets scanned smallest-first (see `find_segregated_block`).
    SegregatedList,
    /// Picks a basic strategy from the observed allocation pattern.
    #[default]
    Adaptive,
    /// Scores candidate strategies heuristically from recent features.
    MLBased,
    /// Switches between two configured strategies under memory pressure.
    Hybrid,
}
31
/// A contiguous memory region tracked by the allocator.
///
/// Holds a raw pointer plus bookkeeping; this struct does not allocate or
/// free the memory it points to. NOTE(review): the `*mut u8` field makes
/// this type neither `Send` nor `Sync` by default — confirm that is
/// acceptable for how the manager is shared.
#[derive(Debug, Clone)]
pub struct MemoryBlock {
    /// Start address of the region (not owned by this struct).
    pub ptr: *mut u8,
    /// Region length in bytes.
    pub size: usize,
    /// Whether the block currently sits in the free pool.
    pub is_free: bool,
    /// When the block was last handed out; `None` while free.
    pub allocated_at: Option<Instant>,
    /// When `update_access` last ran; `mark_used` does not set this.
    pub last_accessed: Option<Instant>,
    /// Times the block was marked used or accessed.
    pub access_count: u64,
    /// Heuristic score; initialized to 0.0 and never updated in this file.
    pub fragmentation_score: f32,
}
43
44impl MemoryBlock {
45 pub fn new(ptr: *mut u8, size: usize) -> Self {
46 Self {
47 ptr,
48 size,
49 is_free: true,
50 allocated_at: None,
51 last_accessed: None,
52 access_count: 0,
53 fragmentation_score: 0.0,
54 }
55 }
56
57 pub fn mark_used(&mut self) {
58 self.is_free = false;
59 self.allocated_at = Some(Instant::now());
60 self.access_count += 1;
61 }
62
63 pub fn mark_free(&mut self) {
64 self.is_free = true;
65 self.allocated_at = None;
66 }
67
68 pub fn update_access(&mut self) {
69 self.last_accessed = Some(Instant::now());
70 self.access_count += 1;
71 }
72}
73
/// One recorded allocation request, kept in the manager's history buffer.
#[derive(Debug, Clone)]
pub struct AllocationEvent {
    /// Requested size in bytes.
    pub size: usize,
    /// When the request was serviced.
    pub timestamp: Instant,
    /// Whether the free pool satisfied the request.
    pub cache_hit: bool,
    /// Lookup latency in microseconds.
    pub latency_us: u64,
    /// Originating thread, if known (never populated within this file).
    pub thread_id: Option<u64>,
    /// Free-form context tag, if known (never populated within this file).
    pub kernel_context: Option<String>,
}
90
91impl AllocationEvent {
92 pub fn new(size: usize, cache_hit: bool, latency_us: u64) -> Self {
93 Self {
94 size,
95 timestamp: Instant::now(),
96 cache_hit,
97 latency_us,
98 thread_id: None,
99 kernel_context: None,
100 }
101 }
102}
103
/// Aggregate counters and running averages for allocator activity.
#[derive(Debug, Clone, Default)]
pub struct AllocationStats {
    /// Total allocation requests recorded.
    pub total_allocations: u64,
    /// Total deallocations recorded.
    pub total_deallocations: u64,
    /// Requests satisfied from the free-block pool.
    pub cache_hits: u64,
    /// Requests the pool could not satisfy.
    pub cache_misses: u64,
    /// Detected fragmentation events (never incremented within this file).
    pub fragmentation_events: u64,
    /// Bytes currently outstanding (grows on alloc, shrinks on dealloc).
    pub total_allocated_bytes: u64,
    /// High-water mark of `total_allocated_bytes`.
    pub peak_allocated_bytes: u64,
    /// Mean allocation size; maintained by `record_allocation`.
    pub average_allocation_size: f64,
    /// Running mean per-allocation latency, in milliseconds.
    pub allocation_latency_ms: f64,
}
117
118impl AllocationStats {
119 pub fn record_allocation(&mut self, size: usize, cache_hit: bool, latency_us: u64) {
120 self.total_allocations += 1;
121 self.total_allocated_bytes += size as u64;
122
123 if self.total_allocated_bytes > self.peak_allocated_bytes {
124 self.peak_allocated_bytes = self.total_allocated_bytes;
125 }
126
127 if cache_hit {
128 self.cache_hits += 1;
129 } else {
130 self.cache_misses += 1;
131 }
132
133 self.average_allocation_size =
135 self.total_allocated_bytes as f64 / self.total_allocations as f64;
136
137 self.allocation_latency_ms = (self.allocation_latency_ms
139 * (self.total_allocations - 1) as f64
140 + latency_us as f64 / 1000.0)
141 / self.total_allocations as f64;
142 }
143
144 pub fn record_deallocation(&mut self, size: usize) {
145 self.total_deallocations += 1;
146 self.total_allocated_bytes = self.total_allocated_bytes.saturating_sub(size as u64);
147 }
148
149 pub fn get_cache_hit_rate(&self) -> f64 {
150 if self.total_allocations == 0 {
151 0.0
152 } else {
153 self.cache_hits as f64 / self.total_allocations as f64
154 }
155 }
156
157 pub fn get_fragmentation_rate(&self) -> f64 {
158 if self.total_allocations == 0 {
159 0.0
160 } else {
161 self.fragmentation_events as f64 / self.total_allocations as f64
162 }
163 }
164}
165
/// Coordinates free-block lookup across several allocation strategies,
/// recording statistics and a bounded event history as it goes.
pub struct AllocationStrategyManager {
    /// Top-level strategy dispatched by `find_free_block`.
    strategy: AllocationStrategy,
    /// Free pool: size-class bucket -> FIFO queue of blocks.
    free_blocks: HashMap<usize, VecDeque<MemoryBlock>>,
    /// Recent events, trimmed to `adaptive_config.history_window`.
    allocation_history: VecDeque<AllocationEvent>,
    /// Aggregate counters updated on every lookup.
    stats: AllocationStats,
    /// Tuning for the adaptive strategy (also sizes the history buffer).
    adaptive_config: AdaptiveConfig,
    /// Tuning for the hybrid strategy.
    hybrid_config: HybridConfig,
    /// Optional ML-selection settings; `None` degrades MLBased to best-fit.
    ml_config: Option<MLConfig>,
}
176
/// Tuning knobs for the adaptive strategy and history bookkeeping.
#[derive(Debug, Clone)]
pub struct AdaptiveConfig {
    /// Maximum number of events retained in the allocation history.
    pub history_window: usize,
    /// Sizes below this count as "small" during pattern analysis.
    pub small_allocation_threshold: usize,
    /// Sizes above this count as "large" during pattern analysis.
    pub large_allocation_threshold: usize,
    /// Fragmentation level considered problematic (not read in this file).
    pub fragmentation_threshold: f32,
    /// Toggle for pattern detection (not read in this file).
    pub enable_pattern_detection: bool,
    /// Re-evaluation cadence in events (not read in this file).
    pub adaptation_interval: u64,
}
187
impl Default for AdaptiveConfig {
    /// Defaults: 1000-event window, "small" < 4 KiB, "large" > 1 MiB,
    /// 30% fragmentation threshold, pattern detection enabled.
    fn default() -> Self {
        Self {
            history_window: 1000,
            small_allocation_threshold: 4096,
            large_allocation_threshold: 1024 * 1024,
            fragmentation_threshold: 0.3,
            enable_pattern_detection: true,
            adaptation_interval: 100,
        }
    }
}
200
/// Tuning for the hybrid strategy: the two strategies to toggle between
/// and the pressure thresholds that trigger the switch.
#[derive(Debug, Clone)]
pub struct HybridConfig {
    /// Strategy used under normal conditions.
    pub primary_strategy: AllocationStrategy,
    /// Strategy used while a threshold is exceeded.
    pub secondary_strategy: AllocationStrategy,
    /// Fragmentation level above which the secondary strategy kicks in.
    pub switch_threshold_fragmentation: f32,
    /// Utilization level above which the secondary strategy kicks in.
    pub switch_threshold_utilization: f32,
    /// Evaluation window size (not read in this file).
    pub evaluation_window: usize,
}
210
impl Default for HybridConfig {
    /// Defaults: best-fit normally, first-fit under pressure; switch at
    /// 40% fragmentation or 80% utilization.
    fn default() -> Self {
        Self {
            primary_strategy: AllocationStrategy::BestFit,
            secondary_strategy: AllocationStrategy::FirstFit,
            switch_threshold_fragmentation: 0.4,
            switch_threshold_utilization: 0.8,
            evaluation_window: 100,
        }
    }
}
222
/// Settings for ML-guided strategy selection.
#[derive(Debug, Clone)]
pub struct MLConfig {
    /// Selected model family (not consulted by the current heuristic
    /// scorer — see `predict_best_strategy`).
    pub model_type: MLModelType,
    /// Number of recent events used to build the feature vector.
    pub feature_window: usize,
    /// Retraining cadence (not read in this file).
    pub training_interval: u64,
    /// Minimum confidence required to follow the model's prediction.
    pub prediction_confidence_threshold: f32,
    /// Strategy used when confidence falls below the threshold.
    pub fallback_strategy: AllocationStrategy,
}
232
/// Model families selectable for ML-based allocation.
///
/// NOTE(review): no variant changes behavior in this file — the scorer in
/// `predict_best_strategy` is purely heuristic.
#[derive(Debug, Clone)]
pub enum MLModelType {
    LinearRegression,
    DecisionTree,
    NeuralNetwork,
    ReinforcementLearning,
}
240
241impl AllocationStrategyManager {
242 pub fn new(strategy: AllocationStrategy) -> Self {
243 Self {
244 strategy,
245 free_blocks: HashMap::new(),
246 allocation_history: VecDeque::new(),
247 stats: AllocationStats::default(),
248 adaptive_config: AdaptiveConfig::default(),
249 hybrid_config: HybridConfig::default(),
250 ml_config: None,
251 }
252 }
253
    /// Overrides the adaptive-strategy tuning (builder style).
    pub fn with_adaptive_config(mut self, config: AdaptiveConfig) -> Self {
        self.adaptive_config = config;
        self
    }

    /// Overrides the hybrid-strategy tuning (builder style).
    pub fn with_hybrid_config(mut self, config: HybridConfig) -> Self {
        self.hybrid_config = config;
        self
    }

    /// Enables ML-based selection with the given configuration.
    pub fn with_ml_config(mut self, config: MLConfig) -> Self {
        self.ml_config = Some(config);
        self
    }
268
269 pub fn find_free_block(&mut self, size: usize) -> Option<*mut u8> {
271 let start_time = Instant::now();
272
273 let result = match self.strategy {
274 AllocationStrategy::FirstFit => self.find_first_fit(size),
275 AllocationStrategy::BestFit => self.find_best_fit(size),
276 AllocationStrategy::WorstFit => self.find_worst_fit(size),
277 AllocationStrategy::BuddySystem => self.find_buddy_block(size),
278 AllocationStrategy::SegregatedList => self.find_segregated_block(size),
279 AllocationStrategy::Adaptive => self.find_adaptive_block(size),
280 AllocationStrategy::MLBased => self.find_ml_based_block(size),
281 AllocationStrategy::Hybrid => self.find_hybrid_block(size),
282 };
283
284 let latency_us = start_time.elapsed().as_micros() as u64;
285 let cache_hit = result.is_some();
286
287 self.stats.record_allocation(size, cache_hit, latency_us);
288 self.allocation_history
289 .push_back(AllocationEvent::new(size, cache_hit, latency_us));
290
291 if self.allocation_history.len() > self.adaptive_config.history_window {
293 self.allocation_history.pop_front();
294 }
295
296 result
297 }
298
299 pub fn find_first_fit(&mut self, size: usize) -> Option<*mut u8> {
301 for (&block_size, blocks) in &mut self.free_blocks {
302 if block_size >= size && !blocks.is_empty() {
303 if let Some(mut block) = blocks.pop_front() {
304 block.mark_used();
305 return Some(block.ptr);
306 }
307 }
308 }
309 None
310 }
311
312 pub fn find_best_fit(&mut self, size: usize) -> Option<*mut u8> {
314 let mut best_size = None;
315 let mut best_fit_size = usize::MAX;
316
317 for (&block_size, blocks) in &self.free_blocks {
318 if block_size >= size && block_size < best_fit_size && !blocks.is_empty() {
319 best_fit_size = block_size;
320 best_size = Some(block_size);
321 }
322 }
323
324 if let Some(block_size) = best_size {
325 if let Some(blocks) = self.free_blocks.get_mut(&block_size) {
326 if let Some(mut block) = blocks.pop_front() {
327 block.mark_used();
328 return Some(block.ptr);
329 }
330 }
331 }
332
333 None
334 }
335
336 pub fn find_worst_fit(&mut self, size: usize) -> Option<*mut u8> {
338 let mut worst_size = None;
339 let mut worst_fit_size = 0;
340
341 for (&block_size, blocks) in &self.free_blocks {
342 if block_size >= size && block_size > worst_fit_size && !blocks.is_empty() {
343 worst_fit_size = block_size;
344 worst_size = Some(block_size);
345 }
346 }
347
348 if let Some(block_size) = worst_size {
349 if let Some(blocks) = self.free_blocks.get_mut(&block_size) {
350 if let Some(mut block) = blocks.pop_front() {
351 block.mark_used();
352 return Some(block.ptr);
353 }
354 }
355 }
356
357 None
358 }
359
360 pub fn find_buddy_block(&mut self, size: usize) -> Option<*mut u8> {
362 let buddy_size = size.next_power_of_two();
363
364 if let Some(blocks) = self.free_blocks.get_mut(&buddy_size) {
365 if let Some(mut block) = blocks.pop_front() {
366 block.mark_used();
367 return Some(block.ptr);
368 }
369 }
370
371 None
372 }
373
374 pub fn find_segregated_block(&mut self, size: usize) -> Option<*mut u8> {
376 let size_class = self.get_size_class(size);
377
378 let mut search_sizes: Vec<usize> = self
380 .free_blocks
381 .keys()
382 .filter(|&&s| s >= size_class)
383 .cloned()
384 .collect();
385 search_sizes.sort();
386
387 for class_size in search_sizes {
388 if let Some(blocks) = self.free_blocks.get_mut(&class_size) {
389 if let Some(mut block) = blocks.pop_front() {
390 block.mark_used();
391 return Some(block.ptr);
392 }
393 }
394 }
395
396 None
397 }
398
399 pub fn find_adaptive_block(&mut self, size: usize) -> Option<*mut u8> {
401 let pattern = self.analyze_allocation_patterns();
403
404 let chosen_strategy = match pattern {
406 AllocationPattern::SmallFrequent => AllocationStrategy::FirstFit,
407 AllocationPattern::LargeInfrequent => AllocationStrategy::BestFit,
408 AllocationPattern::Mixed => AllocationStrategy::WorstFit,
409 AllocationPattern::Sequential => AllocationStrategy::SegregatedList,
410 AllocationPattern::Random => AllocationStrategy::BuddySystem,
411 AllocationPattern::Unknown => AllocationStrategy::BestFit,
412 };
413
414 match chosen_strategy {
416 AllocationStrategy::FirstFit => self.find_first_fit(size),
417 AllocationStrategy::BestFit => self.find_best_fit(size),
418 AllocationStrategy::WorstFit => self.find_worst_fit(size),
419 AllocationStrategy::SegregatedList => self.find_segregated_block(size),
420 AllocationStrategy::BuddySystem => self.find_buddy_block(size),
421 _ => self.find_best_fit(size), }
423 }
424
425 pub fn find_ml_based_block(&mut self, size: usize) -> Option<*mut u8> {
427 if let Some(ml_config) = self.ml_config.clone() {
428 let features = self.extract_ml_features(size);
430
431 let prediction = self.predict_best_strategy(&features, &ml_config);
433
434 if prediction.confidence >= ml_config.prediction_confidence_threshold as f64 {
436 match prediction.strategy {
437 AllocationStrategy::FirstFit => self.find_first_fit(size),
438 AllocationStrategy::BestFit => self.find_best_fit(size),
439 AllocationStrategy::WorstFit => self.find_worst_fit(size),
440 AllocationStrategy::BuddySystem => self.find_buddy_block(size),
441 AllocationStrategy::SegregatedList => self.find_segregated_block(size),
442 _ => self.apply_fallback_strategy(size, &ml_config.fallback_strategy),
443 }
444 } else {
445 self.apply_fallback_strategy(size, &ml_config.fallback_strategy)
447 }
448 } else {
449 self.find_best_fit(size)
451 }
452 }
453
454 pub fn find_hybrid_block(&mut self, size: usize) -> Option<*mut u8> {
456 let fragmentation_level = self.calculate_fragmentation_level();
458 let utilization_level = self.calculate_utilization_level();
459
460 let chosen_strategy = if fragmentation_level
462 > self.hybrid_config.switch_threshold_fragmentation as f64
463 || utilization_level > self.hybrid_config.switch_threshold_utilization as f64
464 {
465 self.hybrid_config.secondary_strategy.clone()
466 } else {
467 self.hybrid_config.primary_strategy.clone()
468 };
469
470 self.apply_fallback_strategy(size, &chosen_strategy)
472 }
473
    /// Dispatches `size` to the basic lookup matching `strategy`.
    ///
    /// `MLBased` and `Hybrid` deliberately collapse to best-fit here so a
    /// fallback can never recurse back into the ML/hybrid paths;
    /// `Adaptive` is still honored and may itself pick a basic strategy.
    fn apply_fallback_strategy(
        &mut self,
        size: usize,
        strategy: &AllocationStrategy,
    ) -> Option<*mut u8> {
        match strategy {
            AllocationStrategy::FirstFit => self.find_first_fit(size),
            AllocationStrategy::BestFit => self.find_best_fit(size),
            AllocationStrategy::WorstFit => self.find_worst_fit(size),
            AllocationStrategy::BuddySystem => self.find_buddy_block(size),
            AllocationStrategy::SegregatedList => self.find_segregated_block(size),
            AllocationStrategy::Adaptive => self.find_adaptive_block(size),
            _ => self.find_best_fit(size),
        }
    }
489
490 pub fn analyze_allocation_patterns(&self) -> AllocationPattern {
492 if self.allocation_history.len() < 10 {
493 return AllocationPattern::Unknown;
494 }
495
496 let recent_history: Vec<&AllocationEvent> =
497 self.allocation_history.iter().rev().take(50).collect();
498
499 let sizes: Vec<usize> = recent_history.iter().map(|e| e.size).collect();
501 let avg_size = sizes.iter().sum::<usize>() / sizes.len();
502 let small_count = sizes
503 .iter()
504 .filter(|&&s| s < self.adaptive_config.small_allocation_threshold)
505 .count();
506 let large_count = sizes
507 .iter()
508 .filter(|&&s| s > self.adaptive_config.large_allocation_threshold)
509 .count();
510
511 let time_diffs: Vec<u128> = recent_history
513 .windows(2)
514 .map(|w| w[0].timestamp.duration_since(w[1].timestamp).as_millis())
515 .collect();
516 let avg_interval = if !time_diffs.is_empty() {
517 time_diffs.iter().sum::<u128>() / time_diffs.len() as u128
518 } else {
519 0
520 };
521
522 if small_count > recent_history.len() * 8 / 10 && avg_interval < 100 {
524 AllocationPattern::SmallFrequent
525 } else if large_count > recent_history.len() / 2 {
526 AllocationPattern::LargeInfrequent
527 } else if self.is_sequential_pattern(&sizes) {
528 AllocationPattern::Sequential
529 } else if self.is_random_pattern(&sizes) {
530 AllocationPattern::Random
531 } else {
532 AllocationPattern::Mixed
533 }
534 }
535
536 fn is_sequential_pattern(&self, sizes: &[usize]) -> bool {
537 if sizes.len() < 3 {
538 return false;
539 }
540
541 let mut increasing = 0;
542 let mut decreasing = 0;
543
544 for window in sizes.windows(2) {
545 if window[1] > window[0] {
546 increasing += 1;
547 } else if window[1] < window[0] {
548 decreasing += 1;
549 }
550 }
551
552 let trend_ratio = (increasing.max(decreasing) as f64) / (sizes.len() - 1) as f64;
554 trend_ratio > 0.7
555 }
556
557 fn is_random_pattern(&self, sizes: &[usize]) -> bool {
558 if sizes.len() < 5 {
559 return false;
560 }
561
562 let mean = sizes.iter().sum::<usize>() as f64 / sizes.len() as f64;
564 let variance = sizes
565 .iter()
566 .map(|&s| (s as f64 - mean).powi(2))
567 .sum::<f64>()
568 / sizes.len() as f64;
569 let std_dev = variance.sqrt();
570 let cv = std_dev / mean;
571
572 cv > 0.5
574 }
575
    /// Builds the feature vector used to score strategies: the requested
    /// size plus aggregates over the most recent events (window length
    /// taken from the ML config, defaulting to 20).
    fn extract_ml_features(&self, size: usize) -> MLFeatures {
        let recent_history: Vec<&AllocationEvent> = self
            .allocation_history
            .iter()
            .rev()
            .take(
                self.ml_config
                    .as_ref()
                    .map(|c| c.feature_window)
                    .unwrap_or(20),
            )
            .collect();

        // Mean request size over the window (0.0 when the window is empty).
        let avg_size = if !recent_history.is_empty() {
            recent_history.iter().map(|e| e.size).sum::<usize>() as f64
                / recent_history.len() as f64
        } else {
            0.0
        };

        // Mean lookup latency over the window, in microseconds.
        let avg_latency = if !recent_history.is_empty() {
            recent_history.iter().map(|e| e.latency_us).sum::<u64>() as f64
                / recent_history.len() as f64
        } else {
            0.0
        };

        MLFeatures {
            requested_size: size as f64,
            avg_recent_size: avg_size,
            avg_recent_latency: avg_latency,
            cache_hit_rate: self.stats.get_cache_hit_rate(),
            fragmentation_level: self.calculate_fragmentation_level(),
            utilization_level: self.calculate_utilization_level(),
            // Number of events actually in the window; acts as a
            // frequency proxy for the scorer.
            allocation_frequency: recent_history.len() as f64,
        }
    }
614
615 fn predict_best_strategy(&self, features: &MLFeatures, ml_config: &MLConfig) -> MLPrediction {
617 let score_first_fit =
619 self.score_strategy_for_features(features, &AllocationStrategy::FirstFit);
620 let score_best_fit =
621 self.score_strategy_for_features(features, &AllocationStrategy::BestFit);
622 let score_worst_fit =
623 self.score_strategy_for_features(features, &AllocationStrategy::WorstFit);
624 let score_buddy =
625 self.score_strategy_for_features(features, &AllocationStrategy::BuddySystem);
626 let score_segregated =
627 self.score_strategy_for_features(features, &AllocationStrategy::SegregatedList);
628
629 let mut best_strategy = AllocationStrategy::BestFit;
630 let mut _best_score = score_best_fit;
631 let mut confidence = 0.5;
632
633 if score_first_fit > _best_score {
634 best_strategy = AllocationStrategy::FirstFit;
635 _best_score = score_first_fit;
636 }
637 if score_worst_fit > _best_score {
638 best_strategy = AllocationStrategy::WorstFit;
639 _best_score = score_worst_fit;
640 }
641 if score_buddy > _best_score {
642 best_strategy = AllocationStrategy::BuddySystem;
643 _best_score = score_buddy;
644 }
645 if score_segregated > _best_score {
646 best_strategy = AllocationStrategy::SegregatedList;
647 _best_score = score_segregated;
648 }
649
650 let mut scores = [
652 score_first_fit,
653 score_best_fit,
654 score_worst_fit,
655 score_buddy,
656 score_segregated,
657 ];
658 scores.sort_by(|a, b| b.partial_cmp(a).expect("unwrap failed"));
659 if scores.len() >= 2 {
660 confidence = (scores[0] - scores[1]).clamp(0.0, 1.0);
661 }
662
663 MLPrediction {
664 strategy: best_strategy,
665 confidence,
666 predicted_latency: features.avg_recent_latency,
667 }
668 }
669
    /// Heuristic score in [0, 1] for how well `strategy` suits the current
    /// features; consumed by `predict_best_strategy`.
    fn score_strategy_for_features(
        &self,
        features: &MLFeatures,
        strategy: &AllocationStrategy,
    ) -> f64 {
        match strategy {
            AllocationStrategy::FirstFit => {
                // Favored for small, high-frequency workloads.
                let size_score = if features.requested_size < 4096.0 {
                    0.8
                } else {
                    0.3
                };
                let freq_score = if features.allocation_frequency > 10.0 {
                    0.9
                } else {
                    0.4
                };
                (size_score + freq_score) / 2.0
            }
            AllocationStrategy::BestFit => {
                // Favored under high utilization and low fragmentation.
                let util_score = if features.utilization_level > 0.7 {
                    0.9
                } else {
                    0.6
                };
                let frag_score = if features.fragmentation_level < 0.3 {
                    0.8
                } else {
                    0.4
                };
                (util_score + frag_score) / 2.0
            }
            AllocationStrategy::WorstFit => {
                // Favored once fragmentation is already high.
                if features.fragmentation_level > 0.4 {
                    0.8
                } else {
                    0.3
                }
            }
            AllocationStrategy::BuddySystem => {
                // Favored for (near) power-of-two request sizes.
                // NOTE(review): `log2()` of 0.0 is -inf, whose fract() is
                // NaN, so a zero-sized request takes the 0.4 arm.
                if features.requested_size.log2().fract() < 0.1 {
                    0.9
                } else {
                    0.4
                }
            }
            // Guarded arm: segregated lists only beat the neutral score
            // when the cache-hit rate is already good.
            AllocationStrategy::SegregatedList
                if features.cache_hit_rate > 0.6 => {
                0.7
            }
            // Everything else — including SegregatedList with a poor hit
            // rate, Adaptive, MLBased, and Hybrid — scores neutral.
            _ => 0.5,
        }
    }
732
    /// Rough fragmentation estimate over the free pool, in [0, 1].
    ///
    /// NOTE(review): since `avg_block_size = total / count`, the formula
    /// `1 - avg / total` reduces to `1 - 1/count` — it grows with the
    /// number of free blocks but ignores their size spread entirely.
    /// Confirm that is the intended metric.
    fn calculate_fragmentation_level(&self) -> f64 {
        if self.free_blocks.is_empty() {
            return 0.0;
        }

        // Bytes in the pool, treating every block as being as large as
        // its bucket key. NOTE(review): blocks can be smaller than their
        // bucket (see `add_free_block`), so this may overestimate.
        let total_free_space: usize = self
            .free_blocks
            .iter()
            .map(|(size, blocks)| size * blocks.len())
            .sum();

        let free_block_count: usize = self.free_blocks.values().map(|blocks| blocks.len()).sum();

        // `total_free_space == 0` implies `free_block_count == 0` here,
        // so the division below never sees a zero denominator.
        if total_free_space == 0 {
            0.0
        } else {
            let avg_block_size = total_free_space as f64 / free_block_count as f64;
            let fragmentation = 1.0 - (avg_block_size / total_free_space as f64);
            fragmentation.clamp(0.0, 1.0)
        }
    }
755
756 fn calculate_utilization_level(&self) -> f64 {
757 let total_capacity = self.stats.peak_allocated_bytes as f64;
759 let current_allocated = self.stats.total_allocated_bytes as f64;
760
761 if total_capacity == 0.0 {
762 0.0
763 } else {
764 (current_allocated / total_capacity).clamp(0.0, 1.0)
765 }
766 }
767
768 pub fn get_size_class(&self, size: usize) -> usize {
770 match size {
771 0..=256 => 256,
772 257..=512 => 512,
773 513..=1024 => 1024,
774 1025..=2048 => 2048,
775 2049..=4096 => 4096,
776 4097..=8192 => 8192,
777 8193..=16384 => 16384,
778 16385..=32768 => 32768,
779 32769..=65536 => 65536,
780 65537..=131072 => 131072,
781 131073..=262144 => 262144,
782 262145..=524288 => 524288,
783 524289..=1048576 => 1048576,
784 _ => size.next_power_of_two(),
785 }
786 }
787
    /// Returns `block` to the free pool, bucketed by its size class.
    ///
    /// NOTE(review): `get_size_class` rounds *up*, so a block whose size
    /// is not itself a class boundary (e.g. 1500 bytes → the 2048 bucket)
    /// can later be handed out for a request larger than the block really
    /// is. Confirm callers only ever add class-sized blocks.
    pub fn add_free_block(&mut self, block: MemoryBlock) {
        let size_class = self.get_size_class(block.size);
        self.free_blocks
            .entry(size_class)
            .or_default()
            .push_back(block);
    }
796
797 pub fn remove_free_block(&mut self, size: usize, ptr: *mut u8) -> Option<MemoryBlock> {
799 let size_class = self.get_size_class(size);
800 if let Some(blocks) = self.free_blocks.get_mut(&size_class) {
801 if let Some(pos) = blocks.iter().position(|block| block.ptr == ptr) {
802 return blocks.remove(pos);
803 }
804 }
805 None
806 }
807
    /// Read-only view of the accumulated allocation statistics.
    pub fn get_stats(&self) -> &AllocationStats {
        &self.stats
    }

    /// Currently configured top-level strategy.
    pub fn get_strategy(&self) -> &AllocationStrategy {
        &self.strategy
    }

    /// Replaces the top-level strategy; takes effect on the next lookup.
    pub fn set_strategy(&mut self, strategy: AllocationStrategy) {
        self.strategy = strategy;
    }

    /// Discards the recorded event history (statistics are untouched).
    pub fn clear_history(&mut self) {
        self.allocation_history.clear();
    }

    /// Read-only view of the bounded allocation-event history.
    pub fn get_history(&self) -> &VecDeque<AllocationEvent> {
        &self.allocation_history
    }
832}
833
/// Coarse classification of recent allocation behavior, produced by
/// `analyze_allocation_patterns`.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum AllocationPattern {
    /// Mostly sub-threshold sizes arriving in quick succession.
    SmallFrequent,
    /// Mostly above-threshold sizes.
    LargeInfrequent,
    /// No dominant size class, trend, or variance signature.
    Mixed,
    /// Sizes trending mostly upward or mostly downward.
    Sequential,
    /// High size dispersion (coefficient of variation above 0.5).
    Random,
    /// Not enough history to classify (fewer than 10 events).
    Unknown,
}
844
/// Feature vector fed to the strategy scorer (see `extract_ml_features`).
#[derive(Debug, Clone)]
pub struct MLFeatures {
    /// Size of the current request, in bytes.
    pub requested_size: f64,
    /// Mean request size over the feature window.
    pub avg_recent_size: f64,
    /// Mean lookup latency over the feature window, in microseconds.
    pub avg_recent_latency: f64,
    /// Overall pool hit rate so far.
    pub cache_hit_rate: f64,
    /// Current fragmentation estimate in [0, 1].
    pub fragmentation_level: f64,
    /// Current utilization estimate in [0, 1].
    pub utilization_level: f64,
    /// Number of events in the feature window (frequency proxy).
    pub allocation_frequency: f64,
}
856
/// Outcome of strategy prediction (see `predict_best_strategy`).
#[derive(Debug, Clone)]
pub struct MLPrediction {
    /// Strategy with the highest heuristic score.
    pub strategy: AllocationStrategy,
    /// Margin between the top two scores, clamped to [0, 1].
    pub confidence: f64,
    /// Expected latency; currently echoes the recent average (µs).
    pub predicted_latency: f64,
}
864
#[cfg(test)]
mod tests {
    use super::*;

    /// Best-fit over a seeded pool should satisfy a mid-sized request and
    /// record exactly one allocation.
    #[test]
    fn test_allocation_strategies() {
        let mut manager = AllocationStrategyManager::new(AllocationStrategy::BestFit);

        // Seed blocks of 1024, 2048, 3072, 4096, 5120 bytes (the first
        // pointer is null; only bookkeeping is exercised here).
        for i in 0..5 {
            let block = MemoryBlock::new((i * 1024) as *mut u8, 1024 * (i + 1));
            manager.add_free_block(block);
        }

        let ptr = manager.find_free_block(1500);
        assert!(ptr.is_some());

        let stats = manager.get_stats();
        assert_eq!(stats.total_allocations, 1);
    }

    /// Size classes round up to powers of two with a 256-byte floor.
    #[test]
    fn test_size_classes() {
        let manager = AllocationStrategyManager::new(AllocationStrategy::SegregatedList);

        assert_eq!(manager.get_size_class(100), 256);
        assert_eq!(manager.get_size_class(300), 512);
        assert_eq!(manager.get_size_class(1000), 1024);
        assert_eq!(manager.get_size_class(2000000), 2097152);
    }

    /// Twenty rapid 256-byte events classify as `SmallFrequent`.
    #[test]
    fn test_adaptive_strategy() {
        let mut manager = AllocationStrategyManager::new(AllocationStrategy::Adaptive);

        for _ in 0..20 {
            let event = AllocationEvent::new(256, true, 10);
            manager.allocation_history.push_back(event);
        }

        let pattern = manager.analyze_allocation_patterns();
        assert_eq!(pattern, AllocationPattern::SmallFrequent);
    }

    /// The fragmentation estimate stays within [0, 1].
    #[test]
    fn test_fragmentation_calculation() {
        let mut manager = AllocationStrategyManager::new(AllocationStrategy::BestFit);

        manager.add_free_block(MemoryBlock::new(0x1000 as *mut u8, 1024));
        manager.add_free_block(MemoryBlock::new(0x2000 as *mut u8, 2048));
        manager.add_free_block(MemoryBlock::new(0x3000 as *mut u8, 512));

        let fragmentation = manager.calculate_fragmentation_level();
        assert!((0.0..=1.0).contains(&fragmentation));
    }
}