use crate::memory_profiler::allocation::{AccessPattern, AccessType};
use parking_lot::{Mutex, RwLock};
use std::collections::{HashMap, VecDeque};
use std::sync::Arc;
use std::time::{Duration, Instant};
/// Central service that classifies memory access patterns, keeps aggregate
/// statistics, emits optimization suggestions and maintains per-address
/// prediction models. All state is behind `Arc`ed locks so the analyzer can
/// be shared across threads.
#[derive(Debug)]
pub struct AccessPatternAnalyzer {
// Raw patterns currently being tracked, keyed by address.
active_patterns: Arc<RwLock<HashMap<usize, AccessPattern>>>,
// Latest accepted classification per address.
pattern_classifications: Arc<Mutex<HashMap<usize, PatternClassification>>>,
// Running aggregate statistics over all accepted classifications.
pattern_statistics: Arc<Mutex<PatternStatistics>>,
// Accumulated suggestions produced by `generate_optimization_suggestions`.
optimization_suggestions: Arc<Mutex<Vec<PatternOptimizationSuggestion>>>,
config: PatternAnalysisConfig,
// Per-address prediction models, populated when prediction is enabled.
prediction_models: Arc<Mutex<HashMap<usize, AccessPredictionModel>>>,
}
/// Result of classifying one `AccessPattern`: a dominant pattern type plus
/// any secondary candidates that also scored above the reporting threshold.
#[derive(Debug, Clone)]
pub struct PatternClassification {
pub primary_type: PatternType,
// Other candidates whose score exceeded 0.3 (see `classify_pattern`).
pub secondary_types: Vec<PatternType>,
// Score of the primary type; compared against `confidence_threshold`.
pub confidence: f64,
pub classified_at: Instant,
// Regularity of inter-access intervals, 0.0..=1.0.
pub stability: f64,
pub prediction: AccessPrediction,
}
/// Taxonomy of recognizable memory access patterns. Carries f64 payloads, so
/// it can only derive `PartialEq` (not `Eq`/`Hash`).
#[derive(Debug, Clone, PartialEq)]
pub enum PatternType {
/// Monotonic accesses with a (roughly) constant stride.
Sequential {
stride: usize,
direction: AccessDirection,
},
/// No exploitable order; characterized by entropy and a fitted distribution.
Random {
entropy: f64,
distribution: AccessDistribution,
},
/// Large, uniformly sized block transfers.
Streaming {
block_size: usize,
bandwidth_intensive: bool,
},
/// Repeated accesses to the same data within short time windows.
TemporalClustering {
cluster_size: usize,
access_frequency: f64,
},
/// Accesses concentrated in nearby address regions.
SpatialClustering {
locality_radius: usize,
cluster_density: f64,
},
/// Regular stride with a measured consistency score.
Strided {
stride_length: usize,
stride_consistency: f64,
},
/// Pattern that uses cache lines effectively.
CacheFriendly {
cache_line_utilization: f64,
prefetch_effectiveness: f64,
},
/// Pattern that defeats caching (high miss rate, possible thrashing).
CacheHostile {
cache_miss_rate: f64,
thrashing_likelihood: f64,
},
/// Dominated by computation rather than memory traffic.
ComputeIntensive {
compute_to_memory_ratio: f64,
arithmetic_intensity: f64,
},
/// Limited by available memory bandwidth.
BandwidthBound {
bandwidth_utilization: f64,
transfer_efficiency: f64,
},
/// Accesses that can be merged into fewer, wider transactions.
Coalescing {
coalescing_factor: f64,
efficiency: f64,
},
/// Pattern amenable to (or already benefiting from) prefetching.
Prefetch {
prefetch_distance: usize,
hit_rate: f64,
},
}
/// Dominant direction of a sequential access stream.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum AccessDirection {
Forward,
Backward,
Bidirectional,
}
/// Statistical distribution fitted to a random access pattern.
/// Currently only `Uniform` is ever produced (see `determine_distribution`).
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum AccessDistribution {
Uniform,
Normal,
Exponential,
PowerLaw,
Bimodal,
}
/// Forecast of upcoming accesses derived from a classified pattern.
#[derive(Debug, Clone)]
pub struct AccessPrediction {
pub next_accesses: Vec<PredictedAccess>,
pub prediction_confidence: f64,
// How far into the future the prediction is intended to be valid.
pub time_horizon: Duration,
// Candidate addresses worth prefetching ahead of the predicted stream.
pub prefetch_candidates: Vec<usize>,
pub cache_behavior: CacheBehaviorPrediction,
}
/// A single predicted future access.
#[derive(Debug, Clone)]
pub struct PredictedAccess {
pub address: usize,
pub access_type: AccessType,
pub size: usize,
pub confidence: f64,
// Expected delay until the access occurs.
pub estimated_time: Duration,
}
/// Expected cache-hierarchy behavior for the predicted access stream.
#[derive(Debug, Clone)]
pub struct CacheBehaviorPrediction {
pub l1_hit_rate: f64,
pub l2_hit_rate: f64,
pub tlb_hit_rate: f64,
pub bandwidth_usage: f64,
pub cache_warming: Vec<CacheWarmingRecommendation>,
}
/// Suggestion to pre-load an address range into a given cache level.
#[derive(Debug, Clone)]
pub struct CacheWarmingRecommendation {
// Inclusive start / end addresses of the range to warm.
pub address_range: (usize, usize),
pub priority: f64,
pub estimated_benefit: f64,
pub target_cache_level: CacheLevel,
}
/// Level of the memory hierarchy a recommendation targets.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum CacheLevel {
L1,
L2,
L3,
Memory,
}
/// Actionable optimization derived from a classified pattern.
#[derive(Debug, Clone)]
pub struct PatternOptimizationSuggestion {
// Address the suggestion applies to.
pub target: usize,
pub optimization_type: OptimizationType,
// Human-readable description of the recommended change.
pub suggestion: String,
// Estimated relative improvement, 0.0..=1.0.
pub expected_improvement: f64,
pub complexity: OptimizationComplexity,
pub prerequisites: Vec<String>,
pub timeline: OptimizationTimeline,
}
/// Concrete optimization strategies, each with its tuning payload.
#[derive(Debug, Clone)]
pub enum OptimizationType {
DataLayoutOptimization {
suggested_layout: DataLayout,
memory_savings: usize,
},
PrefetchingOptimization {
prefetch_distance: usize,
prefetch_pattern: PrefetchPattern,
},
CacheOptimization {
cache_strategy: CacheStrategy,
target_cache_level: CacheLevel,
},
MemoryPooling {
pool_size: usize,
allocation_strategy: AllocationStrategy,
},
AccessTransformation {
transformation_type: TransformationType,
expected_locality_improvement: f64,
},
BandwidthOptimization {
batching_strategy: BatchingStrategy,
transfer_optimization: TransferOptimization,
},
}
/// Candidate in-memory data layouts for layout optimization.
#[derive(Debug, Clone)]
pub enum DataLayout {
ArrayOfStructs,
StructOfArrays,
Columnar,
Tiled,
Compressed,
Interleaved,
}
/// Prefetch issue strategies.
#[derive(Debug, Clone)]
pub enum PrefetchPattern {
Sequential,
Strided,
Adaptive,
Predictive,
}
/// Cache write policies and organizations a suggestion may recommend.
#[derive(Debug, Clone)]
pub enum CacheStrategy {
WriteThrough,
WriteBack,
WriteAround,
DirectMapped,
FullyAssociative,
SetAssociative { ways: usize },
}
/// Allocator strategies usable by a memory pool.
#[derive(Debug, Clone)]
pub enum AllocationStrategy {
BestFit,
FirstFit,
NextFit,
BuddySystem,
Slab,
Pool,
}
/// Loop/data transformations that improve locality.
#[derive(Debug, Clone)]
pub enum TransformationType {
Blocking,
Tiling,
LoopReordering,
DataReorganization,
TemporalBlocking,
SpatialBlocking,
}
/// How transfers are grouped for bandwidth optimization.
#[derive(Debug, Clone)]
pub enum BatchingStrategy {
TimeBased,
SizeBased,
AdaptiveBatching,
PriorityBatching,
}
/// Transfer-level mechanisms for bandwidth optimization.
#[derive(Debug, Clone)]
pub enum TransferOptimization {
Coalescing,
Vectorization,
PipelinedTransfers,
AsynchronousTransfers,
}
/// Implementation effort, ordered from cheapest to most expensive.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub enum OptimizationComplexity {
Low,
Medium,
High,
VeryHigh,
}
/// When the optimization can realistically be applied.
#[derive(Debug, Clone)]
pub enum OptimizationTimeline {
Immediate,
ShortTerm, MediumTerm, LongTerm, }
/// Aggregate statistics over all accepted pattern classifications.
#[derive(Debug, Default, Clone)]
pub struct PatternStatistics {
pub total_patterns: u64,
// Count of classifications per pattern-type name (linear-scanned Vec).
pub pattern_distribution: Vec<(String, u64)>,
// Running mean of classification confidence (see `update_statistics`).
pub average_confidence: f64,
pub cache_efficiency: CacheEfficiencyStats,
pub temporal_analysis: TemporalAnalysisStats,
pub spatial_analysis: SpatialAnalysisStats,
pub performance_impact: PerformanceImpactStats,
}
/// Cache efficiency rollup across hierarchy levels.
#[derive(Debug, Default, Clone)]
pub struct CacheEfficiencyStats {
pub overall_hit_rate: f64,
pub l1_stats: CacheLevelStats,
pub l2_stats: CacheLevelStats,
pub tlb_stats: CacheLevelStats,
pub cache_friendly_percentage: f64,
}
/// Per-cache-level metrics.
#[derive(Debug, Default, Clone)]
pub struct CacheLevelStats {
pub hit_rate: f64,
pub miss_rate: f64,
pub avg_latency: Duration,
pub bandwidth_utilization: f64,
}
/// Temporal-locality summary.
#[derive(Debug, Default, Clone)]
pub struct TemporalAnalysisStats {
pub avg_temporal_locality: f64,
pub hot_spots: Vec<TemporalHotSpot>,
// (frequency bucket, count) pairs.
pub frequency_distribution: Vec<(f64, u64)>,
pub clustering_strength: f64,
}
/// An address range that sees bursts of frequent access.
#[derive(Debug, Clone)]
pub struct TemporalHotSpot {
pub address_range: (usize, usize),
pub frequency: f64,
pub duration: Duration,
pub peak_time: Instant,
}
/// Spatial-locality summary.
#[derive(Debug, Default, Clone)]
pub struct SpatialAnalysisStats {
pub avg_spatial_locality: f64,
pub stride_patterns: Vec<StridePattern>,
pub locality_clusters: Vec<LocalityCluster>,
pub fragmentation_impact: f64,
}
/// A recurring stride observed in the access stream.
#[derive(Debug, Clone)]
pub struct StridePattern {
pub stride_length: usize,
pub frequency: u64,
pub consistency: f64,
pub memory_range: (usize, usize),
}
/// A spatial cluster of accesses around a center address.
#[derive(Debug, Clone)]
pub struct LocalityCluster {
pub center: usize,
pub radius: usize,
pub density: f64,
pub lifetime: Duration,
}
/// Estimated performance consequences of the observed patterns.
#[derive(Debug, Default, Clone)]
pub struct PerformanceImpactStats {
pub bandwidth_efficiency: f64,
pub cache_miss_penalty: Duration,
pub contention_level: f64,
pub optimization_potential: f64,
}
/// Tuning knobs for the analyzer; see `Default` impl for baseline values.
#[derive(Debug, Clone)]
pub struct PatternAnalysisConfig {
// Minimum number of recorded accesses before a pattern is classified.
pub min_pattern_length: usize,
pub analysis_window: Duration,
// Classifications below this confidence are discarded.
pub confidence_threshold: f64,
pub enable_prediction: bool,
pub enable_optimization_suggestions: bool,
// NOTE(review): declared but not enforced anywhere visible in this file.
pub max_tracked_patterns: usize,
pub classification_sensitivity: f64,
pub cache_analysis_depth: usize,
}
/// Per-address model that accumulates access events and (eventually) learns
/// to predict future accesses.
#[derive(Debug)]
pub struct AccessPredictionModel {
// Bounded history (capped at `model_params.history_window`).
access_history: VecDeque<AccessEvent>,
pattern_state: PatternRecognitionState,
accuracy_tracker: AccuracyTracker,
model_params: PredictionModelParams,
}
/// One observed memory access fed into a prediction model.
#[derive(Debug, Clone)]
pub struct AccessEvent {
pub address: usize,
pub access_type: AccessType,
pub size: usize,
pub timestamp: Instant,
pub context: AccessContext,
}
/// Execution context captured alongside an access event.
#[derive(Debug, Clone)]
pub struct AccessContext {
pub thread_id: u64,
pub operation: String,
pub allocation_id: Option<usize>,
}
/// Internal hypothesis state for pattern recognition.
/// NOTE(review): `PatternType` derives only `PartialEq` (f64 payloads), so it
/// cannot implement `Eq`/`Hash`; these `HashMap`s keyed by `PatternType`
/// cannot currently be inserted into — confirm intended key type.
#[derive(Debug)]
struct PatternRecognitionState {
current_hypothesis: Option<PatternType>,
transition_probabilities: HashMap<PatternType, HashMap<PatternType, f64>>,
confidence: f64,
}
/// Rolling accuracy bookkeeping for model predictions.
#[derive(Debug)]
struct AccuracyTracker {
correct_predictions: u64,
total_predictions: u64,
recent_accuracy: VecDeque<bool>,
accuracy_by_pattern: HashMap<PatternType, (u64, u64)>, }
/// Hyper-parameters controlling a prediction model.
#[derive(Debug, Clone)]
struct PredictionModelParams {
history_window: usize,
prediction_horizon: Duration,
learning_rate: f64,
pattern_threshold: f64,
}
/// Returns the human-readable variant name of a `PatternType`, ignoring the
/// variant's payload. Used as the key for `PatternStatistics` distribution
/// counting.
fn pattern_type_name(pattern: &PatternType) -> String {
    let name = match pattern {
        PatternType::Sequential { .. } => "Sequential",
        PatternType::Random { .. } => "Random",
        PatternType::Streaming { .. } => "Streaming",
        PatternType::TemporalClustering { .. } => "TemporalClustering",
        PatternType::SpatialClustering { .. } => "SpatialClustering",
        PatternType::Strided { .. } => "Strided",
        PatternType::Coalescing { .. } => "Coalescing",
        PatternType::Prefetch { .. } => "Prefetch",
        PatternType::CacheFriendly { .. } => "CacheFriendly",
        PatternType::CacheHostile { .. } => "CacheHostile",
        PatternType::ComputeIntensive { .. } => "ComputeIntensive",
        PatternType::BandwidthBound { .. } => "BandwidthBound",
    };
    name.to_string()
}
impl AccessPatternAnalyzer {
/// Creates an analyzer with empty tracking state under `config`.
pub fn new(config: PatternAnalysisConfig) -> Self {
Self {
active_patterns: Arc::new(RwLock::new(HashMap::new())),
pattern_classifications: Arc::new(Mutex::new(HashMap::new())),
pattern_statistics: Arc::new(Mutex::new(PatternStatistics::default())),
optimization_suggestions: Arc::new(Mutex::new(Vec::new())),
config,
prediction_models: Arc::new(Mutex::new(HashMap::new())),
}
}
/// Classifies `pattern` and, when the result clears the configured
/// confidence threshold, records it under `address` (also updating
/// statistics and, if enabled, suggestions and the prediction model).
///
/// Returns `None` when the pattern has too few samples or the
/// classification confidence falls below `confidence_threshold`.
pub fn analyze_pattern(
    &self,
    address: usize,
    pattern: &AccessPattern,
) -> Option<PatternClassification> {
    // Guard: not enough recorded accesses to classify meaningfully.
    if pattern.access_times.len() < self.config.min_pattern_length {
        return None;
    }
    let classification = self.classify_pattern(pattern);
    if classification.confidence >= self.config.confidence_threshold {
        self.pattern_classifications
            .lock()
            .insert(address, classification.clone());
        self.update_statistics(&classification);
        if self.config.enable_optimization_suggestions {
            self.generate_optimization_suggestions(address, &classification);
        }
        if self.config.enable_prediction {
            self.update_prediction_model(address, pattern);
        }
        return Some(classification);
    }
    None
}
/// Scores all candidate pattern types for `pattern` and returns the
/// highest-scoring one as primary, with any other candidates scoring above
/// 0.3 reported as secondary types.
///
/// Sequential and Random candidates are always scored; Streaming,
/// TemporalClustering and SpatialClustering are only considered when their
/// score exceeds a minimum (0.3 / 0.5 / 0.5 respectively).
fn classify_pattern(&self, pattern: &AccessPattern) -> PatternClassification {
    // Mandatory candidates: every pattern gets a sequential and a random score.
    let mut confidence_scores: Vec<(PatternType, f64)> = vec![
        (
            PatternType::Sequential {
                stride: self.estimate_stride(pattern),
                direction: self.determine_direction(pattern),
            },
            pattern.sequential_score,
        ),
        (
            PatternType::Random {
                entropy: self.calculate_entropy(pattern),
                distribution: self.determine_distribution(pattern),
            },
            pattern.random_score,
        ),
    ];
    let streaming_score = self.analyze_streaming(pattern);
    if streaming_score > 0.3 {
        confidence_scores.push((
            PatternType::Streaming {
                block_size: self.estimate_block_size(pattern),
                bandwidth_intensive: streaming_score > 0.7,
            },
            streaming_score,
        ));
    }
    let temporal_score = pattern.temporal_locality;
    if temporal_score > 0.5 {
        confidence_scores.push((
            PatternType::TemporalClustering {
                cluster_size: self.estimate_cluster_size(pattern),
                access_frequency: pattern.frequency,
            },
            temporal_score,
        ));
    }
    let spatial_score = pattern.spatial_locality;
    if spatial_score > 0.5 {
        confidence_scores.push((
            PatternType::SpatialClustering {
                locality_radius: self.estimate_locality_radius(pattern),
                cluster_density: spatial_score,
            },
            spatial_score,
        ));
    }
    // Index of the best-scoring candidate. NaN scores compare as Equal, so
    // they neither win nor panic.
    let primary_index = confidence_scores
        .iter()
        .enumerate()
        .max_by(|(_, (_, a)), (_, (_, b))| {
            a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal)
        })
        .map(|(i, _)| i)
        .unwrap_or(0);
    // `confidence_scores` always holds at least the two mandatory
    // candidates, so the fallback below is purely defensive (the original
    // had an unreachable `else` branch with the same default).
    let (primary_type, confidence) = confidence_scores
        .get(primary_index)
        .cloned()
        .unwrap_or((
            PatternType::Random {
                entropy: 1.0,
                distribution: AccessDistribution::Uniform,
            },
            0.0,
        ));
    // Everything else above the reporting threshold becomes secondary.
    let secondary_types: Vec<PatternType> = confidence_scores
        .into_iter()
        .enumerate()
        .filter(|(i, (_, score))| *i != primary_index && *score > 0.3)
        .map(|(_, (pattern, _))| pattern)
        .collect();
    let stability = self.calculate_pattern_stability(pattern);
    let prediction = self.generate_access_prediction(pattern, &primary_type);
    PatternClassification {
        primary_type,
        secondary_types,
        confidence,
        classified_at: Instant::now(),
        stability,
        prediction,
    }
}
/// Estimates the dominant stride as the median of the positive differences
/// between consecutive recorded access sizes; returns 0 when fewer than two
/// samples exist or no increasing pair is found.
fn estimate_stride(&self, pattern: &AccessPattern) -> usize {
    if pattern.access_sizes.len() < 2 {
        return 0;
    }
    let sizes: Vec<usize> = pattern.access_sizes.iter().copied().collect();
    // Only increasing steps contribute; flat or decreasing pairs are skipped.
    let mut strides: Vec<usize> = sizes
        .windows(2)
        .filter(|pair| pair[1] > pair[0])
        .map(|pair| pair[1] - pair[0])
        .collect();
    if strides.is_empty() {
        return 0;
    }
    // Median is robust against occasional outlier jumps.
    strides.sort_unstable();
    strides[strides.len() / 2]
}
/// Infers the dominant direction of the access stream from consecutive
/// size comparisons: >80% increasing → Forward, <20% → Backward, otherwise
/// Bidirectional. Defaults to Forward when there is too little data.
fn determine_direction(&self, pattern: &AccessPattern) -> AccessDirection {
    if pattern.access_sizes.len() < 3 {
        return AccessDirection::Forward;
    }
    let sizes: Vec<usize> = pattern.access_sizes.iter().copied().collect();
    let mut forward = 0usize;
    let mut backward = 0usize;
    for pair in sizes.windows(2) {
        match pair[1].cmp(&pair[0]) {
            std::cmp::Ordering::Greater => forward += 1,
            std::cmp::Ordering::Less => backward += 1,
            std::cmp::Ordering::Equal => {}
        }
    }
    let total = forward + backward;
    if total == 0 {
        // Every pair was equal — treat as forward by convention.
        return AccessDirection::Forward;
    }
    match forward as f64 / total as f64 {
        ratio if ratio > 0.8 => AccessDirection::Forward,
        ratio if ratio < 0.2 => AccessDirection::Backward,
        _ => AccessDirection::Bidirectional,
    }
}
/// Shannon entropy (base 2) of the access-size histogram; 0.0 for an empty
/// pattern. Higher values indicate less predictable access sizes.
fn calculate_entropy(&self, pattern: &AccessPattern) -> f64 {
    let total = pattern.access_sizes.len();
    if total == 0 {
        return 0.0;
    }
    // Histogram of observed sizes.
    let mut counts: HashMap<usize, usize> = HashMap::new();
    for &size in &pattern.access_sizes {
        *counts.entry(size).or_default() += 1;
    }
    // H = -Σ p·log2(p) over non-zero probabilities.
    counts
        .values()
        .map(|&count| count as f64 / total as f64)
        .filter(|&p| p > 0.0)
        .map(|p| -p * p.log2())
        .sum()
}
/// Placeholder: always reports a uniform distribution.
/// TODO(review): implement real distribution fitting — the `_pattern`
/// argument is currently unused.
fn determine_distribution(&self, _pattern: &AccessPattern) -> AccessDistribution {
AccessDistribution::Uniform
}
/// Scores how streaming-like the pattern is, 0.0..=1.0. Large (>1 KiB
/// average) and uniformly sized accesses are treated as streaming; anything
/// else scores 0.0.
fn analyze_streaming(&self, pattern: &AccessPattern) -> f64 {
    let count = pattern.access_sizes.len();
    if count < 3 {
        return 0.0;
    }
    let avg_size = pattern.access_sizes.iter().sum::<usize>() as f64 / count as f64;
    let size_consistency = self.calculate_size_consistency(pattern);
    if avg_size <= 1024.0 || size_consistency <= 0.8 {
        return 0.0;
    }
    // Scale the score with log2 of the average size (saturating at 2^20),
    // weighted by how uniform the sizes are.
    (avg_size.log2() / 20.0).min(1.0) * size_consistency
}
/// Measures uniformity of access sizes as `1 - coefficient_of_variation`,
/// clamped to be non-negative. A single sample (or none) counts as
/// perfectly consistent (1.0).
fn calculate_size_consistency(&self, pattern: &AccessPattern) -> f64 {
    let count = pattern.access_sizes.len();
    if count < 2 {
        return 1.0;
    }
    let mean = pattern.access_sizes.iter().sum::<usize>() as f64 / count as f64;
    let variance = pattern
        .access_sizes
        .iter()
        .map(|&size| {
            let delta = size as f64 - mean;
            delta * delta
        })
        .sum::<f64>()
        / count as f64;
    let cv = if mean > 0.0 { variance.sqrt() / mean } else { 0.0 };
    (1.0 - cv).max(0.0)
}
/// Estimates the streaming block size as the median recorded access size;
/// 0 when no accesses were recorded.
fn estimate_block_size(&self, pattern: &AccessPattern) -> usize {
    if pattern.access_sizes.is_empty() {
        return 0;
    }
    let mut sizes: Vec<usize> = pattern.access_sizes.iter().copied().collect();
    sizes.sort_unstable();
    sizes[sizes.len() / 2]
}
/// Rough temporal-cluster size: the access count weighted by the pattern's
/// temporal-locality score, rounded to the nearest integer.
fn estimate_cluster_size(&self, pattern: &AccessPattern) -> usize {
    let weighted = pattern.access_times.len() as f64 * pattern.temporal_locality;
    weighted.round() as usize
}
/// Estimates a locality radius as the mean absolute difference between
/// consecutive recorded access sizes; 0 when fewer than two samples exist.
///
/// NOTE(review): like the other estimators this operates on access *sizes*,
/// not addresses — confirm that is the intended notion of locality.
fn estimate_locality_radius(&self, pattern: &AccessPattern) -> usize {
    if pattern.access_sizes.len() < 2 {
        return 0;
    }
    let sizes: Vec<usize> = pattern.access_sizes.iter().copied().collect();
    // `usize::abs_diff` replaces the original's manual branch-and-subtract
    // and cannot underflow.
    let distances: Vec<usize> = sizes
        .windows(2)
        .map(|pair| pair[0].abs_diff(pair[1]))
        .collect();
    // With >= 2 samples there is always at least one window; the guard is
    // purely defensive.
    if distances.is_empty() {
        0
    } else {
        distances.iter().sum::<usize>() / distances.len()
    }
}
/// Scores the regularity of inter-access intervals as
/// `1 - min(coefficient_of_variation, 1)`, so perfectly periodic accesses
/// score 1.0 and highly irregular ones approach 0.0. Returns the neutral
/// value 0.5 when fewer than five timestamps are available.
fn calculate_pattern_stability(&self, pattern: &AccessPattern) -> f64 {
    if pattern.access_times.len() < 5 {
        return 0.5;
    }
    let times: Vec<_> = pattern.access_times.iter().collect();
    let intervals: Vec<f64> = times
        .windows(2)
        .map(|pair| pair[1].duration_since(*pair[0]).as_nanos() as f64)
        .collect();
    if intervals.is_empty() {
        return 0.5;
    }
    let mean = intervals.iter().sum::<f64>() / intervals.len() as f64;
    let variance = intervals
        .iter()
        .map(|&interval| (interval - mean).powi(2))
        .sum::<f64>()
        / intervals.len() as f64;
    let cv = if mean > 0.0 { variance.sqrt() / mean } else { 0.0 };
    (1.0f64 - cv.min(1.0f64)).max(0.0f64)
}
/// Builds an `AccessPrediction` for the classified `pattern_type`.
/// Sequential patterns get a stride-extrapolated next access plus four
/// prefetch candidates; streaming patterns get one block-sized access;
/// everything else falls back to a low-confidence repeat of the last value.
///
/// NOTE(review): predicted `address` fields are extrapolated from recorded
/// access *sizes* (AccessPattern exposes no addresses here) — confirm this
/// size-as-address convention is intentional.
fn generate_access_prediction(
&self,
pattern: &AccessPattern,
pattern_type: &PatternType,
) -> AccessPrediction {
let mut next_accesses = Vec::new();
let mut prefetch_candidates = Vec::new();
match pattern_type {
PatternType::Sequential { stride, direction } => {
if let (Some(&last_size), Some(&_last_time)) =
(pattern.access_sizes.back(), pattern.access_times.back())
{
// Extrapolate one stride in the pattern's direction; backward
// movement saturates at 0 instead of underflowing.
let next_size = match direction {
AccessDirection::Forward => last_size + stride,
AccessDirection::Backward => last_size.saturating_sub(*stride),
AccessDirection::Bidirectional => last_size + stride, };
next_accesses.push(PredictedAccess {
address: next_size, access_type: AccessType::Read, size: last_size,
confidence: 0.8,
estimated_time: Duration::from_millis(1),
});
// Suggest prefetching the next four strides ahead.
for i in 1..=4 {
let prefetch_addr = match direction {
AccessDirection::Forward => next_size + (stride * i),
AccessDirection::Backward => next_size.saturating_sub(stride * i),
AccessDirection::Bidirectional => next_size + (stride * i),
};
prefetch_candidates.push(prefetch_addr);
}
}
}
PatternType::Streaming { block_size, .. } => {
// One block-sized read, slightly less confident than sequential.
if let Some(&last_size) = pattern.access_sizes.back() {
next_accesses.push(PredictedAccess {
address: last_size + block_size,
access_type: AccessType::Read,
size: *block_size,
confidence: 0.7,
estimated_time: Duration::from_millis(2),
});
}
}
_ => {
// Fallback: low-confidence repetition of the last observation.
if let Some(&last_size) = pattern.access_sizes.back() {
next_accesses.push(PredictedAccess {
address: last_size,
access_type: AccessType::Read,
size: last_size,
confidence: 0.3,
estimated_time: Duration::from_millis(5),
});
}
}
}
// Cache-behavior figures are fixed placeholders, not measured values.
AccessPrediction {
next_accesses,
prediction_confidence: 0.6, time_horizon: Duration::from_millis(100),
prefetch_candidates,
cache_behavior: CacheBehaviorPrediction {
l1_hit_rate: 0.9,
l2_hit_rate: 0.7,
tlb_hit_rate: 0.95,
bandwidth_usage: 0.5,
cache_warming: Vec::new(),
},
}
}
/// Folds one accepted classification into the aggregate statistics:
/// increments the total, bumps the per-type distribution counter, and
/// updates the running mean confidence incrementally.
fn update_statistics(&self, classification: &PatternClassification) {
    let mut stats = self.pattern_statistics.lock();
    stats.total_patterns += 1;
    let name = pattern_type_name(&classification.primary_type);
    match stats
        .pattern_distribution
        .iter_mut()
        .find(|(existing, _)| *existing == name)
    {
        Some((_, count)) => *count += 1,
        None => stats.pattern_distribution.push((name, 1)),
    }
    // Running mean: old_mean * (n-1) folded with the new sample.
    let accumulated = stats.average_confidence * (stats.total_patterns - 1) as f64
        + classification.confidence;
    stats.average_confidence = accumulated / stats.total_patterns as f64;
}
/// Appends a canned optimization suggestion matching the primary pattern
/// type. Only Sequential, Random and Streaming currently produce
/// suggestions; other types are intentionally ignored.
fn generate_optimization_suggestions(
&self,
address: usize,
classification: &PatternClassification,
) {
let mut suggestions = self.optimization_suggestions.lock();
match &classification.primary_type {
// Sequential: prefetch four strides ahead.
PatternType::Sequential { stride, .. } => {
suggestions.push(PatternOptimizationSuggestion {
target: address,
optimization_type: OptimizationType::PrefetchingOptimization {
prefetch_distance: *stride * 4,
prefetch_pattern: PrefetchPattern::Sequential,
},
suggestion: "Implement sequential prefetching to improve cache performance"
.to_string(),
expected_improvement: 0.3,
complexity: OptimizationComplexity::Low,
prerequisites: vec!["Hardware prefetcher support".to_string()],
timeline: OptimizationTimeline::Immediate,
});
}
// Random: higher associativity mitigates conflict misses.
PatternType::Random { .. } => {
suggestions.push(PatternOptimizationSuggestion {
target: address,
optimization_type: OptimizationType::CacheOptimization {
cache_strategy: CacheStrategy::SetAssociative { ways: 8 },
target_cache_level: CacheLevel::L2,
},
suggestion: "Use set-associative cache to handle random access patterns"
.to_string(),
expected_improvement: 0.2,
complexity: OptimizationComplexity::Medium,
prerequisites: vec!["Cache configuration access".to_string()],
timeline: OptimizationTimeline::ShortTerm,
});
}
// Streaming: batch and coalesce transfers for bandwidth.
PatternType::Streaming { .. } => {
suggestions.push(PatternOptimizationSuggestion {
target: address,
optimization_type: OptimizationType::BandwidthOptimization {
batching_strategy: BatchingStrategy::SizeBased,
transfer_optimization: TransferOptimization::Coalescing,
},
suggestion: "Implement memory coalescing for streaming workloads".to_string(),
expected_improvement: 0.4,
complexity: OptimizationComplexity::Medium,
prerequisites: vec!["DMA controller access".to_string()],
timeline: OptimizationTimeline::MediumTerm,
});
}
// No suggestion templates for the remaining pattern types yet.
_ => {}
}
}
/// Replays `pattern`'s recorded (timestamp, size) pairs into the prediction
/// model for `address`, creating the model on first use.
///
/// Uses the `HashMap` entry API instead of the original
/// contains_key + insert + get_mut sequence (three lookups → one).
fn update_prediction_model(&self, address: usize, pattern: &AccessPattern) {
    let mut models = self.prediction_models.lock();
    let model = models
        .entry(address)
        .or_insert_with(AccessPredictionModel::new);
    for (i, (&time, &size)) in pattern
        .access_times
        .iter()
        .zip(pattern.access_sizes.iter())
        .enumerate()
    {
        // Missing access-type entries default to reads.
        let access_type = pattern
            .access_types
            .get(i)
            .copied()
            .unwrap_or(AccessType::Read);
        model.add_access_event(AccessEvent {
            // NOTE(review): the recorded access *size* is stored in the
            // `address` field (AccessPattern exposes no addresses here);
            // confirm this is intentional.
            address: size,
            access_type,
            size,
            timestamp: time,
            // Thread/operation are not tracked by AccessPattern, so the
            // context is filled with placeholders.
            context: AccessContext {
                thread_id: 0,
                operation: "unknown".to_string(),
                allocation_id: Some(address),
            },
        });
    }
}
/// Returns a copy of the stored classification for `address`, if any.
pub fn get_classification(&self, address: usize) -> Option<PatternClassification> {
    let classifications = self.pattern_classifications.lock();
    classifications.get(&address).cloned()
}
/// Snapshot of every optimization suggestion generated so far.
pub fn get_optimization_suggestions(&self) -> Vec<PatternOptimizationSuggestion> {
    let suggestions = self.optimization_suggestions.lock();
    suggestions.clone()
}
/// Snapshot of the aggregate pattern statistics.
pub fn get_statistics(&self) -> PatternStatistics {
    self.pattern_statistics.lock().clone()
}
/// Drops classifications and prediction-model events older than `max_age`.
///
/// Uses `Instant::checked_sub` because `Instant::now() - max_age` panics on
/// some platforms when `max_age` exceeds the time the process has been
/// alive; in that case nothing can be older than the cutoff, so we return.
pub fn cleanup_old_data(&self, max_age: Duration) {
    let cutoff = match Instant::now().checked_sub(max_age) {
        Some(cutoff) => cutoff,
        // Cutoff precedes any representable instant: nothing to prune.
        None => return,
    };
    self.pattern_classifications
        .lock()
        .retain(|_, classification| classification.classified_at > cutoff);
    // NOTE(review): suggestions carry no timestamp, so age-based pruning is
    // not possible yet; retain-all preserves the original (no-op) behavior.
    self.optimization_suggestions.lock().retain(|_suggestion| true);
    for model in self.prediction_models.lock().values_mut() {
        model.cleanup_old_events(cutoff);
    }
}
}
impl AccessPredictionModel {
fn new() -> Self {
Self {
access_history: VecDeque::new(),
pattern_state: PatternRecognitionState {
current_hypothesis: None,
transition_probabilities: HashMap::new(),
confidence: 0.0,
},
accuracy_tracker: AccuracyTracker {
correct_predictions: 0,
total_predictions: 0,
recent_accuracy: VecDeque::new(),
accuracy_by_pattern: HashMap::new(),
},
model_params: PredictionModelParams {
history_window: 100,
prediction_horizon: Duration::from_millis(100),
learning_rate: 0.1,
pattern_threshold: 0.7,
},
}
}
fn add_access_event(&mut self, event: AccessEvent) {
self.access_history.push_back(event);
while self.access_history.len() > self.model_params.history_window {
self.access_history.pop_front();
}
self.update_pattern_recognition();
}
fn update_pattern_recognition(&mut self) {
if self.access_history.len() >= 10 {
self.pattern_state.confidence = 0.7; }
}
fn cleanup_old_events(&mut self, cutoff: Instant) {
self.access_history.retain(|event| event.timestamp > cutoff);
}
}
impl Default for PatternAnalysisConfig {
fn default() -> Self {
Self {
min_pattern_length: 5,
analysis_window: Duration::from_secs(60),
confidence_threshold: 0.5,
enable_prediction: true,
enable_optimization_suggestions: true,
max_tracked_patterns: 10000,
classification_sensitivity: 0.1,
cache_analysis_depth: 3,
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::memory_profiler::allocation::AccessPattern;
// A fresh analyzer starts with no statistics and no suggestions.
#[test]
fn test_pattern_analyzer_creation() {
let config = PatternAnalysisConfig::default();
let analyzer = AccessPatternAnalyzer::new(config);
assert!(analyzer.get_statistics().total_patterns == 0);
assert!(analyzer.get_optimization_suggestions().is_empty());
}
// A strongly sequential pattern classifies as Sequential with confidence
// above the default threshold.
#[test]
fn test_sequential_pattern_classification() {
let analyzer = AccessPatternAnalyzer::new(PatternAnalysisConfig::default());
let mut pattern = AccessPattern::new();
for i in 0..10 {
pattern.record_access(AccessType::Read, 1000 + i * 8);
}
pattern.sequential_score = 0.9;
pattern.temporal_locality = 0.5;
pattern.spatial_locality = 0.8;
let classification = analyzer.analyze_pattern(0x1000, &pattern);
assert!(classification.is_some());
let classification = classification.expect("operation should succeed");
assert!(matches!(
classification.primary_type,
PatternType::Sequential { .. }
));
assert!(classification.confidence > 0.5);
}
// Sequential classification should emit a prefetching suggestion.
#[test]
fn test_optimization_suggestions() {
let analyzer = AccessPatternAnalyzer::new(PatternAnalysisConfig::default());
let mut pattern = AccessPattern::new();
for i in 0..10 {
pattern.record_access(AccessType::Read, 1000 + i * 8);
}
pattern.sequential_score = 0.9;
analyzer.analyze_pattern(0x1000, &pattern);
let suggestions = analyzer.get_optimization_suggestions();
assert!(!suggestions.is_empty());
assert!(suggestions.iter().any(|s| matches!(
s.optimization_type,
OptimizationType::PrefetchingOptimization { .. }
)));
}
// Each accepted classification increments the aggregate counters.
#[test]
fn test_pattern_statistics_update() {
let analyzer = AccessPatternAnalyzer::new(PatternAnalysisConfig::default());
let mut pattern = AccessPattern::new();
for i in 0..10 {
pattern.record_access(AccessType::Read, 1000 + i * 8);
}
pattern.sequential_score = 0.8;
analyzer.analyze_pattern(0x1000, &pattern);
let stats = analyzer.get_statistics();
assert_eq!(stats.total_patterns, 1);
assert!(stats.average_confidence > 0.0);
}
// Large, uniformly sized accesses should score as streaming.
#[test]
fn test_streaming_pattern_detection() {
let analyzer = AccessPatternAnalyzer::new(PatternAnalysisConfig::default());
let mut pattern = AccessPattern::new();
for _i in 0..10 {
pattern.record_access(AccessType::Read, 64 * 1024); }
pattern.sequential_score = 0.6;
pattern.spatial_locality = 0.9;
let streaming_score = analyzer.analyze_streaming(&pattern);
assert!(streaming_score > 0.5);
}
// A sequential pattern type yields both next-access predictions and
// prefetch candidates.
#[test]
fn test_access_prediction() {
let analyzer = AccessPatternAnalyzer::new(PatternAnalysisConfig::default());
let mut pattern = AccessPattern::new();
for i in 0..5 {
pattern.record_access(AccessType::Read, 1000 + i * 8);
}
let pattern_type = PatternType::Sequential {
stride: 8,
direction: AccessDirection::Forward,
};
let prediction = analyzer.generate_access_prediction(&pattern, &pattern_type);
assert!(!prediction.next_accesses.is_empty());
assert!(!prediction.prefetch_candidates.is_empty());
}
}