use crate::{
    compression::{create_compressor, CompressionMethod, VectorCompressor},
    Vector, VectorError,
};
use anyhow::Result;
use std::collections::HashMap;
use std::sync::{Arc, RwLock};
use std::time::{Duration, Instant};

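/// Hints that steer statistics gathering and compression method selection.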
#[derive(Debug, Clone)]
pub struct CompressionContext {
    pub domain: VectorDomain,
    pub access_frequency: AccessFrequency,
    pub quality_requirement: QualityRequirement,
    pub resource_constraints: ResourceConstraints,
    pub temporal_patterns: TemporalPatterns,
}

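/// Broad data domains; each tends to favor different compression trade-offs.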
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum VectorDomain {
    TextEmbeddings,
    ImageFeatures,
    AudioFeatures,
    KnowledgeGraph,
    TimeSeriesData,
    Unknown,
}

#[derive(Debug, Clone)]
pub enum AccessFrequency {
    VeryHigh,
    High,
    Moderate,
    Low,
    Archive,
}

#[derive(Debug, Clone)]
pub enum QualityRequirement {
    Lossless,
    HighQuality,
    Balanced,
    Compressed,
    Aggressive,
}

#[derive(Debug, Clone)]
pub struct ResourceConstraints {
    pub cpu_usage_limit: f32,
    pub memory_usage_limit: f32,
    pub compression_time_limit: Duration,
    pub decompression_time_limit: Duration,
}

#[derive(Debug, Clone)]
pub struct TemporalPatterns {
    pub time_of_day_factor: f32,
    pub load_factor: f32,
    pub seasonal_factor: f32,
}

#[derive(Debug, Clone)]
pub struct VectorStats {
    pub dimensions: usize,
    pub mean: f32,
    pub std_dev: f32,
    pub min_val: f32,
    pub max_val: f32,
    pub entropy: f32,
    pub sparsity: f32,
    pub correlation: f32,
    pub intrinsic_dimension: f32,
    pub clustering_tendency: f32,
    pub temporal_stability: f32,
    pub domain_affinity: VectorDomain,
}

impl Default for CompressionContext {
    fn default() -> Self {
        Self {
            domain: VectorDomain::Unknown,
            access_frequency: AccessFrequency::Moderate,
            quality_requirement: QualityRequirement::Balanced,
            resource_constraints: ResourceConstraints {
                cpu_usage_limit: 0.7,
                memory_usage_limit: 0.8,
                compression_time_limit: Duration::from_millis(100),
                decompression_time_limit: Duration::from_millis(50),
            },
            temporal_patterns: TemporalPatterns {
                time_of_day_factor: 1.0,
                load_factor: 1.0,
                seasonal_factor: 1.0,
            },
        }
    }
}

impl VectorStats {
    pub fn from_vector(vector: &Vector) -> Result<Self, VectorError> {
        Self::from_vector_with_context(vector, &CompressionContext::default())
    }

    pub fn from_vector_with_context(
        vector: &Vector,
        context: &CompressionContext,
    ) -> Result<Self, VectorError> {
        let values = vector.as_f32();
        let n = values.len();

        if n == 0 {
            return Err(VectorError::InvalidDimensions("Empty vector".to_string()));
        }

        let sum: f32 = values.iter().sum();
        let mean = sum / n as f32;

        let variance: f32 = values.iter().map(|x| (x - mean).powi(2)).sum::<f32>() / n as f32;
        let std_dev = variance.sqrt();

        let min_val = values.iter().fold(f32::INFINITY, |a, &b| a.min(b));
        let max_val = values.iter().fold(f32::NEG_INFINITY, |a, &b| a.max(b));

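        // Histogram resolution for the entropy estimate is tuned per domain.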
        let bin_count = match context.domain {
            VectorDomain::TextEmbeddings => 128,
            VectorDomain::ImageFeatures => 256,
            VectorDomain::KnowledgeGraph => 64,
            _ => 256,
        };

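        // Histogram the values, then estimate Shannon entropy
        // H = -sum(p_i * log2(p_i)) over the occupied bins.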
        let mut histogram = vec![0u32; bin_count];
        let range = max_val - min_val;
        if range > 0.0 {
            for val in &values {
                let bucket = ((val - min_val) / range * (bin_count - 1) as f32)
                    .clamp(0.0, (bin_count - 1) as f32) as usize;
                histogram[bucket] += 1;
            }
        }

        let entropy: f32 = histogram
            .iter()
            .filter(|&&count| count > 0)
            .map(|&count| {
                let p = count as f32 / n as f32;
                -p * p.log2()
            })
            .sum();

        // Values within 10% of one standard deviation of zero count as
        // near-zero; sparsity is their fraction of the vector.
        let threshold = std_dev * 0.1;
        let sparse_count = values.iter().filter(|&&x| x.abs() < threshold).count();
        let sparsity = sparse_count as f32 / n as f32;

        let correlation = Self::calculate_enhanced_correlation(&values);

        let intrinsic_dimension = Self::estimate_intrinsic_dimension(&values);

        let clustering_tendency = Self::calculate_hopkins_statistic(&values);

        // A single snapshot carries no history, so temporal stability
        // defaults to fully stable (1.0).
        let temporal_stability = 1.0;

        let domain_affinity = Self::detect_domain(&values, entropy, sparsity, correlation);

        Ok(VectorStats {
            dimensions: n,
            mean,
            std_dev,
            min_val,
            max_val,
            entropy,
            sparsity,
            correlation,
            intrinsic_dimension,
            clustering_tendency,
            temporal_stability,
            domain_affinity,
        })
    }

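    /// Mean absolute correlation between adjacent (lag-1) sliding windows of
    /// several sizes; a rough measure of sequential structure in the vector.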
    fn calculate_enhanced_correlation(values: &[f32]) -> f32 {
        let n = values.len();
        if n <= 1 {
            return 0.0;
        }

        let window_sizes = [5, 10, 20].iter().map(|&w| w.min(n / 2).max(2));
        let mut total_corr = 0.0;
        let mut total_count = 0;

        for window_size in window_sizes {
            if window_size >= n {
                continue;
            }

            for i in 0..(n - window_size) {
                let window1 = &values[i..i + window_size];
                let window2 = &values[i + 1..i + window_size + 1];

                let mean1: f32 = window1.iter().sum::<f32>() / window_size as f32;
                let mean2: f32 = window2.iter().sum::<f32>() / window_size as f32;

                let covariance: f32 = window1
                    .iter()
                    .zip(window2)
                    .map(|(a, b)| (a - mean1) * (b - mean2))
                    .sum();
                let var1: f32 = window1.iter().map(|x| (x - mean1).powi(2)).sum();
                let var2: f32 = window2.iter().map(|x| (x - mean2).powi(2)).sum();

                if var1 > 0.0 && var2 > 0.0 {
                    let corr = covariance / (var1.sqrt() * var2.sqrt());
                    total_corr += corr.abs();
                    total_count += 1;
                }
            }
        }

        if total_count > 0 {
            total_corr / total_count as f32
        } else {
            0.0
        }
    }

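    /// Correlation-dimension style estimate: least-squares slope of
    /// log(pair count within radius) versus log(radius) over a few radii.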
    fn estimate_intrinsic_dimension(values: &[f32]) -> f32 {
        let n = values.len();
        if n < 10 {
            return n as f32;
        }

        let sample_size = n.min(100);
        let step = n / sample_size;
        let sampled: Vec<f32> = (0..sample_size).map(|i| values[i * step]).collect();

        let mut log_radii = Vec::new();
        let mut log_counts = Vec::new();

        let max_val = sampled.iter().fold(f32::NEG_INFINITY, |a, &b| a.max(b));
        let min_val = sampled.iter().fold(f32::INFINITY, |a, &b| a.min(b));
        let range = max_val - min_val;

        if range <= 0.0 {
            return 1.0;
        }

        for radius_factor in [0.001, 0.01, 0.1, 0.5] {
            let radius = range * radius_factor;
            let mut count = 0;

            for i in 0..sampled.len() {
                for j in (i + 1)..sampled.len() {
                    if (sampled[i] - sampled[j]).abs() < radius {
                        count += 1;
                    }
                }
            }

            if count > 0 {
                log_radii.push(radius.ln());
                log_counts.push((count as f32).ln());
            }
        }

        if log_radii.len() < 2 {
            return n as f32;
        }

        let mean_log_r: f32 = log_radii.iter().sum::<f32>() / log_radii.len() as f32;
        let mean_log_c: f32 = log_counts.iter().sum::<f32>() / log_counts.len() as f32;

        let numerator: f32 = log_radii
            .iter()
            .zip(&log_counts)
            .map(|(r, c)| (r - mean_log_r) * (c - mean_log_c))
            .sum();
        let denominator: f32 = log_radii.iter().map(|r| (r - mean_log_r).powi(2)).sum();

        if denominator > 0.0 {
            let slope = numerator / denominator;
            slope.abs().min(n as f32).max(1.0)
        } else {
            n as f32
        }
    }

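    /// Hopkins statistic H = U / (U + W), comparing nearest-neighbor
    /// distances from data points (W) and from uniform random points (U).
    /// Values near 0.5 suggest uniform data; values near 1.0 suggest clusters.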
    fn calculate_hopkins_statistic(values: &[f32]) -> f32 {
        let n = values.len();
        if n < 10 {
            return 0.5;
        }

        let sample_size = (n / 10).clamp(5, 50);
        let min_val = values.iter().fold(f32::INFINITY, |a, &b| a.min(b));
        let max_val = values.iter().fold(f32::NEG_INFINITY, |a, &b| a.max(b));

        if max_val <= min_val {
            return 0.5;
        }

        let mut w_sum = 0.0;
        let mut u_sum = 0.0;

        for i in 0..sample_size {
            let idx = (i * n / sample_size) % n;
            let point = values[idx];

            let mut min_dist = f32::INFINITY;
            for &other in values {
                if other != point {
                    let dist = (point - other).abs();
                    min_dist = min_dist.min(dist);
                }
            }
            w_sum += min_dist;
        }

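        // Probe points come from a simple hash-seeded linear congruential
        // generator, keeping the statistic reproducible without an RNG crate.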
        use std::collections::hash_map::DefaultHasher;
        use std::hash::{Hash, Hasher};

        let mut hasher = DefaultHasher::new();
        42u64.hash(&mut hasher);
        let mut rng_state = hasher.finish();

        for _ in 0..sample_size {
            rng_state = rng_state.wrapping_mul(1103515245).wrapping_add(12345);
            let random_point = min_val + (max_val - min_val) * (rng_state as f32 / u64::MAX as f32);

            let mut min_dist = f32::INFINITY;
            for &data_point in values {
                let dist = (random_point - data_point).abs();
                min_dist = min_dist.min(dist);
            }
            u_sum += min_dist;
        }

        if w_sum + u_sum > 0.0 {
            u_sum / (w_sum + u_sum)
        } else {
            0.5
        }
    }

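    /// Heuristic domain detection from distribution shape; the thresholds are
    /// empirical, with `Unknown` as the fallback when no rule fires.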
    fn detect_domain(
        _values: &[f32],
        entropy: f32,
        sparsity: f32,
        correlation: f32,
    ) -> VectorDomain {
        if entropy > 6.0
            && entropy < 8.0
            && sparsity < 0.3
            && correlation > 0.2
            && correlation < 0.6
        {
            return VectorDomain::TextEmbeddings;
        }

        if entropy > 7.0 && correlation < 0.3 {
            return VectorDomain::ImageFeatures;
        }

        if entropy < 6.0 && sparsity > 0.4 {
            return VectorDomain::KnowledgeGraph;
        }

        if correlation > 0.7 && entropy > 5.0 && entropy < 7.0 {
            return VectorDomain::TimeSeriesData;
        }

        VectorDomain::Unknown
    }

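    /// Batch statistics: per-vector stats are averaged, extrema take the
    /// batch min/max, and the dominant domain wins.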
    pub fn from_vectors(vectors: &[Vector]) -> Result<Self, VectorError> {
        Self::from_vectors_with_context(vectors, &CompressionContext::default())
    }

    pub fn from_vectors_with_context(
        vectors: &[Vector],
        context: &CompressionContext,
    ) -> Result<Self, VectorError> {
        if vectors.is_empty() {
            return Err(VectorError::InvalidDimensions(
                "No vectors provided".to_string(),
            ));
        }

        let individual_stats: Result<Vec<_>, _> = vectors
            .iter()
            .map(|v| Self::from_vector_with_context(v, context))
            .collect();
        let stats = individual_stats?;

        let n = stats.len() as f32;

        Ok(VectorStats {
            dimensions: stats[0].dimensions,
            mean: stats.iter().map(|s| s.mean).sum::<f32>() / n,
            std_dev: stats.iter().map(|s| s.std_dev).sum::<f32>() / n,
            min_val: stats
                .iter()
                .map(|s| s.min_val)
                .fold(f32::INFINITY, |a, b| a.min(b)),
            max_val: stats
                .iter()
                .map(|s| s.max_val)
                .fold(f32::NEG_INFINITY, |a, b| a.max(b)),
            entropy: stats.iter().map(|s| s.entropy).sum::<f32>() / n,
            sparsity: stats.iter().map(|s| s.sparsity).sum::<f32>() / n,
            correlation: stats.iter().map(|s| s.correlation).sum::<f32>() / n,
            intrinsic_dimension: stats.iter().map(|s| s.intrinsic_dimension).sum::<f32>() / n,
            clustering_tendency: stats.iter().map(|s| s.clustering_tendency).sum::<f32>() / n,
            temporal_stability: stats.iter().map(|s| s.temporal_stability).sum::<f32>() / n,
            domain_affinity: Self::aggregate_domain_affinity(&stats),
        })
    }

    fn aggregate_domain_affinity(stats: &[VectorStats]) -> VectorDomain {
        let mut domain_counts = HashMap::new();

        for stat in stats {
            *domain_counts
                .entry(stat.domain_affinity.clone())
                .or_insert(0) += 1;
        }

        domain_counts
            .into_iter()
            .max_by_key(|(_, count)| *count)
            .map(|(domain, _)| domain)
            .unwrap_or(VectorDomain::Unknown)
    }
}

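/// Running performance record for one compression method, smoothed with an
/// exponential moving average so recent runs weigh most.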
#[derive(Debug, Clone)]
pub struct CompressionMetrics {
    pub method: CompressionMethod,
    pub compression_ratio: f32,
    pub compression_time: Duration,
    pub decompression_time: Duration,
    pub reconstruction_error: f32,
    pub usage_count: u64,
    pub avg_performance_score: f32,
}

impl CompressionMetrics {
    pub fn new(method: CompressionMethod) -> Self {
        Self {
            method,
            compression_ratio: 1.0,
            compression_time: Duration::ZERO,
            decompression_time: Duration::ZERO,
            reconstruction_error: 0.0,
            usage_count: 0,
            avg_performance_score: 0.0,
        }
    }

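    /// Weighted score over size, speed, and accuracy; higher is better.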
    pub fn calculate_score(&self, priorities: &CompressionPriorities) -> f32 {
        // `compression_ratio` is compressed/original size, so invert it:
        // more space saved scores higher, capped at 0.9.
        let ratio_score = (1.0 - self.compression_ratio).clamp(0.0, 0.9);
        let speed_score = 1.0 / (1.0 + self.compression_time.as_millis() as f32 / 1000.0);
        let accuracy_score = 1.0 / (1.0 + self.reconstruction_error);

        priorities.compression_weight * ratio_score
            + priorities.speed_weight * speed_score
            + priorities.accuracy_weight * accuracy_score
    }

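    /// Folds one observation into the running metrics via an exponential
    /// moving average: new = old * (1 - alpha) + observed * alpha.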
    pub fn update(
        &mut self,
        compression_ratio: f32,
        comp_time: Duration,
        decomp_time: Duration,
        error: f32,
        priorities: &CompressionPriorities,
    ) {
        let alpha = 0.1;
        self.compression_ratio = self.compression_ratio * (1.0 - alpha) + compression_ratio * alpha;
        self.compression_time = Duration::from_nanos(
            (self.compression_time.as_nanos() as f32 * (1.0 - alpha)
                + comp_time.as_nanos() as f32 * alpha) as u64,
        );
        self.decompression_time = Duration::from_nanos(
            (self.decompression_time.as_nanos() as f32 * (1.0 - alpha)
                + decomp_time.as_nanos() as f32 * alpha) as u64,
        );
        self.reconstruction_error = self.reconstruction_error * (1.0 - alpha) + error * alpha;
        self.usage_count += 1;

        self.avg_performance_score = self.calculate_score(priorities);
    }
}

#[derive(Debug, Clone)]
pub struct CompressionPriorities {
    pub compression_weight: f32,
    pub speed_weight: f32,
    pub accuracy_weight: f32,
}

impl Default for CompressionPriorities {
    fn default() -> Self {
        Self {
            compression_weight: 0.4,
            speed_weight: 0.3,
            accuracy_weight: 0.3,
        }
    }
}

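/// Ladder of methods from lightest to most aggressive, paired with the
/// compression-ratio thresholds at which each level applies.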
#[derive(Debug, Clone)]
pub struct MultiLevelCompression {
    pub levels: Vec<CompressionMethod>,
    pub thresholds: Vec<f32>,
}

impl Default for MultiLevelCompression {
    fn default() -> Self {
        Self::new()
    }
}

impl MultiLevelCompression {
    pub fn new() -> Self {
        Self {
            levels: vec![
                CompressionMethod::None,
                CompressionMethod::Quantization { bits: 16 },
                CompressionMethod::Quantization { bits: 8 },
                CompressionMethod::Pca { components: 0 },
                CompressionMethod::Zstd { level: 3 },
            ],
            thresholds: vec![0.0, 0.1, 0.3, 0.6, 0.8],
        }
    }

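    /// Returns the first level whose threshold covers `required_compression`
    /// (a target compressed/original size ratio), else the most aggressive.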
    pub fn select_level(&self, required_compression: f32) -> &CompressionMethod {
        for (i, &threshold) in self.thresholds.iter().enumerate() {
            if required_compression <= threshold {
                return &self.levels[i];
            }
        }
        self.levels.last().unwrap()
    }
}

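/// Compressor that tracks per-method performance and adapts its choice to
/// the data, occasionally exploring alternatives to keep metrics fresh.
///
/// A minimal usage sketch (assuming `Vector::new` as used in the tests below):
///
/// ```ignore
/// let mut compressor = AdaptiveCompressor::new();
/// let vector = Vector::new(vec![1.0, 2.0, 3.0, 4.0]);
/// let compressed = compressor.compress_adaptive(&vector)?;
/// let best = compressor.get_best_method();
/// ```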
pub struct AdaptiveCompressor {
    priorities: CompressionPriorities,
    metrics: Arc<RwLock<HashMap<String, CompressionMetrics>>>,
    multi_level: MultiLevelCompression,
    compressor_cache: Arc<RwLock<HashMap<String, Box<dyn VectorCompressor + Send + Sync>>>>,
    stats_cache: Arc<RwLock<HashMap<String, (VectorStats, Instant)>>>,
    exploration_rate: f32,
    cache_ttl: Duration,
}

impl AdaptiveCompressor {
    pub fn new() -> Self {
        Self::new_with_priorities(CompressionPriorities::default())
    }

    pub fn new_with_priorities(priorities: CompressionPriorities) -> Self {
        Self {
            priorities,
            metrics: Arc::new(RwLock::new(HashMap::new())),
            multi_level: MultiLevelCompression::new(),
            compressor_cache: Arc::new(RwLock::new(HashMap::new())),
            stats_cache: Arc::new(RwLock::new(HashMap::new())),
            exploration_rate: 0.1,
            cache_ttl: Duration::from_secs(3600),
        }
    }

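    /// Computes aggregate stats for `vectors` and recommends a method; repeat
    /// analyses of similar data are served from a TTL-bounded stats cache.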
    pub fn analyze_and_recommend(
        &mut self,
        vectors: &[Vector],
    ) -> Result<CompressionMethod, VectorError> {
        let stats = VectorStats::from_vectors(vectors)?;
        let stats_key = self.generate_stats_key(&stats);

        {
            let cache = self.stats_cache.read().unwrap();
            if let Some((cached_stats, timestamp)) = cache.get(&stats_key) {
                if timestamp.elapsed() < self.cache_ttl {
                    return Ok(self.recommend_from_stats(cached_stats));
                }
            }
        }

        {
            let mut cache = self.stats_cache.write().unwrap();
            cache.insert(stats_key, (stats.clone(), Instant::now()));
        }

        Ok(self.recommend_from_stats(&stats))
    }

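    /// Rule-based recommendation: sparse data quantizes aggressively,
    /// correlated high-dimensional data favors PCA, low-entropy data favors
    /// Zstd, and high-variance data keeps more quantization bits.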
    fn recommend_from_stats(&self, stats: &VectorStats) -> CompressionMethod {
        if stats.sparsity > 0.7 {
            return CompressionMethod::Quantization { bits: 4 };
        }

        if stats.correlation > 0.6 && stats.dimensions > 20 {
            let components = (stats.dimensions as f32 * 0.7) as usize;
            return CompressionMethod::Pca { components };
        }

        if stats.entropy < 4.0 {
            return CompressionMethod::Zstd { level: 9 };
        }

        if stats.std_dev > stats.mean.abs() {
            return CompressionMethod::Quantization { bits: 12 };
        }

        CompressionMethod::Quantization { bits: 8 }
    }

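    /// Epsilon-greedy compression: usually the recommended method, but with
    /// probability `exploration_rate` an alternative, so metrics stay broad.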
    pub fn compress_adaptive(&mut self, vector: &Vector) -> Result<Vec<u8>, VectorError> {
        let stats = VectorStats::from_vector(vector)?;
        let method = self.recommend_from_stats(&stats);

        if self.should_explore() {
            let alternative = self.get_alternative_method(&method);
            return self.compress_with_method(vector, &alternative);
        }

        self.compress_with_method(vector, &method)
    }

    pub fn compress_with_method(
        &mut self,
        vector: &Vector,
        method: &CompressionMethod,
    ) -> Result<Vec<u8>, VectorError> {
        let method_key = format!("{method:?}");
        let compressor = self.get_or_create_compressor(method)?;

        let start_time = Instant::now();
        let compressed = compressor.compress(vector)?;
        let compression_time = start_time.elapsed();

        // Round-trip to measure decompression time and reconstruction error.
        let start_time = Instant::now();
        let decompressed = compressor.decompress(&compressed, vector.dimensions)?;
        let decompression_time = start_time.elapsed();
        let error = self.calculate_reconstruction_error(vector, &decompressed)?;

        // Ratio of compressed bytes to the original f32 payload (4 bytes/dim).
        let compression_ratio = compressed.len() as f32 / (vector.dimensions * 4) as f32;

        {
            let mut metrics = self.metrics.write().unwrap();
            let metric = metrics
                .entry(method_key)
                .or_insert_with(|| CompressionMetrics::new(method.clone()));
            metric.update(
                compression_ratio,
                compression_time,
                decompression_time,
                error,
                &self.priorities,
            );
        }

        Ok(compressed)
    }

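    /// Chains up to three compression levels, re-selecting a level after each
    /// step, until the target compressed/original ratio is reached.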
    pub fn compress_multi_level(
        &mut self,
        vector: &Vector,
        target_ratio: f32,
    ) -> Result<Vec<u8>, VectorError> {
        let mut current_vector = vector.clone();
        let mut compression_steps = Vec::new();
        let mut total_ratio = 1.0;

        while total_ratio > target_ratio && compression_steps.len() < 3 {
            let remaining_ratio = target_ratio / total_ratio;
            let method = self.multi_level.select_level(remaining_ratio);

            let compressor = self.get_or_create_compressor(method)?;
            let compressed = compressor.compress(&current_vector)?;

            let step_ratio = compressed.len() as f32 / (current_vector.dimensions * 4) as f32;
            total_ratio *= step_ratio;

            compression_steps.push((method.clone(), compressed.clone()));

            if total_ratio > target_ratio {
                current_vector = compressor.decompress(&compressed, current_vector.dimensions)?;
            }
        }

        self.serialize_multi_level_result(compression_steps)
    }

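    /// The method with the highest running score, or 8-bit quantization
    /// before any metrics exist.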
    pub fn get_best_method(&self) -> CompressionMethod {
        let metrics = self.metrics.read().unwrap();
        let best = metrics.values().max_by(|a, b| {
            a.avg_performance_score
                .partial_cmp(&b.avg_performance_score)
                .unwrap()
        });

        best.map(|m| m.method.clone())
            .unwrap_or(CompressionMethod::Quantization { bits: 8 })
    }

    pub fn get_performance_stats(&self) -> HashMap<String, CompressionMetrics> {
        self.metrics.read().unwrap().clone()
    }

    pub fn update_priorities(&mut self, priorities: CompressionPriorities) {
        self.priorities = priorities;

        // Re-score existing metrics under the new priorities.
        let mut metrics = self.metrics.write().unwrap();
        for metric in metrics.values_mut() {
            metric.avg_performance_score = metric.calculate_score(&self.priorities);
        }
    }

    pub fn reset(&mut self) {
        self.metrics.write().unwrap().clear();
        self.compressor_cache.write().unwrap().clear();
        self.stats_cache.write().unwrap().clear();
    }

    fn get_or_create_compressor(
        &self,
        method: &CompressionMethod,
    ) -> Result<Box<dyn VectorCompressor>, VectorError> {
        // A `Box<dyn VectorCompressor>` cannot be cloned out of the shared
        // cache, so a fresh compressor is created per call; sharing cached
        // instances would require `Arc<dyn VectorCompressor + Send + Sync>`.
        Ok(create_compressor(method))
    }

    fn calculate_reconstruction_error(
        &self,
        original: &Vector,
        reconstructed: &Vector,
    ) -> Result<f32, VectorError> {
        let orig_values = original.as_f32();
        let recon_values = reconstructed.as_f32();

        if orig_values.len() != recon_values.len() {
            return Err(VectorError::InvalidDimensions(
                "Dimension mismatch".to_string(),
            ));
        }

        let mse: f32 = orig_values
            .iter()
            .zip(recon_values.iter())
            .map(|(a, b)| (a - b).powi(2))
            .sum::<f32>()
            / orig_values.len() as f32;

        // Root mean squared error between original and reconstruction.
        Ok(mse.sqrt())
    }

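    /// Buckets similar distributions under one cache key by rounding the
    /// salient statistics to two decimals.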
    fn generate_stats_key(&self, stats: &VectorStats) -> String {
        format!(
            "{}_{:.2}_{:.2}_{:.2}_{:.2}",
            stats.dimensions, stats.entropy, stats.sparsity, stats.correlation, stats.std_dev
        )
    }

    fn should_explore(&self) -> bool {
        // A fixed-seed RNG here would repeat the same decision on every
        // call, so derive the draw from the clock with the same LCG used in
        // `calculate_hopkins_statistic`.
        let nanos = std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .map_or(42, |d| d.as_nanos() as u64);
        let state = nanos.wrapping_mul(1103515245).wrapping_add(12345);
        (state as f32 / u64::MAX as f32) < self.exploration_rate
    }

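    /// Picks a neighboring method to explore: deeper quantization, PCA, or a
    /// stronger Zstd level, depending on the current choice.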
    fn get_alternative_method(&self, current: &CompressionMethod) -> CompressionMethod {
        match current {
            CompressionMethod::None => CompressionMethod::Quantization { bits: 8 },
            CompressionMethod::Quantization { bits } => {
                if *bits > 8 {
                    CompressionMethod::Quantization { bits: bits - 2 }
                } else {
                    CompressionMethod::Pca { components: 16 }
                }
            }
            CompressionMethod::Pca { components: _ } => CompressionMethod::Zstd { level: 6 },
            CompressionMethod::Zstd { level } => {
                if *level < 15 {
                    CompressionMethod::Zstd { level: level + 3 }
                } else {
                    CompressionMethod::Quantization { bits: 4 }
                }
            }
            _ => CompressionMethod::None,
        }
    }

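    /// Wire format: step count (u32 LE), then per step a one-byte method tag,
    /// a u32 LE payload length, and the payload bytes.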
    fn serialize_multi_level_result(
        &self,
        steps: Vec<(CompressionMethod, Vec<u8>)>,
    ) -> Result<Vec<u8>, VectorError> {
        use std::io::Write;

        let mut result = Vec::new();

        result.write_all(&(steps.len() as u32).to_le_bytes())?;

        for (method, data) in steps {
            let method_id = match method {
                CompressionMethod::None => 0u8,
                CompressionMethod::Zstd { .. } => 1u8,
                CompressionMethod::Quantization { .. } => 2u8,
                CompressionMethod::Pca { .. } => 3u8,
                CompressionMethod::ProductQuantization { .. } => 4u8,
                CompressionMethod::Adaptive { .. } => 5u8,
            };
            result.push(method_id);

            result.write_all(&(data.len() as u32).to_le_bytes())?;
            result.extend_from_slice(&data);
        }

        Ok(result)
    }
}

impl Default for AdaptiveCompressor {
    fn default() -> Self {
        Self::new()
    }
}

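/// Per-domain default priority weights, e.g. knowledge graphs weight accuracy
/// highest while image features weight compression highest.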
pub struct CompressionProfiles {
    profiles: HashMap<VectorDomain, CompressionPriorities>,
}

impl Default for CompressionProfiles {
    fn default() -> Self {
        Self::new()
    }
}

impl CompressionProfiles {
    pub fn new() -> Self {
        let mut profiles = HashMap::new();

        profiles.insert(
            VectorDomain::TextEmbeddings,
            CompressionPriorities {
                compression_weight: 0.3,
                speed_weight: 0.4,
                accuracy_weight: 0.3,
            },
        );

        profiles.insert(
            VectorDomain::ImageFeatures,
            CompressionPriorities {
                compression_weight: 0.5,
                speed_weight: 0.2,
                accuracy_weight: 0.3,
            },
        );

        profiles.insert(
            VectorDomain::KnowledgeGraph,
            CompressionPriorities {
                compression_weight: 0.2,
                speed_weight: 0.3,
                accuracy_weight: 0.5,
            },
        );

        profiles.insert(
            VectorDomain::TimeSeriesData,
            CompressionPriorities {
                compression_weight: 0.3,
                speed_weight: 0.4,
                accuracy_weight: 0.3,
            },
        );

        profiles.insert(
            VectorDomain::AudioFeatures,
            CompressionPriorities {
                compression_weight: 0.4,
                speed_weight: 0.3,
                accuracy_weight: 0.3,
            },
        );

        Self { profiles }
    }

    pub fn get_profile(&self, domain: &VectorDomain) -> CompressionPriorities {
        self.profiles.get(domain).cloned().unwrap_or_default()
    }

    pub fn update_profile(&mut self, domain: VectorDomain, priorities: CompressionPriorities) {
        self.profiles.insert(domain, priorities);
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_vector_stats() {
        let vector = Vector::new(vec![1.0, 2.0, 3.0, 4.0, 5.0]);
        let stats = VectorStats::from_vector(&vector).unwrap();

        assert_eq!(stats.dimensions, 5);
        assert_eq!(stats.mean, 3.0);
        assert!(stats.std_dev > 0.0);
    }

    #[test]
    fn test_adaptive_compression() {
        let vectors = vec![
            Vector::new(vec![1.0, 2.0, 3.0, 4.0]),
            Vector::new(vec![2.0, 3.0, 4.0, 5.0]),
            Vector::new(vec![3.0, 4.0, 5.0, 6.0]),
        ];

        let mut compressor = AdaptiveCompressor::new();
        let recommended = compressor.analyze_and_recommend(&vectors).unwrap();

        assert!(!matches!(recommended, CompressionMethod::None));
    }

    #[test]
    fn test_compression_metrics() {
        let method = CompressionMethod::Quantization { bits: 8 };
        let mut metrics = CompressionMetrics::new(method);
        let priorities = CompressionPriorities::default();

        metrics.update(
            0.5,
            Duration::from_millis(10),
            Duration::from_millis(5),
            0.01,
            &priorities,
        );

        assert!(metrics.avg_performance_score > 0.0);
        assert_eq!(metrics.usage_count, 1);
    }

    #[test]
    fn test_multi_level_compression() {
        let mut compressor = AdaptiveCompressor::new();
        let values: Vec<f32> = (0..256).map(|i| (i % 16) as f32).collect();
        let vector = Vector::new(values);

        let compressed = compressor.compress_multi_level(&vector, 0.1).unwrap();

        println!(
            "Compressed size: {} bytes, original size: {} bytes",
            compressed.len(),
            vector.dimensions * 4
        );
        assert!(compressed.len() < vector.dimensions * 4);
        assert!(compressed.len() < 900);
    }

    #[test]
    fn test_stats_aggregation() {
        let vectors = vec![
            Vector::new(vec![1.0, 2.0]),
            Vector::new(vec![3.0, 4.0]),
            Vector::new(vec![5.0, 6.0]),
        ];

        let stats = VectorStats::from_vectors(&vectors).unwrap();
        assert_eq!(stats.dimensions, 2);
        assert!(stats.mean > 0.0);
    }
}