use scirs2_core::ndarray::{Array1, Array2};
use std::collections::HashMap;
use super::config::{AlgorithmType, PatternType};
use super::learning::PredictionModel;
/// Aggregated knowledge across modalities (visual, temporal, contextual),
/// plus explicit links between them. Acts as the central store queried by
/// the adaptive processing pipeline.
#[derive(Debug, Clone)]
pub struct MultiModalKnowledgeBase {
/// Visual pattern knowledge, keyed by an opaque pattern identifier string.
pub visual_patterns: HashMap<String, VisualKnowledge>,
/// Temporal pattern knowledge, keyed the same way as `visual_patterns`.
pub temporal_patterns: HashMap<String, TemporalKnowledge>,
/// Context-specific knowledge, keyed by context identifier.
pub contextual_knowledge: HashMap<String, ContextualKnowledge>,
/// Directed associations between modalities (e.g. visual -> temporal).
pub cross_modal_associations: Vec<CrossModalAssociation>,
}
/// Knowledge learned about one visual pattern: its feature signature, which
/// algorithms worked well on it, and what outcomes to expect.
#[derive(Debug, Clone)]
pub struct VisualKnowledge {
/// Feature vector describing the pattern. Compared against query features
/// via cosine similarity in `MultiModalKnowledgeBase::find_relevant_knowledge`,
/// so it must have the same length as the query vectors.
pub features: Array1<f64>,
/// Algorithms observed to perform well on this pattern.
pub optimal_methods: Vec<AlgorithmType>,
/// Expected outcome metrics when the optimal methods are applied.
/// NOTE(review): the meaning of each element is not defined here — confirm
/// the indexing convention against the producer of this data.
pub expected_outcomes: Array1<f64>,
/// Confidence in this entry; presumably in [0, 1] — not enforced here.
pub confidence: f64,
}
/// Knowledge about temporal structure: observed patterns, models that predict
/// them, and dependencies between processing steps.
#[derive(Debug, Clone)]
pub struct TemporalKnowledge {
/// Temporal pattern matrix. NOTE(review): row/column semantics (e.g.
/// time-step x feature) are not established here — confirm with the writer.
pub patterns: Array2<f64>,
/// Models used to forecast future pattern behavior.
pub prediction_models: Vec<PredictionModel>,
/// Step-to-step dependencies extracted from the temporal patterns.
pub dependencies: Vec<TemporalDependency>,
}
/// Knowledge tied to a particular usage context: its feature signature,
/// learned preferences, and strategies for adapting to it.
#[derive(Debug, Clone)]
pub struct ContextualKnowledge {
/// Feature vector characterizing the context.
/// NOTE(review): field name is missing an underscore (`context_features`);
/// renaming is a breaking change for users of this public field, so it is
/// left as-is here.
pub contextfeatures: Array1<f64>,
/// Preference weights keyed by preference name.
pub preferences: HashMap<String, f64>,
/// Strategies applicable when operating in this context.
pub adaptation_strategies: Vec<super::strategies::AdaptationStrategy>,
}
/// A directed, weighted link from one modality to another, with a linear
/// mapping for transferring knowledge across the two feature spaces.
#[derive(Debug, Clone)]
pub struct CrossModalAssociation {
/// Modality the association originates from (e.g. "visual").
pub source_modality: String,
/// Modality the association points to (e.g. "temporal").
pub target_modality: String,
/// Association strength; presumably in [0, 1] — not enforced here.
pub strength: f64,
/// Matrix mapping source-modality features into the target space.
pub transfer_function: Array2<f64>,
}
/// A directed dependency between two processing steps in a temporal sequence.
#[derive(Debug, Clone)]
pub struct TemporalDependency {
/// Index of the step the dependency originates from.
pub source_step: usize,
/// Index of the step that depends on `source_step`.
pub target_step: usize,
/// Dependency strength; presumably in [0, 1] — not enforced here.
pub strength: f64,
/// Free-form category label for the dependency.
/// NOTE(review): stringly-typed — an enum would prevent typos, but changing
/// the type would break users of this public field.
pub dependency_type: String,
}
/// Runtime context for a processing request: what kind of image is being
/// handled, what the user wants, and what hardware/constraints apply.
#[derive(Debug, Clone)]
pub struct ProcessingContext {
/// Broad category of the image being processed.
pub image_type: PatternType,
/// User preference weights keyed by preference name.
pub user_preferences: HashMap<String, f64>,
/// Hardware available for this request.
pub available_resources: ResourceAvailability,
/// Optional deadline/budget. NOTE(review): units (seconds? ms?) are not
/// established anywhere in this file — confirm with callers.
pub time_constraints: Option<f64>,
/// Optional minimum acceptable quality; `None` means unconstrained.
pub quality_requirements: Option<f64>,
}
/// Snapshot of the compute resources available to the pipeline.
#[derive(Debug, Clone)]
pub struct ResourceAvailability {
/// Number of CPU cores usable for processing.
pub cpu_cores: usize,
/// Available memory in megabytes.
pub memory_mb: f64,
/// Whether a GPU accelerator is present.
pub gpu_available: bool,
/// Whether a quantum backend is present.
pub quantum_available: bool,
}
impl MultiModalKnowledgeBase {
    /// Minimum cosine similarity for a stored pattern to count as relevant
    /// in [`Self::find_relevant_knowledge`].
    const RELEVANCE_THRESHOLD: f64 = 0.7;

    /// Creates an empty knowledge base with no stored patterns or associations.
    pub fn new() -> Self {
        Self {
            visual_patterns: HashMap::new(),
            temporal_patterns: HashMap::new(),
            contextual_knowledge: HashMap::new(),
            cross_modal_associations: Vec::new(),
        }
    }

    /// Stores visual knowledge under `key`, replacing any previous entry.
    pub fn add_visual_knowledge(&mut self, key: String, knowledge: VisualKnowledge) {
        self.visual_patterns.insert(key, knowledge);
    }

    /// Looks up visual knowledge by key, if present.
    pub fn get_visual_knowledge(&self, key: &str) -> Option<&VisualKnowledge> {
        self.visual_patterns.get(key)
    }

    /// Registers a new cross-modal association.
    pub fn add_cross_modal_association(&mut self, association: CrossModalAssociation) {
        self.cross_modal_associations.push(association);
    }

    /// Returns the keys of all visual patterns whose stored features have a
    /// cosine similarity strictly above [`Self::RELEVANCE_THRESHOLD`] with
    /// `pattern_features`.
    ///
    /// The result order follows `HashMap` iteration order and is therefore
    /// unspecified.
    pub fn find_relevant_knowledge(&self, pattern_features: &Array1<f64>) -> Vec<String> {
        self.visual_patterns
            .iter()
            .filter(|(_, knowledge)| {
                self.calculate_similarity(pattern_features, &knowledge.features)
                    > Self::RELEVANCE_THRESHOLD
            })
            .map(|(key, _)| key.clone())
            .collect()
    }

    /// Cosine similarity of two feature vectors, in `[-1, 1]`.
    ///
    /// Returns `0.0` when the lengths differ or either vector has zero norm,
    /// so the division below can never be by zero.
    fn calculate_similarity(&self, features1: &Array1<f64>, features2: &Array1<f64>) -> f64 {
        if features1.len() != features2.len() {
            return 0.0;
        }
        // Single fused pass over both vectors; `zip` avoids per-element
        // bounds checks that indexed access would incur.
        let mut dot_product = 0.0;
        let mut norm1 = 0.0;
        let mut norm2 = 0.0;
        for (a, b) in features1.iter().zip(features2.iter()) {
            dot_product += a * b;
            norm1 += a * a;
            norm2 += b * b;
        }
        if norm1 == 0.0 || norm2 == 0.0 {
            0.0
        } else {
            dot_product / (norm1.sqrt() * norm2.sqrt())
        }
    }
}

// An argument-less `new()` should come with a `Default` impl
// (clippy::new_without_default) so the type works with `..Default::default()`,
// `Option::unwrap_or_default`, derived `Default` on containers, etc.
impl Default for MultiModalKnowledgeBase {
    fn default() -> Self {
        Self::new()
    }
}
// The original defined an inherent `pub fn default()`, which shadows the
// standard `Default` trait (clippy::should_implement_trait). Implementing the
// trait instead is backward-compatible — `Default` is in the prelude, so
// existing `ProcessingContext::default()` call sites still resolve — and it
// additionally enables `..Default::default()` and generic `T: Default` use.
impl Default for ProcessingContext {
    /// Sensible baseline context: natural images, a 4-core / 8 GiB CPU-only
    /// machine, and no time or quality constraints.
    fn default() -> Self {
        Self {
            image_type: PatternType::Natural,
            user_preferences: HashMap::new(),
            available_resources: ResourceAvailability {
                cpu_cores: 4,
                memory_mb: 8192.0,
                gpu_available: false,
                quantum_available: false,
            },
            time_constraints: None,
            quality_requirements: None,
        }
    }
}

impl ProcessingContext {
    /// Replaces the image type and merges `preferences` into the existing
    /// user preferences. Keys already present are overwritten; keys not in
    /// `preferences` are kept.
    pub fn update_context(&mut self, image_type: PatternType, preferences: HashMap<String, f64>) {
        self.image_type = image_type;
        self.user_preferences.extend(preferences);
    }

    /// True when a processing deadline has been set.
    pub fn is_time_constrained(&self) -> bool {
        self.time_constraints.is_some()
    }

    /// Heuristic compute-capacity score: one point per CPU core, +10 when a
    /// GPU is available, +50 when a quantum backend is available. The units
    /// are arbitrary — only relative comparisons between contexts are
    /// meaningful.
    pub fn get_processing_power_score(&self) -> f64 {
        let mut score = self.available_resources.cpu_cores as f64;
        if self.available_resources.gpu_available {
            score += 10.0;
        }
        if self.available_resources.quantum_available {
            score += 50.0;
        }
        score
    }
}