// zoey_core/integration.rs

/*!
# Cross-Plugin Integration Module

This module provides optional integration capabilities that connect LauraAI's
various plugins together. It's designed to gracefully degrade when specific
plugins aren't available.

## Design Principles

1. **Optional Dependencies**: Integration features only activate when their
   required plugins are registered with the runtime.

2. **Graceful Degradation**: If a plugin isn't available, the integration
   returns a sensible default or skips the feature entirely.

3. **Capability-Based**: Plugins advertise their capabilities, and the
   integration layer uses those to determine what bridges to activate.

4. **Non-Breaking**: Core functionality works without any integration.

## Usage

Integration happens automatically through the CapabilityRegistry:

```rust
use zoey_core::integration::{CapabilityRegistry, Capability};

// Plugins register their capabilities
let mut registry = CapabilityRegistry::new();
registry.register(Capability::MLInference);
registry.register(Capability::WorkflowEngine);

// Integration checks what's available
if registry.has_all(&[Capability::MLInference, Capability::WorkflowEngine]) {
    // Enable ML training workflows
}
```
*/
39
40use async_trait::async_trait;
41use serde::{Deserialize, Serialize};
42use std::collections::{HashMap, HashSet};
43use std::sync::Arc;
44use std::time::Duration;
45use tokio::sync::RwLock;
46
// ============================================================================
// PLUGIN LIFECYCLE MANAGEMENT
// ============================================================================
50
/// Plugin lifecycle policy - determines how a plugin is managed.
///
/// `Copy + Eq + Hash` so the value can be used as a map key (the plugin
/// manager counts active plugins per policy); serde round-trips by variant
/// name.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum PluginPolicy {
    /// Always active - cannot be disabled (compliance, security)
    AlwaysOn,
    /// Loaded on demand when capabilities are needed
    OnDemand,
    /// Lazy loaded on first use, stays loaded afterwards
    LazyPersistent,
    /// Loaded for specific workflows, unloaded after they finish
    WorkflowScoped,
    /// Can be suspended to free resources, quick resume
    Suspendable,
}
65
/// Current state of a plugin.
///
/// NOTE(review): no transition rules are enforced by this type itself;
/// state changes are driven by the plugin manager.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum PluginState {
    /// Not loaded, resources freed
    Unloaded,
    /// Loading in progress
    Loading,
    /// Fully loaded and operational
    Active,
    /// Suspended (state preserved, resources freed)
    Suspended,
    /// Error state (see `PluginLifecycle::error` for the message)
    Error,
}
80
/// Plugin lifecycle metadata tracked by the dynamic plugin manager.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PluginLifecycle {
    /// Plugin name
    pub name: String,
    /// Lifecycle policy
    pub policy: PluginPolicy,
    /// Current state
    pub state: PluginState,
    /// Capabilities this plugin provides
    pub capabilities: Vec<Capability>,
    /// Last time plugin was used (UTC timestamp)
    pub last_used: Option<chrono::DateTime<chrono::Utc>>,
    /// Memory usage estimate in MB
    pub memory_mb: f64,
    /// Whether plugin has been initialized
    pub initialized: bool,
    /// Error message if in error state (expected to be `Some` when
    /// `state == PluginState::Error`)
    pub error: Option<String>,
}
101
102impl PluginLifecycle {
103    /// Create new lifecycle for an always-on plugin
104    pub fn always_on(name: &str, capabilities: Vec<Capability>) -> Self {
105        Self {
106            name: name.to_string(),
107            policy: PluginPolicy::AlwaysOn,
108            state: PluginState::Active,
109            capabilities,
110            last_used: Some(chrono::Utc::now()),
111            memory_mb: 0.0,
112            initialized: true,
113            error: None,
114        }
115    }
116
117    /// Create new lifecycle for an on-demand plugin
118    pub fn on_demand(name: &str, capabilities: Vec<Capability>, memory_mb: f64) -> Self {
119        Self {
120            name: name.to_string(),
121            policy: PluginPolicy::OnDemand,
122            state: PluginState::Unloaded,
123            capabilities,
124            last_used: None,
125            memory_mb,
126            initialized: false,
127            error: None,
128        }
129    }
130
131    /// Check if plugin can be unloaded
132    pub fn can_unload(&self) -> bool {
133        self.policy != PluginPolicy::AlwaysOn && self.state == PluginState::Active
134    }
135
136    /// Check if plugin can be suspended
137    pub fn can_suspend(&self) -> bool {
138        matches!(
139            self.policy,
140            PluginPolicy::Suspendable | PluginPolicy::OnDemand
141        ) && self.state == PluginState::Active
142    }
143}
144
145/// Defines which plugins are always-on (critical for compliance/safety)
146pub struct AlwaysOnPlugins;
147
148impl AlwaysOnPlugins {
149    /// Get the list of plugins that must always be active
150    pub fn required() -> Vec<&'static str> {
151        vec![
152            "bootstrap",      // Core agent functionality
153            "judgment",       // PII detection, compliance guardrails
154            "explainability", // Audit trails (for compliance)
155        ]
156    }
157
158    /// Get capabilities that must always be available
159    pub fn required_capabilities() -> Vec<Capability> {
160        vec![Capability::PIIDetection, Capability::TamperEvidentAudit]
161    }
162
163    /// Check if a plugin is always-on
164    pub fn is_always_on(plugin_name: &str) -> bool {
165        Self::required().contains(&plugin_name)
166    }
167
168    /// Check if a capability requires an always-on plugin
169    pub fn is_required_capability(cap: Capability) -> bool {
170        Self::required_capabilities().contains(&cap)
171    }
172}
173
/// Intent categories that trigger dynamic plugin loading.
///
/// Produced by [`IntentDetector::detect`]; each category maps to the
/// capabilities and plugins it needs via the methods on this type.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum IntentCategory {
    /// General conversation, no special plugins needed
    Conversation,
    /// ML inference requested
    MLInference,
    /// ML training requested
    MLTraining,
    /// Complex workflow orchestration
    WorkflowOrchestration,
    /// Model deployment
    ModelDeployment,
    /// Hardware optimization
    HardwareOptimization,
    /// Adaptive learning/fine-tuning
    AdaptiveLearning,
    /// Knowledge graph operations
    KnowledgeGraph,
}
194
195impl IntentCategory {
196    /// Get the capabilities required for this intent
197    pub fn required_capabilities(&self) -> Vec<Capability> {
198        match self {
199            Self::Conversation => vec![],
200            Self::MLInference => vec![Capability::MLInference],
201            Self::MLTraining => vec![
202                Capability::MLTraining,
203                Capability::WorkflowEngine, // Training needs workflow orchestration
204            ],
205            Self::WorkflowOrchestration => {
206                vec![Capability::WorkflowEngine, Capability::TaskScheduling]
207            }
208            Self::ModelDeployment => vec![
209                Capability::CascadeInference,
210                Capability::TensorFlowBackend, // TF Serving
211            ],
212            Self::HardwareOptimization => vec![Capability::HardwareOptimization],
213            Self::AdaptiveLearning => vec![Capability::AdaptiveLearning, Capability::HumanFeedback],
214            Self::KnowledgeGraph => vec![Capability::SourceAttribution],
215        }
216    }
217
218    /// Get the plugins typically needed for this intent
219    pub fn suggested_plugins(&self) -> Vec<&'static str> {
220        match self {
221            Self::Conversation => vec![],
222            Self::MLInference => vec!["ml", "local-llm"],
223            Self::MLTraining => vec!["ml", "pytorch", "workflow"],
224            Self::WorkflowOrchestration => vec!["workflow"],
225            Self::ModelDeployment => vec!["tensorflow", "production"],
226            Self::HardwareOptimization => vec!["hardware"],
227            Self::AdaptiveLearning => vec!["adaptive"],
228            Self::KnowledgeGraph => vec!["knowledge"],
229        }
230    }
231}
232
233/// Intent detector for dynamic plugin loading
234pub struct IntentDetector;
235
236impl IntentDetector {
237    /// Analyze text to detect required intent categories
238    pub fn detect(text: &str) -> Vec<IntentCategory> {
239        let text_lower = text.to_lowercase();
240        let mut intents = Vec::new();
241
242        // ML Inference
243        if text_lower.contains("predict")
244            || text_lower.contains("classify")
245            || text_lower.contains("inference")
246            || text_lower.contains("run model")
247            || text_lower.contains("analyze with")
248        {
249            intents.push(IntentCategory::MLInference);
250        }
251
252        // ML Training
253        if text_lower.contains("train")
254            || text_lower.contains("fine-tune")
255            || text_lower.contains("fit model")
256            || text_lower.contains("learning rate")
257            || text_lower.contains("epoch")
258        {
259            intents.push(IntentCategory::MLTraining);
260        }
261
262        // Workflow
263        if text_lower.contains("workflow")
264            || text_lower.contains("pipeline")
265            || text_lower.contains("orchestrat")
266            || text_lower.contains("schedule")
267            || text_lower.contains("automat")
268        {
269            intents.push(IntentCategory::WorkflowOrchestration);
270        }
271
272        // Deployment
273        if text_lower.contains("deploy")
274            || text_lower.contains("production")
275            || text_lower.contains("serve model")
276            || text_lower.contains("scale")
277        {
278            intents.push(IntentCategory::ModelDeployment);
279        }
280
281        // Hardware
282        if text_lower.contains("gpu")
283            || text_lower.contains("hardware")
284            || text_lower.contains("optimize")
285            || text_lower.contains("performance")
286        {
287            intents.push(IntentCategory::HardwareOptimization);
288        }
289
290        // Adaptive Learning
291        if text_lower.contains("feedback")
292            || text_lower.contains("improve")
293            || text_lower.contains("learn from")
294            || text_lower.contains("adapt")
295        {
296            intents.push(IntentCategory::AdaptiveLearning);
297        }
298
299        // Default to conversation if no specific intent
300        if intents.is_empty() {
301            intents.push(IntentCategory::Conversation);
302        }
303
304        intents
305    }
306
307    /// Get all capabilities needed for detected intents
308    pub fn capabilities_for_intents(intents: &[IntentCategory]) -> HashSet<Capability> {
309        intents
310            .iter()
311            .flat_map(|i| i.required_capabilities())
312            .collect()
313    }
314
315    /// Get all suggested plugins for detected intents
316    pub fn plugins_for_intents(intents: &[IntentCategory]) -> HashSet<&'static str> {
317        intents.iter().flat_map(|i| i.suggested_plugins()).collect()
318    }
319}
320
/// Capabilities that plugins can advertise.
///
/// Grouped by domain below. `Copy + Eq + Hash` so values can be stored in
/// `HashSet`s and passed by value throughout the integration layer.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum Capability {
    // ML Capabilities
    /// Core ML operations (model registry, inference engine)
    MLCore,
    /// ML inference capability
    MLInference,
    /// ML training capability
    MLTraining,
    /// Model compression (pruning, quantization)
    MLCompression,
    /// Hyperparameter optimization
    MLHyperparameterOptimization,

    // Framework Capabilities
    /// PyTorch backend available
    PyTorchBackend,
    /// TensorFlow backend available
    TensorFlowBackend,
    /// TensorFlow Lite for edge deployment
    TFLiteEdge,
    /// Distributed training capability
    DistributedTraining,

    // Workflow Capabilities
    /// Workflow orchestration engine
    WorkflowEngine,
    /// Task scheduling
    TaskScheduling,
    /// Pipeline orchestration
    PipelineOrchestration,

    // Production Capabilities
    /// Cascade inference (small -> medium -> large)
    CascadeInference,
    /// Memory compression and management
    MemoryManagement,
    /// Compliance automation (HIPAA, GDPR)
    ComplianceAutomation,

    // Learning Capabilities
    /// Adaptive learning (LoRA, continual learning)
    AdaptiveLearning,
    /// Human feedback collection
    HumanFeedback,
    /// Knowledge distillation
    KnowledgeDistillation,

    // Explainability Capabilities
    /// Reasoning chains
    ReasoningChains,
    /// Source attribution
    SourceAttribution,
    /// Tamper-evident audit logs
    TamperEvidentAudit,

    // Infrastructure Capabilities
    /// Hardware detection and optimization
    HardwareOptimization,
    /// Local vector database
    LocalVectorDB,
    /// Local LLM inference
    LocalLLMInference,

    // Compliance Capabilities
    /// PII detection and redaction
    PIIDetection,
    /// HIPAA compliance
    HIPAACompliance,
}
392
393/// Registry for tracking available capabilities
394#[derive(Debug, Default)]
395pub struct CapabilityRegistry {
396    capabilities: HashSet<Capability>,
397    plugin_capabilities: HashMap<String, Vec<Capability>>,
398}
399
400impl CapabilityRegistry {
401    /// Create a new empty registry
402    pub fn new() -> Self {
403        Self::default()
404    }
405
406    /// Register a capability
407    pub fn register(&mut self, capability: Capability) {
408        self.capabilities.insert(capability);
409    }
410
411    /// Register a capability from a specific plugin
412    pub fn register_from_plugin(&mut self, plugin_name: &str, capability: Capability) {
413        self.capabilities.insert(capability);
414        self.plugin_capabilities
415            .entry(plugin_name.to_string())
416            .or_default()
417            .push(capability);
418    }
419
420    /// Check if a capability is available
421    pub fn has(&self, capability: Capability) -> bool {
422        self.capabilities.contains(&capability)
423    }
424
425    /// Check if all specified capabilities are available
426    pub fn has_all(&self, capabilities: &[Capability]) -> bool {
427        capabilities.iter().all(|c| self.capabilities.contains(c))
428    }
429
430    /// Check if any of the specified capabilities are available
431    pub fn has_any(&self, capabilities: &[Capability]) -> bool {
432        capabilities.iter().any(|c| self.capabilities.contains(c))
433    }
434
435    /// Get all registered capabilities
436    pub fn all(&self) -> &HashSet<Capability> {
437        &self.capabilities
438    }
439
440    /// Get capabilities registered by a specific plugin
441    pub fn get_plugin_capabilities(&self, plugin_name: &str) -> Option<&Vec<Capability>> {
442        self.plugin_capabilities.get(plugin_name)
443    }
444
445    /// Unregister all capabilities from a plugin
446    pub fn unregister_plugin(&mut self, plugin_name: &str) {
447        if let Some(caps) = self.plugin_capabilities.remove(plugin_name) {
448            for cap in caps {
449                // Only remove if no other plugin provides this capability
450                let still_provided = self.plugin_capabilities.values().any(|v| v.contains(&cap));
451                if !still_provided {
452                    self.capabilities.remove(&cap);
453                }
454            }
455        }
456    }
457}
458
/// Trait for plugins that can provide ML capabilities.
///
/// Object-safe (used as `Arc<dyn MLCapable>` by the integration bridge).
#[async_trait]
pub trait MLCapable: Send + Sync {
    /// Run inference with the given input.
    ///
    /// # Errors
    /// Returns a human-readable error string when inference fails.
    async fn infer(
        &self,
        model_name: &str,
        input: serde_json::Value,
    ) -> Result<serde_json::Value, String>;

    /// List available models
    async fn list_models(&self) -> Vec<String>;

    /// Get model status; `None` when no status is available for `model_name`
    async fn model_status(&self, model_name: &str) -> Option<ModelStatus>;
}
475
/// Trait for plugins that can provide workflow capabilities.
///
/// Object-safe (used as `Arc<dyn WorkflowCapable>` by the integration bridge).
#[async_trait]
pub trait WorkflowCapable: Send + Sync {
    /// Create a workflow; returns the new workflow's ID on success.
    ///
    /// # Errors
    /// Returns a human-readable error string when creation fails.
    async fn create_workflow(&self, config: WorkflowConfig) -> Result<String, String>;

    /// Execute a workflow by ID.
    ///
    /// # Errors
    /// Returns a human-readable error string when execution fails.
    async fn execute_workflow(&self, workflow_id: &str) -> Result<WorkflowResult, String>;

    /// Get workflow status; `None` when the ID is not known
    async fn workflow_status(&self, workflow_id: &str) -> Option<WorkflowStatus>;
}
488
/// Trait for plugins that can provide hardware optimization.
///
/// Object-safe (used as `Arc<dyn HardwareCapable>` by the integration bridge).
#[async_trait]
pub trait HardwareCapable: Send + Sync {
    /// Get current hardware info
    async fn get_hardware_info(&self) -> HardwareInfo;

    /// Get optimization recommendations (may be empty)
    async fn get_recommendations(&self) -> Vec<OptimizationRecommendation>;
}
498
/// Trait for plugins that can provide explainability.
///
/// Object-safe (used as `Arc<dyn ExplainabilityCapable>` by the bridge).
#[async_trait]
pub trait ExplainabilityCapable: Send + Sync {
    /// Generate explanation for a decision; `None` when the ID is unknown
    async fn explain(&self, decision_id: &str) -> Option<Explanation>;

    /// Record a decision for audit; returns an audit record ID on success.
    ///
    /// # Errors
    /// Returns a human-readable error string when recording fails.
    async fn record_for_audit(&self, context: AuditContext) -> Result<String, String>;
}
508
/// Trait for plugins that can provide adaptive learning.
///
/// Object-safe (used as `Arc<dyn AdaptiveCapable>` by the integration bridge).
#[async_trait]
pub trait AdaptiveCapable: Send + Sync {
    /// Add a training example to the replay buffer.
    ///
    /// # Errors
    /// Returns a human-readable error string when the example is rejected.
    async fn add_example(&self, example: TrainingExample) -> Result<(), String>;

    /// Trigger fine-tuning if buffer is ready; `None` when no run happened
    async fn maybe_finetune(&self) -> Option<FinetuneResult>;
}
518
/// Model status information reported by an [`MLCapable`] provider.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ModelStatus {
    /// Model name
    pub name: String,
    /// Current state (ready, training, loading, error)
    /// NOTE(review): stringly-typed; the exact vocabulary is provider-defined.
    pub state: String,
    /// Framework (pytorch, tensorflow)
    pub framework: String,
    /// Model version
    pub version: String,
    /// Performance metrics (free-form JSON; schema is provider-specific)
    pub metrics: Option<serde_json::Value>,
}
533
/// Workflow configuration passed to [`WorkflowCapable::create_workflow`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WorkflowConfig {
    /// Workflow name
    pub name: String,
    /// Task definitions (ordering comes from each task's `depends_on`)
    pub tasks: Vec<TaskConfig>,
}
542
/// Task configuration inside a [`WorkflowConfig`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TaskConfig {
    /// Task name (referenced by other tasks' `depends_on`)
    pub name: String,
    /// Task type (stringly-typed; interpreted by the workflow provider)
    pub task_type: String,
    /// Dependencies (other task names)
    pub depends_on: Vec<String>,
    /// Task parameters (free-form JSON; schema is provider-specific)
    pub params: serde_json::Value,
}
555
/// Workflow execution result returned by [`WorkflowCapable::execute_workflow`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WorkflowResult {
    /// Workflow ID
    pub workflow_id: String,
    /// Status string reported by the provider
    pub status: String,
    /// Task results keyed by task name (free-form JSON per task)
    pub task_results: HashMap<String, serde_json::Value>,
    /// Execution time in milliseconds
    pub execution_time_ms: u64,
}
568
/// Workflow status snapshot returned by [`WorkflowCapable::workflow_status`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WorkflowStatus {
    /// Workflow ID
    pub id: String,
    /// Status string reported by the provider
    pub status: String,
    /// Progress (0.0 to 1.0)
    pub progress: f64,
    /// Currently executing task, if any
    pub current_task: Option<String>,
}
581
/// Hardware information reported by a [`HardwareCapable`] provider.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HardwareInfo {
    /// CPU info
    pub cpu: CpuInfo,
    /// Memory info
    pub memory: MemoryInfo,
    /// GPU info (`None` when no GPU is available)
    pub gpu: Option<GpuInfo>,
}
592
/// CPU information.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CpuInfo {
    /// Number of physical cores
    pub cores: usize,
    /// Number of hardware threads
    pub threads: usize,
    /// Architecture (e.g. "x86_64" — format is provider-defined)
    pub architecture: String,
}
603
/// Memory information.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MemoryInfo {
    /// Total memory in GB
    pub total_gb: f64,
    /// Available memory in GB
    pub available_gb: f64,
}
612
/// GPU information.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GpuInfo {
    /// GPU name
    pub name: String,
    /// VRAM in GB
    pub vram_gb: f64,
    /// Compute capability or backend (format is provider-defined)
    pub compute: String,
}
623
/// Optimization recommendation produced by a [`HardwareCapable`] provider.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OptimizationRecommendation {
    /// Recommendation type (stringly-typed; vocabulary is provider-defined)
    pub recommendation_type: String,
    /// Human-readable description
    pub description: String,
    /// Impact level (low, medium, high)
    pub impact: String,
    /// Suggested action
    pub action: String,
}
636
/// Explanation for a decision, produced by [`ExplainabilityCapable::explain`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Explanation {
    /// Decision ID
    pub decision_id: String,
    /// Reasoning steps in order
    pub reasoning_steps: Vec<String>,
    /// Confidence score (presumably 0.0 to 1.0 — confirm provider contract)
    pub confidence: f64,
    /// Sources referenced
    pub sources: Vec<String>,
    /// Alternatives considered and rejected
    pub alternatives: Vec<AlternativeOption>,
}
651
/// Alternative option considered during a decision (see [`Explanation`]).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AlternativeOption {
    /// Description of the alternative
    pub description: String,
    /// Why it was not chosen
    pub rejection_reason: String,
    /// Confidence it would have had, had it been chosen
    pub hypothetical_confidence: f64,
}
662
/// Audit context for recording decisions via
/// [`ExplainabilityCapable::record_for_audit`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AuditContext {
    /// Decision type
    pub decision_type: String,
    /// Input data hash (hash algorithm is caller/provider-defined — confirm)
    pub input_hash: String,
    /// Output data hash
    pub output_hash: String,
    /// Agent ID
    pub agent_id: String,
    /// Additional metadata (free-form JSON)
    pub metadata: serde_json::Value,
}
677
/// Training example for adaptive learning (see [`AdaptiveCapable::add_example`]).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TrainingExample {
    /// Input text
    pub input: String,
    /// Expected output
    pub output: String,
    /// Quality score (0.0 to 1.0)
    pub quality: f64,
    /// Source (user_feedback, system_generated, etc.)
    pub source: String,
}
690
/// Fine-tuning result returned by [`AdaptiveCapable::maybe_finetune`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FinetuneResult {
    /// Name of the adapter produced by the run
    pub adapter_name: String,
    /// Number of examples used
    pub examples_used: usize,
    /// Final training loss
    pub final_loss: f64,
    /// Validation accuracy, when a validation pass was run
    pub validation_accuracy: Option<f64>,
}
703
/// Integration bridge that connects available capabilities.
///
/// All state lives behind `Arc<RwLock<...>>` so the bridge can be cloned and
/// shared across async tasks; operations degrade gracefully when no provider
/// of the required kind is registered.
pub struct IntegrationBridge {
    /// Shared capability registry, updated as providers register.
    registry: Arc<RwLock<CapabilityRegistry>>,
    /// Registered ML providers (only the first is consulted by `try_infer`).
    ml_providers: Arc<RwLock<Vec<Arc<dyn MLCapable>>>>,
    /// Registered workflow providers.
    workflow_providers: Arc<RwLock<Vec<Arc<dyn WorkflowCapable>>>>,
    /// Registered hardware providers.
    hardware_providers: Arc<RwLock<Vec<Arc<dyn HardwareCapable>>>>,
    /// Registered explainability providers.
    explainability_providers: Arc<RwLock<Vec<Arc<dyn ExplainabilityCapable>>>>,
    /// Registered adaptive-learning providers.
    adaptive_providers: Arc<RwLock<Vec<Arc<dyn AdaptiveCapable>>>>,
}
713
impl Default for IntegrationBridge {
    /// Equivalent to [`IntegrationBridge::new`].
    fn default() -> Self {
        Self::new()
    }
}
719
720impl IntegrationBridge {
721    /// Create a new integration bridge
722    pub fn new() -> Self {
723        Self {
724            registry: Arc::new(RwLock::new(CapabilityRegistry::new())),
725            ml_providers: Arc::new(RwLock::new(Vec::new())),
726            workflow_providers: Arc::new(RwLock::new(Vec::new())),
727            hardware_providers: Arc::new(RwLock::new(Vec::new())),
728            explainability_providers: Arc::new(RwLock::new(Vec::new())),
729            adaptive_providers: Arc::new(RwLock::new(Vec::new())),
730        }
731    }
732
733    /// Get the capability registry
734    pub fn registry(&self) -> Arc<RwLock<CapabilityRegistry>> {
735        self.registry.clone()
736    }
737
738    /// Register an ML-capable provider
739    pub async fn register_ml_provider(&self, provider: Arc<dyn MLCapable>) {
740        self.ml_providers.write().await.push(provider);
741        self.registry.write().await.register(Capability::MLCore);
742        self.registry
743            .write()
744            .await
745            .register(Capability::MLInference);
746    }
747
748    /// Register a workflow-capable provider
749    pub async fn register_workflow_provider(&self, provider: Arc<dyn WorkflowCapable>) {
750        self.workflow_providers.write().await.push(provider);
751        self.registry
752            .write()
753            .await
754            .register(Capability::WorkflowEngine);
755    }
756
757    /// Register a hardware-capable provider
758    pub async fn register_hardware_provider(&self, provider: Arc<dyn HardwareCapable>) {
759        self.hardware_providers.write().await.push(provider);
760        self.registry
761            .write()
762            .await
763            .register(Capability::HardwareOptimization);
764    }
765
766    /// Register an explainability provider
767    pub async fn register_explainability_provider(&self, provider: Arc<dyn ExplainabilityCapable>) {
768        self.explainability_providers.write().await.push(provider);
769        self.registry
770            .write()
771            .await
772            .register(Capability::ReasoningChains);
773        self.registry
774            .write()
775            .await
776            .register(Capability::TamperEvidentAudit);
777    }
778
779    /// Register an adaptive learning provider
780    pub async fn register_adaptive_provider(&self, provider: Arc<dyn AdaptiveCapable>) {
781        self.adaptive_providers.write().await.push(provider);
782        self.registry
783            .write()
784            .await
785            .register(Capability::AdaptiveLearning);
786    }
787
788    // === Bridged Operations (Gracefully Degrading) ===
789
790    /// Run inference if ML capability is available
791    pub async fn try_infer(
792        &self,
793        model_name: &str,
794        input: serde_json::Value,
795    ) -> Option<serde_json::Value> {
796        let providers = self.ml_providers.read().await;
797        if let Some(provider) = providers.first() {
798            provider.infer(model_name, input).await.ok()
799        } else {
800            tracing::debug!("ML inference not available - no ML provider registered");
801            None
802        }
803    }
804
805    /// Create and execute a workflow if capability is available
806    pub async fn try_run_workflow(&self, config: WorkflowConfig) -> Option<WorkflowResult> {
807        let providers = self.workflow_providers.read().await;
808        if let Some(provider) = providers.first() {
809            if let Ok(workflow_id) = provider.create_workflow(config).await {
810                return provider.execute_workflow(&workflow_id).await.ok();
811            }
812        } else {
813            tracing::debug!("Workflow not available - no workflow provider registered");
814        }
815        None
816    }
817
818    /// Get hardware optimization if capability is available
819    pub async fn try_get_hardware_recommendations(&self) -> Vec<OptimizationRecommendation> {
820        let providers = self.hardware_providers.read().await;
821        if let Some(provider) = providers.first() {
822            provider.get_recommendations().await
823        } else {
824            tracing::debug!("Hardware optimization not available");
825            Vec::new()
826        }
827    }
828
829    /// Record for audit if explainability is available
830    pub async fn try_record_audit(&self, context: AuditContext) -> Option<String> {
831        let providers = self.explainability_providers.read().await;
832        if let Some(provider) = providers.first() {
833            provider.record_for_audit(context).await.ok()
834        } else {
835            tracing::debug!("Audit recording not available - no explainability provider");
836            None
837        }
838    }
839
840    /// Add training example if adaptive learning is available
841    pub async fn try_add_training_example(&self, example: TrainingExample) -> bool {
842        let providers = self.adaptive_providers.read().await;
843        if let Some(provider) = providers.first() {
844            provider.add_example(example).await.is_ok()
845        } else {
846            tracing::debug!("Adaptive learning not available");
847            false
848        }
849    }
850
851    /// Get a summary of available integrations
852    pub async fn integration_summary(&self) -> IntegrationSummary {
853        let registry = self.registry.read().await;
854        let capabilities = registry.all().clone();
855
856        IntegrationSummary {
857            ml_available: registry.has(Capability::MLCore),
858            workflow_available: registry.has(Capability::WorkflowEngine),
859            hardware_optimization: registry.has(Capability::HardwareOptimization),
860            explainability_available: registry
861                .has_any(&[Capability::ReasoningChains, Capability::TamperEvidentAudit]),
862            adaptive_learning_available: registry.has(Capability::AdaptiveLearning),
863            total_capabilities: capabilities.len(),
864            capabilities: capabilities.into_iter().collect(),
865        }
866    }
867}
868
/// Summary of available integrations, produced by
/// [`IntegrationBridge::integration_summary`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct IntegrationSummary {
    /// Whether ML capabilities are available
    pub ml_available: bool,
    /// Whether workflow capabilities are available
    pub workflow_available: bool,
    /// Whether hardware optimization is available
    pub hardware_optimization: bool,
    /// Whether explainability is available
    pub explainability_available: bool,
    /// Whether adaptive learning is available
    pub adaptive_learning_available: bool,
    /// Total number of capabilities (equals `capabilities.len()`)
    pub total_capabilities: usize,
    /// List of available capabilities (unordered; copied from a set)
    pub capabilities: Vec<Capability>,
}
887
// ============================================================================
// DYNAMIC PLUGIN MANAGER
// ============================================================================
891
/// Manages dynamic plugin loading based on workflow needs.
#[allow(dead_code)]
pub struct DynamicPluginManager {
    /// Plugin lifecycles, keyed by plugin name
    lifecycles: Arc<RwLock<HashMap<String, PluginLifecycle>>>,
    /// Active plugin count by policy
    active_by_policy: Arc<RwLock<HashMap<PluginPolicy, usize>>>,
    /// Maximum memory budget for on-demand plugins (MB)
    /// NOTE(review): enforcement happens in the loading path — confirm there.
    memory_budget_mb: f64,
    /// Current memory usage (MB)
    current_memory_mb: Arc<RwLock<f64>>,
    /// Integration bridge used to publish plugin capabilities
    bridge: Arc<IntegrationBridge>,
    /// Idle timeout for suspending plugins
    idle_timeout: Duration,
}
907
908impl DynamicPluginManager {
909    /// Create a new dynamic plugin manager
910    pub fn new(bridge: Arc<IntegrationBridge>, memory_budget_mb: f64) -> Self {
911        Self {
912            lifecycles: Arc::new(RwLock::new(HashMap::new())),
913            active_by_policy: Arc::new(RwLock::new(HashMap::new())),
914            memory_budget_mb,
915            current_memory_mb: Arc::new(RwLock::new(0.0)),
916            bridge,
917            idle_timeout: Duration::from_secs(300), // 5 minutes default
918        }
919    }
920
921    /// Register an always-on plugin (will be immediately activated)
922    pub async fn register_always_on(&self, name: &str, capabilities: Vec<Capability>) {
923        let lifecycle = PluginLifecycle::always_on(name, capabilities.clone());
924
925        // Register capabilities in the bridge
926        for cap in &capabilities {
927            self.bridge
928                .registry()
929                .write()
930                .await
931                .register_from_plugin(name, *cap);
932        }
933
934        self.lifecycles
935            .write()
936            .await
937            .insert(name.to_string(), lifecycle);
938
939        let mut counts = self.active_by_policy.write().await;
940        *counts.entry(PluginPolicy::AlwaysOn).or_insert(0) += 1;
941
942        tracing::info!(
943            "Registered always-on plugin: {} with {:?}",
944            name,
945            capabilities
946        );
947    }
948
949    /// Register an on-demand plugin (will be loaded when needed)
950    pub async fn register_on_demand(
951        &self,
952        name: &str,
953        capabilities: Vec<Capability>,
954        memory_mb: f64,
955    ) {
956        let lifecycle = PluginLifecycle::on_demand(name, capabilities.clone(), memory_mb);
957        self.lifecycles
958            .write()
959            .await
960            .insert(name.to_string(), lifecycle);
961
962        tracing::info!(
963            "Registered on-demand plugin: {} ({:.1}MB) with {:?}",
964            name,
965            memory_mb,
966            capabilities
967        );
968    }
969
970    /// Ensure required capabilities are available, loading plugins if needed
971    pub async fn ensure_capabilities(
972        &self,
973        required: &[Capability],
974    ) -> Result<Vec<String>, String> {
975        let mut loaded_plugins = Vec::new();
976
977        for cap in required {
978            // Check if capability is already available
979            if self.bridge.registry().read().await.has(*cap) {
980                continue;
981            }
982
983            // Find a plugin that provides this capability
984            let lifecycles = self.lifecycles.read().await;
985            let provider = lifecycles
986                .values()
987                .find(|l| l.capabilities.contains(cap) && l.state != PluginState::Active);
988
989            if let Some(lifecycle) = provider {
990                let plugin_name = lifecycle.name.clone();
991                drop(lifecycles);
992
993                // Load the plugin
994                self.load_plugin(&plugin_name).await?;
995                loaded_plugins.push(plugin_name);
996            }
997        }
998
999        Ok(loaded_plugins)
1000    }
1001
1002    /// Load a plugin by name
1003    pub async fn load_plugin(&self, name: &str) -> Result<(), String> {
1004        // First pass: check if already loaded or get memory requirement
1005        let (already_active, memory_needed) = {
1006            let mut lifecycles = self.lifecycles.write().await;
1007            let lifecycle = lifecycles
1008                .get_mut(name)
1009                .ok_or_else(|| format!("Plugin not registered: {}", name))?;
1010
1011            if lifecycle.state == PluginState::Active {
1012                lifecycle.last_used = Some(chrono::Utc::now());
1013                return Ok(());
1014            }
1015
1016            (lifecycle.state == PluginState::Active, lifecycle.memory_mb)
1017        };
1018
1019        if already_active {
1020            return Ok(());
1021        }
1022
1023        // Check memory budget and free if needed
1024        let current = *self.current_memory_mb.read().await;
1025        if current + memory_needed > self.memory_budget_mb {
1026            self.free_memory(memory_needed).await?;
1027        }
1028
1029        // Now actually load the plugin
1030        let mut lifecycles = self.lifecycles.write().await;
1031        let lifecycle = lifecycles.get_mut(name).unwrap();
1032        lifecycle.state = PluginState::Loading;
1033        let capabilities = lifecycle.capabilities.clone();
1034        let memory = lifecycle.memory_mb;
1035        let policy = lifecycle.policy;
1036
1037        // Simulate loading (in real implementation, would init the plugin)
1038        lifecycle.state = PluginState::Active;
1039        lifecycle.initialized = true;
1040        lifecycle.last_used = Some(chrono::Utc::now());
1041        drop(lifecycles);
1042
1043        // Register capabilities
1044        for cap in &capabilities {
1045            self.bridge
1046                .registry()
1047                .write()
1048                .await
1049                .register_from_plugin(name, *cap);
1050        }
1051
1052        // Update memory usage
1053        *self.current_memory_mb.write().await += memory;
1054
1055        let mut counts = self.active_by_policy.write().await;
1056        *counts.entry(policy).or_insert(0) += 1;
1057
1058        tracing::info!("Loaded plugin: {} ({:.1}MB)", name, memory);
1059        Ok(())
1060    }
1061
1062    /// Unload a plugin by name (fails for always-on plugins)
1063    pub async fn unload_plugin(&self, name: &str) -> Result<(), String> {
1064        let mut lifecycles = self.lifecycles.write().await;
1065        let lifecycle = lifecycles
1066            .get_mut(name)
1067            .ok_or_else(|| format!("Plugin not registered: {}", name))?;
1068
1069        // Cannot unload always-on plugins
1070        if lifecycle.policy == PluginPolicy::AlwaysOn {
1071            return Err(format!("Cannot unload always-on plugin: {}", name));
1072        }
1073
1074        if lifecycle.state != PluginState::Active {
1075            return Ok(());
1076        }
1077
1078        let memory = lifecycle.memory_mb;
1079        lifecycle.state = PluginState::Unloaded;
1080
1081        // Unregister capabilities
1082        self.bridge.registry().write().await.unregister_plugin(name);
1083
1084        // Update memory usage
1085        *self.current_memory_mb.write().await -= memory;
1086
1087        let mut counts = self.active_by_policy.write().await;
1088        if let Some(count) = counts.get_mut(&lifecycle.policy) {
1089            *count = count.saturating_sub(1);
1090        }
1091
1092        tracing::info!("Unloaded plugin: {} (freed {:.1}MB)", name, memory);
1093        Ok(())
1094    }
1095
1096    /// Suspend a plugin (keep state, free resources)
1097    pub async fn suspend_plugin(&self, name: &str) -> Result<(), String> {
1098        let mut lifecycles = self.lifecycles.write().await;
1099        let lifecycle = lifecycles
1100            .get_mut(name)
1101            .ok_or_else(|| format!("Plugin not registered: {}", name))?;
1102
1103        if !lifecycle.can_suspend() {
1104            return Err(format!("Cannot suspend plugin: {}", name));
1105        }
1106
1107        let memory = lifecycle.memory_mb;
1108        lifecycle.state = PluginState::Suspended;
1109
1110        *self.current_memory_mb.write().await -= memory;
1111
1112        tracing::info!("Suspended plugin: {} (freed {:.1}MB)", name, memory);
1113        Ok(())
1114    }
1115
1116    /// Resume a suspended plugin
1117    pub async fn resume_plugin(&self, name: &str) -> Result<(), String> {
1118        // First, check if plugin exists and get memory requirement
1119        let memory_needed = {
1120            let lifecycles = self.lifecycles.read().await;
1121            let lifecycle = lifecycles
1122                .get(name)
1123                .ok_or_else(|| format!("Plugin not registered: {}", name))?;
1124
1125            if lifecycle.state != PluginState::Suspended {
1126                return Err(format!("Plugin is not suspended: {}", name));
1127            }
1128
1129            lifecycle.memory_mb
1130        };
1131
1132        // Check memory budget and free if needed
1133        let current = *self.current_memory_mb.read().await;
1134        if current + memory_needed > self.memory_budget_mb {
1135            self.free_memory(memory_needed).await?;
1136        }
1137
1138        // Now actually resume
1139        let mut lifecycles = self.lifecycles.write().await;
1140        let lifecycle = lifecycles.get_mut(name).unwrap();
1141        lifecycle.state = PluginState::Active;
1142        lifecycle.last_used = Some(chrono::Utc::now());
1143
1144        *self.current_memory_mb.write().await += lifecycle.memory_mb;
1145
1146        tracing::info!("Resumed plugin: {}", name);
1147        Ok(())
1148    }
1149
1150    /// Free memory by suspending or unloading idle plugins
1151    async fn free_memory(&self, needed_mb: f64) -> Result<(), String> {
1152        let mut freed = 0.0;
1153        let now = chrono::Utc::now();
1154
1155        let lifecycles = self.lifecycles.read().await;
1156        let mut candidates: Vec<_> = lifecycles
1157            .values()
1158            .filter(|l| l.can_suspend() || l.can_unload())
1159            .filter(|l| {
1160                l.last_used
1161                    .map(|t| (now - t).num_seconds() > self.idle_timeout.as_secs() as i64)
1162                    .unwrap_or(true)
1163            })
1164            .map(|l| (l.name.clone(), l.memory_mb, l.policy))
1165            .collect();
1166        drop(lifecycles);
1167
1168        // Sort by memory (largest first) to free memory efficiently
1169        candidates.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap_or(std::cmp::Ordering::Equal));
1170
1171        for (name, memory, policy) in candidates {
1172            if freed >= needed_mb {
1173                break;
1174            }
1175
1176            // Prefer suspending over unloading
1177            if matches!(policy, PluginPolicy::Suspendable) {
1178                if self.suspend_plugin(&name).await.is_ok() {
1179                    freed += memory;
1180                }
1181            } else if self.unload_plugin(&name).await.is_ok() {
1182                freed += memory;
1183            }
1184        }
1185
1186        if freed >= needed_mb {
1187            Ok(())
1188        } else {
1189            Err(format!(
1190                "Could not free enough memory: needed {:.1}MB, freed {:.1}MB",
1191                needed_mb, freed
1192            ))
1193        }
1194    }
1195
1196    /// Process a user message and ensure required plugins are loaded
1197    pub async fn process_intent(&self, message: &str) -> ProcessedIntent {
1198        let intents = IntentDetector::detect(message);
1199        let required_caps = IntentDetector::capabilities_for_intents(&intents);
1200        let suggested_plugins = IntentDetector::plugins_for_intents(&intents);
1201
1202        // Always ensure compliance capabilities
1203        let mut all_caps: Vec<_> = required_caps.into_iter().collect();
1204        for cap in AlwaysOnPlugins::required_capabilities() {
1205            if !all_caps.contains(&cap) {
1206                all_caps.push(cap);
1207            }
1208        }
1209
1210        // Try to load required plugins
1211        let loaded = self
1212            .ensure_capabilities(&all_caps)
1213            .await
1214            .unwrap_or_default();
1215
1216        ProcessedIntent {
1217            detected_intents: intents,
1218            required_capabilities: all_caps,
1219            suggested_plugins: suggested_plugins.into_iter().map(String::from).collect(),
1220            plugins_loaded: loaded,
1221        }
1222    }
1223
1224    /// Get current plugin status summary
1225    pub async fn status(&self) -> PluginManagerStatus {
1226        let lifecycles = self.lifecycles.read().await;
1227
1228        let always_on: Vec<_> = lifecycles
1229            .values()
1230            .filter(|l| l.policy == PluginPolicy::AlwaysOn)
1231            .map(|l| l.name.clone())
1232            .collect();
1233
1234        let active: Vec<_> = lifecycles
1235            .values()
1236            .filter(|l| l.state == PluginState::Active && l.policy != PluginPolicy::AlwaysOn)
1237            .map(|l| l.name.clone())
1238            .collect();
1239
1240        let suspended: Vec<_> = lifecycles
1241            .values()
1242            .filter(|l| l.state == PluginState::Suspended)
1243            .map(|l| l.name.clone())
1244            .collect();
1245
1246        let unloaded: Vec<_> = lifecycles
1247            .values()
1248            .filter(|l| l.state == PluginState::Unloaded)
1249            .map(|l| l.name.clone())
1250            .collect();
1251
1252        PluginManagerStatus {
1253            always_on_plugins: always_on,
1254            active_plugins: active,
1255            suspended_plugins: suspended,
1256            unloaded_plugins: unloaded,
1257            memory_used_mb: *self.current_memory_mb.read().await,
1258            memory_budget_mb: self.memory_budget_mb,
1259        }
1260    }
1261}
1262
/// Result of processing user intent
///
/// Returned by `DynamicPluginManager::process_intent`; `plugins_loaded` is
/// empty when no new plugin had to be loaded (or when loading failed, since
/// intent processing is best-effort).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProcessedIntent {
    /// Detected intent categories
    pub detected_intents: Vec<IntentCategory>,
    /// Required capabilities (detected intents plus always-on compliance set)
    pub required_capabilities: Vec<Capability>,
    /// Suggested plugins for the detected intents
    pub suggested_plugins: Vec<String>,
    /// Plugins that were loaded to satisfy requirements
    pub plugins_loaded: Vec<String>,
}
1275
/// Plugin manager status
///
/// Point-in-time snapshot returned by `DynamicPluginManager::status`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PluginManagerStatus {
    /// Always-on plugins (cannot be disabled)
    pub always_on_plugins: Vec<String>,
    /// Currently active on-demand plugins (excludes always-on)
    pub active_plugins: Vec<String>,
    /// Suspended plugins (state kept, memory released, quick resume)
    pub suspended_plugins: Vec<String>,
    /// Unloaded plugins
    pub unloaded_plugins: Vec<String>,
    /// Current memory usage (MB)
    pub memory_used_mb: f64,
    /// Memory budget (MB)
    pub memory_budget_mb: f64,
}
1292
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_capability_registry() {
        let mut reg = CapabilityRegistry::new();
        for cap in [Capability::MLCore, Capability::MLInference] {
            reg.register(cap);
        }

        // Single-capability lookups
        assert!(reg.has(Capability::MLCore));
        assert!(reg.has(Capability::MLInference));
        assert!(!reg.has(Capability::WorkflowEngine));

        // Conjunctive queries: all listed capabilities must be present
        assert!(reg.has_all(&[Capability::MLCore, Capability::MLInference]));
        assert!(!reg.has_all(&[Capability::MLCore, Capability::WorkflowEngine]));

        // Disjunctive queries: at least one must be present
        assert!(reg.has_any(&[Capability::MLCore, Capability::WorkflowEngine]));
        assert!(!reg.has_any(&[Capability::WorkflowEngine, Capability::PyTorchBackend]));
    }

    #[test]
    fn test_plugin_capability_tracking() {
        let mut reg = CapabilityRegistry::new();
        reg.register_from_plugin("ml", Capability::MLCore);
        reg.register_from_plugin("ml", Capability::MLInference);
        reg.register_from_plugin("pytorch", Capability::PyTorchBackend);

        // Each plugin's capability set is tracked independently
        assert_eq!(reg.get_plugin_capabilities("ml").unwrap().len(), 2);
        assert_eq!(reg.get_plugin_capabilities("pytorch").unwrap().len(), 1);

        // Removing one provider must not disturb capabilities from others
        reg.unregister_plugin("pytorch");
        assert!(!reg.has(Capability::PyTorchBackend));
        assert!(reg.has(Capability::MLCore)); // still provided by the ml plugin
    }

    #[tokio::test]
    async fn test_integration_bridge() {
        let bridge = IntegrationBridge::new();

        // With no providers registered, operations degrade gracefully to None
        let inference = bridge.try_infer("test", serde_json::json!({})).await;
        assert!(inference.is_none());

        // ...and the summary reports nothing available
        let summary = bridge.integration_summary().await;
        assert!(!summary.ml_available);
        assert!(!summary.workflow_available);
        assert_eq!(summary.total_capabilities, 0);
    }
}
1347}