feagi_agent/core/config.rs

// Copyright 2025 Neuraville Inc.
// SPDX-License-Identifier: Apache-2.0

//! Configuration for FEAGI Agent SDK

use crate::core::error::{Result, SdkError};
use feagi_io::{
    AgentCapabilities, AgentType, MotorCapability, SensoryCapability, VisionCapability,
    VisualizationCapability,
};

/// Agent configuration builder
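///
/// Typical usage (illustrative host and ports; they must match FEAGI's
/// feagi_configuration.toml):
///
/// # Example
/// ```
/// use feagi_agent::{AgentConfig, AgentType};
///
/// let config = AgentConfig::new("my_camera", AgentType::Sensory)
///     .with_feagi_endpoints("127.0.0.1", 30001, 5558, 5564, 5562, 5563)
///     .with_vision_capability("camera", (640, 480), 3, "i_vision");
/// assert!(config.validate().is_ok());
/// ```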
#[derive(Debug, Clone)]
pub struct AgentConfig {
    /// Unique agent identifier
    pub agent_id: String,

    /// Agent type (sensory, motor, both, visualization, or infrastructure)
    pub agent_type: AgentType,

    /// Agent capabilities
    pub capabilities: AgentCapabilities,

    /// FEAGI registration endpoint (ZMQ REQ)
    pub registration_endpoint: String,

    /// FEAGI sensory input endpoint (ZMQ PUSH)
    pub sensory_endpoint: String,

    /// FEAGI motor output endpoint (ZMQ SUB)
    pub motor_endpoint: String,

    /// FEAGI visualization stream endpoint (ZMQ SUB)
    pub visualization_endpoint: String,

    /// FEAGI control/API endpoint (ZMQ REQ - REST over ZMQ)
    pub control_endpoint: String,

    /// Heartbeat interval in seconds (0 = disabled)
    pub heartbeat_interval: f64,

    /// Connection timeout in milliseconds
    pub connection_timeout_ms: u64,

    /// Registration retry attempts
    pub registration_retries: u32,

    /// Retry backoff base in milliseconds
    pub retry_backoff_ms: u64,

    /// ZMQ PUSH socket high-water-mark for sensory data
    pub sensory_send_hwm: i32,
    /// ZMQ PUSH socket linger period when disconnecting
    pub sensory_linger_ms: i32,
    /// Whether to enable ZMQ immediate mode on the sensory socket
    pub sensory_immediate: bool,
}

impl AgentConfig {
    /// Create a new agent configuration
    ///
    /// # Arguments
    /// * `agent_id` - Unique identifier for this agent
    /// * `agent_type` - Type of agent (Sensory, Motor, Both, Visualization, or Infrastructure)
    ///
    /// # Example
    /// ```
    /// use feagi_agent::{AgentConfig, AgentType};
    ///
    /// let config = AgentConfig::new("my_camera", AgentType::Sensory);
    /// ```
    pub fn new(agent_id: impl Into<String>, agent_type: AgentType) -> Self {
        Self {
            agent_id: agent_id.into(),
            agent_type,
            capabilities: AgentCapabilities::default(),
            // NO HARDCODED ENDPOINTS - must be set explicitly via builder methods or with_feagi_endpoints()
            registration_endpoint: String::new(),
            sensory_endpoint: String::new(),
            motor_endpoint: String::new(),
            visualization_endpoint: String::new(),
            control_endpoint: String::new(),
            heartbeat_interval: 5.0,
            connection_timeout_ms: 5000,
            registration_retries: 3,
            retry_backoff_ms: 1000,
            // REAL-TIME: HWM=1 ensures the agent doesn't buffer old sensory data
            // If FEAGI is slow, old frames are dropped (desired behavior for real-time)
            sensory_send_hwm: 1,
            sensory_linger_ms: 0,
            // REAL-TIME: immediate=true queues messages only to fully established
            // connections, so frames are not buffered for peers that are still connecting
            sensory_immediate: true,
        }
    }

    /// Set the FEAGI host, deriving all endpoints from hardcoded default ports
    ///
    /// Deprecated: the default ports used here may not match those configured in
    /// FEAGI's feagi_configuration.toml. Prefer `with_feagi_endpoints()`, which
    /// requires every port to be specified explicitly.
    ///
    /// # Example
    /// ```
    /// # use feagi_agent::{AgentConfig, AgentType};
    /// #[allow(deprecated)]
    /// let config = AgentConfig::new("camera", AgentType::Sensory)
    ///     .with_feagi_host("192.168.1.100");
    /// ```
    #[deprecated(
        since = "0.1.0",
        note = "Use with_feagi_endpoints() instead to explicitly specify all ports"
    )]
    pub fn with_feagi_host(mut self, host: impl Into<String>) -> Self {
        let host = host.into();
        // @architecture:acceptable - deprecated method, kept for backwards compatibility only
        // Users should migrate to with_feagi_endpoints() or individual endpoint setters
        self.registration_endpoint = format!("tcp://{}:30001", host);
        self.sensory_endpoint = format!("tcp://{}:5558", host);
        self.motor_endpoint = format!("tcp://{}:5564", host);
        self.visualization_endpoint = format!("tcp://{}:5562", host);
        self.control_endpoint = format!("tcp://{}:5563", host);
        self
    }

    /// Set FEAGI endpoints with explicit ports (RECOMMENDED)
    ///
    /// All ports must be provided explicitly to match FEAGI's configuration.
    /// No default values are used.
    ///
    /// # Example
    /// ```
    /// # use feagi_agent::{AgentConfig, AgentType};
    /// let config = AgentConfig::new("camera", AgentType::Sensory)
    ///     .with_feagi_endpoints(
    ///         "192.168.1.100",
    ///         30001,  // registration_port
    ///         5558,   // sensory_port
    ///         5564,   // motor_port
    ///         5562,   // visualization_port
    ///         5563    // control_port
    ///     );
    /// ```
    pub fn with_feagi_endpoints(
        mut self,
        host: impl Into<String>,
        registration_port: u16,
        sensory_port: u16,
        motor_port: u16,
        visualization_port: u16,
        control_port: u16,
    ) -> Self {
        let host = host.into();
        self.registration_endpoint = format!("tcp://{}:{}", host, registration_port);
        self.sensory_endpoint = format!("tcp://{}:{}", host, sensory_port);
        self.motor_endpoint = format!("tcp://{}:{}", host, motor_port);
        self.visualization_endpoint = format!("tcp://{}:{}", host, visualization_port);
        self.control_endpoint = format!("tcp://{}:{}", host, control_port);
        self
    }

    /// Set registration endpoint
    pub fn with_registration_endpoint(mut self, endpoint: impl Into<String>) -> Self {
        self.registration_endpoint = endpoint.into();
        self
    }

    /// Set sensory input endpoint
    pub fn with_sensory_endpoint(mut self, endpoint: impl Into<String>) -> Self {
        self.sensory_endpoint = endpoint.into();
        self
    }

    /// Set motor output endpoint
    pub fn with_motor_endpoint(mut self, endpoint: impl Into<String>) -> Self {
        self.motor_endpoint = endpoint.into();
        self
    }

    /// Set visualization stream endpoint
    pub fn with_visualization_endpoint(mut self, endpoint: impl Into<String>) -> Self {
        self.visualization_endpoint = endpoint.into();
        self
    }

    /// Set control/API endpoint
    pub fn with_control_endpoint(mut self, endpoint: impl Into<String>) -> Self {
        self.control_endpoint = endpoint.into();
        self
    }

    /// Set heartbeat interval in seconds (0 to disable)
    pub fn with_heartbeat_interval(mut self, interval: f64) -> Self {
        self.heartbeat_interval = interval;
        self
    }

    /// Set connection timeout in milliseconds
    pub fn with_connection_timeout_ms(mut self, timeout_ms: u64) -> Self {
        self.connection_timeout_ms = timeout_ms;
        self
    }

    /// Set registration retry attempts
    pub fn with_registration_retries(mut self, retries: u32) -> Self {
        self.registration_retries = retries;
        self
    }

    /// Configure sensory socket behaviour (ZMQ PUSH)
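    ///
    /// # Example
    /// ```
    /// # use feagi_agent::{AgentConfig, AgentType};
    /// // Illustrative values: buffer up to 100 outgoing frames and keep the socket
    /// // open for 500 ms on shutdown, instead of the real-time defaults set by
    /// // `new()` (HWM = 1, linger = 0, immediate = true).
    /// let config = AgentConfig::new("camera", AgentType::Sensory)
    ///     .with_sensory_socket_config(100, 500, false);
    /// ```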
    pub fn with_sensory_socket_config(
        mut self,
        send_hwm: i32,
        linger_ms: i32,
        immediate: bool,
    ) -> Self {
        self.sensory_send_hwm = send_hwm;
        self.sensory_linger_ms = linger_ms;
        self.sensory_immediate = immediate;
        self
    }

    /// Add vision capability
    ///
    /// # Example
    /// ```
    /// # use feagi_agent::{AgentConfig, AgentType};
    /// let config = AgentConfig::new("camera", AgentType::Sensory)
    ///     .with_vision_capability("camera", (640, 480), 3, "i_vision");
    /// ```
    pub fn with_vision_capability(
        mut self,
        modality: impl Into<String>,
        dimensions: (usize, usize),
        channels: usize,
        target_cortical_area: impl Into<String>,
    ) -> Self {
        self.capabilities.vision = Some(VisionCapability {
            modality: modality.into(),
            dimensions,
            channels,
            target_cortical_area: target_cortical_area.into(),
            unit: None,
            group: None,
        });
        self
    }

    /// Add motor capability
    ///
    /// # Example
    /// ```
    /// # use feagi_agent::{AgentConfig, AgentType};
    /// let config = AgentConfig::new("arm", AgentType::Motor)
    ///     .with_motor_capability("servo", 4, vec!["o_motor".to_string()]);
    /// ```
    pub fn with_motor_capability(
        mut self,
        modality: impl Into<String>,
        output_count: usize,
        source_cortical_areas: Vec<String>,
    ) -> Self {
        self.capabilities.motor = Some(MotorCapability {
            modality: modality.into(),
            output_count,
            source_cortical_areas,
            unit: None,
            group: None,
            source_units: None,
        });
        self
    }

    /// Add vision capability using semantic unit + group (Option B contract).
    ///
    /// This avoids requiring SDK users to know FEAGI's internal cortical ID encoding.
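    ///
    /// # Example
    /// ```ignore
    /// // `SensoryUnit::Vision` is a hypothetical variant shown only to illustrate the
    /// // call shape; consult `feagi_io::SensoryUnit` for the units actually available.
    /// let config = AgentConfig::new("camera", AgentType::Sensory)
    ///     .with_vision_unit("camera", (640, 480), 3, feagi_io::SensoryUnit::Vision, 0);
    /// ```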
    pub fn with_vision_unit(
        mut self,
        modality: impl Into<String>,
        dimensions: (usize, usize),
        channels: usize,
        unit: feagi_io::SensoryUnit,
        group: u8,
    ) -> Self {
        self.capabilities.vision = Some(VisionCapability {
            modality: modality.into(),
            dimensions,
            channels,
            target_cortical_area: String::new(),
            unit: Some(unit),
            group: Some(group),
        });
        self
    }

    /// Add motor capability using semantic unit + group (Option B contract).
    pub fn with_motor_unit(
        mut self,
        modality: impl Into<String>,
        output_count: usize,
        unit: feagi_io::MotorUnit,
        group: u8,
    ) -> Self {
        self.capabilities.motor = Some(MotorCapability {
            modality: modality.into(),
            output_count,
            source_cortical_areas: Vec::new(),
            unit: Some(unit),
            group: Some(group),
            source_units: None,
        });
        self
    }

    /// Add multiple motor units using semantic unit + group pairs (Option B contract).
    pub fn with_motor_units(
        mut self,
        modality: impl Into<String>,
        output_count: usize,
        source_units: Vec<feagi_io::MotorUnitSpec>,
    ) -> Self {
        self.capabilities.motor = Some(MotorCapability {
            modality: modality.into(),
            output_count,
            source_cortical_areas: Vec::new(),
            unit: None,
            group: None,
            source_units: Some(source_units),
        });
        self
    }

    /// Add visualization capability
    ///
    /// # Example
    /// ```
    /// # use feagi_agent::{AgentConfig, AgentType};
    /// let config = AgentConfig::new("brain_viz", AgentType::Visualization)
    ///     .with_visualization_capability("3d_brain", Some((1920, 1080)), Some(30.0), false);
    /// ```
    pub fn with_visualization_capability(
        mut self,
        visualization_type: impl Into<String>,
        resolution: Option<(usize, usize)>,
        refresh_rate: Option<f64>,
        bridge_proxy: bool,
    ) -> Self {
        self.capabilities.visualization = Some(VisualizationCapability {
            visualization_type: visualization_type.into(),
            resolution,
            refresh_rate,
            bridge_proxy,
        });
        self
    }

    /// Add sensory capability (generic)
    ///
    /// This is used for non-vision sensory modalities (text, audio, etc.)
    ///
    /// Device registrations are handled separately via ConnectorAgent and
    /// device_registrations in capabilities.
    ///
    /// # Arguments
    /// * `rate_hz` - Sensory data rate in Hz
    /// * `shm_path` - Optional shared memory path
    ///
    /// # Example
    /// ```
    /// # use feagi_agent::{AgentConfig, AgentType};
    /// let config = AgentConfig::new("text_input", AgentType::Sensory)
    ///     .with_sensory_capability(20.0, None);
    /// ```
    pub fn with_sensory_capability(mut self, rate_hz: f64, shm_path: Option<String>) -> Self {
        self.capabilities.sensory = Some(SensoryCapability { rate_hz, shm_path });
        self
    }

    /// Add custom capability
    ///
    /// # Example
    /// ```
    /// # use feagi_agent::{AgentConfig, AgentType};
    /// use serde_json::json;
    ///
    /// let config = AgentConfig::new("audio", AgentType::Sensory)
    ///     .with_custom_capability("audio", json!({
    ///         "sample_rate": 44100,
    ///         "channels": 2
    ///     }));
    /// ```
    pub fn with_custom_capability(
        mut self,
        key: impl Into<String>,
        value: serde_json::Value,
    ) -> Self {
        self.capabilities.custom.insert(key.into(), value);
        self
    }

    /// Validate configuration
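    ///
    /// # Example
    /// ```
    /// # use feagi_agent::{AgentConfig, AgentType};
    /// // Endpoints are required, so a config without them fails validation.
    /// let config = AgentConfig::new("camera", AgentType::Sensory)
    ///     .with_vision_capability("camera", (640, 480), 3, "i_vision");
    /// assert!(config.validate().is_err());
    ///
    /// let config = config
    ///     .with_registration_endpoint("tcp://localhost:30001")
    ///     .with_sensory_endpoint("tcp://localhost:5558");
    /// assert!(config.validate().is_ok());
    /// ```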
    pub fn validate(&self) -> Result<()> {
        // Agent ID must not be empty
        if self.agent_id.is_empty() {
            return Err(SdkError::InvalidConfig(
                "agent_id cannot be empty".to_string(),
            ));
        }

        // Must have at least one capability
        if self.capabilities.vision.is_none()
            && self.capabilities.motor.is_none()
            && self.capabilities.visualization.is_none()
            && self.capabilities.sensory.is_none()
            && self.capabilities.custom.is_empty()
        {
            return Err(SdkError::InvalidConfig(
                "Agent must have at least one capability".to_string(),
            ));
        }

        // Validate agent type matches capabilities
        match self.agent_type {
            AgentType::Sensory => {
                if self.capabilities.vision.is_none()
                    && self.capabilities.sensory.is_none()
                    && self.capabilities.custom.is_empty()
                {
                    return Err(SdkError::InvalidConfig(
                        "Sensory agent must have vision, sensory, or custom input capability"
                            .to_string(),
                    ));
                }
            }
            AgentType::Motor => {
                if self.capabilities.motor.is_none() {
                    return Err(SdkError::InvalidConfig(
                        "Motor agent must have motor capability".to_string(),
                    ));
                }
            }
            AgentType::Both => {
                if (self.capabilities.vision.is_none()
                    && self.capabilities.sensory.is_none()
                    && self.capabilities.custom.is_empty())
                    || self.capabilities.motor.is_none()
                {
                    return Err(SdkError::InvalidConfig(
                        "Bidirectional agent must have both input and output capabilities"
                            .to_string(),
                    ));
                }
            }
            AgentType::Visualization => {
                if self.capabilities.visualization.is_none() {
                    return Err(SdkError::InvalidConfig(
                        "Visualization agent must have visualization capability".to_string(),
                    ));
                }
            }
            AgentType::Infrastructure => {
                // Infrastructure agents may proxy multiple agent types, so any
                // combination of capabilities is accepted as long as at least one is declared
                if self.capabilities.vision.is_none()
                    && self.capabilities.motor.is_none()
                    && self.capabilities.visualization.is_none()
                    && self.capabilities.sensory.is_none()
                    && self.capabilities.custom.is_empty()
                {
                    return Err(SdkError::InvalidConfig(
                        "Infrastructure agent must declare at least one capability".to_string(),
                    ));
                }
            }
        }

        // Validate endpoints based on agent type
        // Registration endpoint is always required
        if self.registration_endpoint.is_empty() {
            return Err(SdkError::InvalidConfig(
                "registration_endpoint must be set (use with_registration_endpoint() or with_feagi_endpoints())".to_string()
            ));
        }
        if !self.registration_endpoint.starts_with("tcp://") {
            return Err(SdkError::InvalidConfig(
                "registration_endpoint must start with tcp://".to_string(),
            ));
        }

        // Validate sensory endpoint for sensory agents
        if matches!(self.agent_type, AgentType::Sensory | AgentType::Both) {
            if self.sensory_endpoint.is_empty() {
                return Err(SdkError::InvalidConfig(
                    "sensory_endpoint must be set for Sensory/Both agents (use with_sensory_endpoint() or with_feagi_endpoints())".to_string()
                ));
            }
            if !self.sensory_endpoint.starts_with("tcp://") {
                return Err(SdkError::InvalidConfig(
                    "sensory_endpoint must start with tcp://".to_string(),
                ));
            }
        }

        // Validate motor endpoint for motor agents
        if matches!(self.agent_type, AgentType::Motor | AgentType::Both) {
            if self.motor_endpoint.is_empty() {
                return Err(SdkError::InvalidConfig(
                    "motor_endpoint must be set for Motor/Both agents (use with_motor_endpoint() or with_feagi_endpoints())".to_string()
                ));
            }
            if !self.motor_endpoint.starts_with("tcp://") {
                return Err(SdkError::InvalidConfig(
                    "motor_endpoint must start with tcp://".to_string(),
                ));
            }
        }

        // Validate visualization endpoint for visualization agents
        if matches!(self.agent_type, AgentType::Visualization) {
            if self.visualization_endpoint.is_empty() {
                return Err(SdkError::InvalidConfig(
                    "visualization_endpoint must be set for Visualization agents (use with_visualization_endpoint() or with_feagi_endpoints())".to_string()
                ));
            }
            if !self.visualization_endpoint.starts_with("tcp://") {
                return Err(SdkError::InvalidConfig(
                    "visualization_endpoint must start with tcp://".to_string(),
                ));
            }
        }

        if self.sensory_send_hwm < 0 {
            return Err(SdkError::InvalidConfig(
                "sensory_send_hwm must be >= 0".to_string(),
            ));
        }

        Ok(())
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_config_builder() {
        #[allow(deprecated)]
        let config = AgentConfig::new("test_agent", AgentType::Sensory)
            .with_feagi_host("192.168.1.100")
            .with_vision_capability("camera", (640, 480), 3, "i_vision")
            .with_heartbeat_interval(10.0);

        assert_eq!(config.agent_id, "test_agent");
        assert_eq!(config.heartbeat_interval, 10.0);
        assert_eq!(config.registration_endpoint, "tcp://192.168.1.100:30001");
        assert!(config.capabilities.vision.is_some());
    }

    #[test]
    fn test_config_validation_empty_agent_id() {
        let config = AgentConfig::new("", AgentType::Sensory);
        assert!(config.validate().is_err());
    }

    #[test]
    fn test_config_validation_no_capabilities() {
        let config = AgentConfig::new("test", AgentType::Sensory);
        assert!(config.validate().is_err());
    }

    #[test]
    fn test_config_validation_sensory_without_input() {
        let mut config = AgentConfig::new("test", AgentType::Sensory);
        config.capabilities.motor = Some(MotorCapability {
            modality: "servo".to_string(),
            output_count: 1,
            source_cortical_areas: vec!["motor".to_string()],
            unit: None,
            group: None,
            source_units: None,
        });
        assert!(config.validate().is_err());
    }

    #[test]
    fn test_config_validation_valid() {
        let config = AgentConfig::new("test", AgentType::Sensory)
            .with_vision_capability("camera", (640, 480), 3, "vision")
            .with_registration_endpoint("tcp://localhost:8000")
            .with_sensory_endpoint("tcp://localhost:5558");
        assert!(config.validate().is_ok());
    }
}