// mockforge_foundation/intelligent_behavior/config.rs
//! Configuration for the Intelligent Mock Behavior system
//!
//! Moved from `mockforge-core::intelligent_behavior::config` (Phase 6 / A8).
//! All dependencies (BehaviorRules, StateMachine, ConsistencyRule, Persona,
//! SessionTracking) are now in foundation.

use super::session::SessionTracking;
use super::{types::BehaviorRules, Persona};
use serde::{Deserialize, Serialize};
use std::time::Duration;

12/// Configuration for the Intelligent Mock Behavior system
13#[derive(Debug, Clone, Serialize, Deserialize, Default)]
14#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
15pub struct IntelligentBehaviorConfig {
16    /// Enable intelligent behavior
17    #[serde(default)]
18    pub enabled: bool,
19    /// Session tracking configuration
20    #[serde(default)]
21    pub session_tracking: SessionTracking,
22    /// Behavior model configuration
23    #[serde(default)]
24    pub behavior_model: BehaviorModelConfig,
25    /// Vector store configuration
26    #[serde(default)]
27    pub vector_store: VectorStoreConfig,
28    /// Performance settings
29    #[serde(default)]
30    pub performance: PerformanceConfig,
31    /// Smart Personas configuration
32    #[serde(default)]
33    pub personas: PersonasConfig,
34}
35
36/// Personas configuration for consistent data generation
37#[derive(Debug, Clone, Serialize, Deserialize, Default)]
38#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
39pub struct PersonasConfig {
40    /// List of configured personas
41    #[serde(default)]
42    pub personas: Vec<Persona>,
43    /// Active persona name (if None, uses first persona or defaults)
44    pub active_persona: Option<String>,
45}
46
47impl PersonasConfig {
48    /// Get the active persona, or the first persona if no active persona is set
49    pub fn get_active_persona(&self) -> Option<&Persona> {
50        if let Some(active_name) = &self.active_persona {
51            self.personas.iter().find(|p| p.name == *active_name)
52        } else if !self.personas.is_empty() {
53            Some(&self.personas[0])
54        } else {
55            None
56        }
57    }
58}
59
60/// Behavior model configuration
61#[derive(Debug, Clone, Serialize, Deserialize)]
62#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
63pub struct BehaviorModelConfig {
64    /// LLM provider (openai, anthropic, ollama, openai-compatible)
65    pub llm_provider: String,
66    /// Model name (e.g., gpt-4, claude-3-opus, llama2)
67    pub model: String,
68    /// API key (optional, can use environment variable)
69    pub api_key: Option<String>,
70    /// API endpoint (optional, uses provider default)
71    pub api_endpoint: Option<String>,
72    /// Temperature for LLM generation (0.0 to 2.0)
73    #[serde(default = "default_temperature")]
74    pub temperature: f64,
75    /// Maximum tokens for LLM response
76    #[serde(default = "default_max_tokens")]
77    pub max_tokens: usize,
78    /// Behavior rules
79    #[serde(default)]
80    pub rules: BehaviorRules,
81}
82
83impl Default for BehaviorModelConfig {
84    fn default() -> Self {
85        Self {
86            llm_provider: "openai".to_string(),
87            model: "gpt-3.5-turbo".to_string(),
88            api_key: None,
89            api_endpoint: None,
90            temperature: default_temperature(),
91            max_tokens: default_max_tokens(),
92            rules: BehaviorRules::default(),
93        }
94    }
95}
96
97/// Vector store configuration
98#[derive(Debug, Clone, Serialize, Deserialize)]
99#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
100pub struct VectorStoreConfig {
101    /// Enable vector store for long-term memory
102    #[serde(default)]
103    pub enabled: bool,
104    /// Embedding provider (openai, openai-compatible)
105    #[serde(default = "default_embedding_provider")]
106    pub embedding_provider: String,
107    /// Embedding model (e.g., text-embedding-ada-002)
108    #[serde(default = "default_embedding_model")]
109    pub embedding_model: String,
110    /// Storage path (optional, defaults to in-memory)
111    pub storage_path: Option<String>,
112    /// Number of top results to retrieve for semantic search
113    #[serde(default = "default_search_limit")]
114    pub semantic_search_limit: usize,
115    /// Similarity threshold for semantic search (0.0 to 1.0)
116    #[serde(default = "default_similarity_threshold")]
117    pub similarity_threshold: f32,
118}
119
120impl Default for VectorStoreConfig {
121    fn default() -> Self {
122        Self {
123            enabled: false,
124            embedding_provider: default_embedding_provider(),
125            embedding_model: default_embedding_model(),
126            storage_path: None,
127            semantic_search_limit: default_search_limit(),
128            similarity_threshold: default_similarity_threshold(),
129        }
130    }
131}
132
133/// Performance configuration
134#[derive(Debug, Clone, Serialize, Deserialize)]
135#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
136pub struct PerformanceConfig {
137    /// Cache TTL in seconds
138    #[serde(default = "default_cache_ttl")]
139    pub cache_ttl_seconds: u64,
140    /// Maximum number of interactions to keep in session history
141    #[serde(default = "default_max_history")]
142    pub max_history_length: usize,
143    /// Session timeout in seconds (inactive sessions are removed)
144    #[serde(default = "default_session_timeout")]
145    pub session_timeout_seconds: u64,
146    /// Enable response caching for identical requests
147    #[serde(default = "default_true")]
148    pub enable_response_cache: bool,
149}
150
151impl Default for PerformanceConfig {
152    fn default() -> Self {
153        Self {
154            cache_ttl_seconds: default_cache_ttl(),
155            max_history_length: default_max_history(),
156            session_timeout_seconds: default_session_timeout(),
157            enable_response_cache: true,
158        }
159    }
160}
161
162impl PerformanceConfig {
163    /// Get cache TTL as Duration
164    pub fn cache_ttl(&self) -> Duration {
165        Duration::from_secs(self.cache_ttl_seconds)
166    }
167
168    /// Get session timeout as Duration
169    pub fn session_timeout(&self) -> Duration {
170        Duration::from_secs(self.session_timeout_seconds)
171    }
172}
173
// Serde `default = "..."` helper functions. Keep these values in sync with
// the defaults documented on the config structs above.

/// Default LLM sampling temperature.
fn default_temperature() -> f64 {
    0.7
}

/// Default cap on LLM response tokens.
fn default_max_tokens() -> usize {
    1024
}

/// Default embedding provider name.
fn default_embedding_provider() -> String {
    String::from("openai")
}

/// Default embedding model identifier.
fn default_embedding_model() -> String {
    String::from("text-embedding-ada-002")
}

/// Default number of semantic-search results.
fn default_search_limit() -> usize {
    10
}

/// Default semantic-search similarity threshold.
fn default_similarity_threshold() -> f32 {
    0.7
}

/// Default response-cache TTL, in seconds.
fn default_cache_ttl() -> u64 {
    300
}

/// Default session-history length cap.
fn default_max_history() -> usize {
    50
}

/// Default session inactivity timeout, in seconds (one hour).
fn default_session_timeout() -> u64 {
    3600
}

/// Serde helper for boolean fields that default to `true`.
fn default_true() -> bool {
    true
}