use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::time::Duration;
use super::session::SessionTracking;
use super::types::BehaviorRules;
/// Root configuration for the intelligent behavior feature set.
///
/// Every field carries `#[serde(default)]`, so any subset of the keys
/// (including none) may appear in the serialized form; missing sections
/// fall back to their `Default` values.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
pub struct IntelligentBehaviorConfig {
    /// Master on/off switch for the whole subsystem; defaults to `false`.
    #[serde(default)]
    pub enabled: bool,
    /// Session tracking settings (see `super::session::SessionTracking`).
    #[serde(default)]
    pub session_tracking: SessionTracking,
    /// LLM provider/model settings driving behavior generation.
    #[serde(default)]
    pub behavior_model: BehaviorModelConfig,
    /// Optional embedding/vector store settings.
    #[serde(default)]
    pub vector_store: VectorStoreConfig,
    /// Caching, history, and session-timeout tuning knobs.
    #[serde(default)]
    pub performance: PerformanceConfig,
    /// Persona definitions and selection of the active persona.
    #[serde(default)]
    pub personas: PersonasConfig,
}
/// A set of personas plus an optional pointer to the active one.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
pub struct PersonasConfig {
    /// All configured personas; may be empty.
    #[serde(default)]
    pub personas: Vec<Persona>,
    /// Name of the persona to activate. When `None`, the first configured
    /// persona acts as the implicit default (see `get_active_persona`).
    /// A missing field deserializes to `None`.
    pub active_persona: Option<String>,
}
impl PersonasConfig {
    /// Resolves the persona that is currently in effect.
    ///
    /// When `active_persona` names a persona, that persona is returned,
    /// or `None` if the name matches nothing (an invalid selection is not
    /// silently replaced). When no name is set, the first configured
    /// persona — if any — is used as the implicit default.
    pub fn get_active_persona(&self) -> Option<&Persona> {
        match self.active_persona.as_deref() {
            Some(name) => self.personas.iter().find(|p| p.name == name),
            None => self.personas.first(),
        }
    }
}
/// A named behavioral persona with free-form string traits.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
pub struct Persona {
    /// Name used to select this persona via `PersonasConfig::active_persona`.
    pub name: String,
    /// Arbitrary key/value traits. Values may be plain unsigned integers
    /// (`"50"`) or `"min-max"` ranges (`"20-40"`); see `get_numeric_trait`.
    #[serde(default)]
    pub traits: HashMap<String, String>,
}
impl Persona {
    /// Interprets the trait value for `key` as a number.
    ///
    /// Two formats are accepted:
    /// - a plain unsigned integer, e.g. `"50"` -> `Some(50)`;
    /// - a `"min-max"` range, e.g. `"20-40"` -> the floor of the range
    ///   midpoint, `Some(30)`.
    ///
    /// Returns `None` when the key is absent or the value parses as
    /// neither format.
    pub fn get_numeric_trait(&self, key: &str) -> Option<u64> {
        self.traits.get(key).and_then(|value| {
            if let Some((min_str, max_str)) = value.split_once('-') {
                if let (Ok(min), Ok(max)) =
                    (min_str.trim().parse::<u64>(), max_str.trim().parse::<u64>())
                {
                    // Overflow-safe floor midpoint: a naive `(min + max) / 2`
                    // panics in debug builds (and wraps in release) when the
                    // endpoints are large enough for the sum to exceed u64.
                    return Some(min / 2 + max / 2 + (min % 2 + max % 2) / 2);
                }
            }
            // Not a parseable range: fall back to a plain integer.
            value.parse::<u64>().ok()
        })
    }

    /// Returns the raw string value of a trait, if present.
    pub fn get_trait(&self, key: &str) -> Option<&String> {
        self.traits.get(key)
    }
}
/// LLM provider/model settings used by the behavior model.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
pub struct BehaviorModelConfig {
    /// Provider identifier, e.g. `"openai"`.
    pub llm_provider: String,
    /// Model name, e.g. `"gpt-3.5-turbo"`.
    pub model: String,
    /// Optional API key for the provider.
    pub api_key: Option<String>,
    /// Optional override of the provider's API endpoint.
    pub api_endpoint: Option<String>,
    /// Sampling temperature; defaults to 0.7 when omitted.
    #[serde(default = "default_temperature")]
    pub temperature: f64,
    /// Maximum tokens per completion; defaults to 1024 when omitted.
    #[serde(default = "default_max_tokens")]
    pub max_tokens: usize,
    /// Rule set applied on top of the model (see `super::types::BehaviorRules`).
    #[serde(default)]
    pub rules: BehaviorRules,
}
impl Default for BehaviorModelConfig {
fn default() -> Self {
Self {
llm_provider: "openai".to_string(),
model: "gpt-3.5-turbo".to_string(),
api_key: None,
api_endpoint: None,
temperature: default_temperature(),
max_tokens: default_max_tokens(),
rules: BehaviorRules::default(),
}
}
}
/// Configuration for the optional embedding/vector store.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
pub struct VectorStoreConfig {
    /// Whether the vector store is used at all; defaults to `false`.
    #[serde(default)]
    pub enabled: bool,
    /// Embedding provider; defaults to `"openai"`.
    #[serde(default = "default_embedding_provider")]
    pub embedding_provider: String,
    /// Embedding model; defaults to `"text-embedding-ada-002"`.
    #[serde(default = "default_embedding_model")]
    pub embedding_model: String,
    /// Where vectors are persisted; `None` presumably means no on-disk
    /// persistence — confirm against the store implementation.
    pub storage_path: Option<String>,
    /// Maximum results returned by a semantic search; defaults to 10.
    #[serde(default = "default_search_limit")]
    pub semantic_search_limit: usize,
    /// Minimum similarity score for a match; defaults to 0.7.
    #[serde(default = "default_similarity_threshold")]
    pub similarity_threshold: f32,
}
impl Default for VectorStoreConfig {
fn default() -> Self {
Self {
enabled: false,
embedding_provider: default_embedding_provider(),
embedding_model: default_embedding_model(),
storage_path: None,
semantic_search_limit: default_search_limit(),
similarity_threshold: default_similarity_threshold(),
}
}
}
/// Tuning knobs for caching, history retention, and session expiry.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
pub struct PerformanceConfig {
    /// Response-cache time-to-live in seconds; defaults to 300 (5 minutes).
    #[serde(default = "default_cache_ttl")]
    pub cache_ttl_seconds: u64,
    /// Maximum number of history entries kept; defaults to 50.
    #[serde(default = "default_max_history")]
    pub max_history_length: usize,
    /// Idle session expiry in seconds; defaults to 3600 (1 hour).
    #[serde(default = "default_session_timeout")]
    pub session_timeout_seconds: u64,
    /// Whether responses are cached at all; defaults to `true`.
    #[serde(default = "default_true")]
    pub enable_response_cache: bool,
}
impl Default for PerformanceConfig {
fn default() -> Self {
Self {
cache_ttl_seconds: default_cache_ttl(),
max_history_length: default_max_history(),
session_timeout_seconds: default_session_timeout(),
enable_response_cache: true,
}
}
}
impl PerformanceConfig {
    /// `cache_ttl_seconds` expressed as a `Duration` (whole seconds).
    pub fn cache_ttl(&self) -> Duration {
        Duration::new(self.cache_ttl_seconds, 0)
    }

    /// `session_timeout_seconds` expressed as a `Duration` (whole seconds).
    pub fn session_timeout(&self) -> Duration {
        Duration::new(self.session_timeout_seconds, 0)
    }
}
// Serde `#[serde(default = "...")]` helpers. Kept as free functions because
// serde requires a path to a nullary function; the `Default` impls above
// call the same helpers so both default paths stay in sync.

/// Default LLM sampling temperature.
fn default_temperature() -> f64 {
    0.7
}

/// Default completion token budget.
fn default_max_tokens() -> usize {
    1024
}

/// Default embedding provider id.
fn default_embedding_provider() -> String {
    "openai".to_string()
}

/// Default embedding model name.
fn default_embedding_model() -> String {
    "text-embedding-ada-002".to_string()
}

/// Default semantic-search result cap.
fn default_search_limit() -> usize {
    10
}

/// Default minimum similarity score.
fn default_similarity_threshold() -> f32 {
    0.7
}

/// Default response-cache TTL, in seconds (5 minutes).
fn default_cache_ttl() -> u64 {
    300
}

/// Default history length cap.
fn default_max_history() -> usize {
    50
}

/// Default session timeout, in seconds (1 hour).
fn default_session_timeout() -> u64 {
    3600
}

/// Helper for boolean fields that default to `true`.
fn default_true() -> bool {
    true
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Defaults leave both feature switches off and use the documented
    /// provider and cache TTL.
    #[test]
    fn test_config_defaults() {
        let cfg = IntelligentBehaviorConfig::default();
        assert!(!cfg.enabled);
        assert!(!cfg.vector_store.enabled);
        assert_eq!(cfg.behavior_model.llm_provider, "openai");
        assert_eq!(cfg.performance.cache_ttl_seconds, 300);
    }

    /// Ranges resolve to their midpoint, plain integers parse directly,
    /// and anything else yields `None`.
    #[test]
    fn test_persona_get_numeric_trait() {
        let traits: HashMap<String, String> = vec![
            ("hive_count", "20-40"),
            ("apiary_count", "50"),
            ("invalid", "not-a-number"),
        ]
        .into_iter()
        .map(|(k, v)| (k.to_string(), v.to_string()))
        .collect();
        let persona = Persona {
            name: "test".to_string(),
            traits,
        };
        assert_eq!(persona.get_numeric_trait("hive_count"), Some(30));
        assert_eq!(persona.get_numeric_trait("apiary_count"), Some(50));
        assert_eq!(persona.get_numeric_trait("nonexistent"), None);
        assert_eq!(persona.get_numeric_trait("invalid"), None);
    }

    /// Active persona resolution: empty -> None, unset -> first persona,
    /// named -> that persona, unknown name -> None.
    #[test]
    fn test_personas_config_get_active_persona() {
        let make = |name: &str| Persona {
            name: name.to_string(),
            traits: HashMap::new(),
        };

        let mut config = PersonasConfig::default();
        assert!(config.get_active_persona().is_none());

        config.personas = vec![make("first"), make("second")];
        let active = config.get_active_persona();
        assert!(active.is_some());
        assert_eq!(active.unwrap().name, "first");

        config.active_persona = Some("second".to_string());
        let active = config.get_active_persona();
        assert!(active.is_some());
        assert_eq!(active.unwrap().name, "second");

        // An invalid selection is not silently replaced by a fallback.
        config.active_persona = Some("nonexistent".to_string());
        assert!(config.get_active_persona().is_none());
    }

    /// The Duration accessors mirror the raw second counts.
    #[test]
    fn test_performance_config_durations() {
        let config = PerformanceConfig::default();
        assert_eq!(config.cache_ttl(), Duration::from_secs(300));
        assert_eq!(config.session_timeout(), Duration::from_secs(3600));
    }

    /// A fully-specified store config round-trips its field values.
    #[test]
    fn test_vector_store_config() {
        let config = VectorStoreConfig {
            enabled: true,
            embedding_provider: "openai".to_string(),
            embedding_model: "text-embedding-ada-002".to_string(),
            storage_path: Some("/tmp/vectors".to_string()),
            semantic_search_limit: 5,
            similarity_threshold: 0.8,
        };
        assert!(config.enabled);
        assert_eq!(config.semantic_search_limit, 5);
        assert_eq!(config.similarity_threshold, 0.8);
    }
}