use super::session::SessionTracking;
use super::{types::BehaviorRules, Persona};
use serde::{Deserialize, Serialize};
use std::time::Duration;
/// Top-level configuration for the intelligent-behavior feature.
///
/// Every section carries `#[serde(default)]`, so an empty or partial config
/// document deserializes to the same value as `Default::default()`.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
pub struct IntelligentBehaviorConfig {
/// Master on/off switch; `false` when omitted.
#[serde(default)]
pub enabled: bool,
/// Session-tracking settings (see [`SessionTracking`]).
#[serde(default)]
pub session_tracking: SessionTracking,
/// LLM provider/model settings for the behavior model.
#[serde(default)]
pub behavior_model: BehaviorModelConfig,
/// Embedding and vector-store settings for semantic search.
#[serde(default)]
pub vector_store: VectorStoreConfig,
/// Caching, history-length, and session-timeout tuning.
#[serde(default)]
pub performance: PerformanceConfig,
/// Persona definitions plus the optional active-persona selection.
#[serde(default)]
pub personas: PersonasConfig,
}
/// The set of configured personas and which one is currently selected.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
pub struct PersonasConfig {
/// All configured personas; empty when omitted.
#[serde(default)]
pub personas: Vec<Persona>,
/// Name of the selected persona. Missing input deserializes to `None`
/// (serde's implicit handling for `Option`), in which case
/// `get_active_persona` falls back to the first entry of `personas`.
pub active_persona: Option<String>,
}
impl PersonasConfig {
    /// Resolves the persona that should currently drive behavior.
    ///
    /// When `active_persona` names a persona, that persona is looked up by
    /// name and `None` is returned if no entry matches. Without an explicit
    /// selection the first configured persona wins; an empty persona list
    /// yields `None`.
    pub fn get_active_persona(&self) -> Option<&Persona> {
        match &self.active_persona {
            Some(wanted) => self.personas.iter().find(|p| p.name == *wanted),
            None => self.personas.first(),
        }
    }
}
/// LLM settings used to drive the behavior model.
///
/// NOTE: `llm_provider` and `model` have no serde default, so they are
/// required whenever a `behavior_model` section is present in the input
/// (a fully absent section still works via the parent's `#[serde(default)]`).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
pub struct BehaviorModelConfig {
/// Provider identifier (`"openai"` in `Default::default()`).
pub llm_provider: String,
/// Model name (`"gpt-3.5-turbo"` in `Default::default()`).
pub model: String,
/// Optional API key; `None` when omitted.
pub api_key: Option<String>,
/// Optional override for the provider's API endpoint.
pub api_endpoint: Option<String>,
/// Sampling temperature; defaults to 0.7.
#[serde(default = "default_temperature")]
pub temperature: f64,
/// Maximum tokens per completion; defaults to 1024.
#[serde(default = "default_max_tokens")]
pub max_tokens: usize,
/// Rule set for the behavior model (see [`BehaviorRules`]).
#[serde(default)]
pub rules: BehaviorRules,
}
impl Default for BehaviorModelConfig {
fn default() -> Self {
Self {
llm_provider: "openai".to_string(),
model: "gpt-3.5-turbo".to_string(),
api_key: None,
api_endpoint: None,
temperature: default_temperature(),
max_tokens: default_max_tokens(),
rules: BehaviorRules::default(),
}
}
}
/// Settings for the optional vector store used for semantic search.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
pub struct VectorStoreConfig {
/// Whether the vector store is active; `false` when omitted.
#[serde(default)]
pub enabled: bool,
/// Embedding provider id; defaults to `"openai"`.
#[serde(default = "default_embedding_provider")]
pub embedding_provider: String,
/// Embedding model name; defaults to `"text-embedding-ada-002"`.
#[serde(default = "default_embedding_model")]
pub embedding_model: String,
/// Optional storage location; behavior when `None` is decided by the
/// consumer — not visible here.
pub storage_path: Option<String>,
/// Maximum results returned by a semantic search; defaults to 10.
#[serde(default = "default_search_limit")]
pub semantic_search_limit: usize,
/// Minimum similarity score for a match; defaults to 0.7.
#[serde(default = "default_similarity_threshold")]
pub similarity_threshold: f32,
}
impl Default for VectorStoreConfig {
fn default() -> Self {
Self {
enabled: false,
embedding_provider: default_embedding_provider(),
embedding_model: default_embedding_model(),
storage_path: None,
semantic_search_limit: default_search_limit(),
similarity_threshold: default_similarity_threshold(),
}
}
}
/// Tuning knobs for caching, history retention, and session expiry.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
pub struct PerformanceConfig {
/// Response-cache TTL in seconds; defaults to 300 (5 minutes).
#[serde(default = "default_cache_ttl")]
pub cache_ttl_seconds: u64,
/// Maximum history length retained (units defined by the consumer);
/// defaults to 50.
#[serde(default = "default_max_history")]
pub max_history_length: usize,
/// Session timeout in seconds; defaults to 3600 (1 hour).
#[serde(default = "default_session_timeout")]
pub session_timeout_seconds: u64,
/// Whether response caching is enabled; defaults to `true`.
#[serde(default = "default_true")]
pub enable_response_cache: bool,
}
impl Default for PerformanceConfig {
fn default() -> Self {
Self {
cache_ttl_seconds: default_cache_ttl(),
max_history_length: default_max_history(),
session_timeout_seconds: default_session_timeout(),
enable_response_cache: true,
}
}
}
impl PerformanceConfig {
    /// The response-cache TTL (`cache_ttl_seconds`) as a [`Duration`].
    pub fn cache_ttl(&self) -> Duration {
        Duration::new(self.cache_ttl_seconds, 0)
    }

    /// The session timeout (`session_timeout_seconds`) as a [`Duration`].
    pub fn session_timeout(&self) -> Duration {
        Duration::new(self.session_timeout_seconds, 0)
    }
}
// --- serde default helpers ---------------------------------------------
// Each function backs a `#[serde(default = "...")]` attribute above, and is
// also reused by the hand-written `Default` impls so the two paths agree.

// BehaviorModelConfig defaults.

/// Sampling temperature for the behavior model.
fn default_temperature() -> f64 {
    0.7
}

/// Completion token limit for the behavior model.
fn default_max_tokens() -> usize {
    1024
}

// VectorStoreConfig defaults.

/// Embedding provider identifier.
fn default_embedding_provider() -> String {
    String::from("openai")
}

/// Embedding model name.
fn default_embedding_model() -> String {
    String::from("text-embedding-ada-002")
}

/// Result cap for semantic searches.
fn default_search_limit() -> usize {
    10
}

/// Minimum similarity score accepted as a match.
fn default_similarity_threshold() -> f32 {
    0.7
}

// PerformanceConfig defaults.

/// Response-cache TTL, in seconds (5 minutes).
fn default_cache_ttl() -> u64 {
    300
}

/// History-length cap.
fn default_max_history() -> usize {
    50
}

/// Session timeout, in seconds (1 hour).
fn default_session_timeout() -> u64 {
    3600
}

/// Helper for boolean fields that default to `true`.
fn default_true() -> bool {
    true
}