use serde::{Deserialize, Serialize};
/// Connection and sampling settings for an LLM backend.
///
/// Every field except `model` is optional: absent fields deserialize to
/// `None` (`#[serde(default)]`) and `None` fields are omitted when
/// serializing (`skip_serializing_if = "Option::is_none"`), keeping the
/// serialized form minimal and round-trip stable.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct LlmConfig {
/// Model identifier; required. Presumably a provider-qualified slug
/// such as "openai/gpt-4o-mini" (see tests) — confirm expected format.
pub model: String,
/// API key for the backend; `None` means "use the environment/default".
#[serde(default, skip_serializing_if = "Option::is_none")]
pub api_key: Option<String>,
/// Override for the backend base URL.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub base_url: Option<String>,
/// Request timeout in whole seconds, if any.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub timeout_secs: Option<u64>,
/// Maximum number of retries on failure, if any.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub max_retries: Option<u32>,
/// Sampling temperature; `None` defers to the backend default.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub temperature: Option<f64>,
/// Cap on generated tokens; `None` defers to the backend default.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub max_tokens: Option<u64>,
}
/// Configuration for a structured-extraction request: the JSON schema the
/// LLM output must conform to, plus the [`LlmConfig`] used to run it.
///
/// No `Default` derive: `schema` and `llm` are required, and
/// `schema_name`'s serde default ("extraction", via `default_schema_name`)
/// would differ from `String::default()`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StructuredExtractionConfig {
/// JSON schema describing the expected output shape; required.
pub schema: serde_json::Value,
/// Name attached to the schema; defaults to "extraction" when absent.
#[serde(default = "default_schema_name")]
pub schema_name: String,
/// Optional human-readable description of the schema.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub schema_description: Option<String>,
/// Whether schema conformance is strict; absent deserializes to `false`.
#[serde(default)]
pub strict: bool,
/// Optional prompt override; `None` presumably means a built-in prompt
/// is used — confirm against the caller.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub prompt: Option<String>,
/// LLM backend settings used to perform the extraction.
pub llm: LlmConfig,
}
/// Serde default for `StructuredExtractionConfig::schema_name`.
fn default_schema_name() -> String {
    String::from("extraction")
}
#[cfg(test)]
mod tests {
    use super::*;

    // Shared helper: checks that every optional field of `cfg` is `None`.
    fn assert_all_options_none(cfg: &LlmConfig) {
        assert!(cfg.api_key.is_none());
        assert!(cfg.base_url.is_none());
        assert!(cfg.timeout_secs.is_none());
        assert!(cfg.max_retries.is_none());
        assert!(cfg.temperature.is_none());
        assert!(cfg.max_tokens.is_none());
    }

    // `Default` yields an empty model string and no optional overrides.
    #[test]
    fn test_llm_config_default_trait_is_satisfied() {
        let cfg = LlmConfig::default();
        assert!(cfg.model.is_empty(), "default model should be empty string");
        assert_all_options_none(&cfg);
    }

    // Struct-update syntax keeps all unspecified fields at their defaults.
    #[test]
    fn test_llm_config_struct_update_syntax() {
        let cfg = LlmConfig {
            model: "openai/gpt-4o-mini".to_string(),
            ..Default::default()
        };
        assert_eq!(cfg.model, "openai/gpt-4o-mini");
        assert_all_options_none(&cfg);
    }
}