// synth_claw/providers/mod.rs — provider abstraction layer.
1mod anthropic;
2mod openai;
3
4pub use anthropic::AnthropicProvider;
5pub use openai::OpenAIProvider;
6
7use async_trait::async_trait;
8use crate::config::ProviderConfig;
9use crate::Result;
10
/// Parameters for a single LLM text-generation call.
#[derive(Debug, Clone)]
pub struct GenerationRequest {
    /// The prompt text sent to the model.
    pub prompt: String,
    /// Optional system prompt; when `None`, handling is left to the provider implementation.
    pub system_prompt: Option<String>,
    /// Optional sampling temperature; when `None`, handling is left to the provider implementation.
    pub temperature: Option<f32>,
    /// Optional cap on generated tokens; when `None`, handling is left to the provider implementation.
    pub max_tokens: Option<u32>,
}
18
/// Result of a completed generation call.
#[derive(Debug, Clone)]
pub struct GenerationResponse {
    /// The generated text.
    pub content: String,
    /// Number of input (prompt) tokens consumed by the call.
    pub input_tokens: u32,
    /// Number of output (completion) tokens produced by the call.
    pub output_tokens: u32,
}
25
/// Per-model token pricing, expressed per one million tokens.
///
/// NOTE(review): currency unit (presumably USD) is not stated here — confirm
/// against the provider pricing pages before relying on absolute values.
#[derive(Debug, Clone, Copy)]
pub struct ModelPricing {
    /// Price per one million input (prompt) tokens.
    pub input_per_million: f64,
    /// Price per one million output (completion) tokens.
    pub output_per_million: f64,
}

impl ModelPricing {
    /// Compute the cost of a call that consumed `input_tokens` and produced
    /// `output_tokens` under this pricing table.
    ///
    /// Centralizes the per-million arithmetic so provider implementations of
    /// [`LLMProvider::estimate_cost`] can share one formula instead of each
    /// hand-rolling it.
    pub fn cost(&self, input_tokens: u32, output_tokens: u32) -> f64 {
        // Prices are quoted per 1M tokens; scale the raw counts accordingly.
        const TOKENS_PER_PRICE_UNIT: f64 = 1_000_000.0;
        f64::from(input_tokens) / TOKENS_PER_PRICE_UNIT * self.input_per_million
            + f64::from(output_tokens) / TOKENS_PER_PRICE_UNIT * self.output_per_million
    }
}
31
/// Common interface implemented by each LLM backend.
///
/// `Send + Sync` so a boxed provider can be shared across threads/async tasks.
#[async_trait]
pub trait LLMProvider: Send + Sync {
    /// Execute a generation request and return the model's response.
    async fn generate(&self, request: GenerationRequest) -> Result<GenerationResponse>;
    /// Estimate the cost of a call with the given token counts.
    fn estimate_cost(&self, input_tokens: u32, output_tokens: u32) -> f64;
    /// Short provider identifier (e.g. "openai", "anthropic").
    fn name(&self) -> &str;
    /// The configured model identifier string.
    fn model(&self) -> &str;
}
39
40pub fn create_provider(config: &ProviderConfig) -> Result<Box<dyn LLMProvider>> {
41    match config {
42        ProviderConfig::OpenAI {
43            model,
44            api_key,
45            base_url,
46            temperature,
47            max_tokens,
48        } => {
49            let provider = OpenAIProvider::new(
50                model.clone(),
51                api_key.clone(),
52                base_url.clone(),
53                *temperature,
54                *max_tokens,
55            )?;
56            Ok(Box::new(provider))
57        }
58        ProviderConfig::Anthropic {
59            model,
60            api_key,
61            temperature,
62            max_tokens,
63        } => {
64            let provider = AnthropicProvider::new(
65                model.clone(),
66                api_key.clone(),
67                *temperature,
68                *max_tokens,
69            )?;
70            Ok(Box::new(provider))
71        }
72    }
73}
74
#[cfg(test)]
mod tests {
    use super::*;

    // Factory should yield an OpenAI-backed provider reporting its name/model.
    #[test]
    fn test_create_openai_provider() {
        let cfg = ProviderConfig::OpenAI {
            model: "gpt-4o-mini".to_string(),
            api_key: Some("test-key".to_string()),
            base_url: None,
            temperature: Some(0.7),
            max_tokens: Some(1000),
        };

        let built = create_provider(&cfg).unwrap();
        assert_eq!(built.name(), "openai");
        assert_eq!(built.model(), "gpt-4o-mini");
    }

    // Factory should yield an Anthropic-backed provider; optional fields unset.
    #[test]
    fn test_create_anthropic_provider() {
        let cfg = ProviderConfig::Anthropic {
            model: "claude-haiku-4-5-20251001".to_string(),
            api_key: Some("test-key".to_string()),
            temperature: None,
            max_tokens: None,
        };

        let built = create_provider(&cfg).unwrap();
        assert_eq!(built.name(), "anthropic");
        assert_eq!(built.model(), "claude-haiku-4-5-20251001");
    }
}
107}