llm_connector/providers/openai.rs

//! OpenAI provider implementation - V2 architecture
//!
//! This module provides a complete implementation of the OpenAI service, built on the unified V2 architecture.
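//!
//! A minimal construction sketch (the keys and endpoints below are placeholders):
//!
//! ```rust,no_run
//! use llm_connector::providers::{openai, openai_compatible};
//!
//! // Official OpenAI endpoint
//! let provider = openai("sk-...").unwrap();
//!
//! // Any OpenAI-compatible service, e.g. DeepSeek
//! let deepseek = openai_compatible("sk-...", "https://api.deepseek.com", "deepseek").unwrap();
//! ```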

use crate::core::{GenericProvider, HttpClient, Protocol};
use crate::protocols::OpenAIProtocol;
use crate::error::LlmConnectorError;
use std::collections::HashMap;

/// The OpenAI provider type
pub type OpenAIProvider = GenericProvider<OpenAIProtocol>;

/// Create an OpenAI provider
///
/// # Arguments
/// - `api_key`: OpenAI API key
///
/// # Returns
/// A configured OpenAI provider instance
///
/// # Example
/// ```rust,no_run
/// use llm_connector::providers::openai;
///
/// let provider = openai("sk-...").unwrap();
/// ```
pub fn openai(api_key: &str) -> Result<OpenAIProvider, LlmConnectorError> {
    openai_with_config(api_key, None, None, None)
}

/// Create an OpenAI provider with a custom base URL
///
/// # Arguments
/// - `api_key`: API key
/// - `base_url`: Custom base URL (e.g. for OpenAI-compatible services)
///
/// # Example
/// ```rust,no_run
/// use llm_connector::providers::openai_with_base_url;
///
/// // Use a custom endpoint (for Azure OpenAI, prefer the dedicated `azure_openai` helper)
/// let provider = openai_with_base_url("sk-...", "https://your-resource.openai.azure.com").unwrap();
/// ```
pub fn openai_with_base_url(api_key: &str, base_url: &str) -> Result<OpenAIProvider, LlmConnectorError> {
    openai_with_config(api_key, Some(base_url), None, None)
}

/// Create an OpenAI provider with custom configuration
///
/// # Arguments
/// - `api_key`: API key
/// - `base_url`: Custom base URL (optional)
/// - `timeout_secs`: Request timeout in seconds (optional)
/// - `proxy`: Proxy URL (optional)
///
/// # Example
/// ```rust,no_run
/// use llm_connector::providers::openai_with_config;
///
/// let provider = openai_with_config(
///     "sk-...",
///     Some("https://api.openai.com"),
///     Some(60), // 60-second timeout
///     Some("http://proxy:8080")
/// ).unwrap();
/// ```
pub fn openai_with_config(
    api_key: &str,
    base_url: Option<&str>,
    timeout_secs: Option<u64>,
    proxy: Option<&str>,
) -> Result<OpenAIProvider, LlmConnectorError> {
    // Create the protocol instance
    let protocol = OpenAIProtocol::new(api_key);

    // Create the HTTP client, falling back to the official endpoint
    let client = HttpClient::with_config(
        base_url.unwrap_or("https://api.openai.com"),
        timeout_secs,
        proxy,
    )?;

    // Attach the authentication headers provided by the protocol
    let auth_headers: HashMap<String, String> = protocol.auth_headers().into_iter().collect();
    let client = client.with_headers(auth_headers);

    // Assemble the generic provider
    Ok(GenericProvider::new(protocol, client))
}

/// Create a provider for Azure OpenAI
///
/// # Arguments
/// - `api_key`: Azure OpenAI API key
/// - `endpoint`: Azure OpenAI endpoint (e.g. "https://your-resource.openai.azure.com")
/// - `api_version`: API version (e.g. "2024-02-15-preview")
///
/// # Example
/// ```rust,no_run
/// use llm_connector::providers::azure_openai;
///
/// let provider = azure_openai(
///     "your-api-key",
///     "https://your-resource.openai.azure.com",
///     "2024-02-15-preview"
/// ).unwrap();
/// ```
pub fn azure_openai(
    api_key: &str,
    endpoint: &str,
    api_version: &str,
) -> Result<OpenAIProvider, LlmConnectorError> {
    let protocol = OpenAIProtocol::new(api_key);

    // Content-Type is set automatically by the .json() call in HttpClient::post()
    // NOTE: Azure OpenAI normally expects `api-version` as a query parameter;
    // sending it as a header here assumes the target endpoint accepts that form.
    let client = HttpClient::new(endpoint)?
        .with_header("api-key".to_string(), api_key.to_string())
        .with_header("api-version".to_string(), api_version.to_string());

    Ok(GenericProvider::new(protocol, client))
}

/// Create a provider for OpenAI-compatible services
///
/// This is a convenience constructor for services that expose an
/// OpenAI-compatible API, such as DeepSeek, Moonshot, and Together AI.
///
/// # Arguments
/// - `api_key`: API key
/// - `base_url`: Base URL of the service
/// - `service_name`: Service name (used in the `User-Agent` header)
///
/// # Example
/// ```rust,no_run
/// use llm_connector::providers::openai_compatible;
///
/// // DeepSeek
/// let deepseek = openai_compatible(
///     "sk-...",
///     "https://api.deepseek.com",
///     "deepseek"
/// ).unwrap();
///
/// // Moonshot
/// let moonshot = openai_compatible(
///     "sk-...",
///     "https://api.moonshot.cn",
///     "moonshot"
/// ).unwrap();
/// ```
pub fn openai_compatible(
    api_key: &str,
    base_url: &str,
    service_name: &str,
) -> Result<OpenAIProvider, LlmConnectorError> {
    let protocol = OpenAIProtocol::new(api_key);

    // Content-Type is set automatically by the .json() call in HttpClient::post()
    let client = HttpClient::new(base_url)?
        .with_header("Authorization".to_string(), format!("Bearer {}", api_key))
        .with_header("User-Agent".to_string(), format!("llm-connector/{}", service_name));

    Ok(GenericProvider::new(protocol, client))
}

/// Validate the format of an OpenAI API key
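///
/// Checks only the superficial shape of the key (an `sk-` prefix and a minimum
/// length); it does not verify the key against the API. The example below is a
/// small sketch whose import path mirrors the other examples in this module and
/// assumes the helper is re-exported from `llm_connector::providers`.
///
/// # Example
/// ```rust
/// // Path assumption: re-exported from `providers`, like the constructors above.
/// use llm_connector::providers::validate_openai_key;
///
/// assert!(validate_openai_key("sk-0123456789abcdef0123456789"));
/// assert!(!validate_openai_key("not-a-key"));
/// ```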
pub fn validate_openai_key(api_key: &str) -> bool {
    api_key.starts_with("sk-") && api_key.len() > 20
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_openai_provider_creation() {
        let provider = openai("test-key");
        assert!(provider.is_ok());

        let provider = provider.unwrap();
        assert_eq!(provider.protocol().name(), "openai");
        assert_eq!(provider.protocol().api_key(), "test-key");
    }

    #[test]
    fn test_openai_with_base_url() {
        let provider = openai_with_base_url("test-key", "https://custom.api.com");
        assert!(provider.is_ok());

        let provider = provider.unwrap();
        assert_eq!(provider.client().base_url(), "https://custom.api.com");
    }

    #[test]
    fn test_azure_openai() {
        let provider = azure_openai(
            "test-key",
            "https://test.openai.azure.com",
            "2024-02-15-preview"
        );
        assert!(provider.is_ok());
    }

    #[test]
    fn test_openai_compatible() {
        let provider = openai_compatible(
            "test-key",
            "https://api.deepseek.com",
            "deepseek"
        );
        assert!(provider.is_ok());
    }
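
    // Additional coverage sketches: these exercise only code defined in this
    // module (validate_openai_key and the full-config constructor). The
    // timeout value is arbitrary; no proxy is configured so the client can be
    // built offline.
    #[test]
    fn test_validate_openai_key() {
        assert!(validate_openai_key("sk-0123456789abcdef0123456789"));
        assert!(!validate_openai_key("sk-short"));
        assert!(!validate_openai_key("no-prefix-0123456789abcdef"));
    }

    #[test]
    fn test_openai_with_config_timeout() {
        let provider = openai_with_config("test-key", None, Some(30), None);
        assert!(provider.is_ok());
    }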
}