llm_connector/providers/tencent.rs

//! Tencent Hunyuan provider implementation.
//!
//! Tencent Hunyuan exposes an OpenAI-compatible API and is fully compatible with the standard OpenAI protocol.

use crate::core::{ConfigurableProtocol, ProviderBuilder};
use crate::protocols::OpenAIProtocol;
use crate::error::LlmConnectorError;

/// Tencent Hunyuan protocol adapter.
///
/// Wraps the OpenAI protocol in `ConfigurableProtocol`.
pub type TencentProtocol = ConfigurableProtocol<OpenAIProtocol>;

/// Tencent Hunyuan provider type.
pub type TencentProvider = crate::core::GenericProvider<TencentProtocol>;

/// Creates a Tencent Hunyuan provider.
///
/// # Arguments
/// - `api_key`: Tencent Hunyuan API key (format: `sk-...`)
///
/// # Example
/// ```rust,no_run
/// use llm_connector::providers::tencent;
///
/// let provider = tencent("sk-your-api-key").unwrap();
/// ```
pub fn tencent(api_key: &str) -> Result<TencentProvider, LlmConnectorError> {
    tencent_with_config(api_key, None, None, None)
}

/// Creates a Tencent Hunyuan provider with custom configuration.
///
/// # Arguments
/// - `api_key`: API key
/// - `base_url`: Custom base URL (optional, defaults to the Tencent Hunyuan endpoint)
/// - `timeout_secs`: Timeout in seconds (optional)
/// - `proxy`: Proxy URL (optional)
///
/// # Example
/// ```rust,no_run
/// use llm_connector::providers::tencent_with_config;
///
/// let provider = tencent_with_config(
///     "sk-your-api-key",
///     None,     // use the default URL
///     Some(60), // 60-second timeout
///     None
/// ).unwrap();
/// ```
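///
/// A proxy can be supplied through the last argument in the same way. This is a
/// minimal sketch; the proxy address below is a placeholder, not a real endpoint:
/// ```rust,no_run
/// use llm_connector::providers::tencent_with_config;
///
/// let provider = tencent_with_config(
///     "sk-your-api-key",
///     None,
///     Some(60),
///     Some("http://127.0.0.1:8080"), // placeholder proxy URL
/// ).unwrap();
/// ```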
pub fn tencent_with_config(
    api_key: &str,
    base_url: Option<&str>,
    timeout_secs: Option<u64>,
    proxy: Option<&str>,
) -> Result<TencentProvider, LlmConnectorError> {
    // Create the configuration-driven protocol
    let protocol = ConfigurableProtocol::openai_compatible(
        OpenAIProtocol::new(api_key),
        "tencent"
    );

    // Assemble the provider with the builder
    let mut builder = ProviderBuilder::new(
        protocol,
        base_url.unwrap_or("https://api.hunyuan.cloud.tencent.com")
    );

    if let Some(timeout) = timeout_secs {
        builder = builder.timeout(timeout);
    }

    if let Some(proxy_url) = proxy {
        builder = builder.proxy(proxy_url);
    }

    builder.build()
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::core::Protocol;

    #[test]
    fn test_tencent() {
        let provider = tencent("sk-test-key");
        assert!(provider.is_ok());
    }

    #[test]
    fn test_tencent_with_config() {
        let provider = tencent_with_config(
            "sk-test-key",
            Some("https://custom.url"),
            Some(60),
            None
        );
        assert!(provider.is_ok());
    }
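
    // A small additional sketch (not part of the original tests): it exercises
    // the proxy branch of `tencent_with_config`. The address is a placeholder,
    // and this assumes the builder only parses the proxy URL without connecting.
    #[test]
    fn test_tencent_with_proxy() {
        let provider = tencent_with_config(
            "sk-test-key",
            None,
            Some(30),
            Some("http://127.0.0.1:8080")
        );
        assert!(provider.is_ok());
    }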

    #[test]
    fn test_tencent_protocol_endpoint() {
        let protocol = ConfigurableProtocol::openai_compatible(
            OpenAIProtocol::new("sk-test-key"),
            "tencent"
        );
        let endpoint = protocol.chat_endpoint("https://api.hunyuan.cloud.tencent.com");
        assert_eq!(endpoint, "https://api.hunyuan.cloud.tencent.com/v1/chat/completions");
    }

    #[test]
    fn test_tencent_protocol_name() {
        let protocol = ConfigurableProtocol::openai_compatible(
            OpenAIProtocol::new("sk-test-key"),
            "tencent"
        );
        assert_eq!(protocol.name(), "tencent");
    }
}