llm_connector/client.rs

//! V2 unified client - the next-generation LLM client interface.
//!
//! This module provides a unified client interface that supports all LLM service providers.

use crate::core::Provider;
use crate::error::LlmConnectorError;
use crate::types::{ChatRequest, ChatResponse};
use std::sync::Arc;

#[cfg(feature = "streaming")]
use crate::types::ChatStream;

/// Unified LLM client
///
/// This client provides a single interface for accessing a variety of LLM
/// services, built on the clean abstraction layers of the V2 architecture.
///
/// # Example
/// ```rust,no_run
/// use llm_connector::{LlmClient, types::{ChatRequest, Message, Role}};
///
/// #[tokio::main]
/// async fn main() -> Result<(), Box<dyn std::error::Error>> {
///     // Create an OpenAI client
///     let client = LlmClient::openai("sk-...")?;
///
///     // Build the request
///     let request = ChatRequest {
///         model: "gpt-4".to_string(),
///         messages: vec![Message::text(Role::User, "Hello, how are you?")],
///         ..Default::default()
///     };
///
///     // Send the request
///     let response = client.chat(&request).await?;
///     println!("Response: {}", response.content);
///
///     Ok(())
/// }
/// ```
pub struct LlmClient {
    provider: Arc<dyn Provider>,
}

impl LlmClient {
    /// Create a client from any `Provider`
    pub fn from_provider(provider: Arc<dyn Provider>) -> Self {
        Self { provider }
    }

    /// Create an OpenAI client
    ///
    /// # Arguments
    /// - `api_key`: OpenAI API key
    ///
    /// # Example
    /// ```rust,no_run
    /// use llm_connector::LlmClient;
    ///
    /// let client = LlmClient::openai("sk-...").unwrap();
    /// ```
    pub fn openai(api_key: &str) -> Result<Self, LlmConnectorError> {
        let provider = crate::providers::openai(api_key)?;
        Ok(Self::from_provider(Arc::new(provider)))
    }

    /// Create an OpenAI client with a custom base URL
    ///
    /// # Arguments
    /// - `api_key`: API key
    /// - `base_url`: custom base URL
    ///
    /// # Example
    /// ```rust,no_run
    /// use llm_connector::LlmClient;
    ///
    /// let client = LlmClient::openai_with_base_url(
    ///     "sk-...",
    ///     "https://api.deepseek.com"
    /// ).unwrap();
    /// ```
    pub fn openai_with_base_url(api_key: &str, base_url: &str) -> Result<Self, LlmConnectorError> {
        let provider = crate::providers::openai_with_base_url(api_key, base_url)?;
        Ok(Self::from_provider(Arc::new(provider)))
    }

    /// Create an Azure OpenAI client
    ///
    /// # Arguments
    /// - `api_key`: Azure OpenAI API key
    /// - `endpoint`: Azure OpenAI endpoint
    /// - `api_version`: API version
    ///
    /// # Example
    /// ```rust,no_run
    /// use llm_connector::LlmClient;
    ///
    /// let client = LlmClient::azure_openai(
    ///     "your-api-key",
    ///     "https://your-resource.openai.azure.com",
    ///     "2024-02-15-preview"
    /// ).unwrap();
    /// ```
    pub fn azure_openai(
        api_key: &str,
        endpoint: &str,
        api_version: &str,
    ) -> Result<Self, LlmConnectorError> {
        let provider = crate::providers::azure_openai(api_key, endpoint, api_version)?;
        Ok(Self::from_provider(Arc::new(provider)))
    }

    /// Create an Alibaba Cloud DashScope client
    ///
    /// # Arguments
    /// - `api_key`: Alibaba Cloud DashScope API key
    ///
    /// # Example
    /// ```rust,no_run
    /// use llm_connector::LlmClient;
    ///
    /// let client = LlmClient::aliyun("sk-...").unwrap();
    /// ```
    pub fn aliyun(api_key: &str) -> Result<Self, LlmConnectorError> {
        let provider = crate::providers::aliyun(api_key)?;
        Ok(Self::from_provider(Arc::new(provider)))
    }

    /// Create an Anthropic Claude client
    ///
    /// # Arguments
    /// - `api_key`: Anthropic API key (format: sk-ant-...)
    ///
    /// # Example
    /// ```rust,no_run
    /// use llm_connector::LlmClient;
    ///
    /// let client = LlmClient::anthropic("sk-ant-...").unwrap();
    /// ```
    pub fn anthropic(api_key: &str) -> Result<Self, LlmConnectorError> {
        let provider = crate::providers::anthropic(api_key)?;
        Ok(Self::from_provider(Arc::new(provider)))
    }

    /// Create a Zhipu GLM client
    ///
    /// # Arguments
    /// - `api_key`: Zhipu GLM API key
    ///
    /// # Example
    /// ```rust,no_run
    /// use llm_connector::LlmClient;
    ///
    /// let client = LlmClient::zhipu("your-api-key").unwrap();
    /// ```
    pub fn zhipu(api_key: &str) -> Result<Self, LlmConnectorError> {
        let provider = crate::providers::zhipu(api_key)?;
        Ok(Self::from_provider(Arc::new(provider)))
    }

    /// Create a Zhipu GLM client (OpenAI-compatible mode)
    ///
    /// # Arguments
    /// - `api_key`: Zhipu GLM API key
    ///
    /// # Example
    /// ```rust,no_run
    /// use llm_connector::LlmClient;
    ///
    /// let client = LlmClient::zhipu_openai_compatible("your-api-key").unwrap();
    /// ```
    pub fn zhipu_openai_compatible(api_key: &str) -> Result<Self, LlmConnectorError> {
        let provider = crate::providers::zhipu_openai_compatible(api_key)?;
        Ok(Self::from_provider(Arc::new(provider)))
    }

    /// Create an Ollama client (default local address)
    ///
    /// # Example
    /// ```rust,no_run
    /// use llm_connector::LlmClient;
    ///
    /// let client = LlmClient::ollama().unwrap();
    /// ```
    pub fn ollama() -> Result<Self, LlmConnectorError> {
        let provider = crate::providers::ollama()?;
        Ok(Self::from_provider(Arc::new(provider)))
    }

    /// Create an Ollama client with a custom URL
    ///
    /// # Arguments
    /// - `base_url`: URL of the Ollama service
    ///
    /// # Example
    /// ```rust,no_run
    /// use llm_connector::LlmClient;
    ///
    /// let client = LlmClient::ollama_with_base_url("http://192.168.1.100:11434").unwrap();
    /// ```
    pub fn ollama_with_base_url(base_url: &str) -> Result<Self, LlmConnectorError> {
        let provider = crate::providers::ollama_with_base_url(base_url)?;
        Ok(Self::from_provider(Arc::new(provider)))
    }

    /// Create a client for an OpenAI-compatible service
    ///
    /// # Arguments
    /// - `api_key`: API key
    /// - `base_url`: base URL of the service
    /// - `service_name`: name of the service
    ///
    /// # Example
    /// ```rust,no_run
    /// use llm_connector::LlmClient;
    ///
    /// // DeepSeek
    /// let deepseek = LlmClient::openai_compatible(
    ///     "sk-...",
    ///     "https://api.deepseek.com",
    ///     "deepseek"
    /// ).unwrap();
    ///
    /// // Moonshot
    /// let moonshot = LlmClient::openai_compatible(
    ///     "sk-...",
    ///     "https://api.moonshot.cn",
    ///     "moonshot"
    /// ).unwrap();
    ///
    /// // LongCat (OpenAI format)
    /// let longcat = LlmClient::openai_compatible(
    ///     "ak_...",
    ///     "https://api.longcat.chat/openai",
    ///     "longcat"
    /// ).unwrap();
    /// ```
    pub fn openai_compatible(
        api_key: &str,
        base_url: &str,
        service_name: &str,
    ) -> Result<Self, LlmConnectorError> {
        let provider = crate::providers::openai_compatible(api_key, base_url, service_name)?;
        Ok(Self::from_provider(Arc::new(provider)))
    }

    /// Create a LongCat client using the Anthropic format
    ///
    /// LongCat's Anthropic endpoint uses Bearer authentication instead of the standard x-api-key authentication.
    ///
    /// # Arguments
    /// - `api_key`: LongCat API key (format: ak_...)
    ///
    /// # Example
    /// ```rust,no_run
    /// use llm_connector::LlmClient;
    ///
    /// let client = LlmClient::longcat_anthropic("ak_...").unwrap();
    /// ```
    pub fn longcat_anthropic(api_key: &str) -> Result<Self, LlmConnectorError> {
        let provider = crate::providers::longcat_anthropic(api_key)?;
        Ok(Self::from_provider(Arc::new(provider)))
    }

    /// Create a LongCat Anthropic client with custom configuration
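    ///
    /// # Example
    /// A minimal sketch; the key, timeout, and `None` defaults are illustrative:
    /// ```rust,no_run
    /// use llm_connector::LlmClient;
    ///
    /// let client = LlmClient::longcat_anthropic_with_config(
    ///     "ak_...",
    ///     None,      // keep the default base URL
    ///     Some(60),  // 60-second request timeout
    ///     None,      // no proxy
    /// ).unwrap();
    /// ```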
    pub fn longcat_anthropic_with_config(
        api_key: &str,
        base_url: Option<&str>,
        timeout_secs: Option<u64>,
        proxy: Option<&str>,
    ) -> Result<Self, LlmConnectorError> {
        let provider = crate::providers::longcat_anthropic_with_config(
            api_key,
            base_url,
            timeout_secs,
            proxy,
        )?;
        Ok(Self::from_provider(Arc::new(provider)))
    }

    /// Create a Volcengine client
    ///
    /// Volcengine uses the OpenAI-compatible API format, but with a different endpoint path.
    ///
    /// # Arguments
    /// - `api_key`: Volcengine API key (UUID format)
    ///
    /// # Example
    /// ```rust,no_run
    /// use llm_connector::LlmClient;
    ///
    /// let client = LlmClient::volcengine("xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx").unwrap();
    /// ```
    pub fn volcengine(api_key: &str) -> Result<Self, LlmConnectorError> {
        let provider = crate::providers::volcengine(api_key)?;
        Ok(Self::from_provider(Arc::new(provider)))
    }

    /// Create a Volcengine client with custom configuration
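    ///
    /// # Example
    /// A minimal sketch; the base URL shown is a hypothetical override, not a verified endpoint:
    /// ```rust,no_run
    /// use llm_connector::LlmClient;
    ///
    /// let client = LlmClient::volcengine_with_config(
    ///     "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx",
    ///     Some("https://ark.cn-beijing.volces.com"), // hypothetical custom base URL
    ///     Some(30),                                  // 30-second timeout
    ///     None,                                      // no proxy
    /// ).unwrap();
    /// ```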
    pub fn volcengine_with_config(
        api_key: &str,
        base_url: Option<&str>,
        timeout_secs: Option<u64>,
        proxy: Option<&str>,
    ) -> Result<Self, LlmConnectorError> {
        let provider = crate::providers::volcengine_with_config(
            api_key,
            base_url,
            timeout_secs,
            proxy,
        )?;
        Ok(Self::from_provider(Arc::new(provider)))
    }

    /// Create a Tencent Hunyuan client
    ///
    /// Tencent Hunyuan uses the OpenAI-compatible API format.
    ///
    /// # Arguments
    /// - `api_key`: Tencent Hunyuan API key (format: sk-...)
    ///
    /// # Example
    /// ```rust,no_run
    /// use llm_connector::LlmClient;
    ///
    /// let client = LlmClient::tencent("sk-...").unwrap();
    /// ```
    pub fn tencent(api_key: &str) -> Result<Self, LlmConnectorError> {
        let provider = crate::providers::tencent(api_key)?;
        Ok(Self::from_provider(Arc::new(provider)))
    }

    /// Create a Tencent Hunyuan client with custom configuration
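    ///
    /// # Example
    /// A minimal sketch; the proxy address is illustrative:
    /// ```rust,no_run
    /// use llm_connector::LlmClient;
    ///
    /// let client = LlmClient::tencent_with_config(
    ///     "sk-...",
    ///     None,                           // keep the default base URL
    ///     None,                           // keep the default timeout
    ///     Some("http://127.0.0.1:7890"),  // route through a local proxy
    /// ).unwrap();
    /// ```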
    pub fn tencent_with_config(
        api_key: &str,
        base_url: Option<&str>,
        timeout_secs: Option<u64>,
        proxy: Option<&str>,
    ) -> Result<Self, LlmConnectorError> {
        let provider = crate::providers::tencent_with_config(
            api_key,
            base_url,
            timeout_secs,
            proxy,
        )?;
        Ok(Self::from_provider(Arc::new(provider)))
    }

    /// Create a Moonshot client
    ///
    /// Moonshot uses the OpenAI-compatible API format.
    ///
    /// # Arguments
    /// - `api_key`: Moonshot API key (format: sk-...)
    ///
    /// # Example
    /// ```rust,no_run
    /// use llm_connector::LlmClient;
    ///
    /// let client = LlmClient::moonshot("sk-...").unwrap();
    /// ```
    pub fn moonshot(api_key: &str) -> Result<Self, LlmConnectorError> {
        let provider = crate::providers::moonshot(api_key)?;
        Ok(Self::from_provider(Arc::new(provider)))
    }

    /// Create a Moonshot client with custom configuration
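    ///
    /// # Example
    /// A minimal sketch with illustrative values:
    /// ```rust,no_run
    /// use llm_connector::LlmClient;
    ///
    /// let client = LlmClient::moonshot_with_config(
    ///     "sk-...",
    ///     None,       // keep the default base URL
    ///     Some(120),  // allow long generations up to 120 seconds
    ///     None,       // no proxy
    /// ).unwrap();
    /// ```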
    pub fn moonshot_with_config(
        api_key: &str,
        base_url: Option<&str>,
        timeout_secs: Option<u64>,
        proxy: Option<&str>,
    ) -> Result<Self, LlmConnectorError> {
        let provider = crate::providers::moonshot_with_config(
            api_key,
            base_url,
            timeout_secs,
            proxy,
        )?;
        Ok(Self::from_provider(Arc::new(provider)))
    }

    /// Create a DeepSeek client
    ///
    /// DeepSeek uses the OpenAI-compatible API format and supports reasoning models.
    ///
    /// # Arguments
    /// - `api_key`: DeepSeek API key (format: sk-...)
    ///
    /// # Example
    /// ```rust,no_run
    /// use llm_connector::LlmClient;
    ///
    /// let client = LlmClient::deepseek("sk-...").unwrap();
    /// ```
    pub fn deepseek(api_key: &str) -> Result<Self, LlmConnectorError> {
        let provider = crate::providers::deepseek(api_key)?;
        Ok(Self::from_provider(Arc::new(provider)))
    }

    /// Create a DeepSeek client with custom configuration
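    ///
    /// # Example
    /// A minimal sketch; the longer timeout for reasoning models is an illustrative choice:
    /// ```rust,no_run
    /// use llm_connector::LlmClient;
    ///
    /// let client = LlmClient::deepseek_with_config(
    ///     "sk-...",
    ///     None,       // keep the default base URL
    ///     Some(300),  // generous timeout for slow reasoning responses
    ///     None,       // no proxy
    /// ).unwrap();
    /// ```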
    pub fn deepseek_with_config(
        api_key: &str,
        base_url: Option<&str>,
        timeout_secs: Option<u64>,
        proxy: Option<&str>,
    ) -> Result<Self, LlmConnectorError> {
        let provider = crate::providers::deepseek_with_config(
            api_key,
            base_url,
            timeout_secs,
            proxy,
        )?;
        Ok(Self::from_provider(Arc::new(provider)))
    }

    // ============================================================================
    // Advanced constructors - custom configuration
    // ============================================================================

    /// Create an OpenAI client with custom configuration
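    ///
    /// # Example
    /// A minimal sketch; all three options are set at once purely for illustration:
    /// ```rust,no_run
    /// use llm_connector::LlmClient;
    ///
    /// let client = LlmClient::openai_with_config(
    ///     "sk-...",
    ///     Some("https://api.openai.com"),  // explicit base URL (placeholder)
    ///     Some(30),                        // 30-second timeout
    ///     Some("http://127.0.0.1:7890"),   // illustrative local proxy
    /// ).unwrap();
    /// ```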
    pub fn openai_with_config(
        api_key: &str,
        base_url: Option<&str>,
        timeout_secs: Option<u64>,
        proxy: Option<&str>,
    ) -> Result<Self, LlmConnectorError> {
        let provider =
            crate::providers::openai_with_config(api_key, base_url, timeout_secs, proxy)?;
        Ok(Self::from_provider(Arc::new(provider)))
    }

    /// Create an Aliyun client with custom configuration
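    ///
    /// # Example
    /// A minimal sketch with illustrative values:
    /// ```rust,no_run
    /// use llm_connector::LlmClient;
    ///
    /// let client = LlmClient::aliyun_with_config(
    ///     "sk-...",
    ///     None,      // keep the default DashScope base URL
    ///     Some(60),  // 60-second timeout
    ///     None,      // no proxy
    /// ).unwrap();
    /// ```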
    pub fn aliyun_with_config(
        api_key: &str,
        base_url: Option<&str>,
        timeout_secs: Option<u64>,
        proxy: Option<&str>,
    ) -> Result<Self, LlmConnectorError> {
        let provider =
            crate::providers::aliyun_with_config(api_key, base_url, timeout_secs, proxy)?;
        Ok(Self::from_provider(Arc::new(provider)))
    }

    /// Create an Aliyun international-edition client
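    ///
    /// # Example
    /// A minimal sketch; the region identifier is a hypothetical value, so consult the
    /// provider documentation for the accepted region names:
    /// ```rust,no_run
    /// use llm_connector::LlmClient;
    ///
    /// let client = LlmClient::aliyun_international("sk-...", "ap-southeast-1").unwrap();
    /// ```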
    pub fn aliyun_international(api_key: &str, region: &str) -> Result<Self, LlmConnectorError> {
        let provider = crate::providers::aliyun_international(api_key, region)?;
        Ok(Self::from_provider(Arc::new(provider)))
    }

    /// Create an Aliyun private-cloud client
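    ///
    /// # Example
    /// A minimal sketch; the URL points at a hypothetical private deployment:
    /// ```rust,no_run
    /// use llm_connector::LlmClient;
    ///
    /// let client = LlmClient::aliyun_private("sk-...", "https://dashscope.internal.example.com").unwrap();
    /// ```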
    pub fn aliyun_private(api_key: &str, base_url: &str) -> Result<Self, LlmConnectorError> {
        let provider = crate::providers::aliyun_private(api_key, base_url)?;
        Ok(Self::from_provider(Arc::new(provider)))
    }

    /// Create an Aliyun client with a custom timeout
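    ///
    /// # Example
    /// A minimal sketch; 120 seconds is an arbitrary illustrative timeout:
    /// ```rust,no_run
    /// use llm_connector::LlmClient;
    ///
    /// let client = LlmClient::aliyun_with_timeout("sk-...", 120).unwrap();
    /// ```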
    pub fn aliyun_with_timeout(
        api_key: &str,
        timeout_secs: u64,
    ) -> Result<Self, LlmConnectorError> {
        let provider = crate::providers::aliyun_with_timeout(api_key, timeout_secs)?;
        Ok(Self::from_provider(Arc::new(provider)))
    }

    /// Create an Anthropic client with custom configuration
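    ///
    /// # Example
    /// A minimal sketch with illustrative values:
    /// ```rust,no_run
    /// use llm_connector::LlmClient;
    ///
    /// let client = LlmClient::anthropic_with_config(
    ///     "sk-ant-...",
    ///     None,      // keep the default base URL
    ///     Some(60),  // 60-second timeout
    ///     None,      // no proxy
    /// ).unwrap();
    /// ```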
    pub fn anthropic_with_config(
        api_key: &str,
        base_url: Option<&str>,
        timeout_secs: Option<u64>,
        proxy: Option<&str>,
    ) -> Result<Self, LlmConnectorError> {
        let provider =
            crate::providers::anthropic_with_config(api_key, base_url, timeout_secs, proxy)?;
        Ok(Self::from_provider(Arc::new(provider)))
    }

    /// Create an Anthropic Vertex AI client
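    ///
    /// # Example
    /// A minimal sketch; the project, location, and token are hypothetical values:
    /// ```rust,no_run
    /// use llm_connector::LlmClient;
    ///
    /// let client = LlmClient::anthropic_vertex(
    ///     "my-gcp-project",  // Google Cloud project ID
    ///     "us-central1",     // Vertex AI location
    ///     "ya29....",        // OAuth2 access token
    /// ).unwrap();
    /// ```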
    pub fn anthropic_vertex(
        project_id: &str,
        location: &str,
        access_token: &str,
    ) -> Result<Self, LlmConnectorError> {
        let provider = crate::providers::anthropic_vertex(project_id, location, access_token)?;
        Ok(Self::from_provider(Arc::new(provider)))
    }

    /// Create an Anthropic AWS Bedrock client
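    ///
    /// # Example
    /// A minimal sketch; the region and credentials are hypothetical values:
    /// ```rust,no_run
    /// use llm_connector::LlmClient;
    ///
    /// let client = LlmClient::anthropic_bedrock(
    ///     "us-east-1",        // AWS region
    ///     "AKIA...",          // AWS access key ID
    ///     "your-secret-key",  // AWS secret access key
    /// ).unwrap();
    /// ```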
    pub fn anthropic_bedrock(
        region: &str,
        access_key: &str,
        secret_key: &str,
    ) -> Result<Self, LlmConnectorError> {
        let provider = crate::providers::anthropic_bedrock(region, access_key, secret_key)?;
        Ok(Self::from_provider(Arc::new(provider)))
    }

    /// Create an Anthropic client with a custom timeout
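    ///
    /// # Example
    /// A minimal sketch; 90 seconds is an arbitrary illustrative timeout:
    /// ```rust,no_run
    /// use llm_connector::LlmClient;
    ///
    /// let client = LlmClient::anthropic_with_timeout("sk-ant-...", 90).unwrap();
    /// ```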
    pub fn anthropic_with_timeout(
        api_key: &str,
        timeout_secs: u64,
    ) -> Result<Self, LlmConnectorError> {
        let provider = crate::providers::anthropic_with_timeout(api_key, timeout_secs)?;
        Ok(Self::from_provider(Arc::new(provider)))
    }

    /// Create a Zhipu client with custom configuration
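    ///
    /// # Example
    /// A minimal sketch; `true` selects the OpenAI-compatible mode, and the other values are illustrative:
    /// ```rust,no_run
    /// use llm_connector::LlmClient;
    ///
    /// let client = LlmClient::zhipu_with_config(
    ///     "your-api-key",
    ///     true,      // use the OpenAI-compatible endpoint
    ///     None,      // keep the default base URL
    ///     Some(60),  // 60-second timeout
    ///     None,      // no proxy
    /// ).unwrap();
    /// ```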
    pub fn zhipu_with_config(
        api_key: &str,
        openai_compatible: bool,
        base_url: Option<&str>,
        timeout_secs: Option<u64>,
        proxy: Option<&str>,
    ) -> Result<Self, LlmConnectorError> {
        let provider = crate::providers::zhipu_with_config(
            api_key,
            openai_compatible,
            base_url,
            timeout_secs,
            proxy,
        )?;
        Ok(Self::from_provider(Arc::new(provider)))
    }

    /// Create a Zhipu client with a custom timeout
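    ///
    /// # Example
    /// A minimal sketch; 60 seconds is an arbitrary illustrative timeout:
    /// ```rust,no_run
    /// use llm_connector::LlmClient;
    ///
    /// let client = LlmClient::zhipu_with_timeout("your-api-key", 60).unwrap();
    /// ```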
    pub fn zhipu_with_timeout(api_key: &str, timeout_secs: u64) -> Result<Self, LlmConnectorError> {
        let provider = crate::providers::zhipu_with_timeout(api_key, timeout_secs)?;
        Ok(Self::from_provider(Arc::new(provider)))
    }

    /// Create a Zhipu enterprise-edition client
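    ///
    /// # Example
    /// A minimal sketch; the URL points at a hypothetical enterprise deployment:
    /// ```rust,no_run
    /// use llm_connector::LlmClient;
    ///
    /// let client = LlmClient::zhipu_enterprise("your-api-key", "https://glm.example-enterprise.com").unwrap();
    /// ```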
    pub fn zhipu_enterprise(api_key: &str, base_url: &str) -> Result<Self, LlmConnectorError> {
        let provider = crate::providers::zhipu_enterprise(api_key, base_url)?;
        Ok(Self::from_provider(Arc::new(provider)))
    }

    /// Create an Ollama client with custom configuration
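    ///
    /// # Example
    /// A minimal sketch; the host and timeout are illustrative:
    /// ```rust,no_run
    /// use llm_connector::LlmClient;
    ///
    /// let client = LlmClient::ollama_with_config(
    ///     "http://192.168.1.100:11434",  // remote Ollama host
    ///     Some(300),                     // long timeout for large local models
    ///     None,                          // no proxy
    /// ).unwrap();
    /// ```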
    pub fn ollama_with_config(
        base_url: &str,
        timeout_secs: Option<u64>,
        proxy: Option<&str>,
    ) -> Result<Self, LlmConnectorError> {
        let provider = crate::providers::ollama_with_config(base_url, timeout_secs, proxy)?;
        Ok(Self::from_provider(Arc::new(provider)))
    }

    /// Get the provider name
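    ///
    /// # Example
    /// A minimal sketch; the printed name depends on the underlying provider:
    /// ```rust,no_run
    /// use llm_connector::LlmClient;
    ///
    /// let client = LlmClient::openai("sk-...").unwrap();
    /// println!("provider: {}", client.provider_name());
    /// ```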
    pub fn provider_name(&self) -> &str {
        self.provider.name()
    }

    /// Send a chat completion request
    ///
    /// # Arguments
    /// - `request`: the chat request
    ///
    /// # Returns
    /// The chat response
    ///
    /// # Example
    /// ```rust,no_run
    /// use llm_connector::LlmClient;
    /// use llm_connector::types::{ChatRequest, Message};
    ///
    /// #[tokio::main]
    /// async fn main() -> Result<(), Box<dyn std::error::Error>> {
    ///     let client = LlmClient::openai("sk-...")?;
    ///
    ///     let request = ChatRequest {
    ///         model: "gpt-4".to_string(),
    ///         messages: vec![Message::user("Hello!")],
    ///         ..Default::default()
    ///     };
    ///
    ///     let response = client.chat(&request).await?;
    ///     println!("Response: {}", response.content);
    ///
    ///     Ok(())
    /// }
    /// ```
    pub async fn chat(&self, request: &ChatRequest) -> Result<ChatResponse, LlmConnectorError> {
        self.provider.chat(request).await
    }

    /// Send a streaming chat completion request
    ///
    /// # Arguments
    /// - `request`: the chat request
    ///
    /// # Returns
    /// A chat stream
    ///
    /// # Example
    /// ```rust,no_run
    /// use llm_connector::LlmClient;
    /// use llm_connector::types::{ChatRequest, Message};
    /// use futures_util::StreamExt;
    ///
    /// #[tokio::main]
    /// async fn main() -> Result<(), Box<dyn std::error::Error>> {
    ///     let client = LlmClient::openai("sk-...")?;
    ///
    ///     let request = ChatRequest {
    ///         model: "gpt-4".to_string(),
    ///         messages: vec![Message::user("Hello!")],
    ///         stream: Some(true),
    ///         ..Default::default()
    ///     };
    ///
    ///     let mut stream = client.chat_stream(&request).await?;
    ///     while let Some(chunk) = stream.next().await {
    ///         let chunk = chunk?;
    ///         if let Some(content) = chunk.get_content() {
    ///             print!("{}", content);
    ///         }
    ///     }
    ///
    ///     Ok(())
    /// }
    /// ```
    #[cfg(feature = "streaming")]
    pub async fn chat_stream(
        &self,
        request: &ChatRequest,
    ) -> Result<ChatStream, LlmConnectorError> {
        self.provider.chat_stream(request).await
    }

    /// Get the list of available models
    ///
    /// # Returns
    /// A list of model names
    ///
    /// # Example
    /// ```rust,no_run
    /// use llm_connector::LlmClient;
    ///
    /// #[tokio::main]
    /// async fn main() -> Result<(), Box<dyn std::error::Error>> {
    ///     let client = LlmClient::openai("sk-...")?;
    ///
    ///     let models = client.models().await?;
    ///     for model in models {
    ///         println!("Available model: {}", model);
    ///     }
    ///
    ///     Ok(())
    /// }
    /// ```
    pub async fn models(&self) -> Result<Vec<String>, LlmConnectorError> {
        self.provider.models().await
    }

    /// Get a reference to the underlying provider (for access to provider-specific features)
    ///
    /// # Example
    /// ```rust,no_run
    /// use llm_connector::LlmClient;
    ///
    /// let client = LlmClient::openai("sk-...").unwrap();
    /// let provider = client.provider();
    ///
    /// // The reference can be downcast to access provider-specific functionality
    /// ```
    pub fn provider(&self) -> &dyn Provider {
        self.provider.as_ref()
    }

    // ============================================================================
    // Type-safe provider downcasting methods
    // ============================================================================

    /// Try to downcast the client to an `OllamaProvider`
    ///
    /// # Returns
    /// `Some` with a reference if the underlying provider is an `OllamaProvider`, otherwise `None`
    ///
    /// # Example
    /// ```rust,no_run
    /// use llm_connector::LlmClient;
    ///
    /// # async fn example() -> Result<(), Box<dyn std::error::Error>> {
    /// let client = LlmClient::ollama()?;
    /// if let Some(_ollama) = client.as_ollama() {
    ///     // Ollama-specific functionality is available here
    /// }
    /// # Ok(())
    /// # }
    /// ```
    pub fn as_ollama(&self) -> Option<&crate::providers::OllamaProvider> {
        self.provider
            .as_any()
            .downcast_ref::<crate::providers::OllamaProvider>()
    }

    /// Try to downcast the client to an `OpenAIProvider`
    pub fn as_openai(&self) -> Option<&crate::providers::OpenAIProvider> {
        self.provider
            .as_any()
            .downcast_ref::<crate::providers::OpenAIProvider>()
    }

    /// Try to downcast the client to an `AliyunProvider`
    pub fn as_aliyun(&self) -> Option<&crate::providers::AliyunProvider> {
        self.provider
            .as_any()
            .downcast_ref::<crate::providers::AliyunProvider>()
    }

    /// Try to downcast the client to an `AnthropicProvider`
    pub fn as_anthropic(&self) -> Option<&crate::providers::AnthropicProvider> {
        self.provider
            .as_any()
            .downcast_ref::<crate::providers::AnthropicProvider>()
    }

    /// Try to downcast the client to a `ZhipuProvider`
    pub fn as_zhipu(&self) -> Option<&crate::providers::ZhipuProvider> {
        self.provider
            .as_any()
            .downcast_ref::<crate::providers::ZhipuProvider>()
    }
}

impl Clone for LlmClient {
    fn clone(&self) -> Self {
        Self {
            provider: Arc::clone(&self.provider),
        }
    }
}

impl std::fmt::Debug for LlmClient {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("LlmClient")
            .field("provider", &self.provider.name())
            .finish()
    }
}