pub struct LlmClient { /* private fields */ }

Expand description
统一LLM客户端
这个客户端提供统一的接口来访问各种LLM服务, 使用V2架构的清晰抽象层。
§示例
use llm_connector::{LlmClient, types::{ChatRequest, Message, Role}};
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
// 创建OpenAI客户端
let client = LlmClient::openai("sk-...")?;
// 创建请求
let request = ChatRequest {
model: "gpt-4".to_string(),
messages: vec![Message::text(Role::User, "Hello, how are you?")],
..Default::default()
};
// 发送请求
let response = client.chat(&request).await?;
println!("Response: {}", response.content);
Ok(())
}

Implementations§
Source§
impl LlmClient
impl LlmClient
Source
pub fn from_provider(provider: Arc<dyn Provider>) -> Self
pub fn from_provider(provider: Arc<dyn Provider>) -> Self
从任何Provider创建客户端
Source
pub fn openai(api_key: &str) -> Result<Self, LlmConnectorError>
pub fn openai(api_key: &str) -> Result<Self, LlmConnectorError>
Source
pub fn openai_with_base_url(
api_key: &str,
base_url: &str,
) -> Result<Self, LlmConnectorError>
pub fn openai_with_base_url( api_key: &str, base_url: &str, ) -> Result<Self, LlmConnectorError>
Source
pub fn azure_openai(
api_key: &str,
endpoint: &str,
api_version: &str,
) -> Result<Self, LlmConnectorError>
pub fn azure_openai( api_key: &str, endpoint: &str, api_version: &str, ) -> Result<Self, LlmConnectorError>
Source
pub fn aliyun(api_key: &str) -> Result<Self, LlmConnectorError>
pub fn aliyun(api_key: &str) -> Result<Self, LlmConnectorError>
Source
pub fn anthropic(api_key: &str) -> Result<Self, LlmConnectorError>
pub fn anthropic(api_key: &str) -> Result<Self, LlmConnectorError>
Source
pub fn zhipu(api_key: &str) -> Result<Self, LlmConnectorError>
pub fn zhipu(api_key: &str) -> Result<Self, LlmConnectorError>
Source
pub fn zhipu_openai_compatible(api_key: &str) -> Result<Self, LlmConnectorError>
pub fn zhipu_openai_compatible(api_key: &str) -> Result<Self, LlmConnectorError>
Source
pub fn ollama() -> Result<Self, LlmConnectorError>
pub fn ollama() -> Result<Self, LlmConnectorError>
Source
pub fn ollama_with_base_url(base_url: &str) -> Result<Self, LlmConnectorError>
pub fn ollama_with_base_url(base_url: &str) -> Result<Self, LlmConnectorError>
Source
pub fn openai_compatible(
api_key: &str,
base_url: &str,
service_name: &str,
) -> Result<Self, LlmConnectorError>
pub fn openai_compatible( api_key: &str, base_url: &str, service_name: &str, ) -> Result<Self, LlmConnectorError>
创建OpenAI兼容服务客户端
§参数
api_key: API密钥
base_url: 服务基础URL
service_name: 服务名称
§示例
use llm_connector::LlmClient;
// DeepSeek
let deepseek = LlmClient::openai_compatible(
"sk-...",
"https://api.deepseek.com",
"deepseek"
).unwrap();
// Moonshot
let moonshot = LlmClient::openai_compatible(
"sk-...",
"https://api.moonshot.cn",
"moonshot"
).unwrap();
// LongCat (OpenAI format)
let longcat = LlmClient::openai_compatible(
"ak_...",
"https://api.longcat.chat/openai",
"longcat"
).unwrap();

Source
pub fn longcat_anthropic(api_key: &str) -> Result<Self, LlmConnectorError>
pub fn longcat_anthropic(api_key: &str) -> Result<Self, LlmConnectorError>
Source
pub fn longcat_anthropic_with_config(
api_key: &str,
base_url: Option<&str>,
timeout_secs: Option<u64>,
proxy: Option<&str>,
) -> Result<Self, LlmConnectorError>
pub fn longcat_anthropic_with_config( api_key: &str, base_url: Option<&str>, timeout_secs: Option<u64>, proxy: Option<&str>, ) -> Result<Self, LlmConnectorError>
创建带有自定义配置的LongCat Anthropic客户端
Source
pub fn volcengine(api_key: &str) -> Result<Self, LlmConnectorError>
pub fn volcengine(api_key: &str) -> Result<Self, LlmConnectorError>
Source
pub fn volcengine_with_config(
api_key: &str,
base_url: Option<&str>,
timeout_secs: Option<u64>,
proxy: Option<&str>,
) -> Result<Self, LlmConnectorError>
pub fn volcengine_with_config( api_key: &str, base_url: Option<&str>, timeout_secs: Option<u64>, proxy: Option<&str>, ) -> Result<Self, LlmConnectorError>
创建带有自定义配置的火山引擎客户端
Source
pub fn tencent(api_key: &str) -> Result<Self, LlmConnectorError>
pub fn tencent(api_key: &str) -> Result<Self, LlmConnectorError>
Source
pub fn tencent_with_config(
api_key: &str,
base_url: Option<&str>,
timeout_secs: Option<u64>,
proxy: Option<&str>,
) -> Result<Self, LlmConnectorError>
pub fn tencent_with_config( api_key: &str, base_url: Option<&str>, timeout_secs: Option<u64>, proxy: Option<&str>, ) -> Result<Self, LlmConnectorError>
创建带有自定义配置的腾讯云混元客户端
Source
pub fn moonshot(api_key: &str) -> Result<Self, LlmConnectorError>
pub fn moonshot(api_key: &str) -> Result<Self, LlmConnectorError>
Source
pub fn moonshot_with_config(
api_key: &str,
base_url: Option<&str>,
timeout_secs: Option<u64>,
proxy: Option<&str>,
) -> Result<Self, LlmConnectorError>
pub fn moonshot_with_config( api_key: &str, base_url: Option<&str>, timeout_secs: Option<u64>, proxy: Option<&str>, ) -> Result<Self, LlmConnectorError>
创建带有自定义配置的 Moonshot 客户端
Source
pub fn deepseek(api_key: &str) -> Result<Self, LlmConnectorError>
pub fn deepseek(api_key: &str) -> Result<Self, LlmConnectorError>
Source
pub fn deepseek_with_config(
api_key: &str,
base_url: Option<&str>,
timeout_secs: Option<u64>,
proxy: Option<&str>,
) -> Result<Self, LlmConnectorError>
pub fn deepseek_with_config( api_key: &str, base_url: Option<&str>, timeout_secs: Option<u64>, proxy: Option<&str>, ) -> Result<Self, LlmConnectorError>
创建带有自定义配置的 DeepSeek 客户端
Source
pub fn openai_with_config(
api_key: &str,
base_url: Option<&str>,
timeout_secs: Option<u64>,
proxy: Option<&str>,
) -> Result<Self, LlmConnectorError>
pub fn openai_with_config( api_key: &str, base_url: Option<&str>, timeout_secs: Option<u64>, proxy: Option<&str>, ) -> Result<Self, LlmConnectorError>
创建带有自定义配置的OpenAI客户端
Source
pub fn aliyun_with_config(
api_key: &str,
base_url: Option<&str>,
timeout_secs: Option<u64>,
proxy: Option<&str>,
) -> Result<Self, LlmConnectorError>
pub fn aliyun_with_config( api_key: &str, base_url: Option<&str>, timeout_secs: Option<u64>, proxy: Option<&str>, ) -> Result<Self, LlmConnectorError>
创建带有自定义配置的Aliyun客户端
Source
pub fn aliyun_international(
api_key: &str,
region: &str,
) -> Result<Self, LlmConnectorError>
pub fn aliyun_international( api_key: &str, region: &str, ) -> Result<Self, LlmConnectorError>
创建Aliyun国际版客户端
Source
pub fn aliyun_private(
api_key: &str,
base_url: &str,
) -> Result<Self, LlmConnectorError>
pub fn aliyun_private( api_key: &str, base_url: &str, ) -> Result<Self, LlmConnectorError>
创建Aliyun专有云客户端
Source
pub fn aliyun_with_timeout(
api_key: &str,
timeout_secs: u64,
) -> Result<Self, LlmConnectorError>
pub fn aliyun_with_timeout( api_key: &str, timeout_secs: u64, ) -> Result<Self, LlmConnectorError>
创建带有自定义超时的Aliyun客户端
Source
pub fn anthropic_with_config(
api_key: &str,
base_url: Option<&str>,
timeout_secs: Option<u64>,
proxy: Option<&str>,
) -> Result<Self, LlmConnectorError>
pub fn anthropic_with_config( api_key: &str, base_url: Option<&str>, timeout_secs: Option<u64>, proxy: Option<&str>, ) -> Result<Self, LlmConnectorError>
创建带有自定义配置的Anthropic客户端
Source
pub fn anthropic_vertex(
project_id: &str,
location: &str,
access_token: &str,
) -> Result<Self, LlmConnectorError>
pub fn anthropic_vertex( project_id: &str, location: &str, access_token: &str, ) -> Result<Self, LlmConnectorError>
创建Anthropic Vertex AI客户端
Source
pub fn anthropic_bedrock(
region: &str,
access_key: &str,
secret_key: &str,
) -> Result<Self, LlmConnectorError>
pub fn anthropic_bedrock( region: &str, access_key: &str, secret_key: &str, ) -> Result<Self, LlmConnectorError>
创建Anthropic AWS Bedrock客户端
Source
pub fn anthropic_with_timeout(
api_key: &str,
timeout_secs: u64,
) -> Result<Self, LlmConnectorError>
pub fn anthropic_with_timeout( api_key: &str, timeout_secs: u64, ) -> Result<Self, LlmConnectorError>
创建带有自定义超时的Anthropic客户端
Source
pub fn zhipu_with_config(
api_key: &str,
openai_compatible: bool,
base_url: Option<&str>,
timeout_secs: Option<u64>,
proxy: Option<&str>,
) -> Result<Self, LlmConnectorError>
pub fn zhipu_with_config( api_key: &str, openai_compatible: bool, base_url: Option<&str>, timeout_secs: Option<u64>, proxy: Option<&str>, ) -> Result<Self, LlmConnectorError>
创建带有自定义配置的Zhipu客户端
Source
pub fn zhipu_with_timeout(
api_key: &str,
timeout_secs: u64,
) -> Result<Self, LlmConnectorError>
pub fn zhipu_with_timeout( api_key: &str, timeout_secs: u64, ) -> Result<Self, LlmConnectorError>
创建带有自定义超时的Zhipu客户端
Source
pub fn zhipu_enterprise(
api_key: &str,
base_url: &str,
) -> Result<Self, LlmConnectorError>
pub fn zhipu_enterprise( api_key: &str, base_url: &str, ) -> Result<Self, LlmConnectorError>
创建Zhipu企业版客户端
Source
pub fn ollama_with_config(
base_url: &str,
timeout_secs: Option<u64>,
proxy: Option<&str>,
) -> Result<Self, LlmConnectorError>
pub fn ollama_with_config( base_url: &str, timeout_secs: Option<u64>, proxy: Option<&str>, ) -> Result<Self, LlmConnectorError>
创建带有自定义配置的Ollama客户端
Source
pub fn provider_name(&self) -> &str
pub fn provider_name(&self) -> &str
获取提供商名称
Source
pub async fn chat(
&self,
request: &ChatRequest,
) -> Result<ChatResponse, LlmConnectorError>
pub async fn chat( &self, request: &ChatRequest, ) -> Result<ChatResponse, LlmConnectorError>
发送聊天完成请求
§参数
request: 聊天请求
§返回
聊天响应
§示例
use llm_connector::LlmClient;
use llm_connector::types::{ChatRequest, Message};
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
let client = LlmClient::openai("sk-...")?;
let request = ChatRequest {
model: "gpt-4".to_string(),
messages: vec![Message::user("Hello!")],
..Default::default()
};
let response = client.chat(&request).await?;
println!("Response: {}", response.content);
Ok(())
}

Source
pub fn provider(&self) -> &dyn Provider
pub fn provider(&self) -> &dyn Provider
获取底层提供商的引用 (用于特殊功能访问)
§示例
use llm_connector::LlmClient;
let client = LlmClient::openai("sk-...").unwrap();
let provider = client.provider();
// 可以进行类型转换以访问特定提供商的功能

Source
pub fn as_ollama(&self) -> Option<&OllamaProvider>
pub fn as_ollama(&self) -> Option<&OllamaProvider>
Sourcepub fn as_openai(&self) -> Option<&OpenAIProvider>
pub fn as_openai(&self) -> Option<&OpenAIProvider>
尝试将客户端转换为OpenAIProvider
Sourcepub fn as_aliyun(&self) -> Option<&AliyunProvider>
pub fn as_aliyun(&self) -> Option<&AliyunProvider>
尝试将客户端转换为AliyunProvider
Sourcepub fn as_anthropic(&self) -> Option<&AnthropicProvider>
pub fn as_anthropic(&self) -> Option<&AnthropicProvider>
尝试将客户端转换为AnthropicProvider
Sourcepub fn as_zhipu(&self) -> Option<&ZhipuProvider>
pub fn as_zhipu(&self) -> Option<&ZhipuProvider>
尝试将客户端转换为ZhipuProvider
Trait Implementations§
Auto Trait Implementations§
impl Freeze for LlmClient
impl !RefUnwindSafe for LlmClient
impl Send for LlmClient
impl Sync for LlmClient
impl Unpin for LlmClient
impl !UnwindSafe for LlmClient
Blanket Implementations§
Source§
impl<T> BorrowMut<T> for T
where
    T: ?Sized,
impl<T> BorrowMut<T> for T
where
    T: ?Sized,
Source§
fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value. Read more