Struct LlmClient

pub struct LlmClient { /* private fields */ }

Unified LLM client.

This client provides a single interface for accessing a variety of LLM services, built on the clean abstraction layers of the V2 architecture.

§Example

use llm_connector::{LlmClient, types::{ChatRequest, Message, Role}};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // 创建OpenAI客户端
    let client = LlmClient::openai("sk-...")?;

    // 创建请求
    let request = ChatRequest {
        model: "gpt-4".to_string(),
        messages: vec![Message::text(Role::User, "Hello, how are you?")],
        ..Default::default()
    };

    // 发送请求
    let response = client.chat(&request).await?;
    println!("Response: {}", response.content);

    Ok(())
}

Implementations§

impl LlmClient

pub fn from_provider(provider: Arc<dyn Provider>) -> Self

Create a client from any Provider implementation.

pub fn openai(api_key: &str) -> Result<Self, LlmConnectorError>

Create an OpenAI client.

§Parameters
  • api_key: OpenAI API key
§Example
use llm_connector::LlmClient;

let client = LlmClient::openai("sk-...").unwrap();

pub fn openai_with_base_url( api_key: &str, base_url: &str, ) -> Result<Self, LlmConnectorError>

Create an OpenAI client with a custom base URL.

§Parameters
  • api_key: API key
  • base_url: custom base URL
§Example
use llm_connector::LlmClient;

let client = LlmClient::openai_with_base_url(
    "sk-...",
    "https://api.deepseek.com"
).unwrap();

pub fn azure_openai( api_key: &str, endpoint: &str, api_version: &str, ) -> Result<Self, LlmConnectorError>

Create an Azure OpenAI client.

§Parameters
  • api_key: Azure OpenAI API key
  • endpoint: Azure OpenAI endpoint
  • api_version: API version
§Example
use llm_connector::LlmClient;

let client = LlmClient::azure_openai(
    "your-api-key",
    "https://your-resource.openai.azure.com",
    "2024-02-15-preview"
).unwrap();

pub fn aliyun(api_key: &str) -> Result<Self, LlmConnectorError>

Create an Alibaba Cloud DashScope client.

§Parameters
  • api_key: Alibaba Cloud DashScope API key
§Example
use llm_connector::LlmClient;

let client = LlmClient::aliyun("sk-...").unwrap();

pub fn anthropic(api_key: &str) -> Result<Self, LlmConnectorError>

Create an Anthropic Claude client.

§Parameters
  • api_key: Anthropic API key (format: sk-ant-…)
§Example
use llm_connector::LlmClient;

let client = LlmClient::anthropic("sk-ant-...").unwrap();

pub fn zhipu(api_key: &str) -> Result<Self, LlmConnectorError>

Create a Zhipu GLM client.

§Parameters
  • api_key: Zhipu GLM API key
§Example
use llm_connector::LlmClient;

let client = LlmClient::zhipu("your-api-key").unwrap();

pub fn zhipu_openai_compatible(api_key: &str) -> Result<Self, LlmConnectorError>

Create a Zhipu GLM client (OpenAI-compatible mode).

§Parameters
  • api_key: Zhipu GLM API key
§Example
use llm_connector::LlmClient;

let client = LlmClient::zhipu_openai_compatible("your-api-key").unwrap();

pub fn ollama() -> Result<Self, LlmConnectorError>

Create an Ollama client (default local address).

§Example
use llm_connector::LlmClient;

let client = LlmClient::ollama().unwrap();

pub fn ollama_with_base_url(base_url: &str) -> Result<Self, LlmConnectorError>

Create an Ollama client with a custom URL.

§Parameters
  • base_url: URL of the Ollama service
§Example
use llm_connector::LlmClient;

let client = LlmClient::ollama_with_base_url("http://192.168.1.100:11434").unwrap();

pub fn openai_compatible( api_key: &str, base_url: &str, service_name: &str, ) -> Result<Self, LlmConnectorError>

Create a client for an OpenAI-compatible service.

§Parameters
  • api_key: API key
  • base_url: base URL of the service
  • service_name: service name
§Example
use llm_connector::LlmClient;

// DeepSeek
let deepseek = LlmClient::openai_compatible(
    "sk-...",
    "https://api.deepseek.com",
    "deepseek"
).unwrap();

// Moonshot
let moonshot = LlmClient::openai_compatible(
    "sk-...",
    "https://api.moonshot.cn",
    "moonshot"
).unwrap();

// LongCat (OpenAI format)
let longcat = LlmClient::openai_compatible(
    "ak_...",
    "https://api.longcat.chat/openai",
    "longcat"
).unwrap();

pub fn longcat_anthropic(api_key: &str) -> Result<Self, LlmConnectorError>

Create a LongCat client in Anthropic format.

LongCat's Anthropic endpoint uses Bearer authentication rather than the standard x-api-key authentication.

§Parameters
  • api_key: LongCat API key (format: ak_…)
§Example
use llm_connector::LlmClient;

let client = LlmClient::longcat_anthropic("ak_...").unwrap();

pub fn longcat_anthropic_with_config( api_key: &str, base_url: Option<&str>, timeout_secs: Option<u64>, proxy: Option<&str>, ) -> Result<Self, LlmConnectorError>

Create a LongCat Anthropic client with a custom configuration.
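§Example
A minimal sketch based on the signature above; the key and timeout values are placeholders, and passing None is assumed to fall back to the default settings:
use llm_connector::LlmClient;

// Keep the default base URL and no proxy, but raise the request timeout.
let client = LlmClient::longcat_anthropic_with_config(
    "ak_...",   // api_key
    None,       // base_url: use the default LongCat endpoint
    Some(120),  // timeout_secs
    None,       // proxy
).unwrap();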

pub fn volcengine(api_key: &str) -> Result<Self, LlmConnectorError>

Create a Volcengine client.

Volcengine uses the OpenAI-compatible API format, but with a different endpoint path.

§Parameters
  • api_key: Volcengine API key (UUID format)
§Example
use llm_connector::LlmClient;

let client = LlmClient::volcengine("26f962bd-450e-4876-bc32-a732e6da9cd2").unwrap();

pub fn volcengine_with_config( api_key: &str, base_url: Option<&str>, timeout_secs: Option<u64>, proxy: Option<&str>, ) -> Result<Self, LlmConnectorError>

Create a Volcengine client with a custom configuration.
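§Example
A sketch based on the signature above; the key is the same placeholder UUID used in the volcengine example and the proxy URL is an arbitrary local address:
use llm_connector::LlmClient;

// Route requests through a local HTTP proxy, keeping the other defaults.
let client = LlmClient::volcengine_with_config(
    "26f962bd-450e-4876-bc32-a732e6da9cd2",  // api_key (UUID format)
    None,                                    // base_url
    None,                                    // timeout_secs
    Some("http://127.0.0.1:7890"),           // proxy
).unwrap();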

pub fn tencent(api_key: &str) -> Result<Self, LlmConnectorError>

Create a Tencent Hunyuan client.

Tencent Hunyuan uses the OpenAI-compatible API format.

§Parameters
  • api_key: Tencent Hunyuan API key (format: sk-…)
§Example
use llm_connector::LlmClient;

let client = LlmClient::tencent("sk-...").unwrap();

pub fn tencent_with_config( api_key: &str, base_url: Option<&str>, timeout_secs: Option<u64>, proxy: Option<&str>, ) -> Result<Self, LlmConnectorError>

Create a Tencent Hunyuan client with a custom configuration.

pub fn moonshot(api_key: &str) -> Result<Self, LlmConnectorError>

Create a Moonshot client.

Moonshot uses the OpenAI-compatible API format.

§Parameters
  • api_key: Moonshot API key (format: sk-…)
§Example
use llm_connector::LlmClient;

let client = LlmClient::moonshot("sk-...").unwrap();

pub fn moonshot_with_config( api_key: &str, base_url: Option<&str>, timeout_secs: Option<u64>, proxy: Option<&str>, ) -> Result<Self, LlmConnectorError>

Create a Moonshot client with a custom configuration.

pub fn deepseek(api_key: &str) -> Result<Self, LlmConnectorError>

Create a DeepSeek client.

DeepSeek uses the OpenAI-compatible API format and supports reasoning models.

§Parameters
  • api_key: DeepSeek API key (format: sk-…)
§Example
use llm_connector::LlmClient;

let client = LlmClient::deepseek("sk-...").unwrap();

pub fn deepseek_with_config( api_key: &str, base_url: Option<&str>, timeout_secs: Option<u64>, proxy: Option<&str>, ) -> Result<Self, LlmConnectorError>

Create a DeepSeek client with a custom configuration.

pub fn openai_with_config( api_key: &str, base_url: Option<&str>, timeout_secs: Option<u64>, proxy: Option<&str>, ) -> Result<Self, LlmConnectorError>

Create an OpenAI client with a custom configuration.
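§Example
A sketch based on the signature above; the base URL shown is the one used in the openai_with_base_url example, not a library default, and the timeout is arbitrary:
use llm_connector::LlmClient;

// Point the OpenAI-format client at a compatible endpoint and set a timeout.
let client = LlmClient::openai_with_config(
    "sk-...",                          // api_key
    Some("https://api.deepseek.com"),  // base_url
    Some(60),                          // timeout_secs
    None,                              // proxy
).unwrap();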

pub fn aliyun_with_config( api_key: &str, base_url: Option<&str>, timeout_secs: Option<u64>, proxy: Option<&str>, ) -> Result<Self, LlmConnectorError>

Create an Aliyun client with a custom configuration.

pub fn aliyun_international( api_key: &str, region: &str, ) -> Result<Self, LlmConnectorError>

Create an Aliyun International client.
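§Example
A sketch based on the signature above; "singapore" is a hypothetical region name, check the crate for the region identifiers it actually accepts:
use llm_connector::LlmClient;

let client = LlmClient::aliyun_international("sk-...", "singapore").unwrap();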

pub fn aliyun_private( api_key: &str, base_url: &str, ) -> Result<Self, LlmConnectorError>

Create an Aliyun private-cloud client.

pub fn aliyun_with_timeout( api_key: &str, timeout_secs: u64, ) -> Result<Self, LlmConnectorError>

Create an Aliyun client with a custom timeout.
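§Example
A sketch based on the signature above; 120 is an arbitrary timeout in seconds:
use llm_connector::LlmClient;

// Allow long-running generations up to two minutes.
let client = LlmClient::aliyun_with_timeout("sk-...", 120).unwrap();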

pub fn anthropic_with_config( api_key: &str, base_url: Option<&str>, timeout_secs: Option<u64>, proxy: Option<&str>, ) -> Result<Self, LlmConnectorError>

Create an Anthropic client with a custom configuration.

pub fn anthropic_vertex( project_id: &str, location: &str, access_token: &str, ) -> Result<Self, LlmConnectorError>

Create an Anthropic Vertex AI client.
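§Example
A sketch based on the signature above; the project ID, location, and access token are placeholders for your own Google Cloud values:
use llm_connector::LlmClient;

let client = LlmClient::anthropic_vertex(
    "my-gcp-project",  // project_id
    "us-east5",        // location
    "ya29....",        // access_token (e.g. from `gcloud auth print-access-token`)
).unwrap();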

pub fn anthropic_bedrock( region: &str, access_key: &str, secret_key: &str, ) -> Result<Self, LlmConnectorError>

Create an Anthropic AWS Bedrock client.
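§Example
A sketch based on the signature above; the region and AWS credentials are placeholders:
use llm_connector::LlmClient;

let client = LlmClient::anthropic_bedrock(
    "us-east-1",               // region
    "AKIA...",                 // access_key
    "your-secret-access-key",  // secret_key
).unwrap();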

pub fn anthropic_with_timeout( api_key: &str, timeout_secs: u64, ) -> Result<Self, LlmConnectorError>

Create an Anthropic client with a custom timeout.

pub fn zhipu_with_config( api_key: &str, openai_compatible: bool, base_url: Option<&str>, timeout_secs: Option<u64>, proxy: Option<&str>, ) -> Result<Self, LlmConnectorError>

Create a Zhipu client with a custom configuration.
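§Example
A sketch based on the signature above, combining the OpenAI-compatible flag with a custom timeout; the key and timeout are placeholders:
use llm_connector::LlmClient;

let client = LlmClient::zhipu_with_config(
    "your-api-key",  // api_key
    true,            // openai_compatible: use the OpenAI-compatible mode
    None,            // base_url
    Some(60),        // timeout_secs
    None,            // proxy
).unwrap();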

pub fn zhipu_with_timeout( api_key: &str, timeout_secs: u64, ) -> Result<Self, LlmConnectorError>

Create a Zhipu client with a custom timeout.

pub fn zhipu_enterprise( api_key: &str, base_url: &str, ) -> Result<Self, LlmConnectorError>

Create a Zhipu enterprise client.

pub fn ollama_with_config( base_url: &str, timeout_secs: Option<u64>, proxy: Option<&str>, ) -> Result<Self, LlmConnectorError>

Create an Ollama client with a custom configuration.
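§Example
A sketch based on the signature above, reusing the address from the ollama_with_base_url example; the timeout value is arbitrary:
use llm_connector::LlmClient;

let client = LlmClient::ollama_with_config(
    "http://192.168.1.100:11434",  // base_url
    Some(300),                     // timeout_secs: local models can be slow to load
    None,                          // proxy
).unwrap();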

pub fn provider_name(&self) -> &str

Get the provider name.
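§Example
A small sketch; the exact name string returned for each provider is not specified here:
use llm_connector::LlmClient;

let client = LlmClient::openai("sk-...").unwrap();
println!("Using provider: {}", client.provider_name());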

pub async fn chat( &self, request: &ChatRequest, ) -> Result<ChatResponse, LlmConnectorError>

Send a chat completion request.

§Parameters
  • request: the chat request
§Returns

The chat response.

§Example
use llm_connector::LlmClient;
use llm_connector::types::{ChatRequest, Message};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let client = LlmClient::openai("sk-...")?;

    let request = ChatRequest {
        model: "gpt-4".to_string(),
        messages: vec![Message::user("Hello!")],
        ..Default::default()
    };

    let response = client.chat(&request).await?;
    println!("Response: {}", response.content);

    Ok(())
}

pub async fn models(&self) -> Result<Vec<String>, LlmConnectorError>

List the available models.

§Returns

A list of model names.

§Example
use llm_connector::LlmClient;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let client = LlmClient::openai("sk-...")?;

    let models = client.models().await?;
    for model in models {
        println!("Available model: {}", model);
    }

    Ok(())
}

pub fn provider(&self) -> &dyn Provider

Get a reference to the underlying provider (for access to provider-specific features).

§Example
use llm_connector::LlmClient;

let client = LlmClient::openai("sk-...").unwrap();
let provider = client.provider();

// The provider reference can be downcast to access provider-specific functionality

pub fn as_ollama(&self) -> Option<&OllamaProvider>

Try to downcast the client to an OllamaProvider.

§Returns

A Some reference if the underlying Provider is an OllamaProvider, otherwise None.

§Example
use llm_connector::LlmClient;

let client = LlmClient::ollama().unwrap();
if let Some(_ollama) = client.as_ollama() {
    // Ollama-specific functionality is available here
}

pub fn as_openai(&self) -> Option<&OpenAIProvider>

Try to downcast the client to an OpenAIProvider.
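§Example
A sketch mirroring the as_ollama example; the same pattern applies to as_aliyun, as_anthropic, and as_zhipu:
use llm_connector::LlmClient;

let client = LlmClient::openai("sk-...").unwrap();
if let Some(_openai) = client.as_openai() {
    // OpenAI-specific functionality is available here
}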

pub fn as_aliyun(&self) -> Option<&AliyunProvider>

Try to downcast the client to an AliyunProvider.

pub fn as_anthropic(&self) -> Option<&AnthropicProvider>

Try to downcast the client to an AnthropicProvider.

pub fn as_zhipu(&self) -> Option<&ZhipuProvider>

Try to downcast the client to a ZhipuProvider.

Trait Implementations§

impl Clone for LlmClient

fn clone(&self) -> Self

Returns a duplicate of the value.

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source.

impl Debug for LlmClient

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter.

Auto Trait Implementations§

Blanket Implementations§

impl<T> Any for T
where T: 'static + ?Sized,

fn type_id(&self) -> TypeId

Gets the TypeId of self.

impl<T> Borrow<T> for T
where T: ?Sized,

fn borrow(&self) -> &T

Immutably borrows from an owned value.

impl<T> BorrowMut<T> for T
where T: ?Sized,

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value.

impl<T> CloneToUninit for T
where T: Clone,

unsafe fn clone_to_uninit(&self, dest: *mut u8)

🔬This is a nightly-only experimental API. (clone_to_uninit)
Performs copy-assignment from self to dest.

impl<T> From<T> for T

fn from(t: T) -> T

Returns the argument unchanged.

impl<T> Instrument for T

fn instrument(self, span: Span) -> Instrumented<Self>

Instruments this type with the provided Span, returning an Instrumented wrapper.

fn in_current_span(self) -> Instrumented<Self>

Instruments this type with the current Span, returning an Instrumented wrapper.

impl<T, U> Into<U> for T
where U: From<T>,

fn into(self) -> U

Calls U::from(self).

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

impl<T> ToOwned for T
where T: Clone,

type Owned = T

The resulting type after obtaining ownership.

fn to_owned(&self) -> T

Creates owned data from borrowed data, usually by cloning.

fn clone_into(&self, target: &mut T)

Uses borrowed data to replace owned data, usually by cloning.

impl<T, U> TryFrom<U> for T
where U: Into<T>,

type Error = Infallible

The type returned in the event of a conversion error.

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.

impl<T> WithSubscriber for T

fn with_subscriber<S>(self, subscriber: S) -> WithDispatch<Self>
where S: Into<Dispatch>,

Attaches the provided Subscriber to this type, returning a WithDispatch wrapper.

fn with_current_subscriber(self) -> WithDispatch<Self>

Attaches the current default Subscriber to this type, returning a WithDispatch wrapper.

impl<T> ErasedDestructor for T
where T: 'static,