// a3s_code_core/llm/mod.rs
1//! LLM client abstraction layer
2//!
3//! Provides a unified interface for interacting with LLM providers
4//! (Anthropic Claude, OpenAI, Zhipu AI GLM, and OpenAI-compatible providers).
5
6pub mod anthropic;
7pub mod factory;
8pub mod http;
9pub mod openai;
10mod types;
11pub mod zhipu;
12
13// Re-export public types
14pub use anthropic::AnthropicClient;
15pub use factory::{create_client_with_config, LlmConfig};
16pub use http::{default_http_client, HttpClient, HttpResponse, StreamingHttpResponse};
17pub use openai::OpenAiClient;
18pub use types::*;
19pub use zhipu::ZhipuClient;
20
21use anyhow::Result;
22use async_trait::async_trait;
23use tokio::sync::mpsc;
24
25/// LLM client trait
26#[async_trait]
27pub trait LlmClient: Send + Sync {
28    /// Complete a conversation (non-streaming)
29    async fn complete(
30        &self,
31        messages: &[Message],
32        system: Option<&str>,
33        tools: &[ToolDefinition],
34    ) -> Result<LlmResponse>;
35
36    /// Complete a conversation with streaming
37    /// Returns a receiver for streaming events
38    async fn complete_streaming(
39        &self,
40        messages: &[Message],
41        system: Option<&str>,
42        tools: &[ToolDefinition],
43    ) -> Result<mpsc::Receiver<StreamEvent>>;
44}
45
// Include test modules — these reference internal types via crate paths.
// `#[path]` pulls in the sibling `tests.rs` only for test builds.
#[cfg(test)]
#[path = "tests.rs"]
mod tests_file;