// a3s_code_core/llm/mod.rs
1//! LLM client abstraction layer
2//!
3//! Provides a unified interface for interacting with LLM providers
4//! (Anthropic Claude, OpenAI, and OpenAI-compatible providers).
5
6pub mod anthropic;
7pub mod factory;
8pub mod http;
9pub mod openai;
10mod types;
11
12// Re-export public types
13pub use anthropic::AnthropicClient;
14pub use factory::{create_client_with_config, LlmConfig};
15pub use http::{default_http_client, HttpClient, HttpResponse, StreamingHttpResponse};
16pub use openai::OpenAiClient;
17pub use types::*;
18
19use anyhow::Result;
20use async_trait::async_trait;
21use tokio::sync::mpsc;
22
23/// LLM client trait
24#[async_trait]
25pub trait LlmClient: Send + Sync {
26    /// Complete a conversation (non-streaming)
27    async fn complete(
28        &self,
29        messages: &[Message],
30        system: Option<&str>,
31        tools: &[ToolDefinition],
32    ) -> Result<LlmResponse>;
33
34    /// Complete a conversation with streaming
35    /// Returns a receiver for streaming events
36    async fn complete_streaming(
37        &self,
38        messages: &[Message],
39        system: Option<&str>,
40        tools: &[ToolDefinition],
41    ) -> Result<mpsc::Receiver<StreamEvent>>;
42}
// Include test modules — these reference internal types via crate paths.
// `#[path]` points at a sibling `tests.rs` compiled only for test builds.
#[cfg(test)]
#[path = "tests.rs"]
mod tests_file;