// agent_code_lib/llm/mod.rs
1//! LLM client layer.
2//!
3//! Handles all communication with LLM APIs. Supports OpenAI-compatible
4//! and Anthropic-native APIs with streaming via Server-Sent Events (SSE).
5//!
6//! # Architecture
7//!
8//! - `client` — HTTP client with retry logic and streaming
9//! - `message` — Message types for the conversation protocol
10//! - `stream` — SSE parser that yields `StreamEvent` values
11
12pub mod anthropic;
13pub mod client;
14pub mod message;
15pub mod normalize;
16pub mod openai;
17pub mod provider;
18pub mod retry;
19pub mod stream;