// Copyright (c) 2026 vectorless developers
// SPDX-License-Identifier: Apache-2.0
//! Unified LLM client module.
//!
//! This module provides a unified interface for all LLM operations across the codebase:
//! - **Summarization** — Generating document summaries
//! - **Retrieval** — Document tree navigation
//! - **TOC Processing** — Table of contents extraction
//!
//! # Features
//!
//! - Unified configuration with purpose-specific presets
//! - Automatic retry with exponential backoff
//! - JSON response parsing
//! - Unified error handling
//!
//! # Architecture
//!
//! ```text
//! ┌─────────────────────────────────────────────────────────────────┐
//! │ LlmPool │
//! │ │
//! │ ┌─────────────┐ ┌─────────────┐ ┌─────────────┐ │
//! │ │ summary │ │ retrieval │ │ toc │ │
//! │ │ LlmClient │ │ LlmClient │ │ LlmClient │ │
//! │ └──────┬──────┘ └──────┬──────┘ └──────┬──────┘ │
//! │ │ │ │ │
//! │ └────────────────┼────────────────┘ │
//! │ │ │
//! │ ▼ │
//! │ ┌─────────────────────┐ │
//! │ │ async-openai │ │
//! │ └─────────────────────┘ │
//! └─────────────────────────────────────────────────────────────────┘
//! ```
//!
//! # Example
//!
//! ```rust,no_run
//! use vectorless::llm::{LlmPool, LlmConfig, RetryConfig};
//!
//! # #[tokio::main]
//! # async fn main() -> vectorless::llm::LlmResult<()> {
//! // Create a pool with default configurations
//! let pool = LlmPool::from_defaults();
//!
//! // Use summary client
//! let summary = pool.summary().complete(
//! "You summarize text concisely.",
//! "Long text to summarize..."
//! ).await?;
//!
//! // Use retrieval client with JSON output
//! #[derive(serde::Deserialize)]
//! struct NavDecision { section: usize }
//! let decision: NavDecision = pool.retrieval().complete_json(
//! "You navigate documents.",
//! "Find section about X..."
//! ).await?;
//!
//! # Ok(())
//! # }
//! ```
// Public re-exports forming the module's external API surface.
//
// NOTE(review): these bare paths do not resolve under edition 2018+ — a
// leading bare segment in a `use` declaration names an external crate, so
// `pub use LlmClient;` looks for a crate called `LlmClient`. They likely
// need a `self::<submodule>::` prefix (e.g. `pub use self::client::LlmClient;`)
// — confirm against the submodules this file declares.
//
// NOTE(review): the module docs above reference `LlmPool` and `RetryConfig`,
// but neither is re-exported here — verify whether they should be added to
// this list so the documented example (`use vectorless::llm::{LlmPool, ...}`)
// actually compiles.
pub use LlmClient;
pub use LlmConfig;
pub use LlmResult;
pub use LlmExecutor;