// rexis_llm/lib.rs

1//! # RSLLM - Rust LLM Client Library
2//!
3//! **RSLLM** is a Rust-native client library for Large Language Models with multi-provider
4//! support, streaming capabilities, and type-safe interfaces.
5//!
6//! ## Design Philosophy
7//!
8//! RSLLM embraces Rust's core principles:
9//! - **Type Safety**: Compile-time guarantees for API contracts
10//! - **Memory Safety**: Zero-copy operations where possible
11//! - **Async-First**: Built around async/await and streaming
12//! - **Multi-Provider**: Unified interface for OpenAI, Claude, Ollama, etc.
13//! - **Composable**: Easy integration with frameworks like RRAG
14//!
15//! ## Architecture
16//!
17//! ```text
18//! ┌─────────────────┐    ┌─────────────────┐    ┌─────────────────┐
19//! │   Application   │───▶│    RSLLM        │───▶│   LLM Provider  │
20//! │   (RRAG, etc)   │    │    Client       │    │  (OpenAI/etc)   │
21//! └─────────────────┘    └─────────────────┘    └─────────────────┘
22//!                                 │
23//!                                 ▼
24//! ┌─────────────────┐    ┌─────────────────┐    ┌─────────────────┐
25//! │   Streaming     │◀───│   Provider      │◀───│    HTTP/API     │
26//! │   Response      │    │   Abstraction   │    │    Transport    │
27//! └─────────────────┘    └─────────────────┘    └─────────────────┘
28//! ```
29//!
30//! ## Quick Start
31//!
32//! ```rust,no_run
33//! use rsllm::{Client, Provider, ChatMessage, MessageRole};
34//!
35//! #[tokio::main]
36//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
37//!     // Create client with OpenAI provider
38//!     let client = Client::builder()
39//!         .provider(Provider::OpenAI)
40//!         .api_key("your-api-key")
41//!         .model("gpt-4")
42//!         .build()?;
43//!
44//!     // Simple chat completion
45//!     let messages = vec![
46//!         ChatMessage::new(MessageRole::User, "What is Rust?")
47//!     ];
48//!
49//!     let response = client.chat_completion(messages).await?;
//!     println!("Response: {}", response.content);
51//!
52//!     Ok(())
53//! }
54//! ```
55//!
56//! ## Streaming Example
57//!
58//! ```rust,no_run
59//! use rsllm::{Client, Provider, ChatMessage, MessageRole};
60//! use futures_util::StreamExt;
61//!
62//! #[tokio::main]
63//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
64//!     let client = Client::builder()
65//!         .provider(Provider::OpenAI)
66//!         .api_key("your-api-key")
67//!         .build()?;
68//!
69//!     let messages = vec![
70//!         ChatMessage::new(MessageRole::User, "Tell me a story")
71//!     ];
72//!
73//!     let mut stream = client.chat_completion_stream(messages).await?;
74//!
//!     while let Some(chunk) = stream.next().await {
//!         match chunk? {
//!             chunk if chunk.is_delta() => {
//!                 print!("{}", chunk.content);
//!             }
//!             chunk if chunk.is_done() => {
//!                 println!("\n[DONE]");
//!                 break;
//!             }
//!             _ => {}
//!         }
//!     }
87//!
88//!     Ok(())
89//! }
90//! ```
91
// Core modules
/// Client entry points: [`Client`] and its [`ClientBuilder`].
pub mod client;
/// Configuration types: [`ClientConfig`] and [`ModelConfig`].
pub mod config;
/// Error handling: [`RsllmError`] and the [`RsllmResult`] alias.
pub mod error;
/// Chat message types: [`ChatMessage`], [`MessageRole`], [`MessageContent`], [`ToolCall`].
pub mod message;
/// Provider abstraction: [`Provider`], [`LLMProvider`], [`ProviderConfig`].
pub mod provider;
/// Response types: [`ChatResponse`], [`CompletionResponse`], [`EmbeddingResponse`],
/// [`StreamChunk`], [`Usage`].
pub mod response;
/// Streaming responses: [`ChatStream`] and [`CompletionStream`].
pub mod streaming;
/// Tool/function-calling support (not re-exported at the crate root).
pub mod tools;
101
// Re-export proc macros
// Gated behind the `macros` feature so the proc-macro crate is an
// optional dependency.
#[cfg(feature = "macros")]
pub use rexis_macros::{arg, context, tool};

// Re-exports for convenience
// Flatten the module hierarchy so callers can write `use rsllm::Client`
// instead of `use rsllm::client::Client`, etc.
pub use client::{Client, ClientBuilder};
pub use config::{ClientConfig, ModelConfig};
pub use error::{RsllmError, RsllmResult};
pub use message::{ChatMessage, MessageContent, MessageRole, ToolCall};
pub use provider::{LLMProvider, Provider, ProviderConfig};
pub use response::{ChatResponse, CompletionResponse, EmbeddingResponse, StreamChunk, Usage};
pub use streaming::{ChatStream, CompletionStream};
114
/// Crate version, taken from `Cargo.toml` at compile time via the
/// `CARGO_PKG_VERSION` environment variable.
pub const VERSION: &str = env!("CARGO_PKG_VERSION");

/// Human-readable framework name.
pub const NAME: &str = "RSLLM";

/// One-line framework description.
pub const DESCRIPTION: &str = "Rust LLM Client Library";
123
/// Prelude module for convenient imports.
///
/// `use rsllm::prelude::*;` brings the crate's core types into scope in one
/// line, plus the external items most commonly needed alongside them
/// (`async_trait`, the `Stream`/`StreamExt` traits for consuming streaming
/// responses, serde's derive traits, and the `tokio` runtime crate).
pub mod prelude {
    pub use crate::{
        ChatMessage, ChatResponse, ChatStream, Client, ClientBuilder, ClientConfig,
        CompletionResponse, CompletionStream, EmbeddingResponse, LLMProvider, MessageContent,
        MessageRole, ModelConfig, Provider, ProviderConfig, RsllmError, RsllmResult, StreamChunk,
        ToolCall, Usage,
    };

    // External dependencies commonly used
    pub use async_trait::async_trait;
    pub use futures_util::{Stream, StreamExt};
    pub use serde::{Deserialize, Serialize};
    pub use tokio;
}