// rsai/lib.rs
1//! # rsai
2//!
3//! Predictable development for unpredictable models. Let the compiler handle the chaos.
4//!
5//! ## ⚠️ WARNING
6//!
7//! This is a pre-release version with an unstable API. Breaking changes may occur between versions.
8//! Use with caution and pin to specific versions in production applications.
9//!
10//! ## Quick Start
11//!
//! ```rust,no_run
//! use rsai::{llm, Message, ChatRole, ApiKey, Provider, TextResponse, completion_schema};
//!
//! #[completion_schema]
//! struct Analysis {
//!     sentiment: String,
//!     confidence: f32,
//! }
//!
//! #[tokio::main]
//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
//!     // Structured completion: the response is parsed into `Analysis`.
//!     let analysis = llm::with(Provider::OpenAI)
//!         .api_key(ApiKey::Default)?
//!         .model("gpt-4o-mini")
//!         .messages(vec![Message {
//!             role: ChatRole::User,
//!             content: "Analyze: 'This library is amazing!'".to_string(),
//!         }])
//!         .complete::<Analysis>()
//!         .await?;
//!     println!("{} ({:.2})", analysis.sentiment, analysis.confidence);
//!
//!     // Plain-text completion via `TextResponse`.
//!     let reply = llm::with(Provider::OpenAI)
//!         .api_key(ApiKey::Default)?
//!         .model("gpt-4o-mini")
//!         .messages(vec![
//!             Message {
//!                 role: ChatRole::System,
//!                 content: "You are friendly and concise.".to_string(),
//!             },
//!             Message {
//!                 role: ChatRole::User,
//!                 content: "Share a fun fact about Rust.".to_string(),
//!             },
//!         ])
//!         .complete::<TextResponse>()
//!         .await?;
//!
//!     println!("{}", reply.text);
//!     Ok(())
//! }
//! ```
53//!
54mod completions;
55mod core;
56mod provider;
57mod responses;
58
59// Core types
60pub use core::{ChatRole, ConversationMessage, Ctx, Message};
61pub use core::{Tool, ToolCall, ToolCallResult, ToolRegistry, ToolSet, ToolSetBuilder};
62pub use core::{ToolCallingConfig, ToolCallingGuard};
63
64// Configuration types
65pub use core::{
66 ApiKey, GenerationConfig, Inspector, InspectorConfig, LlmBuilder, ToolChoice, ToolConfig,
67};
68pub use responses::{Format, HttpClientConfig};
69
70// Response types
71pub use core::{
72 LanguageModelUsage, ResponseMetadata, StructuredRequest, StructuredResponse, TextResponse,
73};
74
75// Async helpers
76pub use core::BoxFuture;
77
78// Error handling
79pub use core::LlmError;
80pub type Result<T> = std::result::Result<T, LlmError>;
81
82// Gen AI request builders
83pub use core::llm;
84
85// Gen AI providers
86pub use provider::{
87 GeminiClient, GeminiConfig, OpenAiClient, OpenAiConfig, OpenRouterClient, OpenRouterConfig,
88 Provider,
89};
90
91// Traits
92pub use core::{CompletionTarget, LlmProvider, ToolFunction};
93
94// Macros from `rsai-macros`
95pub use rsai_macros::{completion_schema, tool, toolset};