astrid_llm/lib.rs
//! Astrid LLM - LLM provider abstraction with streaming support.
//!
//! This crate provides:
//! - The `LlmProvider` trait for provider abstraction
//! - Claude (Anthropic) implementation
//! - `OpenAI`-compatible implementation (LM Studio, `OpenAI`, vLLM, etc.)
//! - Streaming response support
//! - Tool use support (see the *Tool use* sketch below)
//!
//! # Example with Claude
//!
//! ```rust,no_run
//! use astrid_llm::{ClaudeProvider, LlmProvider, ProviderConfig};
//!
//! # async fn example() -> Result<(), astrid_llm::LlmError> {
//! // Create provider
//! let config = ProviderConfig::new("your-api-key", "claude-sonnet-4-20250514");
//! let provider = ClaudeProvider::new(config);
//!
//! // Simple completion
//! let response = provider.complete_simple("What is 2+2?").await?;
//! println!("Response: {}", response);
//! # Ok(())
//! # }
//! ```
//!
//! # Example with LM Studio
//!
//! ```rust,no_run
//! use astrid_llm::{OpenAiCompatProvider, LlmProvider};
//!
//! # async fn example() -> Result<(), astrid_llm::LlmError> {
//! // Connect to LM Studio running locally
//! let provider = OpenAiCompatProvider::lm_studio();
//!
//! // Or with a specific model
//! let provider = OpenAiCompatProvider::lm_studio_with_model("llama-3.1-8b");
//!
//! let response = provider.complete_simple("Hello!").await?;
//! println!("Response: {}", response);
//! # Ok(())
//! # }
//! ```
//!
//! # Streaming
//!
//! ```rust,no_run
//! use astrid_llm::{ClaudeProvider, LlmProvider, Message, ProviderConfig, StreamEvent};
//! use futures::StreamExt;
//!
//! # async fn example() -> Result<(), astrid_llm::LlmError> {
//! let provider = ClaudeProvider::new(ProviderConfig::new("api-key", "claude-sonnet-4-20250514"));
//! let messages = vec![Message::user("Tell me a story")];
//!
//! let mut stream = provider.stream(&messages, &[], "").await?;
//!
//! while let Some(event) = stream.next().await {
//!     match event? {
//!         StreamEvent::TextDelta(text) => print!("{}", text),
//!         StreamEvent::Done => println!("\n[Done]"),
//!         _ => {}
//!     }
//! }
//! # Ok(())
//! # }
//! ```
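//!
//! # Tool use
//!
//! Tools are represented by the exported `LlmToolDefinition`, `ToolCall`, and
//! `ToolCallResult` types. The sketch below is illustrative rather than
//! compile-tested: it assumes that the second argument to `stream` takes the
//! tool definitions (it is the empty slice in the streaming example above) and
//! that tool-related events arrive as additional `StreamEvent` variants; see
//! the `types` module for the exact shapes.
//!
//! ```rust,ignore
//! use astrid_llm::{ClaudeProvider, LlmProvider, LlmToolDefinition, Message, ProviderConfig, StreamEvent};
//! use futures::StreamExt;
//!
//! # async fn example() -> Result<(), astrid_llm::LlmError> {
//! let provider = ClaudeProvider::new(ProviderConfig::new("api-key", "claude-sonnet-4-20250514"));
//! let messages = vec![Message::user("What is the weather in Paris?")];
//!
//! // Assumed shape: a tool definition names the tool, describes it, and gives a
//! // schema for its parameters (see `LlmToolDefinition` in the `types` module).
//! let tools: Vec<LlmToolDefinition> = vec![/* e.g. a `get_weather` tool */];
//!
//! // Assumed: second argument = tool definitions, third = system prompt,
//! // mirroring the `stream(&messages, &[], "")` call in the streaming example.
//! let mut stream = provider.stream(&messages, &tools, "Use tools when helpful.").await?;
//!
//! while let Some(event) = stream.next().await {
//!     match event? {
//!         StreamEvent::TextDelta(text) => print!("{}", text),
//!         StreamEvent::Done => break,
//!         // Tool-call events would be matched and dispatched here.
//!         _ => {}
//!     }
//! }
//! # Ok(())
//! # }
//! ```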

#![deny(unsafe_code)]
#![deny(missing_docs)]
#![deny(clippy::all)]
#![warn(unreachable_pub)]
#![deny(clippy::unwrap_used)]
#![cfg_attr(test, allow(clippy::unwrap_used))]

pub mod prelude;

mod claude;
mod error;
mod openai_compat;
mod provider;
mod types;
mod zai;

pub use claude::ClaudeProvider;
pub use error::{LlmError, LlmResult};
pub use openai_compat::OpenAiCompatProvider;
pub use provider::{LlmProvider, ProviderConfig, StreamBox};
pub use types::{
    ContentPart, LlmResponse, LlmToolDefinition, Message, MessageContent, MessageRole, StopReason,
    StreamEvent, ToolCall, ToolCallResult, Usage,
};
pub use zai::ZaiProvider;