// simple_llm_client/lib.rs
1//! # Simple LLM Client
2//!
3//! A Rust crate for interacting with Large Language Model APIs to streamline
4//! content creation, research, and information synthesis.
5//!
6//! ## Features
7//!
8//! - **Perplexity AI Integration**: Seamlessly connect with the Perplexity AI API for advanced research capabilities
9//! - **Markdown Output**: Automatically format responses as Markdown with proper citation formatting
10//! - **Streaming Support**: Option to stream responses in real-time or receive complete responses
11//! - **Citation Handling**: Extract and format citations from AI responses
12//!
13//! ## Usage Example
14//!
15//! ```rust,no_run
16//! use simple_llm_client::perplexity::{chat_completion_markdown, models::ChatMessage};
17//! use std::path::Path;
18//!
19//! #[tokio::main]
20//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
21//! let messages = vec![
22//! ChatMessage {
23//! role: "system".to_string(),
24//! content: "Be precise and concise.".to_string(),
25//! },
26//! ChatMessage {
27//! role: "user".to_string(),
28//! content: "How many stars are there in our galaxy?".to_string(),
29//! },
30//! ];
31//!
32//! chat_completion_markdown(
33//! "sonar-pro",
34//! messages,
35//! Some(Path::new("./output")),
36//! "research_result.md"
37//! ).await?;
38//!
39//! Ok(())
40//! }
41//! ```
42//!
43//! ## Project Structure
44//!
//! - `perplexity`: Module for interacting with the Perplexity AI API
//! - `openai`: Module for interacting with the OpenAI API
//! - `providers`: Module for abstracting different LLM providers (with more to be added in future releases)
47//!
48//! ## Future Development
49//!
//! Future versions will include support for additional LLM providers such as Anthropic,
//! Google Gemini, and others based on community needs.
52
/// Perplexity AI integration: chat completions with Markdown output and
/// citation formatting (see the crate-level usage example above).
pub mod perplexity;
/// OpenAI integration. NOTE(review): module contents are not visible here —
/// presumably a chat-completion client mirroring `perplexity`; confirm.
pub mod openai;
/// Provider-agnostic abstractions over the concrete LLM clients, intended to
/// host additional providers in future releases.
pub mod providers;