helios_engine/lib.rs
//! # Helios Engine
//!
//! Helios is a powerful and flexible Rust framework for building LLM-powered agents
//! with tool support, chat capabilities, and easy configuration management.
//!
//! ## Quick Start
//!
//! ### Using as a Library (Direct LLM Calls)
//!
//! ```no_run
//! use helios_engine::{LLMClient, ChatMessage};
//! use helios_engine::config::LLMConfig;
//!
//! #[tokio::main]
//! async fn main() -> helios_engine::Result<()> {
//!     let llm_config = LLMConfig {
//!         model_name: "gpt-3.5-turbo".to_string(),
//!         base_url: "https://api.openai.com/v1".to_string(),
//!         api_key: std::env::var("OPENAI_API_KEY").unwrap(),
//!         temperature: 0.7,
//!         max_tokens: 2048,
//!     };
//!
//!     let client = LLMClient::new(helios_engine::llm::LLMProviderType::Remote(llm_config)).await?;
//!     let messages = vec![
//!         ChatMessage::system("You are a helpful assistant."),
//!         ChatMessage::user("What is the capital of France?"),
//!     ];
//!
//!     let response = client.chat(messages, None, None, None, None).await?;
//!     println!("Response: {}", response.content);
//!     Ok(())
//! }
//! ```
//!
//! ### Using the Agent System
//!
//! ```no_run
//! use helios_engine::{Agent, Config, CalculatorTool};
//!
//! #[tokio::main]
//! async fn main() -> helios_engine::Result<()> {
//!     let config = Config::from_file("config.toml")?;
//!
//!     let mut agent = Agent::builder("MyAgent")
//!         .config(config)
//!         .system_prompt("You are a helpful assistant.")
//!         .tool(Box::new(CalculatorTool))
//!         .build()
//!         .await?;
//!
//!     let response = agent.chat("What is 15 * 7?").await?;
//!     println!("Agent: {}", response);
//!     Ok(())
//! }
//! ```
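//!
//! The agent keeps its conversation history between calls, so follow-up messages can refer
//! back to earlier turns. A minimal sketch continuing the example above (the follow-up
//! prompt is illustrative):
//!
//! ```no_run
//! # use helios_engine::{Agent, Config, CalculatorTool};
//! # #[tokio::main]
//! # async fn main() -> helios_engine::Result<()> {
//! # let config = Config::from_file("config.toml")?;
//! # let mut agent = Agent::builder("MyAgent")
//! #     .config(config)
//! #     .system_prompt("You are a helpful assistant.")
//! #     .tool(Box::new(CalculatorTool))
//! #     .build()
//! #     .await?;
//! let first = agent.chat("What is 15 * 7?").await?;
//! println!("Agent: {}", first);
//!
//! // The earlier exchange is still in the agent's history, so "that"
//! // can resolve to the previous answer.
//! let second = agent.chat("Now divide that by 3.").await?;
//! println!("Agent: {}", second);
//! # Ok(())
//! # }
//! ```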
//!
//! ## Features
//!
//! - **Direct LLM Access**: Use `LLMClient` for simple, direct calls to LLM models.
//! - **Agent System**: Create intelligent agents with tools and a persistent conversation history.
//! - **Tool Support**: Extensible tool system for adding custom functionality.
//! - **Multi-Provider**: Works with OpenAI, Azure OpenAI, local models, and any OpenAI-compatible API (see the sketch after this list).
//! - **Type-Safe**: Leverages Rust's type system for reliability.
//! - **Async**: Built on Tokio for high-performance async operations.
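//!
//! Because the remote provider speaks the OpenAI-compatible chat API, switching between a
//! hosted service and a self-hosted model is mostly a matter of changing `base_url` and
//! `model_name` in `LLMConfig`. A minimal sketch, assuming an OpenAI-compatible server
//! listening on `http://localhost:11434/v1` and serving a model named `llama3` (both values
//! are placeholders for your own setup; the optional `local` cargo feature and
//! `LocalLLMProvider` cover in-process models and are not shown here):
//!
//! ```no_run
//! use helios_engine::{ChatMessage, LLMClient};
//! use helios_engine::config::LLMConfig;
//! use helios_engine::llm::LLMProviderType;
//!
//! #[tokio::main]
//! async fn main() -> helios_engine::Result<()> {
//!     // Placeholder values: point these at the OpenAI-compatible server you run.
//!     let local_config = LLMConfig {
//!         model_name: "llama3".to_string(),
//!         base_url: "http://localhost:11434/v1".to_string(),
//!         api_key: "unused-for-local-servers".to_string(),
//!         temperature: 0.7,
//!         max_tokens: 2048,
//!     };
//!
//!     let client = LLMClient::new(LLMProviderType::Remote(local_config)).await?;
//!     let messages = vec![ChatMessage::user("Hello from a local model!")];
//!     let response = client.chat(messages, None, None, None, None).await?;
//!     println!("Response: {}", response.content);
//!     Ok(())
//! }
//! ```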
// Modules

/// Defines the `Agent` struct and its associated builder, which are central to the Helios Engine.
pub mod agent;

/// Provides chat-related functionality, including `ChatMessage`, `ChatSession`, and `Role`.
pub mod chat;

/// Handles configuration for the engine, including LLM providers and agent settings.
pub mod config;

/// Defines the custom `HeliosError` and `Result` types for error handling.
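///
/// Errors from config loading, agent construction, and chat calls can all be propagated with
/// `?` through the crate's `Result` alias. A minimal sketch (the `ask` helper is illustrative,
/// not part of the crate):
///
/// ```no_run
/// use helios_engine::{Agent, Config, Result};
///
/// // Propagates any `HeliosError` to the caller with `?`.
/// async fn ask(agent: &mut Agent, prompt: &str) -> Result<()> {
///     let reply = agent.chat(prompt).await?;
///     println!("{}", reply);
///     Ok(())
/// }
///
/// #[tokio::main]
/// async fn main() -> Result<()> {
///     let config = Config::from_file("config.toml")?;
///     let mut agent = Agent::builder("MyAgent")
///         .config(config)
///         .system_prompt("You are a helpful assistant.")
///         .build()
///         .await?;
///
///     if let Err(e) = ask(&mut agent, "Hello!").await {
///         // `HeliosError` implements `Debug`, so it can be reported directly.
///         eprintln!("request failed: {e:?}");
///     }
///     Ok(())
/// }
/// ```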
pub mod error;

/// Manages interactions with Large Language Models (LLMs), including different providers.
pub mod llm;

/// Contains the tool system, including the `Tool` trait and various tool implementations.
pub mod tools;

/// Provides HTTP server functionality for exposing OpenAI-compatible API endpoints.
pub mod serve;

// Re-exports

/// Re-export of the `Agent` and `AgentBuilder` for convenient access.
pub use agent::{Agent, AgentBuilder};

/// Re-export of chat-related types.
pub use chat::{ChatMessage, ChatSession, Role};

/// Re-export of configuration types.
#[cfg(not(feature = "local"))]
pub use config::{Config, LLMConfig};
/// Re-export of configuration types.
#[cfg(feature = "local")]
pub use config::{Config, LLMConfig, LocalConfig};

/// Re-export of the custom error and result types.
pub use error::{HeliosError, Result};

/// Re-export of LLM-related types.
#[cfg(feature = "local")]
pub use llm::{
    Delta, LLMClient, LLMProvider, LLMRequest, LLMResponse, LocalLLMProvider, StreamChoice,
    StreamChunk,
};
/// Re-export of LLM-related types.
#[cfg(not(feature = "local"))]
pub use llm::{Delta, LLMClient, LLMProvider, LLMRequest, LLMResponse, StreamChoice, StreamChunk};

/// Re-export of tool-related types.
pub use tools::{
    CalculatorTool, EchoTool, FileEditTool, FileReadTool, FileSearchTool, FileWriteTool,
    MemoryDBTool, QdrantRAGTool, Tool, ToolParameter, ToolRegistry, ToolResult,
};

/// Re-export of serve functionality.
pub use serve::{
    load_custom_endpoints_config, start_server, start_server_with_agent,
    start_server_with_agent_and_custom_endpoints, start_server_with_custom_endpoints,
    CustomEndpoint, CustomEndpointsConfig, ServerState,
};