helios_engine/lib.rs
//! # Helios Engine
//!
//! Helios is a powerful and flexible Rust framework for building LLM-powered agents
//! with tool support, chat capabilities, and easy configuration management.
//!
//! ## Quick Start
//!
//! ### Using as a Library (Direct LLM Calls)
//!
//! ```no_run
//! use helios_engine::{LLMClient, ChatMessage};
//! use helios_engine::config::LLMConfig;
//!
//! #[tokio::main]
//! async fn main() -> helios_engine::Result<()> {
//!     let llm_config = LLMConfig {
//!         model_name: "gpt-3.5-turbo".to_string(),
//!         base_url: "https://api.openai.com/v1".to_string(),
//!         api_key: std::env::var("OPENAI_API_KEY").unwrap(),
//!         temperature: 0.7,
//!         max_tokens: 2048,
//!     };
//!
//!     let client = LLMClient::new(helios_engine::llm::LLMProviderType::Remote(llm_config)).await?;
//!     let messages = vec![
//!         ChatMessage::system("You are a helpful assistant."),
//!         ChatMessage::user("What is the capital of France?"),
//!     ];
//!
//!     let response = client.chat(messages, None, None, None, None).await?;
//!     println!("Response: {}", response.content);
//!     Ok(())
//! }
//! ```
//!
//! ### Using the Agent System
//!
//! ```no_run
//! use helios_engine::{Agent, Config, CalculatorTool};
//!
//! #[tokio::main]
//! async fn main() -> helios_engine::Result<()> {
//!     let config = Config::from_file("config.toml")?;
//!
//!     let mut agent = Agent::builder("MyAgent")
//!         .config(config)
//!         .system_prompt("You are a helpful assistant.")
//!         .tool(Box::new(CalculatorTool))
//!         .build()
//!         .await?;
//!
//!     let response = agent.chat("What is 15 * 7?").await?;
//!     println!("Agent: {}", response);
//!     Ok(())
//! }
//! ```
//!
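//! The agent example above reads its settings from `config.toml`. The exact schema is
//! defined by the [`config`] module; the snippet below is only a minimal sketch that
//! assumes the LLM settings live in an `[llm]` table mirroring the `LLMConfig` fields
//! from the first example.
//!
//! ```toml
//! # Hypothetical layout -- see `config::Config` for the actual schema.
//! [llm]
//! model_name = "gpt-3.5-turbo"
//! base_url = "https://api.openai.com/v1"
//! api_key = "sk-..."
//! temperature = 0.7
//! max_tokens = 2048
//! ```
//!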
//! ## Features
//!
//! - **Direct LLM Access**: Use `LLMClient` for simple, direct calls to LLM models.
//! - **Agent System**: Create intelligent agents with tools and persistent conversation history.
//! - **Tool Support**: Extensible tool system for adding custom functionality; tools are
//!   attached through the agent builder (see the sketch after this list).
//! - **Multi-Provider**: Works with OpenAI, Azure OpenAI, local models, and any OpenAI-compatible API.
//! - **Type-Safe**: Leverages Rust's type system for reliability.
//! - **Async**: Built on Tokio for high-performance async operations.
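//!
//! Several tools ship with the crate (see the re-exports from the [`tools`] module). As a
//! rough sketch, and assuming the bundled `EchoTool` and `TimestampTool` are unit structs
//! constructed the same way as `CalculatorTool` above, multiple tools can be attached to a
//! single agent through the builder:
//!
//! ```ignore
//! use helios_engine::{Agent, CalculatorTool, Config, EchoTool, TimestampTool};
//!
//! #[tokio::main]
//! async fn main() -> helios_engine::Result<()> {
//!     let config = Config::from_file("config.toml")?;
//!
//!     // Hypothetical combination of bundled tools; anything passed to `.tool(...)`
//!     // presumably implements the `Tool` trait from the `tools` module.
//!     let mut agent = Agent::builder("ToolAgent")
//!         .config(config)
//!         .system_prompt("You are a helpful assistant with several tools.")
//!         .tool(Box::new(CalculatorTool))
//!         .tool(Box::new(EchoTool))
//!         .tool(Box::new(TimestampTool))
//!         .build()
//!         .await?;
//!
//!     let response = agent.chat("What time is it, and what is 6 * 7?").await?;
//!     println!("Agent: {}", response);
//!     Ok(())
//! }
//! ```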

// Modules

/// Defines the `Agent` struct and its associated builder, which are central to the Helios Engine.
pub mod agent;

/// Provides chat-related functionality, including `ChatMessage`, `ChatSession`, and `Role`.
pub mod chat;

/// Handles configuration for the engine, including LLM providers and agent settings.
pub mod config;

/// Defines the custom `HeliosError` and `Result` types for error handling.
pub mod error;

/// Manages interactions with Large Language Models (LLMs), including different providers.
pub mod llm;

/// Contains the tool system, including the `Tool` trait and various tool implementations.
pub mod tools;

/// Provides HTTP server functionality for exposing OpenAI-compatible API endpoints.
pub mod serve;

/// RAG (Retrieval-Augmented Generation) system with vector stores and embeddings.
pub mod rag;

/// RAG tool implementation for agent use.
pub mod rag_tool;

/// Forest of Agents - Multi-agent collaboration system.
pub mod forest;
// Re-exports

/// Re-export of the `Agent` and `AgentBuilder` for convenient access.
pub use agent::{Agent, AgentBuilder};

/// Re-export of chat-related types.
pub use chat::{ChatMessage, ChatSession, Role};

/// Re-export of configuration types.
#[cfg(not(feature = "local"))]
pub use config::{Config, LLMConfig};
/// Re-export of configuration types.
#[cfg(feature = "local")]
pub use config::{Config, LLMConfig, LocalConfig};

/// Re-export of the custom error and result types.
pub use error::{HeliosError, Result};

/// Re-export of LLM-related types.
#[cfg(feature = "local")]
pub use llm::{
    Delta, LLMClient, LLMProvider, LLMRequest, LLMResponse, LocalLLMProvider, StreamChoice,
    StreamChunk,
};
/// Re-export of LLM-related types.
#[cfg(not(feature = "local"))]
pub use llm::{Delta, LLMClient, LLMProvider, LLMRequest, LLMResponse, StreamChoice, StreamChunk};

/// Re-export of tool types, including the `Tool` trait and the built-in tool implementations.
pub use tools::{
    CalculatorTool, EchoTool, FileEditTool, FileIOTool, FileListTool, FileReadTool, FileSearchTool,
    FileWriteTool, HttpRequestTool, JsonParserTool, MemoryDBTool, QdrantRAGTool, ShellCommandTool,
    SystemInfoTool, TextProcessorTool, TimestampTool, Tool, ToolParameter, ToolRegistry, ToolResult,
    WebScraperTool,
};

/// Re-export of RAG system components.
pub use rag::{
    Document, EmbeddingProvider, InMemoryVectorStore, OpenAIEmbeddings, QdrantVectorStore,
    RAGSystem, SearchResult, VectorStore,
};

/// Re-export of RAG tool.
pub use rag_tool::RAGTool;

/// Re-export of serve functionality.
pub use serve::{
    load_custom_endpoints_config, start_server, start_server_with_agent,
    start_server_with_agent_and_custom_endpoints, start_server_with_custom_endpoints,
    CustomEndpoint, CustomEndpointsConfig, ServerState,
};

/// Re-export of Forest of Agents functionality.
pub use forest::{
    AgentId, DelegateTaskTool, ForestBuilder, ForestMessage, ForestOfAgents, SendMessageTool,
    ShareContextTool, SharedContext,
};