helios_engine/lib.rs
//! # Helios Engine
//!
//! Helios is a powerful and flexible Rust framework for building LLM-powered agents
//! with tool support, chat capabilities, and easy configuration management.
//!
//! ## Quick Start
//!
//! ### Using as a Library (Direct LLM Calls)
//!
//! ```no_run
//! use helios_engine::{LLMClient, ChatMessage};
//! use helios_engine::config::LLMConfig;
//!
//! #[tokio::main]
//! async fn main() -> helios_engine::Result<()> {
//!     let llm_config = LLMConfig {
//!         model_name: "gpt-3.5-turbo".to_string(),
//!         base_url: "https://api.openai.com/v1".to_string(),
//!         api_key: std::env::var("OPENAI_API_KEY").unwrap(),
//!         temperature: 0.7,
//!         max_tokens: 2048,
//!     };
//!
//!     let client = LLMClient::new(helios_engine::llm::LLMProviderType::Remote(llm_config)).await?;
//!     let messages = vec![
//!         ChatMessage::system("You are a helpful assistant."),
//!         ChatMessage::user("What is the capital of France?"),
//!     ];
//!
//!     let response = client.chat(messages, None, None, None, None).await?;
//!     println!("Response: {}", response.content);
//!     Ok(())
//! }
//! ```
//!
//! ### Using the Agent System
//!
//! ```no_run
//! use helios_engine::{Agent, Config, CalculatorTool};
//!
//! #[tokio::main]
//! async fn main() -> helios_engine::Result<()> {
//!     let config = Config::from_file("config.toml")?;
//!
//!     let mut agent = Agent::builder("MyAgent")
//!         .config(config)
//!         .system_prompt("You are a helpful assistant.")
//!         .tool(Box::new(CalculatorTool))
//!         .build()
//!         .await?;
//!
//!     let response = agent.chat("What is 15 * 7?").await?;
//!     println!("Agent: {}", response);
//!     Ok(())
//! }
//! ```
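//!
//! Because an `Agent` keeps its conversation history across calls (see the **Features** list
//! below), follow-up questions can build on earlier turns. A minimal sketch of that pattern,
//! reusing only the builder and `chat` calls shown above; the prompts are illustrative:
//!
//! ```no_run
//! use helios_engine::{Agent, Config, CalculatorTool};
//!
//! #[tokio::main]
//! async fn main() -> helios_engine::Result<()> {
//!     let config = Config::from_file("config.toml")?;
//!     let mut agent = Agent::builder("MyAgent")
//!         .config(config)
//!         .system_prompt("You are a helpful assistant.")
//!         .tool(Box::new(CalculatorTool))
//!         .build()
//!         .await?;
//!
//!     // The agent keeps its conversation state between calls, so the second
//!     // question can refer back to the first answer.
//!     let first = agent.chat("What is 15 * 7?").await?;
//!     let second = agent.chat("Now add 10 to that result.").await?;
//!     println!("First: {first}\nSecond: {second}");
//!     Ok(())
//! }
//! ```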
//!
//! ## Features
//!
//! - **Direct LLM Access**: Use `LLMClient` for simple, direct calls to LLM models.
//! - **Agent System**: Create intelligent agents with tools and a persistent conversation history.
//! - **Tool Support**: An extensible tool system for adding custom functionality.
//! - **Multi-Provider**: Works with OpenAI, Azure OpenAI, local models, and any OpenAI-compatible API (see the sketch below).
//! - **Type-Safe**: Leverages Rust's type system for reliability.
//! - **Async**: Built on Tokio for high-performance async operations.
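//!
//! As an illustration of the multi-provider support, the same `LLMConfig` can target any
//! OpenAI-compatible server by changing `base_url`. A minimal sketch, assuming a local server
//! (for example, Ollama's OpenAI-compatible endpoint at `http://localhost:11434/v1`) is running
//! and serving a model named `llama3`; the URL, model name, and placeholder API key are
//! illustrative, not values shipped with Helios:
//!
//! ```no_run
//! use helios_engine::{LLMClient, ChatMessage};
//! use helios_engine::config::LLMConfig;
//!
//! #[tokio::main]
//! async fn main() -> helios_engine::Result<()> {
//!     let llm_config = LLMConfig {
//!         model_name: "llama3".to_string(),
//!         base_url: "http://localhost:11434/v1".to_string(),
//!         // Local servers typically ignore the key; any placeholder works.
//!         api_key: "not-needed".to_string(),
//!         temperature: 0.7,
//!         max_tokens: 2048,
//!     };
//!
//!     let client = LLMClient::new(helios_engine::llm::LLMProviderType::Remote(llm_config)).await?;
//!     let response = client
//!         .chat(vec![ChatMessage::user("Say hello.")], None, None, None, None)
//!         .await?;
//!     println!("{}", response.content);
//!     Ok(())
//! }
//! ```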

// Modules

/// Defines the `Agent` struct and its associated builder, which are central to the Helios Engine.
pub mod agent;

/// Provides chat-related functionality, including `ChatMessage`, `ChatSession`, and `Role`.
pub mod chat;

/// Handles configuration for the engine, including LLM providers and agent settings.
pub mod config;

/// Defines the custom `HeliosError` and `Result` types for error handling.
pub mod error;

/// Manages interactions with Large Language Models (LLMs), including different providers.
pub mod llm;

/// Contains the tool system, including the `Tool` trait and various tool implementations.
pub mod tools;

/// Simplified tool creation with the builder pattern.
pub mod tool_builder;

/// Macros for ultra-simple tool creation.
pub mod tool_macro;

/// Provides HTTP server functionality for exposing OpenAI-compatible API endpoints.
pub mod serve;

/// RAG (Retrieval-Augmented Generation) system with vector stores and embeddings.
pub mod rag;

/// RAG tool implementation for agent use.
pub mod rag_tool;

/// Forest of Agents - Multi-agent collaboration system.
pub mod forest;

// Re-exports

/// Re-export of the `Agent` and `AgentBuilder` for convenient access.
pub use agent::{Agent, AgentBuilder};

/// Re-export of chat-related types.
pub use chat::{ChatMessage, ChatSession, Role};

/// Re-export of configuration types.
#[cfg(not(feature = "local"))]
pub use config::{Config, LLMConfig};
/// Re-export of configuration types.
#[cfg(feature = "local")]
pub use config::{Config, LLMConfig, LocalConfig};

/// Re-export of the custom error and result types.
pub use error::{HeliosError, Result};

/// Re-export of LLM-related types.
#[cfg(feature = "local")]
pub use llm::{
    Delta, LLMClient, LLMProvider, LLMRequest, LLMResponse, LocalLLMProvider, StreamChoice,
    StreamChunk,
};
/// Re-export of LLM-related types.
#[cfg(not(feature = "local"))]
pub use llm::{Delta, LLMClient, LLMProvider, LLMRequest, LLMResponse, StreamChoice, StreamChunk};
/// Re-export of the tool system, including the `Tool` trait, built-in tools, and the tool registry.
pub use tools::{
    CalculatorTool, EchoTool, FileEditTool, FileIOTool, FileListTool, FileReadTool, FileSearchTool,
    FileWriteTool, HttpRequestTool, JsonParserTool, MemoryDBTool, QdrantRAGTool, ShellCommandTool,
    SystemInfoTool, TextProcessorTool, TimestampTool, Tool, ToolParameter, ToolRegistry,
    ToolResult, WebScraperTool,
};

/// Re-export of the tool builder for simplified tool creation.
pub use tool_builder::ToolBuilder;

/// Re-export of RAG system components.
pub use rag::{
    Document, EmbeddingProvider, InMemoryVectorStore, OpenAIEmbeddings, QdrantVectorStore,
    RAGSystem, SearchResult, VectorStore,
};

/// Re-export of the RAG tool.
pub use rag_tool::RAGTool;

/// Re-export of HTTP server functionality.
pub use serve::{
    load_custom_endpoints_config, start_server, start_server_with_agent,
    start_server_with_agent_and_custom_endpoints, start_server_with_custom_endpoints,
    CustomEndpoint, CustomEndpointsConfig, ServerState,
};

/// Re-export of Forest of Agents functionality.
pub use forest::{
    AgentId, CreatePlanTool, DelegateTaskTool, ForestBuilder, ForestMessage, ForestOfAgents,
    SendMessageTool, ShareContextTool, SharedContext, TaskItem, TaskPlan, TaskStatus,
    UpdateTaskMemoryTool,
};