serve_with_custom_endpoints/
serve_with_custom_endpoints.rs

use helios_engine::{Agent, CalculatorTool, Config, CustomEndpoint, CustomEndpointsConfig};

#[tokio::main]
async fn main() -> helios_engine::Result<()> {
    // Initialize logging.
    tracing_subscriber::fmt()
        .with_max_level(tracing::Level::INFO)
        .init();

    // Load the LLM configuration from config.toml.
    let config = Config::from_file("config.toml")?;

    // Build an agent with a calculator tool.
    let agent = Agent::builder("API Agent")
        .config(config)
        .system_prompt("You are a helpful AI assistant with access to a calculator tool.")
        .tool(Box::new(CalculatorTool))
        .max_iterations(5)
        .build()
        .await?;

    // Define static custom endpoints served alongside the OpenAI-compatible API.
    let custom_endpoints = CustomEndpointsConfig {
        endpoints: vec![
            CustomEndpoint {
                method: "GET".to_string(),
                path: "/api/version".to_string(),
                response: serde_json::json!({
                    "version": "0.2.8",
                    "service": "Helios Engine",
                    "features": ["agents", "tools", "streaming", "custom_endpoints"]
                }),
                status_code: 200,
            },
            CustomEndpoint {
                method: "GET".to_string(),
                path: "/api/status".to_string(),
                response: serde_json::json!({
                    "status": "operational",
                    "uptime": "unknown",
                    "model": "agent-based"
                }),
                status_code: 200,
            },
            CustomEndpoint {
                method: "POST".to_string(),
                path: "/api/echo".to_string(),
                response: serde_json::json!({
                    "message": "Echo endpoint - this returns static data",
                    "note": "For dynamic responses, use the chat completions endpoint"
                }),
                status_code: 200,
            },
        ],
    };

    println!("Starting server on http://127.0.0.1:8000");
    println!("📡 OpenAI-compatible API endpoints:");
    println!("  POST /v1/chat/completions");
    println!("  GET  /v1/models");
    println!("📡 Custom endpoints:");
    println!("  GET  /api/version");
    println!("  GET  /api/status");
    println!("  POST /api/echo");
    println!();
    println!("Try: curl http://127.0.0.1:8000/api/version");

    // Start the server with the agent and custom endpoints attached.
    helios_engine::serve::start_server_with_agent_and_custom_endpoints(
        agent,
        "local-model".to_string(),
        "127.0.0.1:8000",
        Some(custom_endpoints),
    )
    .await?;

    Ok(())
}
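
Once the server is running, the custom endpoints return their static JSON payloads and can be checked from another terminal, for example:

curl http://127.0.0.1:8000/api/version
curl http://127.0.0.1:8000/api/status
curl -X POST http://127.0.0.1:8000/api/echo

The OpenAI-compatible routes printed above (POST /v1/chat/completions, GET /v1/models) are served on the same address.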