pub struct Agent {
pub history: Box<dyn History>,
/* private fields */
}

Expand description
Agent loop that coordinates LLM calls and tool execution.
Uses ParallelExecutor by default.
Fields§
§history: Box<dyn History>

Implementations§
Source§
impl Agent
impl Agent
Source
pub fn new(llm: impl LLMProvider + 'static) -> Self
pub fn new(llm: impl LLMProvider + 'static) -> Self
Create a new agent loop
Examples found in repository?
7async fn main() {
8 let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
9
10 let llm = OpenAIProvider::new()
11 .api_key(api_key)
12 .base_url("https://openrouter.ai/api/v1")
13 .model("google/gemini-3-flash-preview");
14
15 let agent = Agent::new(llm).system("You are a helpful assistant");
16
17 run_cli_loop(agent).await
18}

More examples
34async fn main() {
35 let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
36
37 let llm = OpenAIProvider::new()
38 .api_key(api_key)
39 .base_url("https://openrouter.ai/api/v1")
40 .model("google/gemini-3-flash-preview");
41
42 let agent = Agent::new(llm)
43 .system("You are a helpful assistant")
44 .history(CustomHistory::new(3));
45
46 run_cli_loop(agent).await
47}

27async fn main() {
28 let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
29
30 let llm = OpenAIProvider::new()
31 .api_key(api_key)
32 .base_url("https://openrouter.ai/api/v1")
33 .model("google/gemini-3-flash-preview");
34
35 let agent = Agent::new(llm)
36 .system("You are a helpful assistant with access to tools")
37 .tool(get_weather)
38 .tool(add);
39
40 run_cli_loop(agent).await
41}

63async fn main() {
64 let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
65
66 let llm = OpenAIProvider::new()
67 .api_key(api_key)
68 .base_url("https://openrouter.ai/api/v1")
69 .model("google/gemini-3-flash-preview");
70
71 let mut data = HashMap::new();
72 data.insert("name".to_string(), "Alice".to_string());
73 data.insert("age".to_string(), "30".to_string());
74
75 let r = ReadonlyTool {
76 data: Arc::new(data),
77 };
78
79 let w = WritableTool {
80 data: Arc::new(Mutex::new(HashMap::new())),
81 };
82
83 let agent = Agent::new(llm)
84 .system("You are a helpful assistant with access to tools")
85 .bind(r.clone(), ReadonlyTool::fetch)
86 .bind(w.clone(), WritableTool::read)
87 .bind(w, WritableTool::write);
88
89 run_cli_loop(agent).await
90}

17async fn main() -> Result<(), Box<dyn std::error::Error>> {
18 // Connect to a server running as a child process
19 let service = ()
20 .serve(TokioChildProcess::new(Command::new("npx").configure(
21 |cmd| {
22 cmd.args(&["-y", "@modelcontextprotocol/server-filesystem", "."]);
23 },
24 ))?)
25 .await?;
26 println!("Connected to MCP server");
27
28 // List available tools and convert to tool definitions
29 let tools = service.list_tools(Default::default()).await?.tools;
30 let names = tools.iter().map(|t| t.name.to_string()).collect::<Vec<_>>();
31 println!("Available tools: {names:#?}");
32 let mcp_tool_defs = tools
33 .iter()
34 .map(|t| ToolDefinition {
35 tool_type: "function".into(),
36 function: ToolFunction {
37 name: t.name.to_string(),
38 description: t.description.as_deref().unwrap_or_default().to_string(),
39 parameters: Parameters::from_object(t.input_schema.as_ref().clone()),
40 },
41 })
42 .collect();
43
44 let mcp_tool_executor = {
45 let peer = service.clone();
46 move |name: String, args: String| {
47 let peer = peer.clone();
48 async move {
49 peer.call_tool(CallToolRequestParams {
50 meta: None,
51 name: name.into(),
52 arguments: serde_json::from_str(&args).unwrap(),
53 task: None,
54 })
55 .await
56 .unwrap()
57 .content[0]
58 .as_text()
59 .unwrap()
60 .text
61 .clone()
62 }
63 }
64 };
65
66 let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
67
68 let llm = OpenAIProvider::new()
69 .api_key(api_key)
70 .base_url("https://openrouter.ai/api/v1")
71 .model("google/gemini-3-flash-preview");
72
73 let agent = Agent::new(llm)
74 .system("You are a helpful assistant")
75 .external(mcp_tool_defs, mcp_tool_executor);
76
77 run_cli_loop(agent).await;
78
79 // Gracefully close the connection
80 service.cancel().await?;
81 Ok(())
82}

Source
pub fn stream_callback<F>(self, callback: F) -> Self
pub fn stream_callback<F>(self, callback: F) -> Self
Set stream callback for LLM responses
§Example
use tiny_loop::{Agent, llm::OpenAIProvider};
let agent = Agent::new(OpenAIProvider::new())
    .stream_callback(|chunk| print!("{}", chunk));

Examples found in repository?
4pub async fn run_cli_loop(agent: Agent) {
5 let mut agent = agent.stream_callback(|chunk| {
6 print!("{}", chunk);
7 io::stdout().flush().unwrap();
8 });
9
10 println!("Chatbot started. Type 'quit' to exit.\n");
11
12 loop {
13 print!("> ");
14 io::stdout().flush().unwrap();
15
16 let mut input = String::new();
17 io::stdin().read_line(&mut input).unwrap();
18 let input = input.trim();
19
20 if input == "quit" {
21 break;
22 }
23
24 match agent.chat(input).await {
25 Ok(_) => println!("\n"),
26 Err(e) => eprintln!("Error: {}\n", e),
27 }
28 }
29}

Source
pub fn history(self, history: impl History + 'static) -> Self
pub fn history(self, history: impl History + 'static) -> Self
Set custom history manager (default: InfiniteHistory)
§Example
use tiny_loop::{Agent, history::InfiniteHistory, llm::OpenAIProvider};
let agent = Agent::new(OpenAIProvider::new())
    .history(InfiniteHistory::new());

Examples found in repository?
34async fn main() {
35 let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
36
37 let llm = OpenAIProvider::new()
38 .api_key(api_key)
39 .base_url("https://openrouter.ai/api/v1")
40 .model("google/gemini-3-flash-preview");
41
42 let agent = Agent::new(llm)
43 .system("You are a helpful assistant")
44 .history(CustomHistory::new(3));
45
46 run_cli_loop(agent).await
47}

Source
pub fn system(self, content: impl Into<String>) -> Self
pub fn system(self, content: impl Into<String>) -> Self
Append a system message
§Example
use tiny_loop::{Agent, llm::OpenAIProvider};
let agent = Agent::new(OpenAIProvider::new())
    .system("You are a helpful assistant");

Examples found in repository?
7async fn main() {
8 let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
9
10 let llm = OpenAIProvider::new()
11 .api_key(api_key)
12 .base_url("https://openrouter.ai/api/v1")
13 .model("google/gemini-3-flash-preview");
14
15 let agent = Agent::new(llm).system("You are a helpful assistant");
16
17 run_cli_loop(agent).await
18}

More examples
34async fn main() {
35 let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
36
37 let llm = OpenAIProvider::new()
38 .api_key(api_key)
39 .base_url("https://openrouter.ai/api/v1")
40 .model("google/gemini-3-flash-preview");
41
42 let agent = Agent::new(llm)
43 .system("You are a helpful assistant")
44 .history(CustomHistory::new(3));
45
46 run_cli_loop(agent).await
47}

27async fn main() {
28 let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
29
30 let llm = OpenAIProvider::new()
31 .api_key(api_key)
32 .base_url("https://openrouter.ai/api/v1")
33 .model("google/gemini-3-flash-preview");
34
35 let agent = Agent::new(llm)
36 .system("You are a helpful assistant with access to tools")
37 .tool(get_weather)
38 .tool(add);
39
40 run_cli_loop(agent).await
41}

63async fn main() {
64 let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
65
66 let llm = OpenAIProvider::new()
67 .api_key(api_key)
68 .base_url("https://openrouter.ai/api/v1")
69 .model("google/gemini-3-flash-preview");
70
71 let mut data = HashMap::new();
72 data.insert("name".to_string(), "Alice".to_string());
73 data.insert("age".to_string(), "30".to_string());
74
75 let r = ReadonlyTool {
76 data: Arc::new(data),
77 };
78
79 let w = WritableTool {
80 data: Arc::new(Mutex::new(HashMap::new())),
81 };
82
83 let agent = Agent::new(llm)
84 .system("You are a helpful assistant with access to tools")
85 .bind(r.clone(), ReadonlyTool::fetch)
86 .bind(w.clone(), WritableTool::read)
87 .bind(w, WritableTool::write);
88
89 run_cli_loop(agent).await
90}

17async fn main() -> Result<(), Box<dyn std::error::Error>> {
18 // Connect to a server running as a child process
19 let service = ()
20 .serve(TokioChildProcess::new(Command::new("npx").configure(
21 |cmd| {
22 cmd.args(&["-y", "@modelcontextprotocol/server-filesystem", "."]);
23 },
24 ))?)
25 .await?;
26 println!("Connected to MCP server");
27
28 // List available tools and convert to tool definitions
29 let tools = service.list_tools(Default::default()).await?.tools;
30 let names = tools.iter().map(|t| t.name.to_string()).collect::<Vec<_>>();
31 println!("Available tools: {names:#?}");
32 let mcp_tool_defs = tools
33 .iter()
34 .map(|t| ToolDefinition {
35 tool_type: "function".into(),
36 function: ToolFunction {
37 name: t.name.to_string(),
38 description: t.description.as_deref().unwrap_or_default().to_string(),
39 parameters: Parameters::from_object(t.input_schema.as_ref().clone()),
40 },
41 })
42 .collect();
43
44 let mcp_tool_executor = {
45 let peer = service.clone();
46 move |name: String, args: String| {
47 let peer = peer.clone();
48 async move {
49 peer.call_tool(CallToolRequestParams {
50 meta: None,
51 name: name.into(),
52 arguments: serde_json::from_str(&args).unwrap(),
53 task: None,
54 })
55 .await
56 .unwrap()
57 .content[0]
58 .as_text()
59 .unwrap()
60 .text
61 .clone()
62 }
63 }
64 };
65
66 let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
67
68 let llm = OpenAIProvider::new()
69 .api_key(api_key)
70 .base_url("https://openrouter.ai/api/v1")
71 .model("google/gemini-3-flash-preview");
72
73 let agent = Agent::new(llm)
74 .system("You are a helpful assistant")
75 .external(mcp_tool_defs, mcp_tool_executor);
76
77 run_cli_loop(agent).await;
78
79 // Gracefully close the connection
80 service.cancel().await?;
81 Ok(())
82}

Source
pub fn executor(self, executor: impl ToolExecutor + 'static) -> Self
pub fn executor(self, executor: impl ToolExecutor + 'static) -> Self
Set a custom tool executor (default: ParallelExecutor)
§Example
use tiny_loop::{Agent, tool::SequentialExecutor, llm::OpenAIProvider};
let agent = Agent::new(OpenAIProvider::new())
    .executor(SequentialExecutor::new());

Source
pub fn tool<Args, Fut>(self, tool: fn(Args) -> Fut) -> Self
pub fn tool<Args, Fut>(self, tool: fn(Args) -> Fut) -> Self
Register a tool function created by #[tool]
To register a tool method with an instance, use Self::bind.
To register external tools (e.g. from MCP servers) use Self::external
§Example
use tiny_loop::{Agent, tool::tool, llm::OpenAIProvider};
#[tool]
async fn fetch(
/// URL to fetch
url: String,
) -> String {
todo!()
}
let agent = Agent::new(OpenAIProvider::new())
    .tool(fetch);

Examples found in repository?
27async fn main() {
28 let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
29
30 let llm = OpenAIProvider::new()
31 .api_key(api_key)
32 .base_url("https://openrouter.ai/api/v1")
33 .model("google/gemini-3-flash-preview");
34
35 let agent = Agent::new(llm)
36 .system("You are a helpful assistant with access to tools")
37 .tool(get_weather)
38 .tool(add);
39
40 run_cli_loop(agent).await
41}

Source
pub fn bind<T, Args, Fut>(self, ins: T, tool: fn(T, Args) -> Fut) -> Self
pub fn bind<T, Args, Fut>(self, ins: T, tool: fn(T, Args) -> Fut) -> Self
Bind an instance to a tool method created by #[tool]
To register a standalone tool function, use Self::tool.
To register external tools (e.g. from MCP servers) use Self::external
§Example
use tiny_loop::{Agent, tool::tool, llm::OpenAIProvider};
use std::sync::Arc;
#[derive(Clone)]
struct Database {
data: Arc<String>,
}
#[tool]
impl Database {
/// Fetch data from database
async fn fetch(
self,
/// Data key
key: String,
) -> String {
todo!()
}
}
let db = Database { data: Arc::new("data".into()) };
let agent = Agent::new(OpenAIProvider::new())
    .bind(db, Database::fetch);

Examples found in repository?
63async fn main() {
64 let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
65
66 let llm = OpenAIProvider::new()
67 .api_key(api_key)
68 .base_url("https://openrouter.ai/api/v1")
69 .model("google/gemini-3-flash-preview");
70
71 let mut data = HashMap::new();
72 data.insert("name".to_string(), "Alice".to_string());
73 data.insert("age".to_string(), "30".to_string());
74
75 let r = ReadonlyTool {
76 data: Arc::new(data),
77 };
78
79 let w = WritableTool {
80 data: Arc::new(Mutex::new(HashMap::new())),
81 };
82
83 let agent = Agent::new(llm)
84 .system("You are a helpful assistant with access to tools")
85 .bind(r.clone(), ReadonlyTool::fetch)
86 .bind(w.clone(), WritableTool::read)
87 .bind(w, WritableTool::write);
88
89 run_cli_loop(agent).await
90}

Source
pub fn external<Fut>(
self,
defs: Vec<ToolDefinition>,
exec: impl Fn(String, String) -> Fut + Clone + Send + Sync + 'static,
) -> Self
pub fn external<Fut>( self, defs: Vec<ToolDefinition>, exec: impl Fn(String, String) -> Fut + Clone + Send + Sync + 'static, ) -> Self
Register external tools (e.g. from MCP servers)
To register a standalone tool function, use tool.
To register a tool method with an instance, use bind.
§Example
use tiny_loop::{Agent, llm::OpenAIProvider, types::{Parameters, ToolDefinition, ToolFunction}};
use serde_json::{json, Value};
let defs = vec![ToolDefinition {
tool_type: "function".into(),
function: ToolFunction {
name: "get_weather".into(),
description: "Get weather information".into(),
parameters: Parameters::from_object(
json!({
"type": "object",
"properties": {
"city": {
"type": "string",
"description": "City name"
}
},
"required": ["city"]
}).as_object().unwrap().clone()
),
},
}];
let external_executor = move |name: String, args: String| {
async move {
let _args = serde_json::from_str::<Value>(&args).unwrap();
"result".into()
}
};
let agent = Agent::new(OpenAIProvider::new())
    .external(defs, external_executor);

Examples found in repository?
17async fn main() -> Result<(), Box<dyn std::error::Error>> {
18 // Connect to a server running as a child process
19 let service = ()
20 .serve(TokioChildProcess::new(Command::new("npx").configure(
21 |cmd| {
22 cmd.args(&["-y", "@modelcontextprotocol/server-filesystem", "."]);
23 },
24 ))?)
25 .await?;
26 println!("Connected to MCP server");
27
28 // List available tools and convert to tool definitions
29 let tools = service.list_tools(Default::default()).await?.tools;
30 let names = tools.iter().map(|t| t.name.to_string()).collect::<Vec<_>>();
31 println!("Available tools: {names:#?}");
32 let mcp_tool_defs = tools
33 .iter()
34 .map(|t| ToolDefinition {
35 tool_type: "function".into(),
36 function: ToolFunction {
37 name: t.name.to_string(),
38 description: t.description.as_deref().unwrap_or_default().to_string(),
39 parameters: Parameters::from_object(t.input_schema.as_ref().clone()),
40 },
41 })
42 .collect();
43
44 let mcp_tool_executor = {
45 let peer = service.clone();
46 move |name: String, args: String| {
47 let peer = peer.clone();
48 async move {
49 peer.call_tool(CallToolRequestParams {
50 meta: None,
51 name: name.into(),
52 arguments: serde_json::from_str(&args).unwrap(),
53 task: None,
54 })
55 .await
56 .unwrap()
57 .content[0]
58 .as_text()
59 .unwrap()
60 .text
61 .clone()
62 }
63 }
64 };
65
66 let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
67
68 let llm = OpenAIProvider::new()
69 .api_key(api_key)
70 .base_url("https://openrouter.ai/api/v1")
71 .model("google/gemini-3-flash-preview");
72
73 let agent = Agent::new(llm)
74 .system("You are a helpful assistant")
75 .external(mcp_tool_defs, mcp_tool_executor);
76
77 run_cli_loop(agent).await;
78
79 // Gracefully close the connection
80 service.cancel().await?;
81 Ok(())
82}

Source
pub async fn run(&mut self) -> Result<String>
pub async fn run(&mut self) -> Result<String>
Run the agent loop until completion. Return the last AI’s response
Source
pub async fn chat(&mut self, prompt: impl Into<String>) -> Result<String>
pub async fn chat(&mut self, prompt: impl Into<String>) -> Result<String>
Run the agent loop with a new user input appended. Return the last AI’s response
Examples found in repository?
4pub async fn run_cli_loop(agent: Agent) {
5 let mut agent = agent.stream_callback(|chunk| {
6 print!("{}", chunk);
7 io::stdout().flush().unwrap();
8 });
9
10 println!("Chatbot started. Type 'quit' to exit.\n");
11
12 loop {
13 print!("> ");
14 io::stdout().flush().unwrap();
15
16 let mut input = String::new();
17 io::stdin().read_line(&mut input).unwrap();
18 let input = input.trim();
19
20 if input == "quit" {
21 break;
22 }
23
24 match agent.chat(input).await {
25 Ok(_) => println!("\n"),
26 Err(e) => eprintln!("Error: {}\n", e),
27 }
28 }
29}