Skip to main content

Agent

Struct Agent 

Source
pub struct Agent {
    pub history: Box<dyn History>,
    /* private fields */
}
Expand description

Agent loop that coordinates LLM calls and tool execution. Uses ParallelExecutor by default.

Fields§

§history: Box<dyn History>

Implementations§

Source§

impl Agent

Source

pub fn new(llm: impl LLMProvider + 'static) -> Self

Create a new agent loop

Examples found in repository?
examples/chatbot.rs (line 15)
7async fn main() {
8    let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
9
10    let llm = OpenAIProvider::new()
11        .api_key(api_key)
12        .base_url("https://openrouter.ai/api/v1")
13        .model("google/gemini-3-flash-preview");
14
15    let agent = Agent::new(llm).system("You are a helpful assistant");
16
17    run_cli_loop(agent).await
18}
More examples
Hide additional examples
examples/history.rs (line 42)
34async fn main() {
35    let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
36
37    let llm = OpenAIProvider::new()
38        .api_key(api_key)
39        .base_url("https://openrouter.ai/api/v1")
40        .model("google/gemini-3-flash-preview");
41
42    let agent = Agent::new(llm)
43        .system("You are a helpful assistant")
44        .history(CustomHistory::new(3));
45
46    run_cli_loop(agent).await
47}
examples/fn_tools.rs (line 35)
27async fn main() {
28    let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
29
30    let llm = OpenAIProvider::new()
31        .api_key(api_key)
32        .base_url("https://openrouter.ai/api/v1")
33        .model("google/gemini-3-flash-preview");
34
35    let agent = Agent::new(llm)
36        .system("You are a helpful assistant with access to tools")
37        .tool(get_weather)
38        .tool(add);
39
40    run_cli_loop(agent).await
41}
examples/bind_tools.rs (line 83)
63async fn main() {
64    let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
65
66    let llm = OpenAIProvider::new()
67        .api_key(api_key)
68        .base_url("https://openrouter.ai/api/v1")
69        .model("google/gemini-3-flash-preview");
70
71    let mut data = HashMap::new();
72    data.insert("name".to_string(), "Alice".to_string());
73    data.insert("age".to_string(), "30".to_string());
74
75    let r = ReadonlyTool {
76        data: Arc::new(data),
77    };
78
79    let w = WritableTool {
80        data: Arc::new(Mutex::new(HashMap::new())),
81    };
82
83    let agent = Agent::new(llm)
84        .system("You are a helpful assistant with access to tools")
85        .bind(r.clone(), ReadonlyTool::fetch)
86        .bind(w.clone(), WritableTool::read)
87        .bind(w, WritableTool::write);
88
89    run_cli_loop(agent).await
90}
examples/mcp.rs (line 73)
17async fn main() -> Result<(), Box<dyn std::error::Error>> {
18    // Connect to a server running as a child process
19    let service = ()
20        .serve(TokioChildProcess::new(Command::new("npx").configure(
21            |cmd| {
22                cmd.args(&["-y", "@modelcontextprotocol/server-filesystem", "."]);
23            },
24        ))?)
25        .await?;
26    println!("Connected to MCP server");
27
28    // List available tools and convert to tool definitions
29    let tools = service.list_tools(Default::default()).await?.tools;
30    let names = tools.iter().map(|t| t.name.to_string()).collect::<Vec<_>>();
31    println!("Available tools: {names:#?}");
32    let mcp_tool_defs = tools
33        .iter()
34        .map(|t| ToolDefinition {
35            tool_type: "function".into(),
36            function: ToolFunction {
37                name: t.name.to_string(),
38                description: t.description.as_deref().unwrap_or_default().to_string(),
39                parameters: Parameters::from_object(t.input_schema.as_ref().clone()),
40            },
41        })
42        .collect();
43
44    let mcp_tool_executor = {
45        let peer = service.clone();
46        move |name: String, args: String| {
47            let peer = peer.clone();
48            async move {
49                peer.call_tool(CallToolRequestParams {
50                    meta: None,
51                    name: name.into(),
52                    arguments: serde_json::from_str(&args).unwrap(),
53                    task: None,
54                })
55                .await
56                .unwrap()
57                .content[0]
58                    .as_text()
59                    .unwrap()
60                    .text
61                    .clone()
62            }
63        }
64    };
65
66    let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
67
68    let llm = OpenAIProvider::new()
69        .api_key(api_key)
70        .base_url("https://openrouter.ai/api/v1")
71        .model("google/gemini-3-flash-preview");
72
73    let agent = Agent::new(llm)
74        .system("You are a helpful assistant")
75        .external(mcp_tool_defs, mcp_tool_executor);
76
77    run_cli_loop(agent).await;
78
79    // Gracefully close the connection
80    service.cancel().await?;
81    Ok(())
82}
Source

pub fn stream_callback<F>(self, callback: F) -> Self
where F: FnMut(String) + Send + 'static,

Set stream callback for LLM responses

§Example
use tiny_loop::{Agent, llm::OpenAIProvider};

let agent = Agent::new(OpenAIProvider::new())
    .stream_callback(|chunk| print!("{}", chunk));
Examples found in repository?
examples/common/streaming_cli.rs (lines 5-8)
4pub async fn run_cli_loop(agent: Agent) {
5    let mut agent = agent.stream_callback(|chunk| {
6        print!("{}", chunk);
7        io::stdout().flush().unwrap();
8    });
9
10    println!("Chatbot started. Type 'quit' to exit.\n");
11
12    loop {
13        print!("> ");
14        io::stdout().flush().unwrap();
15
16        let mut input = String::new();
17        io::stdin().read_line(&mut input).unwrap();
18        let input = input.trim();
19
20        if input == "quit" {
21            break;
22        }
23
24        match agent.chat(input).await {
25            Ok(_) => println!("\n"),
26            Err(e) => eprintln!("Error: {}\n", e),
27        }
28    }
29}
Source

pub fn history(self, history: impl History + 'static) -> Self

Set custom history manager (default: InfiniteHistory)

§Example
use tiny_loop::{Agent, history::InfiniteHistory, llm::OpenAIProvider};

let agent = Agent::new(OpenAIProvider::new())
    .history(InfiniteHistory::new());
Examples found in repository?
examples/history.rs (line 44)
34async fn main() {
35    let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
36
37    let llm = OpenAIProvider::new()
38        .api_key(api_key)
39        .base_url("https://openrouter.ai/api/v1")
40        .model("google/gemini-3-flash-preview");
41
42    let agent = Agent::new(llm)
43        .system("You are a helpful assistant")
44        .history(CustomHistory::new(3));
45
46    run_cli_loop(agent).await
47}
Source

pub fn system(self, content: impl Into<String>) -> Self

Append a system message

§Example
use tiny_loop::{Agent, llm::OpenAIProvider};

let agent = Agent::new(OpenAIProvider::new())
    .system("You are a helpful assistant");
Examples found in repository?
examples/chatbot.rs (line 15)
7async fn main() {
8    let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
9
10    let llm = OpenAIProvider::new()
11        .api_key(api_key)
12        .base_url("https://openrouter.ai/api/v1")
13        .model("google/gemini-3-flash-preview");
14
15    let agent = Agent::new(llm).system("You are a helpful assistant");
16
17    run_cli_loop(agent).await
18}
More examples
Hide additional examples
examples/history.rs (line 43)
34async fn main() {
35    let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
36
37    let llm = OpenAIProvider::new()
38        .api_key(api_key)
39        .base_url("https://openrouter.ai/api/v1")
40        .model("google/gemini-3-flash-preview");
41
42    let agent = Agent::new(llm)
43        .system("You are a helpful assistant")
44        .history(CustomHistory::new(3));
45
46    run_cli_loop(agent).await
47}
examples/fn_tools.rs (line 36)
27async fn main() {
28    let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
29
30    let llm = OpenAIProvider::new()
31        .api_key(api_key)
32        .base_url("https://openrouter.ai/api/v1")
33        .model("google/gemini-3-flash-preview");
34
35    let agent = Agent::new(llm)
36        .system("You are a helpful assistant with access to tools")
37        .tool(get_weather)
38        .tool(add);
39
40    run_cli_loop(agent).await
41}
examples/bind_tools.rs (line 84)
63async fn main() {
64    let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
65
66    let llm = OpenAIProvider::new()
67        .api_key(api_key)
68        .base_url("https://openrouter.ai/api/v1")
69        .model("google/gemini-3-flash-preview");
70
71    let mut data = HashMap::new();
72    data.insert("name".to_string(), "Alice".to_string());
73    data.insert("age".to_string(), "30".to_string());
74
75    let r = ReadonlyTool {
76        data: Arc::new(data),
77    };
78
79    let w = WritableTool {
80        data: Arc::new(Mutex::new(HashMap::new())),
81    };
82
83    let agent = Agent::new(llm)
84        .system("You are a helpful assistant with access to tools")
85        .bind(r.clone(), ReadonlyTool::fetch)
86        .bind(w.clone(), WritableTool::read)
87        .bind(w, WritableTool::write);
88
89    run_cli_loop(agent).await
90}
examples/mcp.rs (line 74)
17async fn main() -> Result<(), Box<dyn std::error::Error>> {
18    // Connect to a server running as a child process
19    let service = ()
20        .serve(TokioChildProcess::new(Command::new("npx").configure(
21            |cmd| {
22                cmd.args(&["-y", "@modelcontextprotocol/server-filesystem", "."]);
23            },
24        ))?)
25        .await?;
26    println!("Connected to MCP server");
27
28    // List available tools and convert to tool definitions
29    let tools = service.list_tools(Default::default()).await?.tools;
30    let names = tools.iter().map(|t| t.name.to_string()).collect::<Vec<_>>();
31    println!("Available tools: {names:#?}");
32    let mcp_tool_defs = tools
33        .iter()
34        .map(|t| ToolDefinition {
35            tool_type: "function".into(),
36            function: ToolFunction {
37                name: t.name.to_string(),
38                description: t.description.as_deref().unwrap_or_default().to_string(),
39                parameters: Parameters::from_object(t.input_schema.as_ref().clone()),
40            },
41        })
42        .collect();
43
44    let mcp_tool_executor = {
45        let peer = service.clone();
46        move |name: String, args: String| {
47            let peer = peer.clone();
48            async move {
49                peer.call_tool(CallToolRequestParams {
50                    meta: None,
51                    name: name.into(),
52                    arguments: serde_json::from_str(&args).unwrap(),
53                    task: None,
54                })
55                .await
56                .unwrap()
57                .content[0]
58                    .as_text()
59                    .unwrap()
60                    .text
61                    .clone()
62            }
63        }
64    };
65
66    let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
67
68    let llm = OpenAIProvider::new()
69        .api_key(api_key)
70        .base_url("https://openrouter.ai/api/v1")
71        .model("google/gemini-3-flash-preview");
72
73    let agent = Agent::new(llm)
74        .system("You are a helpful assistant")
75        .external(mcp_tool_defs, mcp_tool_executor);
76
77    run_cli_loop(agent).await;
78
79    // Gracefully close the connection
80    service.cancel().await?;
81    Ok(())
82}
Source

pub fn executor(self, executor: impl ToolExecutor + 'static) -> Self

Set a custom tool executor (default: ParallelExecutor)

§Example
use tiny_loop::{Agent, tool::SequentialExecutor, llm::OpenAIProvider};

let agent = Agent::new(OpenAIProvider::new())
    .executor(SequentialExecutor::new());
Source

pub fn tool<Args, Fut>(self, tool: fn(Args) -> Fut) -> Self
where Fut: Future<Output = String> + Send + 'static, Args: ToolArgs + 'static,

Register a tool function created by #[tool]

To register a tool method with an instance, use Self::bind. To register external tools (e.g. from MCP servers) use Self::external

§Example
use tiny_loop::{Agent, tool::tool, llm::OpenAIProvider};

#[tool]
async fn fetch(
    /// URL to fetch
    url: String,
) -> String {
    todo!()
}

let agent = Agent::new(OpenAIProvider::new())
    .tool(fetch);
Examples found in repository?
examples/fn_tools.rs (line 37)
27async fn main() {
28    let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
29
30    let llm = OpenAIProvider::new()
31        .api_key(api_key)
32        .base_url("https://openrouter.ai/api/v1")
33        .model("google/gemini-3-flash-preview");
34
35    let agent = Agent::new(llm)
36        .system("You are a helpful assistant with access to tools")
37        .tool(get_weather)
38        .tool(add);
39
40    run_cli_loop(agent).await
41}
Source

pub fn bind<T, Args, Fut>(self, ins: T, tool: fn(T, Args) -> Fut) -> Self
where T: Send + Sync + Clone + 'static, Fut: Future<Output = String> + Send + 'static, Args: ToolArgs + 'static,

Bind an instance to a tool method created by #[tool]

To register a standalone tool function, use Self::tool. To register external tools (e.g. from MCP servers) use Self::external

§Example
use tiny_loop::{Agent, tool::tool, llm::OpenAIProvider};
use std::sync::Arc;

#[derive(Clone)]
struct Database {
    data: Arc<String>,
}

#[tool]
impl Database {
    /// Fetch data from database
    async fn fetch(
        self,
        /// Data key
        key: String,
    ) -> String {
        todo!()
    }
}

let db = Database { data: Arc::new("data".into()) };
let agent = Agent::new(OpenAIProvider::new())
    .bind(db, Database::fetch);
Examples found in repository?
examples/bind_tools.rs (line 85)
63async fn main() {
64    let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
65
66    let llm = OpenAIProvider::new()
67        .api_key(api_key)
68        .base_url("https://openrouter.ai/api/v1")
69        .model("google/gemini-3-flash-preview");
70
71    let mut data = HashMap::new();
72    data.insert("name".to_string(), "Alice".to_string());
73    data.insert("age".to_string(), "30".to_string());
74
75    let r = ReadonlyTool {
76        data: Arc::new(data),
77    };
78
79    let w = WritableTool {
80        data: Arc::new(Mutex::new(HashMap::new())),
81    };
82
83    let agent = Agent::new(llm)
84        .system("You are a helpful assistant with access to tools")
85        .bind(r.clone(), ReadonlyTool::fetch)
86        .bind(w.clone(), WritableTool::read)
87        .bind(w, WritableTool::write);
88
89    run_cli_loop(agent).await
90}
Source

pub fn external<Fut>( self, defs: Vec<ToolDefinition>, exec: impl Fn(String, String) -> Fut + Clone + Send + Sync + 'static, ) -> Self
where Fut: Future<Output = String> + Send + 'static,

Register external tools (e.g. from MCP servers)

To register a standalone tool function, use Self::tool. To register a tool method with an instance, use Self::bind.

§Example
use tiny_loop::{Agent, llm::OpenAIProvider, types::{Parameters, ToolDefinition, ToolFunction}};
use serde_json::{json, Value};

let defs = vec![ToolDefinition {
    tool_type: "function".into(),
    function: ToolFunction {
        name: "get_weather".into(),
        description: "Get weather information".into(),
        parameters: Parameters::from_object(
            json!({
                "type": "object",
                "properties": {
                    "city": {
                        "type": "string",
                        "description": "City name"
                    }
                },
                "required": ["city"]
            }).as_object().unwrap().clone()
        ),
    },
}];

let external_executor = move |name: String, args: String| {
    async move {
        let _args = serde_json::from_str::<Value>(&args).unwrap();
        "result".into()
    }
};

let agent = Agent::new(OpenAIProvider::new())
    .external(defs, external_executor);
Examples found in repository?
examples/mcp.rs (line 75)
17async fn main() -> Result<(), Box<dyn std::error::Error>> {
18    // Connect to a server running as a child process
19    let service = ()
20        .serve(TokioChildProcess::new(Command::new("npx").configure(
21            |cmd| {
22                cmd.args(&["-y", "@modelcontextprotocol/server-filesystem", "."]);
23            },
24        ))?)
25        .await?;
26    println!("Connected to MCP server");
27
28    // List available tools and convert to tool definitions
29    let tools = service.list_tools(Default::default()).await?.tools;
30    let names = tools.iter().map(|t| t.name.to_string()).collect::<Vec<_>>();
31    println!("Available tools: {names:#?}");
32    let mcp_tool_defs = tools
33        .iter()
34        .map(|t| ToolDefinition {
35            tool_type: "function".into(),
36            function: ToolFunction {
37                name: t.name.to_string(),
38                description: t.description.as_deref().unwrap_or_default().to_string(),
39                parameters: Parameters::from_object(t.input_schema.as_ref().clone()),
40            },
41        })
42        .collect();
43
44    let mcp_tool_executor = {
45        let peer = service.clone();
46        move |name: String, args: String| {
47            let peer = peer.clone();
48            async move {
49                peer.call_tool(CallToolRequestParams {
50                    meta: None,
51                    name: name.into(),
52                    arguments: serde_json::from_str(&args).unwrap(),
53                    task: None,
54                })
55                .await
56                .unwrap()
57                .content[0]
58                    .as_text()
59                    .unwrap()
60                    .text
61                    .clone()
62            }
63        }
64    };
65
66    let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
67
68    let llm = OpenAIProvider::new()
69        .api_key(api_key)
70        .base_url("https://openrouter.ai/api/v1")
71        .model("google/gemini-3-flash-preview");
72
73    let agent = Agent::new(llm)
74        .system("You are a helpful assistant")
75        .external(mcp_tool_defs, mcp_tool_executor);
76
77    run_cli_loop(agent).await;
78
79    // Gracefully close the connection
80    service.cancel().await?;
81    Ok(())
82}
Source

pub async fn run(&mut self) -> Result<String>

Run the agent loop until completion. Returns the AI’s last response

Source

pub async fn chat(&mut self, prompt: impl Into<String>) -> Result<String>

Run the agent loop with a new user input appended. Returns the AI’s last response

Examples found in repository?
examples/common/streaming_cli.rs (line 24)
4pub async fn run_cli_loop(agent: Agent) {
5    let mut agent = agent.stream_callback(|chunk| {
6        print!("{}", chunk);
7        io::stdout().flush().unwrap();
8    });
9
10    println!("Chatbot started. Type 'quit' to exit.\n");
11
12    loop {
13        print!("> ");
14        io::stdout().flush().unwrap();
15
16        let mut input = String::new();
17        io::stdin().read_line(&mut input).unwrap();
18        let input = input.trim();
19
20        if input == "quit" {
21            break;
22        }
23
24        match agent.chat(input).await {
25            Ok(_) => println!("\n"),
26            Err(e) => eprintln!("Error: {}\n", e),
27        }
28    }
29}

Auto Trait Implementations§

§

impl Freeze for Agent

§

impl !RefUnwindSafe for Agent

§

impl !Send for Agent

§

impl !Sync for Agent

§

impl Unpin for Agent

§

impl !UnwindSafe for Agent

Blanket Implementations§

Source§

impl<T> Any for T
where T: 'static + ?Sized,

Source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
Source§

impl<T> Borrow<T> for T
where T: ?Sized,

Source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
Source§

impl<T> BorrowMut<T> for T
where T: ?Sized,

Source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
Source§

impl<T> From<T> for T

Source§

fn from(t: T) -> T

Returns the argument unchanged.

Source§

impl<T> Instrument for T

Source§

fn instrument(self, span: Span) -> Instrumented<Self>

Instruments this type with the provided Span, returning an Instrumented wrapper. Read more
Source§

fn in_current_span(self) -> Instrumented<Self>

Instruments this type with the current Span, returning an Instrumented wrapper. Read more
Source§

impl<T, U> Into<U> for T
where U: From<T>,

Source§

fn into(self) -> U

Calls U::from(self).

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

Source§

impl<T> PolicyExt for T
where T: ?Sized,

Source§

fn and<P, B, E>(self, other: P) -> And<T, P>
where T: Policy<B, E>, P: Policy<B, E>,

Create a new Policy that returns Action::Follow only if self and other return Action::Follow. Read more
Source§

fn or<P, B, E>(self, other: P) -> Or<T, P>
where T: Policy<B, E>, P: Policy<B, E>,

Create a new Policy that returns Action::Follow if either self or other returns Action::Follow. Read more
Source§

impl<T, U> TryFrom<U> for T
where U: Into<T>,

Source§

type Error = Infallible

The type returned in the event of a conversion error.
Source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
Source§

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

Source§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
Source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
Source§

impl<T> WithSubscriber for T

Source§

fn with_subscriber<S>(self, subscriber: S) -> WithDispatch<Self>
where S: Into<Dispatch>,

Attaches the provided Subscriber to this type, returning a WithDispatch wrapper. Read more
Source§

fn with_current_subscriber(self) -> WithDispatch<Self>

Attaches the current default Subscriber to this type, returning a WithDispatch wrapper. Read more