mcp/mcp.rs
mod common;

use common::run_cli_loop;
use rmcp::{
    ServiceExt,
    model::CallToolRequestParams,
    transport::{ConfigureCommandExt, TokioChildProcess},
};
use tiny_loop::{
    Agent,
    llm::OpenAIProvider,
    types::{Parameters, ToolDefinition, ToolFunction},
};
use tokio::process::Command;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Connect to a server running as a child process
    let service = ()
        .serve(TokioChildProcess::new(Command::new("npx").configure(
            |cmd| {
                cmd.args(&["-y", "@modelcontextprotocol/server-filesystem", "."]);
            },
        ))?)
        .await?;
    println!("Connected to MCP server");

    // List available tools and convert to tool definitions
    let tools = service.list_tools(Default::default()).await?.tools;
    let names = tools.iter().map(|t| t.name.to_string()).collect::<Vec<_>>();
    println!("Available tools: {names:#?}");
    let mcp_tool_defs = tools
        .iter()
        .map(|t| ToolDefinition {
            tool_type: "function".into(),
            function: ToolFunction {
                name: t.name.to_string(),
                description: t.description.as_deref().unwrap_or_default().to_string(),
                parameters: Parameters::from_object(t.input_schema.as_ref().clone()),
            },
        })
        .collect();

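    // Bridge between the agent's tool calls and the MCP server: the closure
    // receives a tool name plus JSON-encoded arguments, forwards them via
    // `call_tool`, and returns the first text item of the result. The
    // `unwrap()` calls panic if the call fails or the result is not text.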
    let mcp_tool_executor = {
        let peer = service.clone();
        move |name: String, args: String| {
            let peer = peer.clone();
            async move {
                peer.call_tool(CallToolRequestParams {
                    meta: None,
                    name: name.into(),
                    arguments: serde_json::from_str(&args).unwrap(),
                    task: None,
                })
                .await
                .unwrap()
                .content[0]
                    .as_text()
                    .unwrap()
                    .text
                    .clone()
            }
        }
    };

    let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");

    let llm = OpenAIProvider::new()
        .api_key(api_key)
        .base_url("https://openrouter.ai/api/v1")
        .model("google/gemini-3-flash-preview");

    let agent = Agent::new(llm)
        .system("You are a helpful assistant")
        .external(mcp_tool_defs, mcp_tool_executor);

    run_cli_loop(agent).await;

    // Gracefully close the connection
    service.cancel().await?;
    Ok(())
}
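
The `common` module that provides `run_cli_loop` is not shown here. As a rough illustration of the pattern such a helper typically implements (not the repo's actual code), a minimal read-eval-print loop over an arbitrary async handler could look like the sketch below; the agent's reply method, whatever tiny_loop actually exposes, would be wrapped in the closure passed in.

// Hypothetical sketch only — the real `common::run_cli_loop` takes the Agent
// directly and is not reproduced here. This generic version reads lines from
// stdin and prints whatever the async handler returns.
use std::io::Write;
use tokio::io::AsyncBufReadExt;

pub async fn repl<F, Fut>(mut handle: F)
where
    F: FnMut(String) -> Fut,
    Fut: std::future::Future<Output = String>,
{
    let mut lines = tokio::io::BufReader::new(tokio::io::stdin()).lines();
    loop {
        print!("> ");
        std::io::stdout().flush().unwrap();

        // `next_line` yields None on EOF (Ctrl-D), which ends the session.
        let Some(line) = lines.next_line().await.unwrap() else { break };
        let input = line.trim().to_string();
        if input.is_empty() || input == "exit" {
            break;
        }

        // Hand the user's message to the handler and print the reply.
        println!("{}", handle(input).await);
    }
}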