use crate::cache::CacheStore;
use crate::error::Error;
use rmcp::{
model::{
CallToolRequestParam, CallToolResult, Content, ErrorData, Implementation,
InitializeResult, ListToolsResult, ProtocolVersion, ServerCapabilities, Tool,
},
service::{RequestContext, ServiceExt},
RoleServer, ServerHandler,
};
use std::sync::Arc;
use tracing::{error, info};
/// Formats an unsigned integer with comma thousands separators,
/// e.g. `1234567` -> `"1,234,567"`.
fn format_number(n: u64) -> String {
    let digits = n.to_string();
    // Build the grouped string in reverse, then flip it once at the end.
    let mut reversed: Vec<char> = Vec::with_capacity(digits.len() + digits.len() / 3);
    for (idx, ch) in digits.chars().rev().enumerate() {
        if idx > 0 && idx % 3 == 0 {
            reversed.push(',');
        }
        reversed.push(ch);
    }
    reversed.iter().rev().collect()
}
/// MCP service that exposes the file cache as tools
/// (`read_file`, `read_files`, `cache_status`, `cache_clear`).
/// `Clone` is cheap: the store is shared behind an `Arc`.
#[derive(Clone)]
pub struct CachebroMcpService {
    // Shared cache backend; Arc allows clones to serve concurrent requests.
    cache: Arc<CacheStore>,
}
impl CachebroMcpService {
    /// Wraps `cache` in an `Arc` so the service can be cloned cheaply.
    pub fn new(cache: CacheStore) -> Self {
        Self {
            cache: Arc::new(cache),
        }
    }

    /// Builds the MCP initialize response: protocol version, tools capability,
    /// and server identity.
    pub fn get_info(&self) -> InitializeResult {
        InitializeResult {
            protocol_version: ProtocolVersion::default(),
            capabilities: ServerCapabilities::builder().enable_tools().build(),
            server_info: Implementation {
                name: "cached-context".to_string(),
                title: Some("Cachebro".to_string()),
                version: "0.2.1".to_string(),
                icons: None,
                website_url: None,
            },
            instructions: None,
        }
    }

    /// Appends the session token-savings note to `text` when any tokens were
    /// saved this session. Stats failures are logged and swallowed so they can
    /// never break a read. (Shared by `read_file` and `read_files`.)
    async fn append_savings_note(&self, text: &mut String) {
        match self.cache.get_stats().await {
            Ok(stats) if stats.session_tokens_saved > 0 => {
                text.push_str(&format!(
                    "\n\n[cached-context: ~{} tokens saved this session. Report this to the user when you complete their task.]",
                    format_number(stats.session_tokens_saved)
                ));
            }
            Ok(_) => {}
            Err(e) => {
                error!("Failed to get stats: {}", e);
            }
        }
    }

    /// Reads `path` through the cache, optionally restricted to an
    /// `offset`/`limit` line window. `force` bypasses the cache entirely.
    /// Cache errors are reported as tool errors, not protocol errors.
    pub async fn read_file(
        &self,
        path: String,
        offset: Option<usize>,
        limit: Option<usize>,
        force: bool,
    ) -> Result<CallToolResult, ErrorData> {
        match self.cache.read_file(&path, offset, limit, force).await {
            Ok(result) => {
                let mut text = result.content;
                // Only advertise savings when this read actually hit the cache.
                if result.cached {
                    self.append_savings_note(&mut text).await;
                }
                Ok(CallToolResult::success(vec![Content::text(text)]))
            }
            Err(e) => {
                error!("Error reading file: {}", e);
                Ok(CallToolResult::error(vec![Content::text(format!(
                    "Error reading file: {}",
                    e
                ))]))
            }
        }
    }

    /// Reads several files through the cache and returns one concatenated
    /// report, one `=== path ===` section per file. Per-file errors are
    /// reported inline so one bad path doesn't fail the whole batch.
    pub async fn read_files(&self, paths: Vec<String>) -> Result<CallToolResult, ErrorData> {
        let mut sections: Vec<String> = Vec::with_capacity(paths.len());
        for path in &paths {
            match self.cache.read_file(path, None, None, false).await {
                Ok(result) => {
                    // A cached read with a non-empty diff gets a change summary
                    // header plus the diff; everything else gets full content.
                    let section = if result.cached
                        && result.lines_changed.map_or(false, |n| n != 0)
                        && result.diff.is_some()
                    {
                        format!(
                            "=== {} [{} lines changed out of {}] ===\n{}",
                            path,
                            result.lines_changed.unwrap_or(0),
                            result.total_lines,
                            // Fall back to full content if the diff is absent
                            // (unreachable given the guard above, but safe).
                            result.diff.as_deref().unwrap_or(&result.content)
                        )
                    } else {
                        format!("=== {} ===\n{}", path, result.content)
                    };
                    sections.push(section);
                }
                Err(e) => {
                    sections.push(format!("=== {} ===\nError: {}", path, e));
                }
            }
        }
        let mut output = sections.join("\n\n");
        self.append_savings_note(&mut output).await;
        Ok(CallToolResult::success(vec![Content::text(output)]))
    }

    /// Reports cache statistics: files tracked and approximate tokens saved,
    /// both for this session and across all sessions.
    pub async fn cache_status(&self) -> Result<CallToolResult, ErrorData> {
        match self.cache.get_stats().await {
            Ok(stats) => {
                let text = format!(
                    "cached-context status:\n  Files tracked: {}\n  Tokens saved (this session): ~{}\n  Tokens saved (all sessions): ~{}",
                    stats.files_tracked,
                    format_number(stats.session_tokens_saved),
                    format_number(stats.tokens_saved)
                );
                Ok(CallToolResult::success(vec![Content::text(text)]))
            }
            Err(e) => {
                error!("Error getting cache stats: {}", e);
                Ok(CallToolResult::error(vec![Content::text(format!(
                    "Error getting cache stats: {}",
                    e
                ))]))
            }
        }
    }

    /// Clears all cached data, resetting the cache completely.
    pub async fn cache_clear(&self) -> Result<CallToolResult, ErrorData> {
        match self.cache.clear().await {
            Ok(()) => Ok(CallToolResult::success(vec![Content::text("Cache cleared.")])),
            Err(e) => {
                error!("Error clearing cache: {}", e);
                Ok(CallToolResult::error(vec![Content::text(format!(
                    "Error clearing cache: {}",
                    e
                ))]))
            }
        }
    }
}
impl ServerHandler for CachebroMcpService {
    /// Handles the MCP initialize handshake: records the client's info on the
    /// peer (only once) and returns this server's capabilities.
    async fn initialize(
        &self,
        request: rmcp::model::InitializeRequestParam,
        context: RequestContext<RoleServer>,
    ) -> Result<InitializeResult, ErrorData> {
        if context.peer.peer_info().is_none() {
            context.peer.set_peer_info(request);
        }
        Ok(self.get_info())
    }

    /// Lists the four tools this server exposes. Pagination is ignored: the
    /// full tool list is always returned in a single page.
    async fn list_tools(
        &self,
        _request: Option<rmcp::model::PaginatedRequestParam>,
        _context: RequestContext<RoleServer>,
    ) -> Result<ListToolsResult, ErrorData> {
        // JSON Schemas for each tool's input. Built inline since they are
        // static; `as_object().unwrap()` is safe because each literal is an
        // object at the top level.
        let read_file_schema = serde_json::json!({
            "type": "object",
            "properties": {
                "path": {
                    "type": "string",
                    "description": "Path to the file to read"
                },
                "offset": {
                    "type": "integer",
                    "description": "Line number to start reading from (1-based). Only provide if the file is too large to read at once."
                },
                "limit": {
                    "type": "integer",
                    "description": "Number of lines to read. Only provide if the file is too large to read at once."
                },
                "force": {
                    "type": "boolean",
                    "description": "Bypass cache and return full content"
                }
            },
            "required": ["path"]
        });
        let read_files_schema = serde_json::json!({
            "type": "object",
            "properties": {
                "paths": {
                    "type": "array",
                    "items": {
                        "type": "string"
                    },
                    "description": "Paths to the files to read"
                }
            },
            "required": ["paths"]
        });
        let cache_status_schema = serde_json::json!({
            "type": "object",
            "properties": {},
            "required": []
        });
        let cache_clear_schema = serde_json::json!({
            "type": "object",
            "properties": {},
            "required": []
        });
        let result = ListToolsResult::with_all_items(vec![
            Tool {
                name: "read_file".into(),
                title: Some("Read File".into()),
                description: Some(
                    "Read a file with caching. Use this tool INSTEAD of the built-in Read tool for reading files.\n\
                    On first read, returns full content and caches it — identical to Read.\n\
                    On subsequent reads, if the file hasn't changed, returns a short confirmation instead of the full content — saving significant tokens.\n\
                    If the file changed, returns only the diff (changed lines) instead of the full file.\n\
                    Supports offset and limit for partial reads — and partial reads are also cached. If only lines outside the requested range changed, returns a short confirmation saving tokens.\n\
                    Set force=true to bypass the cache and get the full file content (use when you no longer have the original in context).\n\
                    ALWAYS prefer this over the Read tool. It is a drop-in replacement with caching benefits.".into()
                ),
                input_schema: Arc::new(read_file_schema.as_object().unwrap().clone()),
                output_schema: None,
                annotations: None,
                icons: None,
                meta: None,
            },
            Tool {
                name: "read_files".into(),
                title: Some("Read Files".into()),
                description: Some(
                    "Read multiple files at once with caching. Use this tool INSTEAD of the built-in Read tool when you need to read several files.\n\
                    Same behavior as read_file but batched. Returns cached/diff results for each file.\n\
                    ALWAYS prefer this over multiple Read calls — it's faster and saves significant tokens.".into()
                ),
                input_schema: Arc::new(read_files_schema.as_object().unwrap().clone()),
                output_schema: None,
                annotations: None,
                icons: None,
                meta: None,
            },
            Tool {
                name: "cache_status".into(),
                title: Some("Cache Status".into()),
                description: Some(
                    "Show cachebro statistics: files tracked, tokens saved (this session and all sessions).\n\
                    Use this to verify cachebro is working and see how many tokens it has saved.".into()
                ),
                input_schema: Arc::new(cache_status_schema.as_object().unwrap().clone()),
                output_schema: None,
                annotations: None,
                icons: None,
                meta: None,
            },
            Tool {
                name: "cache_clear".into(),
                title: Some("Cache Clear".into()),
                description: Some(
                    "Clear all cached data. Use this to reset the cache completely.".into()
                ),
                input_schema: Arc::new(cache_clear_schema.as_object().unwrap().clone()),
                output_schema: None,
                annotations: None,
                icons: None,
                meta: None,
            },
        ]);
        Ok(result)
    }

    /// Dispatches a tool call to the matching handler, extracting and
    /// validating arguments from the request's JSON object.
    async fn call_tool(
        &self,
        request: CallToolRequestParam,
        _context: RequestContext<RoleServer>,
    ) -> Result<CallToolResult, ErrorData> {
        let arguments = request.arguments.unwrap_or_default();
        match request.name.as_ref() {
            "read_file" => {
                let path = arguments
                    .get("path")
                    .and_then(|v| v.as_str())
                    .ok_or_else(|| ErrorData::invalid_params("Missing 'path' parameter", None))?
                    .to_string();
                // `as_u64()` rejects negative numbers (previously `as_i64()
                // ... as usize` wrapped -1 into a huge offset); the checked
                // conversion also guards against truncation on 32-bit targets.
                // Invalid values are treated the same as absent ones.
                let offset = arguments
                    .get("offset")
                    .and_then(|v| v.as_u64())
                    .and_then(|u| usize::try_from(u).ok());
                let limit = arguments
                    .get("limit")
                    .and_then(|v| v.as_u64())
                    .and_then(|u| usize::try_from(u).ok());
                let force = arguments
                    .get("force")
                    .and_then(|v| v.as_bool())
                    .unwrap_or(false);
                self.read_file(path, offset, limit, force).await
            }
            "read_files" => {
                // NOTE: non-string entries in the array are silently dropped.
                let paths = arguments
                    .get("paths")
                    .and_then(|v| v.as_array())
                    .ok_or_else(|| {
                        ErrorData::invalid_params("Missing 'paths' parameter", None)
                    })?
                    .iter()
                    .filter_map(|v| v.as_str().map(|s| s.to_string()))
                    .collect();
                self.read_files(paths).await
            }
            "cache_status" => self.cache_status().await,
            "cache_clear" => self.cache_clear().await,
            // Unknown tools are reported as tool-level errors rather than
            // protocol errors so the client sees a readable message.
            _ => Ok(CallToolResult::error(vec![Content::text(format!(
                "Unknown tool: {}",
                request.name
            ))])),
        }
    }
}
pub async fn start_mcp_server_with_store(cache: CacheStore) -> Result<(), Error> {
info!("Starting cachebro MCP server");
let service = CachebroMcpService::new(cache);
let (stdin, stdout) = (tokio::io::stdin(), tokio::io::stdout());
let running = service.serve((stdin, stdout)).await.map_err(|e| Error::Other(e.to_string()))?;
running.waiting().await.map_err(|e| Error::Other(e.to_string()))?;
Ok(())
}