use std::path::PathBuf;
use rmcp::handler::server::router::tool::ToolRouter;
use rmcp::handler::server::wrapper::{Json, Parameters};
use rmcp::{ServerHandler, ServiceExt, tool, tool_handler, tool_router};
use crate::git::diff::ChangeScope;
use crate::tools::types::{FunctionContextEntry, detect_language};
use crate::tools::{
ContextArgs, FunctionContextResponse, HistoryArgs, HistoryResponse, ManifestArgs,
ManifestOptions, ManifestResponse, ReviewChangeArgs, ReviewChangeResponse, SnapshotArgs,
SnapshotOptions, SnapshotResponse, build_function_context_with_options, build_history,
build_manifest, build_review_change, build_snapshots, build_worktree_manifest,
};
fn change_scope_label(scope: ChangeScope) -> &'static str {
match scope {
ChangeScope::Committed => "committed",
ChangeScope::Staged => "staged",
ChangeScope::Unstaged => "unstaged",
}
}
/// Reports whether a combined review response was cut short in any way:
/// either sub-response has a further page, or either sub-response dropped
/// function analysis to stay within the token budget.
fn review_change_response_truncated(response: &crate::tools::ReviewChangeResponse) -> bool {
    let manifest = &response.manifest;
    let context = &response.function_context;
    let paginated =
        manifest.pagination.next_cursor.is_some() || context.pagination.next_cursor.is_some();
    let budget_trimmed = !manifest.metadata.function_analysis_truncated.is_empty()
        || !context.metadata.function_analysis_truncated.is_empty();
    paginated || budget_trimmed
}
/// Counts function-context entries per detected language, keyed by the
/// static label from [`detect_language`].
///
/// Entries whose file maps to `"unknown"` are skipped so the label never
/// leaks into metric cardinality.
fn functions_per_language_counts(
    entries: &[FunctionContextEntry],
) -> std::collections::HashMap<&'static str, u64> {
    entries
        .iter()
        .map(|entry| detect_language(&entry.file))
        .filter(|language| *language != "unknown")
        .fold(std::collections::HashMap::new(), |mut counts, language| {
            *counts.entry(language).or_default() += 1;
            counts
        })
}
/// MCP server exposing git-prism's review tools.
#[derive(Debug, Clone)]
pub struct GitPrismServer {
    // Routing table produced by the `#[tool_router]` macro; dispatches
    // incoming tool calls to the handler methods below.
    tool_router: ToolRouter<Self>,
}
impl GitPrismServer {
    /// Creates a server with the macro-generated tool routing table.
    pub fn new() -> Self {
        Self {
            // `Self::tool_router()` is generated by the `#[tool_router]`
            // attribute on the impl block below.
            tool_router: Self::tool_router(),
        }
    }
}
impl Default for GitPrismServer {
    /// Delegates to [`GitPrismServer::new`].
    fn default() -> Self {
        Self::new()
    }
}
#[tool_router]
impl GitPrismServer {
// MCP tool: list what changed between `base_ref` and either `head_ref` or the
// working tree, optionally with per-function analysis.
//
// NOTE: deliberately `//` comments here — a `///` doc comment on a `#[tool]`
// method would change the published schema description, which tests pin.
#[tool(name = "get_change_manifest")]
async fn get_change_manifest(
    &self,
    Parameters(args): Parameters<ManifestArgs>,
) -> Result<Json<ManifestResponse>, String> {
    let start = std::time::Instant::now();
    let tool_name = "get_change_manifest";
    // Cloned up front: `args` moves into the blocking closure, but the refs
    // are still needed afterwards for metric labels.
    let base_ref_clone = args.base_ref.clone();
    let head_ref_clone = args.head_ref.clone();
    // All git work is synchronous; keep it off the async executor.
    let result = tokio::task::spawn_blocking(move || {
        let repo_path = match args.repo_path {
            Some(p) => PathBuf::from(p),
            None => std::env::current_dir()
                .map_err(|e| format!("cannot determine working directory: {e}"))?,
        };
        // Privacy: only hashed / normalized identifiers go on the span.
        let root_span = tracing::info_span!(
            "mcp.tool.get_change_manifest",
            tool_name = "get_change_manifest",
            repo_path_hash = crate::privacy::hash_repo_path(&repo_path).as_str(),
            ref_base = crate::privacy::normalize_ref_pattern(&args.base_ref).as_str(),
            ref_head = tracing::field::Empty,
            page_number = tracing::field::Empty,
            page_size = tracing::field::Empty,
            response_files_count = tracing::field::Empty,
            response_bytes = tracing::field::Empty,
            response_truncated = tracing::field::Empty,
        );
        let _enter = root_span.enter();
        // A missing head ref means "compare against the working tree".
        if let Some(ref head) = args.head_ref {
            root_span.record(
                "ref_head",
                crate::privacy::normalize_ref_pattern(head).as_str(),
            );
        } else {
            root_span.record("ref_head", "worktree");
        }
        let page_size = crate::pagination::clamp_page_size(args.page_size);
        // A cursor is only honored after it validates against the resolved
        // base/head SHAs; without a cursor, pagination starts at offset 0.
        let offset = if let Some(ref cursor_str) = args.cursor {
            let cursor =
                crate::pagination::decode_cursor(cursor_str).map_err(|e| e.to_string())?;
            let reader =
                crate::git::reader::RepoReader::open(&repo_path).map_err(|e| e.to_string())?;
            let base_sha = reader
                .resolve_commit(&args.base_ref)
                .map_err(|e| e.to_string())?
                .sha;
            let head_sha = match &args.head_ref {
                Some(h) => reader.resolve_commit(h).map_err(|e| e.to_string())?.sha,
                // Sentinel for worktree comparisons: no commit to resolve.
                None => "WORKTREE".to_string(),
            };
            crate::pagination::validate_cursor(&cursor, &base_sha, &head_sha)
                .map_err(|e| e.to_string())?;
            cursor.offset
        } else {
            0
        };
        root_span.record("page_number", (offset / page_size) as i64);
        root_span.record("page_size", page_size as i64);
        if args.cursor.is_some() {
            crate::metrics::get().record_pagination_page(tool_name);
        }
        let options = ManifestOptions {
            include_patterns: args.include_patterns,
            exclude_patterns: args.exclude_patterns,
            include_function_analysis: args.include_function_analysis,
            // 0 is the "unlimited" sentinel in the wire format.
            max_response_tokens: if args.max_response_tokens == 0 {
                None
            } else {
                Some(args.max_response_tokens)
            },
        };
        let result = match args.head_ref {
            Some(head) => build_manifest(
                &repo_path,
                &args.base_ref,
                &head,
                &options,
                offset,
                page_size,
            ),
            None => {
                build_worktree_manifest(&repo_path, &args.base_ref, &options, offset, page_size)
            }
        };
        // Fill in the response-shape span fields before the span closes.
        match &result {
            Ok(manifest) => {
                root_span.record("response_files_count", manifest.files.len() as i64);
                root_span.record(
                    "response_truncated",
                    manifest.pagination.next_cursor.is_some(),
                );
                let bytes = serde_json::to_vec(manifest).map(|v| v.len()).unwrap_or(0);
                root_span.record("response_bytes", bytes as i64);
            }
            Err(e) => {
                tracing::error!(error = %e, "tool invocation failed");
            }
        }
        result.map(Json).map_err(|e| e.to_string())
    })
    .await
    .map_err(|e| e.to_string())?;
    // Metrics are recorded after the join so duration covers the whole call.
    let metrics = crate::metrics::get();
    let duration_ms = start.elapsed().as_millis() as f64;
    metrics.record_duration(tool_name, duration_ms);
    match &result {
        Ok(Json(response)) => {
            metrics.record_request(tool_name, "success");
            let json_bytes = serde_json::to_vec(response).map(|v| v.len()).unwrap_or(0);
            metrics.record_response_bytes(tool_name, json_bytes as f64);
            // Rough heuristic: ~4 bytes per token for JSON payloads.
            metrics.record_tokens_estimated(tool_name, (json_bytes / 4) as f64);
            metrics.record_files_returned(response.files.len() as f64);
            for file in &response.files {
                // "unknown" is excluded to keep metric label cardinality low.
                if file.language != "unknown" {
                    metrics.record_language(&file.language);
                }
                metrics.record_change_scope(change_scope_label(file.change_scope));
                if let Some(fns) = &file.functions_changed {
                    metrics.record_functions_changed(&file.language, fns.len() as f64);
                }
            }
            if response.pagination.next_cursor.is_some() {
                metrics.record_truncated(tool_name, "paginated");
            }
            if !response.metadata.function_analysis_truncated.is_empty() {
                metrics.record_truncated(tool_name, "token_budget");
            }
            metrics.record_ref_pattern(crate::privacy::classify_ref_mode(
                &base_ref_clone,
                head_ref_clone.as_deref(),
            ));
        }
        Err(e) => {
            metrics.record_request(tool_name, "error");
            metrics.record_error(tool_name, crate::privacy::classify_error_kind(e));
        }
    }
    result
}
// MCP tool: per-commit file history between two required refs. Unlike the
// manifest tool, both refs must resolve to commits, so `ref_head` is always
// a normalized pattern (never "worktree").
//
// `//` comments only: a `///` doc comment would alter the macro-published
// tool description that tests pin.
#[tool(name = "get_commit_history")]
async fn get_commit_history(
    &self,
    Parameters(args): Parameters<HistoryArgs>,
) -> Result<Json<HistoryResponse>, String> {
    let start = std::time::Instant::now();
    let tool_name = "get_commit_history";
    // Keep copies for metric labels; `args` moves into the closure.
    let base_ref_clone = args.base_ref.clone();
    let head_ref_clone = args.head_ref.clone();
    let result = tokio::task::spawn_blocking(move || {
        let repo_path = match args.repo_path {
            Some(p) => PathBuf::from(p),
            None => std::env::current_dir()
                .map_err(|e| format!("cannot determine working directory: {e}"))?,
        };
        let root_span = tracing::info_span!(
            "mcp.tool.get_commit_history",
            tool_name = "get_commit_history",
            repo_path_hash = crate::privacy::hash_repo_path(&repo_path).as_str(),
            ref_base = crate::privacy::normalize_ref_pattern(&args.base_ref).as_str(),
            ref_head = crate::privacy::normalize_ref_pattern(&args.head_ref).as_str(),
            page_number = tracing::field::Empty,
            page_size = tracing::field::Empty,
            response_files_count = tracing::field::Empty,
            response_bytes = tracing::field::Empty,
            response_truncated = tracing::field::Empty,
        );
        let _enter = root_span.enter();
        let page_size = crate::pagination::clamp_page_size(args.page_size);
        // Cursors are bound to the resolved base/head SHAs; a stale cursor
        // (refs moved since it was issued) is rejected, not silently reused.
        let offset = if let Some(ref cursor_str) = args.cursor {
            let cursor =
                crate::pagination::decode_cursor(cursor_str).map_err(|e| e.to_string())?;
            let reader =
                crate::git::reader::RepoReader::open(&repo_path).map_err(|e| e.to_string())?;
            let base_sha = reader
                .resolve_commit(&args.base_ref)
                .map_err(|e| e.to_string())?
                .sha;
            let head_sha = reader
                .resolve_commit(&args.head_ref)
                .map_err(|e| e.to_string())?
                .sha;
            crate::pagination::validate_cursor(&cursor, &base_sha, &head_sha)
                .map_err(|e| e.to_string())?;
            cursor.offset
        } else {
            0
        };
        root_span.record("page_number", (offset / page_size) as i64);
        root_span.record("page_size", page_size as i64);
        if args.cursor.is_some() {
            crate::metrics::get().record_pagination_page(tool_name);
        }
        // History exposes no filter/budget knobs; use fixed manifest options.
        let options = ManifestOptions {
            include_patterns: vec![],
            exclude_patterns: vec![],
            include_function_analysis: true,
            max_response_tokens: None,
        };
        let result = build_history(
            &repo_path,
            &args.base_ref,
            &args.head_ref,
            &options,
            offset,
            page_size,
        );
        match &result {
            Ok(response) => {
                // Files are nested per commit; count across all commits.
                let total_files: usize = response.commits.iter().map(|c| c.files.len()).sum();
                root_span.record("response_files_count", total_files as i64);
                root_span.record(
                    "response_truncated",
                    response.pagination.next_cursor.is_some(),
                );
                let bytes = serde_json::to_vec(response).map(|v| v.len()).unwrap_or(0);
                root_span.record("response_bytes", bytes as i64);
            }
            Err(e) => {
                tracing::error!(error = %e, "tool invocation failed");
            }
        }
        result.map(Json).map_err(|e| e.to_string())
    })
    .await
    .map_err(|e| e.to_string())?;
    let metrics = crate::metrics::get();
    let duration_ms = start.elapsed().as_millis() as f64;
    metrics.record_duration(tool_name, duration_ms);
    match &result {
        Ok(Json(response)) => {
            metrics.record_request(tool_name, "success");
            let json_bytes = serde_json::to_vec(response).map(|v| v.len()).unwrap_or(0);
            metrics.record_response_bytes(tool_name, json_bytes as f64);
            // ~4 bytes per token heuristic.
            metrics.record_tokens_estimated(tool_name, (json_bytes / 4) as f64);
            metrics.record_ref_pattern(crate::privacy::classify_ref_mode(
                &base_ref_clone,
                Some(&head_ref_clone),
            ));
            for commit in &response.commits {
                for file in &commit.files {
                    // Skip "unknown" to cap metric label cardinality.
                    if file.language != "unknown" {
                        metrics.record_language(&file.language);
                    }
                    metrics.record_change_scope(change_scope_label(file.change_scope));
                }
            }
            if response.pagination.next_cursor.is_some() {
                metrics.record_truncated(tool_name, "paginated");
            }
        }
        Err(e) => {
            metrics.record_request(tool_name, "error");
            metrics.record_error(tool_name, crate::privacy::classify_error_kind(e));
        }
    }
    result
}
// MCP tool: full before/after file contents (and optional diff hunks) for
// explicit paths — the expensive, last-resort tool.
//
// NOTE(review): when `head_ref` is omitted, `build_snapshots` is called with
// the "HEAD" default, yet the span records ref_head = "worktree" — confirm
// that label is the intended telemetry convention for the defaulted case.
//
// `//` comments only: a `///` doc comment would alter the macro-published
// tool description that tests pin.
#[tool(name = "get_file_snapshots")]
async fn get_file_snapshots(
    &self,
    Parameters(args): Parameters<SnapshotArgs>,
) -> Result<Json<SnapshotResponse>, String> {
    let start = std::time::Instant::now();
    let tool_name = "get_file_snapshots";
    // Kept for metric labels after `args` moves into the closure.
    let base_ref_clone = args.base_ref.clone();
    let head_ref_clone = args.head_ref.clone();
    let result = tokio::task::spawn_blocking(move || {
        let repo_path = match args.repo_path {
            Some(p) => PathBuf::from(p),
            None => std::env::current_dir()
                .map_err(|e| format!("cannot determine working directory: {e}"))?,
        };
        // Missing head ref defaults to HEAD for content lookup.
        let head_ref = args.head_ref.as_deref().unwrap_or("HEAD");
        let root_span = tracing::info_span!(
            "mcp.tool.get_file_snapshots",
            tool_name = "get_file_snapshots",
            repo_path_hash = crate::privacy::hash_repo_path(&repo_path).as_str(),
            ref_base = crate::privacy::normalize_ref_pattern(&args.base_ref).as_str(),
            ref_head = tracing::field::Empty,
            response_files_count = tracing::field::Empty,
            response_bytes = tracing::field::Empty,
            response_truncated = tracing::field::Empty,
        );
        let _enter = root_span.enter();
        if args.head_ref.is_some() {
            root_span.record(
                "ref_head",
                crate::privacy::normalize_ref_pattern(head_ref).as_str(),
            );
        } else {
            root_span.record("ref_head", "worktree");
        }
        let options = SnapshotOptions {
            include_before: args.include_before,
            include_after: args.include_after,
            max_file_size_bytes: args.max_file_size_bytes,
            line_range: args.line_range,
            include_diff_hunks: args.include_diff_hunks,
        };
        let result =
            build_snapshots(&repo_path, &args.base_ref, head_ref, &args.paths, &options);
        match &result {
            Ok(response) => {
                root_span.record("response_files_count", response.files.len() as i64);
                // Truncated if either side of any snapshot hit the size cap.
                let is_any_file_truncated = response.files.iter().any(|f| {
                    f.before.as_ref().is_some_and(|c| c.truncated)
                        || f.after.as_ref().is_some_and(|c| c.truncated)
                });
                root_span.record("response_truncated", is_any_file_truncated);
                let bytes = serde_json::to_vec(response).map(|v| v.len()).unwrap_or(0);
                root_span.record("response_bytes", bytes as i64);
            }
            Err(e) => {
                tracing::error!(error = %e, "tool invocation failed");
            }
        }
        result.map(Json).map_err(|e| e.to_string())
    })
    .await
    .map_err(|e| e.to_string())?;
    let metrics = crate::metrics::get();
    let duration_ms = start.elapsed().as_millis() as f64;
    metrics.record_duration(tool_name, duration_ms);
    match &result {
        Ok(Json(response)) => {
            metrics.record_request(tool_name, "success");
            let json_bytes = serde_json::to_vec(response).map(|v| v.len()).unwrap_or(0);
            metrics.record_response_bytes(tool_name, json_bytes as f64);
            // ~4 bytes per token heuristic.
            metrics.record_tokens_estimated(tool_name, (json_bytes / 4) as f64);
            metrics.record_ref_pattern(crate::privacy::classify_ref_mode(
                &base_ref_clone,
                head_ref_clone.as_deref(),
            ));
            // One truncation event per file that hit the max_file_size cap.
            for file in &response.files {
                let truncated = file.before.as_ref().is_some_and(|c| c.truncated)
                    || file.after.as_ref().is_some_and(|c| c.truncated);
                if truncated {
                    metrics.record_truncated(tool_name, "max_file_size");
                }
            }
        }
        Err(e) => {
            metrics.record_request(tool_name, "error");
            metrics.record_error(tool_name, crate::privacy::classify_error_kind(e));
        }
    }
    result
}
// MCP tool: caller/callee context for functions changed between two required
// refs; supports pagination and a response-token budget.
//
// `//` comments only: a `///` doc comment would alter the macro-published
// tool description that tests pin.
#[tool(name = "get_function_context")]
async fn get_function_context(
    &self,
    Parameters(args): Parameters<ContextArgs>,
) -> Result<Json<FunctionContextResponse>, String> {
    let start = std::time::Instant::now();
    let tool_name = "get_function_context";
    // Kept for metric labels after `args` moves into the closure.
    let base_ref_clone = args.base_ref.clone();
    let head_ref_clone = args.head_ref.clone();
    let result = tokio::task::spawn_blocking(move || {
        let repo_path = match args.repo_path {
            Some(p) => PathBuf::from(p),
            None => std::env::current_dir()
                .map_err(|e| format!("cannot determine working directory: {e}"))?,
        };
        let root_span = tracing::info_span!(
            "mcp.tool.get_function_context",
            tool_name = "get_function_context",
            repo_path_hash = crate::privacy::hash_repo_path(&repo_path).as_str(),
            ref_base = crate::privacy::normalize_ref_pattern(&args.base_ref).as_str(),
            ref_head = crate::privacy::normalize_ref_pattern(&args.head_ref).as_str(),
            response_functions_count = tracing::field::Empty,
            response_files_count = tracing::field::Empty,
            response_bytes = tracing::field::Empty,
            response_truncated = tracing::field::Empty,
        );
        let _enter = root_span.enter();
        // Remember before `args.cursor` is moved into the options struct.
        let has_cursor = args.cursor.is_some();
        let context_options = crate::tools::ContextOptions {
            cursor: args.cursor,
            page_size: args.page_size,
            function_names: args.function_names,
            // 0 is the "unlimited" sentinel in the wire format.
            max_response_tokens: if args.max_response_tokens == 0 {
                None
            } else {
                Some(args.max_response_tokens)
            },
        };
        if has_cursor {
            crate::metrics::get().record_pagination_page(tool_name);
        }
        let result = build_function_context_with_options(
            &repo_path,
            &args.base_ref,
            &args.head_ref,
            &context_options,
        );
        match &result {
            Ok(response) => {
                root_span.record("response_functions_count", response.functions.len() as i64);
                // The response is a flat function list; derive distinct files.
                let unique_files: std::collections::HashSet<&str> =
                    response.functions.iter().map(|f| f.file.as_str()).collect();
                root_span.record("response_files_count", unique_files.len() as i64);
                let is_truncated = !response.metadata.function_analysis_truncated.is_empty();
                let is_paginated = response.pagination.next_cursor.is_some();
                root_span.record("response_truncated", is_truncated || is_paginated);
                let bytes = serde_json::to_vec(response).map(|v| v.len()).unwrap_or(0);
                root_span.record("response_bytes", bytes as i64);
            }
            Err(e) => {
                tracing::error!(error = %e, "tool invocation failed");
            }
        }
        result.map(Json).map_err(|e| e.to_string())
    })
    .await
    .map_err(|e| e.to_string())?;
    let metrics = crate::metrics::get();
    let duration_ms = start.elapsed().as_millis() as f64;
    metrics.record_duration(tool_name, duration_ms);
    match &result {
        Ok(Json(response)) => {
            metrics.record_request(tool_name, "success");
            let json_bytes = serde_json::to_vec(response).map(|v| v.len()).unwrap_or(0);
            metrics.record_response_bytes(tool_name, json_bytes as f64);
            // ~4 bytes per token heuristic.
            metrics.record_tokens_estimated(tool_name, (json_bytes / 4) as f64);
            // Count distinct files across the returned functions.
            let mut seen_files: std::collections::HashSet<&str> =
                std::collections::HashSet::new();
            for func in &response.functions {
                seen_files.insert(func.file.as_str());
            }
            metrics.record_files_returned(seen_files.len() as f64);
            let functions_per_language = functions_per_language_counts(&response.functions);
            for (language, count) in &functions_per_language {
                metrics.record_language(language);
                metrics.record_functions_changed(language, *count as f64);
            }
            metrics.record_ref_pattern(crate::privacy::classify_ref_mode(
                &base_ref_clone,
                Some(&head_ref_clone),
            ));
            if response.pagination.next_cursor.is_some() {
                metrics.record_truncated(tool_name, "paginated");
            }
            if !response.metadata.function_analysis_truncated.is_empty() {
                metrics.record_truncated(tool_name, "token_budget");
            }
        }
        Err(e) => {
            metrics.record_request(tool_name, "error");
            metrics.record_error(tool_name, crate::privacy::classify_error_kind(e));
        }
    }
    result
}
// MCP tool: combined manifest + function-context review in one call, with
// independent cursors for each sub-response.
//
// `//` comments only: a `///` doc comment would alter the macro-published
// tool description that tests pin.
#[tool(name = "review_change")]
async fn review_change(
    &self,
    Parameters(args): Parameters<ReviewChangeArgs>,
) -> Result<Json<ReviewChangeResponse>, String> {
    let start = std::time::Instant::now();
    let tool_name = "review_change";
    // Kept for metric labels after `args` moves into the closure.
    let base_ref_clone = args.base_ref.clone();
    let head_ref_clone = args.head_ref.clone();
    let result = tokio::task::spawn_blocking(move || {
        // `as_deref` here (vs. `match args.repo_path`) because the whole
        // `args` struct is later passed to `build_review_change` by value.
        let repo_path = match args.repo_path.as_deref() {
            Some(p) => PathBuf::from(p),
            None => std::env::current_dir()
                .map_err(|e| format!("cannot determine working directory: {e}"))?,
        };
        let root_span = tracing::info_span!(
            "mcp.tool.review_change",
            tool_name = "review_change",
            repo_path_hash = crate::privacy::hash_repo_path(&repo_path).as_str(),
            ref_base = crate::privacy::normalize_ref_pattern(&args.base_ref).as_str(),
            ref_head = tracing::field::Empty,
            response_files_count = tracing::field::Empty,
            response_functions_count = tracing::field::Empty,
            response_bytes = tracing::field::Empty,
            response_truncated = tracing::field::Empty,
        );
        let _enter = root_span.enter();
        // A missing head ref means "compare against the working tree".
        if let Some(ref head) = args.head_ref {
            root_span.record(
                "ref_head",
                crate::privacy::normalize_ref_pattern(head).as_str(),
            );
        } else {
            root_span.record("ref_head", "worktree");
        }
        // Either sub-response can be independently paginated.
        if args.manifest_cursor.is_some() || args.function_context_cursor.is_some() {
            crate::metrics::get().record_pagination_page(tool_name);
        }
        let result = build_review_change(&repo_path, args);
        match &result {
            Ok(response) => {
                root_span.record("response_files_count", response.manifest.files.len() as i64);
                root_span.record(
                    "response_functions_count",
                    response.function_context.functions.len() as i64,
                );
                root_span.record(
                    "response_truncated",
                    review_change_response_truncated(response),
                );
                let bytes = serde_json::to_vec(response).map(|v| v.len()).unwrap_or(0);
                root_span.record("response_bytes", bytes as i64);
            }
            Err(e) => {
                tracing::error!(error = %e, "tool invocation failed");
            }
        }
        result.map(Json).map_err(|e| e.to_string())
    })
    .await
    .map_err(|e| e.to_string())?;
    let metrics = crate::metrics::get();
    let duration_ms = start.elapsed().as_millis() as f64;
    metrics.record_duration(tool_name, duration_ms);
    match &result {
        Ok(Json(response)) => {
            metrics.record_request(tool_name, "success");
            let json_bytes = serde_json::to_vec(response).map(|v| v.len()).unwrap_or(0);
            metrics.record_response_bytes(tool_name, json_bytes as f64);
            // ~4 bytes per token heuristic.
            metrics.record_tokens_estimated(tool_name, (json_bytes / 4) as f64);
            metrics.record_files_returned(response.manifest.files.len() as f64);
            for file in &response.manifest.files {
                // Skip "unknown" to cap metric label cardinality.
                if file.language != "unknown" {
                    metrics.record_language(&file.language);
                }
                metrics.record_change_scope(change_scope_label(file.change_scope));
                if let Some(fns) = &file.functions_changed {
                    metrics.record_functions_changed(&file.language, fns.len() as f64);
                }
            }
            let functions_per_language =
                functions_per_language_counts(&response.function_context.functions);
            for (language, count) in &functions_per_language {
                metrics.record_language(language);
                metrics.record_functions_changed(language, *count as f64);
            }
            if response.manifest.pagination.next_cursor.is_some()
                || response.function_context.pagination.next_cursor.is_some()
            {
                metrics.record_truncated(tool_name, "paginated");
            }
            if !response
                .manifest
                .metadata
                .function_analysis_truncated
                .is_empty()
                || !response
                    .function_context
                    .metadata
                    .function_analysis_truncated
                    .is_empty()
            {
                metrics.record_truncated(tool_name, "token_budget");
            }
            metrics.record_ref_pattern(crate::privacy::classify_ref_mode(
                &base_ref_clone,
                head_ref_clone.as_deref(),
            ));
        }
        Err(e) => {
            metrics.record_request(tool_name, "error");
            metrics.record_error(tool_name, crate::privacy::classify_error_kind(e));
        }
    }
    result
}
}
#[tool_handler(router = self.tool_router)]
impl ServerHandler for GitPrismServer {
    /// Advertises the tools capability only; resources and prompts are
    /// deliberately left unset (tests assert they stay `None`).
    fn get_info(&self) -> rmcp::model::ServerInfo {
        let capabilities = rmcp::model::ServerCapabilities::builder()
            .enable_tools()
            .build();
        let mut server_info = rmcp::model::ServerInfo::default();
        server_info.capabilities = capabilities;
        server_info
    }
}
/// Initializes telemetry and serves the MCP server over stdio until the
/// client disconnects.
///
/// # Errors
/// Returns an error if serving or the shutdown wait fails.
pub async fn run_server() -> anyhow::Result<()> {
    let telemetry = crate::telemetry::init();
    // Surface otherwise-silent telemetry failures: an OTLP endpoint was
    // configured but the exporter pipeline did not come up.
    if std::env::var("GIT_PRISM_OTLP_ENDPOINT").is_ok_and(|v| !v.is_empty())
        && !telemetry.is_active()
    {
        eprintln!(
            "git-prism: WARNING: GIT_PRISM_OTLP_ENDPOINT is set but telemetry failed to \
             initialize; spans and metrics will NOT be exported. See earlier stderr lines \
             for the underlying cause (trace exporter, metric exporter, or tracing subscriber)."
        );
    }
    crate::metrics::get().record_session_started();
    let server = GitPrismServer::new();
    // MCP stdio transport: requests on stdin, responses on stdout.
    let transport = tokio::io::join(tokio::io::stdin(), tokio::io::stdout());
    server.serve(transport).await?.waiting().await?;
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Canonical tool-name list; `it_matches_expected_tools` guards drift.
    const EXPECTED_TOOLS: &[&str] = &[
        "get_change_manifest",
        "get_commit_history",
        "get_file_snapshots",
        "get_function_context",
        "review_change",
    ];

    #[test]
    fn it_registers_get_change_manifest_tool() {
        let router = GitPrismServer::tool_router();
        assert!(
            router.has_route("get_change_manifest"),
            "get_change_manifest must be registered"
        );
    }

    #[test]
    fn it_registers_get_commit_history_tool() {
        let router = GitPrismServer::tool_router();
        assert!(
            router.has_route("get_commit_history"),
            "get_commit_history must be registered"
        );
    }

    #[test]
    fn it_registers_get_file_snapshots_tool() {
        let router = GitPrismServer::tool_router();
        assert!(
            router.has_route("get_file_snapshots"),
            "get_file_snapshots must be registered"
        );
    }

    #[test]
    fn it_registers_get_function_context_tool() {
        let router = GitPrismServer::tool_router();
        assert!(
            router.has_route("get_function_context"),
            "get_function_context must be registered as an MCP tool"
        );
    }

    /// Builds a minimal `FunctionContextEntry` (change type Modified, no
    /// callers/callees) for the per-language counting tests.
    fn make_entry(name: &str, file: &str) -> crate::tools::types::FunctionContextEntry {
        use crate::tools::types::{
            BlastRadius, CalleeEntry, CallerEntry, FunctionChangeType, FunctionContextEntry,
            ScopingMode,
        };
        FunctionContextEntry {
            name: name.to_string(),
            file: file.to_string(),
            change_type: FunctionChangeType::Modified,
            blast_radius: BlastRadius::compute(0, 0),
            scoping_mode: ScopingMode::Scoped,
            callers: Vec::<CallerEntry>::new(),
            callees: Vec::<CalleeEntry>::new(),
            test_references: Vec::<CallerEntry>::new(),
            caller_count: 0,
            truncated: false,
        }
    }

    #[test]
    fn it_registers_review_change_tool() {
        let router = GitPrismServer::tool_router();
        assert!(
            router.has_route("review_change"),
            "review_change must be registered as an MCP tool (issue #240)"
        );
    }

    #[test]
    fn it_publishes_comparative_git_diff_framing_in_review_change_schema() {
        let desc = schema_description_for("review_change");
        assert!(
            desc.contains("git diff"),
            "review_change MCP schema description must mention 'git diff' to compete with the \
             porcelain agents reach for by default. Got: {desc}"
        );
        assert!(
            desc.contains("instead of"),
            "review_change MCP schema description must use the 'instead of' framing — that's the \
             load-bearing pitch flipping pretraining bias. Got: {desc}"
        );
        assert!(
            desc.contains("blast radius"),
            "review_change MCP schema description must mention 'blast radius' so agents know \
             they get caller analysis, not just a manifest. Got: {desc}"
        );
        assert!(
            desc.contains("40/60"),
            "review_change MCP schema description must document the 40/60 budget split so agents \
             know how their max_response_tokens is allocated. Got: {desc}"
        );
    }

    #[test]
    fn it_returns_empty_map_when_entries_is_empty() {
        let counts = functions_per_language_counts(&[]);
        assert!(
            counts.is_empty(),
            "empty input must produce empty map, not panic or insert spurious entries"
        );
    }

    #[test]
    fn it_counts_function_context_entries_per_language() {
        let entries = vec![
            make_entry("calculate", "src/lib.rs"),
            make_entry("helper", "src/main.rs"),
            make_entry("process_data", "scripts/tool.py"),
            make_entry("Binary", "blob.bin"),
        ];
        let counts = functions_per_language_counts(&entries);
        assert_eq!(counts.get("rust").copied(), Some(2));
        assert_eq!(counts.get("python").copied(), Some(1));
        assert!(
            !counts.contains_key("unknown"),
            "unknown language must be excluded from metric labels"
        );
    }

    #[test]
    fn it_registers_exactly_five_tools() {
        let router = GitPrismServer::tool_router();
        let tools = router.list_all();
        let names: Vec<&str> = tools.iter().map(|t| t.name.as_ref()).collect();
        assert_eq!(
            tools.len(),
            5,
            "expected exactly five MCP tools, found {}: {:?}",
            tools.len(),
            names
        );
    }

    #[test]
    fn it_advertises_tools_in_server_capabilities() {
        let server = GitPrismServer::new();
        let server_info = server.get_info();
        assert_eq!(
            server_info.capabilities.tools,
            Some(rmcp::model::ToolsCapability { list_changed: None }),
            "tools capability must be enabled with list_changed=None (static tool set, no notifications)"
        );
    }

    #[test]
    fn it_does_not_advertise_resources_or_prompts_capabilities() {
        let server = GitPrismServer::new();
        let server_info = server.get_info();
        assert!(
            server_info.capabilities.resources.is_none(),
            "resources capability must not be advertised — this server does not implement resources"
        );
        assert!(
            server_info.capabilities.prompts.is_none(),
            "prompts capability must not be advertised — this server does not implement prompts"
        );
    }

    #[test]
    fn it_labels_all_change_scope_variants() {
        assert_eq!(change_scope_label(ChangeScope::Committed), "committed");
        assert_eq!(change_scope_label(ChangeScope::Staged), "staged");
        assert_eq!(change_scope_label(ChangeScope::Unstaged), "unstaged");
    }

    /// Minimum length for published tool descriptions (issue #237).
    const MIN_DESCRIPTION_CHARS: usize = 80;

    /// Looks up a registered tool's schema description, panicking with a
    /// helpful message when the tool or its description is missing.
    fn schema_description_for(tool_name: &str) -> String {
        let router = GitPrismServer::tool_router();
        let tools = router.list_all();
        let registered: Vec<&str> = tools.iter().map(|t| t.name.as_ref()).collect();
        let tool = tools
            .iter()
            .find(|t| t.name.as_ref() == tool_name)
            .unwrap_or_else(|| {
                panic!(
                    "tool {tool_name:?} must be registered; registered tools are: {registered:?}"
                )
            });
        tool.description
            .as_deref()
            .unwrap_or_else(|| {
                panic!(
                    "tool {tool_name:?} must have a non-empty description in its MCP schema; \
                     the description field is None — add a doc comment to the #[tool] handler"
                )
            })
            .to_string()
    }

    #[test]
    fn it_publishes_cost_warning_in_get_file_snapshots_schema() {
        let desc = schema_description_for("get_file_snapshots");
        assert!(
            desc.contains("COST WARNING"),
            "get_file_snapshots MCP schema description must include the cost warning \
             (issue #211). Got: {desc}"
        );
        assert!(
            desc.contains("get_change_manifest"),
            "get_file_snapshots MCP schema description must name the cheaper alternatives \
             (issue #211). Got: {desc}"
        );
        assert!(
            desc.contains("get_function_context"),
            "get_file_snapshots MCP schema description must name get_function_context as the \
             cheaper second-call alternative (issue #211). Got: {desc}"
        );
        assert!(
            desc.contains("line_range"),
            "get_file_snapshots MCP schema description must mention line_range as a narrowing \
             lever (issue #211). Got: {desc}"
        );
        assert!(
            desc.contains("include_before"),
            "get_file_snapshots MCP schema description must mention include_before as a \
             half-cost lever (issue #211). Got: {desc}"
        );
        assert!(
            desc.contains("one path at a time") || desc.contains("linearly"),
            "get_file_snapshots MCP schema description must instruct agents to call with one \
             path at a time / note that cost scales linearly (issue #211). Got: {desc}"
        );
        assert!(
            desc.contains("1.") && desc.contains("2.") && desc.contains("3."),
            "get_file_snapshots MCP schema description must preserve the numbered 1/2/3 call \
             ordering that tells agents to try get_change_manifest first, then \
             get_function_context, then get_file_snapshots (issue #211). Got: {desc}"
        );
    }

    #[test]
    fn it_publishes_first_resort_hint_in_get_change_manifest_schema() {
        let desc = schema_description_for("get_change_manifest");
        assert!(
            desc.contains("cheapest tool"),
            "get_change_manifest MCP schema description must identify itself as the cheapest \
             first-resort tool (issue #211). Got: {desc}"
        );
        assert!(
            desc.contains("function-level") || desc.contains("function"),
            "get_change_manifest MCP schema description must describe its function-level value \
             proposition (issue #211). Got: {desc}"
        );
        assert!(
            desc.contains("import"),
            "get_change_manifest MCP schema description must mention that it reports import \
             changes (issue #211). Got: {desc}"
        );
        assert!(
            desc.contains("what changed"),
            "get_change_manifest MCP schema description must use the 'what changed between X \
             and Y' phrasing that orients agents (issue #211). Got: {desc}"
        );
    }

    #[test]
    fn it_publishes_second_call_hint_in_get_function_context_schema() {
        let desc = schema_description_for("get_function_context");
        assert!(
            desc.contains("recommended second call"),
            "get_function_context MCP schema description must identify itself as the recommended \
             second call (issue #211). Got: {desc}"
        );
        assert!(
            desc.contains("callers"),
            "get_function_context MCP schema description must mention callers — a core part of \
             its value prop (issue #211). Got: {desc}"
        );
        assert!(
            desc.contains("callees"),
            "get_function_context MCP schema description must mention callees — a core part of \
             its value prop (issue #211). Got: {desc}"
        );
        assert!(
            desc.contains("get_change_manifest"),
            "get_function_context MCP schema description must name get_change_manifest as its \
             predecessor in the call order (issue #211). Got: {desc}"
        );
        assert!(
            desc.contains("blast radius"),
            "get_function_context MCP schema description must mention blast radius as the \
             reason to make the second call (issue #211). Got: {desc}"
        );
    }

    #[test]
    fn it_keeps_cost_warning_scoped_to_get_file_snapshots() {
        for tool_name in [
            "get_change_manifest",
            "get_function_context",
            "get_commit_history",
        ] {
            let desc = schema_description_for(tool_name);
            assert!(
                !desc.contains("COST WARNING"),
                "{tool_name} MCP schema description must NOT contain the cost-warning banner; \
                 that text belongs only to get_file_snapshots. Got: {desc}"
            );
        }
    }

    #[test]
    fn it_does_not_market_get_file_snapshots_as_a_first_or_second_resort() {
        let desc = schema_description_for("get_file_snapshots");
        assert!(
            !desc.contains("cheapest tool"),
            "get_file_snapshots description must NOT claim to be the cheapest tool. Got: {desc}"
        );
        assert!(
            !desc.contains("recommended second call"),
            "get_file_snapshots description must NOT claim to be the recommended second call. \
             Got: {desc}"
        );
    }

    #[test]
    fn it_matches_expected_tools() {
        let router = GitPrismServer::tool_router();
        let mut actual: Vec<String> = router
            .list_all()
            .iter()
            .map(|t| t.name.to_string())
            .collect();
        actual.sort();
        let mut expected: Vec<String> = EXPECTED_TOOLS.iter().map(|s| s.to_string()).collect();
        expected.sort();
        assert_eq!(
            actual, expected,
            "MCP tool set drifted from EXPECTED_TOOLS. \
             If you added, removed, or renamed a tool, update the EXPECTED_TOOLS constant \
             in src/server.rs AND update README.md 'N MCP tools' count, CHANGELOG, and CLAUDE.md."
        );
    }

    #[test]
    fn it_publishes_comparative_framing_for_get_change_manifest() {
        let desc = schema_description_for("get_change_manifest");
        assert!(
            desc.contains("git diff"),
            "get_change_manifest description must reference \"git diff\" (issue #237). Got: {desc}",
        );
        assert!(
            desc.len() >= MIN_DESCRIPTION_CHARS,
            "get_change_manifest description is only {} chars; need >= {} (issue #237). Got: {desc}",
            desc.len(),
            MIN_DESCRIPTION_CHARS,
        );
    }

    #[test]
    fn it_publishes_comparative_framing_for_get_commit_history() {
        let desc = schema_description_for("get_commit_history");
        assert!(
            desc.contains("git log"),
            "get_commit_history description must reference \"git log\" (issue #237). Got: {desc}",
        );
        assert!(
            desc.len() >= MIN_DESCRIPTION_CHARS,
            "get_commit_history description is only {} chars; need >= {} (issue #237). Got: {desc}",
            desc.len(),
            MIN_DESCRIPTION_CHARS,
        );
    }

    #[test]
    fn it_publishes_comparative_framing_for_get_file_snapshots() {
        let desc = schema_description_for("get_file_snapshots");
        assert!(
            desc.contains("git show"),
            "get_file_snapshots description must reference \"git show\" (issue #237). Got: {desc}",
        );
        assert!(
            desc.len() >= MIN_DESCRIPTION_CHARS,
            "get_file_snapshots description is only {} chars; need >= {} (issue #237). Got: {desc}",
            desc.len(),
            MIN_DESCRIPTION_CHARS,
        );
    }

    #[test]
    fn it_publishes_comparative_framing_for_get_function_context() {
        let desc = schema_description_for("get_function_context");
        assert!(
            desc.contains("git log -S"),
            "get_function_context description must reference \"git log -S\" (issue #237). Got: {desc}",
        );
        assert!(
            desc.len() >= MIN_DESCRIPTION_CHARS,
            "get_function_context description is only {} chars; need >= {} (issue #237). Got: {desc}",
            desc.len(),
            MIN_DESCRIPTION_CHARS,
        );
    }

    // Insta snapshots pin the exact published description text; any wording
    // change must be reviewed via `cargo insta review`.
    #[test]
    fn it_snapshots_get_change_manifest_description() {
        insta::assert_snapshot!(
            "get_change_manifest_description",
            schema_description_for("get_change_manifest")
        );
    }

    #[test]
    fn it_snapshots_get_commit_history_description() {
        insta::assert_snapshot!(
            "get_commit_history_description",
            schema_description_for("get_commit_history")
        );
    }

    #[test]
    fn it_snapshots_get_file_snapshots_description() {
        insta::assert_snapshot!(
            "get_file_snapshots_description",
            schema_description_for("get_file_snapshots")
        );
    }

    #[test]
    fn it_snapshots_get_function_context_description() {
        insta::assert_snapshot!(
            "get_function_context_description",
            schema_description_for("get_function_context")
        );
    }

    #[test]
    fn it_snapshots_review_change_description() {
        insta::assert_snapshot!(
            "review_change_description",
            schema_description_for("review_change")
        );
    }
}