use std::collections::{HashMap, HashSet};
use serde_json::{json, Value};
use super::super::ToolResult;
use super::{truncate_response, unique_file_paths};
use crate::errors::{Result, TokenSaveError};
use crate::tokensave::TokenSave;
/// Computes the set of test files affected by a change to `files`.
///
/// Performs a breadth-first walk over reverse dependencies (dependents) of
/// each input file, stopping at `depth` levels (default 5, capped at 10).
/// A file counts as a test when it matches the optional `filter` glob, or —
/// absent a filter — when it looks like a test file or carries inline test
/// annotations.
///
/// # Errors
/// Returns `TokenSaveError::Config` when `files` is missing or malformed,
/// or when `filter` is not a valid glob pattern. Propagates graph lookup
/// failures from `get_file_dependents`.
pub(super) async fn handle_affected(cg: &TokenSave, args: Value) -> Result<ToolResult> {
    let files: Vec<String> = args
        .get("files")
        .and_then(|v| v.as_array())
        .map(|arr| {
            arr.iter()
                .filter_map(|v| v.as_str().map(std::string::ToString::to_string))
                .collect()
        })
        .ok_or_else(|| TokenSaveError::Config {
            message: "missing required parameter: files (array of strings)".to_string(),
        })?;
    let max_depth = args
        .get("depth")
        .and_then(serde_json::Value::as_u64)
        .map_or(5, |v| v.min(10) as usize);
    let custom_filter = args.get("filter").and_then(|v| v.as_str());
    // An invalid glob used to be silently ignored (falling back to the default
    // test heuristics); report it as a configuration error instead so callers
    // learn their filter never applied.
    let custom_glob = custom_filter
        .map(|p| {
            glob::Pattern::new(p).map_err(|e| TokenSaveError::Config {
                message: format!("invalid filter glob '{p}': {e}"),
            })
        })
        .transpose()?;
    // Best-effort: files with inline test annotations; empty set on error.
    let files_with_inline_tests = cg
        .get_files_with_test_annotations()
        .await
        .unwrap_or_default();
    let matches_test = |path: &str| -> bool {
        if let Some(ref g) = custom_glob {
            g.matches(path)
        } else {
            crate::tokensave::is_test_file(path) || files_with_inline_tests.contains(path)
        }
    };
    // BFS over dependents; `visited` prevents re-enqueueing, `affected`
    // collects matching test files. Test files terminate their branch of the
    // walk (they are collected, not expanded).
    let mut affected: HashSet<String> = HashSet::new();
    let mut visited: HashSet<String> = HashSet::new();
    let mut queue: std::collections::VecDeque<(String, usize)> = std::collections::VecDeque::new();
    for file in &files {
        if matches_test(file) {
            affected.insert(file.clone());
        }
        if visited.insert(file.clone()) {
            queue.push_back((file.clone(), 0));
        }
    }
    while let Some((file, depth)) = queue.pop_front() {
        if depth >= max_depth {
            continue;
        }
        let dependents = cg.get_file_dependents(&file).await?;
        for dep in dependents {
            if !visited.insert(dep.clone()) {
                continue;
            }
            if matches_test(&dep) {
                affected.insert(dep.clone());
            } else {
                queue.push_back((dep, depth + 1));
            }
        }
    }
    // Sort for deterministic output.
    let mut result: Vec<String> = affected.into_iter().collect();
    result.sort();
    let touched_files = result.clone();
    let output = json!({
        "changed_files": files,
        "affected_tests": result,
        "count": result.len(),
    });
    let formatted = serde_json::to_string_pretty(&output).unwrap_or_default();
    Ok(ToolResult {
        value: json!({
            "content": [{ "type": "text", "text": truncate_response(&formatted) }]
        }),
        touched_files,
    })
}
/// Builds review context for a set of changed files: the symbols they
/// define, the symbols impacted within `depth` hops (default 2, capped at
/// 10), and the test files affected either via symbol impact or via
/// file-level dependents.
///
/// # Errors
/// Returns `TokenSaveError::Config` when `files` is missing or malformed;
/// propagates graph lookup failures.
pub(super) async fn handle_diff_context(cg: &TokenSave, args: Value) -> Result<ToolResult> {
    debug_assert!(
        args.is_object(),
        "handle_diff_context expects an object argument"
    );
    let files: Vec<String> = args
        .get("files")
        .and_then(|v| v.as_array())
        .map(|arr| {
            arr.iter()
                .filter_map(|v| v.as_str().map(std::string::ToString::to_string))
                .collect()
        })
        .ok_or_else(|| TokenSaveError::Config {
            message: "missing required parameter: files (array of strings)".to_string(),
        })?;
    let depth = args
        .get("depth")
        .and_then(serde_json::Value::as_u64)
        .map_or(2, |v| v.min(10) as usize);
    let mut modified_symbols: Vec<Value> = Vec::new();
    let mut impacted_symbols: Vec<Value> = Vec::new();
    let mut affected_tests: HashSet<String> = HashSet::new();
    let mut all_touched_files: Vec<String> = Vec::new();
    // Best-effort: files with inline test annotations; empty set on error.
    let files_with_inline_tests = cg
        .get_files_with_test_annotations()
        .await
        .unwrap_or_default();
    let has_tests =
        |path: &str| crate::tokensave::is_test_file(path) || files_with_inline_tests.contains(path);
    // Impact radii of sibling symbols overlap heavily; record each impacted
    // symbol only once so `impacted_symbols_count` is not inflated by
    // duplicates (previously every overlap produced a repeated entry).
    let mut seen_impacted: HashSet<String> = HashSet::new();
    for file in &files {
        let nodes = cg.get_nodes_by_file(file).await?;
        for node in &nodes {
            all_touched_files.push(node.file_path.clone());
            modified_symbols.push(json!({
                "id": node.id,
                "name": node.name,
                "kind": node.kind.as_str(),
                "file": node.file_path,
                "line": node.start_line,
            }));
            let impact = cg.get_impact_radius(&node.id, depth).await?;
            for impacted in &impact.nodes {
                if impacted.id == node.id {
                    continue;
                }
                if seen_impacted.insert(impacted.id.clone()) {
                    impacted_symbols.push(json!({
                        "id": impacted.id,
                        "name": impacted.name,
                        "kind": impacted.kind.as_str(),
                        "file": impacted.file_path,
                        "line": impacted.start_line,
                    }));
                }
                if has_tests(&impacted.file_path) {
                    affected_tests.insert(impacted.file_path.clone());
                }
            }
        }
    }
    // Second pass: file-level BFS over dependents, mirroring handle_affected,
    // to catch tests reachable through imports rather than symbol edges.
    let mut visited: HashSet<String> = HashSet::new();
    let mut queue: std::collections::VecDeque<(String, usize)> = std::collections::VecDeque::new();
    for file in &files {
        if has_tests(file) {
            affected_tests.insert(file.clone());
        }
        if visited.insert(file.clone()) {
            queue.push_back((file.clone(), 0));
        }
    }
    while let Some((file, d)) = queue.pop_front() {
        if d >= depth {
            continue;
        }
        let dependents = cg.get_file_dependents(&file).await?;
        for dep in dependents {
            if !visited.insert(dep.clone()) {
                continue;
            }
            if has_tests(&dep) {
                affected_tests.insert(dep.clone());
            } else {
                queue.push_back((dep, d + 1));
            }
        }
    }
    let mut tests_sorted: Vec<String> = affected_tests.into_iter().collect();
    tests_sorted.sort();
    let touched_files = unique_file_paths(
        all_touched_files
            .iter()
            .map(std::string::String::as_str)
            .chain(files.iter().map(std::string::String::as_str)),
    );
    let output = json!({
        "changed_files": files,
        "modified_symbols": modified_symbols,
        "impacted_symbols_count": impacted_symbols.len(),
        "impacted_symbols": impacted_symbols,
        "affected_tests": tests_sorted,
    });
    let formatted = serde_json::to_string_pretty(&output).unwrap_or_default();
    Ok(ToolResult {
        value: json!({
            "content": [{ "type": "text", "text": truncate_response(&formatted) }]
        }),
        touched_files,
    })
}
/// Lists file paths changed between two git refs by diffing their trees.
///
/// Additions, deletions and modifications contribute one path each; a
/// rewrite (rename detection) contributes both its source and destination
/// path. Errors are returned as human-readable strings so callers can embed
/// them directly in tool output.
fn git_diff_files(
    project_root: &std::path::Path,
    from_ref: &str,
    to_ref: &str,
) -> std::result::Result<Vec<String>, String> {
    let repo = gix::open(project_root).map_err(|e| format!("failed to open git repo: {e}"))?;
    // Resolve each ref-spec to the tree it points at (peeling through
    // commits/tags as needed).
    let from_tree = repo
        .rev_parse_single(from_ref)
        .map_err(|e| format!("cannot resolve '{from_ref}': {e}"))?
        .object()
        .map_err(|e| format!("cannot read object for '{from_ref}': {e}"))?
        .peel_to_tree()
        .map_err(|e| format!("cannot peel '{from_ref}' to tree: {e}"))?;
    let to_tree = repo
        .rev_parse_single(to_ref)
        .map_err(|e| format!("cannot resolve '{to_ref}': {e}"))?
        .object()
        .map_err(|e| format!("cannot read object for '{to_ref}': {e}"))?
        .peel_to_tree()
        .map_err(|e| format!("cannot peel '{to_ref}' to tree: {e}"))?;
    let mut changed = Vec::new();
    // Walk the tree diff, collecting the repo-relative location of every
    // change. The callback always continues; its error type is Infallible,
    // so any Err from for_each_to_obtain_tree comes from the diff itself.
    from_tree
        .changes()
        .map_err(|e| format!("diff init failed: {e}"))?
        .for_each_to_obtain_tree(&to_tree, |change| {
            use gix::object::tree::diff::Change;
            match &change {
                Change::Addition { location, .. }
                | Change::Deletion { location, .. }
                | Change::Modification { location, .. } => {
                    changed.push(location.to_string());
                }
                Change::Rewrite {
                    source_location,
                    location,
                    ..
                } => {
                    // A rewrite touches both the old and the new path.
                    changed.push(source_location.to_string());
                    changed.push(location.to_string());
                }
            }
            Ok::<_, std::convert::Infallible>(std::ops::ControlFlow::Continue(()))
        })
        .map_err(|e| format!("tree diff failed: {e}"))?;
    Ok(changed)
}
/// Lists paths that differ from HEAD: index entries whose blob id differs
/// from the HEAD tree (staged changes), plus — unless `staged_only` —
/// index entries whose worktree file has a newer mtime than the index
/// records (an unstaged-change heuristic).
///
/// NOTE(review): only paths present in the index are considered, so
/// untracked files are never reported — confirm that is intended.
/// The mtime comparison is a heuristic and may miss content changes that
/// preserve the timestamp.
fn git_changed_files(
    project_root: &std::path::Path,
    staged_only: bool,
) -> std::result::Result<Vec<String>, String> {
    let repo = gix::open(project_root).map_err(|e| format!("failed to open git repo: {e}"))?;
    let head_tree = repo
        .head()
        .map_err(|e| format!("cannot read HEAD: {e}"))?
        .peel_to_commit()
        .map_err(|e| format!("cannot peel HEAD to commit: {e}"))?
        .tree()
        .map_err(|e| format!("cannot read HEAD tree: {e}"))?;
    let index = repo
        .index()
        .map_err(|e| format!("cannot read index: {e}"))?;
    // Single pass over the index (previously two passes with duplicated
    // path extraction); a HashSet dedupes, so results are identical.
    let mut changed = HashSet::new();
    for entry in index.entries() {
        let path = entry.path(&index);
        let path_str = String::from_utf8_lossy(path.as_ref()).to_string();
        if path_str.is_empty() {
            continue;
        }
        // Staged: the indexed blob differs from HEAD, or the path is new.
        let staged_changed = head_tree
            .lookup_entry_by_path(std::path::Path::new(&path_str))
            .ok()
            .flatten()
            .is_none_or(|he| he.object_id() != entry.id);
        if staged_changed {
            changed.insert(path_str);
            continue;
        }
        if !staged_only {
            // Unstaged heuristic: worktree mtime newer than the index stat.
            let full_path = project_root.join(&path_str);
            if let Ok(meta) = std::fs::metadata(&full_path) {
                use std::time::UNIX_EPOCH;
                let mtime = meta
                    .modified()
                    .unwrap_or(UNIX_EPOCH)
                    .duration_since(UNIX_EPOCH)
                    .unwrap_or_default()
                    .as_secs() as u32;
                if mtime > entry.stat.mtime.secs {
                    changed.insert(path_str);
                }
            }
        }
    }
    let mut result: Vec<String> = changed.into_iter().collect();
    result.sort();
    Ok(result)
}
/// Returns the subject line (first message line) of up to `count` recent
/// commits, walking first parents starting from HEAD.
///
/// Stops early at a root commit. Merge side-branches are not visited.
fn git_recent_commits(
    project_root: &std::path::Path,
    count: usize,
) -> std::result::Result<Vec<String>, String> {
    let repo = gix::open(project_root).map_err(|e| format!("failed to open git repo: {e}"))?;
    let mut commits = Vec::new();
    let head = repo
        .head()
        .map_err(|e| format!("cannot read HEAD: {e}"))?
        .into_peeled_id()
        .map_err(|e| format!("cannot peel HEAD: {e}"))?;
    let mut current_id = head.detach();
    for _ in 0..count {
        let commit = repo
            .find_object(current_id)
            .map_err(|e| format!("cannot find object: {e}"))?
            .try_into_commit()
            .map_err(|e| format!("not a commit: {e}"))?;
        let message = commit
            .message_raw()
            .map_err(|e| format!("cannot read commit message: {e}"))?;
        // Subject = first line of the raw message (lossily decoded; commit
        // messages are not guaranteed to be valid UTF-8).
        let subject = String::from_utf8_lossy(message.as_ref())
            .lines()
            .next()
            .unwrap_or("")
            .to_string();
        commits.push(subject);
        // Follow the first parent only; a root commit ends the walk.
        let parent_id = commit.parent_ids().next().map(gix::Id::detach);
        match parent_id {
            Some(pid) => current_id = pid,
            None => break,
        }
    }
    Ok(commits)
}
/// Lists commits reachable from `head_ref` back to (but excluding)
/// `base_ref`, as `{hash, subject}` JSON objects with 7-character
/// abbreviated hashes.
///
/// Walks first parents only and caps the walk at 100 commits, so on
/// merge-heavy histories or long ranges the base may never be reached and
/// the list is simply truncated — NOTE(review): confirm this bound is
/// acceptable for callers.
fn git_commit_log(
    project_root: &std::path::Path,
    base_ref: &str,
    head_ref: &str,
) -> std::result::Result<Vec<Value>, String> {
    let repo = gix::open(project_root).map_err(|e| format!("failed to open git repo: {e}"))?;
    let base_id = repo
        .rev_parse_single(base_ref)
        .map_err(|e| format!("cannot resolve '{base_ref}': {e}"))?
        .detach();
    let head_id = repo
        .rev_parse_single(head_ref)
        .map_err(|e| format!("cannot resolve '{head_ref}': {e}"))?
        .detach();
    let mut commits = Vec::new();
    let mut current_id = head_id;
    // Hard cap of 100 commits guards against unbounded walks.
    for _ in 0..100 {
        // Reached the base: everything between head and base is collected.
        if current_id == base_id {
            break;
        }
        let commit = repo
            .find_object(current_id)
            .map_err(|e| format!("cannot find object: {e}"))?
            .try_into_commit()
            .map_err(|e| format!("not a commit: {e}"))?;
        let message = commit
            .message_raw()
            .map_err(|e| format!("cannot read message: {e}"))?;
        // Subject = first line of the raw (possibly non-UTF-8) message.
        let subject = String::from_utf8_lossy(message.as_ref())
            .lines()
            .next()
            .unwrap_or("")
            .to_string();
        // "{:.7}" truncates the hex display to the conventional short hash.
        let short_id = format!("{:.7}", commit.id);
        commits.push(json!({"hash": short_id, "subject": subject}));
        // First-parent walk; a root commit ends the range early.
        let parent_id = commit.parent_ids().next().map(gix::Id::detach);
        match parent_id {
            Some(pid) => current_id = pid,
            None => break,
        }
    }
    Ok(commits)
}
/// Buckets a file path into one of four roles: "test", "config", "docs",
/// or "source" (the fallback).
///
/// Test detection (file-name heuristics or inline test annotations) takes
/// precedence; config is matched by extension or a "config" substring;
/// docs by extension or a docs/ / doc/ prefix. Matching is
/// case-insensitive via a lowercased copy of the path.
fn classify_file_role(path: &str, files_with_inline_tests: &HashSet<String>) -> &'static str {
    if crate::tokensave::is_test_file(path) || files_with_inline_tests.contains(path) {
        return "test";
    }
    let lower = path.to_lowercase();
    let ext = std::path::Path::new(&lower)
        .extension()
        .and_then(|e| e.to_str());
    let config_ext = matches!(
        ext,
        Some("toml" | "yaml" | "yml" | "json" | "lock" | "ini" | "cfg")
    );
    let docs_ext = matches!(ext, Some("md" | "rst" | "txt"));
    if config_ext || lower.contains("config") {
        "config"
    } else if docs_ext || lower.starts_with("docs/") || lower.starts_with("doc/") {
        "docs"
    } else {
        "source"
    }
}
/// Summarizes the symbols defined in files changed between two git refs.
///
/// Files the graph holds no symbols for (deleted, or never indexed) are
/// reported separately under "files_not_indexed". Git failures are
/// returned as tool output text rather than as hard errors.
///
/// # Errors
/// Returns `TokenSaveError::Config` when `from_ref` or `to_ref` is
/// missing; propagates graph lookup failures.
pub(super) async fn handle_changelog(cg: &TokenSave, args: Value) -> Result<ToolResult> {
    debug_assert!(
        args.is_object(),
        "handle_changelog expects an object argument"
    );
    let from_ref = args
        .get("from_ref")
        .and_then(|v| v.as_str())
        .ok_or_else(|| TokenSaveError::Config {
            message: "missing required parameter: from_ref".to_string(),
        })?;
    let to_ref =
        args.get("to_ref")
            .and_then(|v| v.as_str())
            .ok_or_else(|| TokenSaveError::Config {
                message: "missing required parameter: to_ref".to_string(),
            })?;
    let changed_files: Vec<String> = match git_diff_files(cg.project_root(), from_ref, to_ref) {
        Ok(files) => files,
        Err(e) => {
            return Ok(ToolResult {
                value: json!({
                    "content": [{ "type": "text", "text": format!("git diff failed: {}", e) }]
                }),
                touched_files: vec![],
            });
        }
    };
    // Previously a `file_symbols: HashMap<String, Vec<Value>>` was also
    // populated here but never read; it has been removed along with the
    // per-symbol clones it forced.
    let mut symbols_in_changed: Vec<Value> = Vec::new();
    let mut not_indexed: Vec<Value> = Vec::new();
    for file in &changed_files {
        let nodes = cg.get_nodes_by_file(file).await?;
        let symbols: Vec<Value> = nodes
            .iter()
            .map(|n| {
                json!({
                    "id": n.id,
                    "name": n.name,
                    "kind": n.kind.as_str(),
                    "file": n.file_path,
                    "line": n.start_line,
                    "signature": n.signature,
                })
            })
            .collect();
        if symbols.is_empty() {
            // No graph nodes: the file was removed in the diff range, or
            // was never indexed.
            not_indexed.push(json!({
                "file": file,
                "status": "removed_or_not_indexed",
            }));
        } else {
            symbols_in_changed.extend(symbols);
        }
    }
    let touched_files: Vec<String> = changed_files.clone();
    let result = json!({
        "from_ref": from_ref,
        "to_ref": to_ref,
        "changed_file_count": changed_files.len(),
        "changed_files": changed_files,
        "symbols_in_changed_files": symbols_in_changed,
        "files_not_indexed": not_indexed,
    });
    let formatted = serde_json::to_string_pretty(&result).unwrap_or_default();
    Ok(ToolResult {
        value: json!({
            "content": [{ "type": "text", "text": truncate_response(&formatted) }]
        }),
        touched_files,
    })
}
/// Builds commit-message context from the current working-tree changes:
/// per-file roles, symbols grouped by role, a suggested commit category,
/// and the subjects of the five most recent commits.
///
/// Git failures and an empty change set are reported as tool output, not
/// as errors.
pub(super) async fn handle_commit_context(cg: &TokenSave, args: Value) -> Result<ToolResult> {
    let staged_only = args
        .get("staged_only")
        .and_then(serde_json::Value::as_bool)
        .unwrap_or(false);
    let changed_files = match git_changed_files(cg.project_root(), staged_only) {
        Ok(files) => files,
        Err(e) => {
            return Ok(ToolResult {
                value: json!({"content": [{"type": "text", "text": format!("git error: {}", e)}]}),
                touched_files: vec![],
            });
        }
    };
    if changed_files.is_empty() {
        return Ok(ToolResult {
            value: json!({"content": [{"type": "text", "text": "No changes detected."}]}),
            touched_files: vec![],
        });
    }
    // Best-effort: files with inline test annotations; empty set on error.
    let files_with_inline_tests = cg
        .get_files_with_test_annotations()
        .await
        .unwrap_or_default();
    let mut file_roles: Vec<Value> = Vec::new();
    let mut symbols_by_role: HashMap<&str, Vec<Value>> = HashMap::new();
    // Track the category flags while classifying, instead of re-scanning
    // the assembled JSON values afterwards.
    let mut has_tests = false;
    let mut has_source = false;
    for file in &changed_files {
        let role = classify_file_role(file, &files_with_inline_tests);
        has_tests |= role == "test";
        has_source |= role == "source";
        // Per-file symbol lookups are best-effort; a failed lookup counts
        // as zero symbols rather than aborting the whole summary.
        let nodes = cg.get_nodes_by_file(file).await.unwrap_or_default();
        file_roles.push(json!({"file": file, "role": role, "symbols": nodes.len()}));
        for node in &nodes {
            symbols_by_role.entry(role).or_default().push(json!({
                "name": node.name,
                "kind": node.kind.as_str(),
                "file": node.file_path,
                "line": node.start_line,
            }));
        }
    }
    let category = match (has_source, has_tests) {
        (true, true) => "feature/fix (source + tests)",
        (true, false) => "feature/fix/refactor",
        (false, true) => "test",
        (false, false) => "chore/docs/config",
    };
    let recent_commits = git_recent_commits(cg.project_root(), 5).unwrap_or_default();
    let total_symbols: usize = symbols_by_role.values().map(std::vec::Vec::len).sum();
    let output = json!({
        "changed_files": file_roles,
        "symbols_by_role": symbols_by_role,
        "suggested_category": category,
        "recent_commits": recent_commits,
        "summary": format!("{} file(s) changed, {} symbol(s) affected", changed_files.len(), total_symbols),
    });
    let formatted = serde_json::to_string_pretty(&output).unwrap_or_default();
    Ok(ToolResult {
        value: json!({"content": [{"type": "text", "text": truncate_response(&formatted)}]}),
        touched_files: changed_files,
    })
}
/// Builds PR-review context between `base_ref` (default "main") and
/// `head_ref` (default "HEAD"): commit log, per-symbol added/modified
/// classification, changed test files, tests within the impact radius of
/// changed symbols, and the modules of external callers.
///
/// NOTE(review): "added" vs "modified" is a heuristic based on whether a
/// symbol has callers outside the changed file set — a pre-existing symbol
/// with no external callers is reported as "added". Confirm consumers
/// treat these labels as approximate.
pub(super) async fn handle_pr_context(cg: &TokenSave, args: Value) -> Result<ToolResult> {
    let base = args
        .get("base_ref")
        .and_then(|v| v.as_str())
        .unwrap_or("main");
    let head = args
        .get("head_ref")
        .and_then(|v| v.as_str())
        .unwrap_or("HEAD");
    // Git failures are surfaced as tool output, not hard errors.
    let changed_files = match git_diff_files(cg.project_root(), base, head) {
        Ok(files) => files,
        Err(e) => {
            return Ok(ToolResult {
                value: json!({"content": [{"type": "text", "text": format!("git error: {}", e)}]}),
                touched_files: vec![],
            });
        }
    };
    // Commit subjects in the range (best-effort; empty on failure).
    let commits = git_commit_log(cg.project_root(), base, head).unwrap_or_default();
    let mut symbols_added: Vec<Value> = Vec::new();
    let mut symbols_modified: Vec<Value> = Vec::new();
    let mut test_files_changed: Vec<String> = Vec::new();
    let mut impacted_modules: HashSet<String> = HashSet::new();
    // Best-effort: files with inline test annotations; empty set on error.
    let files_with_inline_tests = cg
        .get_files_with_test_annotations()
        .await
        .unwrap_or_default();
    let has_tests =
        |path: &str| crate::tokensave::is_test_file(path) || files_with_inline_tests.contains(path);
    for file in &changed_files {
        if has_tests(file) {
            test_files_changed.push(file.clone());
        }
        let nodes = cg.get_nodes_by_file(file).await.unwrap_or_default();
        for node in &nodes {
            let sym = json!({
                "name": node.name,
                "kind": node.kind.as_str(),
                "file": node.file_path,
                "line": node.start_line,
            });
            // Depth-1 callers; any caller outside the changed set means
            // external code depends on this symbol.
            let callers = cg.get_callers(&node.id, 1).await.unwrap_or_default();
            let has_external_callers = callers
                .iter()
                .any(|(c, _)| !changed_files.contains(&c.file_path));
            if has_external_callers {
                symbols_modified.push(sym);
                for (caller, _) in &callers {
                    if !changed_files.contains(&caller.file_path) {
                        // Module = the directory portion of the caller's
                        // path (the whole path when it has no '/').
                        #[allow(clippy::map_unwrap_or)]
                        let dir = caller
                            .file_path
                            .rfind('/')
                            .map(|i| &caller.file_path[..i])
                            .unwrap_or(&caller.file_path);
                        impacted_modules.insert(dir.to_string());
                    }
                }
            } else {
                symbols_added.push(sym);
            }
        }
    }
    // Collect test files within an impact radius of 2 of any symbol in a
    // changed non-test file.
    let mut affected_tests: HashSet<String> = HashSet::new();
    for file in &changed_files {
        if has_tests(file) {
            continue;
        }
        let nodes = cg.get_nodes_by_file(file).await.unwrap_or_default();
        for node in &nodes {
            let impact = cg.get_impact_radius(&node.id, 2).await.unwrap_or_default();
            for impacted in &impact.nodes {
                if has_tests(&impacted.file_path) {
                    affected_tests.insert(impacted.file_path.clone());
                }
            }
        }
    }
    // Sort set-derived lists for deterministic output.
    let mut impacted_sorted: Vec<String> = impacted_modules.into_iter().collect();
    impacted_sorted.sort();
    let mut affected_sorted: Vec<String> = affected_tests.into_iter().collect();
    affected_sorted.sort();
    let output = json!({
        "base": base,
        "head": head,
        "commits": commits,
        "files_changed": changed_files.len(),
        "symbols_added": symbols_added.len(),
        "symbols_modified": symbols_modified.len(),
        "added": symbols_added,
        "modified": symbols_modified,
        "test_files_changed": test_files_changed,
        "affected_tests": affected_sorted,
        "impacted_modules": impacted_sorted,
    });
    let formatted = serde_json::to_string_pretty(&output).unwrap_or_default();
    Ok(ToolResult {
        value: json!({"content": [{"type": "text", "text": truncate_response(&formatted)}]}),
        touched_files: changed_files,
    })
}
/// Lists tracked branches with their db size, parent, last-sync time, and
/// current/default flags. Returns an empty list when branch metadata has
/// not been configured.
///
/// NOTE(review): branch order follows `meta.branches` iteration order —
/// confirm whether callers need a sorted listing.
pub(super) fn handle_branch_list(cg: &TokenSave) -> ToolResult {
    let tokensave_dir = crate::config::get_tokensave_dir(cg.project_root());
    let current = cg.active_branch();
    let meta = crate::branch_meta::load_branch_meta(&tokensave_dir);
    let branches: Vec<Value> = match meta {
        Some(ref meta) => meta
            .branches
            .iter()
            .map(|(name, entry)| {
                let db_path = tokensave_dir.join(&entry.db_file);
                // Missing or unreadable db files report a size of 0.
                let size_bytes = db_path.metadata().map_or(0, |m| m.len());
                json!({
                    "name": name,
                    "parent": entry.parent,
                    "size_bytes": size_bytes,
                    "last_synced_at": entry.last_synced_at,
                    "is_current": current == Some(name.as_str()),
                    // Plain string equality; the previous
                    // `Some(..) == ...as_str().into()` round-tripped through
                    // Option for no benefit.
                    "is_default": name.as_str() == meta.default_branch,
                })
            })
            .collect(),
        None => vec![],
    };
    let result = json!({
        "branch_count": branches.len(),
        "current_branch": current,
        "branches": branches,
    });
    let output = serde_json::to_string_pretty(&result).unwrap_or_default();
    ToolResult {
        value: json!({
            "content": [{ "type": "text", "text": truncate_response(&output) }]
        }),
        touched_files: vec![],
    }
}
pub(super) async fn handle_branch_search(cg: &TokenSave, args: Value) -> Result<ToolResult> {
let branch =
args.get("branch")
.and_then(|v| v.as_str())
.ok_or_else(|| TokenSaveError::Config {
message: "missing required parameter: branch".to_string(),
})?;
let query =
args.get("query")
.and_then(|v| v.as_str())
.ok_or_else(|| TokenSaveError::Config {
message: "missing required parameter: query".to_string(),
})?;
let limit = args
.get("limit")
.and_then(serde_json::Value::as_u64)
.map_or(10, |v| v.min(500) as usize);
let branch_cg = TokenSave::open_branch(cg.project_root(), branch).await?;
let results = branch_cg.search(query, limit).await?;
let items: Vec<Value> = results
.iter()
.map(|r| {
json!({
"id": r.node.id,
"name": r.node.name,
"kind": r.node.kind.as_str(),
"file": r.node.file_path,
"line": r.node.start_line,
"signature": r.node.signature,
"score": r.score,
"branch": branch,
})
})
.collect();
let output = serde_json::to_string_pretty(&items).unwrap_or_default();
Ok(ToolResult {
value: json!({
"content": [{ "type": "text", "text": truncate_response(&output) }]
}),
touched_files: vec![],
})
}
/// Diffs the symbol index of two branches, reporting symbols that were
/// added, removed, or changed signature, matched by qualified name.
///
/// `base` defaults to the configured default branch; `head` defaults to
/// the active branch. Optional `file` (path prefix or exact match) and
/// `kind` filters narrow the comparison.
///
/// # Errors
/// Returns `TokenSaveError::Config` when branch tracking is not
/// configured, the head branch cannot be determined, or base and head are
/// the same; propagates branch-open and file-listing failures.
pub(super) async fn handle_branch_diff(cg: &TokenSave, args: Value) -> Result<ToolResult> {
    let project_root = cg.project_root();
    let tokensave_dir = crate::config::get_tokensave_dir(project_root);
    let meta = crate::branch_meta::load_branch_meta(&tokensave_dir).ok_or_else(|| {
        TokenSaveError::Config {
            message: "no branch tracking configured — run `tokensave branch add` first".to_string(),
        }
    })?;
    let base_name = args
        .get("base")
        .and_then(|v| v.as_str())
        .unwrap_or(&meta.default_branch);
    let head_name = args
        .get("head")
        .and_then(|v| v.as_str())
        .or_else(|| cg.active_branch())
        .ok_or_else(|| TokenSaveError::Config {
            message: "cannot determine head branch — specify it explicitly".to_string(),
        })?;
    if base_name == head_name {
        return Err(TokenSaveError::Config {
            message: format!("base and head are the same branch: '{base_name}'"),
        });
    }
    let file_filter = args.get("file").and_then(|v| v.as_str());
    let kind_filter = args.get("kind").and_then(|v| v.as_str());
    let base_cg = TokenSave::open_branch(project_root, base_name).await?;
    // Reuse the already-open graph when it is the requested head (and not a
    // fallback); otherwise open the head branch explicitly.
    let head_cg = if cg.active_branch() == Some(head_name) && !cg.is_fallback() {
        None
    } else {
        Some(TokenSave::open_branch(project_root, head_name).await?)
    };
    let head_ref = head_cg.as_ref().unwrap_or(cg);
    // Compare over the union of file paths known to either branch.
    let base_files = base_cg.get_all_files().await?;
    let head_files = head_ref.get_all_files().await?;
    let base_file_set: HashSet<&str> = base_files.iter().map(|f| f.path.as_str()).collect();
    let head_file_set: HashSet<&str> = head_files.iter().map(|f| f.path.as_str()).collect();
    let all_files: HashSet<&str> = base_file_set.union(&head_file_set).copied().collect();
    let mut added = Vec::new();
    let mut removed = Vec::new();
    let mut changed = Vec::new();
    let mut touched = Vec::new();
    for file_path in &all_files {
        if let Some(filter) = file_filter {
            if !file_path.starts_with(filter) && *file_path != filter {
                continue;
            }
        }
        // Per-file lookups are best-effort; a failed lookup is treated as
        // "no symbols" on that side.
        let base_nodes = base_cg
            .get_nodes_by_file(file_path)
            .await
            .unwrap_or_default();
        let head_nodes = head_ref
            .get_nodes_by_file(file_path)
            .await
            .unwrap_or_default();
        // Symbols are matched across branches by qualified name.
        let base_map: HashMap<&str, &crate::types::Node> = base_nodes
            .iter()
            .map(|n| (n.qualified_name.as_str(), n))
            .collect();
        let head_map: HashMap<&str, &crate::types::Node> = head_nodes
            .iter()
            .map(|n| (n.qualified_name.as_str(), n))
            .collect();
        // One pass over head symbols classifies additions and signature
        // changes (previously two separate passes over the same map).
        for (qn, head_node) in &head_map {
            if let Some(filter) = kind_filter {
                if head_node.kind.as_str() != filter {
                    continue;
                }
            }
            match base_map.get(qn) {
                None => {
                    added.push(json!({
                        "name": head_node.name,
                        "qualified_name": head_node.qualified_name,
                        "kind": head_node.kind.as_str(),
                        "file": head_node.file_path,
                        "line": head_node.start_line,
                        "signature": head_node.signature,
                    }));
                    touched.push(head_node.file_path.clone());
                }
                Some(base_node) if base_node.signature != head_node.signature => {
                    changed.push(json!({
                        "name": head_node.name,
                        "qualified_name": head_node.qualified_name,
                        "kind": head_node.kind.as_str(),
                        "file": head_node.file_path,
                        "line": head_node.start_line,
                        "base_signature": base_node.signature,
                        "head_signature": head_node.signature,
                    }));
                    touched.push(head_node.file_path.clone());
                }
                Some(_) => {}
            }
        }
        // Base symbols absent from head are removals.
        for (qn, node) in &base_map {
            if let Some(filter) = kind_filter {
                if node.kind.as_str() != filter {
                    continue;
                }
            }
            if !head_map.contains_key(qn) {
                removed.push(json!({
                    "name": node.name,
                    "qualified_name": node.qualified_name,
                    "kind": node.kind.as_str(),
                    "file": node.file_path,
                    "line": node.start_line,
                    "signature": node.signature,
                }));
                touched.push(node.file_path.clone());
            }
        }
    }
    let result = json!({
        "base": base_name,
        "head": head_name,
        "summary": {
            "added": added.len(),
            "removed": removed.len(),
            "changed": changed.len(),
        },
        "added": added,
        "removed": removed,
        "changed": changed,
    });
    let output = serde_json::to_string_pretty(&result).unwrap_or_default();
    // unique_file_paths dedupes the (possibly repeated) touched paths.
    let touched_files = unique_file_paths(touched.iter().map(std::string::String::as_str));
    Ok(ToolResult {
        value: json!({
            "content": [{ "type": "text", "text": truncate_response(&output) }]
        }),
        touched_files,
    })
}