use crate::server::helpers::{
parse_semantic_path, pathfinder_to_error_data, require_symbol_target, serialize_metadata,
treesitter_error_to_error_data,
};
use crate::server::types::{
AnalyzeImpactParams, GetDefinitionParams, GetDefinitionResponse, ReadWithDeepContextParams,
};
use crate::server::PathfinderServer;
use pathfinder_common::error::PathfinderError;
use pathfinder_lsp::LspError;
use rmcp::handler::server::wrapper::Json;
use rmcp::model::{CallToolResult, ErrorData};
/// Direction of a call-hierarchy traversal: who calls the symbol
/// (`Incoming`) versus what the symbol calls (`Outgoing`).
enum CallDirection {
Incoming,
Outgoing,
}
/// Outcome of the LSP dependency-resolution step used by
/// `read_with_deep_context`.
struct LspResolution {
// Callees discovered via the LSP call hierarchy (empty when degraded).
dependencies: Vec<crate::server::types::DeepContextDependency>,
// True when the LSP could not (fully) verify the result.
degraded: bool,
// Machine-readable reason for degradation, e.g. "no_lsp".
degraded_reason: Option<String>,
// Engines that contributed to the answer, e.g. ["tree-sitter", "lsp"].
engines: Vec<&'static str>,
}
impl PathfinderServer {
/// Resolve the outgoing call dependencies of the symbol located at
/// (`start_line`, `name_column`) in `semantic_path`'s file.
///
/// Starts pessimistic (degraded, reason `"no_lsp"`) and only clears the
/// degraded state once the LSP produced verifiable evidence. Incoming
/// positions are 0-based; this LSP layer expects 1-based, hence the `+ 1`.
async fn resolve_lsp_dependencies(
&self,
semantic_path: &pathfinder_common::types::SemanticPath,
start_line: usize,
name_column: usize,
) -> LspResolution {
let mut dependencies = Vec::new();
// Pessimistic defaults: tree-sitter only, degraded until proven otherwise.
let mut degraded = true;
let mut degraded_reason = Some("no_lsp".to_owned());
let mut engines = vec!["tree-sitter"];
let lsp_result = self
.lawyer
.call_hierarchy_prepare(
self.workspace_root.path(),
&semantic_path.file_path,
u32::try_from(start_line + 1).unwrap_or(1),
u32::try_from(name_column + 1).unwrap_or(1),
)
.await;
match lsp_result {
// Symbol resolved by the server: expand its outgoing calls.
Ok(items) if !items.is_empty() => {
self.append_outgoing_deps(
&items[0],
&mut dependencies,
&mut engines,
&mut degraded,
&mut degraded_reason,
)
.await;
}
// Empty prepare result is ambiguous: "symbol has no calls" vs. "server
// still indexing". Probe with goto_definition to tell them apart.
Ok(_) => {
let probe = self
.lawyer
.goto_definition(
self.workspace_root.path(),
&semantic_path.file_path,
u32::try_from(start_line + 1).unwrap_or(1),
u32::try_from(name_column + 1).unwrap_or(1),
)
.await;
if matches!(probe, Ok(Some(_))) {
// Server can navigate: the empty call list is trustworthy.
engines.push("lsp");
degraded = false;
degraded_reason = None;
} else {
// Server answered but cannot navigate yet — likely warming up.
engines.push("lsp");
degraded = true;
degraded_reason = Some("lsp_warmup_empty_unverified".to_owned());
}
}
// No server / capability for this language: keep the "no_lsp" state.
Err(LspError::NoLspAvailable | LspError::UnsupportedCapability { .. }) => {}
Err(e) => {
// Unexpected failure: log and stay in the pessimistic state.
tracing::warn!(
tool = "read_with_deep_context",
error = %e,
"call_hierarchy_prepare failed"
);
}
}
LspResolution {
dependencies,
degraded,
degraded_reason,
engines,
}
}
/// Fetch `item`'s outgoing calls and append them to `dependencies`.
///
/// On success the degraded flags are cleared; on failure they are left
/// untouched so the caller's pessimistic defaults stand (partial data
/// rather than a hard error).
async fn append_outgoing_deps(
&self,
item: &pathfinder_lsp::types::CallHierarchyItem,
dependencies: &mut Vec<crate::server::types::DeepContextDependency>,
engines: &mut Vec<&'static str>,
degraded: &mut bool,
degraded_reason: &mut Option<String>,
) {
match self
.lawyer
.call_hierarchy_outgoing(self.workspace_root.path(), item)
.await
{
Ok(outgoing) => {
engines.push("lsp");
for call in outgoing {
let callee = call.item;
// Prefer the server-provided signature; fall back to the bare name.
let signature = callee.detail.clone().unwrap_or_else(|| callee.name.clone());
// Synthesized "file::name" path — NOTE(review): not guaranteed to
// round-trip through the semantic-path parser.
let sp = format!("{}::{}", callee.file, callee.name);
dependencies.push(crate::server::types::DeepContextDependency {
semantic_path: sp,
signature,
file: callee.file,
line: callee.line as usize,
});
}
*degraded = false;
*degraded_reason = None;
}
Err(e) => {
// Leave the degraded state as-is; the caller reports partial results.
tracing::warn!(
tool = "read_with_deep_context",
error = %e,
"call_hierarchy_outgoing failed"
);
}
}
}
#[expect(
clippy::too_many_lines,
reason = "Sequential pipeline: parse → sandbox → TS → LSP (with warmup retry) → grep fallback. Extraction done at helper level; remaining orchestration is linear."
)]
pub(crate) async fn get_definition_impl(
&self,
params: GetDefinitionParams,
) -> Result<Json<GetDefinitionResponse>, ErrorData> {
let start = std::time::Instant::now();
tracing::info!(
tool = "get_definition",
semantic_path = %params.semantic_path,
"get_definition: start"
);
let semantic_path = parse_semantic_path(¶ms.semantic_path)?;
require_symbol_target(&semantic_path, ¶ms.semantic_path)?;
if let Err(e) = self.sandbox.check(&semantic_path.file_path) {
let duration_ms = start.elapsed().as_millis();
tracing::warn!(
tool = "get_definition",
error_code = e.error_code(),
duration_ms,
"sandbox check failed"
);
return Err(pathfinder_to_error_data(&e));
}
let ts_start = std::time::Instant::now();
let symbol_scope = self
.surgeon
.read_symbol_scope(self.workspace_root.path(), &semantic_path)
.await
.map_err(treesitter_error_to_error_data)?;
let tree_sitter_ms = ts_start.elapsed().as_millis();
let file_content =
tokio::fs::read_to_string(self.workspace_root.path().join(&semantic_path.file_path))
.await
.unwrap_or_default();
let _did_open_result = self
.lawyer
.did_open(
self.workspace_root.path(),
&semantic_path.file_path,
&file_content,
)
.await;
let lsp_start = std::time::Instant::now();
let lsp_result = self
.lawyer
.goto_definition(
self.workspace_root.path(),
&semantic_path.file_path,
u32::try_from(symbol_scope.start_line + 1).unwrap_or(1),
u32::try_from(symbol_scope.name_column + 1).unwrap_or(1),
)
.await;
let lsp_ms = lsp_start.elapsed().as_millis();
let _did_close_result = self
.lawyer
.did_close(self.workspace_root.path(), &semantic_path.file_path)
.await;
let duration_ms = start.elapsed().as_millis();
match lsp_result {
Ok(Some(def)) => {
tracing::info!(
tool = "get_definition",
file = %def.file,
definition_line = def.line,
tree_sitter_ms,
lsp_ms,
duration_ms,
engines_used = ?["tree-sitter", "lsp"],
"get_definition: complete"
);
Ok(Json(GetDefinitionResponse {
file: def.file,
line: def.line,
column: def.column,
preview: def.preview,
degraded: false,
degraded_reason: None,
}))
}
Ok(None) => {
tokio::time::sleep(std::time::Duration::from_secs(3)).await;
let retry_lsp_result = self
.lawyer
.goto_definition(
self.workspace_root.path(),
&semantic_path.file_path,
u32::try_from(symbol_scope.start_line + 1).unwrap_or(1),
u32::try_from(symbol_scope.name_column + 1).unwrap_or(1),
)
.await;
if let Ok(Some(def)) = retry_lsp_result {
tracing::info!(
tool = "get_definition",
file = %def.file,
definition_line = def.line,
tree_sitter_ms,
lsp_ms,
duration_ms = start.elapsed().as_millis(),
engines_used = ?["tree-sitter", "lsp"],
"get_definition: complete (succeeded on retry after warmup wait)"
);
return Ok(Json(GetDefinitionResponse {
file: def.file,
line: def.line,
column: def.column,
preview: def.preview,
degraded: false,
degraded_reason: None,
}));
}
tracing::info!(
tool = "get_definition",
semantic_path = %params.semantic_path,
tree_sitter_ms,
lsp_ms,
duration_ms,
"get_definition: no definition found via LSP — attempting grep-based fallback"
);
if let Some(mut def) = self.fallback_definition_grep(&semantic_path).await {
def.degraded_reason = Some(
"lsp_warmup_grep_fallback: LSP returned no result (likely warming up); \
result from Ripgrep pattern search — may not be the canonical definition. \
Verify with read_source_file."
.to_owned(),
);
tracing::info!(
tool = "get_definition",
file = %def.file,
line = def.line,
duration_ms,
degraded = true,
degraded_reason = "lsp_warmup_grep_fallback",
engines_used = ?["tree-sitter", "lsp", "ripgrep"],
"get_definition: degraded complete (grep fallback after LSP None)"
);
return Ok(Json(def));
}
tracing::info!(
tool = "get_definition",
semantic_path = %params.semantic_path,
tree_sitter_ms,
lsp_ms,
duration_ms,
"get_definition: no definition found (LSP None, grep empty)"
);
Err(pathfinder_to_error_data(&PathfinderError::SymbolNotFound {
semantic_path: params.semantic_path,
did_you_mean: vec![],
}))
}
Err(LspError::NoLspAvailable) => {
tracing::info!(
tool = "get_definition",
symbol = %semantic_path,
"get_definition: no LSP — attempting grep-based fallback"
);
if let Some(mut def) = self.fallback_definition_grep(&semantic_path).await {
def.degraded_reason = Some(
"no_lsp_grep_fallback: LSP unavailable; result from Ripgrep \
pattern search — may not be the canonical definition. \
Verify with read_source_file."
.to_owned(),
);
tracing::info!(
tool = "get_definition",
file = %def.file,
line = def.line,
duration_ms,
degraded = true,
degraded_reason = "no_lsp_grep_fallback",
engines_used = ?["tree-sitter", "ripgrep"],
"get_definition: degraded complete (grep fallback)"
);
return Ok(Json(def));
}
tracing::info!(
tool = "get_definition",
duration_ms,
degraded = true,
degraded_reason = "no_lsp",
engines_used = ?["none"],
"get_definition: degraded (no LSP, grep fallback also empty)"
);
Err(pathfinder_to_error_data(&PathfinderError::NoLspAvailable {
language: symbol_scope.language,
}))
}
Err(e) => {
tracing::warn!(
tool = "get_definition",
error = %e,
tree_sitter_ms,
lsp_ms,
duration_ms,
engines_used = ?["lsp"],
"get_definition: LSP error"
);
Err(pathfinder_to_error_data(&PathfinderError::LspError {
message: e.to_string(),
}))
}
}
}
/// Grep-based definition lookup used when the LSP cannot answer.
///
/// Cascades through three progressively wider searches — the symbol's own
/// file, an `impl`-scoped search keyed on the parent chain segment, then
/// the whole workspace — returning the first hit (already marked degraded
/// by the helper that found it).
async fn fallback_definition_grep(
&self,
semantic_path: &pathfinder_common::types::SemanticPath,
) -> Option<GetDefinitionResponse> {
let chain = semantic_path.symbol_chain.as_ref()?;
let symbol_name = chain.segments.last()?.name.clone();
// 1) File-scoped: the definition usually lives where the path points.
let in_file = self
.grep_definition_in_file(symbol_name.clone(), semantic_path.file_path.clone())
.await;
if in_file.is_some() {
return in_file;
}
// 2) Impl-scoped: for `Type::method` chains, look for the method inside
//    files containing `impl Type`.
if let [.., parent, _] = &chain.segments[..] {
let via_impl = self.grep_impl_method(&parent.name, &symbol_name).await;
if via_impl.is_some() {
return via_impl;
}
}
// 3) Global search across the workspace as a last resort.
self.grep_definition_global(symbol_name).await
}
/// Search a single file for a definition-like line introducing
/// `symbol_name` (`fn`/`class`/`struct`/… across the supported
/// languages). Returns the first match as a degraded response.
async fn grep_definition_in_file(
&self,
symbol_name: String,
file_path: std::path::PathBuf,
) -> Option<GetDefinitionResponse> {
// Optional visibility/async prefix, a definition keyword, then the symbol
// ending at a word boundary. Fix: in a raw string the word boundary is
// `\b`; the previous `\\b` matched a literal backslash followed by 'b',
// so the pattern could never match real source lines.
// NOTE(review): `symbol_name` is interpolated unescaped — regex
// metacharacters in a symbol name would alter the pattern.
let pattern = format!(
r"(?:(?:pub|export|public|private|protected|internal|open)\s*(?:\([^)]*\)\s*)?(?:async\s*)?)?(?:fn|def|func|function|class|struct|type|interface|const|let|var|enum|trait|mod)\s+{symbol_name}\b"
);
// Normalize Windows separators so the path is usable as a glob.
let glob = file_path.to_string_lossy().replace('\\', "/");
let search_result = self
.scout
.search(&pathfinder_search::SearchParams {
workspace_root: self.workspace_root.path().to_path_buf(),
query: pattern,
is_regex: true,
max_results: 5,
path_glob: glob,
exclude_glob: String::default(),
context_lines: 0,
})
.await;
if let Ok(result) = search_result {
if !result.matches.is_empty() {
// First match wins; the caller treats this as a best-effort hint.
let m = &result.matches[0];
return Some(GetDefinitionResponse {
file: m.file.clone(),
line: u32::try_from(m.line).unwrap_or(u32::MAX),
column: u32::try_from(m.column).unwrap_or(1),
preview: m.content.clone(),
degraded: true,
degraded_reason: Some(
"grep_fallback_file_scoped: result from file-scoped Ripgrep search. \
Verify with read_source_file."
.to_owned(),
),
});
}
}
None
}
/// Rust-specific fallback: find files containing `impl {parent_name}`
/// (optionally generic), then search each for `fn {method_name}`.
async fn grep_impl_method(
&self,
parent_name: &str,
method_name: &str,
) -> Option<GetDefinitionResponse> {
// Fix 1: `\b` (raw string) is the regex word boundary; the previous
// `\\b` matched a literal backslash + 'b' and never matched real code.
// Fix 2: `impl<T> Foo` has no space between `impl` and `<`, so the old
// `impl\s+(?:<[^>]+>\s+)?` missed every generic impl; `impl\s*(?:<[^>]+>)?\s+`
// matches both `impl Foo` and `impl<T> Foo`.
let impl_pattern = format!(r"impl\s*(?:<[^>]+>)?\s+{parent_name}\b");
let search_result = self
.scout
.search(&pathfinder_search::SearchParams {
workspace_root: self.workspace_root.path().to_path_buf(),
query: impl_pattern,
is_regex: true,
max_results: 10,
path_glob: "**/*.rs".to_owned(),
exclude_glob: String::default(),
context_lines: 0,
})
.await;
if let Ok(result) = search_result {
// The method pattern does not depend on the matched file — build once
// instead of re-formatting it inside the loop.
let method_pattern = format!(
r"(?:(?:pub|export|public|private|protected|internal|open)\s*(?:\([^)]*\)\s*)?(?:async\s*)?)?fn\s+{method_name}\b"
);
for m in &result.matches {
// Scope the method search to the file that contained the impl.
let file_search = self
.scout
.search(&pathfinder_search::SearchParams {
workspace_root: self.workspace_root.path().to_path_buf(),
query: method_pattern.clone(),
is_regex: true,
max_results: 5,
path_glob: m.file.clone(),
exclude_glob: String::default(),
context_lines: 0,
})
.await;
if let Ok(file_result) = file_search {
if !file_result.matches.is_empty() {
let hit = &file_result.matches[0];
return Some(GetDefinitionResponse {
file: hit.file.clone(),
line: u32::try_from(hit.line).unwrap_or(u32::MAX),
column: u32::try_from(hit.column).unwrap_or(1),
preview: hit.content.clone(),
degraded: true,
degraded_reason: Some(
"grep_fallback_impl_scoped: result from impl-scoped Ripgrep search. \
Verify with read_source_file."
.to_owned(),
),
});
}
}
}
}
None
}
/// Workspace-wide definition search for `symbol_name`, excluding common
/// test/mock directories. Last resort in the grep-fallback cascade.
async fn grep_definition_global(&self, symbol_name: String) -> Option<GetDefinitionResponse> {
// Same definition-keyword pattern as the file-scoped search. Fix: `\b`
// (raw string) is the regex word boundary; the previous `\\b` matched a
// literal backslash + 'b', so the pattern could never match.
let pattern = format!(
r"(?:(?:pub|export|public|private|protected|internal|open)\s*(?:\([^)]*\)\s*)?(?:async\s*)?)?(?:fn|def|func|function|class|struct|type|interface|const|let|var|enum|trait|mod)\s+{symbol_name}\b"
);
let search_result = self
.scout
.search(&pathfinder_search::SearchParams {
workspace_root: self.workspace_root.path().to_path_buf(),
query: pattern,
is_regex: true,
max_results: 10,
path_glob: "**/*".to_owned(),
// Keep test fixtures and mocks out of "canonical definition" guesses.
exclude_glob: "**/{test,tests,mock}*/**".to_owned(),
context_lines: 0,
})
.await;
if let Ok(result) = search_result {
if !result.matches.is_empty() {
let m = &result.matches[0];
return Some(GetDefinitionResponse {
file: m.file.clone(),
line: u32::try_from(m.line).unwrap_or(u32::MAX),
column: u32::try_from(m.column).unwrap_or(1),
preview: m.content.clone(),
degraded: true,
degraded_reason: Some(
"grep_fallback_global: result from global Ripgrep search — \
may not be the canonical definition. Verify with read_source_file."
.to_owned(),
),
});
}
}
None
}
/// Read a symbol's source together with its outgoing dependencies.
///
/// Pipeline: parse/validate → sandbox check → tree-sitter scope read →
/// LSP call-hierarchy dependency resolution (via
/// `resolve_lsp_dependencies`). Returns the symbol text prefixed with a
/// dependency summary (and a DEGRADED banner when LSP could not verify),
/// plus structured metadata.
pub(crate) async fn read_with_deep_context_impl(
&self,
params: ReadWithDeepContextParams,
) -> Result<CallToolResult, ErrorData> {
let start = std::time::Instant::now();
tracing::info!(
tool = "read_with_deep_context",
semantic_path = %params.semantic_path,
"read_with_deep_context: start"
);
// Fix: restore `&params` (source had mojibake `¶ms`, which does not compile).
let semantic_path = parse_semantic_path(&params.semantic_path)?;
require_symbol_target(&semantic_path, &params.semantic_path)?;
// Reject paths escaping the workspace sandbox before touching disk.
if let Err(e) = self.sandbox.check(&semantic_path.file_path) {
let duration_ms = start.elapsed().as_millis();
tracing::warn!(
tool = "read_with_deep_context",
error_code = e.error_code(),
duration_ms,
"sandbox check failed"
);
return Err(pathfinder_to_error_data(&e));
}
// Tree-sitter provides the symbol's span, content, and name position.
let ts_start = std::time::Instant::now();
let scope = self
.surgeon
.read_symbol_scope(self.workspace_root.path(), &semantic_path)
.await
.map_err(treesitter_error_to_error_data)?;
let tree_sitter_ms = ts_start.elapsed().as_millis();
// Open the document for the LSP (best-effort; result ignored).
let file_content =
tokio::fs::read_to_string(self.workspace_root.path().join(&semantic_path.file_path))
.await
.unwrap_or_default();
let _did_open_result = self
.lawyer
.did_open(
self.workspace_root.path(),
&semantic_path.file_path,
&file_content,
)
.await;
let lsp_start = std::time::Instant::now();
let LspResolution {
dependencies,
degraded,
degraded_reason,
engines,
} = self
.resolve_lsp_dependencies(&semantic_path, scope.start_line, scope.name_column)
.await;
let _did_close_result = self
.lawyer
.did_close(self.workspace_root.path(), &semantic_path.file_path)
.await;
let lsp_ms = lsp_start.elapsed().as_millis();
let duration_ms = start.elapsed().as_millis();
tracing::info!(
tool = "read_with_deep_context",
semantic_path = %params.semantic_path,
tree_sitter_ms,
lsp_ms,
duration_ms,
degraded,
degraded_reason,
engines_used = ?engines,
"read_with_deep_context: complete"
);
let dep_count = dependencies.len();
let metadata = crate::server::types::ReadWithDeepContextMetadata {
start_line: scope.start_line,
end_line: scope.end_line,
version_hash: scope.version_hash.short().to_owned(),
language: scope.language,
dependencies,
degraded,
degraded_reason: degraded_reason.clone(),
};
// Degraded responses get a loud banner so callers don't over-trust them.
let text = if degraded {
let reason = degraded_reason.as_deref().unwrap_or("unknown");
format!(
"DEGRADED MODE ({}) — {dep_count} dependencies loaded (results may be incomplete)\n\n{}",
reason, scope.content
)
} else {
format!("{dep_count} dependencies loaded\n\n{}", scope.content)
};
let mut res = CallToolResult::success(vec![rmcp::model::Content::text(text)]);
res.structured_content = serialize_metadata(&metadata);
Ok(res)
}
/// Breadth-first traversal of the call hierarchy from `initial_item` in
/// one `direction`, bounded by `max_depth` levels of expansion.
///
/// Nodes are deduplicated by `(file, line)`; every touched file is added
/// to `files_referenced`. Returns the collected references and the
/// deepest level actually expanded. Mid-traversal LSP failures are
/// logged and produce a partial graph rather than an error.
async fn bfs_call_hierarchy(
&self,
initial_item: &pathfinder_lsp::types::CallHierarchyItem,
direction: CallDirection,
max_depth: u32,
files_referenced: &mut std::collections::HashSet<String>,
) -> (Vec<crate::server::types::ImpactReference>, u32) {
// Queue holds (item, depth-of-item); the root is depth 0.
let mut queue = std::collections::VecDeque::new();
queue.push_back((initial_item.clone(), 0));
let mut seen = std::collections::HashSet::new();
seen.insert((initial_item.file.clone(), initial_item.line));
files_referenced.insert(initial_item.file.clone());
let mut references = Vec::new();
let mut max_depth_reached = 0;
while let Some((item, current_depth)) = queue.pop_front() {
max_depth_reached = std::cmp::max(max_depth_reached, current_depth);
// At the depth limit: the node is counted but not expanded further.
if current_depth >= max_depth {
continue;
}
let hierarchy_result = match direction {
CallDirection::Incoming => {
self.lawyer
.call_hierarchy_incoming(self.workspace_root.path(), &item)
.await
}
CallDirection::Outgoing => {
self.lawyer
.call_hierarchy_outgoing(self.workspace_root.path(), &item)
.await
}
};
match hierarchy_result {
Ok(calls) => {
for call in calls {
let referenced_item = call.item;
files_referenced.insert(referenced_item.file.clone());
let key = (referenced_item.file.clone(), referenced_item.line);
// Only unseen nodes are recorded and enqueued (cycle guard).
if seen.insert(key) {
queue.push_back((referenced_item.clone(), current_depth + 1));
references.push(crate::server::types::ImpactReference {
semantic_path: format!(
"{}::{}",
referenced_item.file, referenced_item.name
),
file: referenced_item.file.clone(),
line: referenced_item.line as usize,
snippet: referenced_item
.detail
.unwrap_or_else(|| referenced_item.name.clone()),
// version_hash is not computed here; analyze_impact hashes
// referenced files in a separate pass. `depth` is the depth
// of the node being expanded — direct references get 0.
version_hash: String::default(), direction: match direction {
CallDirection::Incoming => "incoming".to_owned(),
CallDirection::Outgoing => "outgoing".to_owned(),
},
depth: current_depth as usize,
});
}
}
}
Err(e) => {
// Log and continue: a failed expansion yields a partial graph.
let direction_name = match direction {
CallDirection::Incoming => "call_hierarchy_incoming",
CallDirection::Outgoing => "call_hierarchy_outgoing",
};
tracing::warn!(
tool = "analyze_impact",
error = %e,
file = %item.file,
line = item.line,
depth = current_depth,
"{direction_name} failed during BFS (partial impact graph)"
);
}
}
}
(references, max_depth_reached)
}
#[expect(
clippy::too_many_lines,
reason = "Sequential pipeline (parse→sandbox→tree-sitter→LSP→BFS→version hash)."
)]
pub(crate) async fn analyze_impact_impl(
&self,
params: AnalyzeImpactParams,
) -> Result<CallToolResult, ErrorData> {
let start = std::time::Instant::now();
let max_depth = params.max_depth.clamp(1, 5);
tracing::info!(
tool = "analyze_impact",
semantic_path = %params.semantic_path,
max_depth = max_depth,
"analyze_impact: start"
);
let semantic_path = parse_semantic_path(¶ms.semantic_path)?;
require_symbol_target(&semantic_path, ¶ms.semantic_path)?;
if let Err(e) = self.sandbox.check(&semantic_path.file_path) {
let duration_ms = start.elapsed().as_millis();
tracing::warn!(
tool = "analyze_impact",
error_code = e.error_code(),
duration_ms,
"sandbox check failed"
);
return Err(pathfinder_to_error_data(&e));
}
let ts_start = std::time::Instant::now();
let scope = match self
.surgeon
.read_symbol_scope(self.workspace_root.path(), &semantic_path)
.await
{
Ok(s) => s,
Err(e) => {
let duration_ms = start.elapsed().as_millis();
tracing::warn!(
tool = "analyze_impact",
error = %e,
duration_ms,
"tree-sitter read failed"
);
return Err(treesitter_error_to_error_data(e));
}
};
let tree_sitter_ms = ts_start.elapsed().as_millis();
let file_content =
tokio::fs::read_to_string(self.workspace_root.path().join(&semantic_path.file_path))
.await
.unwrap_or_default();
let _did_open_result = self
.lawyer
.did_open(
self.workspace_root.path(),
&semantic_path.file_path,
&file_content,
)
.await;
let lsp_start = std::time::Instant::now();
let mut incoming: Option<Vec<crate::server::types::ImpactReference>> = None;
let mut outgoing: Option<Vec<crate::server::types::ImpactReference>> = None;
let mut degraded = true;
let mut degraded_reason = Some("no_lsp".to_owned());
let mut engines = vec!["tree-sitter"];
let mut files_referenced = std::collections::HashSet::new();
let mut max_depth_reached = 0;
let lsp_result = self
.lawyer
.call_hierarchy_prepare(
self.workspace_root.path(),
&semantic_path.file_path,
u32::try_from(scope.start_line + 1).unwrap_or(1),
u32::try_from(scope.name_column + 1).unwrap_or(1),
)
.await;
match lsp_result {
Ok(items) if !items.is_empty() => {
engines.push("lsp");
degraded = false;
degraded_reason = None;
let initial_item = &items[0];
let (incoming_refs, depth_in) = self
.bfs_call_hierarchy(
initial_item,
CallDirection::Incoming,
max_depth,
&mut files_referenced,
)
.await;
incoming = Some(incoming_refs);
max_depth_reached = std::cmp::max(max_depth_reached, depth_in);
let (outgoing_refs, depth_out) = self
.bfs_call_hierarchy(
initial_item,
CallDirection::Outgoing,
max_depth,
&mut files_referenced,
)
.await;
outgoing = Some(outgoing_refs);
max_depth_reached = std::cmp::max(max_depth_reached, depth_out);
}
Ok(_) => {
let probe = self
.lawyer
.goto_definition(
self.workspace_root.path(),
&semantic_path.file_path,
u32::try_from(scope.start_line + 1).unwrap_or(1),
u32::try_from(scope.name_column + 1).unwrap_or(1),
)
.await;
if matches!(probe, Ok(Some(_))) {
engines.push("lsp");
degraded = false;
degraded_reason = None;
incoming = Some(Vec::new());
outgoing = Some(Vec::new());
} else {
tracing::info!(
tool = "analyze_impact",
symbol = %semantic_path,
"analyze_impact: call_hierarchy_prepare returned [] but goto_definition \
probe returned no result — LSP likely warming up, attempting grep-based reference fallback"
);
engines.push("lsp");
degraded = true;
degraded_reason = Some("lsp_warmup_empty_unverified".to_owned());
let symbol_name = semantic_path
.symbol_chain
.as_ref()
.and_then(|c| c.segments.last())
.map(|s| s.name.clone())
.unwrap_or_default();
let search_result = self
.scout
.search(&pathfinder_search::SearchParams {
workspace_root: self.workspace_root.path().to_path_buf(),
query: symbol_name.clone(),
is_regex: false,
max_results: 20,
path_glob: "**/*".to_owned(),
exclude_glob: String::default(),
context_lines: 0,
})
.await;
if let Ok(result) = search_result {
if !result.matches.is_empty() {
let refs: Vec<crate::server::types::ImpactReference> = result
.matches
.into_iter()
.filter(|m| {
let m_path = std::path::Path::new(&m.file);
m_path != std::path::Path::new(&semantic_path.file_path)
})
.take(10) .map(|m| {
files_referenced.insert(m.file.clone());
crate::server::types::ImpactReference {
semantic_path: format!("{}::{symbol_name}", m.file),
file: m.file,
line: usize::try_from(m.line).unwrap_or(usize::MAX),
snippet: m.content,
version_hash: m.version_hash,
direction: "incoming_heuristic".to_owned(),
depth: 0,
}
})
.collect();
incoming = Some(refs);
degraded_reason = Some("lsp_warmup_grep_fallback".to_owned());
tracing::info!(
tool = "analyze_impact",
references_found = incoming.as_ref().map_or(0, Vec::len),
"analyze_impact: grep-based fallback references found during LSP warmup"
);
}
}
}
}
Err(LspError::NoLspAvailable | LspError::UnsupportedCapability { .. }) => {
tracing::info!(
tool = "analyze_impact",
symbol = %semantic_path,
"analyze_impact: no LSP — attempting grep-based reference fallback"
);
let symbol_name = semantic_path
.symbol_chain
.as_ref()
.and_then(|c| c.segments.last())
.map(|s| s.name.clone())
.unwrap_or_default();
let search_result = self
.scout
.search(&pathfinder_search::SearchParams {
workspace_root: self.workspace_root.path().to_path_buf(),
query: symbol_name.clone(),
is_regex: false,
max_results: 20,
path_glob: "**/*".to_owned(),
exclude_glob: String::default(),
context_lines: 0,
})
.await;
if let Ok(result) = search_result {
if !result.matches.is_empty() {
let refs: Vec<crate::server::types::ImpactReference> = result
.matches
.into_iter()
.filter(|m| {
let m_path = std::path::Path::new(&m.file);
m_path != std::path::Path::new(&semantic_path.file_path)
})
.take(10) .map(|m| {
files_referenced.insert(m.file.clone());
crate::server::types::ImpactReference {
semantic_path: format!("{}::{symbol_name}", m.file),
file: m.file,
line: usize::try_from(m.line).unwrap_or(usize::MAX),
snippet: m.content,
version_hash: m.version_hash,
direction: "incoming_heuristic".to_owned(),
depth: 0,
}
})
.collect();
incoming = Some(refs);
degraded_reason = Some("no_lsp_grep_fallback".to_owned());
tracing::info!(
tool = "analyze_impact",
references_found = incoming.as_ref().map_or(0, Vec::len),
"analyze_impact: grep-based fallback references found"
);
}
}
}
Err(e) => {
tracing::warn!(
tool = "analyze_impact",
error = %e,
"call_hierarchy_prepare failed"
);
}
}
let _did_close_result = self
.lawyer
.did_close(self.workspace_root.path(), &semantic_path.file_path)
.await;
let lsp_ms = lsp_start.elapsed().as_millis();
let duration_ms = start.elapsed().as_millis();
let mut version_hashes = std::collections::HashMap::new();
let target_file_path = self.workspace_root.path().join(&semantic_path.file_path);
if let Ok(bytes) = tokio::fs::read(&target_file_path).await {
let hash = pathfinder_common::types::VersionHash::compute(&bytes);
version_hashes.insert(
semantic_path.file_path.to_string_lossy().to_string(),
hash.short().to_owned(),
);
}
for file_ref in &files_referenced {
let abs_path = self.workspace_root.path().join(file_ref);
if let Ok(bytes) = tokio::fs::read(&abs_path).await {
let hash = pathfinder_common::types::VersionHash::compute(&bytes);
version_hashes.insert(file_ref.clone(), hash.short().to_owned());
}
}
tracing::info!(
tool = "analyze_impact",
semantic_path = %params.semantic_path,
tree_sitter_ms,
lsp_ms,
duration_ms,
degraded,
degraded_reason,
engines_used = ?engines,
"analyze_impact: complete"
);
let inc_count = incoming.as_ref().map_or(0, Vec::len);
let out_count = outgoing.as_ref().map_or(0, Vec::len);
let degraded_reason_cloned = degraded_reason.clone();
let metadata = crate::server::types::AnalyzeImpactMetadata {
incoming,
outgoing,
depth_reached: max_depth_reached,
files_referenced: files_referenced.len(),
degraded,
degraded_reason,
version_hashes,
};
let mut text_parts = Vec::new();
if degraded {
text_parts.push(format!(
"Degraded analysis ({}) — LSP unavailable — reference counts are UNRELIABLE. Do NOT trust zero as 'confirmed no callers'. Grep-based heuristic was used if available. Use search_codebase for manual verification.",
degraded_reason_cloned.as_deref().unwrap_or("unknown")
));
}
text_parts.push(format!("Incoming references: {inc_count}"));
text_parts.push(format!("Outgoing references: {out_count}"));
let text = text_parts.join("\n");
let mut res = CallToolResult::success(vec![rmcp::model::Content::text(text)]);
res.structured_content = serialize_metadata(&metadata);
Ok(res)
}
/// Report per-language LSP health plus an aggregated overall status.
///
/// Three stages: (1) map each known server's capability status to
/// ready/warming_up/starting/unavailable; (2) for warming_up languages,
/// actively probe readiness (with a poisoning-tolerant result cache);
/// (3) append entries for languages with no server installed, including
/// install hints. Overall status is the best individual status seen.
// allow: linear three-stage aggregation; splitting would obscure the flow.
#[allow(clippy::too_many_lines)]
#[tracing::instrument(skip(self, params), fields(language = ?params.language))]
pub(crate) async fn lsp_health_impl(
&self,
params: crate::server::types::LspHealthParams,
) -> Result<
rmcp::handler::server::wrapper::Json<crate::server::types::LspHealthResponse>,
ErrorData,
> {
let capability_status = self.lawyer.capability_status().await;
let mut languages = Vec::new();
let mut overall_status = "unavailable";
// Stage 1: classify every running server's self-reported status.
for (lang, status) in &capability_status {
if let Some(ref filter) = params.language {
if lang != filter {
continue;
}
}
// navigation_ready trumps indexing state; uptime alone means "starting".
let (status_str, uptime) = if status.navigation_ready == Some(true) {
("ready", status.uptime_seconds.map(format_uptime))
} else if status.navigation_ready == Some(false)
|| status.indexing_complete == Some(false)
{
("warming_up", status.uptime_seconds.map(format_uptime))
} else if status.uptime_seconds.is_some() {
("starting", status.uptime_seconds.map(format_uptime))
} else {
("unavailable", None)
};
let indexing_status = match status.indexing_complete {
Some(true) => Some("complete".to_owned()),
Some(false) => Some("in_progress".to_owned()),
None => None,
};
// Overall status is monotone: ready > warming_up > starting > unavailable.
match status_str {
"ready" => overall_status = "ready",
"warming_up" if overall_status != "ready" => {
overall_status = "warming_up";
}
"starting" if overall_status != "ready" && overall_status != "warming_up" => {
overall_status = "starting";
}
_ => {}
}
languages.push(crate::server::types::LspLanguageHealth {
language: lang.clone(),
status: status_str.to_owned(),
uptime,
diagnostics_strategy: status.diagnostics_strategy.clone(),
supports_call_hierarchy: status.supports_call_hierarchy,
supports_diagnostics: status.supports_diagnostics,
supports_definition: status.supports_definition,
supports_formatting: status.supports_formatting,
indexing_status,
navigation_ready: status.navigation_ready,
probe_verified: false,
install_hint: None,
degraded_tools: compute_degraded_tools(status),
validation_latency_ms: compute_validation_latency(
status.diagnostics_strategy.as_deref(),
),
});
}
// Stage 2: actively probe warming_up servers, consulting the cache first
// so repeated health calls don't hammer slow servers.
for lang_health in &mut languages {
if lang_health.status == "warming_up" {
// Scope the lock to the cache lookup; the probe itself is awaited
// without holding it.
let cache_action = {
let cache = self
.probe_cache
.lock()
.unwrap_or_else(std::sync::PoisonError::into_inner);
match cache.get(&lang_health.language) {
Some(entry) if entry.is_valid() && entry.success => {
ProbeAction::UseCachedReady
}
Some(entry) if entry.is_valid() && !entry.success => {
ProbeAction::SkipProbe
}
Some(_) => {
ProbeAction::Probe
}
None => ProbeAction::Probe,
}
};
match cache_action {
ProbeAction::UseCachedReady => {
"ready".clone_into(&mut lang_health.status);
lang_health.probe_verified = true;
if overall_status != "ready" {
overall_status = "ready";
}
continue;
}
ProbeAction::SkipProbe => {
continue;
}
ProbeAction::Probe => {}
}
// Only probe servers that have been up long enough to plausibly answer.
let uptime_secs = parse_uptime_to_seconds(lang_health.uptime.as_deref());
if let Some(secs) = uptime_secs {
if secs > 10 {
let probe_result =
self.probe_language_readiness(&lang_health.language).await;
if probe_result {
"ready".clone_into(&mut lang_health.status);
lang_health.probe_verified = true;
self.probe_cache
.lock()
.unwrap_or_else(std::sync::PoisonError::into_inner)
.insert(
lang_health.language.clone(),
crate::server::ProbeCacheEntry::new(true),
);
if overall_status != "ready" {
overall_status = "ready";
}
} else {
// Cache the failure too, so the next call skips the probe.
self.probe_cache
.lock()
.unwrap_or_else(std::sync::PoisonError::into_inner)
.insert(
lang_health.language.clone(),
crate::server::ProbeCacheEntry::new(false),
);
}
}
}
}
}
// Stage 3: report languages detected in the workspace with no server,
// with an install hint and the tools that degrade without one.
let missing_languages = self.lawyer.missing_languages();
for missing in &missing_languages {
if let Some(ref filter) = params.language {
if &missing.language_id != filter {
continue;
}
}
languages.push(crate::server::types::LspLanguageHealth {
language: missing.language_id.clone(),
status: "unavailable".to_owned(),
uptime: None,
diagnostics_strategy: None,
supports_call_hierarchy: None,
supports_diagnostics: None,
supports_definition: None,
supports_formatting: None,
indexing_status: None,
navigation_ready: None,
probe_verified: false,
install_hint: Some(missing.install_hint.clone()),
degraded_tools: vec![
"analyze_impact".to_owned(),
"read_with_deep_context".to_owned(),
"validate_only".to_owned(),
],
validation_latency_ms: None,
});
}
if languages.is_empty() && params.language.is_none() {
overall_status = "unavailable";
}
Ok(rmcp::handler::server::wrapper::Json(
crate::server::types::LspHealthResponse {
status: overall_status.to_owned(),
languages,
},
))
}
/// Actively check whether the LSP server for `language_id` is responsive:
/// open a representative workspace file, issue a goto_definition at 1:1,
/// then close. Any non-error response — including "no definition here" —
/// counts as ready: this probes responsiveness, not navigation accuracy.
async fn probe_language_readiness(&self, language_id: &str) -> bool {
// No suitable file in the workspace means nothing to probe with.
let probe_file = self.find_probe_file(language_id);
let Some(file_path) = probe_file else {
return false; };
let content = tokio::fs::read_to_string(self.workspace_root.path().join(&file_path))
.await
.unwrap_or_default();
// Open/close are best-effort; only the definition request is judged.
let _ = self
.lawyer
.did_open(self.workspace_root.path(), &file_path, &content)
.await;
let result = self
.lawyer
.goto_definition(self.workspace_root.path(), &file_path, 1, 1)
.await;
let _ = self
.lawyer
.did_close(self.workspace_root.path(), &file_path)
.await;
// Ok(None) is still "ready" — the server answered the request.
result.is_ok()
}
/// Pick a representative source file for `language_id` so a readiness
/// probe has something real to open.
///
/// Tries the language's conventional entry points first, then falls back
/// to a shallow recursive scan (max 4 levels) for any file with a
/// matching extension. Returns a workspace-relative path, or `None` for
/// unsupported languages / empty workspaces.
pub(crate) fn find_probe_file(&self, language_id: &str) -> Option<std::path::PathBuf> {
// One table per language: (probe-able extensions, conventional entry points).
let (extensions, candidates): (&[&str], &[&str]) = match language_id {
"rust" => (&["rs"], &["src/main.rs", "src/lib.rs"]),
"go" => (&["go"], &["main.go", "cmd/main.go"]),
"typescript" => (
&["ts", "tsx"],
&[
"src/index.ts",
"index.ts",
"src/main.ts",
"src/index.tsx",
"index.tsx",
"src/main.tsx",
],
),
"javascript" => (
&["js", "jsx"],
&[
"src/index.js",
"index.js",
"src/main.js",
"src/index.jsx",
"index.jsx",
"src/main.jsx",
],
),
"python" => (&["py"], &["src/__init__.py", "main.py", "setup.py", "__init__.py"]),
"ruby" => (&["rb"], &["lib/main.rb", "main.rb"]),
"java" => (&["java"], &["src/main/java/Main.java"]),
// Unknown language: nothing sensible to probe with.
_ => return None,
};
// Conventional entry points, checked in priority order.
for &candidate in candidates {
if self.workspace_root.path().join(candidate).exists() {
return Some(std::path::PathBuf::from(candidate));
}
}
// Fallback: first file anywhere (up to depth 4) with a matching extension.
self.find_file_by_extension_recursive(self.workspace_root.path(), extensions, 0, 4)
}
/// Depth-limited recursive scan for the first file whose extension matches
/// one of `extensions` (case-insensitive), returned relative to the
/// workspace root.
///
/// Skips hidden directories and common build/dependency output directories
/// so the probe never descends into vendored or generated trees. Returns
/// `None` when nothing matches within `max_depth` levels or a directory
/// cannot be read. Scan order follows `read_dir`, so which match wins is
/// filesystem-dependent.
fn find_file_by_extension_recursive(
    &self,
    current_dir: &std::path::Path,
    extensions: &[&str],
    current_depth: usize,
    max_depth: usize,
) -> Option<std::path::PathBuf> {
    // Directories that never contain useful probe files. Hidden directories
    // (leading '.') are skipped separately below, which already covers .git —
    // the previous explicit ".git" check was dead code.
    const SKIP_DIRS: &[&str] = &[
        "node_modules",
        "target",
        "vendor",
        "dist",
        "build",
        "__pycache__",
    ];
    if current_depth > max_depth {
        return None;
    }
    let Ok(entries) = std::fs::read_dir(current_dir) else {
        return None;
    };
    for entry in entries.flatten() {
        let path = entry.path();
        // Entries whose metadata cannot be read are silently skipped.
        let Ok(metadata) = entry.metadata() else {
            continue;
        };
        if metadata.is_dir() {
            if let Some(name) = path.file_name().and_then(|n| n.to_str()) {
                if name.starts_with('.') || SKIP_DIRS.contains(&name) {
                    continue;
                }
            }
            if let Some(found) = self.find_file_by_extension_recursive(
                &path,
                extensions,
                current_depth + 1,
                max_depth,
            ) {
                return Some(found);
            }
        } else if metadata.is_file() {
            if let Some(ext) = path.extension().and_then(|e| e.to_str()) {
                if extensions.iter().any(|&e| e.eq_ignore_ascii_case(ext)) {
                    // Report paths relative to the workspace root.
                    if let Ok(rel_path) = path.strip_prefix(self.workspace_root.path()) {
                        return Some(rel_path.to_path_buf());
                    }
                }
            }
        }
    }
    None
}
}
/// Renders an uptime in seconds as a compact human-readable string:
/// `"45s"`, `"2m"`, `"2m30s"`, `"3h"`, or `"3h15m"`.
///
/// Once the value reaches an hour, leftover seconds are dropped entirely.
fn format_uptime(seconds: u64) -> String {
    match seconds {
        s if s < 60 => format!("{s}s"),
        s if s < 3600 => {
            let (mins, secs) = (s / 60, s % 60);
            match secs {
                0 => format!("{mins}m"),
                _ => format!("{mins}m{secs}s"),
            }
        }
        s => {
            let (hours, mins) = (s / 3600, (s % 3600) / 60);
            match mins {
                0 => format!("{hours}h"),
                _ => format!("{hours}h{mins}m"),
            }
        }
    }
}
/// Decision for how a language's LSP readiness should be determined.
/// (Variant semantics inferred from the names; the consumer of this enum is
/// outside this view — confirm against the probe decision logic.)
enum ProbeAction {
// Reuse a previously cached "ready" result instead of probing again.
UseCachedReady,
// Do not probe; report the unverified status as-is.
SkipProbe,
// Actively probe the language server.
Probe,
}
/// Lists the MCP tools that will run degraded given a language's reported
/// LSP capabilities.
///
/// Missing call-hierarchy support degrades `analyze_impact` and
/// `read_with_deep_context`; missing diagnostics support (unless the server
/// uses a push diagnostics strategy) degrades `validate_only`.
fn compute_degraded_tools(status: &pathfinder_lsp::types::LspLanguageStatus) -> Vec<String> {
    let mut tools = Vec::new();
    let has_call_hierarchy = status.supports_call_hierarchy == Some(true);
    if !has_call_hierarchy {
        tools.push("analyze_impact".to_owned());
        tools.push("read_with_deep_context".to_owned());
    }
    let has_diagnostics = status.supports_diagnostics == Some(true)
        || status.diagnostics_strategy.as_deref() == Some("push");
    if !has_diagnostics {
        tools.push("validate_only".to_owned());
    }
    tools
}
/// Suggests a validation wait budget in milliseconds for a diagnostics
/// strategy: push servers get a longer settle window than pull servers;
/// unknown or absent strategies get no estimate.
fn compute_validation_latency(strategy: Option<&str>) -> Option<u64> {
    match strategy? {
        "push" => Some(10_000),
        "pull" => Some(2_000),
        _ => None,
    }
}
/// Parses an uptime string like `"3h15m"`, `"2m30s"`, or `"45s"` back into
/// seconds. Returns `None` only when no string is given.
///
/// Each unit section is parsed independently and best-effort: a section
/// that fails to parse contributes zero rather than failing the whole
/// string, so fully malformed input degrades to `Some(0)` instead of an
/// error. (The original computed each `find` twice; this version computes
/// each delimiter position exactly once.)
fn parse_uptime_to_seconds(uptime: Option<&str>) -> Option<u64> {
    let text = uptime?;
    let mut seconds = 0u64;
    // Consume an optional "<hours>h" prefix.
    let rest = match text.find('h') {
        Some(pos) => {
            if let Ok(hours) = text[..pos].parse::<u64>() {
                seconds += hours * 3600;
            }
            &text[pos + 1..]
        }
        None => text,
    };
    // Consume an optional "<minutes>m" section.
    let rest = match rest.find('m') {
        Some(pos) => {
            if let Ok(minutes) = rest[..pos].parse::<u64>() {
                seconds += minutes * 60;
            }
            &rest[pos + 1..]
        }
        None => rest,
    };
    // Consume an optional trailing "<secs>s" section.
    if let Some(pos) = rest.find('s') {
        if let Ok(secs) = rest[..pos].parse::<u64>() {
            seconds += secs;
        }
    }
    Some(seconds)
}
#[cfg(test)]
#[allow(clippy::expect_used, clippy::unwrap_used)]
mod tests {
use super::*;
use crate::server::types::{
AnalyzeImpactParams, GetDefinitionParams, ReadWithDeepContextParams,
};
use pathfinder_common::config::PathfinderConfig;
use pathfinder_common::sandbox::Sandbox;
use pathfinder_common::types::{SymbolScope, VersionHash, WorkspaceRoot};
use pathfinder_lsp::types::{CallHierarchyCall, CallHierarchyItem};
use pathfinder_lsp::{DefinitionLocation, MockLawyer};
use pathfinder_search::MockScout;
use pathfinder_treesitter::mock::MockSurgeon;
use std::sync::Arc;
use tempfile::tempdir;
// Builds a PathfinderServer over a fresh temp workspace, wiring in the given
// surgeon and lawyer mocks (search uses a default MockScout). The TempDir is
// returned so callers keep the workspace alive for the test's duration.
fn make_server_with_lawyer(
mock_surgeon: Arc<MockSurgeon>,
mock_lawyer: Arc<MockLawyer>,
) -> (PathfinderServer, tempfile::TempDir) {
let ws_dir = tempdir().expect("temp dir");
let ws = WorkspaceRoot::new(ws_dir.path()).expect("valid root");
let config = PathfinderConfig::default();
let sandbox = Sandbox::new(ws.path(), &config.sandbox);
let server = PathfinderServer::with_all_engines(
ws,
config,
sandbox,
Arc::new(MockScout::default()),
mock_surgeon,
mock_lawyer,
);
(server, ws_dir)
}
// Canonical single-line `login` symbol scope used as the surgeon result in
// most tests; the version hash matches the content exactly.
fn make_scope() -> SymbolScope {
SymbolScope {
content: "fn login() { }".to_owned(),
start_line: 9,
end_line: 9,
name_column: 0,
version_hash: VersionHash::compute(b"fn login() { }"),
language: "rust".to_owned(),
}
}
// Happy path: with an LSP definition available, get_definition returns the
// lawyer's location verbatim, is not degraded, and consults the lawyer once.
#[tokio::test]
async fn test_get_definition_routes_to_lawyer_success() {
let surgeon = Arc::new(MockSurgeon::new());
surgeon
.read_symbol_scope_results
.lock()
.unwrap()
.push(Ok(make_scope()));
let lawyer = Arc::new(MockLawyer::default());
lawyer.set_goto_definition_result(Ok(Some(DefinitionLocation {
file: "src/auth.rs".into(),
line: 42,
column: 5,
preview: "pub fn login() -> bool {".into(),
})));
let (server, _ws) = make_server_with_lawyer(surgeon, lawyer.clone());
let params = GetDefinitionParams {
semantic_path: "src/auth.rs::login".to_owned(),
};
let result = server.get_definition_impl(params).await;
let call_res = result.expect("should succeed");
let val = call_res.0;
assert_eq!(val.file, "src/auth.rs");
assert_eq!(val.line, 42);
assert_eq!(val.preview, "pub fn login() -> bool {");
assert!(!val.degraded);
// Exactly one goto_definition round-trip to the LSP.
assert_eq!(lawyer.goto_definition_call_count(), 1);
}
// With NoOpLawyer (no LSP) and no grep hit in the empty workspace,
// get_definition must fail with a NO_LSP_AVAILABLE error code.
#[tokio::test]
async fn test_get_definition_degrades_when_no_lsp() {
let surgeon = Arc::new(MockSurgeon::new());
surgeon
.read_symbol_scope_results
.lock()
.unwrap()
.push(Ok(make_scope()));
let lawyer = Arc::new(pathfinder_lsp::NoOpLawyer);
let ws_dir = tempdir().expect("temp dir");
let ws = WorkspaceRoot::new(ws_dir.path()).expect("valid root");
let config = PathfinderConfig::default();
let sandbox = Sandbox::new(ws.path(), &config.sandbox);
let server = PathfinderServer::with_all_engines(
ws,
config,
sandbox,
Arc::new(MockScout::default()),
surgeon,
lawyer,
);
let params = GetDefinitionParams {
semantic_path: "src/auth.rs::login".to_owned(),
};
let result = server.get_definition_impl(params).await;
let Err(err) = result else {
panic!("expected error but got Ok");
};
// The machine-readable error code lives under the "error" key of the data.
let code = err
.data
.as_ref()
.and_then(|d| d.get("error"))
.and_then(|v| v.as_str())
.unwrap_or("");
assert_eq!(code, "NO_LSP_AVAILABLE");
}
// An empty semantic path must be rejected before any engine is consulted.
#[tokio::test]
async fn test_get_definition_rejects_empty_semantic_path() {
    let surgeon = Arc::new(MockSurgeon::default());
    let lawyer = Arc::new(MockLawyer::default());
    let (server, _ws) = make_server_with_lawyer(surgeon, lawyer);
    let params = GetDefinitionParams {
        semantic_path: String::default(),
    };
    let outcome = server.get_definition_impl(params).await;
    assert!(outcome.is_err());
}
// Paths inside sandbox-denied locations (e.g. .git internals) are refused
// with an ACCESS_DENIED error code before any symbol resolution happens.
#[tokio::test]
async fn test_get_definition_rejects_sandbox_denied_path() {
let surgeon = Arc::new(MockSurgeon::new());
let lawyer = Arc::new(MockLawyer::default());
let (server, _ws) = make_server_with_lawyer(surgeon, lawyer);
let params = GetDefinitionParams {
semantic_path: ".git/objects/abc::def".to_owned(), };
let result = server.get_definition_impl(params).await;
let Err(err) = result else {
panic!("expected error but got Ok");
};
let code = err
.data
.as_ref()
.and_then(|d| d.get("error"))
.and_then(|v| v.as_str())
.unwrap_or("");
assert_eq!(code, "ACCESS_DENIED");
}
// Without LSP call-hierarchy support, read_with_deep_context still succeeds
// but prefixes the content with a DEGRADED MODE banner, sets degraded=true
// with reason "no_lsp", and returns zero dependencies.
#[tokio::test]
async fn test_read_with_deep_context_degrades_when_call_hierarchy_unsupported() {
let surgeon = Arc::new(MockSurgeon::new());
surgeon
.read_symbol_scope_results
.lock()
.unwrap()
.push(Ok(make_scope()));
let lawyer = Arc::new(pathfinder_lsp::NoOpLawyer);
let ws_dir = tempdir().expect("temp dir");
let ws = WorkspaceRoot::new(ws_dir.path()).expect("valid root");
let config = PathfinderConfig::default();
let sandbox = Sandbox::new(ws.path(), &config.sandbox);
let server = PathfinderServer::with_all_engines(
ws,
config,
sandbox,
Arc::new(MockScout::default()),
surgeon,
lawyer,
);
let params = ReadWithDeepContextParams {
semantic_path: "src/auth.rs::login".to_owned(),
};
let result = server.read_with_deep_context_impl(params).await;
let call_res = result.expect("should succeed");
let text_content = match &call_res.content[0].raw {
rmcp::model::RawContent::Text(t) => t.text.clone(),
_ => panic!("expected text content"),
};
let val: crate::server::types::ReadWithDeepContextMetadata =
serde_json::from_value(call_res.structured_content.unwrap()).unwrap();
assert_eq!(text_content, "DEGRADED MODE (no_lsp) — 0 dependencies loaded (results may be incomplete)\n\nfn login() { }");
assert!(val.degraded);
assert_eq!(val.degraded_reason.as_deref(), Some("no_lsp"));
assert!(val.dependencies.is_empty());
}
// With a working call hierarchy, read_with_deep_context resolves outgoing
// calls into dependency entries (semantic path, signature, file, line) and
// is not degraded.
#[tokio::test]
async fn test_read_with_deep_context_lsp_populates_dependencies() {
let surgeon = Arc::new(MockSurgeon::new());
surgeon
.read_symbol_scope_results
.lock()
.unwrap()
.push(Ok(make_scope()));
let lawyer = Arc::new(MockLawyer::default());
let item = CallHierarchyItem {
name: "login".into(),
kind: "function".into(),
detail: None,
file: "src/auth.rs".into(),
line: 9,
column: 4,
data: None,
};
// Queue order matters: prepare first, then the outgoing-call answer.
lawyer.push_prepare_call_hierarchy_result(Ok(vec![item.clone()]));
lawyer.push_outgoing_call_result(Ok(vec![CallHierarchyCall {
item: CallHierarchyItem {
name: "validate_token".into(),
kind: "function".into(),
detail: Some("fn validate_token() -> bool".into()),
file: "src/token.rs".into(),
line: 15,
column: 4,
data: None,
},
call_sites: vec![9],
}]));
let (server, _ws) = make_server_with_lawyer(surgeon, lawyer);
let params = ReadWithDeepContextParams {
semantic_path: "src/auth.rs::login".to_owned(),
};
let result = server.read_with_deep_context_impl(params).await;
let call_res = result.expect("should succeed");
let text_content = match &call_res.content[0].raw {
rmcp::model::RawContent::Text(t) => t.text.clone(),
_ => panic!("expected text content"),
};
let val: crate::server::types::ReadWithDeepContextMetadata =
serde_json::from_value(call_res.structured_content.unwrap()).unwrap();
assert_eq!(text_content, "1 dependencies loaded\n\nfn login() { }");
assert!(!val.degraded);
assert_eq!(val.degraded_reason, None);
assert_eq!(val.dependencies.len(), 1);
assert_eq!(
val.dependencies[0].semantic_path,
"src/token.rs::validate_token"
);
assert_eq!(val.dependencies[0].signature, "fn validate_token() -> bool");
assert_eq!(val.dependencies[0].file, "src/token.rs");
assert_eq!(val.dependencies[0].line, 15);
}
// Without LSP, analyze_impact must report degraded with reason "no_lsp" and
// leave incoming/outgoing as None (unknown), not Some([]) (confirmed zero).
#[tokio::test]
async fn test_analyze_impact_returns_empty_degraded() {
let surgeon = Arc::new(MockSurgeon::new());
surgeon
.read_symbol_scope_results
.lock()
.unwrap()
.push(Ok(make_scope()));
let lawyer = Arc::new(pathfinder_lsp::NoOpLawyer);
let ws_dir = tempdir().expect("temp dir");
let ws = WorkspaceRoot::new(ws_dir.path()).expect("valid root");
let config = PathfinderConfig::default();
let sandbox = Sandbox::new(ws.path(), &config.sandbox);
let server = PathfinderServer::with_all_engines(
ws,
config,
sandbox,
Arc::new(MockScout::default()),
surgeon,
lawyer,
);
let params = AnalyzeImpactParams {
semantic_path: "src/auth.rs::login".to_owned(),
max_depth: 2,
};
let result = server.analyze_impact_impl(params).await;
let call_res = result.expect("should succeed");
let val: crate::server::types::AnalyzeImpactMetadata =
serde_json::from_value(call_res.structured_content.unwrap()).unwrap();
assert!(
val.incoming.is_none(),
"incoming must be null (not empty) when degraded"
);
assert!(
val.outgoing.is_none(),
"outgoing must be null (not empty) when degraded"
);
assert!(val.degraded);
assert_eq!(val.degraded_reason.as_deref(), Some("no_lsp"));
}
// Full LSP path: analyze_impact resolves both callers (incoming) and
// callees (outgoing) from the call hierarchy and reports file counts.
#[tokio::test]
async fn test_analyze_impact_lsp_populates_incoming_and_outgoing() {
let surgeon = Arc::new(MockSurgeon::new());
surgeon
.read_symbol_scope_results
.lock()
.unwrap()
.push(Ok(make_scope()));
let lawyer = Arc::new(MockLawyer::default());
let item = CallHierarchyItem {
name: "login".into(),
kind: "function".into(),
detail: None,
file: "src/auth.rs".into(),
line: 9,
column: 4,
data: None,
};
// Queue order matters: prepare, then incoming, then outgoing.
lawyer.push_prepare_call_hierarchy_result(Ok(vec![item.clone()]));
lawyer.push_incoming_call_result(Ok(vec![CallHierarchyCall {
item: CallHierarchyItem {
name: "handle_request".into(),
kind: "function".into(),
detail: Some("fn handle_request()".into()),
file: "src/server.rs".into(),
line: 20,
column: 4,
data: None,
},
call_sites: vec![25],
}]));
lawyer.push_outgoing_call_result(Ok(vec![CallHierarchyCall {
item: CallHierarchyItem {
name: "validate_token".into(),
kind: "function".into(),
detail: Some("fn validate_token() -> bool".into()),
file: "src/token.rs".into(),
line: 15,
column: 4,
data: None,
},
call_sites: vec![9],
}]));
let (server, _ws) = make_server_with_lawyer(surgeon, lawyer);
let params = AnalyzeImpactParams {
semantic_path: "src/auth.rs::login".to_owned(),
max_depth: 1,
};
let result = server.analyze_impact_impl(params).await;
let call_res = result.expect("should succeed");
let val: crate::server::types::AnalyzeImpactMetadata =
serde_json::from_value(call_res.structured_content.unwrap()).unwrap();
assert!(!val.degraded);
assert_eq!(val.degraded_reason, None);
// Three files referenced: auth.rs (target), server.rs (caller), token.rs (callee).
assert_eq!(val.depth_reached, 1); assert_eq!(val.files_referenced, 3); let incoming = val
.incoming
.as_ref()
.expect("incoming must be Some when not degraded");
let outgoing = val
.outgoing
.as_ref()
.expect("outgoing must be Some when not degraded");
assert_eq!(incoming.len(), 1);
assert_eq!(incoming[0].file, "src/server.rs");
assert_eq!(outgoing.len(), 1);
assert_eq!(outgoing[0].file, "src/token.rs");
}
// A hard LSP protocol failure surfaces as an LSP_ERROR error code rather
// than silently degrading.
#[tokio::test]
async fn test_get_definition_lsp_error_returns_lsp_error() {
let surgeon = Arc::new(MockSurgeon::new());
surgeon
.read_symbol_scope_results
.lock()
.unwrap()
.push(Ok(make_scope()));
let lawyer = Arc::new(MockLawyer::default());
lawyer.set_goto_definition_result(Err("LSP protocol error".to_string()));
let (server, _ws) = make_server_with_lawyer(surgeon, lawyer);
let params = GetDefinitionParams {
semantic_path: "src/auth.rs::login".to_owned(),
};
let result = server.get_definition_impl(params).await;
let Err(err) = result else {
panic!("expected error but got Ok");
};
let code = err
.data
.as_ref()
.and_then(|d| d.get("error"))
.and_then(|v| v.as_str())
.unwrap_or("");
assert_eq!(code, "LSP_ERROR");
}
// When LSP answers Ok(None) (default MockLawyer) and grep finds nothing in
// the empty workspace, get_definition reports SYMBOL_NOT_FOUND.
#[tokio::test]
async fn test_get_definition_lsp_none_no_grep_fallback_returns_symbol_not_found() {
let surgeon = Arc::new(MockSurgeon::new());
surgeon
.read_symbol_scope_results
.lock()
.unwrap()
.push(Ok(make_scope()));
let lawyer = Arc::new(MockLawyer::default());
let (server, _ws) = make_server_with_lawyer(surgeon, lawyer);
let params = GetDefinitionParams {
semantic_path: "src/auth.rs::login".to_owned(),
};
let result = server.get_definition_impl(params).await;
let Err(err) = result else {
panic!("expected error but got Ok");
};
let code = err
.data
.as_ref()
.and_then(|d| d.get("error"))
.and_then(|v| v.as_str())
.unwrap_or("");
assert_eq!(code, "SYMBOL_NOT_FOUND");
}
// Without LSP but with a scout (grep) hit, get_definition falls back to the
// search result: degraded=true, file taken from the match, and the reason
// mentions "grep_fallback".
#[tokio::test]
async fn test_get_definition_grep_fallback_with_mock_scout() {
let surgeon = Arc::new(MockSurgeon::new());
surgeon
.read_symbol_scope_results
.lock()
.unwrap()
.push(Ok(make_scope()));
let _lawyer = Arc::new(MockLawyer::default());
let ws_dir = tempdir().expect("temp dir");
let ws = WorkspaceRoot::new(ws_dir.path()).expect("valid root");
let config = PathfinderConfig::default();
let sandbox = Sandbox::new(ws.path(), &config.sandbox);
// A real file on disk backs the scout's pretend match.
std::fs::create_dir_all(ws_dir.path().join("src")).unwrap();
std::fs::write(
ws_dir.path().join("src/other.rs"),
"fn login() -> bool { true }",
)
.unwrap();
let scout = Arc::new(MockScout::default());
scout.set_result(Ok(pathfinder_search::SearchResult {
matches: vec![pathfinder_search::SearchMatch {
file: "src/other.rs".to_string(),
line: 1,
column: 1,
content: "fn login() -> bool { true }".to_string(),
context_before: vec![],
context_after: vec![],
enclosing_semantic_path: None,
version_hash: "sha256:abc".to_string(),
known: Some(false),
}],
total_matches: 1,
truncated: false,
}));
let server = PathfinderServer::with_all_engines(
ws,
config,
sandbox,
scout,
surgeon,
Arc::new(pathfinder_lsp::NoOpLawyer),
);
let params = GetDefinitionParams {
semantic_path: "src/auth.rs::login".to_owned(),
};
let result = server.get_definition_impl(params).await;
let Ok(res) = result else {
panic!("expected Ok with grep fallback, got Err");
};
assert!(res.0.degraded);
assert_eq!(res.0.file, "src/other.rs");
assert!(res
.0
.degraded_reason
.as_ref()
.unwrap()
.contains("grep_fallback"));
}
// Empty call hierarchy but a successful goto_definition probe: the LSP is
// warm, so zero callers/callees is trusted — not degraded, Some([]) results.
#[tokio::test]
async fn test_analyze_impact_empty_hierarchy_confirmed_zero() {
let surgeon = Arc::new(MockSurgeon::new());
surgeon
.read_symbol_scope_results
.lock()
.unwrap()
.push(Ok(make_scope()));
let lawyer = Arc::new(MockLawyer::default());
lawyer.push_prepare_call_hierarchy_result(Ok(vec![]));
// The probe answer that proves the server is warm.
lawyer.set_goto_definition_result(Ok(Some(DefinitionLocation {
file: "src/auth.rs".into(),
line: 10,
column: 4,
preview: "fn login() {}".into(),
})));
let (server, _ws) = make_server_with_lawyer(surgeon, lawyer);
let params = AnalyzeImpactParams {
semantic_path: "src/auth.rs::login".to_owned(),
max_depth: 2,
};
let result = server.analyze_impact_impl(params).await;
let call_res = result.expect("should succeed");
let val: crate::server::types::AnalyzeImpactMetadata =
serde_json::from_value(call_res.structured_content.unwrap()).unwrap();
assert!(
!val.degraded,
"must not be degraded when probe confirms LSP is warm"
);
assert_eq!(val.degraded_reason, None);
let incoming = val
.incoming
.as_ref()
.expect("must be Some when confirmed-zero");
let outgoing = val
.outgoing
.as_ref()
.expect("must be Some when confirmed-zero");
assert!(incoming.is_empty(), "confirmed zero callers");
assert!(outgoing.is_empty(), "confirmed zero callees");
}
// Empty call hierarchy AND a failed goto_definition probe: ambiguous warmup
// state, so the result degrades with "lsp_warmup_empty_unverified" and
// incoming/outgoing stay None (unknown).
#[tokio::test]
async fn test_analyze_impact_empty_hierarchy_warmup_degrades() {
let surgeon = Arc::new(MockSurgeon::new());
surgeon
.read_symbol_scope_results
.lock()
.unwrap()
.push(Ok(make_scope()));
let lawyer = Arc::new(MockLawyer::default());
lawyer.push_prepare_call_hierarchy_result(Ok(vec![]));
let (server, _ws) = make_server_with_lawyer(surgeon, lawyer);
let params = AnalyzeImpactParams {
semantic_path: "src/auth.rs::login".to_owned(),
max_depth: 2,
};
let result = server.analyze_impact_impl(params).await;
let call_res = result.expect("should succeed");
let val: crate::server::types::AnalyzeImpactMetadata =
serde_json::from_value(call_res.structured_content.unwrap()).unwrap();
assert!(
val.degraded,
"must be degraded when goto_definition probe also returns None"
);
assert_eq!(
val.degraded_reason.as_deref(),
Some("lsp_warmup_empty_unverified"),
"degraded_reason must indicate warmup ambiguity"
);
assert!(
val.incoming.is_none(),
"incoming must be None (unknown) during warmup, not Some([]) (confirmed-zero)"
);
assert!(
val.outgoing.is_none(),
"outgoing must be None (unknown) during warmup, not Some([]) (confirmed-zero)"
);
}
// A crash in call_hierarchy_prepare degrades analyze_impact to "no_lsp"
// instead of propagating an error to the caller.
#[tokio::test]
async fn test_analyze_impact_lsp_error_degrades() {
let surgeon = Arc::new(MockSurgeon::new());
surgeon
.read_symbol_scope_results
.lock()
.unwrap()
.push(Ok(make_scope()));
let lawyer = Arc::new(MockLawyer::default());
lawyer.push_prepare_call_hierarchy_result(Err("LSP crashed".to_string()));
let (server, _ws) = make_server_with_lawyer(surgeon, lawyer);
let params = AnalyzeImpactParams {
semantic_path: "src/auth.rs::login".to_owned(),
max_depth: 2,
};
let result = server.analyze_impact_impl(params).await;
let call_res = result.expect("should succeed");
let val: crate::server::types::AnalyzeImpactMetadata =
serde_json::from_value(call_res.structured_content.unwrap()).unwrap();
assert!(val.degraded);
assert_eq!(val.degraded_reason.as_deref(), Some("no_lsp"));
}
// Prepare succeeds but the outgoing-calls request fails: the tool degrades
// to "no_lsp" with zero dependencies rather than erroring out.
#[tokio::test]
async fn test_read_with_deep_context_outgoing_error_degrades() {
let surgeon = Arc::new(MockSurgeon::new());
surgeon
.read_symbol_scope_results
.lock()
.unwrap()
.push(Ok(make_scope()));
let lawyer = Arc::new(MockLawyer::default());
let item = CallHierarchyItem {
name: "login".into(),
kind: "function".into(),
detail: None,
file: "src/auth.rs".into(),
line: 9,
column: 4,
data: None,
};
lawyer.push_prepare_call_hierarchy_result(Ok(vec![item]));
lawyer.push_outgoing_call_result(Err("outgoing failed".to_string()));
let (server, _ws) = make_server_with_lawyer(surgeon, lawyer);
let params = ReadWithDeepContextParams {
semantic_path: "src/auth.rs::login".to_owned(),
};
let result = server.read_with_deep_context_impl(params).await;
let call_res = result.expect("should succeed");
let val: crate::server::types::ReadWithDeepContextMetadata =
serde_json::from_value(call_res.structured_content.unwrap()).unwrap();
assert!(val.degraded);
assert_eq!(val.degraded_reason.as_deref(), Some("no_lsp"));
assert!(val.dependencies.is_empty());
}
// Empty hierarchy plus a successful warm-probe: confirmed-zero dependencies,
// no degradation flags set.
#[tokio::test]
async fn test_read_with_deep_context_empty_hierarchy_zero_deps() {
let surgeon = Arc::new(MockSurgeon::new());
surgeon
.read_symbol_scope_results
.lock()
.unwrap()
.push(Ok(make_scope()));
let lawyer = Arc::new(MockLawyer::default());
lawyer.push_prepare_call_hierarchy_result(Ok(vec![]));
// The probe answer that proves the server is warm.
lawyer.set_goto_definition_result(Ok(Some(DefinitionLocation {
file: "src/auth.rs".into(),
line: 10,
column: 4,
preview: "fn login() {}".into(),
})));
let (server, _ws) = make_server_with_lawyer(surgeon, lawyer);
let params = ReadWithDeepContextParams {
semantic_path: "src/auth.rs::login".to_owned(),
};
let result = server.read_with_deep_context_impl(params).await;
let call_res = result.expect("should succeed");
let val: crate::server::types::ReadWithDeepContextMetadata =
serde_json::from_value(call_res.structured_content.unwrap()).unwrap();
assert!(
!val.degraded,
"must not be degraded when probe confirms LSP is warm"
);
assert_eq!(val.degraded_reason, None);
assert!(val.dependencies.is_empty(), "confirmed zero dependencies");
}
// Empty hierarchy AND failed probe: ambiguous warmup state degrades with
// "lsp_warmup_empty_unverified".
#[tokio::test]
async fn test_read_with_deep_context_empty_hierarchy_warmup_degrades() {
let surgeon = Arc::new(MockSurgeon::new());
surgeon
.read_symbol_scope_results
.lock()
.unwrap()
.push(Ok(make_scope()));
let lawyer = Arc::new(MockLawyer::default());
lawyer.push_prepare_call_hierarchy_result(Ok(vec![]));
let (server, _ws) = make_server_with_lawyer(surgeon, lawyer);
let params = ReadWithDeepContextParams {
semantic_path: "src/auth.rs::login".to_owned(),
};
let result = server.read_with_deep_context_impl(params).await;
let call_res = result.expect("should succeed");
let val: crate::server::types::ReadWithDeepContextMetadata =
serde_json::from_value(call_res.structured_content.unwrap()).unwrap();
assert!(
val.degraded,
"must be degraded when goto_definition probe also returns None"
);
assert_eq!(
val.degraded_reason.as_deref(),
Some("lsp_warmup_empty_unverified"),
"degraded_reason must indicate warmup ambiguity"
);
assert!(val.dependencies.is_empty());
}
// The BFS over the call graph must stop at max_depth=1: a second level of
// incoming callers is queued on the mock but should never be consumed.
#[tokio::test]
async fn test_analyze_impact_bfs_respects_max_depth() {
let surgeon = Arc::new(MockSurgeon::new());
surgeon
.read_symbol_scope_results
.lock()
.unwrap()
.push(Ok(make_scope()));
let lawyer = Arc::new(MockLawyer::default());
let item = CallHierarchyItem {
name: "login".into(),
kind: "function".into(),
detail: None,
file: "src/auth.rs".into(),
line: 9,
column: 4,
data: None,
};
lawyer.push_prepare_call_hierarchy_result(Ok(vec![item.clone()]));
// Depth 1: direct caller of login.
let caller_item = CallHierarchyItem {
name: "caller".into(),
kind: "function".into(),
detail: None,
file: "src/caller.rs".into(),
line: 5,
column: 4,
data: None,
};
lawyer.push_incoming_call_result(Ok(vec![CallHierarchyCall {
item: caller_item.clone(),
call_sites: vec![9],
}]));
// Depth 2: caller-of-caller — beyond max_depth, must not be reached.
lawyer.push_incoming_call_result(Ok(vec![CallHierarchyCall {
item: CallHierarchyItem {
name: "top_level".into(),
kind: "function".into(),
detail: None,
file: "src/main.rs".into(),
line: 1,
column: 0,
data: None,
},
call_sites: vec![5],
}]));
lawyer.push_outgoing_call_result(Ok(vec![]));
let (server, _ws) = make_server_with_lawyer(surgeon, lawyer);
let params = AnalyzeImpactParams {
semantic_path: "src/auth.rs::login".to_owned(),
max_depth: 1, };
let result = server.analyze_impact_impl(params).await;
let call_res = result.expect("should succeed");
let val: crate::server::types::AnalyzeImpactMetadata =
serde_json::from_value(call_res.structured_content.unwrap()).unwrap();
assert!(!val.degraded);
let _incoming = val.incoming.as_ref().expect("must be Some");
assert_eq!(val.depth_reached, 1);
}
// analyze_impact applies the same sandbox check as get_definition: paths
// under .git are refused with ACCESS_DENIED.
#[tokio::test]
async fn test_analyze_impact_rejects_sandbox_denied_path() {
let surgeon = Arc::new(MockSurgeon::new());
let lawyer = Arc::new(MockLawyer::default());
let (server, _ws) = make_server_with_lawyer(surgeon, lawyer);
let params = AnalyzeImpactParams {
semantic_path: ".git/objects/abc::def".to_owned(),
max_depth: 2,
};
let result = server.analyze_impact_impl(params).await;
let Err(err) = result else {
panic!("expected error but got Ok");
};
let code = err
.data
.as_ref()
.and_then(|d| d.get("error"))
.and_then(|v| v.as_str())
.unwrap_or("");
assert_eq!(code, "ACCESS_DENIED");
}
// A tree-sitter parse failure while reading the symbol scope is a hard
// error for analyze_impact, not a degraded success.
#[tokio::test]
async fn test_analyze_impact_tree_sitter_error() {
let surgeon = Arc::new(MockSurgeon::new());
surgeon.read_symbol_scope_results.lock().unwrap().push(Err(
pathfinder_treesitter::SurgeonError::ParseError {
path: std::path::PathBuf::from("src/auth.rs"),
reason: "parse failed".to_string(),
},
));
let lawyer = Arc::new(MockLawyer::default());
let (server, _ws) = make_server_with_lawyer(surgeon, lawyer);
let params = AnalyzeImpactParams {
semantic_path: "src/auth.rs::login".to_owned(),
max_depth: 2,
};
let result = server.analyze_impact_impl(params).await;
assert!(result.is_err(), "tree-sitter error should propagate");
}
// Mid-BFS failure: incoming succeeds but outgoing errors. The tool keeps
// the partial graph (incoming populated, outgoing empty) without degrading.
#[tokio::test]
async fn test_analyze_impact_bfs_lsp_error_graceful_partial_graph() {
let surgeon = Arc::new(MockSurgeon::new());
surgeon
.read_symbol_scope_results
.lock()
.unwrap()
.push(Ok(make_scope()));
let lawyer = Arc::new(MockLawyer::default());
let item = CallHierarchyItem {
name: "login".into(),
kind: "function".into(),
detail: None,
file: "src/auth.rs".into(),
line: 9,
column: 4,
data: None,
};
lawyer.push_prepare_call_hierarchy_result(Ok(vec![item]));
lawyer.push_incoming_call_result(Ok(vec![CallHierarchyCall {
item: CallHierarchyItem {
name: "caller".into(),
kind: "function".into(),
detail: None,
file: "src/server.rs".into(),
line: 20,
column: 4,
data: None,
},
call_sites: vec![9],
}]));
lawyer.push_outgoing_call_result(Err("LSP crashed during outgoing".to_string()));
let (server, _ws) = make_server_with_lawyer(surgeon, lawyer);
let params = AnalyzeImpactParams {
semantic_path: "src/auth.rs::login".to_owned(),
max_depth: 1,
};
let result = server.analyze_impact_impl(params).await;
let call_res = result.expect("should succeed despite partial failure");
let val: crate::server::types::AnalyzeImpactMetadata =
serde_json::from_value(call_res.structured_content.unwrap()).unwrap();
assert!(!val.degraded);
let incoming = val.incoming.as_ref().expect("incoming must be Some");
assert_eq!(incoming.len(), 1, "incoming caller should be present");
let outgoing = val.outgoing.as_ref().expect("outgoing must be Some");
assert!(outgoing.is_empty(), "outgoing should be empty due to error");
}
// Without LSP, analyze_impact falls back to grep: callers found by search
// appear as heuristic incoming edges, the result degrades with
// "no_lsp_grep_fallback", and version hashes cover the referenced file.
#[tokio::test]
async fn test_analyze_impact_grep_fallback_with_mock_scout() {
let surgeon = Arc::new(MockSurgeon::new());
surgeon
.read_symbol_scope_results
.lock()
.unwrap()
.push(Ok(make_scope()));
let ws_dir = tempdir().expect("temp dir");
let ws = WorkspaceRoot::new(ws_dir.path()).expect("valid root");
let config = PathfinderConfig::default();
let sandbox = Sandbox::new(ws.path(), &config.sandbox);
// Real files back the target symbol and the scout's pretend caller match.
std::fs::create_dir_all(ws_dir.path().join("src")).unwrap();
std::fs::write(
ws_dir.path().join("src/auth.rs"),
"fn login() -> bool { true }",
)
.unwrap();
let scout = Arc::new(MockScout::default());
std::fs::write(
ws_dir.path().join("src/caller.rs"),
"fn handle_request() { login(); }",
)
.unwrap();
scout.set_result(Ok(pathfinder_search::SearchResult {
matches: vec![pathfinder_search::SearchMatch {
file: "src/caller.rs".to_string(),
line: 1,
column: 1,
content: "fn handle_request() { login(); }".to_string(),
context_before: vec![],
context_after: vec![],
enclosing_semantic_path: None,
version_hash: "sha256:abc".to_string(),
known: Some(false),
}],
total_matches: 1,
truncated: false,
}));
let server = PathfinderServer::with_all_engines(
ws,
config,
sandbox,
scout,
surgeon,
Arc::new(pathfinder_lsp::NoOpLawyer),
);
let params = AnalyzeImpactParams {
semantic_path: "src/auth.rs::login".to_owned(),
max_depth: 2,
};
let result = server.analyze_impact_impl(params).await;
let call_res = result.expect("should succeed");
let val: crate::server::types::AnalyzeImpactMetadata =
serde_json::from_value(call_res.structured_content.unwrap()).unwrap();
assert!(val.degraded);
assert_eq!(val.degraded_reason.as_deref(), Some("no_lsp_grep_fallback"));
let incoming = val.incoming.as_ref().expect("must be Some from grep");
assert_eq!(incoming.len(), 1);
assert_eq!(incoming[0].file, "src/caller.rs");
assert_eq!(incoming[0].direction, "incoming_heuristic");
assert!(
val.version_hashes.contains_key("src/auth.rs"),
"version_hashes must include the referenced file"
);
}
// With no capability statuses configured, lsp_health reports overall status
// "unavailable" and an empty language list.
#[tokio::test]
async fn test_lsp_health_includes_diagnostics_strategy() {
let surgeon = Arc::new(MockSurgeon::default());
let lawyer = Arc::new(pathfinder_lsp::MockLawyer::default());
let (server, _ws) = make_server_with_lawyer(surgeon, lawyer);
let params = crate::server::types::LspHealthParams::default();
let result = server.lsp_health_impl(params).await;
let call_res = result.expect("should succeed");
let val = call_res.0;
assert_eq!(val.status, "unavailable");
assert!(val.languages.is_empty());
}
// A fully-capable Go server with push diagnostics is reported as "ready"
// with diagnostics_strategy "push" passed through.
#[tokio::test]
async fn test_lsp_health_shows_push_for_go() {
let surgeon = Arc::new(MockSurgeon::default());
let lawyer = Arc::new(pathfinder_lsp::MockLawyer::default());
let lawyer_clone = lawyer.clone();
let (server, _ws) = make_server_with_lawyer(surgeon, lawyer);
lawyer_clone.set_capability_status(std::collections::HashMap::from([(
"go".to_string(),
pathfinder_lsp::types::LspLanguageStatus {
validation: true,
reason: "LSP connected".to_string(),
navigation_ready: Some(true),
indexing_complete: Some(true),
uptime_seconds: Some(15),
diagnostics_strategy: Some("push".to_string()),
supports_definition: Some(true),
supports_call_hierarchy: Some(true),
supports_diagnostics: Some(true),
supports_formatting: Some(false),
},
)]));
let params = crate::server::types::LspHealthParams {
language: Some("go".to_string()),
};
let result = server.lsp_health_impl(params).await;
let call_res = result.expect("should succeed");
let val = call_res.0;
assert_eq!(val.languages.len(), 1);
let go_health = &val.languages[0];
assert_eq!(go_health.language, "go");
assert_eq!(go_health.status, "ready");
assert_eq!(go_health.diagnostics_strategy, Some("push".to_string()));
assert_eq!(go_health.supports_call_hierarchy, Some(true));
assert_eq!(go_health.supports_diagnostics, Some(true));
}
// A fully-capable Rust server with pull diagnostics is reported as "ready"
// with diagnostics_strategy "pull" passed through.
#[tokio::test]
async fn test_lsp_health_shows_pull_for_rust() {
let surgeon = Arc::new(MockSurgeon::default());
let lawyer = Arc::new(pathfinder_lsp::MockLawyer::default());
let lawyer_clone = lawyer.clone();
let (server, _ws) = make_server_with_lawyer(surgeon, lawyer);
lawyer_clone.set_capability_status(std::collections::HashMap::from([(
"rust".to_string(),
pathfinder_lsp::types::LspLanguageStatus {
validation: true,
reason: "LSP connected".to_string(),
navigation_ready: Some(true),
indexing_complete: Some(true),
uptime_seconds: Some(20),
diagnostics_strategy: Some("pull".to_string()),
supports_definition: Some(true),
supports_call_hierarchy: Some(true),
supports_diagnostics: Some(true),
supports_formatting: Some(true),
},
)]));
let params = crate::server::types::LspHealthParams {
language: Some("rust".to_string()),
};
let result = server.lsp_health_impl(params).await;
let call_res = result.expect("should succeed");
let val = call_res.0;
assert_eq!(val.languages.len(), 1);
let rust_health = &val.languages[0];
assert_eq!(rust_health.language, "rust");
assert_eq!(rust_health.status, "ready");
assert_eq!(rust_health.diagnostics_strategy, Some("pull".to_string()));
assert_eq!(rust_health.supports_call_hierarchy, Some(true));
assert_eq!(rust_health.supports_diagnostics, Some(true));
}
// Capability flags from the lawyer's status map must flow through to the
// per-language health entries unchanged.
#[tokio::test]
async fn test_lsp_health_shows_capabilities() {
let surgeon = Arc::new(MockSurgeon::default());
let lawyer = Arc::new(pathfinder_lsp::MockLawyer::default());
let lawyer_clone = lawyer.clone();
let (server, _ws) = make_server_with_lawyer(surgeon, lawyer);
lawyer_clone.set_capability_status(std::collections::HashMap::from([(
"typescript".to_string(),
pathfinder_lsp::types::LspLanguageStatus {
validation: true,
reason: "LSP connected".to_string(),
navigation_ready: Some(true),
indexing_complete: Some(true),
uptime_seconds: Some(10),
diagnostics_strategy: Some("push".to_string()),
supports_definition: Some(true),
supports_call_hierarchy: Some(true), supports_diagnostics: Some(true),
supports_formatting: Some(false), },
)]));
let params = crate::server::types::LspHealthParams::default();
let result = server.lsp_health_impl(params).await;
let call_res = result.expect("should succeed");
let val = call_res.0;
assert_eq!(val.languages.len(), 1);
let ts_health = &val.languages[0];
assert_eq!(ts_health.supports_definition, Some(true));
assert_eq!(ts_health.supports_call_hierarchy, Some(true));
assert_eq!(ts_health.supports_diagnostics, Some(true));
assert_eq!(ts_health.supports_formatting, Some(false));
}
#[tokio::test]
async fn test_lsp_health_probe_upgrades_warming_up_to_ready() {
    // Indexing is still in progress, but a successful goto-definition probe
    // lets the health check report the language as "ready".
    let surgeon = Arc::new(MockSurgeon::default());
    let lawyer = Arc::new(pathfinder_lsp::MockLawyer::default());
    let (server, ws_dir) = make_server_with_lawyer(surgeon, lawyer.clone());

    // Give the probe a real file to target.
    let src_dir = ws_dir.path().join("src");
    std::fs::create_dir_all(&src_dir).unwrap();
    std::fs::write(
        src_dir.join("main.rs"),
        r#"fn main() { println!("Hello"); }"#,
    )
    .unwrap();

    let mut caps = std::collections::HashMap::new();
    caps.insert(
        "rust".to_string(),
        pathfinder_lsp::types::LspLanguageStatus {
            validation: true,
            reason: "LSP connected".to_string(),
            navigation_ready: Some(true),
            indexing_complete: Some(false),
            uptime_seconds: Some(30),
            diagnostics_strategy: Some("pull".to_string()),
            supports_definition: Some(true),
            supports_call_hierarchy: Some(true),
            supports_diagnostics: Some(true),
            supports_formatting: Some(true),
        },
    );
    lawyer.set_capability_status(caps);
    lawyer.set_goto_definition_result(Ok(Some(pathfinder_lsp::types::DefinitionLocation {
        file: "src/main.rs".to_string(),
        line: 1,
        column: 0,
        preview: "fn main()".to_string(),
    })));

    let params = crate::server::types::LspHealthParams {
        language: Some("rust".to_string()),
    };
    let report = server
        .lsp_health_impl(params)
        .await
        .expect("should succeed")
        .0;

    assert_eq!(report.status, "ready");
    assert_eq!(report.languages.len(), 1);
    let rust_health = &report.languages[0];
    assert_eq!(rust_health.language, "rust");
    assert_eq!(rust_health.status, "ready");
    assert_eq!(rust_health.uptime, Some("30s".to_string()));
    assert_eq!(rust_health.indexing_status, Some("in_progress".to_string()));
    assert!(!rust_health.probe_verified);
}
#[tokio::test]
async fn test_lsp_health_probe_keeps_warming_up_when_probe_fails() {
    // When the goto-definition probe errors out, probe_verified must stay false.
    let surgeon = Arc::new(MockSurgeon::default());
    let lawyer = Arc::new(pathfinder_lsp::MockLawyer::default());
    let (server, ws_dir) = make_server_with_lawyer(surgeon, lawyer.clone());

    let src_dir = ws_dir.path().join("src");
    std::fs::create_dir_all(&src_dir).unwrap();
    std::fs::write(src_dir.join("main.rs"), "fn main() {}").unwrap();

    let mut caps = std::collections::HashMap::new();
    caps.insert(
        "rust".to_string(),
        pathfinder_lsp::types::LspLanguageStatus {
            validation: true,
            reason: "LSP connected".to_string(),
            navigation_ready: Some(true),
            indexing_complete: Some(false),
            uptime_seconds: Some(30),
            diagnostics_strategy: Some("pull".to_string()),
            supports_definition: Some(true),
            supports_call_hierarchy: Some(true),
            supports_diagnostics: Some(true),
            supports_formatting: Some(true),
        },
    );
    lawyer.set_capability_status(caps);
    // Simulate a failing probe.
    lawyer.set_goto_definition_result(Err("Connection lost".to_string()));

    let params = crate::server::types::LspHealthParams {
        language: Some("rust".to_string()),
    };
    let report = server
        .lsp_health_impl(params)
        .await
        .expect("should succeed")
        .0;

    assert_eq!(report.status, "ready");
    assert_eq!(report.languages.len(), 1);
    let rust_health = &report.languages[0];
    assert_eq!(rust_health.language, "rust");
    assert_eq!(rust_health.status, "ready");
    assert!(!rust_health.probe_verified);
}
#[tokio::test]
async fn test_lsp_health_no_probe_for_recently_started() {
    // A server with only 5s uptime should not be probed even though a
    // successful goto-definition result is configured.
    let surgeon = Arc::new(MockSurgeon::default());
    let lawyer = Arc::new(pathfinder_lsp::MockLawyer::default());
    let (server, _ws) = make_server_with_lawyer(surgeon, lawyer.clone());

    let mut caps = std::collections::HashMap::new();
    caps.insert(
        "rust".to_string(),
        pathfinder_lsp::types::LspLanguageStatus {
            validation: true,
            reason: "LSP connected".to_string(),
            navigation_ready: Some(true),
            indexing_complete: Some(false),
            uptime_seconds: Some(5),
            diagnostics_strategy: Some("pull".to_string()),
            supports_definition: Some(true),
            supports_call_hierarchy: Some(true),
            supports_diagnostics: Some(true),
            supports_formatting: Some(true),
        },
    );
    lawyer.set_capability_status(caps);
    lawyer.set_goto_definition_result(Ok(Some(pathfinder_lsp::types::DefinitionLocation {
        file: "src/main.rs".to_string(),
        line: 1,
        column: 0,
        preview: "fn main()".to_string(),
    })));

    let params = crate::server::types::LspHealthParams {
        language: Some("rust".to_string()),
    };
    let report = server
        .lsp_health_impl(params)
        .await
        .expect("should succeed")
        .0;

    assert_eq!(report.status, "ready");
    assert_eq!(report.languages.len(), 1);
    let rust_health = &report.languages[0];
    assert_eq!(rust_health.language, "rust");
    assert_eq!(rust_health.status, "ready");
    assert_eq!(rust_health.indexing_status, Some("in_progress".to_string()));
    assert!(!rust_health.probe_verified);
}
#[tokio::test]
async fn test_lsp_health_no_probe_for_already_ready() {
    // Fully indexed language is already "ready"; no probe should run,
    // so probe_verified stays false.
    let surgeon = Arc::new(MockSurgeon::default());
    let lawyer = Arc::new(pathfinder_lsp::MockLawyer::default());
    let (server, _ws) = make_server_with_lawyer(surgeon, lawyer.clone());

    let mut caps = std::collections::HashMap::new();
    caps.insert(
        "rust".to_string(),
        pathfinder_lsp::types::LspLanguageStatus {
            validation: true,
            reason: "LSP connected".to_string(),
            navigation_ready: Some(true),
            indexing_complete: Some(true),
            uptime_seconds: Some(60),
            diagnostics_strategy: Some("pull".to_string()),
            supports_definition: Some(true),
            supports_call_hierarchy: Some(true),
            supports_diagnostics: Some(true),
            supports_formatting: Some(true),
        },
    );
    lawyer.set_capability_status(caps);
    lawyer.set_goto_definition_result(Ok(Some(pathfinder_lsp::types::DefinitionLocation {
        file: "src/main.rs".to_string(),
        line: 1,
        column: 0,
        preview: "fn main()".to_string(),
    })));

    let params = crate::server::types::LspHealthParams {
        language: Some("rust".to_string()),
    };
    let report = server
        .lsp_health_impl(params)
        .await
        .expect("should succeed")
        .0;

    assert_eq!(report.status, "ready");
    assert_eq!(report.languages.len(), 1);
    let rust_health = &report.languages[0];
    assert_eq!(rust_health.status, "ready");
    assert!(!rust_health.probe_verified);
}
#[test]
fn test_parse_uptime_to_seconds() {
    // parse_uptime_to_seconds is a pure, synchronous helper: this test has no
    // awaits, so #[tokio::test] needlessly spun up an async runtime. A plain
    // #[test] is faster and makes the sync contract explicit.
    assert_eq!(parse_uptime_to_seconds(Some("5s")), Some(5));
    assert_eq!(parse_uptime_to_seconds(Some("1m30s")), Some(90));
    assert_eq!(parse_uptime_to_seconds(Some("2h15m")), Some(8100));
    assert_eq!(parse_uptime_to_seconds(Some("1h30m45s")), Some(5445));
    assert_eq!(parse_uptime_to_seconds(Some("1m")), Some(60));
    assert_eq!(parse_uptime_to_seconds(Some("1h")), Some(3600));
    assert_eq!(parse_uptime_to_seconds(None), None);
}
#[tokio::test]
async fn test_find_probe_file() {
    // find_probe_file should locate a representative file per language and
    // return None when nothing matches.
    let surgeon = Arc::new(MockSurgeon::default());
    let lawyer = Arc::new(pathfinder_lsp::MockLawyer::default());
    let (server, ws_dir) = make_server_with_lawyer(surgeon, lawyer);

    std::fs::create_dir_all(ws_dir.path().join("src")).unwrap();
    std::fs::write(ws_dir.path().join("main.go"), "package main").unwrap();
    std::fs::write(ws_dir.path().join("src/index.ts"), "export const x = 1;").unwrap();

    let go_probe = server.find_probe_file("go");
    assert_eq!(go_probe, Some(std::path::PathBuf::from("main.go")));

    let ts_probe = server.find_probe_file("typescript");
    assert_eq!(ts_probe, Some(std::path::PathBuf::from("src/index.ts")));

    // No Rust sources were created, so no probe file exists.
    assert_eq!(server.find_probe_file("rust"), None);
}
#[tokio::test]
async fn test_find_probe_file_recursive_monorepo() {
    // Probe-file discovery must recurse into nested monorepo layouts.
    let surgeon = Arc::new(MockSurgeon::default());
    let lawyer = Arc::new(pathfinder_lsp::MockLawyer::default());
    let (server, ws_dir) = make_server_with_lawyer(surgeon, lawyer);

    // Deeply nested Go entry point.
    let go_dir = ws_dir
        .path()
        .join("apps")
        .join("backend")
        .join("cmd")
        .join("server");
    std::fs::create_dir_all(&go_dir).unwrap();
    std::fs::write(go_dir.join("main.go"), "package main\nfunc main() {}").unwrap();

    // A vendored file under node_modules (presumably excluded from discovery
    // — this test only asserts the Go lookup, so that is not verified here).
    let vendored_dir = ws_dir.path().join("node_modules").join("react");
    std::fs::create_dir_all(&vendored_dir).unwrap();
    std::fs::write(vendored_dir.join("index.ts"), "export const React = {};").unwrap();

    let probe = server.find_probe_file("go");
    assert!(probe.is_some(), "Should find Go file in monorepo structure");
    let probe_path = probe.unwrap();
    assert!(
        probe_path.to_str().unwrap().contains("main.go"),
        "Should find a main.go file, got: {probe_path:?}"
    );

    // Python source tucked under tools/ should also be discoverable.
    let py_dir = ws_dir
        .path()
        .join("tools")
        .join("fath-factory")
        .join("src");
    std::fs::create_dir_all(&py_dir).unwrap();
    std::fs::write(py_dir.join("__init__.py"), "").unwrap();

    let py_probe = server.find_probe_file("python");
    assert!(
        py_probe.is_some(),
        "Should find Python file in tools/ directory"
    );
}
#[tokio::test]
async fn test_lsp_health_includes_missing_languages_with_install_hint() {
    // Languages detected in the workspace but without a running LSP must
    // appear as "unavailable" with an install hint, alongside ready ones.
    let surgeon = Arc::new(MockSurgeon::default());
    let lawyer = Arc::new(pathfinder_lsp::MockLawyer::default());
    let (server, _ws) = make_server_with_lawyer(surgeon, lawyer.clone());

    let mut caps = std::collections::HashMap::new();
    caps.insert(
        "typescript".to_string(),
        pathfinder_lsp::types::LspLanguageStatus {
            validation: true,
            reason: "LSP connected".to_string(),
            navigation_ready: Some(true),
            indexing_complete: Some(true),
            uptime_seconds: Some(60),
            diagnostics_strategy: Some("push".to_string()),
            supports_definition: Some(true),
            supports_call_hierarchy: Some(true),
            supports_diagnostics: Some(true),
            supports_formatting: Some(false),
        },
    );
    lawyer.set_capability_status(caps);

    let missing = vec![
        pathfinder_lsp::client::MissingLanguage {
            language_id: "python".to_string(),
            marker_file: "pyproject.toml".to_string(),
            tried_binaries: vec!["pyright".to_string(), "pylsp".to_string()],
            install_hint: "Install pyright: npm install -g pyright".to_string(),
        },
        pathfinder_lsp::client::MissingLanguage {
            language_id: "go".to_string(),
            marker_file: "go.mod".to_string(),
            tried_binaries: vec!["gopls".to_string()],
            install_hint: "Install gopls: go install golang.org/x/tools/gopls@latest"
                .to_string(),
        },
    ];
    lawyer.set_missing_languages(missing);

    let params = crate::server::types::LspHealthParams::default();
    let report = server
        .lsp_health_impl(params)
        .await
        .expect("should succeed")
        .0;

    assert_eq!(report.languages.len(), 3);
    let python_health = report.languages.iter().find(|l| l.language == "python");
    let go_health = report.languages.iter().find(|l| l.language == "go");
    let ts_health = report.languages.iter().find(|l| l.language == "typescript");

    assert!(ts_health.is_some());
    assert_eq!(ts_health.unwrap().status, "ready");

    assert!(python_health.is_some());
    assert_eq!(python_health.unwrap().status, "unavailable");
    assert_eq!(
        python_health.unwrap().install_hint,
        Some("Install pyright: npm install -g pyright".to_string())
    );

    assert!(go_health.is_some());
    assert_eq!(go_health.unwrap().status, "unavailable");
    assert_eq!(
        go_health.unwrap().install_hint,
        Some("Install gopls: go install golang.org/x/tools/gopls@latest".to_string())
    );
}
#[tokio::test]
async fn test_lsp_health_missing_language_filter_works() {
    // The `language` filter must also apply to the missing-languages list.
    let surgeon = Arc::new(MockSurgeon::default());
    let lawyer = Arc::new(pathfinder_lsp::MockLawyer::default());
    let (server, _ws) = make_server_with_lawyer(surgeon, lawyer.clone());

    lawyer.set_capability_status(std::collections::HashMap::new());
    let missing = vec![
        pathfinder_lsp::client::MissingLanguage {
            language_id: "python".to_string(),
            marker_file: "pyproject.toml".to_string(),
            tried_binaries: vec!["pyright".to_string()],
            install_hint: "Install pyright".to_string(),
        },
        pathfinder_lsp::client::MissingLanguage {
            language_id: "rust".to_string(),
            marker_file: "Cargo.toml".to_string(),
            tried_binaries: vec!["rust-analyzer".to_string()],
            install_hint: "Install rust-analyzer".to_string(),
        },
    ];
    lawyer.set_missing_languages(missing);

    let params = crate::server::types::LspHealthParams {
        language: Some("python".to_string()),
    };
    let report = server
        .lsp_health_impl(params)
        .await
        .expect("should succeed")
        .0;

    // Only the filtered language survives; rust is excluded.
    assert_eq!(report.languages.len(), 1);
    assert_eq!(report.languages[0].language, "python");
    assert_eq!(
        report.languages[0].install_hint,
        Some("Install pyright".to_string())
    );
}
#[tokio::test]
async fn test_health_shows_degraded_tools_for_no_diagnostics() {
    // Missing call-hierarchy and diagnostics capabilities must be mapped to
    // the corresponding degraded tools in the health report.
    let surgeon = Arc::new(MockSurgeon::default());
    let lawyer = Arc::new(pathfinder_lsp::MockLawyer::default());
    let (server, _ws) = make_server_with_lawyer(surgeon, lawyer.clone());

    let mut caps = std::collections::HashMap::new();
    caps.insert(
        "go".to_string(),
        pathfinder_lsp::types::LspLanguageStatus {
            validation: true,
            reason: "LSP connected".to_string(),
            navigation_ready: Some(true),
            indexing_complete: Some(true),
            uptime_seconds: Some(60),
            diagnostics_strategy: None,
            supports_definition: Some(true),
            supports_call_hierarchy: None,
            supports_diagnostics: None,
            supports_formatting: Some(true),
        },
    );
    lawyer.set_capability_status(caps);

    let params = crate::server::types::LspHealthParams {
        language: Some("go".to_string()),
    };
    let report = server
        .lsp_health_impl(params)
        .await
        .expect("should succeed")
        .0;

    assert_eq!(report.languages.len(), 1);
    let go_health = &report.languages[0];
    assert_eq!(go_health.language, "go");
    assert!(
        go_health
            .degraded_tools
            .contains(&"analyze_impact".to_owned()),
        "degraded_tools should include analyze_impact when call hierarchy unsupported"
    );
    assert!(
        go_health
            .degraded_tools
            .contains(&"read_with_deep_context".to_owned()),
        "degraded_tools should include read_with_deep_context when call hierarchy unsupported"
    );
    assert!(
        go_health
            .degraded_tools
            .contains(&"validate_only".to_owned()),
        "degraded_tools should include validate_only when diagnostics unsupported"
    );
}
#[tokio::test]
async fn test_health_shows_empty_degraded_when_fully_capable() {
    // A fully capable LSP should produce an empty degraded_tools list.
    let surgeon = Arc::new(MockSurgeon::default());
    let lawyer = Arc::new(pathfinder_lsp::MockLawyer::default());
    let (server, _ws) = make_server_with_lawyer(surgeon, lawyer.clone());

    let mut caps = std::collections::HashMap::new();
    caps.insert(
        "rust".to_string(),
        pathfinder_lsp::types::LspLanguageStatus {
            validation: true,
            reason: "LSP connected".to_string(),
            navigation_ready: Some(true),
            indexing_complete: Some(true),
            uptime_seconds: Some(60),
            diagnostics_strategy: Some("pull".to_string()),
            supports_definition: Some(true),
            supports_call_hierarchy: Some(true),
            supports_diagnostics: Some(true),
            supports_formatting: Some(true),
        },
    );
    lawyer.set_capability_status(caps);

    let params = crate::server::types::LspHealthParams {
        language: Some("rust".to_string()),
    };
    let report = server
        .lsp_health_impl(params)
        .await
        .expect("should succeed")
        .0;

    assert_eq!(report.languages.len(), 1);
    let rust_health = &report.languages[0];
    assert_eq!(rust_health.language, "rust");
    assert!(
        rust_health.degraded_tools.is_empty(),
        "degraded_tools should be empty when all capabilities supported, got: {:?}",
        rust_health.degraded_tools
    );
}
#[tokio::test]
async fn test_health_shows_push_latency() {
    // Push-based diagnostics are slower to settle; the report reflects that
    // with a ~10s validation latency estimate.
    let surgeon = Arc::new(MockSurgeon::default());
    let lawyer = Arc::new(pathfinder_lsp::MockLawyer::default());
    let (server, _ws) = make_server_with_lawyer(surgeon, lawyer.clone());

    let mut caps = std::collections::HashMap::new();
    caps.insert(
        "go".to_string(),
        pathfinder_lsp::types::LspLanguageStatus {
            validation: true,
            reason: "LSP connected".to_string(),
            navigation_ready: Some(true),
            indexing_complete: Some(true),
            uptime_seconds: Some(60),
            diagnostics_strategy: Some("push".to_string()),
            supports_definition: Some(true),
            supports_call_hierarchy: Some(true),
            supports_diagnostics: Some(true),
            supports_formatting: Some(true),
        },
    );
    lawyer.set_capability_status(caps);

    let params = crate::server::types::LspHealthParams {
        language: Some("go".to_string()),
    };
    let report = server
        .lsp_health_impl(params)
        .await
        .expect("should succeed")
        .0;

    assert_eq!(report.languages.len(), 1);
    let go_health = &report.languages[0];
    assert_eq!(go_health.language, "go");
    assert_eq!(
        go_health.validation_latency_ms,
        Some(10_000),
        "push diagnostics should have ~10s validation latency"
    );
    assert!(
        go_health.degraded_tools.is_empty(),
        "fully capable LSP should have no degraded tools"
    );
}
#[tokio::test]
async fn test_health_shows_pull_latency() {
    // Pull-based diagnostics get the lower ~2s validation latency estimate.
    let surgeon = Arc::new(MockSurgeon::default());
    let lawyer = Arc::new(pathfinder_lsp::MockLawyer::default());
    let (server, _ws) = make_server_with_lawyer(surgeon, lawyer.clone());

    let mut caps = std::collections::HashMap::new();
    caps.insert(
        "rust".to_string(),
        pathfinder_lsp::types::LspLanguageStatus {
            validation: true,
            reason: "LSP connected".to_string(),
            navigation_ready: Some(true),
            indexing_complete: Some(true),
            uptime_seconds: Some(60),
            diagnostics_strategy: Some("pull".to_string()),
            supports_definition: Some(true),
            supports_call_hierarchy: Some(true),
            supports_diagnostics: Some(true),
            supports_formatting: Some(true),
        },
    );
    lawyer.set_capability_status(caps);

    let params = crate::server::types::LspHealthParams {
        language: Some("rust".to_string()),
    };
    let report = server
        .lsp_health_impl(params)
        .await
        .expect("should succeed")
        .0;

    let rust_health = &report.languages[0];
    assert_eq!(
        rust_health.validation_latency_ms,
        Some(2_000),
        "pull diagnostics should have ~2s validation latency"
    );
}
#[tokio::test]
async fn test_lsp_health_ready_but_still_indexing_shows_confidence_gradient() {
    // Navigation works but indexing is incomplete and diagnostics are
    // unsupported: status is "ready" while the gradient fields show limits.
    let surgeon = Arc::new(MockSurgeon::default());
    let lawyer = Arc::new(pathfinder_lsp::MockLawyer::default());
    let (server, _ws) = make_server_with_lawyer(surgeon, lawyer.clone());

    let mut caps = std::collections::HashMap::new();
    caps.insert(
        "python".to_string(),
        pathfinder_lsp::types::LspLanguageStatus {
            validation: false,
            reason: "LSP connected but does not support diagnostics".to_string(),
            navigation_ready: Some(true),
            indexing_complete: Some(false),
            uptime_seconds: Some(5),
            diagnostics_strategy: Some("none".to_string()),
            supports_definition: Some(true),
            supports_call_hierarchy: Some(true),
            supports_diagnostics: Some(false),
            supports_formatting: Some(false),
        },
    );
    lawyer.set_capability_status(caps);

    let params = crate::server::types::LspHealthParams {
        language: Some("python".to_string()),
    };
    let report = server
        .lsp_health_impl(params)
        .await
        .expect("should succeed")
        .0;

    let py_health = &report.languages[0];
    assert_eq!(py_health.status, "ready");
    assert_eq!(py_health.indexing_status, Some("in_progress".to_string()));
    assert_eq!(py_health.navigation_ready, Some(true));
    assert_eq!(py_health.diagnostics_strategy, Some("none".to_string()));
    assert!(py_health
        .degraded_tools
        .contains(&"validate_only".to_string()));
}
#[tokio::test]
async fn test_lsp_health_fully_indexed_shows_complete_confidence() {
    // Everything indexed and supported: complete indexing status, no
    // degraded tools.
    let surgeon = Arc::new(MockSurgeon::default());
    let lawyer = Arc::new(pathfinder_lsp::MockLawyer::default());
    let (server, _ws) = make_server_with_lawyer(surgeon, lawyer.clone());

    let mut caps = std::collections::HashMap::new();
    caps.insert(
        "rust".to_string(),
        pathfinder_lsp::types::LspLanguageStatus {
            validation: true,
            reason: "LSP connected".to_string(),
            navigation_ready: Some(true),
            indexing_complete: Some(true),
            uptime_seconds: Some(120),
            diagnostics_strategy: Some("pull".to_string()),
            supports_definition: Some(true),
            supports_call_hierarchy: Some(true),
            supports_diagnostics: Some(true),
            supports_formatting: Some(true),
        },
    );
    lawyer.set_capability_status(caps);

    let params = crate::server::types::LspHealthParams {
        language: Some("rust".to_string()),
    };
    let report = server
        .lsp_health_impl(params)
        .await
        .expect("should succeed")
        .0;

    let rust_health = &report.languages[0];
    assert_eq!(rust_health.status, "ready");
    assert_eq!(rust_health.navigation_ready, Some(true));
    assert_eq!(rust_health.indexing_status, Some("complete".to_string()));
    assert!(rust_health.degraded_tools.is_empty());
}
#[test]
fn test_probe_cache_positive_result_never_expires() {
    // ProbeCacheEntry is constructed and queried synchronously — there is no
    // await here, so the tokio runtime spun up by #[tokio::test] was pure
    // overhead. A plain #[test] is sufficient.
    let entry = crate::server::ProbeCacheEntry::new(true);
    assert!(entry.is_valid(), "positive entry should always be valid");
}
#[test]
fn test_probe_cache_negative_result_is_initially_valid() {
    // Synchronous construction and validity check — no async runtime needed,
    // so the #[tokio::test] attribute is replaced with a plain #[test].
    let entry = crate::server::ProbeCacheEntry::new(false);
    assert!(entry.is_valid(), "fresh negative entry should be valid");
}
#[tokio::test]
async fn test_probe_negative_cache_skips_reprobe() {
    // A seeded negative probe-cache entry means the health check must not
    // re-probe, leaving the still-indexing language in warming_up.
    let surgeon = Arc::new(MockSurgeon::default());
    let lawyer = Arc::new(pathfinder_lsp::MockLawyer::default());
    let (server, _ws) = make_server_with_lawyer(surgeon, lawyer.clone());

    // Pre-seed a negative (probe previously failed) cache entry for rust.
    server
        .probe_cache
        .lock()
        .unwrap_or_else(std::sync::PoisonError::into_inner)
        .insert(
            "rust".to_string(),
            crate::server::ProbeCacheEntry::new(false),
        );

    let mut caps = std::collections::HashMap::new();
    caps.insert(
        "rust".to_string(),
        pathfinder_lsp::types::LspLanguageStatus {
            validation: true,
            reason: "LSP connected".to_string(),
            navigation_ready: Some(false),
            indexing_complete: Some(false),
            uptime_seconds: Some(30),
            diagnostics_strategy: Some("pull".to_string()),
            supports_definition: Some(false),
            supports_call_hierarchy: Some(true),
            supports_diagnostics: Some(true),
            supports_formatting: Some(true),
        },
    );
    lawyer.set_capability_status(caps);

    let params = crate::server::types::LspHealthParams {
        language: Some("rust".to_string()),
    };
    let report = server
        .lsp_health_impl(params)
        .await
        .expect("should succeed")
        .0;

    let rust_health = &report.languages[0];
    assert_eq!(rust_health.status, "warming_up");
    assert!(
        !rust_health.probe_verified,
        "should not be probe-verified when using negative cache"
    );
}
#[tokio::test]
async fn test_probe_cache_positive_upgrades_to_ready() {
    // A seeded positive probe-cache entry upgrades an otherwise warming_up
    // language straight to "ready" without running a new probe.
    let surgeon = Arc::new(MockSurgeon::default());
    let lawyer = Arc::new(pathfinder_lsp::MockLawyer::default());
    let (server, _ws) = make_server_with_lawyer(surgeon, lawyer.clone());

    // Pre-seed a positive (probe previously succeeded) cache entry for rust.
    server
        .probe_cache
        .lock()
        .unwrap_or_else(std::sync::PoisonError::into_inner)
        .insert(
            "rust".to_string(),
            crate::server::ProbeCacheEntry::new(true),
        );

    let mut caps = std::collections::HashMap::new();
    caps.insert(
        "rust".to_string(),
        pathfinder_lsp::types::LspLanguageStatus {
            validation: true,
            reason: "LSP connected".to_string(),
            navigation_ready: Some(false),
            indexing_complete: Some(false),
            uptime_seconds: Some(30),
            diagnostics_strategy: Some("pull".to_string()),
            supports_definition: Some(false),
            supports_call_hierarchy: Some(true),
            supports_diagnostics: Some(true),
            supports_formatting: Some(true),
        },
    );
    lawyer.set_capability_status(caps);

    let params = crate::server::types::LspHealthParams {
        language: Some("rust".to_string()),
    };
    let report = server
        .lsp_health_impl(params)
        .await
        .expect("should succeed")
        .0;

    let rust_health = &report.languages[0];
    assert_eq!(rust_health.status, "ready");
    assert!(
        rust_health.probe_verified,
        "should be probe-verified from cache"
    );
}
}