#![allow(clippy::too_many_lines)]
mod support;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::time::{Duration, Instant};
use serde_json::{Value, json};
use sqry_core::graph::CodeGraph;
use sqry_core::graph::unified::build::BuildConfig;
use sqry_core::graph::unified::node::{NodeId, NodeKind};
use sqry_core::graph::unified::persistence::{GraphStorage, save_to_path};
use sqry_daemon::DaemonError;
use sqry_daemon::workspace::WorkspaceBuilder;
#[allow(unused_imports)]
use sqry_daemon_protocol::{ENVELOPE_VERSION, SearchItem, SearchMode, SearchRequest, SearchResult};
#[allow(unused_imports)]
use support::ipc::{TestIpcClient, TestServer, expect_error, expect_success};
use tempfile::TempDir;
/// Copies the flat `test-fixtures/cli-basic` fixture into `tmp_root`.
///
/// Only regular files at the top level are copied; subdirectories are skipped,
/// and the fixture's own `.sqry` cache directory is explicitly excluded so the
/// test workspace starts without a persisted graph.
///
/// Panics if the fixture directory is missing or any filesystem step fails.
fn copy_cli_basic_fixture(tmp_root: &Path) {
    let src = repo_root().join("test-fixtures").join("cli-basic");
    assert!(
        src.is_dir(),
        "test-fixtures/cli-basic missing at {} — required by DAEMON_SEARCH_TESTS",
        src.display()
    );
    std::fs::create_dir_all(tmp_root).expect("create tmp_root");
    for entry in std::fs::read_dir(&src).expect("read cli-basic dir") {
        let entry = entry.expect("dir entry");
        let name = entry.file_name();
        // Never carry the fixture's cached `.sqry` state into the workspace.
        if name == ".sqry" {
            continue;
        }
        // Top-level regular files only; directories and symlinks are ignored.
        if !entry.file_type().expect("file type").is_file() {
            continue;
        }
        let from = entry.path();
        let to = tmp_root.join(name);
        if let Err(e) = std::fs::copy(&from, &to) {
            panic!("copy {} -> {}: {}", from.display(), to.display(), e);
        }
    }
}
/// Walks upward from the test binary's location until it finds a `Cargo.toml`
/// containing a `[workspace]` table, and returns that directory.
///
/// Panics if no workspace manifest is found before reaching the filesystem root.
fn repo_root() -> PathBuf {
    let mut dir = std::env::current_exe().expect("current_exe");
    // `pop` returns false once we have climbed past the filesystem root.
    while dir.pop() {
        let manifest = dir.join("Cargo.toml");
        if manifest.is_file() {
            if let Ok(contents) = std::fs::read_to_string(&manifest) {
                // Only the workspace-level manifest declares `[workspace]`;
                // member crates' Cargo.toml files are skipped.
                if contents.contains("[workspace]") {
                    return dir;
                }
            }
        }
    }
    panic!(
        "could not locate workspace root from {:?}",
        std::env::current_exe()
    );
}
/// Test-only workspace builder that, in addition to building the unified
/// graph, persists a snapshot under `<root>/.sqry/graph/` so the daemon's
/// persisted-load path can be exercised (see `load_persisted`).
struct PersistingBuilder {
    // Plugin manager shared with the in-process reference build and reused
    // when re-loading the persisted snapshot.
    plugins: Arc<sqry_core::plugin::PluginManager>,
    // Build configuration forwarded verbatim to `build_unified_graph`.
    cfg: BuildConfig,
}
impl std::fmt::Debug for PersistingBuilder {
    // Manual impl printing only the type name with `..`.
    // NOTE(review): presumably hand-written because the field types do not
    // derive `Debug` — confirm against their definitions.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("PersistingBuilder").finish_non_exhaustive()
    }
}
impl WorkspaceBuilder for PersistingBuilder {
    /// Builds the unified graph for `root`, then writes a snapshot to
    /// `<root>/.sqry/graph/snapshot.sqry` before handing the graph back.
    /// All failures surface as `WorkspaceBuildFailed` with a stage-specific reason.
    fn build(&self, root: &Path) -> Result<CodeGraph, DaemonError> {
        // One error constructor shared by every stage of the build.
        let fail = |reason: String| DaemonError::WorkspaceBuildFailed {
            root: root.to_path_buf(),
            reason,
        };
        let graph =
            sqry_core::graph::unified::build::build_unified_graph(root, &self.plugins, &self.cfg)
                .map_err(|e| fail(format!("daemon-search-tests build: {e}")))?;
        let graph_dir = root.join(".sqry").join("graph");
        std::fs::create_dir_all(&graph_dir)
            .map_err(|e| fail(format!("create .sqry/graph: {e}")))?;
        save_to_path(&graph, graph_dir.join("snapshot.sqry").as_path())
            .map_err(|e| fail(format!("persist snapshot: {e}")))?;
        Ok(graph)
    }
    /// Loads the snapshot previously written by `build`. A missing snapshot or
    /// a decode failure is reported as `WorkspaceBuildFailed`.
    fn load_persisted(&self, root: &Path) -> Result<CodeGraph, DaemonError> {
        let storage = GraphStorage::new(root);
        if !storage.snapshot_exists() {
            return Err(DaemonError::WorkspaceBuildFailed {
                root: root.to_path_buf(),
                reason: "load_persisted: snapshot missing".into(),
            });
        }
        sqry_core::graph::unified::persistence::load_from_path(
            storage.snapshot_path(),
            Some(&self.plugins),
        )
        .map_err(|e| DaemonError::WorkspaceBuildFailed {
            root: root.to_path_buf(),
            reason: format!("load_persisted: {e}"),
        })
    }
}
/// Reference implementation of an exact-name search over an in-process graph,
/// used as the parity oracle for `daemon/search`.
///
/// Mirrors the daemon's projection: sorted + deduplicated node ids,
/// macro-generated nodes filtered out, items resolved independently of the
/// daemon code path, `truncated` always false and no cursor.
fn in_process_exact_projection(graph: &CodeGraph, pattern: &str) -> SearchResult {
    let snapshot = graph.snapshot();
    let mut ids = snapshot.find_by_exact_name(pattern);
    ids.sort_unstable();
    ids.dedup();
    // Drop nodes whose metadata marks them as macro-generated; nodes with no
    // metadata entry are kept.
    let meta = graph.macro_metadata();
    ids.retain(|nid| match meta.get(*nid) {
        Some(m) => m.macro_generated != Some(true),
        None => true,
    });
    // Nodes that fail to resolve (missing name or file) are silently skipped.
    let items: Vec<SearchItem> = ids
        .iter()
        .filter_map(|&nid| node_to_search_item_independent(graph, nid))
        .collect();
    SearchResult {
        total: items.len() as u64,
        items,
        truncated: false,
        cursor: None,
    }
}
/// Converts a graph node into a `SearchItem` without reusing any daemon-side
/// projection code, so the parity test compares two independent pipelines.
///
/// Returns `None` when the node, its name, or its file path cannot be
/// resolved; a missing qualified name falls back to the plain name.
fn node_to_search_item_independent(graph: &CodeGraph, nid: NodeId) -> Option<SearchItem> {
    let entry = graph.nodes().get(nid)?;
    let strings = graph.strings();
    let name = strings.resolve(entry.name)?.to_string();
    let qualified_name = match entry.qualified_name.and_then(|id| strings.resolve(id)) {
        Some(q) => q.to_string(),
        None => name.clone(),
    };
    let file_path = graph
        .files()
        .resolve(entry.file)?
        .to_string_lossy()
        .into_owned();
    Some(SearchItem {
        name,
        qualified_name,
        kind: node_kind_to_str_local(entry.kind).to_owned(),
        language: language_from_path_local(Path::new(&file_path)),
        file_path,
        start_line: entry.start_line,
        start_column: entry.start_column,
        end_line: entry.end_line,
        end_column: entry.end_column,
        score: None,
    })
}
/// Maps a `NodeKind` to the snake_case wire string used in `SearchItem.kind`.
///
/// The match is deliberately exhaustive (no `_` arm) so adding a `NodeKind`
/// variant upstream forces this mapping to be updated.
fn node_kind_to_str_local(kind: NodeKind) -> &'static str {
    match kind {
        NodeKind::Function => "function",
        NodeKind::Method => "method",
        NodeKind::Class => "class",
        NodeKind::Interface => "interface",
        NodeKind::Trait => "trait",
        NodeKind::Module => "module",
        NodeKind::Variable => "variable",
        NodeKind::Constant => "constant",
        NodeKind::Type => "type",
        NodeKind::Struct => "struct",
        NodeKind::Enum => "enum",
        NodeKind::EnumVariant => "enum_variant",
        NodeKind::Macro => "macro",
        NodeKind::Parameter => "parameter",
        NodeKind::Property => "property",
        NodeKind::Import => "import",
        NodeKind::Export => "export",
        NodeKind::Component => "component",
        NodeKind::Service => "service",
        NodeKind::Resource => "resource",
        NodeKind::Endpoint => "endpoint",
        NodeKind::Test => "test",
        NodeKind::CallSite => "call_site",
        NodeKind::StyleRule => "style_rule",
        NodeKind::StyleAtRule => "style_at_rule",
        NodeKind::StyleVariable => "style_variable",
        NodeKind::Lifetime => "lifetime",
        NodeKind::TypeParameter => "type_parameter",
        NodeKind::Annotation => "annotation",
        NodeKind::AnnotationValue => "annotation_value",
        NodeKind::LambdaTarget => "lambda_target",
        NodeKind::JavaModule => "java_module",
        NodeKind::EnumConstant => "enum_constant",
        NodeKind::Other => "other",
    }
}
/// Infers a language identifier from a file path's extension.
///
/// The comparison is case-insensitive; paths with no extension, a non-UTF-8
/// extension, or an unrecognized extension map to `"unknown"`.
fn language_from_path_local(path: &Path) -> String {
    let ext = match path.extension().and_then(|e| e.to_str()) {
        Some(e) => e.to_lowercase(),
        None => return "unknown".to_string(),
    };
    let lang = match ext.as_str() {
        "rs" => "rust",
        "py" | "pyw" => "python",
        "ts" | "mts" | "cts" => "typescript",
        "tsx" => "typescriptreact",
        "js" | "mjs" | "cjs" => "javascript",
        "jsx" => "javascriptreact",
        "go" => "go",
        "java" => "java",
        _ => "unknown",
    };
    lang.to_string()
}
/// Builds the JSON-RPC params object for a `daemon/search` request.
///
/// Always uses exact-match mode and excludes generated symbols so the daemon
/// side filters the same way as `in_process_exact_projection`.
fn build_search_params(workspace_root: &Path, pattern: &str) -> Value {
    let search_path = workspace_root.to_string_lossy();
    json!({
        "envelope_version": ENVELOPE_VERSION,
        "pattern": pattern,
        "search_path": search_path,
        "mode": "exact",
        "include_generated": false,
    })
}
/// Issues a `daemon/search` request over the test IPC client and decodes the
/// successful envelope's `result` field into a `SearchResult`.
///
/// Panics (failing the test) on a non-success envelope or a decode failure,
/// including the full envelope in the panic message for diagnosis.
async fn daemon_search(
    client: &mut TestIpcClient,
    workspace_root: &Path,
    pattern: &str,
) -> SearchResult {
    let params = build_search_params(workspace_root, pattern);
    let resp = client.request("daemon/search", params).await;
    let envelope = expect_success(&resp);
    match serde_json::from_value::<SearchResult>(envelope["result"].clone()) {
        Ok(result) => result,
        Err(e) => panic!(
            "daemon/search response did not decode as SearchResult: {e}\n envelope: {envelope}"
        ),
    }
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn daemon_search_parity_with_in_process_for_five_fixture_queries() {
    // Parity test: for five fixture queries, `daemon/search` over IPC must
    // return items serializing to byte-identical JSON (and the same `total`)
    // as an in-process exact-name projection over a graph built from the same
    // workspace with the same plugins and BuildConfig.
    let tmp = TempDir::new().expect("tempdir");
    copy_cli_basic_fixture(tmp.path());
    let workspace_root = tmp.path().to_path_buf();
    let plugins = Arc::new(sqry_plugin_registry::create_plugin_manager());
    let builder: Arc<dyn WorkspaceBuilder> = Arc::new(PersistingBuilder {
        plugins: Arc::clone(&plugins),
        cfg: BuildConfig::default(),
    });
    let server = TestServer::with_builder(Arc::clone(&builder)).await;
    let mut client = TestIpcClient::connect(&server.path).await;
    client.hello(1).await;
    // Load the workspace into the daemon before searching.
    let load_resp = client
        .request(
            "daemon/load",
            json!({ "index_root": workspace_root.to_string_lossy() }),
        )
        .await;
    expect_success(&load_resp);
    // Reference graph built in-process with identical plugins/config, so both
    // sides index the same inputs.
    let in_process_graph = sqry_core::graph::unified::build::build_unified_graph(
        &workspace_root,
        &plugins,
        &BuildConfig::default(),
    )
    .expect("in-process build");
    // Four patterns expected to hit plus one deliberate miss
    // ("does_not_exist") to cover the empty-result case.
    let queries = [
        "calculate_sum",
        "Calculator",
        "PI",
        "Processor",
        "does_not_exist",
    ];
    for pat in queries {
        let daemon_result = daemon_search(&mut client, &workspace_root, pat).await;
        let in_process_result = in_process_exact_projection(&in_process_graph, pat);
        assert!(
            !daemon_result.truncated,
            "fixture is too small to truncate; got {daemon_result:?} for pattern {pat}",
        );
        // Compare serialized JSON so any field-level mismatch is shown verbatim
        // in the failure message.
        let daemon_json = serde_json::to_string(&daemon_result.items).expect("serialize daemon");
        let in_process_json =
            serde_json::to_string(&in_process_result.items).expect("serialize in-process");
        assert_eq!(
            daemon_json, in_process_json,
            "parity FAILED for pattern {pat}\n daemon: {daemon_json}\n in-process: {in_process_json}",
        );
        assert_eq!(
            daemon_result.total, in_process_result.total,
            "total mismatch for pattern {pat}",
        );
    }
    drop(client);
    server.stop().await;
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn daemon_search_latency_p99_under_100ms() {
    // Latency acceptance test: on a warmed workspace, the p99 of 100
    // successive `daemon/search` round-trips must stay under 100ms.
    let tmp = TempDir::new().expect("tempdir");
    copy_cli_basic_fixture(tmp.path());
    let workspace_root = tmp.path().to_path_buf();
    let plugins = Arc::new(sqry_plugin_registry::create_plugin_manager());
    let builder: Arc<dyn WorkspaceBuilder> = Arc::new(PersistingBuilder {
        plugins: Arc::clone(&plugins),
        cfg: BuildConfig::default(),
    });
    let server = TestServer::with_builder(Arc::clone(&builder)).await;
    let mut client = TestIpcClient::connect(&server.path).await;
    client.hello(1).await;
    expect_success(
        &client
            .request(
                "daemon/load",
                json!({ "index_root": workspace_root.to_string_lossy() }),
            )
            .await,
    );
    // One un-timed warm-up call so the measured samples hit a warmed path.
    let _warm = daemon_search(&mut client, &workspace_root, "calculate_sum").await;
    let mut samples: Vec<Duration> = Vec::with_capacity(100);
    for _ in 0..100 {
        let t = Instant::now();
        let _ = daemon_search(&mut client, &workspace_root, "calculate_sum").await;
        samples.push(t.elapsed());
    }
    // `sort_unstable` over stable `sort`: no allocation, and Duration's total
    // order makes stability irrelevant here.
    samples.sort_unstable();
    // With 100 samples, index 98 is the 99th-smallest value, i.e. p99.
    let p99 = samples[98];
    assert!(
        p99 < Duration::from_millis(100),
        "daemon/search p99 = {p99:?} exceeds the 100ms threshold (samples \
         min={:?} median={:?} p95={:?} max={:?}). The DAG acceptance \
         criterion is `p99 < 100ms across 100 successive daemon/search \
         calls on a warmed workspace`.",
        samples[0],
        samples[50],
        samples[95],
        samples[99],
    );
    drop(client);
    server.stop().await;
}
#[cfg(feature = "test-hooks")]
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn daemon_search_workspace_evicted_reload_on_read() {
    // Eviction/reload test (requires the `test-hooks` feature for
    // `evict_for_test`): after a loaded workspace is forcibly evicted, the
    // next `daemon/search` must transparently reload it — from the snapshot
    // the PersistingBuilder wrote — and return non-empty results with the
    // envelope reporting the workspace as "Loaded".
    use sqry_core::project::{ProjectRootMode, canonicalize_path};
    use sqry_daemon::WorkspaceKey;
    let tmp = TempDir::new().expect("tempdir");
    copy_cli_basic_fixture(tmp.path());
    let workspace_root = tmp.path().to_path_buf();
    let plugins = Arc::new(sqry_plugin_registry::create_plugin_manager());
    let builder: Arc<dyn WorkspaceBuilder> = Arc::new(PersistingBuilder {
        plugins: Arc::clone(&plugins),
        cfg: BuildConfig::default(),
    });
    let server = TestServer::with_builder(Arc::clone(&builder)).await;
    let mut client = TestIpcClient::connect(&server.path).await;
    client.hello(1).await;
    expect_success(
        &client
            .request(
                "daemon/load",
                json!({ "index_root": workspace_root.to_string_lossy() }),
            )
            .await,
    );
    // Warm search proves the pre-eviction path works before we evict.
    let _warm = daemon_search(&mut client, &workspace_root, "calculate_sum").await;
    // The eviction key must match how the daemon keyed the workspace:
    // canonicalized root, GitRoot mode, generation 0.
    // NOTE(review): assumes daemon/load used GitRoot mode and generation 0 for
    // this workspace — confirm against the daemon's load path.
    let canonical = canonicalize_path(&workspace_root).expect("canonicalize");
    let key = WorkspaceKey::new(canonical, ProjectRootMode::GitRoot, 0);
    assert!(
        server.manager.evict_for_test(&key),
        "evict_for_test must succeed against the freshly-loaded workspace",
    );
    // Search again post-eviction; the daemon should reload on read.
    let resp = client
        .request(
            "daemon/search",
            build_search_params(&workspace_root, "calculate_sum"),
        )
        .await;
    let envelope = expect_success(&resp);
    let result: SearchResult = serde_json::from_value(envelope["result"].clone())
        .expect("post-eviction SearchResult decode");
    assert!(
        !result.items.is_empty(),
        "post-eviction reload must return the same hits the pre-eviction \
         path did — got an empty result set, which means the reload \
         loaded a different (empty) graph",
    );
    // Envelope metadata must also reflect that the workspace is loaded again.
    assert_eq!(envelope["meta"]["workspace_state"], json!("Loaded"));
    drop(client);
    server.stop().await;
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn daemon_load_workspace_incompatible_graph_returns_32005() {
    // Error-mapping test: a builder that fails with
    // `WorkspaceIncompatibleGraph` must surface over the wire as JSON-RPC
    // error code -32005 from `daemon/load`.

    // Test-local builder whose `build` always fails with the
    // incompatible-graph error; `load_persisted` falls back to the trait's
    // default behavior.
    struct IncompatibleBuilder;
    impl std::fmt::Debug for IncompatibleBuilder {
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
            f.debug_struct("IncompatibleBuilder")
                .finish_non_exhaustive()
        }
    }
    impl WorkspaceBuilder for IncompatibleBuilder {
        fn build(&self, root: &Path) -> Result<CodeGraph, DaemonError> {
            Err(DaemonError::WorkspaceIncompatibleGraph {
                root: root.to_path_buf(),
                reason: "test fixture: snapshot built with plugin selection \
                     that mismatches the daemon's enabled plugins"
                    .into(),
            })
        }
    }
    let tmp = TempDir::new().expect("tempdir");
    copy_cli_basic_fixture(tmp.path());
    let workspace_root = tmp.path().to_path_buf();
    let builder: Arc<dyn WorkspaceBuilder> = Arc::new(IncompatibleBuilder);
    let server = TestServer::with_builder(Arc::clone(&builder)).await;
    let mut client = TestIpcClient::connect(&server.path).await;
    client.hello(1).await;
    let resp = client
        .request(
            "daemon/load",
            json!({ "index_root": workspace_root.to_string_lossy() }),
        )
        .await;
    // The load must fail, and the failure must carry the dedicated wire code.
    let err = expect_error(&resp);
    assert_eq!(
        err.code, -32005,
        "incompatible-graph load must surface as JSON-RPC -32005 \
         (WorkspaceIncompatibleGraph wire mapping), got: {err:?}",
    );
    drop(client);
    server.stop().await;
}