#[cfg(test)]
mod tests {
use serde_json::{json, Value};
use sha2::{Digest, Sha256};
use splice::graph::magellan_integration::MagellanIntegration;
use splice::ingest::rust::extract_rust_symbols;
use std::collections::HashMap;
use std::io::Write;
#[cfg(unix)]
use std::os::unix::fs::PermissionsExt;
use std::path::PathBuf;
use std::process::{Command, Stdio};
use tempfile::{NamedTempFile, TempDir};
/// Extract the first complete JSON object from a blob of CLI stdout.
///
/// Prefers an object beginning with the canonical `{"status":` marker and
/// returns the brace-balanced span starting there. If the marker is absent,
/// falls back to the cruder span from the first `{` to the last `}`.
/// Returns an empty string when no candidate object is found.
///
/// Note: brace matching is not string-aware, so `{`/`}` inside JSON string
/// values would confuse it — acceptable for the payloads these tests emit.
fn extract_json_from_stdout(stdout: &str) -> String {
    let json_start = match stdout.find(r#"{"status":"#) {
        Some(idx) => idx,
        None => {
            // Fallback: best-effort outermost span.
            return match (stdout.find('{'), stdout.rfind('}')) {
                (Some(start), Some(end)) if end >= start => stdout[start..=end].to_string(),
                _ => String::new(),
            };
        }
    };
    // Walk bytes (braces are ASCII) tracking nesting depth. Using byte offsets
    // throughout fixes the original byte-index/char-index mismatch: `find`
    // returns a byte index, but the old code indexed into a Vec<char> with it,
    // which silently broke extraction whenever non-ASCII text preceded the
    // JSON payload.
    let mut depth: i32 = 0;
    for (offset, byte) in stdout.as_bytes()[json_start..].iter().enumerate() {
        match byte {
            b'{' => depth += 1,
            b'}' => {
                depth -= 1;
                if depth == 0 {
                    return stdout[json_start..=json_start + offset].to_string();
                }
            }
            _ => {}
        }
    }
    // Unbalanced braces: mirror the original behavior and return nothing.
    String::new()
}
/// Locate the `splice` binary under test.
///
/// Resolution order: explicit env overrides (`SPLICE_TEST_BIN`, then the
/// cargo-provided `CARGO_BIN_EXE_splice`), then `<target dir>/splice` next to
/// the test executable, then the newest large (>50 MB) executable named
/// `splice-*` in the `deps/` directory. Falls back to the (possibly missing)
/// `<target dir>/splice` path if nothing matches.
fn get_splice_binary() -> PathBuf {
    // Explicit overrides always win over filesystem discovery.
    for var in ["SPLICE_TEST_BIN", "CARGO_BIN_EXE_splice"] {
        if let Ok(path) = std::env::var(var) {
            return PathBuf::from(path);
        }
    }
    // Test binaries live in target/<profile>/deps; the CLI binary sits one
    // level up in target/<profile>.
    let mut target_dir = std::env::current_exe().unwrap();
    target_dir.pop();
    let deps_dir = target_dir.clone();
    target_dir.pop();
    let bin_path = target_dir.join("splice");
    if bin_path.exists() {
        return bin_path;
    }
    // Scan deps/ for `splice-<hash>` executables, keeping the most recently
    // modified large candidate (size filter skips tiny build artifacts).
    if let Ok(entries) = std::fs::read_dir(deps_dir) {
        let mut newest: Option<(std::time::SystemTime, PathBuf)> = None;
        for entry in entries.flatten() {
            let candidate = entry.path();
            let file_name = candidate.file_name().and_then(|s| s.to_str()).unwrap_or("");
            if !file_name.starts_with("splice-") || !candidate.is_file() {
                continue;
            }
            if let Ok(metadata) = entry.metadata() {
                #[cfg(unix)]
                let is_executable = metadata.permissions().mode() & 0o111 != 0;
                #[cfg(not(unix))]
                let is_executable = true;
                if !is_executable || metadata.len() <= 50_000_000 {
                    continue;
                }
                if let Ok(modified) = metadata.modified() {
                    // `>=` keeps last-wins tie behavior, matching max_by_key.
                    let replaces = match &newest {
                        Some((best, _)) => modified >= *best,
                        None => true,
                    };
                    if replaces {
                        newest = Some((modified, candidate));
                    }
                }
            }
        }
        if let Some((_, path)) = newest {
            return path;
        }
    }
    bin_path
}
#[test]
fn test_cli_successful_patch() {
    // Smoke test: build a minimal cargo workspace, then invoke
    // `splice patch --file ... --symbol greet --with ...` and dump the exit
    // code / stdout / stderr.
    //
    // NOTE(review): this test makes no assertions — it only prints, so it can
    // never fail. Consider asserting on `result.status` like
    // test_analyzer_off_by_default does.
    let workspace_dir = TempDir::new().expect("Failed to create temp workspace");
    let workspace_path = workspace_dir.path();
    // Write a minimal Cargo.toml via a temp file moved into place.
    // NOTE(review): fs::rename can fail across filesystems; writing directly
    // to cargo_toml_path would be simpler — TODO confirm and simplify.
    let cargo_toml_path = workspace_path.join("Cargo.toml");
    let mut cargo_toml = NamedTempFile::new().expect("Failed to create Cargo.toml");
    write!(
        cargo_toml,
        r#"[package]
name = "temp-test"
version = "0.1.0"
edition = "2021"
[lib]
name = "temp_test"
path = "src/lib.rs"
"#
    )
    .expect("Failed to write Cargo.toml");
    std::fs::rename(cargo_toml.path(), &cargo_toml_path).expect("Failed to move Cargo.toml");
    let src_dir = workspace_path.join("src");
    std::fs::create_dir(&src_dir).expect("Failed to create src directory");
    // Original symbol to be replaced.
    let lib_rs_path = src_dir.join("lib.rs");
    let source = r#"
pub fn greet(name: &str) -> String {
format!("Hello, {}!", name)
}
"#;
    std::fs::write(&lib_rs_path, source).expect("Failed to write lib.rs");
    // Replacement body for `greet`.
    let patch_path = workspace_path.join("patch.rs");
    let patch_content = r#"
pub fn greet(name: &str) -> String {
format!("Greetings, {}!", name)
}
"#;
    std::fs::write(&patch_path, patch_content).expect("Failed to write patch file");
    // NOTE(review): unused — presumably a leftover from a graph-db variant of
    // this test.
    let _graph_db_path = workspace_path.join("splice_graph.db");
    let splice_binary = get_splice_binary();
    let output = Command::new(&splice_binary)
        .arg("patch")
        .arg("--file")
        .arg(&lib_rs_path)
        .arg("--symbol")
        .arg("greet")
        .arg("--with")
        .arg(&patch_path)
        .current_dir(workspace_path)
        .output();
    // Diagnostic output only; no pass/fail criteria.
    match output {
        Ok(result) => {
            println!("CLI exit code: {:?}", result.status.code());
            println!("stdout: {}", String::from_utf8_lossy(&result.stdout));
            println!("stderr: {}", String::from_utf8_lossy(&result.stderr));
        }
        Err(e) => {
            println!("Failed to execute splice binary: {}", e);
        }
    }
}
#[test]
fn test_cli_ambiguous_symbol_fails() {
    // TODO(review): unimplemented placeholder. Intended to patch a workspace
    // where `--symbol` matches more than one definition and assert the CLI
    // fails with a structured "ambiguous symbol" error.
}
#[test]
fn test_cli_syntax_failure_propagates() {
    // TODO(review): unimplemented placeholder. Likely superseded by
    // test_cli_patch_syntax_error_emits_diagnostics below — either implement
    // or remove.
}
#[test]
fn test_analyzer_off_by_default() {
    // A plain `splice patch` run (no analyzer flags) on a valid patch should
    // succeed, demonstrating the analyzer is opt-in rather than on by default.
    let workspace_dir = TempDir::new().expect("Failed to create temp workspace");
    let workspace_path = workspace_dir.path();
    // Minimal cargo workspace scaffold (same pattern as
    // test_cli_successful_patch).
    let cargo_toml_path = workspace_path.join("Cargo.toml");
    let mut cargo_toml = NamedTempFile::new().expect("Failed to create Cargo.toml");
    write!(
        cargo_toml,
        r#"[package]
name = "temp-test"
version = "0.1.0"
edition = "2021"
[lib]
name = "temp_test"
path = "src/lib.rs"
"#
    )
    .expect("Failed to write Cargo.toml");
    std::fs::rename(cargo_toml.path(), &cargo_toml_path).expect("Failed to move Cargo.toml");
    let src_dir = workspace_path.join("src");
    std::fs::create_dir(&src_dir).expect("Failed to create src directory");
    let lib_rs_path = src_dir.join("lib.rs");
    let source = r#"
pub fn greet(name: &str) -> String {
format!("Hello, {}!", name)
}
"#;
    std::fs::write(&lib_rs_path, source).expect("Failed to write lib.rs");
    let patch_path = workspace_path.join("patch.rs");
    let patch_content = r#"
pub fn greet(name: &str) -> String {
format!("Greetings, {}!", name)
}
"#;
    std::fs::write(&patch_path, patch_content).expect("Failed to write patch file");
    let splice_binary = get_splice_binary();
    let output = Command::new(&splice_binary)
        .arg("patch")
        .arg("--file")
        .arg(&lib_rs_path)
        .arg("--symbol")
        .arg("greet")
        .arg("--with")
        .arg(&patch_path)
        .current_dir(workspace_path)
        .output();
    match output {
        Ok(result) => {
            // NOTE(review): the two asserts are redundant — code()==Some(0)
            // implies success(). Harmless, but one would do.
            assert_eq!(
                result.status.code(),
                Some(0),
                "CLI should succeed with analyzer off by default"
            );
            assert!(result.status.success(), "CLI should exit with success");
        }
        Err(e) => {
            panic!("Failed to execute splice binary: {}", e);
        }
    }
}
#[test]
fn test_analyzer_required_but_missing() {
    // TODO(review): unimplemented placeholder. Should force the analyzer on
    // with no analyzer available and assert a structured failure.
}
#[test]
fn test_analyzer_failure_causes_rollback() {
    // TODO(review): unimplemented placeholder. Should verify the patched file
    // is restored when analyzer validation fails.
}
#[test]
fn test_plan_execution_success() {
    // Sets up a two-symbol workspace plus a two-step plan.json.
    //
    // NOTE(review): this test is incomplete — it writes plan.json and then
    // ends without ever invoking the CLI or asserting anything. TODO: run
    // `splice` against the plan and verify both steps applied.
    let workspace_dir = TempDir::new().expect("Failed to create temp workspace");
    let workspace_path = workspace_dir.path();
    // Minimal cargo workspace scaffold.
    let cargo_toml_path = workspace_path.join("Cargo.toml");
    let mut cargo_toml = NamedTempFile::new().expect("Failed to create Cargo.toml");
    write!(
        cargo_toml,
        r#"[package]
name = "temp-test"
version = "0.1.0"
edition = "2021"
[lib]
name = "temp_test"
path = "src/lib.rs"
"#
    )
    .expect("Failed to write Cargo.toml");
    std::fs::rename(cargo_toml.path(), &cargo_toml_path).expect("Failed to move Cargo.toml");
    let src_dir = workspace_path.join("src");
    std::fs::create_dir(&src_dir).expect("Failed to create src directory");
    // Two symbols so the plan can patch each with its own step.
    let lib_rs_path = src_dir.join("lib.rs");
    let source = r#"
pub fn greet(name: &str) -> String {
format!("Hello, {}!", name)
}
pub fn farewell(name: &str) -> String {
format!("Goodbye, {}!", name)
}
"#;
    std::fs::write(&lib_rs_path, source).expect("Failed to write lib.rs");
    let patches_dir = workspace_path.join("patches");
    std::fs::create_dir(&patches_dir).expect("Failed to create patches directory");
    let patch1_path = patches_dir.join("greet.rs");
    let patch1_content = r#"
pub fn greet(name: &str) -> String {
format!("Greetings, {}!", name)
}
"#;
    std::fs::write(&patch1_path, patch1_content).expect("Failed to write patch1");
    let patch2_path = patches_dir.join("farewell.rs");
    let patch2_content = r#"
pub fn farewell(name: &str) -> String {
format!("Farewell, {}!", name)
}
"#;
    std::fs::write(&patch2_path, patch2_content).expect("Failed to write patch2");
    // Plan with workspace-relative paths; schema appears to be
    // { steps: [{ file, symbol, kind, with }] } — TODO confirm against the
    // CLI's plan loader.
    let plan_path = workspace_path.join("plan.json");
    let plan_content = r#"{
"steps": [
{
"file": "src/lib.rs",
"symbol": "greet",
"kind": "function",
"with": "patches/greet.rs"
},
{
"file": "src/lib.rs",
"symbol": "farewell",
"kind": "function",
"with": "patches/farewell.rs"
}
]
}
"#;
    std::fs::write(&plan_path, plan_content).expect("Failed to write plan.json");
}
#[test]
fn test_plan_failure_stops_execution() {
    // TODO(review): unimplemented placeholder. Should assert later plan steps
    // are not executed after an earlier step fails.
}
#[test]
fn test_plan_invalid_schema() {
    // TODO(review): unimplemented placeholder. Should feed malformed plan
    // JSON and assert a schema-validation error.
}
#[test]
fn test_cli_symbol_not_found_returns_structured_json() {
    // Patching a symbol that does not exist must fail and emit a structured
    // JSON error payload on stderr with kind=SymbolNotFound plus the symbol,
    // file, and a hint field.
    let workspace_dir = TempDir::new().expect("Failed to create temp workspace");
    let workspace_path = workspace_dir.path();
    // Minimal cargo workspace scaffold.
    let cargo_toml_path = workspace_path.join("Cargo.toml");
    let mut cargo_toml = NamedTempFile::new().expect("Failed to create Cargo.toml");
    write!(
        cargo_toml,
        r#"[package]
name = "temp-test"
version = "0.1.0"
edition = "2021"
[lib]
name = "temp_test"
path = "src/lib.rs"
"#
    )
    .expect("Failed to write Cargo.toml");
    std::fs::rename(cargo_toml.path(), &cargo_toml_path).expect("Failed to move Cargo.toml");
    let src_dir = workspace_path.join("src");
    std::fs::create_dir(&src_dir).expect("Failed to create src directory");
    let lib_rs_path = src_dir.join("lib.rs");
    std::fs::write(
        &lib_rs_path,
        r#"
pub fn greet(name: &str) -> String {
format!("Hello, {}!", name)
}
"#,
    )
    .expect("Failed to write lib.rs");
    let patch_path = workspace_path.join("patch.rs");
    std::fs::write(
        &patch_path,
        r#"
pub fn greet(name: &str) -> String {
format!("Hi, {}!", name)
}
"#,
    )
    .expect("Failed to write patch.rs");
    let splice_binary = get_splice_binary();
    // Request a symbol that is not present in lib.rs.
    let output = Command::new(&splice_binary)
        .arg("patch")
        .arg("--file")
        .arg(&lib_rs_path)
        .arg("--symbol")
        .arg("missing_symbol")
        .arg("--with")
        .arg(&patch_path)
        .current_dir(workspace_path)
        .output()
        .expect("Failed to run splice CLI");
    assert!(
        !output.status.success(),
        "CLI should fail when symbol cannot be resolved"
    );
    // The entire stderr stream must parse as one JSON document.
    let stderr = String::from_utf8_lossy(&output.stderr);
    let payload: Value =
        serde_json::from_str(&stderr).expect("stderr should contain JSON payload");
    assert_eq!(
        payload.get("status").and_then(|v| v.as_str()),
        Some("error"),
        "status should be error"
    );
    let error = payload
        .get("error")
        .and_then(|v| v.as_object())
        .expect("error object missing");
    assert_eq!(
        error.get("kind").and_then(|v| v.as_str()),
        Some("SymbolNotFound"),
        "kind should be SymbolNotFound"
    );
    assert_eq!(
        error.get("symbol").and_then(|v| v.as_str()),
        Some("missing_symbol"),
        "symbol field should echo missing symbol"
    );
    // Expects the absolute path as passed via --file.
    assert_eq!(
        error.get("file").and_then(|v| v.as_str()),
        lib_rs_path.to_str(),
        "file should reference requested source file"
    );
    assert!(
        error.get("hint").and_then(|v| v.as_str()).is_some(),
        "hint should be populated for guidance"
    );
}
#[test]
fn test_cli_query_broken_pipe_is_graceful() {
    // Closing the reader end of the CLI's stdout pipe (as `splice query | head`
    // would) must not make the process die with a broken-pipe panic/SIGPIPE
    // failure — it should still exit successfully.
    let workspace_dir = TempDir::new().expect("Failed to create temp workspace");
    let workspace_path = workspace_dir.path();
    let src_dir = workspace_path.join("src");
    std::fs::create_dir(&src_dir).expect("Failed to create src directory");
    let lib_rs_path = src_dir.join("lib.rs");
    std::fs::write(
        &lib_rs_path,
        r#"
pub fn greet(name: &str) -> String {
format!("Hello, {}!", name)
}
"#,
    )
    .expect("Failed to write lib.rs");
    // Pre-populate a Magellan symbol database for the query to read.
    let db_path = workspace_path.join("magellan.db");
    let mut integration =
        MagellanIntegration::open(&db_path).expect("Failed to open Magellan db");
    integration
        .index_file(&lib_rs_path)
        .expect("Failed to index source file");
    let splice_binary = get_splice_binary();
    let mut child = Command::new(&splice_binary)
        .arg("query")
        .arg("--db")
        .arg(&db_path)
        .arg("--label")
        .arg("rust")
        .arg("--label")
        .arg("fn")
        .arg("--show-code")
        .stdout(Stdio::piped())
        .spawn()
        .expect("Failed to run splice CLI");
    // Dropping the handle closes our read end, so the child's next write
    // hits a broken pipe.
    drop(child.stdout.take());
    let status = child.wait().expect("Failed to wait for splice CLI");
    assert!(
        status.success(),
        "CLI should exit cleanly when stdout pipe closes"
    );
}
#[test]
fn test_cli_patch_syntax_error_emits_diagnostics() {
    // A patch with a syntax error (missing closing brace) must be rejected,
    // and the JSON error payload must carry tree-sitter diagnostics whose
    // message mentions "syntax".
    let workspace_dir = TempDir::new().expect("Failed to create temp workspace");
    let workspace_path = workspace_dir.path();
    // Minimal cargo workspace scaffold.
    let cargo_toml_path = workspace_path.join("Cargo.toml");
    let mut cargo_toml = NamedTempFile::new().expect("Failed to create Cargo.toml");
    write!(
        cargo_toml,
        r#"[package]
name = "temp-test"
version = "0.1.0"
edition = "2021"
[lib]
name = "temp_test"
path = "src/lib.rs"
"#
    )
    .expect("Failed to write Cargo.toml");
    std::fs::rename(cargo_toml.path(), &cargo_toml_path).expect("Failed to move Cargo.toml");
    let src_dir = workspace_path.join("src");
    std::fs::create_dir(&src_dir).expect("Failed to create src directory");
    let lib_rs_path = src_dir.join("lib.rs");
    std::fs::write(
        &lib_rs_path,
        r#"
pub fn greet(name: &str) -> String {
format!("Hello, {}!", name)
}
"#,
    )
    .expect("Failed to write lib.rs");
    // Deliberately malformed patch: the function body is never closed.
    let patch_path = workspace_path.join("patch.rs");
    std::fs::write(
        &patch_path,
        r#"
pub fn greet(name: &str) -> String {
format!("Hello, {}!", name)
"#,
    )
    .expect("Failed to write patch.rs");
    let splice_binary = get_splice_binary();
    let output = Command::new(&splice_binary)
        .arg("patch")
        .arg("--file")
        .arg(&lib_rs_path)
        .arg("--symbol")
        .arg("greet")
        .arg("--with")
        .arg(&patch_path)
        .current_dir(workspace_path)
        .output()
        .expect("Failed to execute splice CLI");
    assert!(
        !output.status.success(),
        "CLI should fail when patch introduces syntax errors"
    );
    let stderr = String::from_utf8_lossy(&output.stderr);
    let payload: Value =
        serde_json::from_str(&stderr).expect("stderr should contain JSON payload");
    let error = payload
        .get("error")
        .and_then(|v| v.as_object())
        .expect("error object missing");
    let diagnostics = error
        .get("diagnostics")
        .and_then(|v| v.as_array())
        .expect("diagnostics array missing from payload");
    assert!(
        !diagnostics.is_empty(),
        "diagnostics array should contain at least one entry"
    );
    // Only the first diagnostic is inspected; ordering is assumed stable.
    let first = diagnostics[0]
        .as_object()
        .expect("diagnostic entry should be an object");
    assert_eq!(
        first.get("tool").and_then(|v| v.as_str()),
        Some("tree-sitter"),
        "tree-sitter should report syntax errors"
    );
    assert!(
        first
            .get("message")
            .and_then(|v| v.as_str())
            .map(|m| m.contains("syntax"))
            .unwrap_or(false),
        "diagnostic message should mention syntax issues"
    );
}
#[test]
fn test_cli_cargo_check_failure_emits_diagnostics() {
    // A syntactically valid patch that calls an undefined function must fail
    // cargo check, and the CargoCheckFailed payload must surface rich
    // diagnostics: file, line, compiler error code (E0425), tool metadata,
    // and a remediation link into the Rust error index.
    let workspace_dir = TempDir::new().expect("Failed to create temp workspace");
    let workspace_path = workspace_dir.path();
    // Minimal cargo workspace scaffold.
    let cargo_toml_path = workspace_path.join("Cargo.toml");
    let mut cargo_toml = NamedTempFile::new().expect("Failed to create Cargo.toml");
    write!(
        cargo_toml,
        r#"[package]
name = "temp-test"
version = "0.1.0"
edition = "2021"
[lib]
name = "temp_test"
path = "src/lib.rs"
"#
    )
    .expect("Failed to write Cargo.toml");
    std::fs::rename(cargo_toml.path(), &cargo_toml_path).expect("Failed to move Cargo.toml");
    let src_dir = workspace_path.join("src");
    std::fs::create_dir(&src_dir).expect("Failed to create src directory");
    let lib_rs_path = src_dir.join("lib.rs");
    std::fs::write(
        &lib_rs_path,
        r#"
pub fn greet(name: &str) -> String {
format!("Hello, {}!", name)
}
"#,
    )
    .expect("Failed to write lib.rs");
    // Parses fine, but `missing_helper` does not resolve -> E0425.
    let patch_path = workspace_path.join("patch.rs");
    std::fs::write(
        &patch_path,
        r#"
pub fn greet(name: &str) -> String {
missing_helper(name)
}
"#,
    )
    .expect("Failed to write patch.rs");
    let splice_binary = get_splice_binary();
    let output = Command::new(&splice_binary)
        .arg("patch")
        .arg("--file")
        .arg(&lib_rs_path)
        .arg("--symbol")
        .arg("greet")
        .arg("--with")
        .arg(&patch_path)
        .current_dir(workspace_path)
        .output()
        .expect("Failed to execute splice CLI");
    assert!(
        !output.status.success(),
        "CLI should fail when cargo check reports errors"
    );
    let stderr = String::from_utf8_lossy(&output.stderr);
    let payload: Value =
        serde_json::from_str(&stderr).expect("stderr should contain JSON payload");
    let error = payload
        .get("error")
        .and_then(|v| v.as_object())
        .expect("error object missing");
    assert_eq!(
        error.get("kind").and_then(|v| v.as_str()),
        Some("CargoCheckFailed"),
        "expected CargoCheckFailed error kind"
    );
    let diagnostics = error
        .get("diagnostics")
        .and_then(|v| v.as_array())
        .expect("diagnostics array missing");
    assert!(
        !diagnostics.is_empty(),
        "diagnostics should not be empty for cargo failures"
    );
    let first = diagnostics[0]
        .as_object()
        .expect("diagnostic entry should be an object");
    assert_eq!(
        first.get("tool").and_then(|v| v.as_str()),
        Some("cargo-check"),
        "cargo-check diagnostics expected"
    );
    // `file` may be relative or absolute, so only the suffix is checked.
    let file_value = first
        .get("file")
        .and_then(|v| v.as_str())
        .expect("diagnostic should include file path");
    assert!(
        file_value.ends_with("src/lib.rs"),
        "diagnostic should point to the patched file"
    );
    assert!(
        first.get("line").and_then(|v| v.as_u64()).is_some(),
        "diagnostic should contain a line number"
    );
    assert_eq!(
        first.get("code").and_then(|v| v.as_str()),
        Some("E0425"),
        "diagnostic should expose compiler error code"
    );
    let tool_version = first
        .get("tool_version")
        .and_then(|v| v.as_str())
        .expect("tool_version should be present");
    assert!(
        tool_version.to_lowercase().contains("cargo"),
        "tool_version should describe cargo"
    );
    assert!(
        first
            .get("tool_path")
            .and_then(|v| v.as_str())
            .map(|s| !s.is_empty())
            .unwrap_or(false),
        "tool_path should contain the detected binary path"
    );
    assert_eq!(
        first.get("remediation").and_then(|v| v.as_str()),
        Some("https://doc.rust-lang.org/error-index.html#E0425"),
        "remediation link should point to the Rust error index"
    );
}
#[test]
fn test_cli_batch_patch_rolls_back_on_failure() {
    // Batch semantics: two replacements in one batch where the second one
    // introduces a type error (helper("oops") vs helper(x: i32)). The whole
    // batch must fail cargo check and BOTH files must be left untouched.
    let workspace_dir = TempDir::new().expect("Failed to create temp workspace");
    let workspace_path = workspace_dir.path();
    // Minimal cargo workspace scaffold.
    let cargo_toml_path = workspace_path.join("Cargo.toml");
    let mut cargo_toml = NamedTempFile::new().expect("Failed to create Cargo.toml");
    write!(
        cargo_toml,
        r#"[package]
name = "temp-test"
version = "0.1.0"
edition = "2021"
[lib]
name = "temp_test"
path = "src/lib.rs"
"#
    )
    .expect("Failed to write Cargo.toml");
    std::fs::rename(cargo_toml.path(), &cargo_toml_path).expect("Failed to move Cargo.toml");
    let src_dir = workspace_path.join("src");
    std::fs::create_dir(&src_dir).expect("Failed to create src directory");
    // lib.rs declares both submodules so cargo check compiles a.rs and b.rs.
    let lib_rs_path = src_dir.join("lib.rs");
    std::fs::write(
        &lib_rs_path,
        r#"
pub fn helper(x: i32) -> i32 {
x + 1
}
pub mod a;
pub mod b;
"#,
    )
    .expect("Failed to write lib.rs");
    let file_a = src_dir.join("a.rs");
    std::fs::write(
        &file_a,
        r#"
use crate::helper;
pub fn value() -> i32 {
helper(10)
}
"#,
    )
    .expect("Failed to write a.rs");
    let file_b = src_dir.join("b.rs");
    std::fs::write(
        &file_b,
        r#"
use crate::helper;
pub fn broken() -> i32 {
helper(5)
}
"#,
    )
    .expect("Failed to write b.rs");
    // Resolve byte spans of the target symbols so the batch can address them
    // directly by offset.
    let symbols_a = extract_rust_symbols(
        &file_a,
        std::fs::read(&file_a).expect("read a.rs").as_slice(),
    )
    .expect("parse a.rs");
    let span_a = symbols_a
        .iter()
        .find(|s| s.name == "value")
        .expect("value span");
    let symbols_b = extract_rust_symbols(
        &file_b,
        std::fs::read(&file_b).expect("read b.rs").as_slice(),
    )
    .expect("parse b.rs");
    let span_b = symbols_b
        .iter()
        .find(|s| s.name == "broken")
        .expect("broken span");
    // Batch files are addressed relative to the workspace root.
    let relative_a = file_a
        .strip_prefix(workspace_path)
        .expect("a.rs relative path");
    let relative_b = file_b
        .strip_prefix(workspace_path)
        .expect("b.rs relative path");
    let batch_path = workspace_path.join("batch.json");
    // Second replacement is intentionally ill-typed: helper takes i32.
    let batch_json = json!({
        "batches": [
            {
                "replacements": [
                    {
                        "file": relative_a,
                        "start": span_a.byte_start,
                        "end": span_a.byte_end,
                        "content": r#"
pub fn value() -> i32 {
helper(42)
}
"#
                    },
                    {
                        "file": relative_b,
                        "start": span_b.byte_start,
                        "end": span_b.byte_end,
                        "content": r#"
pub fn broken() -> i32 {
helper("oops")
}
"#
                    }
                ]
            }
        ]
    });
    std::fs::write(
        &batch_path,
        serde_json::to_string_pretty(&batch_json).unwrap(),
    )
    .expect("write batch.json");
    // Snapshot the ORIGINAL contents before running the CLI.
    // NOTE(review): the names `replaced_*` are misleading — these hold the
    // pre-patch contents used to prove rollback below.
    let replaced_a = std::fs::read_to_string(&file_a).expect("read replaced a.rs");
    let replaced_b = std::fs::read_to_string(&file_b).expect("read replaced b.rs");
    let splice_binary = get_splice_binary();
    let output = Command::new(&splice_binary)
        .arg("patch")
        .arg("--batch")
        .arg(&batch_path)
        .arg("--language")
        .arg("rust")
        .current_dir(workspace_path)
        .output()
        .expect("Failed to run splice CLI");
    assert!(
        !output.status.success(),
        "CLI should fail because the second replacement introduces a type error"
    );
    let stderr = String::from_utf8_lossy(&output.stderr);
    let payload: Value = serde_json::from_str(&stderr).expect("stderr should contain JSON");
    assert_eq!(
        payload.get("status").and_then(|v| v.as_str()),
        Some("error")
    );
    assert_eq!(
        payload
            .get("error")
            .and_then(|v| v.get("kind"))
            .and_then(|v| v.as_str()),
        Some("CargoCheckFailed"),
        "batch failures should return CargoCheckFailed errors"
    );
    // Rollback proof: both files must equal their pre-run snapshots.
    assert_eq!(
        replaced_a,
        std::fs::read_to_string(&file_a).expect("read patched a.rs"),
        "a.rs should remain unchanged when batch fails"
    );
    assert_eq!(
        replaced_b,
        std::fs::read_to_string(&file_b).expect("read patched b.rs"),
        "b.rs should remain unchanged when batch fails"
    );
}
#[test]
fn test_cli_batch_patch_success_returns_metadata() {
    // Happy-path batch: both replacements are valid, so the CLI must succeed
    // and report per-file metadata (before/after SHA-256 hashes) plus the
    // number of batches applied.
    let workspace_dir = TempDir::new().expect("Failed to create temp workspace");
    let workspace_path = workspace_dir.path();
    // Minimal cargo workspace scaffold.
    let cargo_toml_path = workspace_path.join("Cargo.toml");
    let mut cargo_toml = NamedTempFile::new().expect("Failed to create Cargo.toml");
    write!(
        cargo_toml,
        r#"[package]
name = "temp-test"
version = "0.1.0"
edition = "2021"
[lib]
name = "temp_test"
path = "src/lib.rs"
"#
    )
    .expect("Failed to write Cargo.toml");
    std::fs::rename(cargo_toml.path(), &cargo_toml_path).expect("Failed to move Cargo.toml");
    let src_dir = workspace_path.join("src");
    std::fs::create_dir(&src_dir).expect("Failed to create src directory");
    // NOTE(review): lib.rs only declares `pub mod a;` — b.rs is written below
    // and patched, but without `pub mod b;` cargo check never compiles it.
    // TODO confirm whether that is intentional (patch-only file) or a missing
    // declaration.
    let lib_rs_path = src_dir.join("lib.rs");
    std::fs::write(
        &lib_rs_path,
        r#"
pub fn helper(x: i32) -> i32 {
x + 1
}
pub mod a;
"#,
    )
    .expect("Failed to write lib.rs");
    let file_a = src_dir.join("a.rs");
    std::fs::write(
        &file_a,
        r#"
use crate::helper;
pub fn value() -> i32 {
helper(10)
}
"#,
    )
    .expect("Failed to write a.rs");
    let file_b = src_dir.join("b.rs");
    std::fs::write(
        &file_b,
        r#"
use crate::helper;
pub fn broken() -> i32 {
helper(5)
}
"#,
    )
    .expect("Failed to write b.rs");
    // Resolve byte spans of the target symbols for offset-addressed patching.
    let symbols_a = extract_rust_symbols(
        &file_a,
        std::fs::read(&file_a).expect("read a.rs").as_slice(),
    )
    .expect("parse a.rs");
    let span_a = symbols_a
        .iter()
        .find(|s| s.name == "value")
        .expect("value span");
    let symbols_b = extract_rust_symbols(
        &file_b,
        std::fs::read(&file_b).expect("read b.rs").as_slice(),
    )
    .expect("parse b.rs");
    let span_b = symbols_b
        .iter()
        .find(|s| s.name == "broken")
        .expect("broken span");
    let relative_a = file_a
        .strip_prefix(workspace_path)
        .expect("a.rs relative path");
    let relative_b = file_b
        .strip_prefix(workspace_path)
        .expect("b.rs relative path");
    let batch_path = workspace_path.join("batch-success.json");
    let batch_json = json!({
        "batches": [
            {
                "replacements": [
                    {
                        "file": relative_a,
                        "start": span_a.byte_start,
                        "end": span_a.byte_end,
                        "content": r#"
pub fn value() -> i32 {
helper(42)
}
"#
                    },
                    {
                        "file": relative_b,
                        "start": span_b.byte_start,
                        "end": span_b.byte_end,
                        "content": r#"
pub fn broken() -> i32 {
helper(7)
}
"#
                    }
                ]
            }
        ]
    });
    std::fs::write(
        &batch_path,
        serde_json::to_string_pretty(&batch_json).unwrap(),
    )
    .expect("write batch-success.json");
    // Hashes before the run, to compare against the reported before_hash.
    let before_hash_a = hash_file(&file_a);
    let before_hash_b = hash_file(&file_b);
    let splice_binary = get_splice_binary();
    let output = Command::new(&splice_binary)
        .arg("patch")
        .arg("--batch")
        .arg(&batch_path)
        .arg("--language")
        .arg("rust")
        .current_dir(workspace_path)
        .output()
        .expect("Failed to run splice CLI");
    // Dump CLI output before asserting, so failures are debuggable.
    if !output.status.success() {
        println!("stdout: {}", String::from_utf8_lossy(&output.stdout));
        println!("stderr: {}", String::from_utf8_lossy(&output.stderr));
    }
    assert!(
        output.status.success(),
        "CLI should succeed when both replacements are valid"
    );
    let stdout = String::from_utf8_lossy(&output.stdout);
    let payload: Value = serde_json::from_str(&stdout).expect("stdout should be JSON payload");
    assert_eq!(
        payload.get("status").and_then(|v| v.as_str()),
        Some("ok"),
        "success payload should set status=ok"
    );
    let data = payload
        .get("data")
        .and_then(|v| v.as_object())
        .expect("success payload should include metadata");
    assert_eq!(
        data.get("batches_applied").and_then(|v| v.as_u64()),
        Some(1),
        "metadata should report number of batches"
    );
    let files = data
        .get("files")
        .and_then(|v| v.as_array())
        .expect("metadata should include per-file entries");
    assert_eq!(files.len(), 2, "two files should be reported");
    // Build the expected (before, after) hash pair per absolute file path.
    let after_hash_a = hash_file(&file_a);
    let after_hash_b = hash_file(&file_b);
    let mut expected = HashMap::new();
    expected.insert(
        file_a.to_string_lossy().to_string(),
        (before_hash_a.clone(), after_hash_a.clone()),
    );
    expected.insert(
        file_b.to_string_lossy().to_string(),
        (before_hash_b.clone(), after_hash_b.clone()),
    );
    // Every reported file entry must match our independently computed hashes.
    for entry in files {
        let obj = entry
            .as_object()
            .expect("file metadata entries should be JSON objects");
        let file = obj
            .get("file")
            .and_then(|v| v.as_str())
            .expect("file entry should be a string");
        let before = obj
            .get("before_hash")
            .and_then(|v| v.as_str())
            .expect("before_hash should be a string");
        let after = obj
            .get("after_hash")
            .and_then(|v| v.as_str())
            .expect("after_hash should be a string");
        let (expected_before, expected_after) = expected
            .get(file)
            .unwrap_or_else(|| panic!("unexpected file in metadata: {}", file));
        assert_eq!(
            before, expected_before,
            "before hash mismatch for file {}",
            file
        );
        assert_eq!(
            after, expected_after,
            "after hash mismatch for file {}",
            file
        );
    }
    // Finally, the replacements must actually be present on disk.
    let final_a = std::fs::read_to_string(&file_a).expect("read final a.rs");
    assert!(
        final_a.contains("helper(42)"),
        "file a.rs should reflect the batch replacement"
    );
    let final_b = std::fs::read_to_string(&file_b).expect("read final b.rs");
    assert!(
        final_b.contains("helper(7)"),
        "file b.rs should reflect the batch replacement"
    );
}
#[test]
fn test_cli_patch_preview() {
    // `--preview --json` must report what a patch WOULD do (preview_report
    // with file, lines_added, bytes_removed, and a files array) without
    // modifying the file on disk.
    let workspace_dir = TempDir::new().expect("Failed to create temp workspace");
    let workspace_path = workspace_dir.path();
    // Minimal cargo workspace scaffold.
    let cargo_toml_path = workspace_path.join("Cargo.toml");
    let mut cargo_toml = NamedTempFile::new().expect("Failed to create Cargo.toml");
    write!(
        cargo_toml,
        r#"[package]
name = "temp-test"
version = "0.1.0"
edition = "2021"
[lib]
name = "temp_test"
path = "src/lib.rs"
"#
    )
    .expect("Failed to write Cargo.toml");
    std::fs::rename(cargo_toml.path(), &cargo_toml_path).expect("Failed to move Cargo.toml");
    let src_dir = workspace_path.join("src");
    std::fs::create_dir(&src_dir).expect("Failed to create src directory");
    let lib_rs_path = src_dir.join("lib.rs");
    std::fs::write(
        &lib_rs_path,
        r#"
pub fn helper(x: i32) -> i32 {
x + 1
}
pub mod a;
"#,
    )
    .expect("Failed to write lib.rs");
    let a_rs_path = src_dir.join("a.rs");
    std::fs::write(
        &a_rs_path,
        r#"
use crate::helper;
pub fn value() -> i32 {
helper(10)
}
"#,
    )
    .expect("Failed to write a.rs");
    let patch_path = workspace_path.join("patch.rs");
    std::fs::write(
        &patch_path,
        r#"
pub fn value() -> i32 {
helper(20)
}
"#,
    )
    .expect("Failed to write patch file");
    // Snapshot the pre-preview content; it must be unchanged afterwards.
    let replaced_content =
        std::fs::read_to_string(&a_rs_path).expect("Failed to read replaced file");
    let splice_binary = get_splice_binary();
    eprintln!("Using splice binary: {:?}", splice_binary);
    eprintln!("Binary exists: {}", splice_binary.exists());
    // RUSTC_WRAPPER is cleared so sccache or similar cannot interfere with
    // any cargo check the CLI may run.
    let output = Command::new(&splice_binary)
        .arg("patch")
        .arg("--file")
        .arg(&a_rs_path)
        .arg("--symbol")
        .arg("value")
        .arg("--with")
        .arg(&patch_path)
        .arg("--preview")
        .arg("--json")
        .env("RUSTC_WRAPPER", "")
        .current_dir(workspace_path)
        .output()
        .expect("Failed to run splice CLI");
    // Exit code 1 is tolerated here — presumably preview signals "would
    // change" via a non-zero code. TODO confirm against the CLI contract.
    if !output.status.success() && output.status.code() != Some(1) {
        let stderr = String::from_utf8_lossy(&output.stderr);
        let stdout = String::from_utf8_lossy(&output.stdout);
        panic!(
            "CLI preview failed with exit code {:?}\nstdout: {}\nstderr: {}",
            output.status.code(),
            stdout,
            stderr
        );
    }
    // stdout may carry non-JSON noise, so extract the payload first.
    let stdout = String::from_utf8_lossy(&output.stdout);
    let json_output = extract_json_from_stdout(&stdout);
    let payload: Value =
        serde_json::from_str(&json_output).expect("stdout should be JSON payload");
    let data = payload
        .get("data")
        .and_then(|v| v.as_object())
        .expect("data missing");
    let preview_report = data
        .get("preview_report")
        .and_then(|v| v.as_object())
        .expect("preview_report missing");
    assert_eq!(
        preview_report
            .get("file")
            .and_then(|v| v.as_str())
            .expect("file missing in preview_report"),
        a_rs_path.to_string_lossy()
    );
    assert!(
        preview_report
            .get("lines_added")
            .and_then(|v| v.as_u64())
            .unwrap_or(0)
            >= 1
    );
    assert!(
        preview_report
            .get("bytes_removed")
            .and_then(|v| v.as_u64())
            .is_some(),
        "preview_report must include bytes_removed"
    );
    // Preview must be a dry run: the target file is byte-identical.
    assert_eq!(
        replaced_content,
        std::fs::read_to_string(&a_rs_path).expect("file unchanged after preview")
    );
    let files = data
        .get("files")
        .and_then(|v| v.as_array())
        .expect("files array missing");
    assert_eq!(files.len(), 1);
    assert_eq!(
        files[0]
            .get("file")
            .and_then(|v| v.as_str())
            .expect("file entry missing"),
        a_rs_path.to_string_lossy()
    );
}
#[test]
fn test_cli_backup_and_undo() {
    // Round trip: patch with --create-backup, verify the file changed, then
    // run `splice undo --manifest <path from the success payload>` and verify
    // the original content is restored.
    let workspace_dir = TempDir::new().expect("Failed to create temp workspace");
    let workspace_path = workspace_dir.path();
    // Minimal cargo workspace scaffold.
    let cargo_toml_path = workspace_path.join("Cargo.toml");
    let mut cargo_toml = NamedTempFile::new().expect("Failed to create Cargo.toml");
    write!(
        cargo_toml,
        r#"[package]
name = "temp-test"
version = "0.1.0"
edition = "2021"
[lib]
name = "temp_test"
path = "src/lib.rs"
"#
    )
    .expect("Failed to write Cargo.toml");
    std::fs::rename(cargo_toml.path(), &cargo_toml_path).expect("Failed to move Cargo.toml");
    let src_dir = workspace_path.join("src");
    std::fs::create_dir(&src_dir).expect("Failed to create src directory");
    let lib_rs_path = src_dir.join("lib.rs");
    // Kept in a variable: this is the content undo must restore at the end.
    let replaced_content = r#"
pub fn greet(name: &str) -> String {
format!("Hello, {}!", name)
}
"#;
    std::fs::write(&lib_rs_path, replaced_content).expect("Failed to write lib.rs");
    // NOTE(review): patch.rs is written but never used — the batch below
    // carries its content inline. TODO confirm and remove if dead.
    let patch_path = workspace_path.join("patch.rs");
    std::fs::write(
        &patch_path,
        r#"
pub fn greet(name: &str) -> String {
format!("Hi, {}!", name)
}
"#,
    )
    .expect("Failed to write patch.rs");
    // Resolve the byte span of `greet` for the offset-addressed batch.
    let symbols = extract_rust_symbols(
        &lib_rs_path,
        std::fs::read(&lib_rs_path).expect("read lib.rs").as_slice(),
    )
    .expect("parse lib.rs");
    let span = symbols
        .iter()
        .find(|s| s.name == "greet")
        .expect("greet span");
    let batch_path = workspace_path.join("batch.json");
    let batch_json = json!({
        "batches": [
            {
                "replacements": [
                    {
                        "file": lib_rs_path.strip_prefix(workspace_path).unwrap(),
                        "start": span.byte_start,
                        "end": span.byte_end,
                        "content": r#"
pub fn greet(name: &str) -> String {
format!("Hi, {}!", name)
}
"#
                    }
                ]
            }
        ]
    });
    std::fs::write(
        &batch_path,
        serde_json::to_string_pretty(&batch_json).unwrap(),
    )
    .expect("write batch.json");
    let splice_binary = get_splice_binary();
    let output = Command::new(&splice_binary)
        .arg("patch")
        .arg("--batch")
        .arg(&batch_path)
        .arg("--language")
        .arg("rust")
        .arg("--create-backup")
        .current_dir(workspace_path)
        .output()
        .expect("Failed to run splice CLI");
    assert!(
        output.status.success(),
        "CLI should succeed: {}",
        String::from_utf8_lossy(&output.stderr)
    );
    // The patch must have landed before we exercise undo.
    let modified_content = std::fs::read_to_string(&lib_rs_path).expect("read modified lib.rs");
    assert!(modified_content.contains("Hi, "), "File should be patched");
    assert!(
        !modified_content.contains("Hello, "),
        "Old content should be gone"
    );
    // The success payload advertises where the backup manifest was written.
    let stdout = String::from_utf8_lossy(&output.stdout);
    let payload: Value = serde_json::from_str(&stdout).expect("stdout should be JSON");
    let backup_manifest = payload
        .get("data")
        .and_then(|v| v.get("backup_manifest"))
        .and_then(|v| v.as_str())
        .expect("backup_manifest should be in response");
    let manifest_path = std::path::PathBuf::from(backup_manifest);
    let undo_output = Command::new(&splice_binary)
        .arg("undo")
        .arg("--manifest")
        .arg(&manifest_path)
        .current_dir(workspace_path)
        .output()
        .expect("Failed to run splice undo");
    assert!(
        undo_output.status.success(),
        "Undo should succeed: {}",
        String::from_utf8_lossy(&undo_output.stderr)
    );
    // Undo must restore the exact pre-patch bytes.
    let restored_content = std::fs::read_to_string(&lib_rs_path).expect("read restored lib.rs");
    assert_eq!(
        restored_content, replaced_content,
        "File should be restored to replaced content"
    );
}
#[test]
fn test_cli_query_magellan_flags() {
    // Index a tiny documented function into a Magellan db, then run
    // `splice query` with context lines (-C 1) and JSON output; the first
    // returned symbol must carry a "context" field.
    let workspace_dir = TempDir::new().expect("Failed to create temp workspace");
    let db_path = workspace_dir.path().join("test.db");
    let file_path = workspace_dir.path().join("lib.rs");
    std::fs::write(
        &file_path,
        "/// Example\npub fn demo() { println!(\"hi\"); }\n",
    )
    .expect("Failed to write test file");
    let mut integration =
        MagellanIntegration::open(&db_path).expect("Failed to open magellan db");
    integration
        .index_file(&file_path)
        .expect("Failed to index test file");
    let output = Command::new(get_splice_binary())
        .args(["query", "--db"])
        .arg(&db_path)
        .args(["--label", "rust", "-C", "1", "--json"])
        .output()
        .expect("Failed to run splice query");
    assert!(
        output.status.success(),
        "splice query failed: {}",
        String::from_utf8_lossy(&output.stderr)
    );
    // stdout may carry non-JSON noise, so extract the payload first.
    let stdout = String::from_utf8_lossy(&output.stdout);
    let json_payload = extract_json_from_stdout(&stdout);
    let payload: Value = serde_json::from_str(&json_payload).expect("stdout should be JSON");
    let symbols = payload
        .get("result")
        .and_then(|v| v.get("symbols"))
        .and_then(|v| v.as_array())
        .expect("result.symbols should be array");
    let span = symbols.first().expect("expected at least one symbol");
    assert!(span.get("context").is_some(), "context should be present");
}
/// `query --relationships` must succeed on an indexed file and still return a
/// non-empty `result.symbols` array in the JSON payload.
#[test]
fn test_cli_query_magellan_relationships_does_not_fail() {
    let workspace_dir = TempDir::new().expect("Failed to create temp workspace");
    let db_path = workspace_dir.path().join("test.db");
    let file_path = workspace_dir.path().join("lib.rs");
    std::fs::write(
        &file_path,
        "pub fn callee() {}\n\npub fn caller() { callee(); }\n",
    )
    .expect("Failed to write test file");
    let mut integration =
        MagellanIntegration::open(&db_path).expect("Failed to open magellan db");
    integration
        .index_file(&file_path)
        .expect("Failed to index test file");
    let splice_binary = get_splice_binary();
    let output = Command::new(&splice_binary)
        .args(["query", "--db"])
        .arg(&db_path)
        .args(["--label", "rust", "--relationships", "--json"])
        .output()
        .expect("Failed to run splice query");
    assert!(
        output.status.success(),
        "splice query with relationships failed: {}",
        String::from_utf8_lossy(&output.stderr)
    );
    let raw = String::from_utf8_lossy(&output.stdout);
    let payload: Value = serde_json::from_str(&extract_json_from_stdout(&raw))
        .expect("stdout should be JSON");
    let symbols = payload
        .get("result")
        .and_then(|v| v.get("symbols"))
        .and_then(|v| v.as_array())
        .expect("result.symbols should be array");
    assert!(!symbols.is_empty(), "expected at least one symbol");
}
#[test]
fn test_cli_query_pagination_fields() {
    // TODO(review): this test body is empty, so it vacuously passes and
    // provides no coverage of pagination fields. Either implement the
    // intended assertions or mark the test #[ignore] until it is written.
}
/// Returns the lowercase hex SHA-256 digest of the file at `path`.
///
/// Panics if the file cannot be read (test-helper semantics).
fn hash_file(path: &std::path::Path) -> String {
    let contents = std::fs::read(path).expect("Failed to read file for hashing");
    format!("{:x}", Sha256::digest(&contents))
}
/// A `status` invocation pointed at a database path whose parent directories
/// do not exist must fail with exit code 3 (database error) and emit a
/// structured JSON error payload on stderr whose `error_code.code` is the
/// Magellan error code SPL-E091.
#[test]
fn test_magellan_database_error_maps_to_spl_e091() {
    let workspace_dir = TempDir::new().expect("Failed to create temp workspace");
    let workspace_path = workspace_dir.path();
    // Deliberately nest the db under directories that are never created.
    let nonexistent_db = workspace_path
        .join("nonexistent")
        .join("path")
        .join("to")
        .join("database.db");
    let splice_binary = get_splice_binary();
    let output = Command::new(&splice_binary)
        .arg("status")
        .arg("--db")
        .arg(&nonexistent_db)
        .output()
        .expect("Failed to run splice status");
    // Exit code 3 is the database-error code in this CLI's contract.
    assert_eq!(
        output.status.code(),
        Some(3),
        "Expected exit code 3 for database error, got {:?}",
        output.status.code()
    );
    // Errors are reported as JSON on stderr; pull out the payload object.
    let stderr = String::from_utf8_lossy(&output.stderr);
    let json_output = extract_json_from_stdout(&stderr);
    let payload: Value =
        serde_json::from_str(&json_output).expect("stderr should contain valid JSON payload");
    assert_eq!(
        payload.get("status").and_then(|v| v.as_str()),
        Some("error"),
        "status should be 'error'"
    );
    let error = payload
        .get("error")
        .and_then(|v| v.as_object())
        .expect("error object should be present");
    let error_code = error
        .get("error_code")
        .and_then(|v| v.as_object())
        .expect("error_code should be present");
    assert_eq!(
        error_code.get("code").and_then(|v| v.as_str()),
        Some("SPL-E091"),
        "error_code.code should be 'SPL-E091' for Magellan errors"
    );
    assert_eq!(
        error_code.get("severity").and_then(|v| v.as_str()),
        Some("error"),
        "error_code.severity should be 'error'"
    );
    // Hint text is free-form; accept any of the expected keywords.
    let hint = error_code
        .get("hint")
        .and_then(|v| v.as_str())
        .unwrap_or("");
    assert!(
        hint.contains("Magellan") || hint.contains("database") || hint.contains("ingest"),
        "hint should mention Magellan, database, or ingest: {}",
        hint
    );
    // The message should reference the failure domain or the offending path.
    let message = error.get("message").and_then(|v| v.as_str()).unwrap_or("");
    assert!(
        message.to_lowercase().contains("database")
            || message.to_lowercase().contains("magellan")
            || message.contains(&nonexistent_db.to_string_lossy().to_string()),
        "error message should reference database, Magellan, or the path: {}",
        message
    );
}
/// Truncating an indexed database to corrupt it and then querying it must
/// fail with a structured JSON error on stderr that preserves context: a
/// non-empty error code (SPL-E091 when the error kind is Magellan) and a
/// non-empty message. Exit code may be 1 or 3 depending on where the
/// corruption is detected.
#[test]
fn test_magellan_query_error_preserves_context() {
    // Fix: expect message was garbled ("Failed to temp workspace").
    let workspace_dir = TempDir::new().expect("Failed to create temp workspace");
    let workspace_path = workspace_dir.path();
    let db_path = workspace_path.join("test.db");
    let file_path = workspace_path.join("lib.rs");
    std::fs::write(&file_path, r#"pub fn existing_func() -> i32 { 42 }"#)
        .expect("Failed to write test file");
    let mut integration =
        MagellanIntegration::open(&db_path).expect("Failed to open Magellan db");
    integration
        .index_file(&file_path)
        .expect("Failed to index test file");
    // Corrupt the database by truncating it to half its size.
    let db_metadata = std::fs::metadata(&db_path).expect("Failed to get db metadata");
    let truncated_size = db_metadata.len() / 2;
    let db_file = std::fs::File::options()
        .write(true)
        .open(&db_path)
        .expect("Failed to open database for writing");
    db_file
        .set_len(truncated_size)
        .expect("Failed to truncate database");
    let splice_binary = get_splice_binary();
    let output = Command::new(&splice_binary)
        .arg("query")
        .arg("--db")
        .arg(&db_path)
        .arg("--label")
        .arg("rust")
        .output()
        .expect("Failed to run splice query");
    assert!(
        !output.status.success(),
        "CLI should fail on corrupted database"
    );
    let exit_code = output.status.code();
    assert!(
        exit_code == Some(1) || exit_code == Some(3),
        "Expected exit code 1 or 3, got {:?}",
        exit_code
    );
    let stderr = String::from_utf8_lossy(&output.stderr);
    let json_output = extract_json_from_stdout(&stderr);
    let payload: Value =
        serde_json::from_str(&json_output).expect("stderr should contain JSON payload");
    let error = payload
        .get("error")
        .and_then(|v| v.as_object())
        .expect("error should be present");
    // error_code may be absent for some error kinds; only validate when present.
    if let Some(error_code) = error.get("error_code").and_then(|v| v.as_object()) {
        let code = error_code
            .get("code")
            .and_then(|v| v.as_str())
            .unwrap_or("");
        assert!(!code.is_empty(), "error code should not be empty");
        if error.get("kind").and_then(|v| v.as_str()) == Some("Magellan") {
            assert_eq!(code, "SPL-E091", "Magellan errors should map to SPL-E091");
        }
    }
    let message = error.get("message").and_then(|v| v.as_str()).unwrap_or("");
    assert!(!message.is_empty(), "error message should not be empty");
}
/// Looking up a symbol that is not in the index must exit with code 1 and
/// report SPL-E001 (symbol not found) — never the Magellan database error
/// code SPL-E091.
#[test]
fn test_symbol_not_found_error_code() {
    let workspace_dir = TempDir::new().expect("Failed to create temp workspace");
    let workspace_path = workspace_dir.path();
    let db_path = workspace_path.join("test.db");
    let file_path = workspace_path.join("lib.rs");
    std::fs::write(&file_path, r#"pub fn existing_func() -> i32 { 42 }"#)
        .expect("Failed to write test file");
    let mut integration =
        MagellanIntegration::open(&db_path).expect("Failed to open Magellan db");
    integration
        .index_file(&file_path)
        .expect("Failed to index test file");
    let splice_binary = get_splice_binary();
    let output = Command::new(&splice_binary)
        .args(["find", "--db"])
        .arg(&db_path)
        .args(["--name", "nonexistent_function"])
        .output()
        .expect("Failed to run splice find");
    assert_eq!(
        output.status.code(),
        Some(1),
        "Expected exit code 1 for symbol not found, got {:?}",
        output.status.code()
    );
    let stderr = String::from_utf8_lossy(&output.stderr);
    let payload: Value = serde_json::from_str(&extract_json_from_stdout(&stderr))
        .expect("stderr should contain JSON payload");
    assert_eq!(
        payload.get("status").and_then(|v| v.as_str()),
        Some("error"),
        "status should be 'error'"
    );
    let error = payload
        .get("error")
        .and_then(|v| v.as_object())
        .expect("error should be present");
    let error_code = error
        .get("error_code")
        .and_then(|v| v.as_object())
        .expect("error_code should be present");
    let code = error_code.get("code").and_then(|v| v.as_str());
    assert_eq!(
        code,
        Some("SPL-E001"),
        "error_code.code should be 'SPL-E001' for symbol not found, got {:?}",
        error_code.get("code")
    );
    assert_ne!(
        code,
        Some("SPL-E091"),
        "Symbol not found should use SPL-E001, not SPL-E091"
    );
    let message = error.get("message").and_then(|v| v.as_str()).unwrap_or("");
    assert!(
        message.contains("nonexistent_function") || message.contains("not found"),
        "error message should mention the symbol or 'not found': {}",
        message
    );
}
/// Exercises the exit-code contract end to end: 0 for success, 1 for symbol
/// lookup failures, 2 for usage errors, 3 for database errors, and any
/// non-zero code for validation failures. Each scoped block below covers one
/// exit code independently.
#[test]
fn test_exit_code_mapping_completeness() {
    let workspace_dir = TempDir::new().expect("Failed to create temp workspace");
    let workspace_path = workspace_dir.path();
    let splice_binary = get_splice_binary();
    // Exit code 0: `status` against a freshly indexed, valid database.
    {
        let db_path = workspace_path.join("test_success.db");
        let file_path = workspace_path.join("lib.rs");
        std::fs::write(&file_path, "pub fn test() {}\n").expect("Failed to write test file");
        let mut integration =
            MagellanIntegration::open(&db_path).expect("Failed to open Magellan db");
        integration
            .index_file(&file_path)
            .expect("Failed to index test file");
        let output = Command::new(&splice_binary)
            .arg("status")
            .arg("--db")
            .arg(&db_path)
            .output()
            .expect("Failed to run splice status");
        assert_eq!(
            output.status.code(),
            Some(0),
            "Exit code 0: status with valid db should succeed"
        );
    }
    // Exit code 1: `find` for a symbol that was never indexed.
    {
        let db_path = workspace_path.join("test_error.db");
        let file_path = workspace_path.join("lib.rs");
        std::fs::write(&file_path, "pub fn test() {}\n").expect("Failed to write test file");
        let mut integration =
            MagellanIntegration::open(&db_path).expect("Failed to open Magellan db");
        integration
            .index_file(&file_path)
            .expect("Failed to index test file");
        let output = Command::new(&splice_binary)
            .arg("find")
            .arg("--db")
            .arg(&db_path)
            .arg("--name")
            .arg("nonexistent")
            .output()
            .expect("Failed to run splice find");
        assert_eq!(
            output.status.code(),
            Some(1),
            "Exit code 1: finding nonexistent symbol should return error"
        );
    }
    // Exit code 2: usage error — required --db flag omitted.
    {
        let output = Command::new(&splice_binary)
            .arg("find")
            .arg("--name")
            .arg("test")
            .output()
            .expect("Failed to run splice find without --db");
        assert_eq!(
            output.status.code(),
            Some(2),
            "Exit code 2: missing --db flag should return usage error"
        );
    }
    // Exit code 3: database error — path that cannot be opened.
    {
        let output = Command::new(&splice_binary)
            .arg("status")
            .arg("--db")
            .arg("/nonexistent/path/to/database.db")
            .output()
            .expect("Failed to run splice status with nonexistent db");
        assert_eq!(
            output.status.code(),
            Some(3),
            "Exit code 3: nonexistent database should return database error"
        );
    }
    // Validation failure: deleting a symbol that does not exist in the file.
    // The exact code is not pinned — only that it is non-zero.
    {
        let test_file = workspace_path.join("validation_test.rs");
        std::fs::write(
            &test_file,
            r#"
pub fn test_func() -> i32 {
42
}
"#,
        )
        .expect("Failed to write test file");
        let output = Command::new(&splice_binary)
            .arg("delete")
            .arg("--file")
            .arg(&test_file)
            .arg("--symbol")
            .arg("nonexistent_symbol")
            .arg("--dry-run")
            .output()
            .expect("Failed to run splice delete");
        let exit_code = output.status.code();
        assert!(
            exit_code.is_some() && exit_code.unwrap() > 0,
            "Delete with nonexistent symbol should fail with non-zero exit code, got {:?}",
            exit_code
        );
    }
    // For failing commands, any JSON error payload must carry an error object.
    let test_cases = vec![("find", vec!["--db", "/nonexistent.db", "--name", "test"])];
    for (cmd, args) in test_cases {
        let output = Command::new(&splice_binary)
            .arg(cmd)
            .args(args)
            .output()
            .expect("Failed to run command");
        if !output.status.success() {
            let stderr = String::from_utf8_lossy(&output.stderr);
            let json_output = extract_json_from_stdout(&stderr);
            if let Ok(payload) = serde_json::from_str::<Value>(&json_output) {
                if payload.get("status").and_then(|v| v.as_str()) == Some("error") {
                    let _has_error_code = payload
                        .get("error")
                        .and_then(|v| v.get("error_code"))
                        .is_some();
                    assert!(
                        payload.get("error").is_some(),
                        "Error responses should have error object"
                    );
                }
            }
        }
    }
}
/// `status --output json` must return ok with a `data` payload containing
/// file/symbol counts and the db path; without `--output json` the response
/// is still a JSON envelope but must carry no `data` field.
#[test]
fn test_query_status_command_returns_statistics() {
    let temp_dir = TempDir::new().expect("Failed to create temp directory");
    let db_path = temp_dir.path().join("test.db");
    let test_file = temp_dir.path().join("test.rs");
    std::fs::write(&test_file, "pub fn test_func() {}\n").expect("Failed to write test file");
    let mut integration =
        MagellanIntegration::open(&db_path).expect("Failed to open Magellan db");
    integration
        .index_file(&test_file)
        .expect("Failed to index test file");
    let splice_binary = get_splice_binary();
    let output = Command::new(&splice_binary)
        .arg("status")
        .arg("--db")
        .arg(&db_path)
        .arg("--output")
        .arg("json")
        .output()
        .expect("Failed to run splice status");
    assert!(
        output.status.success(),
        "status command should succeed: {}",
        String::from_utf8_lossy(&output.stderr)
    );
    let stdout = String::from_utf8_lossy(&output.stdout);
    let json_str = extract_json_from_stdout(&stdout);
    let payload: Value =
        serde_json::from_str(&json_str).expect("stdout should contain valid JSON");
    assert_eq!(payload.get("status").and_then(|v| v.as_str()), Some("ok"));
    let data = payload
        .get("data")
        .expect("should have data field when --output json");
    let files = data
        .get("files")
        .and_then(|v| v.as_u64())
        .expect("should have files count");
    let symbols = data
        .get("symbols")
        .and_then(|v| v.as_u64())
        .expect("should have symbols count");
    let db_path_out = data
        .get("db_path")
        .and_then(|v| v.as_str())
        .expect("should have db_path");
    // One file with one symbol was indexed above; counts are lower-bounded
    // rather than pinned exactly.
    assert!(files >= 1, "should have at least 1 file");
    assert!(symbols >= 1, "should have at least 1 symbol");
    assert!(
        db_path_out.contains("test.db") || db_path_out.contains("test"),
        "db_path should reference test database"
    );
    // Human-readable mode: still JSON, but without the data payload.
    let output_human = Command::new(&splice_binary)
        .arg("status")
        .arg("--db")
        .arg(&db_path)
        .output()
        .expect("Failed to run splice status (human format)");
    assert!(output_human.status.success());
    let stdout_human = String::from_utf8_lossy(&output_human.stdout);
    let payload_human: Value =
        serde_json::from_str(&stdout_human).expect("human format should still be JSON");
    assert_eq!(
        payload_human.get("status").and_then(|v| v.as_str()),
        Some("ok")
    );
    assert!(
        payload_human.get("data").is_none(),
        "without --output json, data field should not be present"
    );
}
/// `query` with stacked `--label` filters and `query --list` must both
/// succeed and report `status: "ok"` in their JSON envelopes.
#[test]
fn test_query_query_command_lists_symbols() {
    let temp_dir = TempDir::new().expect("Failed to create temp directory");
    let db_path = temp_dir.path().join("test.db");
    let test_file = temp_dir.path().join("lib.rs");
    std::fs::write(
        &test_file,
        r#"
pub fn helper() {}
pub fn main() { helper(); }
pub struct TestStruct;
"#,
    )
    .expect("Failed to write test file");
    let mut integration =
        MagellanIntegration::open(&db_path).expect("Failed to open Magellan db");
    integration
        .index_file(&test_file)
        .expect("Failed to index test file");
    let splice_binary = get_splice_binary();
    let output = Command::new(&splice_binary)
        .arg("query")
        .arg("--db")
        .arg(&db_path)
        .arg("--label")
        .arg("rust")
        .arg("--label")
        .arg("fn")
        .arg("--output")
        .arg("json")
        .output()
        .expect("Failed to run splice query");
    assert!(
        output.status.success(),
        "query command should succeed: {}",
        String::from_utf8_lossy(&output.stderr)
    );
    let stdout = String::from_utf8_lossy(&output.stdout);
    let json_str = extract_json_from_stdout(&stdout);
    let payload: Value =
        serde_json::from_str(&json_str).expect("stdout should contain valid JSON");
    // Fix: this assertion was previously duplicated verbatim on two lines.
    assert_eq!(payload.get("status").and_then(|v| v.as_str()), Some("ok"));
    // List mode must also produce an ok envelope.
    let output_list = Command::new(&splice_binary)
        .arg("query")
        .arg("--db")
        .arg(&db_path)
        .arg("--list")
        .arg("--output")
        .arg("json")
        .output()
        .expect("Failed to run splice query --list");
    assert!(output_list.status.success(), "query --list should succeed");
    let stdout_list = String::from_utf8_lossy(&output_list.stdout);
    let json_list = extract_json_from_stdout(&stdout_list);
    let payload_list: Value =
        serde_json::from_str(&json_list).expect("stdout should contain valid JSON");
    assert_eq!(
        payload_list.get("status").and_then(|v| v.as_str()),
        Some("ok")
    );
}
/// `find --name` must locate a symbol with name, kind, and byte-span fields.
/// When two files define the same name, the default mode returns a single
/// match while `--ambiguous` returns all of them.
#[test]
fn test_query_find_command_locates_symbol() {
    let temp_dir = TempDir::new().expect("Failed to create temp directory");
    let db_path = temp_dir.path().join("test.db");
    let test_file = temp_dir.path().join("calc.rs");
    std::fs::write(&test_file, "pub fn calculate(x: i32) -> i32 { x + 1 }\n")
        .expect("Failed to write test file");
    let mut integration =
        MagellanIntegration::open(&db_path).expect("Failed to open Magellan db");
    integration
        .index_file(&test_file)
        .expect("Failed to index test file");
    let splice_binary = get_splice_binary();
    let output = Command::new(&splice_binary)
        .arg("find")
        .arg("--db")
        .arg(&db_path)
        .arg("--name")
        .arg("calculate")
        .arg("--output")
        .arg("json")
        .output()
        .expect("Failed to run splice find");
    assert!(
        output.status.success(),
        "find command should succeed: {}",
        String::from_utf8_lossy(&output.stderr)
    );
    let stdout = String::from_utf8_lossy(&output.stdout);
    let json_str = extract_json_from_stdout(&stdout);
    let payload: Value =
        serde_json::from_str(&json_str).expect("stdout should contain valid JSON");
    assert_eq!(payload.get("status").and_then(|v| v.as_str()), Some("ok"));
    let data = payload.get("data").expect("should have data field");
    let symbols = data
        .get("symbols")
        .and_then(|v| v.as_array())
        .expect("data.symbols should be array");
    let count = data
        .get("count")
        .and_then(|v| v.as_u64())
        .expect("data.count should be present");
    assert_eq!(count, 1, "should find exactly 1 symbol named 'calculate'");
    // The single match must carry identity and span coordinates.
    let symbol = symbols.first().expect("should have at least one symbol");
    assert_eq!(
        symbol.get("name").and_then(|v| v.as_str()),
        Some("calculate"),
        "found symbol should be named 'calculate'"
    );
    assert_eq!(
        symbol.get("kind").and_then(|v| v.as_str()),
        Some("fn"),
        "symbol should be a function"
    );
    assert!(
        symbol.get("file_path").is_some(),
        "symbol should have file_path"
    );
    assert!(
        symbol.get("byte_start").is_some(),
        "symbol should have byte_start"
    );
    assert!(
        symbol.get("byte_end").is_some(),
        "symbol should have byte_end"
    );
    // Index a second file defining the same name to create ambiguity.
    let test_file2 = temp_dir.path().join("other.rs");
    std::fs::write(&test_file2, "pub fn calculate(y: i32) -> i32 { y * 2 }\n")
        .expect("Failed to write second test file");
    integration
        .index_file(&test_file2)
        .expect("Failed to index second test file");
    // Default mode: exactly one match even though two exist.
    let output_first = Command::new(&splice_binary)
        .arg("find")
        .arg("--db")
        .arg(&db_path)
        .arg("--name")
        .arg("calculate")
        .arg("--output")
        .arg("json")
        .output()
        .expect("Failed to run splice find (first match)");
    let stdout_first = String::from_utf8_lossy(&output_first.stdout);
    let json_first = extract_json_from_stdout(&stdout_first);
    let payload_first: Value =
        serde_json::from_str(&json_first).expect("stdout should contain valid JSON");
    let count_first = payload_first
        .get("data")
        .and_then(|d| d.get("count"))
        .and_then(|v| v.as_u64())
        .unwrap_or(0);
    assert_eq!(
        count_first, 1,
        "without --ambiguous, should return first match only"
    );
    // --ambiguous mode: both matches are reported.
    let output_ambiguous = Command::new(&splice_binary)
        .arg("find")
        .arg("--db")
        .arg(&db_path)
        .arg("--name")
        .arg("calculate")
        .arg("--ambiguous")
        .arg("--output")
        .arg("json")
        .output()
        .expect("Failed to run splice find --ambiguous");
    let stdout_amb = String::from_utf8_lossy(&output_ambiguous.stdout);
    let json_amb = extract_json_from_stdout(&stdout_amb);
    let payload_amb: Value =
        serde_json::from_str(&json_amb).expect("stdout should contain valid JSON");
    let count_amb = payload_amb
        .get("data")
        .and_then(|d| d.get("count"))
        .and_then(|v| v.as_u64())
        .expect("should have count");
    assert_eq!(
        count_amb, 2,
        "with --ambiguous, should return all 2 matches"
    );
}
/// `refs` in all three directions (out, in, both) must produce a parseable
/// response. The direction=in/both assertions previously used
/// `success() || code() != Some(0)`, which is a tautology (every process
/// either succeeds or has a non-zero/absent code) and could never fail;
/// they now require that the process exited with a code at all
/// (i.e. was not killed by a signal).
#[test]
fn test_query_refs_command_shows_relationships() {
    let temp_dir = TempDir::new().expect("Failed to create temp directory");
    let db_path = temp_dir.path().join("test.db");
    let test_file = temp_dir.path().join("refs.rs");
    std::fs::write(
        &test_file,
        r#"
pub fn caller() {
callee();
}
pub fn callee() {}
"#,
    )
    .expect("Failed to write test file");
    let mut integration =
        MagellanIntegration::open(&db_path).expect("Failed to open Magellan db");
    integration
        .index_file(&test_file)
        .expect("Failed to index test file");
    let splice_binary = get_splice_binary();
    // Outgoing references from `caller`.
    let output = Command::new(&splice_binary)
        .arg("refs")
        .arg("--db")
        .arg(&db_path)
        .arg("--path")
        .arg(&test_file)
        .arg("--name")
        .arg("caller")
        .arg("--direction")
        .arg("out")
        .arg("--output")
        .arg("json")
        .output()
        .expect("Failed to run splice refs");
    let stdout = String::from_utf8_lossy(&output.stdout);
    let json_str = extract_json_from_stdout(&stdout);
    let payload: Value =
        serde_json::from_str(&json_str).expect("stdout should contain valid JSON");
    let status = payload.get("status").and_then(|v| v.as_str());
    assert!(
        status == Some("ok") || status == Some("error"),
        "status should be ok or error"
    );
    if status == Some("ok") {
        assert!(
            payload.get("data").is_some(),
            "ok response should have data field"
        );
    }
    // Incoming references to `callee`.
    let output_in = Command::new(&splice_binary)
        .arg("refs")
        .arg("--db")
        .arg(&db_path)
        .arg("--path")
        .arg(&test_file)
        .arg("--name")
        .arg("callee")
        .arg("--direction")
        .arg("in")
        .arg("--output")
        .arg("json")
        .output()
        .expect("Failed to run splice refs --direction in");
    // Fix: was the tautology `success() || code() != Some(0)`.
    assert!(
        output_in.status.code().is_some(),
        "refs command should return valid response"
    );
    // Both directions for `caller`.
    let output_both = Command::new(&splice_binary)
        .arg("refs")
        .arg("--db")
        .arg(&db_path)
        .arg("--path")
        .arg(&test_file)
        .arg("--name")
        .arg("caller")
        .arg("--direction")
        .arg("both")
        .arg("--output")
        .arg("json")
        .output()
        .expect("Failed to run splice refs --direction both");
    // Fix: was the tautology `success() || code() != Some(0)`.
    assert!(
        output_both.status.code().is_some(),
        "refs command should return valid response"
    );
}
/// `files --output json` must list every indexed file with its path and
/// content hash; with `--symbols` each file entry additionally carries a
/// symbol_count.
#[test]
fn test_query_files_command_lists_indexed_files() {
    let temp_dir = TempDir::new().expect("Failed to create temp directory");
    let db_path = temp_dir.path().join("test.db");
    let lib_rs = temp_dir.path().join("lib.rs");
    let main_rs = temp_dir.path().join("main.rs");
    let helpers_rs = temp_dir.path().join("helpers.rs");
    std::fs::write(&lib_rs, "pub fn lib_func() {}\n").expect("Failed to write lib.rs");
    std::fs::write(&main_rs, "fn main() {}\n").expect("Failed to write main.rs");
    std::fs::write(&helpers_rs, "pub fn helper() {}\n").expect("Failed to write helpers.rs");
    let mut integration =
        MagellanIntegration::open(&db_path).expect("Failed to open Magellan db");
    integration
        .index_file(&lib_rs)
        .expect("Failed to index lib.rs");
    integration
        .index_file(&main_rs)
        .expect("Failed to index main.rs");
    integration
        .index_file(&helpers_rs)
        .expect("Failed to index helpers.rs");
    let splice_binary = get_splice_binary();
    let output = Command::new(&splice_binary)
        .arg("files")
        .arg("--db")
        .arg(&db_path)
        .arg("--output")
        .arg("json")
        .output()
        .expect("Failed to run splice files");
    assert!(
        output.status.success(),
        "files command should succeed: {}",
        String::from_utf8_lossy(&output.stderr)
    );
    let stdout = String::from_utf8_lossy(&output.stdout);
    let json_str = extract_json_from_stdout(&stdout);
    let payload: Value =
        serde_json::from_str(&json_str).expect("stdout should contain valid JSON");
    assert_eq!(payload.get("status").and_then(|v| v.as_str()), Some("ok"));
    let data = payload.get("data").expect("should have data field");
    let files = data
        .get("files")
        .and_then(|v| v.as_array())
        .expect("data.files should be array");
    let count = data
        .get("count")
        .and_then(|v| v.as_u64())
        .expect("data.count should be present");
    // Exactly the three files indexed above must be reported.
    assert_eq!(count, 3, "should have 3 indexed files");
    assert_eq!(files.len(), 3, "files array should have 3 entries");
    // Every entry must carry both a path and a content hash.
    for file in files {
        assert!(
            file.get("path").and_then(|p| p.as_str()).is_some(),
            "file entry should have path"
        );
        assert!(
            file.get("hash").and_then(|h| h.as_str()).is_some(),
            "file entry should have hash"
        );
    }
    let file_paths: Vec<&str> = files
        .iter()
        .filter_map(|f| f.get("path").and_then(|p| p.as_str()))
        .collect();
    assert!(
        file_paths.iter().any(|p| p.contains("lib.rs")),
        "should include lib.rs"
    );
    assert!(
        file_paths.iter().any(|p| p.contains("main.rs")),
        "should include main.rs"
    );
    assert!(
        file_paths.iter().any(|p| p.contains("helpers.rs")),
        "should include helpers.rs"
    );
    // --symbols augments each entry with a per-file symbol count.
    let output_symbols = Command::new(&splice_binary)
        .arg("files")
        .arg("--db")
        .arg(&db_path)
        .arg("--symbols")
        .arg("--output")
        .arg("json")
        .output()
        .expect("Failed to run splice files --symbols");
    assert!(output_symbols.status.success());
    let stdout_symbols = String::from_utf8_lossy(&output_symbols.stdout);
    let json_symbols = extract_json_from_stdout(&stdout_symbols);
    let payload_symbols: Value =
        serde_json::from_str(&json_symbols).expect("stdout should contain valid JSON");
    let files_symbols = payload_symbols
        .get("data")
        .and_then(|d| d.get("files"))
        .and_then(|v| v.as_array())
        .expect("should have files array with symbols");
    for file in files_symbols {
        assert!(
            file.get("symbol_count").and_then(|s| s.as_u64()).is_some(),
            "with --symbols flag, each file should have symbol_count"
        );
    }
}
/// Sanity checks around error conventions: a db path in a missing directory,
/// `find` with missing required args, and `query` with no matching results.
/// The first assertion previously used `!success() || code() == Some(0)`,
/// which is a tautology (success implies code 0) and could never fail; it
/// now requires that the process exited with a code at all.
#[test]
fn test_query_error_codes_match_magellan_conventions() {
    let temp_dir = TempDir::new().expect("Failed to create temp directory");
    let db_path = temp_dir.path().join("test.db");
    let splice_binary = get_splice_binary();
    let nonexistent_dir_db = temp_dir.path().join("nonexistent").join("test.db");
    let output_db = Command::new(&splice_binary)
        .arg("status")
        .arg("--db")
        .arg(&nonexistent_dir_db)
        .output()
        .expect("Failed to run splice status with nonexistent directory db");
    // Fix: was the tautology `!success() || code() == Some(0)`.
    assert!(
        output_db.status.code().is_some(),
        "command with invalid db path should either fail or succeed with empty db"
    );
    // Missing required --name argument: usage-style error (exit 1 or 2).
    let output_usage = Command::new(&splice_binary)
        .arg("find")
        .arg("--db")
        .arg(&db_path)
        .output();
    match output_usage {
        Ok(result) => {
            let exit_code = result.status.code();
            assert!(
                exit_code == Some(1) || exit_code == Some(2),
                "missing required args should return exit code 1 or 2 (usage error)"
            );
        }
        Err(_e) => {
            // Spawning can itself fail in constrained environments; tolerated.
        }
    }
    let test_file = temp_dir.path().join("real.rs");
    std::fs::write(&test_file, "pub fn real() {}\n").expect("Failed to write test file");
    let mut integration =
        MagellanIntegration::open(&db_path).expect("Failed to open Magellan db");
    integration
        .index_file(&test_file)
        .expect("Failed to index test file");
    let _nonexistent_file = temp_dir.path().join("nonexistent.rs");
    // An empty result set is not an error for `query`.
    let output_file = Command::new(&splice_binary)
        .arg("query")
        .arg("--db")
        .arg(&db_path)
        .arg("--label")
        .arg("rust")
        .arg("--output")
        .arg("json")
        .output()
        .expect("Failed to run splice query with labels");
    assert!(
        output_file.status.success(),
        "query should succeed even with no matching results"
    );
}
/// Simulates an LLM discovery session against a three-file crate: status →
/// query → find → refs, asserting every step returns an ok JSON envelope.
/// NOTE(review): unlike most tests here, these payloads are parsed straight
/// from stdout with `from_slice` (no extract_json_from_stdout), which assumes
/// these subcommands emit pure JSON — confirm against the CLI output format.
#[test]
fn test_llm_discovery_workflow_single_tool() {
    let temp_dir = TempDir::new().expect("Failed to create temp directory");
    let temp_path = temp_dir.path();
    let src_dir = temp_path.join("src");
    std::fs::create_dir(&src_dir).expect("Failed to create src directory");
    let main_rs = src_dir.join("main.rs");
    std::fs::write(
        &main_rs,
        r#"
fn main() {
helper();
process();
}
"#,
    )
    .expect("Failed to write main.rs");
    let helper_rs = src_dir.join("helper.rs");
    std::fs::write(
        &helper_rs,
        r#"
pub fn helper() {}
"#,
    )
    .expect("Failed to write helper.rs");
    let process_rs = src_dir.join("process.rs");
    std::fs::write(
        &process_rs,
        r#"
pub fn process() {
helper();
}
"#,
    )
    .expect("Failed to write process.rs");
    let db_path = temp_path.join("magellan.db");
    let mut integration =
        MagellanIntegration::open(&db_path).expect("Failed to open Magellan db");
    integration
        .index_file(&main_rs)
        .expect("Failed to index main.rs");
    integration
        .index_file(&helper_rs)
        .expect("Failed to index helper.rs");
    integration
        .index_file(&process_rs)
        .expect("Failed to index process.rs");
    let splice_binary = get_splice_binary();
    // Step 1: status — confirm all three files are indexed.
    let output_status = Command::new(&splice_binary)
        .arg("status")
        .arg("--db")
        .arg(&db_path)
        .arg("--output")
        .arg("json")
        .output()
        .expect("Failed to run splice status");
    assert!(
        output_status.status.success(),
        "status command should succeed: {}",
        String::from_utf8_lossy(&output_status.stderr)
    );
    let status_json: Value =
        serde_json::from_slice(&output_status.stdout).expect("Invalid JSON from status");
    assert_eq!(status_json["status"], "ok", "status should be ok");
    assert!(
        status_json.get("data").is_some(),
        "status should have data field when --output json is used"
    );
    let files_count = status_json["data"]["files"].as_u64().unwrap_or(0);
    assert_eq!(files_count, 3, "status should report 3 files indexed");
    // Step 2: query — list rust functions.
    let output_query = Command::new(&splice_binary)
        .arg("query")
        .arg("--db")
        .arg(&db_path)
        .arg("--label")
        .arg("rust")
        .arg("--label")
        .arg("fn")
        .arg("--output")
        .arg("json")
        .output()
        .expect("Failed to run splice query");
    assert!(
        output_query.status.success(),
        "query command should succeed: {}",
        String::from_utf8_lossy(&output_query.stderr)
    );
    let query_json: Value =
        serde_json::from_slice(&output_query.stdout).expect("Invalid JSON from query");
    assert_eq!(query_json["status"], "ok", "query status should be ok");
    // Step 3: find — locate a specific symbol.
    let output_find = Command::new(&splice_binary)
        .arg("find")
        .arg("--db")
        .arg(&db_path)
        .arg("--name")
        .arg("process")
        .arg("--output")
        .arg("json")
        .output()
        .expect("Failed to run splice find");
    assert!(
        output_find.status.success(),
        "find command should succeed: {}",
        String::from_utf8_lossy(&output_find.stderr)
    );
    let find_json: Value =
        serde_json::from_slice(&output_find.stdout).expect("Invalid JSON from find");
    assert_eq!(find_json["status"], "ok", "find status should be ok");
    // Symbol entries, when present, must carry a file_path for navigation.
    if let Some(data) = find_json.get("data") {
        if let Some(symbols) = data.get("symbols").and_then(|v| v.as_array()) {
            if !symbols.is_empty() {
                let first_symbol = &symbols[0];
                assert!(
                    first_symbol.get("file_path").is_some(),
                    "symbol should have file_path field"
                );
            }
        }
    }
    // Step 4: refs — outgoing references from `process`.
    let output_refs = Command::new(&splice_binary)
        .arg("refs")
        .arg("--db")
        .arg(&db_path)
        .arg("--name")
        .arg("process")
        .arg("--path")
        .arg(&process_rs)
        .arg("--direction")
        .arg("out")
        .arg("--output")
        .arg("json")
        .output()
        .expect("Failed to run splice refs");
    assert!(
        output_refs.status.success(),
        "refs command should succeed: {}",
        String::from_utf8_lossy(&output_refs.stderr)
    );
    let refs_json: Value =
        serde_json::from_slice(&output_refs.stdout).expect("Invalid JSON from refs");
    assert_eq!(refs_json["status"], "ok", "refs status should be ok");
    // Final recap: every step in the workflow reported ok.
    assert_eq!(status_json["status"], "ok");
    assert_eq!(query_json["status"], "ok");
    assert_eq!(find_json["status"], "ok");
    assert_eq!(refs_json["status"], "ok");
}
/// Simulates an LLM edit session without a database: `find --file` to get
/// span coordinates, then `patch --dry-run` with a replacement body. Both
/// steps are validated only when they succeed — the assertions are
/// conditional, so this test tolerates either subcommand declining the
/// db-less mode.
#[test]
fn test_llm_edit_workflow_span_safe() {
    let temp_dir = TempDir::new().expect("Failed to create temp directory");
    let temp_path = temp_dir.path();
    let source_rs = temp_path.join("source.rs");
    std::fs::write(
        &source_rs,
        r#"
pub fn calculate(x: i32) -> i32 {
x + 1
}
"#,
    )
    .expect("Failed to write source.rs");
    let splice_binary = get_splice_binary();
    // Step 1: locate the symbol directly from the file (no index).
    let output_find = Command::new(&splice_binary)
        .arg("find")
        .arg("--file")
        .arg(&source_rs)
        .arg("--name")
        .arg("calculate")
        .arg("--output")
        .arg("json")
        .output()
        .expect("Failed to run splice find");
    if output_find.status.success() {
        let find_json: Value =
            serde_json::from_slice(&output_find.stdout).expect("Invalid JSON from find");
        assert_eq!(find_json["status"], "ok");
        // A usable match must expose span coordinates (byte- or line-based).
        if let Some(data) = find_json.get("data") {
            if let Some(symbols) = data.get("symbols").and_then(|v| v.as_array()) {
                if !symbols.is_empty() {
                    let first_symbol = &symbols[0];
                    assert!(
                        first_symbol.get("byte_start").is_some()
                            || first_symbol.get("line_start").is_some(),
                        "symbol should have span coordinates"
                    );
                }
            }
        }
    }
    // Step 2: dry-run patch of the symbol with a replacement implementation.
    let replacement_rs = temp_path.join("replacement.rs");
    std::fs::write(
        &replacement_rs,
        r#"
pub fn calculate(x: i32) -> i32 {
x * 2
}
"#,
    )
    .expect("Failed to write replacement.rs");
    let output_patch = Command::new(&splice_binary)
        .arg("patch")
        .arg("--file")
        .arg(&source_rs)
        .arg("--symbol")
        .arg("calculate")
        .arg("--with")
        .arg(&replacement_rs)
        .arg("--dry-run")
        .arg("--output")
        .arg("json")
        .output()
        .expect("Failed to run splice patch --dry-run");
    let patch_json_str =
        extract_json_from_stdout(&String::from_utf8_lossy(&output_patch.stdout));
    if output_patch.status.success() && !patch_json_str.is_empty() {
        let patch_json: Value =
            serde_json::from_str(&patch_json_str).expect("Invalid JSON from patch");
        assert_eq!(patch_json["status"], "ok", "patch status should be ok");
        assert!(
            patch_json.get("message").is_some() || patch_json.get("data").is_some(),
            "patch should have message or data field"
        );
    }
}
#[test]
fn test_llm_end_to_end_refactor_workflow() {
    // End-to-end LLM refactor loop: index a definition file and a call-site
    // file into a Magellan db, then drive status -> find -> refs -> patch
    // (dry-run) through the CLI exactly as an agent would.
    let temp_dir = TempDir::new().expect("Failed to create temp directory");
    let temp_path = temp_dir.path();

    // Definition site.
    let lib_rs = temp_path.join("lib.rs");
    std::fs::write(
        &lib_rs,
        r#"
pub fn old_name() -> i32 {
42
}
"#,
    )
    .expect("Failed to write lib.rs");

    // Call site.
    let main_rs = temp_path.join("main.rs");
    std::fs::write(
        &main_rs,
        r#"
fn main() {
let result = old_name();
}
"#,
    )
    .expect("Failed to write main.rs");

    let splice_binary = get_splice_binary();
    let db_path = temp_path.join("magellan.db");
    let mut integration =
        MagellanIntegration::open(&db_path).expect("Failed to open Magellan db");
    integration.index_file(&lib_rs).expect("Failed to index lib.rs");
    integration.index_file(&main_rs).expect("Failed to index main.rs");

    // Step 1: `status` must report both indexed files.
    let status_out = Command::new(&splice_binary)
        .args(["status", "--db"])
        .arg(&db_path)
        .args(["--output", "json"])
        .output()
        .expect("Failed to run splice status");
    assert!(
        status_out.status.success(),
        "status should succeed: {}",
        String::from_utf8_lossy(&status_out.stderr)
    );
    let status_json: Value =
        serde_json::from_slice(&status_out.stdout).expect("Invalid JSON from status");
    let indexed = status_json["data"]["files"].as_u64().unwrap_or(0);
    assert_eq!(indexed, 2, "should have 2 files indexed");

    // Step 2: locate the symbol to be renamed.
    let find_out = Command::new(&splice_binary)
        .args(["find", "--db"])
        .arg(&db_path)
        .args(["--name", "old_name", "--output", "json"])
        .output()
        .expect("Failed to run splice find");
    assert!(
        find_out.status.success(),
        "find should succeed: {}",
        String::from_utf8_lossy(&find_out.stderr)
    );
    let find_json: Value =
        serde_json::from_slice(&find_out.stdout).expect("Invalid JSON from find");
    assert_eq!(find_json["status"], "ok");

    // Step 3: list inbound references to the definition.
    let refs_out = Command::new(&splice_binary)
        .args(["refs", "--db"])
        .arg(&db_path)
        .args(["--name", "old_name", "--path"])
        .arg(&lib_rs)
        .args(["--direction", "in", "--output", "json"])
        .output()
        .expect("Failed to run splice refs");
    assert!(
        refs_out.status.success(),
        "refs should succeed: {}",
        String::from_utf8_lossy(&refs_out.stderr)
    );
    let refs_json: Value =
        serde_json::from_slice(&refs_out.stdout).expect("Invalid JSON from refs");
    assert_eq!(refs_json["status"], "ok");

    // Replacement bodies for the definition and the call site.
    let new_lib_content = temp_path.join("new_lib.rs");
    std::fs::write(
        &new_lib_content,
        r#"
pub fn new_name() -> i32 {
42
}
"#,
    )
    .expect("Failed to write new_lib.rs");
    let new_main_content = temp_path.join("new_main.rs");
    std::fs::write(
        &new_main_content,
        r#"
fn main() {
let result = new_name();
}
"#,
    )
    .expect("Failed to write new_main.rs");

    // A dry-run patch is allowed to report a controlled failure (exit code 1,
    // e.g. symbol mismatch); anything else is a hard error.
    let patch_ok =
        |out: &std::process::Output| out.status.success() || out.status.code() == Some(1);

    // Step 4: dry-run patch against the definition.
    let def_patch = Command::new(&splice_binary)
        .args(["patch", "--file"])
        .arg(&lib_rs)
        .args(["--symbol", "old_name", "--with"])
        .arg(&new_lib_content)
        .arg("--dry-run")
        .output()
        .expect("Failed to run splice patch for definition");
    assert!(
        patch_ok(&def_patch),
        "patch dry-run should succeed or fail gracefully: {}",
        String::from_utf8_lossy(&def_patch.stderr)
    );

    // Step 5: dry-run patch against the call site.
    let call_patch = Command::new(&splice_binary)
        .args(["patch", "--file"])
        .arg(&main_rs)
        .args(["--symbol", "old_name", "--with"])
        .arg(&new_main_content)
        .arg("--dry-run")
        .output()
        .expect("Failed to run splice patch for call site");
    assert!(
        patch_ok(&call_patch),
        "patch dry-run should succeed or fail gracefully: {}",
        String::from_utf8_lossy(&call_patch.stderr)
    );
}
#[test]
fn test_benchmark_status_command_performance() {
    use std::time::Instant;
    // Benchmarks `splice status` against databases of increasing size and
    // prints the wall-clock time per corpus.
    let splice_binary = get_splice_binary();
    let test_cases = vec![(10, "10 files"), (50, "50 files"), (100, "100 files")];
    for (num_files, description) in test_cases {
        let temp_dir = TempDir::new().expect("Failed to create temp dir");
        let db_path = temp_dir.path().join("benchmark.db");
        // Open the database once per corpus. The original re-opened it for
        // every file, which needlessly re-ran connection setup num_files
        // times; the end-to-end test already opens once and indexes many.
        let mut integration = MagellanIntegration::open(&db_path)
            .expect("Failed to open MagellanIntegration");
        for i in 0..num_files {
            let file_path = temp_dir.path().join(format!("test_{:03}.rs", i));
            let source = format!(
                r#"/// Test function {}
pub fn test_function_{}() -> i32 {{
// Implementation line 1
// Implementation line 2
// Implementation line 3
{}
}}
/// Test struct {}
pub struct TestStruct{} {{
field: i32,
}}
impl TestStruct{} {{
pub fn new(value: i32) -> Self {{
Self {{ field: value }}
}}
}}
"#,
                i,
                i,
                i * 42,
                i,
                i,
                i
            );
            std::fs::write(&file_path, source).expect("Failed to write test file");
            integration
                .index_file(&file_path)
                .expect("Failed to index file");
        }
        // Time only the status command itself, not the indexing above.
        let start = Instant::now();
        let output = Command::new(&splice_binary)
            .arg("status")
            .arg("--db")
            .arg(&db_path)
            .arg("--output")
            .arg("json")
            .output()
            .expect("Failed to run splice status");
        let duration = start.elapsed();
        assert!(
            output.status.success(),
            "{}: status command failed: {}",
            description,
            String::from_utf8_lossy(&output.stderr)
        );
        println!(
            "Status command ({} files): {}ms",
            num_files,
            duration.as_millis()
        );
    }
}
#[test]
fn test_benchmark_query_command_performance() {
    use std::time::Instant;
    // Benchmarks label-based `splice query` over a 100-file corpus, then
    // sanity-checks that a label query actually returns symbols.
    let splice_binary = get_splice_binary();
    let temp_dir = TempDir::new().expect("Failed to create temp dir");
    let db_path = temp_dir.path().join("benchmark_query.db");
    let num_files = 100;
    // Open the database once; re-opening it per file (as before) benchmarks
    // connection setup rather than indexing.
    let mut integration =
        MagellanIntegration::open(&db_path).expect("Failed to open MagellanIntegration");
    for i in 0..num_files {
        let file_path = temp_dir.path().join(format!("query_test_{:03}.rs", i));
        let source = format!(
            r#"/// Query test function {}
pub fn query_function_{}(x: i32) -> i32 {{
x + {}
}}
/// Query test struct
pub struct QueryStruct{} {{
value: i32,
}}
/// Query test impl
impl QueryStruct{} {{
pub fn process(&self) -> i32 {{
self.value * 2
}}
}}
"#,
            i, i, i, i, i
        );
        std::fs::write(&file_path, source).expect("Failed to write test file");
        integration
            .index_file(&file_path)
            .expect("Failed to index file");
    }
    let query_tests = vec![
        (vec!["--label", "rust"], "Label-only query (rust)"),
        (
            vec!["--label", "rust", "--label", "fn"],
            "Multi-label query (rust + fn)",
        ),
    ];
    let iterations = 10;
    for (args, description) in query_tests {
        let mut total_duration_ms = 0;
        for _iter in 0..iterations {
            let start = Instant::now();
            let output = Command::new(&splice_binary)
                .arg("query")
                .arg("--db")
                .arg(&db_path)
                .args(&args)
                .arg("--output")
                .arg("json")
                .output()
                .expect("Failed to run splice query");
            let duration = start.elapsed();
            total_duration_ms += duration.as_millis();
            assert!(
                output.status.success(),
                "{}: query command failed: {}",
                description,
                String::from_utf8_lossy(&output.stderr)
            );
        }
        let avg_ms = total_duration_ms / iterations as u128;
        println!(
            "Query command ({}): {}ms average over {} iterations",
            description, avg_ms, iterations
        );
    }
    // Sanity check: a plain label query should surface at least one symbol.
    let output = Command::new(&splice_binary)
        .arg("query")
        .arg("--db")
        .arg(&db_path)
        .arg("--label")
        .arg("rust")
        .arg("--output")
        .arg("json")
        .output()
        .expect("Failed to run splice query");
    let stdout_str = String::from_utf8_lossy(&output.stdout);
    // Use the shared brace-balancing extractor. The previous ad-hoc slicing
    // searched for "}\n" (which can truncate nested JSON) and fell back to
    // `stdout_str.len()` as an *inclusive* upper index, which panics with an
    // out-of-bounds slice whenever no closing brace is found.
    let json_str = extract_json_from_stdout(&stdout_str);
    if let Ok(json) = serde_json::from_str::<Value>(&json_str) {
        if json["result"]["query"]["symbols"].is_array() {
            let symbol_count = json["result"]["query"]["symbols"]
                .as_array()
                .map_or(0, |a| a.len());
            assert!(
                symbol_count > 0,
                "Query should return symbols, got {}",
                symbol_count
            );
        }
    }
    println!(
        "Query performance summary: label queries use index (O(log n)), file queries are direct lookup (O(1))"
    );
}
#[test]
fn test_benchmark_find_command_performance() {
    use std::time::Instant;
    // Benchmarks `splice find` for a symbol unique to one file vs. a name
    // repeated across the corpus (the latter queried with --ambiguous).
    let splice_binary = get_splice_binary();
    let temp_dir = TempDir::new().expect("Failed to create temp dir");
    let db_path = temp_dir.path().join("benchmark_find.db");
    let num_files = 100;
    let unique_name = "unique_benchmark_symbol_xyz123";
    // Open the database once; re-opening it per file (as before) benchmarks
    // connection setup rather than indexing.
    let mut integration =
        MagellanIntegration::open(&db_path).expect("Failed to open MagellanIntegration");
    for i in 0..num_files {
        let file_path = temp_dir.path().join(format!("find_test_{:03}.rs", i));
        // File 50 carries the unique symbol; every other file gets the
        // common `process_data` name.
        let source = if i == 50 {
            format!(
                r#"/// Unique benchmark function
pub fn {}() -> i32 {{
42
}}
"#,
                unique_name
            )
        } else {
            format!(
                r#"/// Common function
pub fn process_data(x: i32) -> i32 {{
x + {}
}}
"#,
                i
            )
        };
        std::fs::write(&file_path, source).expect("Failed to write test file");
        integration
            .index_file(&file_path)
            .expect("Failed to index file");
    }
    let iterations = 10;
    // Shared timing driver for both scenarios: runs `find` `iterations`
    // times with the given extra args and returns the average milliseconds.
    // The panic message renders identically to the original per-scenario
    // asserts ("<label>: <stderr>").
    let time_find = |extra_args: &[&str], failure_label: &str| -> u128 {
        let mut total_ms = 0u128;
        for _ in 0..iterations {
            let start = Instant::now();
            let output = Command::new(&splice_binary)
                .arg("find")
                .arg("--db")
                .arg(&db_path)
                .args(extra_args)
                .arg("--output")
                .arg("json")
                .output()
                .expect("Failed to run splice find");
            total_ms += start.elapsed().as_millis();
            assert!(
                output.status.success(),
                "{}: {}",
                failure_label,
                String::from_utf8_lossy(&output.stderr)
            );
        }
        total_ms / iterations as u128
    };
    let avg_unique_ms = time_find(&["--name", unique_name], "find by unique name failed");
    println!(
        "Find command (unique name): {}ms average over {} iterations",
        avg_unique_ms, iterations
    );
    let avg_common_ms = time_find(
        &["--name", "process_data", "--ambiguous"],
        "find by common name failed",
    );
    println!(
        "Find command (common name): {}ms average over {} iterations",
        avg_common_ms, iterations
    );
    println!(
        "Find performance note: O(N) where N = number of files (Magellan has no global symbol index)"
    );
}
#[test]
fn test_benchmark_export_command_performance() {
    use std::time::Instant;
    // Benchmarks `splice export` in each supported format and reports the
    // average duration, output size, and an estimated symbol throughput.
    let splice_binary = get_splice_binary();
    let temp_dir = TempDir::new().expect("Failed to create temp dir");
    let db_path = temp_dir.path().join("benchmark_export.db");
    let num_files = 100;
    // Open the database once; re-opening it per file (as before) benchmarks
    // connection setup rather than indexing.
    let mut integration =
        MagellanIntegration::open(&db_path).expect("Failed to open MagellanIntegration");
    for i in 0..num_files {
        let file_path = temp_dir.path().join(format!("export_test_{:03}.rs", i));
        let source = format!(
            r#"/// Export test function 1
pub fn export_func_1_{}() -> i32 {{ {} }}
/// Export test function 2
pub fn export_func_2_{}() -> i32 {{ {} }}
/// Export test struct
pub struct ExportStruct{} {{
field: i32,
}}
/// Export test enum
pub enum ExportEnum{} {{
VariantA,
VariantB,
}}
/// Export test impl
impl ExportStruct{} {{
pub fn new(value: i32) -> Self {{
Self {{ field: value }}
}}
}}
"#,
            i, i, i, i, i, i, i
        );
        std::fs::write(&file_path, source).expect("Failed to write test file");
        integration
            .index_file(&file_path)
            .expect("Failed to index file");
    }
    let iterations = 5;
    let formats = vec![("json", "json"), ("jsonl", "jsonl"), ("csv", "csv")];
    for (format_arg, extension) in formats {
        let mut total_duration_ms = 0u128;
        let mut total_file_size = 0u64;
        // `iter` is used in the output filename, so no underscore prefix.
        for iter in 0..iterations {
            let output_path = temp_dir
                .path()
                .join(format!("export_{}.{}", iter, extension));
            let start = Instant::now();
            let output = Command::new(&splice_binary)
                .arg("export")
                .arg("--db")
                .arg(&db_path)
                .arg("--format")
                .arg(format_arg)
                .arg("--file")
                .arg(&output_path)
                .output()
                .expect("Failed to run splice export");
            let duration = start.elapsed();
            total_duration_ms += duration.as_millis();
            assert!(
                output.status.success(),
                "export {} format failed: {}",
                extension,
                String::from_utf8_lossy(&output.stderr)
            );
            assert!(
                output_path.exists(),
                "export {} output file should exist",
                extension
            );
            // Stat the output once and reuse the size for both the emptiness
            // check and the running total (the original stat'd it twice).
            let file_size = output_path.metadata().map(|m| m.len()).unwrap_or(0);
            assert!(
                file_size > 0,
                "export {} output file should not be empty",
                extension
            );
            total_file_size += file_size;
        }
        let avg_ms = total_duration_ms / iterations as u128;
        let avg_size = total_file_size / iterations as u64;
        // Rough corpus size used only for the throughput estimate below.
        let estimated_symbols = 500;
        // `max(1)` guards against a 0ms average producing an infinite
        // throughput figure on very fast machines.
        let throughput = (estimated_symbols as f64 / avg_ms.max(1) as f64) * 1000.0;
        println!(
            "Export command ({} format): {}ms average over {} iterations, avg size: {} bytes, throughput: {:.0} symbols/sec",
            extension, avg_ms, iterations, avg_size, throughput
        );
    }
    println!(
        "Export performance note: reads first 100 files for memory safety (documented limitation)"
    );
}
}