use anyhow::{Context, Result};
use serde_json::{Value, json};
use std::collections::BTreeMap;
use std::fs;
use std::path::PathBuf;
const SNAPSHOT_DIR: &str = "tests/snapshots/tool_schemas";
fn generate_tool_schema_hash(tool_name: &str, schema: &Value) -> Result<String> {
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
let canonical =
serde_json::to_string(schema).context("Failed to serialize schema for hashing")?;
let mut hasher = DefaultHasher::new();
canonical.hash(&mut hasher);
let hash = hasher.finish();
Ok(format!("{}-{:016x}", tool_name, hash))
}
fn snapshot_current_tool_schemas() -> Result<BTreeMap<String, Value>> {
use tempfile::TempDir;
use vtcode_core::tools::ToolRegistry;
let temp_dir =
TempDir::new().context("Failed to create temporary directory for tool registry")?;
let runtime = tokio::runtime::Runtime::new()
.context("Failed to create tokio runtime for tool registry")?;
let schemas = runtime.block_on(async {
let registry = ToolRegistry::new(temp_dir.path().to_path_buf()).await;
let mut schemas = BTreeMap::new();
let tool_names = registry.available_tools().await;
for tool_name in tool_names {
if let Some(schema) = registry.get_tool_schema(&tool_name).await {
schemas.insert(tool_name, schema);
}
}
if schemas.is_empty() {
schemas.insert(
"unified_file".to_string(),
json!({
"name": "unified_file",
"description": "Unified file operations",
"parameters": {
"type": "object",
"properties": {
"action": {
"type": "string",
"description": "File action"
},
"path": {
"type": "string",
"description": "Path to the file to read"
},
},
"required": ["action"]
}
}),
);
schemas.insert(
"unified_exec".to_string(),
json!({
"name": "unified_exec",
"description": "Unified execution and PTY operations",
"parameters": {
"type": "object",
"properties": {
"action": {
"type": "string",
"description": "Execution action"
},
"command": {
"type": "string",
"description": "Command to run"
}
},
"required": ["action"]
}
}),
);
schemas.insert(
"unified_search".to_string(),
json!({
"name": "unified_search",
"description": "Unified discovery and search",
"parameters": {
"type": "object",
"properties": {
"action": {
"type": "string",
"description": "Search action"
},
},
"required": ["action"]
}
}),
);
}
Result::<_, anyhow::Error>::Ok(schemas)
})?;
Ok(schemas)
}
fn validate_schema_stability(tool_name: &str, current: &Value, baseline: &Value) -> Result<()> {
let current_str = serde_json::to_string(current)?;
let baseline_str = serde_json::to_string(baseline)?;
if current_str != baseline_str {
anyhow::bail!(
"Schema drift detected for tool '{}'\n\nBaseline:\n{}\n\nCurrent:\n{}",
tool_name,
serde_json::to_string_pretty(baseline)?,
serde_json::to_string_pretty(current)?
);
}
Ok(())
}
fn validate_whitespace_consistency(schema: &Value) -> Result<()> {
let schema_str = serde_json::to_string_pretty(schema)?;
if schema_str.contains("\r\n") {
anyhow::bail!("Tool schema contains CRLF line endings - use LF only");
}
for (line_num, line) in schema_str.lines().enumerate() {
if line.ends_with(' ') || line.ends_with('\t') {
anyhow::bail!("Tool schema line {} has trailing whitespace", line_num + 1);
}
}
let blank_line_pattern = "\n\n\n";
if schema_str.contains(blank_line_pattern) {
anyhow::bail!("Tool schema contains multiple consecutive blank lines");
}
Ok(())
}
fn validate_encoding_invariants(schema: &Value) -> Result<()> {
let schema_str = serde_json::to_string(schema)?;
if !schema_str.is_char_boundary(0) || !schema_str.is_char_boundary(schema_str.len()) {
anyhow::bail!("Tool schema has invalid UTF-8 boundaries");
}
if schema_str
.chars()
.any(|c| c.is_control() && c != '\n' && c != '\t')
{
anyhow::bail!("Tool schema contains unexpected control characters");
}
if let Some(desc) = schema.get("description")
&& let Some(desc_str) = desc.as_str()
&& desc_str != desc_str.trim()
{
anyhow::bail!(
"Tool description has leading/trailing whitespace: '{}'",
desc_str
);
}
Ok(())
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_snapshot_generation() {
let schemas = snapshot_current_tool_schemas().unwrap();
assert!(!schemas.is_empty());
assert!(schemas.contains_key("unified_file"));
assert!(schemas.contains_key("unified_exec"));
assert!(schemas.contains_key("unified_search"));
}
#[test]
fn test_schema_hash_stability() {
let schema = json!({"name": "test", "description": "Test tool"});
let hash1 = generate_tool_schema_hash("test", &schema).unwrap();
let hash2 = generate_tool_schema_hash("test", &schema).unwrap();
assert_eq!(hash1, hash2);
}
#[test]
fn test_schema_stability_validation() {
let baseline = json!({
"name": "test",
"description": "Test tool"
});
let current = baseline.clone();
assert!(validate_schema_stability("test", ¤t, &baseline).is_ok());
}
#[test]
fn test_schema_drift_detection() {
let baseline = json!({
"name": "test",
"description": "Test tool"
});
let current = json!({
"name": "test",
"description": "Test tool modified"
});
let result = validate_schema_stability("test", ¤t, &baseline);
assert!(result.is_err());
assert!(result.unwrap_err().to_string().contains("Schema drift"));
}
#[test]
fn test_whitespace_validation_trailing_space() {
let schema = json!({
"name": "test",
"description": "Test tool"
});
assert!(validate_whitespace_consistency(&schema).is_ok());
}
#[test]
fn test_encoding_invariants() {
let schema = json!({
"name": "test",
"description": "Test tool"
});
assert!(validate_encoding_invariants(&schema).is_ok());
}
#[test]
fn test_description_trimming() {
let schema_with_spaces = json!({
"name": "test",
"description": " Test tool with spaces "
});
let result = validate_encoding_invariants(&schema_with_spaces);
assert!(result.is_err());
assert!(
result
.unwrap_err()
.to_string()
.contains("leading/trailing whitespace")
);
}
#[test]
fn test_all_current_tools_valid() {
let schemas = snapshot_current_tool_schemas().unwrap();
for (tool_name, schema) in &schemas {
validate_whitespace_consistency(schema)
.unwrap_or_else(|e| panic!("Tool {} failed whitespace check: {}", tool_name, e));
validate_encoding_invariants(schema)
.unwrap_or_else(|e| panic!("Tool {} failed encoding check: {}", tool_name, e));
}
}
#[test]
fn test_required_fields_present() {
let schemas = snapshot_current_tool_schemas().unwrap();
for (tool_name, schema) in &schemas {
assert!(
schema.get("name").is_some(),
"Tool {} missing 'name' field",
tool_name
);
assert!(
schema.get("description").is_some(),
"Tool {} missing 'description' field",
tool_name
);
assert!(
schema.get("parameters").is_some(),
"Tool {} missing 'parameters' field",
tool_name
);
}
}
#[test]
fn test_parameter_schema_structure() {
let schemas = snapshot_current_tool_schemas().unwrap();
for (tool_name, schema) in &schemas {
let params = schema.get("parameters").expect("missing parameters");
assert!(
params.get("type").is_some(),
"Tool {} parameters missing 'type'",
tool_name
);
assert!(
params.get("properties").is_some(),
"Tool {} parameters missing 'properties'",
tool_name
);
}
}
}
#[cfg(test)]
mod ci_tests {
use super::*;
#[test]
#[ignore] fn ci_validate_no_schema_drift() {
let snapshot_path = PathBuf::from(SNAPSHOT_DIR);
if !snapshot_path.exists() {
fs::create_dir_all(&snapshot_path).unwrap();
let schemas = snapshot_current_tool_schemas().unwrap();
for (tool_name, schema) in schemas {
let file_path = snapshot_path.join(format!("{}.json", tool_name));
let content = serde_json::to_string_pretty(&schema).unwrap();
fs::write(file_path, content).unwrap();
}
println!("Created baseline snapshots in {}", SNAPSHOT_DIR);
return;
}
let current_schemas = snapshot_current_tool_schemas().unwrap();
for (tool_name, current_schema) in current_schemas {
let snapshot_file = snapshot_path.join(format!("{}.json", tool_name));
if !snapshot_file.exists() {
panic!(
"No snapshot found for tool '{}' - run with --update-snapshots to create",
tool_name
);
}
let baseline_content = fs::read_to_string(&snapshot_file).unwrap();
let baseline_schema: Value = serde_json::from_str(&baseline_content).unwrap();
validate_schema_stability(&tool_name, ¤t_schema, &baseline_schema).unwrap();
}
}
}
#[cfg(test)]
pub fn update_schema_snapshots() -> Result<()> {
let snapshot_path = PathBuf::from(SNAPSHOT_DIR);
fs::create_dir_all(&snapshot_path)?;
let schemas = snapshot_current_tool_schemas()?;
let count = schemas.len();
for (tool_name, schema) in schemas {
let file_path = snapshot_path.join(format!("{}.json", tool_name));
let content = serde_json::to_string_pretty(&schema)?;
fs::write(file_path, content)?;
}
println!("Updated {} tool schema snapshots", count);
Ok(())
}
#[cfg(test)]
mod integration_tests {
use super::*;
use assert_fs::TempDir;
use vtcode_core::tools::ToolRegistry;
#[tokio::test]
async fn test_actual_tool_schemas_are_valid() {
let temp_dir = TempDir::new().unwrap();
let _registry = ToolRegistry::new(temp_dir.path().to_path_buf()).await;
assert!(temp_dir.path().exists(), "Registry workspace should exist");
}
#[tokio::test]
async fn test_tool_registry_serialization_consistency() {
let temp_dir = TempDir::new().unwrap();
let _registry = ToolRegistry::new(temp_dir.path().to_path_buf()).await;
let _registry2 = ToolRegistry::new(temp_dir.path().to_path_buf()).await;
assert!(
temp_dir.path().exists(),
"Tool registries should be consistently creatable"
);
}
#[test]
fn test_tool_descriptions_are_trimmed() {
let schemas = snapshot_current_tool_schemas().unwrap();
for (tool_name, schema) in schemas {
if let Some(desc) = schema.get("description").and_then(|v| v.as_str()) {
assert_eq!(
desc.trim(),
desc,
"Tool '{}' description should be trimmed",
tool_name
);
}
}
}
#[test]
fn test_tool_parameter_schemas_are_consistent() {
let schemas = snapshot_current_tool_schemas().unwrap();
for (tool_name, schema) in schemas {
let params = schema
.get("parameters")
.unwrap_or_else(|| panic!("Tool '{}' missing parameters", tool_name));
assert!(
params.get("type").is_some(),
"Tool '{}' parameters missing type",
tool_name
);
assert!(
params.get("properties").is_some(),
"Tool '{}' parameters missing properties",
tool_name
);
validate_encoding_invariants(&schema).unwrap_or_else(|e| {
panic!("Tool '{}' failed encoding validation: {}", tool_name, e)
});
validate_whitespace_consistency(&schema).unwrap_or_else(|e| {
panic!("Tool '{}' failed whitespace validation: {}", tool_name, e)
});
}
}
}