use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::fs;
use std::path::PathBuf;
use std::sync::Mutex;
use std::time::{SystemTime, UNIX_EPOCH};
/// Directory where tool outputs are persisted as `<ref_id>.json` envelopes.
const OUTPUT_DIR: &str = "/tmp/syncable-cli/outputs";
/// Stored outputs / session refs older than this (1 hour) are purged.
const MAX_AGE_SECS: u64 = 3600;
/// In-memory record describing one stored tool output that can later be
/// fetched via `retrieve_output`/`retrieve_filtered`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SessionRef {
    // Unique id; also the on-disk filename stem (`<ref_id>.json`).
    pub ref_id: String,
    // Name of the tool that produced the output.
    pub tool: String,
    // Short description of what kind of data the output holds.
    pub contains: String,
    // One-line human-readable summary shown to the agent.
    pub summary: String,
    // Unix seconds at registration time (0 if the clock read failed).
    pub timestamp: u64,
    // Size of the stored payload in bytes, as reported by the caller.
    pub size_bytes: usize,
}
// Process-wide registry of refs created this session, newest pushed last.
// `Mutex::new` is const, so no lazy-init wrapper is needed.
static SESSION_REGISTRY: Mutex<Vec<SessionRef>> = Mutex::new(Vec::new());
/// Insert or refresh the registry entry for `ref_id`.
///
/// Any existing entry with the same id is removed first, so the registry
/// holds at most one entry per ref. A poisoned lock silently skips
/// registration (same as the prior behavior of ignoring `Err`).
pub fn register_session_ref(
    ref_id: &str,
    tool: &str,
    contains: &str,
    summary: &str,
    size_bytes: usize,
) {
    let Ok(mut registry) = SESSION_REGISTRY.lock() else {
        return;
    };
    let now = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .map(|d| d.as_secs())
        .unwrap_or(0);
    // De-duplicate before appending so re-registration refreshes the entry.
    registry.retain(|existing| existing.ref_id != ref_id);
    registry.push(SessionRef {
        ref_id: ref_id.to_owned(),
        tool: tool.to_owned(),
        contains: contains.to_owned(),
        summary: summary.to_owned(),
        timestamp: now,
        size_bytes,
    });
}
/// Snapshot of all registered session refs; empty if the lock is poisoned.
pub fn get_session_refs() -> Vec<SessionRef> {
    match SESSION_REGISTRY.lock() {
        Ok(registry) => registry.clone(),
        Err(_) => Vec::new(),
    }
}
/// Drop registry entries older than `MAX_AGE_SECS`.
///
/// Uses `saturating_sub` so an entry stamped in the future (clock skew,
/// clock reset) cannot underflow the `u64` subtraction — plain `now - ts`
/// panics in debug builds and wraps to a huge age in release, which would
/// incorrectly evict the entry.
pub fn cleanup_session_registry() {
    let now = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .map(|d| d.as_secs())
        .unwrap_or(0);
    if let Ok(mut registry) = SESSION_REGISTRY.lock() {
        registry.retain(|r| now.saturating_sub(r.timestamp) < MAX_AGE_SECS);
    }
}
/// Render the current session registry as a human-readable block for the
/// agent prompt; returns an empty string when nothing is registered.
pub fn format_session_refs_for_agent() -> String {
    let refs = get_session_refs();
    if refs.is_empty() {
        return String::new();
    }
    // One clock read for the whole listing (loop-invariant — previously a
    // SystemTime query was issued per entry). saturating_sub guards against
    // future-dated timestamps underflowing the age computation.
    let now = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .map(|d| d.as_secs())
        .unwrap_or(0);
    let mut output = String::from("\n📦 AVAILABLE DATA FOR RETRIEVAL:\n");
    output.push_str("─────────────────────────────────\n");
    for r in &refs {
        let age = now.saturating_sub(r.timestamp);
        let age_str = if age < 60 {
            format!("{}s ago", age)
        } else {
            format!("{}m ago", age / 60)
        };
        output.push_str(&format!(
            "\n• {} [{}]\n Contains: {}\n Summary: {}\n Retrieve: retrieve_output(\"{}\") or with query\n",
            r.ref_id, age_str, r.contains, r.summary, r.ref_id
        ));
    }
    output.push_str("\n─────────────────────────────────\n");
    output.push_str(
        "Query examples: \"severity:critical\", \"file:deployment.yaml\", \"code:DL3008\"\n",
    );
    output
}
/// Build a short id of the form `tttttt_rrrr`: the last six hex digits of
/// the millisecond timestamp (reversed, so the fast-changing digits lead)
/// plus four pseudo-random base-36 characters.
fn generate_ref_id() -> String {
    const ALPHABET: &[u8] = b"abcdefghijklmnopqrstuvwxyz0123456789";
    let timestamp = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .map_or(0, |d| d.as_millis());
    let hex = format!("{:x}", timestamp);
    let ts_part: String = hex.chars().rev().take(6).collect();
    let mut rand_part = String::with_capacity(4);
    for _ in 0..4 {
        let idx = (timestamp as usize + rand_simple()) % ALPHABET.len();
        rand_part.push(ALPHABET[idx] as char);
    }
    format!("{}_{}", ts_part, rand_part)
}
/// Cheap pseudo-random value: the address of a fresh heap allocation
/// (ASLR/allocator noise) pushed through one LCG step, reduced mod 2^31.
/// Not cryptographic — only used to decorrelate ref-id suffixes.
fn rand_simple() -> usize {
    // Borrowing the box gives us its heap address without any unsafe
    // into_raw/from_raw round-trip; the allocation drops at scope end.
    let probe = Box::new(0u8);
    let addr = &*probe as *const u8 as usize;
    addr.wrapping_mul(1103515245).wrapping_add(12345) % (1 << 31)
}
/// Ensure the output directory exists and return its path.
///
/// `create_dir_all` already succeeds when the directory exists, so the
/// previous `exists()` pre-check was a redundant TOCTOU window (the
/// directory could appear/disappear between the check and the create).
fn ensure_output_dir() -> std::io::Result<PathBuf> {
    let path = PathBuf::from(OUTPUT_DIR);
    fs::create_dir_all(&path)?;
    Ok(path)
}
pub fn store_output(output: &Value, tool_name: &str) -> String {
let ref_id = format!("{}_{}", tool_name, generate_ref_id());
if let Ok(dir) = ensure_output_dir() {
let path = dir.join(format!("{}.json", ref_id));
let stored = serde_json::json!({
"ref_id": ref_id,
"tool": tool_name,
"timestamp": SystemTime::now()
.duration_since(UNIX_EPOCH)
.map(|d| d.as_secs())
.unwrap_or(0),
"data": output
});
if let Ok(json_str) = serde_json::to_string(&stored) {
let _ = fs::write(&path, json_str);
}
}
ref_id
}
/// Load the raw `"data"` payload stored under `ref_id`, or `None` when the
/// file is missing, unreadable, or not valid JSON.
pub fn retrieve_output(ref_id: &str) -> Option<Value> {
    let path = PathBuf::from(OUTPUT_DIR).join(format!("{}.json", ref_id));
    // A missing file simply surfaces as a read error -> None, so no
    // separate exists() probe is required.
    let content = fs::read_to_string(&path).ok()?;
    let envelope: Value = serde_json::from_str(&content).ok()?;
    envelope.get("data").cloned()
}
/// Retrieve a stored output by `ref_id`, optionally filtered by a
/// `type:value` query, paginated over the matching issues via
/// `offset`/`limit`.
///
/// Returns `None` only when the ref does not exist (or its project lookup
/// fails on the analyze-project path).
pub fn retrieve_filtered(
    ref_id: &str,
    query: Option<&str>,
    limit: usize,
    offset: usize,
) -> Option<Value> {
    let data = retrieve_output(ref_id)?;
    // Analyze-project outputs are structured by section rather than by a
    // flat issue list; note that limit/offset are ignored on this path.
    if is_analyze_project_output(&data) {
        return retrieve_analyze_project(&data, query);
    }
    // No query (or an empty one) returns the stored payload unfiltered.
    let query = match query {
        Some(q) if !q.is_empty() => q,
        _ => return Some(data),
    };
    let (filter_type, filter_value) = parse_query(query);
    let issues = find_issues_array(&data).unwrap_or_default();
    let filtered: Vec<Value> = issues
        .iter()
        .filter(|issue| matches_filter(issue, &filter_type, &filter_value))
        .cloned()
        .collect();
    let total_matches = filtered.len();
    // Page AFTER filtering, shrinking bulky fields in each returned item.
    let page: Vec<Value> = filtered
        .into_iter()
        .skip(offset)
        .take(limit)
        .map(|v| truncate_result_value(v))
        .collect();
    let showing = page.len();
    let has_more = offset + showing < total_matches;
    let mut result = serde_json::json!({
        "query": query,
        "total_matches": total_matches,
        "showing": showing,
        "offset": offset,
        "has_more": has_more,
        "results": page
    });
    if has_more {
        // Hand the caller a ready-made command for the next page.
        // unwrap() is safe: `result` was just built with json!({...}),
        // which is always an object.
        result.as_object_mut().unwrap().insert(
            "next_command".to_string(),
            Value::String(format!(
                "sync-ctl retrieve '{}' --query '{}' --offset {} --limit {}",
                ref_id,
                query,
                offset + limit,
                limit
            )),
        );
    }
    Some(result)
}
/// Shrink bulky fields on one result entry so pages stay small: long text
/// fields are cut to ~200 bytes (plus "...") and `references` lists to 3
/// items, with a `references_truncated` count of what was dropped.
/// Non-object values pass through untouched.
fn truncate_result_value(mut value: Value) -> Value {
    if let Some(obj) = value.as_object_mut() {
        for field in ["description", "message", "details"] {
            let shortened = obj.get(field).and_then(|v| v.as_str()).and_then(|s| {
                if s.len() <= 200 {
                    return None;
                }
                // Back off to a char boundary before slicing: `&s[..200]`
                // panics if byte 200 falls inside a multi-byte codepoint
                // (e.g. non-ASCII descriptions).
                let mut end = 200;
                while !s.is_char_boundary(end) {
                    end -= 1;
                }
                Some(format!("{}...", &s[..end]))
            });
            if let Some(truncated) = shortened {
                obj.insert(field.to_string(), Value::String(truncated));
            }
        }
        if let Some(refs) = obj.get("references").and_then(|v| v.as_array()) {
            if refs.len() > 3 {
                let truncated: Vec<Value> = refs.iter().take(3).cloned().collect();
                let remaining = refs.len() - 3;
                obj.insert("references".to_string(), Value::Array(truncated));
                obj.insert(
                    "references_truncated".to_string(),
                    Value::Number(remaining.into()),
                );
            }
        }
    }
    value
}
/// Split a `type:value` query into a lowercased type and the raw value
/// (value keeps its case and any further colons). A query with no colon is
/// treated as a free-text search with type `"any"`.
fn parse_query(query: &str) -> (String, String) {
    match query.split_once(':') {
        Some((kind, value)) => (kind.to_lowercase(), value.to_string()),
        None => ("any".to_string(), query.to_string()),
    }
}
/// Locate the "issue list" inside an arbitrary tool output.
///
/// Checks a fixed set of well-known field names in priority order and
/// returns the first array found; a top-level array is the fallback.
/// Returns `None` when no candidate array exists.
fn find_issues_array(data: &Value) -> Option<Vec<Value>> {
    let issue_fields = [
        "issues",
        "findings",
        "violations",
        "warnings",
        "errors",
        "recommendations",
        "results",
        "failures",
        "diagnostics",
        "vulnerable_dependencies",
        "dependencies",
    ];
    for field in &issue_fields {
        if let Some(arr) = data.get(field).and_then(|v| v.as_array()) {
            // "vulnerable_dependencies" nests vulnerabilities per package;
            // flatten to one entry per vulnerability, each annotated with
            // the owning package's name/version (plus source_dir/language
            // when present) so filters can match on them. An empty array
            // skips this branch and is returned as-is below.
            if *field == "vulnerable_dependencies" && !arr.is_empty() {
                let mut flat = Vec::new();
                for dep in arr {
                    let dep_name = dep
                        .get("name")
                        .and_then(|v| v.as_str())
                        .unwrap_or("unknown");
                    let dep_version = dep.get("version").and_then(|v| v.as_str()).unwrap_or("?");
                    let source_dir = dep.get("source_dir").cloned();
                    let language = dep.get("language").cloned();
                    if let Some(vulns) = dep.get("vulnerabilities").and_then(|v| v.as_array()) {
                        for vuln in vulns {
                            let mut entry = vuln.clone();
                            if let Some(obj) = entry.as_object_mut() {
                                obj.insert(
                                    "package".to_string(),
                                    Value::String(dep_name.to_string()),
                                );
                                obj.insert(
                                    "package_version".to_string(),
                                    Value::String(dep_version.to_string()),
                                );
                                if let Some(sd) = &source_dir {
                                    obj.insert("source_dir".to_string(), sd.clone());
                                }
                                if let Some(lang) = &language {
                                    obj.insert("language".to_string(), lang.clone());
                                }
                            }
                            flat.push(entry);
                        }
                    }
                }
                return Some(flat);
            }
            return Some(arr.clone());
        }
    }
    // Fallback: the whole payload is already an array of issues.
    if let Some(arr) = data.as_array() {
        return Some(arr.clone());
    }
    None
}
/// Case-insensitive substring match of `filter_value` against the issue
/// field(s) implied by `filter_type`. Unknown filter types fall back to
/// matching against the issue's full JSON serialization.
fn matches_filter(issue: &Value, filter_type: &str, filter_value: &str) -> bool {
    let needle = filter_value.to_lowercase();
    // First PRESENT key wins (even if it is not a string — a non-string
    // value yields the empty haystack, matching the original or_else
    // chains' behavior), then compare lowercased.
    let field_contains = |keys: &[&str]| -> bool {
        keys.iter()
            .find_map(|k| issue.get(*k))
            .and_then(|v| v.as_str())
            .unwrap_or("")
            .to_lowercase()
            .contains(&needle)
    };
    match filter_type {
        "severity" | "level" => field_contains(&["severity", "level"]),
        "file" | "path" => field_contains(&["file", "path", "filename"]),
        "code" | "rule" => field_contains(&["code", "rule", "rule_id"]),
        "container" | "resource" => field_contains(&["container", "resource", "name"]),
        _ => serde_json::to_string(issue)
            .unwrap_or_default()
            .to_lowercase()
            .contains(&needle),
    }
}
/// Coarse classification of a stored output payload, used to pick a
/// retrieval strategy (see `detect_output_type`).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum OutputType {
    // Payload has a "projects" or "is_monorepo" key.
    MonorepoAnalysis,
    // Payload has both "languages" and "analysis_metadata" keys.
    ProjectAnalysis,
    // Payload has a "failures" key.
    LintResult,
    // Payload has a "recommendations" key.
    OptimizationResult,
    // Anything else.
    Generic,
}
/// Classify a stored payload by probing for marker keys, in priority
/// order: monorepo > project analysis > lint > optimization > generic.
pub fn detect_output_type(data: &Value) -> OutputType {
    let has = |key: &str| data.get(key).is_some();
    if has("projects") || has("is_monorepo") {
        OutputType::MonorepoAnalysis
    } else if has("languages") && has("analysis_metadata") {
        OutputType::ProjectAnalysis
    } else if has("failures") {
        OutputType::LintResult
    } else if has("recommendations") {
        OutputType::OptimizationResult
    } else {
        OutputType::Generic
    }
}
/// True when the payload is an analyze-project result (single project or
/// monorepo), which gets section-based retrieval instead of issue filters.
fn is_analyze_project_output(data: &Value) -> bool {
    let kind = detect_output_type(data);
    kind == OutputType::MonorepoAnalysis || kind == OutputType::ProjectAnalysis
}
/// Route an analyze-project query to the right extractor.
///
/// Supported queries: `section:<summary|projects|frameworks|languages|services>`,
/// `project:<name>`, `service:<name>`, `language:<name>`, `framework:<name>`.
/// Anything else (including the `compact:true` default) returns the
/// compacted full output.
pub fn retrieve_analyze_project(data: &Value, query: Option<&str>) -> Option<Value> {
    let (query_type, query_value) = parse_query(query.unwrap_or("compact:true"));
    if query_type == "section" {
        let section = match query_value.as_str() {
            "summary" => extract_summary(data),
            "projects" => extract_projects_list(data),
            "frameworks" => extract_all_frameworks(data),
            "languages" => extract_all_languages(data),
            "services" => extract_all_services(data),
            _ => compact_analyze_output(data),
        };
        return Some(section);
    }
    match query_type.as_str() {
        "project" => extract_project_by_name(data, &query_value),
        "service" => extract_service_by_name(data, &query_value),
        "language" => extract_language_details(data, &query_value),
        "framework" => extract_framework_details(data, &query_value),
        // "compact" and any unrecognized type both fall through here.
        _ => Some(compact_analyze_output(data)),
    }
}
/// Build a top-level summary object: root path, monorepo flag, project /
/// language / technology / service names and counts. Every field is
/// optional — only keys present in `data` appear in the summary.
fn extract_summary(data: &Value) -> Value {
    let mut summary = serde_json::Map::new();
    if let Some(root) = data.get("root_path").and_then(|v| v.as_str()) {
        summary.insert("root_path".to_string(), Value::String(root.to_string()));
    }
    if let Some(mono) = data.get("is_monorepo").and_then(|v| v.as_bool()) {
        summary.insert("is_monorepo".to_string(), Value::Bool(mono));
    }
    if let Some(root) = data.get("project_root").and_then(|v| v.as_str()) {
        summary.insert("project_root".to_string(), Value::String(root.to_string()));
    }
    if let Some(arch) = data.get("architecture_type").and_then(|v| v.as_str()) {
        summary.insert(
            "architecture_type".to_string(),
            Value::String(arch.to_string()),
        );
    }
    if let Some(projects) = data.get("projects").and_then(|v| v.as_array()) {
        summary.insert(
            "project_count".to_string(),
            Value::Number(projects.len().into()),
        );
        // Entries without a string "name" are silently skipped.
        let names: Vec<Value> = projects
            .iter()
            .filter_map(|p| p.get("name").and_then(|n| n.as_str()))
            .map(|n| Value::String(n.to_string()))
            .collect();
        summary.insert("project_names".to_string(), Value::Array(names));
    }
    if let Some(languages) = data.get("languages").and_then(|v| v.as_array()) {
        let names: Vec<Value> = languages
            .iter()
            .filter_map(|l| l.get("name").and_then(|n| n.as_str()))
            .map(|n| Value::String(n.to_string()))
            .collect();
        summary.insert("languages".to_string(), Value::Array(names));
    }
    if let Some(techs) = data.get("technologies").and_then(|v| v.as_array()) {
        let names: Vec<Value> = techs
            .iter()
            .filter_map(|t| t.get("name").and_then(|n| n.as_str()))
            .map(|n| Value::String(n.to_string()))
            .collect();
        summary.insert("technologies".to_string(), Value::Array(names));
    }
    if let Some(services) = data.get("services").and_then(|v| v.as_array()) {
        summary.insert(
            "services_count".to_string(),
            Value::Number(services.len().into()),
        );
        let service_names: Vec<Value> = services
            .iter()
            .filter_map(|s| s.get("name").and_then(|n| n.as_str()))
            .map(|n| Value::String(n.to_string()))
            .collect();
        // Unlike the other name lists, "services" is omitted when empty.
        if !service_names.is_empty() {
            summary.insert("services".to_string(), Value::Array(service_names));
        }
    }
    Value::Object(summary)
}
/// Produce `{ total_projects, projects: [...] }` where each project is
/// reduced to name, path, category, and its language/framework name lists.
/// A payload with no "projects" array yields an empty list.
fn extract_projects_list(data: &Value) -> Value {
    let mut list: Vec<Value> = Vec::new();
    if let Some(arr) = data.get("projects").and_then(|v| v.as_array()) {
        for p in arr {
            let mut proj = serde_json::Map::new();
            if let Some(name) = p.get("name") {
                proj.insert("name".to_string(), name.clone());
            }
            if let Some(path) = p.get("path") {
                proj.insert("path".to_string(), path.clone());
            }
            // NOTE: the source key is "project_category" but the output
            // key is shortened to "category".
            if let Some(cat) = p.get("project_category") {
                proj.insert("category".to_string(), cat.clone());
            }
            if let Some(analysis) = p.get("analysis") {
                for key in ["languages", "frameworks"] {
                    if let Some(items) = analysis.get(key).and_then(|v| v.as_array()) {
                        let names: Vec<Value> = items
                            .iter()
                            .filter_map(|item| item.get("name").and_then(|n| n.as_str()))
                            .map(|n| Value::String(n.to_string()))
                            .collect();
                        proj.insert(key.to_string(), Value::Array(names));
                    }
                }
            }
            list.push(Value::Object(proj));
        }
    }
    serde_json::json!({
        "total_projects": list.len(),
        "projects": list
    })
}
/// Find the first project whose name contains `name` (case-insensitive)
/// and return it in compacted form; `None` when nothing matches or the
/// payload has no "projects" array.
fn extract_project_by_name(data: &Value, name: &str) -> Option<Value> {
    let needle = name.to_lowercase();
    for project in data.get("projects")?.as_array()? {
        let hit = project
            .get("name")
            .and_then(|n| n.as_str())
            .is_some_and(|n| n.to_lowercase().contains(&needle));
        if hit {
            return Some(compact_project(project));
        }
    }
    None
}
fn extract_service_by_name(data: &Value, name: &str) -> Option<Value> {
let projects = data.get("projects").and_then(|v| v.as_array())?;
for project in projects {
if let Some(services) = project
.get("analysis")
.and_then(|a| a.get("services"))
.and_then(|s| s.as_array())
&& let Some(service) = services.iter().find(|s| {
s.get("name")
.and_then(|n| n.as_str())
.map(|n| n.to_lowercase().contains(&name.to_lowercase()))
.unwrap_or(false)
})
{
return Some(service.clone());
}
}
None
}
/// Collect compact per-language records matching `lang_name` ("*" matches
/// everything), searching both the top-level "languages" array and each
/// project's "analysis.languages". Always returns `Some`.
fn extract_language_details(data: &Value, lang_name: &str) -> Option<Value> {
    let mut results = Vec::new();
    // Shared matcher: appends one compact record per matching language,
    // tagged with the owning project's name when inside a monorepo project
    // (top-level languages pass an empty project name and get no tag).
    let process_languages = |languages: &[Value], proj_name: &str, results: &mut Vec<Value>| {
        for lang in languages {
            let name = lang.get("name").and_then(|n| n.as_str()).unwrap_or("");
            if lang_name == "*" || name.to_lowercase().contains(&lang_name.to_lowercase()) {
                let mut compact_lang = serde_json::Map::new();
                if !proj_name.is_empty() {
                    compact_lang
                        .insert("project".to_string(), Value::String(proj_name.to_string()));
                }
                compact_lang.insert(
                    "name".to_string(),
                    lang.get("name").cloned().unwrap_or(Value::Null),
                );
                compact_lang.insert(
                    "version".to_string(),
                    lang.get("version").cloned().unwrap_or(Value::Null),
                );
                compact_lang.insert(
                    "confidence".to_string(),
                    lang.get("confidence").cloned().unwrap_or(Value::Null),
                );
                // Collapse the (potentially huge) file list to its size.
                if let Some(files) = lang.get("files").and_then(|f| f.as_array()) {
                    compact_lang
                        .insert("file_count".to_string(), Value::Number(files.len().into()));
                }
                results.push(Value::Object(compact_lang));
            }
        }
    };
    if let Some(languages) = data.get("languages").and_then(|v| v.as_array()) {
        process_languages(languages, "", &mut results);
    }
    if let Some(projects) = data.get("projects").and_then(|v| v.as_array()) {
        for project in projects {
            let proj_name = project
                .get("name")
                .and_then(|n| n.as_str())
                .unwrap_or("unknown");
            if let Some(languages) = project
                .get("analysis")
                .and_then(|a| a.get("languages"))
                .and_then(|l| l.as_array())
            {
                process_languages(languages, proj_name, &mut results);
            }
        }
    }
    Some(serde_json::json!({
        "query": format!("language:{}", lang_name),
        "total_matches": results.len(),
        "results": results
    }))
}
/// Collect compact framework/technology records matching `fw_name` ("*"
/// matches everything), searching top-level "technologies" AND
/// "frameworks" arrays plus each project's "analysis.frameworks".
/// Always returns `Some`.
fn extract_framework_details(data: &Value, fw_name: &str) -> Option<Value> {
    let mut results = Vec::new();
    // Shared matcher: appends one record (name/version/category, plus the
    // owning project when non-empty) per matching entry.
    let process_techs = |techs: &[Value], proj_name: &str, results: &mut Vec<Value>| {
        for tech in techs {
            let name = tech.get("name").and_then(|n| n.as_str()).unwrap_or("");
            if fw_name == "*" || name.to_lowercase().contains(&fw_name.to_lowercase()) {
                let mut compact_fw = serde_json::Map::new();
                if !proj_name.is_empty() {
                    compact_fw.insert("project".to_string(), Value::String(proj_name.to_string()));
                }
                if let Some(v) = tech.get("name") {
                    compact_fw.insert("name".to_string(), v.clone());
                }
                if let Some(v) = tech.get("version") {
                    compact_fw.insert("version".to_string(), v.clone());
                }
                if let Some(v) = tech.get("category") {
                    compact_fw.insert("category".to_string(), v.clone());
                }
                results.push(Value::Object(compact_fw));
            }
        }
    };
    if let Some(techs) = data.get("technologies").and_then(|v| v.as_array()) {
        process_techs(techs, "", &mut results);
    }
    if let Some(fws) = data.get("frameworks").and_then(|v| v.as_array()) {
        process_techs(fws, "", &mut results);
    }
    if let Some(projects) = data.get("projects").and_then(|v| v.as_array()) {
        for project in projects {
            let proj_name = project
                .get("name")
                .and_then(|n| n.as_str())
                .unwrap_or("unknown");
            if let Some(frameworks) = project
                .get("analysis")
                .and_then(|a| a.get("frameworks"))
                .and_then(|f| f.as_array())
            {
                process_techs(frameworks, proj_name, &mut results);
            }
        }
    }
    Some(serde_json::json!({
        "query": format!("framework:{}", fw_name),
        "total_matches": results.len(),
        "results": results
    }))
}
/// All framework/technology entries ("framework:*" expansion).
fn extract_all_frameworks(data: &Value) -> Value {
    // unwrap_or_else: build the fallback JSON lazily instead of eagerly on
    // every call (clippy::or_fun_call); behavior is unchanged.
    extract_framework_details(data, "*").unwrap_or_else(|| serde_json::json!({"results": []}))
}
/// All language entries ("language:*" expansion).
fn extract_all_languages(data: &Value) -> Value {
    // unwrap_or_else: build the fallback JSON lazily instead of eagerly on
    // every call (clippy::or_fun_call); behavior is unchanged.
    extract_language_details(data, "*").unwrap_or_else(|| serde_json::json!({"results": []}))
}
/// Handler for `section:services`.
///
/// NOTE(review): despite its name, this currently delegates to
/// `extract_projects_list`, so callers get the per-project summary rather
/// than a flat list of services — confirm whether that is intentional.
fn extract_all_services(data: &Value) -> Value {
    extract_projects_list(data)
}
/// Shrink a full analyze-project payload for display: monorepo outputs
/// become root info plus compacted projects; single-project outputs keep
/// their metadata with language file lists collapsed to counts and
/// technologies reduced to name/version/category/confidence.
fn compact_analyze_output(data: &Value) -> Value {
    let mut result = serde_json::Map::new();
    if let Some(v) = data.get("root_path") {
        result.insert("root_path".to_string(), v.clone());
    }
    if let Some(v) = data.get("is_monorepo") {
        result.insert("is_monorepo".to_string(), v.clone());
    }
    // Monorepo path: compact each project and return early — the
    // single-project fields below do not apply.
    if let Some(projects) = data.get("projects").and_then(|v| v.as_array()) {
        let compacted: Vec<Value> = projects.iter().map(compact_project).collect();
        result.insert("projects".to_string(), Value::Array(compacted));
        return Value::Object(result);
    }
    if let Some(v) = data.get("project_root") {
        result.insert("project_root".to_string(), v.clone());
    }
    if let Some(v) = data.get("architecture_type") {
        result.insert("architecture_type".to_string(), v.clone());
    }
    if let Some(v) = data.get("project_type") {
        result.insert("project_type".to_string(), v.clone());
    }
    if let Some(languages) = data.get("languages").and_then(|v| v.as_array()) {
        let compacted: Vec<Value> = languages
            .iter()
            .map(|lang| {
                let mut compact_lang = serde_json::Map::new();
                for key in &["name", "version", "confidence"] {
                    if let Some(v) = lang.get(*key) {
                        compact_lang.insert(key.to_string(), v.clone());
                    }
                }
                // Collapse the file list to its size only.
                if let Some(files) = lang.get("files").and_then(|f| f.as_array()) {
                    compact_lang
                        .insert("file_count".to_string(), Value::Number(files.len().into()));
                }
                Value::Object(compact_lang)
            })
            .collect();
        result.insert("languages".to_string(), Value::Array(compacted));
    }
    if let Some(techs) = data.get("technologies").and_then(|v| v.as_array()) {
        let compacted: Vec<Value> = techs
            .iter()
            .map(|tech| {
                let mut compact_tech = serde_json::Map::new();
                for key in &["name", "version", "category", "confidence"] {
                    if let Some(v) = tech.get(*key) {
                        compact_tech.insert(key.to_string(), v.clone());
                    }
                }
                Value::Object(compact_tech)
            })
            .collect();
        result.insert("technologies".to_string(), Value::Array(compacted));
    }
    // Services and metadata are copied through verbatim.
    if let Some(services) = data.get("services").and_then(|v| v.as_array()) {
        result.insert("services".to_string(), Value::Array(services.clone()));
    }
    if let Some(meta) = data.get("analysis_metadata") {
        result.insert("analysis_metadata".to_string(), meta.clone());
    }
    Value::Object(result)
}
/// Compact a single project entry: keep identity fields, collapse each
/// language's file list to a `file_count`, and copy the remaining analysis
/// sections through verbatim.
fn compact_project(project: &Value) -> Value {
    let mut compact = serde_json::Map::new();
    for key in &["name", "path", "project_category"] {
        if let Some(v) = project.get(*key) {
            compact.insert(key.to_string(), v.clone());
        }
    }
    if let Some(analysis) = project.get("analysis") {
        let mut compact_analysis = serde_json::Map::new();
        if let Some(v) = analysis.get("project_root") {
            compact_analysis.insert("project_root".to_string(), v.clone());
        }
        if let Some(languages) = analysis.get("languages").and_then(|v| v.as_array()) {
            let compacted: Vec<Value> = languages
                .iter()
                .map(|lang| {
                    let mut compact_lang = serde_json::Map::new();
                    for key in &["name", "version", "confidence"] {
                        if let Some(v) = lang.get(*key) {
                            compact_lang.insert(key.to_string(), v.clone());
                        }
                    }
                    // Collapse the (potentially huge) file list to a count.
                    if let Some(files) = lang.get("files").and_then(|f| f.as_array()) {
                        compact_lang
                            .insert("file_count".to_string(), Value::Number(files.len().into()));
                    }
                    Value::Object(compact_lang)
                })
                .collect();
            compact_analysis.insert("languages".to_string(), Value::Array(compacted));
        }
        // These sections pass through unmodified when present.
        for key in &[
            "frameworks",
            "databases",
            "services",
            "build_tools",
            "package_managers",
        ] {
            if let Some(v) = analysis.get(*key) {
                compact_analysis.insert(key.to_string(), v.clone());
            }
        }
        compact.insert("analysis".to_string(), Value::Object(compact_analysis));
    }
    Value::Object(compact)
}
/// List every stored output envelope, newest first.
///
/// Files that are not `*.json`, unreadable, or not valid JSON are skipped
/// silently; a missing/uncreatable output directory yields an empty list.
pub fn list_outputs() -> Vec<OutputInfo> {
    let Ok(dir) = ensure_output_dir() else {
        return Vec::new();
    };
    let entries = match fs::read_dir(&dir) {
        Ok(entries) => entries,
        Err(_) => return Vec::new(),
    };
    let mut outputs: Vec<OutputInfo> = entries
        .flatten()
        .filter_map(|entry| {
            let filename = entry.file_name();
            let filename = filename.to_str()?;
            if !filename.ends_with(".json") {
                return None;
            }
            let ref_id = filename.trim_end_matches(".json").to_string();
            let content = fs::read_to_string(entry.path()).ok()?;
            let stored: Value = serde_json::from_str(&content).ok()?;
            Some(OutputInfo {
                ref_id,
                tool: stored
                    .get("tool")
                    .and_then(|v| v.as_str())
                    .unwrap_or("unknown")
                    .to_string(),
                timestamp: stored.get("timestamp").and_then(|v| v.as_u64()).unwrap_or(0),
                size_bytes: content.len(),
            })
        })
        .collect();
    // Newest first.
    outputs.sort_by(|a, b| b.timestamp.cmp(&a.timestamp));
    outputs
}
pub fn resolve_latest() -> Option<String> {
let output_dir = std::path::Path::new("/tmp/syncable-cli/outputs");
if !output_dir.exists() {
return None;
}
let mut newest: Option<(u64, String)> = None;
if let Ok(entries) = std::fs::read_dir(output_dir) {
for entry in entries.flatten() {
let path = entry.path();
if path.extension().map_or(true, |e| e != "json") {
continue;
}
if let Ok(contents) = std::fs::read_to_string(&path) {
if let Ok(data) = serde_json::from_str::<Value>(&contents) {
if let Some(ts) = data.get("timestamp").and_then(|v| v.as_u64()) {
if let Some(ref_id) = data.get("ref_id").and_then(|v| v.as_str()) {
match &newest {
Some((best_ts, _)) if ts > *best_ts => {
newest = Some((ts, ref_id.to_string()));
}
None => {
newest = Some((ts, ref_id.to_string()));
}
_ => {}
}
}
}
}
}
}
}
newest.map(|(_, ref_id)| ref_id)
}
/// Listing entry for one stored output file (see `list_outputs`).
#[derive(Debug, Clone)]
pub struct OutputInfo {
    // Filename stem of the stored `<ref_id>.json` file.
    pub ref_id: String,
    // Tool name from the stored envelope ("unknown" when absent).
    pub tool: String,
    // Unix seconds from the stored envelope (0 when absent).
    pub timestamp: u64,
    // On-disk size of the serialized envelope, in bytes.
    pub size_bytes: usize,
}
/// Delete stored output files whose embedded timestamp is older than
/// `MAX_AGE_SECS`. Unreadable or non-JSON files are left in place.
///
/// Fixed: `now - timestamp` underflows `u64` when a stored timestamp is in
/// the future (clock skew, or the future-dated fixture written by
/// `test_resolve_latest_returns_most_recent`) — that panics in debug
/// builds and wraps to a huge age in release, wrongly deleting the file.
/// `saturating_sub` clamps a future timestamp to age 0 (file retained).
pub fn cleanup_old_outputs() {
    let dir = match ensure_output_dir() {
        Ok(d) => d,
        Err(_) => return,
    };
    let now = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .map(|d| d.as_secs())
        .unwrap_or(0);
    if let Ok(entries) = fs::read_dir(&dir) {
        for entry in entries.flatten() {
            if let Ok(content) = fs::read_to_string(entry.path())
                && let Ok(stored) = serde_json::from_str::<Value>(&content)
            {
                // Missing/invalid timestamps default to 0 (maximal age),
                // so stale malformed envelopes still get removed.
                let timestamp = stored
                    .get("timestamp")
                    .and_then(|v| v.as_u64())
                    .unwrap_or(0);
                if now.saturating_sub(timestamp) > MAX_AGE_SECS {
                    let _ = fs::remove_file(entry.path());
                }
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // NOTE(review): these tests read and write the real shared
    // /tmp/syncable-cli/outputs directory, so they can interact with each
    // other and with concurrent runs (resolve_latest in particular depends
    // on no other file carrying a larger timestamp).

    // Round-trip: the stored "data" payload comes back exactly as written.
    #[test]
    fn test_store_and_retrieve() {
        let data = serde_json::json!({
            "issues": [
                { "code": "test1", "severity": "high", "file": "test.yaml" }
            ]
        });
        let ref_id = store_output(&data, "test_tool");
        assert!(ref_id.starts_with("test_tool_"));
        let retrieved = retrieve_output(&ref_id);
        assert!(retrieved.is_some());
        assert_eq!(retrieved.unwrap(), data);
    }

    // code:/severity: filters narrow the issue list and total_matches
    // reflects the count before pagination.
    #[test]
    fn test_filtered_retrieval() {
        let data = serde_json::json!({
            "issues": [
                { "code": "DL3008", "severity": "warning", "file": "Dockerfile1" },
                { "code": "DL3009", "severity": "info", "file": "Dockerfile2" },
                { "code": "DL3008", "severity": "warning", "file": "Dockerfile3" }
            ]
        });
        let ref_id = store_output(&data, "filter_test");
        let filtered = retrieve_filtered(&ref_id, Some("code:DL3008"), 100, 0);
        assert!(filtered.is_some());
        let results = filtered.unwrap();
        assert_eq!(results["total_matches"], 2);
        let filtered = retrieve_filtered(&ref_id, Some("severity:info"), 100, 0);
        assert!(filtered.is_some());
        let results = filtered.unwrap();
        assert_eq!(results["total_matches"], 1);
    }

    // "type:value" splits on the first colon; bare terms become "any".
    #[test]
    fn test_parse_query() {
        assert_eq!(
            parse_query("severity:critical"),
            ("severity".to_string(), "critical".to_string())
        );
        assert_eq!(
            parse_query("searchterm"),
            ("any".to_string(), "searchterm".to_string())
        );
    }

    // Payloads with projects/is_monorepo keys are analyze-project outputs;
    // plain issue lists are not.
    #[test]
    fn test_analyze_project_detection() {
        let analyze_data = serde_json::json!({
            "root_path": "/test",
            "is_monorepo": true,
            "projects": []
        });
        assert!(is_analyze_project_output(&analyze_data));
        let lint_data = serde_json::json!({
            "issues": [{ "code": "DL3008" }]
        });
        assert!(!is_analyze_project_output(&lint_data));
    }

    // extract_summary surfaces root path, monorepo flag, and project count.
    #[test]
    fn test_analyze_project_summary() {
        let data = serde_json::json!({
            "root_path": "/test/monorepo",
            "is_monorepo": true,
            "projects": [
                { "name": "api-gateway", "path": "services/api" },
                { "name": "web-app", "path": "apps/web" }
            ]
        });
        let summary = extract_summary(&data);
        assert_eq!(summary["root_path"], "/test/monorepo");
        assert_eq!(summary["is_monorepo"], true);
        assert_eq!(summary["project_count"], 2);
    }

    // A 1000-entry file list must be collapsed to file_count, shrinking
    // the serialized output by more than 10x.
    #[test]
    fn test_analyze_project_compact() {
        let files: Vec<String> = (0..1000).map(|i| format!("/src/file{}.ts", i)).collect();
        let data = serde_json::json!({
            "root_path": "/test",
            "is_monorepo": false,
            "projects": [{
                "name": "test-project",
                "path": "",
                "project_category": "Api",
                "analysis": {
                    "project_root": "/test",
                    "languages": [{
                        "name": "TypeScript",
                        "version": "5.0",
                        "confidence": 0.95,
                        "files": files
                    }],
                    "frameworks": [{
                        "name": "React",
                        "version": "18.0"
                    }]
                }
            }]
        });
        let ref_id = store_output(&data, "analyze_project_test");
        // No query on an analyze output -> default compact view.
        let result = retrieve_filtered(&ref_id, None, 100, 0);
        assert!(result.is_some());
        let compacted = result.unwrap();
        let project = &compacted["projects"][0];
        let lang = &project["analysis"]["languages"][0];
        assert_eq!(lang["name"], "TypeScript");
        assert_eq!(lang["file_count"], 1000);
        assert!(lang.get("files").is_none());
        let compacted_str = serde_json::to_string(&compacted).unwrap();
        let original_str = serde_json::to_string(&data).unwrap();
        assert!(compacted_str.len() < original_str.len() / 10);
    }

    // Exercises section:, language:, and framework: query routing on a
    // single stored analyze output.
    #[test]
    fn test_analyze_project_section_queries() {
        let data = serde_json::json!({
            "root_path": "/test",
            "is_monorepo": true,
            "projects": [{
                "name": "api-service",
                "path": "services/api",
                "project_category": "Api",
                "analysis": {
                    "languages": [{
                        "name": "Go",
                        "version": "1.21",
                        "confidence": 0.9,
                        "files": ["/main.go", "/handler.go"]
                    }],
                    "frameworks": [{
                        "name": "Gin",
                        "version": "1.9",
                        "category": "Web"
                    }],
                    "services": [{
                        "name": "api-http",
                        "type": "http",
                        "port": 8080
                    }]
                }
            }]
        });
        let ref_id = store_output(&data, "analyze_query_test");
        let projects = retrieve_filtered(&ref_id, Some("section:projects"), 100, 0);
        assert!(projects.is_some());
        assert_eq!(projects.as_ref().unwrap()["total_projects"], 1);
        let frameworks = retrieve_filtered(&ref_id, Some("section:frameworks"), 100, 0);
        assert!(frameworks.is_some());
        assert_eq!(frameworks.as_ref().unwrap()["total_matches"], 1);
        assert_eq!(frameworks.as_ref().unwrap()["results"][0]["name"], "Gin");
        let languages = retrieve_filtered(&ref_id, Some("section:languages"), 100, 0);
        assert!(languages.is_some());
        assert_eq!(languages.as_ref().unwrap()["total_matches"], 1);
        assert_eq!(languages.as_ref().unwrap()["results"][0]["name"], "Go");
        assert_eq!(languages.as_ref().unwrap()["results"][0]["file_count"], 2);
        let go = retrieve_filtered(&ref_id, Some("language:Go"), 100, 0);
        assert!(go.is_some());
        assert_eq!(go.as_ref().unwrap()["total_matches"], 1);
        let gin = retrieve_filtered(&ref_id, Some("framework:Gin"), 100, 0);
        assert!(gin.is_some());
        assert_eq!(gin.as_ref().unwrap()["total_matches"], 1);
    }

    // "failures" is one of the recognized issue-array field names.
    #[test]
    fn test_find_issues_array_failures_field() {
        let data = serde_json::json!({
            "failures": [
                {"code": "DL3008", "severity": "warning", "message": "Pin versions"},
                {"code": "DL3009", "severity": "info", "message": "Delete apt cache"}
            ]
        });
        let result = find_issues_array(&data);
        assert!(result.is_some());
        assert_eq!(result.unwrap().len(), 2);
    }

    // "diagnostics" is also a recognized issue-array field name.
    #[test]
    fn test_find_issues_array_diagnostics_field() {
        let data = serde_json::json!({
            "diagnostics": [
                {"code": "DC001", "severity": "error", "message": "Invalid compose version"}
            ]
        });
        let result = find_issues_array(&data);
        assert!(result.is_some());
        assert_eq!(result.unwrap().len(), 1);
    }

    // The entry with the largest embedded timestamp wins; the far-future
    // timestamp is used to outrank any file written by a concurrent test.
    #[test]
    fn test_resolve_latest_returns_most_recent() {
        use std::fs;
        use std::path::Path;
        let output_dir = Path::new("/tmp/syncable-cli/outputs");
        fs::create_dir_all(output_dir).unwrap();
        // Clean slate in case a previous run left these fixtures behind.
        let _ = fs::remove_file(output_dir.join("test_old_aaa111.json"));
        let _ = fs::remove_file(output_dir.join("test_new_bbb222.json"));
        let now = std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .unwrap()
            .as_secs();
        let old_data = serde_json::json!({
            "ref_id": "test_old_aaa111",
            "tool": "test_old",
            "timestamp": now - 60,
            "data": {}
        });
        let new_data = serde_json::json!({
            "ref_id": "test_new_bbb222",
            "tool": "test_new",
            "timestamp": now + 9_999_999,
            "data": {}
        });
        fs::write(
            output_dir.join("test_old_aaa111.json"),
            serde_json::to_string(&old_data).unwrap(),
        )
        .unwrap();
        fs::write(
            output_dir.join("test_new_bbb222.json"),
            serde_json::to_string(&new_data).unwrap(),
        )
        .unwrap();
        let latest = resolve_latest();
        assert!(latest.is_some());
        assert_eq!(latest.unwrap(), "test_new_bbb222");
        let _ = fs::remove_file(output_dir.join("test_old_aaa111.json"));
        let _ = fs::remove_file(output_dir.join("test_new_bbb222.json"));
    }
}