use anyhow::Result;
use canon_core::proof::{self, GitContext, ProofReceipt};
use canon_embed::EmbeddingEngine;
use canon_store::GraphStore;
use crate::search::{QueryEngine, SearchResult};
use canon_core::DeviceIdentity;
use serde::{Deserialize, Serialize};
use std::path::PathBuf;
use std::sync::{Arc, Mutex};
use tracing::{error, info};
/// MCP (Model Context Protocol) JSON-RPC server for the Canon Protocol
/// substrate: file indexing, hybrid search, and signed proof receipts.
pub struct CanonServer {
    /// Graph store holding documents, chunks, embeddings, and edges;
    /// shared behind a mutex with other components.
    graph: Arc<Mutex<GraphStore>>,
    /// Hybrid (semantic + lexical) search engine over the substrate.
    query_engine: Arc<QueryEngine>,
    /// Embedding model used to vectorize chunk text at index time.
    embedder: Arc<EmbeddingEngine>,
    /// Device identity used to sign proof receipts (Ed25519, per the
    /// verification messages emitted by `tool_verify`).
    identity: DeviceIdentity,
    /// Base data directory; receipts are written under `<data_dir>/proofs/`.
    data_dir: PathBuf,
    /// Optional git metadata embedded into signed receipts; `None` when
    /// the server runs outside a git repository.
    git_context: Option<GitContext>,
}
/// Arguments for the `canon_index` tool.
#[derive(Debug, Deserialize)]
struct IndexInput {
    /// File or directory path to index.
    path: String,
    /// Recurse into subdirectories when `path` is a directory (default true).
    #[serde(default = "default_true")]
    recursive: bool,
}
/// Arguments for the `canon_search` tool.
#[derive(Debug, Deserialize)]
struct SearchInput {
    /// Free-text search query.
    query: String,
    /// Maximum number of results to return (default 5).
    #[serde(default = "default_top_k")]
    top_k: usize,
}
/// Arguments for the `canon_proof` tool.
#[derive(Debug, Deserialize)]
struct ProofInput {
    /// Query to generate a proof receipt for.
    query: String,
    /// How many chunks to include in the receipt (default 5).
    #[serde(default = "default_top_k")]
    top_k: usize,
    /// Persist the receipt to `<data_dir>/proofs/` (default true).
    #[serde(default = "default_true")]
    save: bool,
}
/// Arguments for the `canon_verify` tool.
#[derive(Debug, Deserialize)]
struct VerifyInput {
    /// Proof receipt as inline JSON, or a path to a receipt file
    /// (distinguished by whether the trimmed string starts with `{`).
    receipt: String,
}
/// Serde default helper: opt-in boolean flags default to `true`.
fn default_true() -> bool { true }
/// Serde default helper: result-count fields default to 5.
fn default_top_k() -> usize { 5 }
/// Incoming JSON-RPC 2.0 request envelope.
#[derive(Debug, Deserialize)]
struct JsonRpcRequest {
    /// Protocol version tag; expected "2.0" (not validated by this server).
    jsonrpc: String,
    /// Request id; absent for notifications.
    id: Option<serde_json::Value>,
    /// Method name, e.g. "initialize", "tools/list", "tools/call".
    method: String,
    /// Method parameters; `serde_json::Value::Null` when omitted.
    #[serde(default)]
    params: serde_json::Value,
}
/// Outgoing JSON-RPC 2.0 response. Exactly one of `result`/`error` is set;
/// the unset field is omitted from the serialized JSON.
#[derive(Debug, Serialize)]
struct JsonRpcResponse {
    jsonrpc: String,
    /// Echoes the request id (Null when the request had none or failed to parse).
    id: serde_json::Value,
    #[serde(skip_serializing_if = "Option::is_none")]
    result: Option<serde_json::Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    error: Option<JsonRpcError>,
}
/// JSON-RPC 2.0 error object (codes used here: -32700 parse error,
/// -32601 method not found).
#[derive(Debug, Serialize)]
struct JsonRpcError {
    code: i32,
    message: String,
}
/// Tool descriptor returned by the `tools/list` method.
#[derive(Debug, Serialize)]
struct ToolInfo {
    name: String,
    description: String,
    /// JSON Schema describing the tool's arguments (serialized as "inputSchema").
    #[serde(rename = "inputSchema")]
    input_schema: serde_json::Value,
}
/// One content item inside a tool result; this server always emits
/// `content_type == "text"`.
#[derive(Debug, Serialize)]
struct TextContent {
    /// Serialized as "type" per the MCP content schema.
    #[serde(rename = "type")]
    content_type: String,
    text: String,
}
/// Result of a `tools/call` invocation: content items plus an error flag.
#[derive(Debug, Serialize)]
struct CallToolResult {
    content: Vec<TextContent>,
    /// True when the tool failed; omitted from the JSON when false.
    #[serde(rename = "isError", skip_serializing_if = "std::ops::Not::not")]
    is_error: bool,
}
impl CallToolResult {
    /// Wrap `text` as a single-item text payload with the given error flag.
    fn with_text(text: String, is_error: bool) -> Self {
        let content = vec![TextContent {
            content_type: "text".to_string(),
            text,
        }];
        Self { content, is_error }
    }

    /// Successful tool invocation carrying `text`.
    fn success(text: String) -> Self {
        Self::with_text(text, false)
    }

    /// Failed tool invocation; `text` describes the failure.
    fn error(text: String) -> Self {
        Self::with_text(text, true)
    }
}
impl CanonServer {
pub fn new(
graph: Arc<Mutex<GraphStore>>,
query_engine: Arc<QueryEngine>,
embedder: Arc<EmbeddingEngine>,
identity: DeviceIdentity,
data_dir: PathBuf,
git_context: Option<GitContext>,
) -> Self {
Self {
graph,
query_engine,
embedder,
identity,
data_dir,
git_context,
}
}
pub async fn run_stdio(&self) -> Result<()> {
use tokio::io::{AsyncBufReadExt, AsyncWriteExt, BufReader};
let stdin = tokio::io::stdin();
let mut stdout = tokio::io::stdout();
let reader = BufReader::new(stdin);
let mut lines = reader.lines();
while let Ok(Some(line)) = lines.next_line().await {
let line = line.trim().to_string();
if line.is_empty() {
continue;
}
let response = self.handle_message(&line).await;
if let Some(resp) = response {
let json = serde_json::to_string(&resp).unwrap_or_default();
stdout
.write_all(json.as_bytes())
.await?;
stdout.write_all(b"\n").await?;
stdout.flush().await?;
}
}
Ok(())
}
async fn handle_message(&self, msg: &str) -> Option<JsonRpcResponse> {
let request: JsonRpcRequest = match serde_json::from_str(msg) {
Ok(r) => r,
Err(e) => {
error!("Failed to parse JSON-RPC request: {}", e);
return Some(JsonRpcResponse {
jsonrpc: "2.0".to_string(),
id: serde_json::Value::Null,
result: None,
error: Some(JsonRpcError {
code: -32700,
message: format!("Parse error: {}", e),
}),
});
}
};
let id = request.id.clone().unwrap_or(serde_json::Value::Null);
match request.method.as_str() {
"initialize" => {
let result = serde_json::json!({
"protocolVersion": "2024-11-05",
"capabilities": {
"tools": {}
},
"serverInfo": {
"name": "canon-protocol",
"version": env!("CARGO_PKG_VERSION")
},
"instructions": "Canon Protocol is a verified knowledge layer for AI-assisted development. Use canon_search to find relevant code — every search generates a signed proof receipt proving exactly what Canon served to you. Use canon_index to add files to the substrate, canon_state to check the cryptographic state, canon_proof to generate a proof for a specific query, and canon_verify to verify any proof receipt."
});
Some(JsonRpcResponse {
jsonrpc: "2.0".to_string(),
id,
result: Some(result),
error: None,
})
}
"notifications/initialized" => {
None
}
"tools/list" => {
let tools = self.list_tools();
Some(JsonRpcResponse {
jsonrpc: "2.0".to_string(),
id,
result: Some(serde_json::json!({ "tools": tools })),
error: None,
})
}
"tools/call" => {
let tool_name = request.params.get("name")
.and_then(|v| v.as_str())
.unwrap_or("");
let arguments = request.params.get("arguments")
.cloned()
.unwrap_or(serde_json::json!({}));
let result = self.call_tool(tool_name, arguments).await;
Some(JsonRpcResponse {
jsonrpc: "2.0".to_string(),
id,
result: Some(serde_json::to_value(result).unwrap()),
error: None,
})
}
_ => {
Some(JsonRpcResponse {
jsonrpc: "2.0".to_string(),
id,
result: None,
error: Some(JsonRpcError {
code: -32601,
message: format!("Method not found: {}", request.method),
}),
})
}
}
}
    /// Describe the five Canon tools (index, search, state, proof, verify)
    /// for the MCP `tools/list` response; each carries a JSON Schema for its
    /// arguments matching the corresponding `*Input` deserialization struct.
    fn list_tools(&self) -> Vec<ToolInfo> {
        vec![
            // canon_index -> tool_index / IndexInput
            ToolInfo {
                name: "canon_index".to_string(),
                description: "Index files into the verified Canon Protocol substrate. Parses, chunks, embeds, and stores files with cryptographic commitments.".to_string(),
                input_schema: serde_json::json!({
                    "type": "object",
                    "properties": {
                        "path": {
                            "type": "string",
                            "description": "File or directory path to index"
                        },
                        "recursive": {
                            "type": "boolean",
                            "description": "Index directory recursively (default true)",
                            "default": true
                        }
                    },
                    "required": ["path"]
                }),
            },
            // canon_search -> tool_search / SearchInput
            ToolInfo {
                name: "canon_search".to_string(),
                description: "Search the Canon Protocol knowledge base with hybrid semantic + lexical search. Automatically generates a signed proof receipt for every search, saved to .canon/proofs/.".to_string(),
                input_schema: serde_json::json!({
                    "type": "object",
                    "properties": {
                        "query": {
                            "type": "string",
                            "description": "Search query"
                        },
                        "top_k": {
                            "type": "number",
                            "description": "Max results (default 5)",
                            "default": 5
                        }
                    },
                    "required": ["query"]
                }),
            },
            // canon_state -> tool_state (no arguments)
            ToolInfo {
                name: "canon_state".to_string(),
                description: "Get the current Canon Protocol state root and substrate statistics.".to_string(),
                input_schema: serde_json::json!({
                    "type": "object",
                    "properties": {}
                }),
            },
            // canon_proof -> tool_proof / ProofInput
            ToolInfo {
                name: "canon_proof".to_string(),
                description: "Generate a cryptographic proof receipt for a query. Creates an immutable, verifiable record of what Canon's substrate returned. Receipts are saved to .canon/proofs/ and can be committed alongside code.".to_string(),
                input_schema: serde_json::json!({
                    "type": "object",
                    "properties": {
                        "query": {
                            "type": "string",
                            "description": "The query to generate a proof for"
                        },
                        "top_k": {
                            "type": "number",
                            "description": "How many chunks to include (default 5)",
                            "default": 5
                        },
                        "save": {
                            "type": "boolean",
                            "description": "Save receipt to .canon/proofs/ (default true)",
                            "default": true
                        }
                    },
                    "required": ["query"]
                }),
            },
            // canon_verify -> tool_verify / VerifyInput
            ToolInfo {
                name: "canon_verify".to_string(),
                description: "Verify a Canon Protocol proof receipt. Checks Ed25519 signature, Merkle proofs, and context hash.".to_string(),
                input_schema: serde_json::json!({
                    "type": "object",
                    "properties": {
                        "receipt": {
                            "type": "string",
                            "description": "Proof receipt JSON (inline or file path)"
                        }
                    },
                    "required": ["receipt"]
                }),
            },
        ]
    }
async fn call_tool(&self, name: &str, arguments: serde_json::Value) -> CallToolResult {
match name {
"canon_index" => self.tool_index(arguments).await,
"canon_search" => self.tool_search(arguments).await,
"canon_state" => self.tool_state().await,
"canon_proof" => self.tool_proof(arguments).await,
"canon_verify" => self.tool_verify(arguments).await,
_ => CallToolResult::error(format!("Unknown tool: {}", name)),
}
}
async fn tool_index(&self, args: serde_json::Value) -> CallToolResult {
let input: IndexInput = match serde_json::from_value(args) {
Ok(i) => i,
Err(e) => return CallToolResult::error(format!("Invalid input: {}", e)),
};
let path = PathBuf::from(&input.path);
if !path.exists() {
return CallToolResult::error(format!("Path does not exist: {}", input.path));
}
let files = if path.is_dir() {
collect_files(&path, input.recursive)
} else {
vec![path]
};
let mut indexed = 0;
let mut errors = Vec::new();
for file_path in &files {
match self.index_single_file(file_path) {
Ok(_) => indexed += 1,
Err(e) => errors.push(format!("{}: {}", file_path.display(), e)),
}
}
let state_root = {
let graph = self.graph.lock().unwrap();
graph.compute_merkle_root().unwrap_or([0u8; 32])
};
let root_hex: String = state_root.iter().map(|b| format!("{:02x}", b)).collect();
let mut msg = format!(
"Indexed {} files. State root: {}",
indexed,
&root_hex[..8]
);
if !errors.is_empty() {
msg.push_str(&format!("\n\nErrors ({}):\n{}", errors.len(), errors.join("\n")));
}
CallToolResult::success(msg)
}
    /// Parse, chunk, embed, and store one file in the graph.
    ///
    /// No-ops for empty files and for files whose content hash is unchanged
    /// since the last indexing; a changed file's old document is deleted first.
    fn index_single_file(&self, file_path: &std::path::Path) -> Result<()> {
        // Prefer the structured parser; fall back to raw text for formats it
        // cannot handle.
        let content = match crate::parser::parse_file(file_path) {
            Ok(text) => text,
            Err(_) => {
                std::fs::read_to_string(file_path)
                    .map_err(|e| anyhow::anyhow!("Cannot read file: {}", e))?
            }
        };
        if content.trim().is_empty() {
            return Ok(());
        }
        // File modification time as Unix seconds; 0 if metadata/mtime is
        // unavailable.
        let mtime = std::fs::metadata(file_path)
            .map(|m| m.modified().ok())
            .ok()
            .flatten()
            .map(|t| t.duration_since(std::time::UNIX_EPOCH).unwrap_or_default().as_secs() as i64)
            .unwrap_or(0);
        let mut doc = canon_core::Document::new(file_path.to_path_buf(), content.as_bytes(), mtime);
        // NOTE(review): this lock is held for the rest of the function,
        // including the embed_batch() call below — confirm that serializing
        // embedding work behind the graph mutex is acceptable.
        let mut graph = self.graph.lock().unwrap();
        if let Ok(Some(existing)) = graph.get_document_by_path(file_path) {
            if existing.hash == doc.hash {
                // Content unchanged since last indexing — nothing to do.
                return Ok(());
            }
            // Content changed: remove the stale document before re-inserting.
            graph.delete_document(existing.id)?;
        }
        let chunker = crate::parser::Chunker::new(crate::parser::ChunkConfig::default());
        let chunks = chunker.chunk(doc.id, &content)
            .map_err(|e| anyhow::anyhow!("Chunking failed: {}", e))?;
        // Commit the document to its chunk hashes via a hierarchical hash.
        let chunk_hashes: Vec<[u8; 32]> = chunks.iter().map(|c| c.text_hash).collect();
        let hier_hash = canon_core::Document::compute_hierarchical_hash(&chunk_hashes);
        doc.set_hierarchical_hash(hier_hash);
        graph.insert_document(&doc)?;
        let texts: Vec<&str> = chunks.iter().map(|c| c.text.as_str()).collect();
        let vectors = self.embedder.embed_batch(&texts)
            .map_err(|e| anyhow::anyhow!("Embedding failed: {}", e))?;
        // Store each chunk with its embedding and link doc->chunk->embedding.
        for (chunk, vector) in chunks.iter().zip(vectors.iter()) {
            graph.insert_chunk(chunk)?;
            let emb = canon_core::Embedding::new(chunk.id, vector, self.embedder.model_hash(), 0);
            graph.insert_embedding(&emb)?;
            graph.add_edge(&canon_core::Edge::doc_to_chunk(doc.id, chunk.id))?;
            graph.add_edge(&canon_core::Edge::chunk_to_embedding(chunk.id, emb.id))?;
        }
        info!("Indexed: {} ({} chunks)", file_path.display(), chunks.len());
        Ok(())
    }
async fn tool_search(&self, args: serde_json::Value) -> CallToolResult {
let input: SearchInput = match serde_json::from_value(args) {
Ok(i) => i,
Err(e) => return CallToolResult::error(format!("Invalid input: {}", e)),
};
let results = match self.query_engine.search(&input.query, input.top_k) {
Ok(r) => r,
Err(e) => return CallToolResult::error(format!("Search failed: {}", e)),
};
let state_root = {
let graph = self.graph.lock().unwrap();
graph.compute_merkle_root().unwrap_or([0u8; 32])
};
let root_hex: String = state_root.iter().map(|b| format!("{:02x}", b)).collect();
let mut output = format!("Found {} results (state_root: {})\n\n", results.len(), &root_hex[..8]);
for (i, r) in results.iter().enumerate() {
let chunk_id_hex: String = r.chunk.id.as_bytes().iter().map(|b| format!("{:02x}", b)).collect();
output.push_str(&format!(
"--- Result {} ---\nPath: {}\nChunk ID: {}\nScore: {:.4}\nSequence: {}\n\n{}\n\n",
i + 1,
r.doc_path,
&chunk_id_hex[..8],
r.score,
r.chunk.sequence,
r.chunk.text.trim(),
));
}
if !results.is_empty() {
match self.generate_proof_receipt(&input.query, &results) {
Ok(receipt) => {
let timestamp = receipt.timestamp.replace(':', "-").replace('.', "-");
let root_prefix: String = receipt.state_root.iter().take(4).map(|b| format!("{:02x}", b)).collect();
let filename = format!("{}_{}.json", timestamp, root_prefix);
let path = self.data_dir.join("proofs").join(&filename);
if let Ok(json) = serde_json::to_string_pretty(&receipt) {
if let Err(e) = std::fs::write(&path, &json) {
error!("Failed to save auto-proof: {}", e);
} else {
output.push_str(&format!(
"---\nProof receipt saved: {} (state_root: {}, {} chunks signed)\n",
path.display(), &root_hex[..8], receipt.chunk_proofs.len()
));
}
}
}
Err(e) => {
error!("Auto-proof generation failed: {}", e);
}
}
}
CallToolResult::success(output)
}
async fn tool_state(&self) -> CallToolResult {
let graph = self.graph.lock().unwrap();
let state_root = match graph.compute_merkle_root() {
Ok(r) => r,
Err(e) => return CallToolResult::error(format!("Failed to compute state: {}", e)),
};
let stats = match graph.stats() {
Ok(s) => s,
Err(e) => return CallToolResult::error(format!("Failed to get stats: {}", e)),
};
let root_hex: String = state_root.iter().map(|b| format!("{:02x}", b)).collect();
let device_id_hex: String = self.identity.device_id.iter().map(|b| format!("{:02x}", b)).collect();
let output = format!(
"Canon Protocol State\n\n\
State Root: {}\n\
Documents: {}\n\
Chunks: {}\n\
Embeddings: {}\n\
Edges: {}\n\
Device ID: {}",
root_hex,
stats.documents,
stats.chunks,
stats.embeddings,
stats.edges,
device_id_hex,
);
CallToolResult::success(output)
}
async fn tool_proof(&self, args: serde_json::Value) -> CallToolResult {
let input: ProofInput = match serde_json::from_value(args) {
Ok(i) => i,
Err(e) => return CallToolResult::error(format!("Invalid input: {}", e)),
};
let results = match self.query_engine.search(&input.query, input.top_k) {
Ok(r) => r,
Err(e) => return CallToolResult::error(format!("Search failed: {}", e)),
};
if results.is_empty() {
return CallToolResult::error("No results found for query. Index files first with canon_index.".to_string());
}
let receipt = match self.generate_proof_receipt(&input.query, &results) {
Ok(r) => r,
Err(e) => return CallToolResult::error(format!("Proof generation failed: {}", e)),
};
let save_path = if input.save {
let timestamp = receipt.timestamp.replace(':', "-").replace('.', "-");
let root_prefix: String = receipt.state_root.iter().take(4).map(|b| format!("{:02x}", b)).collect();
let filename = format!("{}_{}.json", timestamp, root_prefix);
let path = self.data_dir.join("proofs").join(&filename);
match serde_json::to_string_pretty(&receipt) {
Ok(json) => {
if let Err(e) = std::fs::write(&path, &json) {
error!("Failed to save proof receipt: {}", e);
None
} else {
Some(path)
}
}
Err(e) => {
error!("Failed to serialize proof receipt: {}", e);
None
}
}
} else {
None
};
let root_hex: String = receipt.state_root.iter().map(|b| format!("{:02x}", b)).collect();
let device_hex: String = receipt.device_id.iter().map(|b| format!("{:02x}", b)).collect();
let mut output = format!(
"Proof Receipt Generated\n\n\
Query: {}\n\
Chunks: {} from {} sources\n\
State Root: {}\n\
Signed by: {}\n\
Timestamp: {}",
receipt.query,
receipt.chunk_proofs.len(),
receipt.sources.len(),
&root_hex[..8],
&device_hex[..8],
receipt.timestamp,
);
if let Some(ref gc) = receipt.git {
if let Some(ref commit) = gc.commit {
output.push_str(&format!("\nGit Commit: {}", &commit[..8.min(commit.len())]));
}
if let Some(ref branch) = gc.branch {
output.push_str(&format!("\nGit Branch: {}", branch));
}
}
output.push_str("\n\nSources:");
for src in &receipt.sources {
output.push_str(&format!(
"\n - {} (seq={}, score={:.3})",
src.document_path, src.chunk_sequence, src.relevance_score
));
}
match receipt.verify_all() {
Ok(()) => output.push_str("\n\nVerification: ALL CHECKS PASSED"),
Err(e) => output.push_str(&format!("\n\nVerification: FAILED - {}", e)),
}
if let Some(path) = save_path {
output.push_str(&format!("\n\nSaved to: {}", path.display()));
}
CallToolResult::success(output)
}
fn generate_proof_receipt(
&self,
query: &str,
results: &[SearchResult],
) -> Result<ProofReceipt> {
let query_hash = *blake3::hash(query.as_bytes()).as_bytes();
let assembler = canon_core::ContextAssembler::with_budget(4000);
let scored_chunks: Vec<canon_core::ScoredChunk> = results
.iter()
.map(|r| canon_core::ScoredChunk {
chunk: r.chunk.clone(),
score: r.score,
document_path: r.doc_path.clone(),
})
.collect();
let state_root = {
let graph = self.graph.lock().unwrap();
graph.compute_merkle_root()?
};
let assembled = assembler.assemble(scored_chunks, query, state_root);
let context_string = canon_core::ContextAssembler::format(&assembled);
let context_hash = *blake3::hash(context_string.as_bytes()).as_bytes();
let (sorted_chunk_ids, sorted_chunk_hashes, chunk_tree_root) = {
let graph = self.graph.lock().unwrap();
let sorted = graph.get_sorted_chunk_hashes()?;
let hashes: Vec<[u8; 32]> = sorted.iter().map(|(_, h)| *h).collect();
let root = proof::compute_chunk_tree_root(&hashes);
(sorted, hashes, root)
};
let mut chunk_proofs = Vec::new();
let mut sources = Vec::new();
for result in results {
let chunk_id_bytes = *result.chunk.id.as_bytes();
let index = sorted_chunk_ids
.iter()
.position(|(id, _)| *id == chunk_id_bytes);
if let Some(idx) = index {
let chunk_proof = proof::build_chunk_proof(
chunk_id_bytes,
result.chunk.text_hash,
idx,
&sorted_chunk_hashes,
);
chunk_proofs.push(chunk_proof);
}
sources.push(proof::SourceRef {
document_path: result.doc_path.clone(),
chunk_id: chunk_id_bytes,
chunk_text: result.chunk.text.clone(),
chunk_sequence: result.chunk.sequence,
relevance_score: result.score,
});
}
let now = std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH)
.unwrap_or_default();
let secs = now.as_secs();
let timestamp = format_timestamp(secs);
let mut receipt = ProofReceipt {
version: 1,
query: query.to_string(),
query_hash,
timestamp,
context_hash,
state_root,
chunk_tree_root,
chunk_proofs,
sources,
signature: [0u8; 64],
signer_public_key: self.identity.public_key,
device_id: self.identity.device_id,
git: self.git_context.clone(),
};
let sig = self.identity.sign(&receipt.signing_bytes());
receipt.signature = sig;
Ok(receipt)
}
async fn tool_verify(&self, args: serde_json::Value) -> CallToolResult {
let input: VerifyInput = match serde_json::from_value(args) {
Ok(i) => i,
Err(e) => return CallToolResult::error(format!("Invalid input: {}", e)),
};
let receipt_json = if input.receipt.trim_start().starts_with('{') {
input.receipt.clone()
} else {
match std::fs::read_to_string(&input.receipt) {
Ok(content) => content,
Err(e) => return CallToolResult::error(format!("Cannot read file '{}': {}", input.receipt, e)),
}
};
let receipt: ProofReceipt = match serde_json::from_str(&receipt_json) {
Ok(r) => r,
Err(e) => return CallToolResult::error(format!("Invalid proof receipt JSON: {}", e)),
};
let mut output = format!(
"Proof Receipt Verification\n\n\
Query: {}\n\
Timestamp: {}\n\
Version: {}\n\n\
Checks:\n",
receipt.query,
receipt.timestamp,
receipt.version,
);
let sig_ok = match receipt.verify_signature() {
Ok(()) => {
output.push_str(" [PASS] Ed25519 signature valid\n");
true
}
Err(e) => {
output.push_str(&format!(" [FAIL] Ed25519 signature: {}\n", e));
false
}
};
let proofs_ok = match receipt.verify_chunk_proofs() {
Ok(()) => {
output.push_str(&format!(
" [PASS] Merkle proofs valid ({}/{} chunks)\n",
receipt.chunk_proofs.len(),
receipt.chunk_proofs.len()
));
true
}
Err(e) => {
output.push_str(&format!(" [FAIL] Merkle proofs: {}\n", e));
false
}
};
let git_ok = if receipt.git.is_some() {
output.push_str(" [PASS] Git context cryptographically bound to signature\n");
true
} else {
output.push_str(" [INFO] No git context (proof generated outside git repo)\n");
true
};
if sig_ok && proofs_ok {
output.push_str("\nResult: ALL CHECKS PASSED");
} else {
output.push_str("\nResult: VERIFICATION FAILED");
}
if let Some(ref gc) = receipt.git {
output.push_str("\n\nGit Context (signed):");
if let Some(ref author) = gc.author {
output.push_str(&format!("\n Author: {}", author));
}
if let Some(ref commit) = gc.commit {
output.push_str(&format!("\n Commit: {}", commit));
}
if let Some(ref branch) = gc.branch {
output.push_str(&format!("\n Branch: {}", branch));
}
output.push_str(&format!("\n Dirty: {}", gc.dirty));
}
output.push_str(&format!("\n\nWhat Canon served ({} chunks):", receipt.sources.len()));
for (i, src) in receipt.sources.iter().enumerate() {
output.push_str(&format!(
"\n\n--- Chunk {} of {} ---\n\
Source: {} (sequence {})\n\
Relevance: {:.3}\n\
Content:\n{}",
i + 1,
receipt.sources.len(),
src.document_path,
src.chunk_sequence,
src.relevance_score,
src.chunk_text.trim(),
));
}
CallToolResult::success(output)
}
}
/// Gather indexable files under `dir`, optionally recursing into
/// subdirectories. Hidden entries and well-known build/dependency
/// directories are skipped; unreadable directories yield an empty list.
fn collect_files(dir: &std::path::Path, recursive: bool) -> Vec<PathBuf> {
    let mut found = Vec::new();
    let entries = match std::fs::read_dir(dir) {
        Ok(e) => e,
        Err(_) => return found,
    };
    for entry in entries.flatten() {
        let path = entry.path();
        let name = path
            .file_name()
            .map(|n| n.to_string_lossy().to_string())
            .unwrap_or_default();
        // Skip dotfiles/dot-dirs and common generated trees.
        let skip = name.starts_with('.')
            || matches!(name.as_str(), "node_modules" | "target" | "__pycache__");
        if skip {
            continue;
        }
        if path.is_dir() {
            if recursive {
                found.extend(collect_files(&path, true));
            }
        } else if is_indexable_file(&path) {
            found.push(path);
        }
    }
    found
}
/// Decide whether a file should be indexed, either by (lowercased)
/// extension or by a well-known file name such as `Makefile`/`Dockerfile`.
fn is_indexable_file(path: &std::path::Path) -> bool {
    const EXTENSIONS: &[&str] = &[
        "rs", "ts", "tsx", "js", "jsx", "py", "go", "java", "c", "cpp", "h", "hpp",
        "cs", "rb", "swift", "kt", "scala", "sol", "vy", "md", "markdown", "txt",
        "text", "rst", "toml", "yaml", "yml", "json", "xml", "html", "css", "scss",
        "sh", "bash", "zsh", "fish", "sql", "graphql", "proto", "tf",
        "dockerfile", "makefile", "cfg", "ini", "conf", "env",
    ];
    const SPECIAL_NAMES: &[&str] = &[
        "dockerfile", "makefile", "rakefile", "gemfile", "cargo.toml", "cargo.lock",
        ".gitignore", ".env.example",
    ];
    let ext = path
        .extension()
        .map(|e| e.to_string_lossy().to_lowercase())
        .unwrap_or_default();
    if EXTENSIONS.contains(&ext.as_str()) {
        return true;
    }
    // Extension did not match: fall back to the full (lowercased) file name.
    path.file_name()
        .map(|n| SPECIAL_NAMES.contains(&n.to_string_lossy().to_lowercase().as_str()))
        .unwrap_or(false)
}
/// Render a Unix timestamp (seconds since epoch) as an RFC 3339 UTC string,
/// e.g. `2024-01-02T03:04:05Z`.
pub fn format_timestamp(secs: u64) -> String {
    let days = secs / 86400;
    let rem = secs % 86400;
    let (year, month, day) = days_to_date(days);
    format!(
        "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}Z",
        year,
        month,
        day,
        rem / 3600,
        (rem % 3600) / 60,
        rem % 60
    )
}
/// Convert days since the Unix epoch (1970-01-01) to a civil
/// (year, month, day), using Howard Hinnant's `civil_from_days` algorithm.
pub fn days_to_date(days: u64) -> (u64, u64, u64) {
    // Shift the epoch to 0000-03-01 so leap days fall at the end of the
    // March-based year.
    let z = days + 719468;
    let era = z / 146097; // 400-year eras of 146097 days each
    let doe = z - era * 146097; // day-of-era, [0, 146096]
    let yoe = (doe - doe / 1460 + doe / 36524 - doe / 146096) / 365; // year-of-era, [0, 399]
    let doy = doe - (365 * yoe + yoe / 4 - yoe / 100); // March-based day-of-year, [0, 365]
    let mp = (5 * doy + 2) / 153; // March-based month, [0, 11]
    let day = doy - (153 * mp + 2) / 5 + 1;
    let month = if mp < 10 { mp + 3 } else { mp - 9 };
    let mut year = yoe + era * 400;
    if month <= 2 {
        // January/February belong to the following March-based year.
        year += 1;
    }
    (year, month, day)
}