1mod access_log;
2mod analysis;
3mod app;
4mod cli;
5mod config;
6mod event_log;
7mod export_html;
8pub mod graph;
9mod import_csv;
10mod import_markdown;
11mod index;
12mod init;
13mod kg_sidecar;
14mod kql;
15mod ops;
16pub mod output;
17mod schema;
18mod storage;
19mod validate;
20mod vectors;
21
22pub use graph::{Edge, EdgeProperties, GraphFile, Metadata, Node, NodeProperties, Note};
24pub use output::FindMode;
25
26pub use validate::{
28 EDGE_TYPE_RULES, TYPE_TO_PREFIX, VALID_RELATIONS, VALID_TYPES, edge_type_rule,
29 format_edge_source_type_error, format_edge_target_type_error,
30};
31
32pub use index::Bm25Index;
34
35use std::ffi::OsString;
36use std::path::{Path, PathBuf};
37
38use anyhow::{Context, Result, anyhow, bail};
39use clap::Parser;
40use cli::{
41 AsOfArgs, AuditArgs, BaselineArgs, CheckArgs, Cli, Command, DiffAsOfArgs, ExportDotArgs,
42 ExportGraphmlArgs, ExportMdArgs, ExportMermaidArgs, FeedbackLogArgs, FeedbackSummaryArgs,
43 FindMode as CliFindMode, GraphCommand, HistoryArgs, ImportCsvArgs, ImportMarkdownArgs,
44 MergeStrategy, NoteAddArgs, NoteListArgs, SplitArgs, TemporalSource, TimelineArgs,
45 VectorCommand,
46};
47use serde::{Deserialize, Serialize};
48use serde_json::Value;
49use storage::{GraphStore, graph_store};
51
52use app::graph_node_edge::{GraphCommandContext, execute_edge, execute_node};
53use app::graph_note::{GraphNoteContext, execute_note};
54use app::graph_query_quality::{
55 execute_audit, execute_baseline, execute_check, execute_duplicates, execute_edge_gaps,
56 execute_feedback_log, execute_feedback_summary, execute_kql, execute_missing_descriptions,
57 execute_missing_facts, execute_quality, execute_stats,
58};
59use app::graph_transfer_temporal::{
60 GraphTransferContext, execute_access_log, execute_access_stats, execute_as_of,
61 execute_diff_as_of, execute_export_dot, execute_export_graphml, execute_export_html,
62 execute_export_json, execute_export_md, execute_export_mermaid, execute_history,
63 execute_import_csv, execute_import_json, execute_import_markdown, execute_split,
64 execute_timeline, execute_vector,
65};
66
67use schema::{GraphSchema, SchemaViolation};
68use validate::validate_graph;
69
70fn format_schema_violations(violations: &[SchemaViolation]) -> String {
75 let mut lines = Vec::new();
76 lines.push("schema violations:".to_owned());
77 for v in violations {
78 lines.push(format!(" - {}", v.message));
79 }
80 lines.join("\n")
81}
82
83pub(crate) fn bail_on_schema_violations(violations: &[SchemaViolation]) -> Result<()> {
84 if !violations.is_empty() {
85 anyhow::bail!("{}", format_schema_violations(violations));
86 }
87 Ok(())
88}
89
90fn validate_graph_with_schema(graph: &GraphFile, schema: &GraphSchema) -> Vec<SchemaViolation> {
91 let mut all_violations = Vec::new();
92 for node in &graph.nodes {
93 all_violations.extend(schema.validate_node_add(node));
94 }
95 let node_type_map: std::collections::HashMap<&str, &str> = graph
96 .nodes
97 .iter()
98 .map(|n| (n.id.as_str(), n.r#type.as_str()))
99 .collect();
100 for edge in &graph.edges {
101 if let (Some(src_type), Some(tgt_type)) = (
102 node_type_map.get(edge.source_id.as_str()),
103 node_type_map.get(edge.target_id.as_str()),
104 ) {
105 all_violations.extend(schema.validate_edge_add(
106 &edge.source_id,
107 src_type,
108 &edge.relation,
109 &edge.target_id,
110 tgt_type,
111 ));
112 }
113 }
114 all_violations.extend(schema.validate_uniqueness(&graph.nodes));
115 all_violations
116}
117
118pub fn run<I>(args: I, cwd: &Path) -> Result<()>
126where
127 I: IntoIterator<Item = OsString>,
128{
129 let rendered = run_args(args, cwd)?;
130 print!("{rendered}");
131 Ok(())
132}
133
134pub fn run_args<I>(args: I, cwd: &Path) -> Result<String>
138where
139 I: IntoIterator<Item = OsString>,
140{
141 let cli = Cli::parse_from(normalize_args(args));
142 let graph_root = default_graph_root(cwd);
143 execute(cli, cwd, &graph_root)
144}
145
146pub fn run_args_safe<I>(args: I, cwd: &Path) -> Result<String>
151where
152 I: IntoIterator<Item = OsString>,
153{
154 let cli = Cli::try_parse_from(normalize_args(args)).map_err(|err| anyhow!(err.to_string()))?;
155 let graph_root = default_graph_root(cwd);
156 execute(cli, cwd, &graph_root)
157}
158
/// Make `graph` the implicit default subcommand: if the first CLI token is
/// neither a flag nor one of the known top-level commands, splice `graph`
/// in right after the program name so `kg node ...` means `kg graph node ...`.
fn normalize_args<I>(args: I) -> Vec<OsString>
where
    I: IntoIterator<Item = OsString>,
{
    // Subcommands that must NOT be routed through the implicit `graph`.
    const TOP_LEVEL: [&str; 8] = [
        "init",
        "create",
        "diff",
        "merge",
        "graph",
        "list",
        "feedback-log",
        "feedback-summary",
    ];

    let mut collected: Vec<OsString> = args.into_iter().collect();
    if collected.len() <= 1 {
        // Program name only (or nothing): nothing to normalize.
        return collected;
    }
    let passthrough = {
        let first = collected[1].to_string_lossy();
        first.starts_with('-') || TOP_LEVEL.iter().any(|cmd| first == *cmd)
    };
    if passthrough {
        return collected;
    }
    collected.insert(1, OsString::from("graph"));
    collected
}
190
/// Dispatch a parsed CLI invocation to its handler and return the rendered
/// output as a string; callers (e.g. `run`) decide whether to print it.
///
/// `graph_root` is the directory graphs live under; `cwd` is used for config
/// discovery, schema discovery, and feedback logs.
fn execute(cli: Cli, cwd: &Path, graph_root: &Path) -> Result<String> {
    match cli.command {
        Command::Init(args) => Ok(init::render_init(&args)),
        Command::Create { graph_name } => {
            let store = graph_store(cwd, graph_root, false)?;
            let path = store.create_graph(&graph_name)?;
            let graph_file = store.load_graph(&path)?;
            // Record the initial snapshot so temporal queries (as-of /
            // history) have an anchor event for this graph.
            append_event_snapshot(&path, "graph.create", Some(graph_name.clone()), &graph_file)?;
            Ok(format!("+ created {}\n", path.display()))
        }
        Command::Diff { left, right, json } => {
            let store = graph_store(cwd, graph_root, false)?;
            if json {
                render_graph_diff_json(store.as_ref(), &left, &right)
            } else {
                render_graph_diff(store.as_ref(), &left, &right)
            }
        }
        Command::Merge {
            target,
            source,
            strategy,
        } => {
            let store = graph_store(cwd, graph_root, false)?;
            merge_graphs(store.as_ref(), &target, &source, strategy)
        }
        Command::List(args) => {
            let store = graph_store(cwd, graph_root, false)?;
            if args.json {
                render_graph_list_json(store.as_ref())
            } else {
                render_graph_list(store.as_ref(), args.full)
            }
        }
        Command::FeedbackLog(args) => execute_feedback_log(cwd, &args),
        Command::Graph {
            graph,
            legacy,
            command,
        } => {
            // Shared setup for every graph-scoped subcommand: resolve and
            // load the graph, then gather optional schema + user identity.
            let store = graph_store(cwd, graph_root, legacy)?;
            let path = store.resolve_graph_path(&graph)?;
            let mut graph_file = store.load_graph(&path)?;
            // Schema discovery failure is treated the same as "no schema".
            let schema = GraphSchema::discover(cwd).ok().flatten().map(|(_, s)| s);
            let user_short_uid = config::ensure_user_short_uid(cwd);

            match command {
                GraphCommand::Node { command } => execute_node(
                    command,
                    GraphCommandContext {
                        graph_name: &graph,
                        path: &path,
                        user_short_uid: &user_short_uid,
                        graph_file: &mut graph_file,
                        schema: schema.as_ref(),
                        store: store.as_ref(),
                    },
                ),

                GraphCommand::Edge { command } => execute_edge(
                    command,
                    GraphCommandContext {
                        graph_name: &graph,
                        path: &path,
                        user_short_uid: &user_short_uid,
                        graph_file: &mut graph_file,
                        schema: schema.as_ref(),
                        store: store.as_ref(),
                    },
                ),

                GraphCommand::Note { command } => execute_note(
                    command,
                    GraphNoteContext {
                        path: &path,
                        graph_file: &mut graph_file,
                        store: store.as_ref(),
                        _schema: schema.as_ref(),
                    },
                ),

                // Read-only inspection commands: these never persist changes.
                GraphCommand::Stats(args) => Ok(execute_stats(&graph_file, &args)),
                GraphCommand::Check(args) => Ok(execute_check(&graph_file, cwd, &args)),
                GraphCommand::Audit(args) => Ok(execute_audit(&graph_file, cwd, &args)),

                GraphCommand::Quality { command } => Ok(execute_quality(command, &graph_file)),

                GraphCommand::MissingDescriptions(args) => {
                    Ok(execute_missing_descriptions(&graph_file, &args))
                }
                GraphCommand::MissingFacts(args) => Ok(execute_missing_facts(&graph_file, &args)),
                GraphCommand::Duplicates(args) => Ok(execute_duplicates(&graph_file, &args)),
                GraphCommand::EdgeGaps(args) => Ok(execute_edge_gaps(&graph_file, &args)),

                GraphCommand::ExportHtml(args) => execute_export_html(&graph, &graph_file, args),

                GraphCommand::AccessLog(args) => execute_access_log(&path, args),

                GraphCommand::AccessStats(_) => execute_access_stats(&path),
                // Import/export commands that may mutate the graph receive a
                // full transfer context (cwd + mutable graph + store).
                GraphCommand::ImportCsv(args) => execute_import_csv(
                    GraphTransferContext {
                        cwd,
                        graph_name: &graph,
                        path: &path,
                        graph_file: &mut graph_file,
                        schema: schema.as_ref(),
                        store: store.as_ref(),
                    },
                    args,
                ),
                GraphCommand::ImportMarkdown(args) => execute_import_markdown(
                    GraphTransferContext {
                        cwd,
                        graph_name: &graph,
                        path: &path,
                        graph_file: &mut graph_file,
                        schema: schema.as_ref(),
                        store: store.as_ref(),
                    },
                    args,
                ),
                GraphCommand::Kql(args) => execute_kql(&graph_file, args),
                GraphCommand::ExportJson(args) => execute_export_json(&graph, &graph_file, args),
                GraphCommand::ImportJson(args) => {
                    execute_import_json(&path, &graph, store.as_ref(), args)
                }
                GraphCommand::ExportDot(args) => execute_export_dot(&graph, &graph_file, args),
                GraphCommand::ExportMermaid(args) => {
                    execute_export_mermaid(&graph, &graph_file, args)
                }
                GraphCommand::ExportGraphml(args) => {
                    execute_export_graphml(&graph, &graph_file, args)
                }
                GraphCommand::ExportMd(args) => execute_export_md(
                    GraphTransferContext {
                        cwd,
                        graph_name: &graph,
                        path: &path,
                        graph_file: &mut graph_file,
                        schema: schema.as_ref(),
                        store: store.as_ref(),
                    },
                    args,
                ),
                GraphCommand::Split(args) => execute_split(&graph, &graph_file, args),
                GraphCommand::Vector { command } => execute_vector(
                    GraphTransferContext {
                        cwd,
                        graph_name: &graph,
                        path: &path,
                        graph_file: &mut graph_file,
                        schema: schema.as_ref(),
                        store: store.as_ref(),
                    },
                    command,
                ),
                // Temporal queries operate on the graph's path (event log /
                // backups), not on the in-memory graph_file.
                GraphCommand::AsOf(args) => execute_as_of(&path, &graph, args),
                GraphCommand::History(args) => execute_history(&path, &graph, args),
                GraphCommand::Timeline(args) => execute_timeline(&path, &graph, args),
                GraphCommand::DiffAsOf(args) => execute_diff_as_of(&path, &graph, args),
                GraphCommand::FeedbackSummary(args) => {
                    Ok(execute_feedback_summary(cwd, &graph, &args)?)
                }
                GraphCommand::Baseline(args) => {
                    Ok(execute_baseline(cwd, &graph, &graph_file, &args)?)
                }
            }
        }
    }
}
366
367fn render_graph_list(store: &dyn GraphStore, full: bool) -> Result<String> {
368 let graphs = store.list_graphs()?;
369
370 let mut lines = vec![format!("= graphs ({})", graphs.len())];
371 for (name, path) in graphs {
372 if full {
373 lines.push(format!("- {name} | {}", path.display()));
374 } else {
375 lines.push(format!("- {name}"));
376 }
377 }
378 Ok(format!("{}\n", lines.join("\n")))
379}
380
/// One row of the JSON graph listing: a graph's name and its on-disk path
/// (pre-rendered to a string for serialization).
#[derive(Debug, Serialize)]
struct GraphListEntry {
    name: String,
    path: String,
}
386
/// Top-level payload for `list --json`.
#[derive(Debug, Serialize)]
struct GraphListResponse {
    graphs: Vec<GraphListEntry>,
}
391
392fn render_graph_list_json(store: &dyn GraphStore) -> Result<String> {
393 let graphs = store.list_graphs()?;
394 let entries = graphs
395 .into_iter()
396 .map(|(name, path)| GraphListEntry {
397 name,
398 path: path.display().to_string(),
399 })
400 .collect();
401 let payload = GraphListResponse { graphs: entries };
402 Ok(serde_json::to_string_pretty(&payload).unwrap_or_else(|_| "{}".to_owned()))
403}
404
/// Matches for a single find query: the query string, its hit count, and the
/// (limit-capped) matching nodes.
#[derive(Debug, Serialize)]
struct FindQueryResult {
    query: String,
    count: usize,
    nodes: Vec<Node>,
}
411
/// Top-level JSON payload for find: aggregate hit count across all queries
/// plus the per-query results.
#[derive(Debug, Serialize)]
struct FindResponse {
    total: usize,
    queries: Vec<FindQueryResult>,
}
417
418pub(crate) fn render_find_json_with_index(
419 graph: &GraphFile,
420 queries: &[String],
421 limit: usize,
422 mode: output::FindMode,
423 index: Option<&Bm25Index>,
424) -> String {
425 let mut total = 0usize;
426 let mut results = Vec::new();
427 for query in queries {
428 let (count, nodes) =
429 output::find_nodes_and_total_with_index(graph, query, limit, true, mode, index);
430 total += count;
431 results.push(FindQueryResult {
432 query: query.clone(),
433 count,
434 nodes,
435 });
436 }
437 let payload = FindResponse {
438 total,
439 queries: results,
440 };
441 serde_json::to_string_pretty(&payload).unwrap_or_else(|_| "{}".to_owned())
442}
443
/// JSON wrapper for a single fetched node (`{"node": {...}}`).
#[derive(Debug, Serialize)]
struct NodeGetResponse {
    node: Node,
}
448
449pub(crate) fn render_node_json(node: &Node) -> String {
450 let payload = NodeGetResponse { node: node.clone() };
451 serde_json::to_string_pretty(&payload).unwrap_or_else(|_| "{}".to_owned())
452}
453
454fn render_graph_diff(store: &dyn GraphStore, left: &str, right: &str) -> Result<String> {
455 let left_path = store.resolve_graph_path(left)?;
456 let right_path = store.resolve_graph_path(right)?;
457 let left_graph = store.load_graph(&left_path)?;
458 let right_graph = store.load_graph(&right_path)?;
459 Ok(render_graph_diff_from_files(
460 left,
461 right,
462 &left_graph,
463 &right_graph,
464 ))
465}
466
467fn render_graph_diff_json(store: &dyn GraphStore, left: &str, right: &str) -> Result<String> {
468 let left_path = store.resolve_graph_path(left)?;
469 let right_path = store.resolve_graph_path(right)?;
470 let left_graph = store.load_graph(&left_path)?;
471 let right_graph = store.load_graph(&right_path)?;
472 Ok(render_graph_diff_json_from_files(
473 left,
474 right,
475 &left_graph,
476 &right_graph,
477 ))
478}
479
/// One leaf-level difference: the dotted JSON path plus both sides' values
/// (`Value::Null` stands in for a missing side).
#[derive(Debug, Serialize)]
struct DiffEntry {
    path: String,
    left: Value,
    right: Value,
}
486
/// All field-level diffs for one changed entity (node id, edge key, or note
/// id), as produced by `diff_serialized_values_json`.
#[derive(Debug, Serialize)]
struct EntityDiff {
    id: String,
    diffs: Vec<DiffEntry>,
}
492
/// Top-level JSON payload for `diff --json`: added/removed entities are
/// listed by id (edges by their "source relation target" key), changed
/// entities carry per-field diffs.
#[derive(Debug, Serialize)]
struct GraphDiffResponse {
    left: String,
    right: String,
    added_nodes: Vec<String>,
    removed_nodes: Vec<String>,
    changed_nodes: Vec<EntityDiff>,
    added_edges: Vec<String>,
    removed_edges: Vec<String>,
    changed_edges: Vec<EntityDiff>,
    added_notes: Vec<String>,
    removed_notes: Vec<String>,
    changed_notes: Vec<EntityDiff>,
}
507
/// Compute the structural diff of two loaded graphs and render it as
/// pretty-printed JSON (`GraphDiffResponse`).
///
/// Nodes and notes are keyed by id; edges by the `"source relation target"`
/// string. Added/removed sets are set differences of those keys; "changed"
/// means the key exists on both sides but the JSON serializations differ.
/// All output lists are sorted for stable, diff-friendly output.
fn render_graph_diff_json_from_files(
    left: &str,
    right: &str,
    left_graph: &GraphFile,
    right_graph: &GraphFile,
) -> String {
    use std::collections::{HashMap, HashSet};

    // Key sets (membership) and key -> entity maps (field-level comparison).
    let left_nodes: HashSet<String> = left_graph.nodes.iter().map(|n| n.id.clone()).collect();
    let right_nodes: HashSet<String> = right_graph.nodes.iter().map(|n| n.id.clone()).collect();

    let left_node_map: HashMap<String, &Node> =
        left_graph.nodes.iter().map(|n| (n.id.clone(), n)).collect();
    let right_node_map: HashMap<String, &Node> = right_graph
        .nodes
        .iter()
        .map(|n| (n.id.clone(), n))
        .collect();

    let left_edges: HashSet<String> = left_graph
        .edges
        .iter()
        .map(|e| format!("{} {} {}", e.source_id, e.relation, e.target_id))
        .collect();
    let right_edges: HashSet<String> = right_graph
        .edges
        .iter()
        .map(|e| format!("{} {} {}", e.source_id, e.relation, e.target_id))
        .collect();

    let left_edge_map: HashMap<String, &Edge> = left_graph
        .edges
        .iter()
        .map(|e| (format!("{} {} {}", e.source_id, e.relation, e.target_id), e))
        .collect();
    let right_edge_map: HashMap<String, &Edge> = right_graph
        .edges
        .iter()
        .map(|e| (format!("{} {} {}", e.source_id, e.relation, e.target_id), e))
        .collect();

    let left_notes: HashSet<String> = left_graph.notes.iter().map(|n| n.id.clone()).collect();
    let right_notes: HashSet<String> = right_graph.notes.iter().map(|n| n.id.clone()).collect();

    let left_note_map: HashMap<String, &Note> =
        left_graph.notes.iter().map(|n| (n.id.clone(), n)).collect();
    let right_note_map: HashMap<String, &Note> = right_graph
        .notes
        .iter()
        .map(|n| (n.id.clone(), n))
        .collect();

    // Added = right-only keys; removed = left-only keys.
    let mut added_nodes: Vec<String> = right_nodes.difference(&left_nodes).cloned().collect();
    let mut removed_nodes: Vec<String> = left_nodes.difference(&right_nodes).cloned().collect();
    let mut added_edges: Vec<String> = right_edges.difference(&left_edges).cloned().collect();
    let mut removed_edges: Vec<String> = left_edges.difference(&right_edges).cloned().collect();
    let mut added_notes: Vec<String> = right_notes.difference(&left_notes).cloned().collect();
    let mut removed_notes: Vec<String> = left_notes.difference(&right_notes).cloned().collect();

    // Changed = present on both sides but serializes differently.
    let mut changed_nodes: Vec<String> = left_nodes
        .intersection(&right_nodes)
        .filter_map(|id| {
            let left_node = left_node_map.get(id.as_str())?;
            let right_node = right_node_map.get(id.as_str())?;
            if eq_serialized(*left_node, *right_node) {
                None
            } else {
                Some(id.clone())
            }
        })
        .collect();
    let mut changed_edges: Vec<String> = left_edges
        .intersection(&right_edges)
        .filter_map(|key| {
            let left_edge = left_edge_map.get(key.as_str())?;
            let right_edge = right_edge_map.get(key.as_str())?;
            if eq_serialized(*left_edge, *right_edge) {
                None
            } else {
                Some(key.clone())
            }
        })
        .collect();
    let mut changed_notes: Vec<String> = left_notes
        .intersection(&right_notes)
        .filter_map(|id| {
            let left_note = left_note_map.get(id.as_str())?;
            let right_note = right_note_map.get(id.as_str())?;
            if eq_serialized(*left_note, *right_note) {
                None
            } else {
                Some(id.clone())
            }
        })
        .collect();

    // Sort everything so output ordering is deterministic.
    added_nodes.sort();
    removed_nodes.sort();
    added_edges.sort();
    removed_edges.sort();
    added_notes.sort();
    removed_notes.sort();
    changed_nodes.sort();
    changed_edges.sort();
    changed_notes.sort();

    // Expand each changed key into per-field diffs (shadowing the id lists).
    let changed_nodes = changed_nodes
        .into_iter()
        .map(|id| EntityDiff {
            diffs: left_node_map
                .get(id.as_str())
                .zip(right_node_map.get(id.as_str()))
                .map(|(left_node, right_node)| diff_serialized_values_json(*left_node, *right_node))
                .unwrap_or_default(),
            id,
        })
        .collect();
    let changed_edges = changed_edges
        .into_iter()
        .map(|id| EntityDiff {
            diffs: left_edge_map
                .get(id.as_str())
                .zip(right_edge_map.get(id.as_str()))
                .map(|(left_edge, right_edge)| diff_serialized_values_json(*left_edge, *right_edge))
                .unwrap_or_default(),
            id,
        })
        .collect();
    let changed_notes = changed_notes
        .into_iter()
        .map(|id| EntityDiff {
            diffs: left_note_map
                .get(id.as_str())
                .zip(right_note_map.get(id.as_str()))
                .map(|(left_note, right_note)| diff_serialized_values_json(*left_note, *right_note))
                .unwrap_or_default(),
            id,
        })
        .collect();

    let payload = GraphDiffResponse {
        left: left.to_owned(),
        right: right.to_owned(),
        added_nodes,
        removed_nodes,
        changed_nodes,
        added_edges,
        removed_edges,
        changed_edges,
        added_notes,
        removed_notes,
        changed_notes,
    };
    serde_json::to_string_pretty(&payload).unwrap_or_else(|_| "{}".to_owned())
}
663
/// Compute the structural diff of two loaded graphs and render it as plain
/// text, mirroring `render_graph_diff_json_from_files`: `+`/`-` lines list
/// added/removed entities by key, `~` sections show per-field changes.
/// All sections are sorted for deterministic output.
fn render_graph_diff_from_files(
    left: &str,
    right: &str,
    left_graph: &GraphFile,
    right_graph: &GraphFile,
) -> String {
    use std::collections::{HashMap, HashSet};

    // Key sets (membership) and key -> entity maps (field-level comparison).
    let left_nodes: HashSet<String> = left_graph.nodes.iter().map(|n| n.id.clone()).collect();
    let right_nodes: HashSet<String> = right_graph.nodes.iter().map(|n| n.id.clone()).collect();

    let left_node_map: HashMap<String, &Node> =
        left_graph.nodes.iter().map(|n| (n.id.clone(), n)).collect();
    let right_node_map: HashMap<String, &Node> = right_graph
        .nodes
        .iter()
        .map(|n| (n.id.clone(), n))
        .collect();

    // Edges are keyed by their "source relation target" triple.
    let left_edges: HashSet<String> = left_graph
        .edges
        .iter()
        .map(|e| format!("{} {} {}", e.source_id, e.relation, e.target_id))
        .collect();
    let right_edges: HashSet<String> = right_graph
        .edges
        .iter()
        .map(|e| format!("{} {} {}", e.source_id, e.relation, e.target_id))
        .collect();

    let left_edge_map: HashMap<String, &Edge> = left_graph
        .edges
        .iter()
        .map(|e| (format!("{} {} {}", e.source_id, e.relation, e.target_id), e))
        .collect();
    let right_edge_map: HashMap<String, &Edge> = right_graph
        .edges
        .iter()
        .map(|e| (format!("{} {} {}", e.source_id, e.relation, e.target_id), e))
        .collect();

    let left_notes: HashSet<String> = left_graph.notes.iter().map(|n| n.id.clone()).collect();
    let right_notes: HashSet<String> = right_graph.notes.iter().map(|n| n.id.clone()).collect();

    let left_note_map: HashMap<String, &Note> =
        left_graph.notes.iter().map(|n| (n.id.clone(), n)).collect();
    let right_note_map: HashMap<String, &Note> = right_graph
        .notes
        .iter()
        .map(|n| (n.id.clone(), n))
        .collect();

    // Added = right-only keys; removed = left-only keys.
    let mut added_nodes: Vec<String> = right_nodes.difference(&left_nodes).cloned().collect();
    let mut removed_nodes: Vec<String> = left_nodes.difference(&right_nodes).cloned().collect();
    let mut added_edges: Vec<String> = right_edges.difference(&left_edges).cloned().collect();
    let mut removed_edges: Vec<String> = left_edges.difference(&right_edges).cloned().collect();
    let mut added_notes: Vec<String> = right_notes.difference(&left_notes).cloned().collect();
    let mut removed_notes: Vec<String> = left_notes.difference(&right_notes).cloned().collect();

    // Changed = present on both sides but serializes differently.
    let mut changed_nodes: Vec<String> = left_nodes
        .intersection(&right_nodes)
        .filter_map(|id| {
            let left_node = left_node_map.get(id.as_str())?;
            let right_node = right_node_map.get(id.as_str())?;
            if eq_serialized(*left_node, *right_node) {
                None
            } else {
                Some(id.clone())
            }
        })
        .collect();

    let mut changed_edges: Vec<String> = left_edges
        .intersection(&right_edges)
        .filter_map(|key| {
            let left_edge = left_edge_map.get(key.as_str())?;
            let right_edge = right_edge_map.get(key.as_str())?;
            if eq_serialized(*left_edge, *right_edge) {
                None
            } else {
                Some(key.clone())
            }
        })
        .collect();

    let mut changed_notes: Vec<String> = left_notes
        .intersection(&right_notes)
        .filter_map(|id| {
            let left_note = left_note_map.get(id.as_str())?;
            let right_note = right_note_map.get(id.as_str())?;
            if eq_serialized(*left_note, *right_note) {
                None
            } else {
                Some(id.clone())
            }
        })
        .collect();

    // Sort every section so output ordering is deterministic.
    added_nodes.sort();
    removed_nodes.sort();
    added_edges.sort();
    removed_edges.sort();
    added_notes.sort();
    removed_notes.sort();
    changed_nodes.sort();
    changed_edges.sort();
    changed_notes.sort();

    let mut lines = vec![format!("= diff {left} -> {right}")];
    lines.push(format!("+ nodes ({})", added_nodes.len()));
    for id in added_nodes {
        lines.push(format!("+ node {id}"));
    }
    lines.push(format!("- nodes ({})", removed_nodes.len()));
    for id in removed_nodes {
        lines.push(format!("- node {id}"));
    }
    lines.push(format!("~ nodes ({})", changed_nodes.len()));
    for id in changed_nodes {
        // The map lookups should always succeed for a "changed" key; the
        // else branch is a defensive fallback without field detail.
        if let (Some(left_node), Some(right_node)) = (
            left_node_map.get(id.as_str()),
            right_node_map.get(id.as_str()),
        ) {
            lines.extend(render_entity_diff_lines("node", &id, left_node, right_node));
        } else {
            lines.push(format!("~ node {id}"));
        }
    }
    lines.push(format!("+ edges ({})", added_edges.len()));
    for edge in added_edges {
        lines.push(format!("+ edge {edge}"));
    }
    lines.push(format!("- edges ({})", removed_edges.len()));
    for edge in removed_edges {
        lines.push(format!("- edge {edge}"));
    }
    lines.push(format!("~ edges ({})", changed_edges.len()));
    for edge in changed_edges {
        if let (Some(left_edge), Some(right_edge)) = (
            left_edge_map.get(edge.as_str()),
            right_edge_map.get(edge.as_str()),
        ) {
            lines.extend(render_entity_diff_lines(
                "edge", &edge, left_edge, right_edge,
            ));
        } else {
            lines.push(format!("~ edge {edge}"));
        }
    }
    lines.push(format!("+ notes ({})", added_notes.len()));
    for note_id in added_notes {
        lines.push(format!("+ note {note_id}"));
    }
    lines.push(format!("- notes ({})", removed_notes.len()));
    for note_id in removed_notes {
        lines.push(format!("- note {note_id}"));
    }
    lines.push(format!("~ notes ({})", changed_notes.len()));
    for note_id in changed_notes {
        if let (Some(left_note), Some(right_note)) = (
            left_note_map.get(note_id.as_str()),
            right_note_map.get(note_id.as_str()),
        ) {
            lines.extend(render_entity_diff_lines(
                "note", &note_id, left_note, right_note,
            ));
        } else {
            lines.push(format!("~ note {note_id}"));
        }
    }

    format!("{}\n", lines.join("\n"))
}
837
838fn eq_serialized<T: Serialize>(left: &T, right: &T) -> bool {
839 match (serde_json::to_value(left), serde_json::to_value(right)) {
840 (Ok(left_value), Ok(right_value)) => left_value == right_value,
841 _ => false,
842 }
843}
844
845fn render_entity_diff_lines<T: Serialize>(
846 kind: &str,
847 id: &str,
848 left: &T,
849 right: &T,
850) -> Vec<String> {
851 let mut lines = Vec::new();
852 lines.push(format!("~ {kind} {id}"));
853 for diff in diff_serialized_values(left, right) {
854 lines.push(format!(" ~ {diff}"));
855 }
856 lines
857}
858
859fn diff_serialized_values<T: Serialize>(left: &T, right: &T) -> Vec<String> {
860 match (serde_json::to_value(left), serde_json::to_value(right)) {
861 (Ok(left_value), Ok(right_value)) => {
862 let mut diffs = Vec::new();
863 collect_value_diffs("", &left_value, &right_value, &mut diffs);
864 diffs
865 }
866 _ => vec!["<serialization failed>".to_owned()],
867 }
868}
869
870fn diff_serialized_values_json<T: Serialize>(left: &T, right: &T) -> Vec<DiffEntry> {
871 match (serde_json::to_value(left), serde_json::to_value(right)) {
872 (Ok(left_value), Ok(right_value)) => {
873 let mut diffs = Vec::new();
874 collect_value_diffs_json("", &left_value, &right_value, &mut diffs);
875 diffs
876 }
877 _ => Vec::new(),
878 }
879}
880
881fn collect_value_diffs_json(path: &str, left: &Value, right: &Value, out: &mut Vec<DiffEntry>) {
882 if left == right {
883 return;
884 }
885 match (left, right) {
886 (Value::Object(left_obj), Value::Object(right_obj)) => {
887 use std::collections::BTreeSet;
888
889 let mut keys: BTreeSet<&str> = BTreeSet::new();
890 for key in left_obj.keys() {
891 keys.insert(key.as_str());
892 }
893 for key in right_obj.keys() {
894 keys.insert(key.as_str());
895 }
896 for key in keys {
897 let left_value = left_obj.get(key).unwrap_or(&Value::Null);
898 let right_value = right_obj.get(key).unwrap_or(&Value::Null);
899 let next_path = if path.is_empty() {
900 key.to_owned()
901 } else {
902 format!("{path}.{key}")
903 };
904 collect_value_diffs_json(&next_path, left_value, right_value, out);
905 }
906 }
907 (Value::Array(_), Value::Array(_)) => {
908 let label = if path.is_empty() {
909 "<root>[]".to_owned()
910 } else {
911 format!("{path}[]")
912 };
913 out.push(DiffEntry {
914 path: label,
915 left: left.clone(),
916 right: right.clone(),
917 });
918 }
919 _ => {
920 let label = if path.is_empty() { "<root>" } else { path };
921 out.push(DiffEntry {
922 path: label.to_owned(),
923 left: left.clone(),
924 right: right.clone(),
925 });
926 }
927 }
928}
929
930fn collect_value_diffs(path: &str, left: &Value, right: &Value, out: &mut Vec<String>) {
931 if left == right {
932 return;
933 }
934 match (left, right) {
935 (Value::Object(left_obj), Value::Object(right_obj)) => {
936 use std::collections::BTreeSet;
937
938 let mut keys: BTreeSet<&str> = BTreeSet::new();
939 for key in left_obj.keys() {
940 keys.insert(key.as_str());
941 }
942 for key in right_obj.keys() {
943 keys.insert(key.as_str());
944 }
945 for key in keys {
946 let left_value = left_obj.get(key).unwrap_or(&Value::Null);
947 let right_value = right_obj.get(key).unwrap_or(&Value::Null);
948 let next_path = if path.is_empty() {
949 key.to_owned()
950 } else {
951 format!("{path}.{key}")
952 };
953 collect_value_diffs(&next_path, left_value, right_value, out);
954 }
955 }
956 (Value::Array(_), Value::Array(_)) => {
957 let label = if path.is_empty() {
958 "<root>[]".to_owned()
959 } else {
960 format!("{path}[]")
961 };
962 out.push(format!(
963 "{label}: {} -> {}",
964 format_value(left),
965 format_value(right)
966 ));
967 }
968 _ => {
969 let label = if path.is_empty() { "<root>" } else { path };
970 out.push(format!(
971 "{label}: {} -> {}",
972 format_value(left),
973 format_value(right)
974 ));
975 }
976 }
977}
978
979fn format_value(value: &Value) -> String {
980 let mut rendered =
981 serde_json::to_string(value).unwrap_or_else(|_| "<unserializable>".to_owned());
982 rendered = rendered.replace('\n', "\\n");
983 truncate_value(rendered, 160)
984}
985
/// Truncate `value` to at most `limit` bytes, replacing the removed tail
/// with `"..."`. Values already within the limit are returned unchanged.
///
/// Fix: the cut point is backed off to the nearest UTF-8 character boundary.
/// The previous code called `String::truncate(limit - 3)` directly, which
/// panics when the cut lands inside a multi-byte character — easy to hit
/// since the rendered JSON routinely contains non-ASCII text.
fn truncate_value(mut value: String, limit: usize) -> String {
    if value.len() <= limit {
        return value;
    }
    // Reserve three bytes for the ellipsis, then walk back to a boundary so
    // `truncate` cannot panic mid-character.
    let mut cut = limit.saturating_sub(3);
    while cut > 0 && !value.is_char_boundary(cut) {
        cut -= 1;
    }
    value.truncate(cut);
    value.push_str("...");
    value
}
994
/// Merge `source` into `target` in place, persist the result, and log a
/// merge event. Returns a short text summary of added/updated counts.
///
/// Nodes and notes are keyed by id; edges by the `"source relation target"`
/// triple. Entries only in `source` are appended. Entries present on both
/// sides are overwritten by the source copy only under
/// `MergeStrategy::PreferNew`; otherwise the target's copy is kept.
fn merge_graphs(
    store: &dyn GraphStore,
    target: &str,
    source: &str,
    strategy: MergeStrategy,
) -> Result<String> {
    use std::collections::HashMap;

    let target_path = store.resolve_graph_path(target)?;
    let source_path = store.resolve_graph_path(source)?;
    let mut target_graph = store.load_graph(&target_path)?;
    let source_graph = store.load_graph(&source_path)?;

    // Index existing target nodes by id so conflicts resolve in O(1).
    let mut node_index: HashMap<String, usize> = HashMap::new();
    for (idx, node) in target_graph.nodes.iter().enumerate() {
        node_index.insert(node.id.clone(), idx);
    }

    let mut node_added = 0usize;
    let mut node_updated = 0usize;
    for node in &source_graph.nodes {
        if let Some(&idx) = node_index.get(&node.id) {
            if matches!(strategy, MergeStrategy::PreferNew) {
                target_graph.nodes[idx] = node.clone();
                node_updated += 1;
            }
        } else {
            target_graph.nodes.push(node.clone());
            // Keep the index current so duplicate ids within the source
            // still merge instead of appending twice.
            node_index.insert(node.id.clone(), target_graph.nodes.len() - 1);
            node_added += 1;
        }
    }

    // Same pattern for edges, keyed by the source/relation/target triple.
    let mut edge_index: HashMap<String, usize> = HashMap::new();
    for (idx, edge) in target_graph.edges.iter().enumerate() {
        let key = format!("{} {} {}", edge.source_id, edge.relation, edge.target_id);
        edge_index.insert(key, idx);
    }

    let mut edge_added = 0usize;
    let mut edge_updated = 0usize;
    for edge in &source_graph.edges {
        let key = format!("{} {} {}", edge.source_id, edge.relation, edge.target_id);
        if let Some(&idx) = edge_index.get(&key) {
            if matches!(strategy, MergeStrategy::PreferNew) {
                target_graph.edges[idx] = edge.clone();
                edge_updated += 1;
            }
        } else {
            target_graph.edges.push(edge.clone());
            edge_index.insert(key, target_graph.edges.len() - 1);
            edge_added += 1;
        }
    }

    // Same pattern for notes, keyed by id.
    let mut note_index: HashMap<String, usize> = HashMap::new();
    for (idx, note) in target_graph.notes.iter().enumerate() {
        note_index.insert(note.id.clone(), idx);
    }

    let mut note_added = 0usize;
    let mut note_updated = 0usize;
    for note in &source_graph.notes {
        if let Some(&idx) = note_index.get(&note.id) {
            if matches!(strategy, MergeStrategy::PreferNew) {
                target_graph.notes[idx] = note.clone();
                note_updated += 1;
            }
        } else {
            target_graph.notes.push(note.clone());
            note_index.insert(note.id.clone(), target_graph.notes.len() - 1);
            note_added += 1;
        }
    }

    // Persist first, then record the merge in the event log.
    store.save_graph(&target_path, &target_graph)?;
    append_event_snapshot(
        &target_path,
        "graph.merge",
        Some(format!("{source} -> {target} ({strategy:?})")),
        &target_graph,
    )?;

    let mut lines = vec![format!("+ merged {source} -> {target}")];
    lines.push(format!("nodes: +{node_added} ~{node_updated}"));
    lines.push(format!("edges: +{edge_added} ~{edge_updated}"));
    lines.push(format!("notes: +{note_added} ~{note_updated}"));

    Ok(format!("{}\n", lines.join("\n")))
}
1085
1086pub(crate) fn export_graph_as_of(path: &Path, graph: &str, args: &AsOfArgs) -> Result<String> {
1087 match resolve_temporal_source(path, args.source)? {
1088 TemporalSource::EventLog => export_graph_as_of_event_log(path, graph, args),
1089 _ => export_graph_as_of_backups(path, graph, args),
1090 }
1091}
1092
1093fn export_graph_as_of_backups(path: &Path, graph: &str, args: &AsOfArgs) -> Result<String> {
1094 let backups = list_graph_backups(path)?;
1095 if backups.is_empty() {
1096 bail!("no backups found for graph: {graph}");
1097 }
1098 let target_ts = args.ts_ms / 1000;
1099 let mut selected = None;
1100 for (ts, backup_path) in backups {
1101 if ts <= target_ts {
1102 selected = Some((ts, backup_path));
1103 }
1104 }
1105 let Some((ts, backup_path)) = selected else {
1106 bail!("no backup at or before ts_ms={}", args.ts_ms);
1107 };
1108
1109 let output_path = args
1110 .output
1111 .clone()
1112 .unwrap_or_else(|| format!("{graph}.asof.{}.json", args.ts_ms));
1113 let raw = read_gz_to_string(&backup_path)?;
1114 std::fs::write(&output_path, raw)?;
1115 Ok(format!("+ exported {output_path} (as-of {ts})\n"))
1116}
1117
1118fn export_graph_as_of_event_log(path: &Path, graph: &str, args: &AsOfArgs) -> Result<String> {
1119 let entries = event_log::read_log(path)?;
1120 if entries.is_empty() {
1121 bail!("no event log entries found for graph: {graph}");
1122 }
1123 let selected = select_event_at_or_before(&entries, args.ts_ms)
1124 .ok_or_else(|| anyhow!("no event log entry at or before ts_ms={}", args.ts_ms))?;
1125 let output_path = args
1126 .output
1127 .clone()
1128 .unwrap_or_else(|| format!("{graph}.asof.{}.json", args.ts_ms));
1129 let mut snapshot = selected.graph.clone();
1130 snapshot.refresh_counts();
1131 let raw = serde_json::to_string_pretty(&snapshot).context("failed to serialize graph")?;
1132 std::fs::write(&output_path, raw)?;
1133 Ok(format!(
1134 "+ exported {output_path} (as-of {})\n",
1135 selected.ts_ms
1136 ))
1137}
1138
1139fn list_graph_backups(path: &Path) -> Result<Vec<(u64, PathBuf)>> {
1140 let parent = path
1141 .parent()
1142 .ok_or_else(|| anyhow!("missing parent directory"))?;
1143 let stem = path
1144 .file_stem()
1145 .and_then(|s| s.to_str())
1146 .ok_or_else(|| anyhow!("invalid graph filename"))?;
1147 let prefix = format!("{stem}.bck.");
1148 let suffix = ".gz";
1149
1150 let mut backups = Vec::new();
1151 for entry in std::fs::read_dir(parent)? {
1152 let entry = entry?;
1153 let name = entry.file_name();
1154 let name = name.to_string_lossy();
1155 if !name.starts_with(&prefix) || !name.ends_with(suffix) {
1156 continue;
1157 }
1158 let ts_part = &name[prefix.len()..name.len() - suffix.len()];
1159 if let Ok(ts) = ts_part.parse::<u64>() {
1160 backups.push((ts, entry.path()));
1161 }
1162 }
1163 backups.sort_by_key(|(ts, _)| *ts);
1164 Ok(backups)
1165}
1166
1167fn read_gz_to_string(path: &Path) -> Result<String> {
1168 use flate2::read::GzDecoder;
1169 use std::io::Read;
1170
1171 let data = std::fs::read(path)?;
1172 let mut decoder = GzDecoder::new(&data[..]);
1173 let mut out = String::new();
1174 decoder.read_to_string(&mut out)?;
1175 Ok(out)
1176}
1177
/// Record a full-graph snapshot in the event log for `path`.
///
/// Thin wrapper over [`event_log::append_snapshot`] so all call sites in
/// this module share one entry point. `action` is a short machine-readable
/// label (call sites use e.g. "graph.merge", "graph.import"); `detail` is
/// optional human-readable context stored alongside it.
pub(crate) fn append_event_snapshot(
    path: &Path,
    action: &str,
    detail: Option<String>,
    graph: &GraphFile,
) -> Result<()> {
    event_log::append_snapshot(path, action, detail, graph)
}
1186
1187pub(crate) fn export_graph_json(
1188 graph: &str,
1189 graph_file: &GraphFile,
1190 output: Option<&str>,
1191) -> Result<String> {
1192 let output_path = output
1193 .map(|value| value.to_owned())
1194 .unwrap_or_else(|| format!("{graph}.export.json"));
1195 let raw = serde_json::to_string_pretty(graph_file).context("failed to serialize graph")?;
1196 std::fs::write(&output_path, raw)?;
1197 Ok(format!("+ exported {output_path}\n"))
1198}
1199
1200pub(crate) fn import_graph_json(
1201 path: &Path,
1202 graph: &str,
1203 input: &str,
1204 store: &dyn GraphStore,
1205) -> Result<String> {
1206 let raw = std::fs::read_to_string(input)
1207 .with_context(|| format!("failed to read import file: {input}"))?;
1208 let mut imported: GraphFile =
1209 serde_json::from_str(&raw).with_context(|| format!("invalid JSON: {input}"))?;
1210 imported.metadata.name = graph.to_owned();
1211 imported.refresh_counts();
1212 store.save_graph(path, &imported)?;
1213 append_event_snapshot(path, "graph.import", Some(input.to_owned()), &imported)?;
1214 Ok(format!("+ imported {input} -> {graph}\n"))
1215}
1216
/// Merge CSV files (nodes/edges/notes) into `graph_file`, validate, persist.
///
/// At least one of the three CSV inputs must be provided. Conflict handling
/// is delegated to the CSV importer via the mapped strategy. When a schema
/// is supplied, violations abort the command before anything is written to
/// the store (note: `graph_file` has already been mutated in memory by then).
pub(crate) fn import_graph_csv(
    path: &Path,
    graph: &str,
    graph_file: &mut GraphFile,
    store: &dyn GraphStore,
    args: &ImportCsvArgs,
    schema: Option<&GraphSchema>,
) -> Result<String> {
    // An invocation with no input files is a user error, not a no-op.
    if args.nodes.is_none() && args.edges.is_none() && args.notes.is_none() {
        bail!("expected at least one of --nodes/--edges/--notes");
    }
    // Translate the shared CLI strategy into the CSV importer's own enum.
    let strategy = match args.strategy {
        MergeStrategy::PreferNew => import_csv::CsvStrategy::PreferNew,
        MergeStrategy::PreferOld => import_csv::CsvStrategy::PreferOld,
    };
    let summary = import_csv::import_csv_into_graph(
        graph_file,
        import_csv::CsvImportArgs {
            nodes_path: args.nodes.as_deref(),
            edges_path: args.edges.as_deref(),
            notes_path: args.notes.as_deref(),
            strategy,
        },
    )?;
    // Validate after merging but before saving, so schema violations leave
    // the persisted graph untouched.
    if let Some(schema) = schema {
        let all_violations = validate_graph_with_schema(graph_file, schema);
        bail_on_schema_violations(&all_violations)?;
    }
    store.save_graph(path, graph_file)?;
    append_event_snapshot(path, "graph.import-csv", None, graph_file)?;
    let mut lines = vec![format!("+ imported csv into {graph}")];
    lines.extend(import_csv::merge_summary_lines(&summary));
    Ok(format!("{}\n", lines.join("\n")))
}
1251
/// Merge markdown input at `args.path` into `graph_file`, validate, persist.
///
/// Mirrors [`import_graph_csv`]: strategy mapping, post-merge schema
/// validation (violations abort before saving), store save, and an
/// event-log snapshot tagged "graph.import-md".
pub(crate) fn import_graph_markdown(
    path: &Path,
    graph: &str,
    graph_file: &mut GraphFile,
    store: &dyn GraphStore,
    args: &ImportMarkdownArgs,
    schema: Option<&GraphSchema>,
) -> Result<String> {
    // Translate the shared CLI strategy into the markdown importer's enum.
    let strategy = match args.strategy {
        MergeStrategy::PreferNew => import_markdown::MarkdownStrategy::PreferNew,
        MergeStrategy::PreferOld => import_markdown::MarkdownStrategy::PreferOld,
    };
    let summary = import_markdown::import_markdown_into_graph(
        graph_file,
        import_markdown::MarkdownImportArgs {
            path: &args.path,
            notes_as_nodes: args.notes_as_nodes,
            strategy,
        },
    )?;
    // Validate after merging but before saving, so schema violations leave
    // the persisted graph untouched.
    if let Some(schema) = schema {
        let all_violations = validate_graph_with_schema(graph_file, schema);
        bail_on_schema_violations(&all_violations)?;
    }
    store.save_graph(path, graph_file)?;
    append_event_snapshot(path, "graph.import-md", Some(args.path.clone()), graph_file)?;
    // Markdown imports reuse the CSV summary formatter for a uniform report.
    let mut lines = vec![format!("+ imported markdown into {graph}")];
    lines.extend(import_csv::merge_summary_lines(&summary));
    Ok(format!("{}\n", lines.join("\n")))
}
1282
1283pub(crate) fn export_graph_dot(
1284 graph: &str,
1285 graph_file: &GraphFile,
1286 args: &ExportDotArgs,
1287) -> Result<String> {
1288 let output_path = args
1289 .output
1290 .clone()
1291 .unwrap_or_else(|| format!("{graph}.dot"));
1292 let (nodes, edges) = select_subgraph(
1293 graph_file,
1294 args.focus.as_deref(),
1295 args.depth,
1296 &args.node_types,
1297 )?;
1298 let mut lines = Vec::new();
1299 lines.push("digraph kg {".to_owned());
1300 for node in &nodes {
1301 let label = format!("{}\\n{}", node.id, node.name);
1302 lines.push(format!(
1303 " \"{}\" [label=\"{}\"];",
1304 escape_dot(&node.id),
1305 escape_dot(&label)
1306 ));
1307 }
1308 for edge in &edges {
1309 lines.push(format!(
1310 " \"{}\" -> \"{}\" [label=\"{}\"];",
1311 escape_dot(&edge.source_id),
1312 escape_dot(&edge.target_id),
1313 escape_dot(&edge.relation)
1314 ));
1315 }
1316 lines.push("}".to_owned());
1317 std::fs::write(&output_path, format!("{}\n", lines.join("\n")))?;
1318 Ok(format!("+ exported {output_path}\n"))
1319}
1320
/// Write a Mermaid `graph TD` rendering of (a subgraph of) `graph_file`.
///
/// Output defaults to `{graph}.mmd`. Node ids are sanitized to Mermaid-safe
/// identifiers while the quoted label keeps the original `id\nname`; edges
/// carry the relation as an inline label.
pub(crate) fn export_graph_mermaid(
    graph: &str,
    graph_file: &GraphFile,
    args: &ExportMermaidArgs,
) -> Result<String> {
    let output_path = args
        .output
        .clone()
        .unwrap_or_else(|| format!("{graph}.mmd"));
    let (nodes, edges) = select_subgraph(
        graph_file,
        args.focus.as_deref(),
        args.depth,
        &args.node_types,
    )?;
    let mut lines = Vec::new();
    lines.push("graph TD".to_owned());
    for node in &nodes {
        // Label keeps the raw id/name; the node identifier must be sanitized.
        let label = format!("{}\\n{}", node.id, node.name);
        lines.push(format!(
            " {}[\"{}\"]",
            sanitize_mermaid_id(&node.id),
            escape_mermaid(&label)
        ));
    }
    for edge in &edges {
        lines.push(format!(
            " {} -- \"{}\" --> {}",
            sanitize_mermaid_id(&edge.source_id),
            escape_mermaid(&edge.relation),
            sanitize_mermaid_id(&edge.target_id)
        ));
    }
    std::fs::write(&output_path, format!("{}\n", lines.join("\n")))?;
    Ok(format!("+ exported {output_path}\n"))
}
1357
/// Write a GraphML rendering of (a subgraph of) `graph_file`.
///
/// Output defaults to `{graph}.graphml`. Declares five data keys (node
/// name/type/description, edge relation/detail) and emits a directed graph.
/// All attribute/text values pass through `escape_xml`.
pub(crate) fn export_graph_graphml(
    graph: &str,
    graph_file: &GraphFile,
    args: &ExportGraphmlArgs,
) -> Result<String> {
    let output_path = args
        .output
        .clone()
        .unwrap_or_else(|| format!("{graph}.graphml"));
    let (nodes, edges) = select_subgraph(
        graph_file,
        args.focus.as_deref(),
        args.depth,
        &args.node_types,
    )?;

    // Fixed header: XML declaration, graphml root with schema location, and
    // the key declarations referenced by the <data> elements below.
    let mut lines = Vec::new();
    lines.push(r#"<?xml version="1.0" encoding="UTF-8"?>"#.to_string());
    lines.push(r#"<graphml xmlns="http://graphml.graphdrawing.org/xmlns" "#.to_string());
    lines.push(r#" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance""#.to_string());
    lines.push(r#" xsi:schemaLocation="http://graphml.graphdrawing.org/xmlns"#.to_string());
    lines.push(r#" http://graphml.graphdrawing.org/xmlns/1.0/graphml.xsd">"#.to_string());
    lines.push(r#" <key id="d0" for="node" attr.name="name" attr.type="string"/>"#.to_string());
    lines.push(r#" <key id="d1" for="node" attr.name="type" attr.type="string"/>"#.to_string());
    lines.push(
        r#" <key id="d2" for="node" attr.name="description" attr.type="string"/>"#.to_string(),
    );
    lines
        .push(r#" <key id="d3" for="edge" attr.name="relation" attr.type="string"/>"#.to_string());
    lines.push(r#" <key id="d4" for="edge" attr.name="detail" attr.type="string"/>"#.to_string());
    lines.push(format!(
        r#" <graph id="{}" edgedefault="directed">"#,
        escape_xml(graph)
    ));

    for node in &nodes {
        lines.push(format!(r#" <node id="{}">"#, escape_xml(&node.id)));
        lines.push(format!(
            r#" <data key="d0">{}</data>"#,
            escape_xml(&node.name)
        ));
        lines.push(format!(
            r#" <data key="d1">{}</data>"#,
            escape_xml(&node.r#type)
        ));
        lines.push(format!(
            r#" <data key="d2">{}</data>"#,
            escape_xml(&node.properties.description)
        ));
        lines.push(" </node>".to_string());
    }

    for edge in &edges {
        lines.push(format!(
            r#" <edge source="{}" target="{}">"#,
            escape_xml(&edge.source_id),
            escape_xml(&edge.target_id)
        ));
        lines.push(format!(
            r#" <data key="d3">{}</data>"#,
            escape_xml(&edge.relation)
        ));
        lines.push(format!(
            r#" <data key="d4">{}</data>"#,
            escape_xml(&edge.properties.detail)
        ));
        lines.push(" </edge>".to_string());
    }

    lines.push(" </graph>".to_string());
    lines.push("</graphml>".to_string());

    // Note: unlike the DOT/Mermaid exporters, no trailing newline is added.
    std::fs::write(&output_path, lines.join("\n"))?;
    Ok(format!("+ exported {output_path}\n"))
}
1433
/// Escape the five XML special characters into their predefined entities.
///
/// As written, every replacement was an identity no-op (`'&'` -> `"&"` and
/// so on), so GraphML output containing `&`, `<`, `>`, `"` or `'` was
/// malformed. Restore the standard entity escapes, with `&` first so the
/// ampersands introduced by the other entities are not double-escaped.
fn escape_xml(s: &str) -> String {
    s.replace('&', "&amp;")
        .replace('<', "&lt;")
        .replace('>', "&gt;")
        .replace('"', "&quot;")
        .replace('\'', "&apos;")
}
1441
/// Export the graph as a directory of per-node markdown files plus an index.
///
/// Output directory defaults to `{graph}-md`. Each node gets
/// `{sanitized_id}.md` with description, key facts, aliases, relation lines
/// (wiki-link style) and backlinks; `index.md` lists every exported node.
/// `_cwd` is currently unused.
pub(crate) fn export_graph_md(
    graph: &str,
    graph_file: &GraphFile,
    args: &ExportMdArgs,
    _cwd: &Path,
) -> Result<String> {
    let output_dir = args
        .output
        .clone()
        .unwrap_or_else(|| format!("{}-md", graph));

    let (nodes, edges) = select_subgraph(
        graph_file,
        args.focus.as_deref(),
        args.depth,
        &args.node_types,
    )?;

    std::fs::create_dir_all(&output_dir)?;

    let mut index_lines = format!("# {}\n\nNodes: {}\n\n## Index\n", graph, nodes.len());

    for node in &nodes {
        // Filename derives from the sanitized id, not the display name.
        let safe_name = sanitize_filename(&node.id);
        let filename = format!("{}.md", safe_name);
        let filepath = Path::new(&output_dir).join(&filename);

        let mut content = String::new();
        content.push_str(&format!("# {}\n\n", node.name));
        content.push_str(&format!("**ID:** `{}`\n\n", node.id));
        content.push_str(&format!("**Type:** {}\n\n", node.r#type));

        // Optional sections are skipped entirely when empty.
        if !node.properties.description.is_empty() {
            content.push_str(&format!(
                "## Description\n\n{}\n\n",
                node.properties.description
            ));
        }

        if !node.properties.key_facts.is_empty() {
            content.push_str("## Facts\n\n");
            for fact in &node.properties.key_facts {
                content.push_str(&format!("- {}\n", fact));
            }
            content.push('\n');
        }

        if !node.properties.alias.is_empty() {
            content.push_str(&format!(
                "**Aliases:** {}\n\n",
                node.properties.alias.join(", ")
            ));
        }

        // Relations: outgoing edges rendered forward, incoming rendered with
        // a reversed arrow, both as [[wiki-link]] references.
        content.push_str("## Relations\n\n");
        for edge in &edges {
            if edge.source_id == node.id {
                content.push_str(&format!(
                    "- [[{}]] --({})--> [[{}]]\n",
                    node.id, edge.relation, edge.target_id
                ));
            } else if edge.target_id == node.id {
                content.push_str(&format!(
                    "- [[{}]] <--({})-- [[{}]]\n",
                    edge.source_id, edge.relation, node.id
                ));
            }
        }
        content.push('\n');

        // Backlinks repeat the incoming edges in a compact list; an explicit
        // placeholder keeps the section present even when empty.
        content.push_str("## Backlinks\n\n");
        let backlinks: Vec<_> = edges.iter().filter(|e| e.target_id == node.id).collect();
        if backlinks.is_empty() {
            content.push_str("_No backlinks_\n");
        } else {
            for edge in backlinks {
                content.push_str(&format!("- [[{}]] ({})\n", edge.source_id, edge.relation));
            }
        }

        std::fs::write(&filepath, content)?;

        index_lines.push_str(&format!(
            "- [[{}]] - {} [{}]\n",
            node.id, node.name, node.r#type
        ));
    }

    std::fs::write(Path::new(&output_dir).join("index.md"), index_lines)?;

    Ok(format!(
        "+ exported {}/ ({} nodes)\n",
        output_dir,
        nodes.len()
    ))
}
1538
/// Make an id safe for use as a filename: path separators, colons and
/// spaces become `_`, and `&` becomes the word `and`.
fn sanitize_filename(name: &str) -> String {
    let mut out = String::with_capacity(name.len());
    for ch in name.chars() {
        match ch {
            ':' | '/' | '\\' | ' ' => out.push('_'),
            '&' => out.push_str("and"),
            other => out.push(other),
        }
    }
    out
}
1542
1543pub(crate) fn split_graph(graph: &str, graph_file: &GraphFile, args: &SplitArgs) -> Result<String> {
1544 let output_dir = args
1545 .output
1546 .clone()
1547 .unwrap_or_else(|| format!("{}-split", graph));
1548
1549 let nodes_dir = Path::new(&output_dir).join("nodes");
1550 let edges_dir = Path::new(&output_dir).join("edges");
1551 let notes_dir = Path::new(&output_dir).join("notes");
1552 let meta_dir = Path::new(&output_dir).join("metadata");
1553
1554 std::fs::create_dir_all(&nodes_dir)?;
1555 std::fs::create_dir_all(&edges_dir)?;
1556 std::fs::create_dir_all(¬es_dir)?;
1557 std::fs::create_dir_all(&meta_dir)?;
1558
1559 let meta_json = serde_json::to_string_pretty(&graph_file.metadata)?;
1560 std::fs::write(meta_dir.join("metadata.json"), meta_json)?;
1561
1562 let mut node_count = 0;
1563 for node in &graph_file.nodes {
1564 let safe_id = sanitize_filename(&node.id);
1565 let filepath = nodes_dir.join(format!("{}.json", safe_id));
1566 let node_json = serde_json::to_string_pretty(node)?;
1567 std::fs::write(filepath, node_json)?;
1568 node_count += 1;
1569 }
1570
1571 let mut edge_count = 0;
1572 for edge in &graph_file.edges {
1573 let edge_key = format!(
1574 "{}___{}___{}",
1575 sanitize_filename(&edge.source_id),
1576 sanitize_filename(&edge.relation),
1577 sanitize_filename(&edge.target_id)
1578 );
1579 let filepath = edges_dir.join(format!("{}.json", edge_key));
1580 let edge_json = serde_json::to_string_pretty(edge)?;
1581 std::fs::write(filepath, edge_json)?;
1582 edge_count += 1;
1583 }
1584
1585 let mut note_count = 0;
1586 for note in &graph_file.notes {
1587 let safe_id = sanitize_filename(¬e.id);
1588 let filepath = notes_dir.join(format!("{}.json", safe_id));
1589 let note_json = serde_json::to_string_pretty(note)?;
1590 std::fs::write(filepath, note_json)?;
1591 note_count += 1;
1592 }
1593
1594 let manifest = format!(
1595 r#"# {} Split Manifest
1596
1597This directory contains a git-friendly split representation of the graph.
1598
1599## Structure
1600
1601- `metadata/metadata.json` - Graph metadata
1602- `nodes/` - One JSON file per node (filename = sanitized node id)
1603- `edges/` - One JSON file per edge (filename = source___relation___target)
1604- `notes/` - One JSON file per note
1605
1606## Stats
1607
1608- Nodes: {}
1609- Edges: {}
1610- Notes: {}
1611
1612## Usage
1613
1614To reassemble into a single JSON file, use `kg {} import-json`.
1615"#,
1616 graph, node_count, edge_count, note_count, graph
1617 );
1618 std::fs::write(Path::new(&output_dir).join("MANIFEST.md"), manifest)?;
1619
1620 Ok(format!(
1621 "+ split {} into {}/ (nodes: {}, edges: {}, notes: {})\n",
1622 graph, output_dir, node_count, edge_count, note_count
1623 ))
1624}
1625
/// Select the nodes and edges to export, honoring focus/depth/type filters.
///
/// With `focus` set, runs an undirected BFS from the focus node up to
/// `depth` hops (edges are traversed in both directions); without it, every
/// node is selected. `node_types` (matched case-insensitively) then filters
/// the selection, except the focus node itself is always kept. Edges are
/// kept only when both endpoints survive filtering. Results are sorted —
/// nodes by id, edges by (source, relation, target) — so output is stable.
///
/// Errors if `focus` names a node that does not exist.
fn select_subgraph<'a>(
    graph_file: &'a GraphFile,
    focus: Option<&'a str>,
    depth: usize,
    node_types: &'a [String],
) -> Result<(Vec<&'a Node>, Vec<&'a Edge>)> {
    use std::collections::{HashSet, VecDeque};

    let mut selected: HashSet<String> = HashSet::new();
    if let Some(focus_id) = focus {
        if graph_file.node_by_id(focus_id).is_none() {
            bail!("focus node not found: {focus_id}");
        }
        selected.insert(focus_id.to_owned());
        let mut frontier = VecDeque::new();
        frontier.push_back((focus_id.to_owned(), 0usize));
        while let Some((current, dist)) = frontier.pop_front() {
            // Stop expanding once the hop budget is spent.
            if dist >= depth {
                continue;
            }
            // Linear scan over all edges per frontier entry; acceptable for
            // small graphs but O(V*E) in the worst case.
            for edge in &graph_file.edges {
                let next = if edge.source_id == current {
                    Some(edge.target_id.clone())
                } else if edge.target_id == current {
                    Some(edge.source_id.clone())
                } else {
                    None
                };
                if let Some(next_id) = next {
                    // insert() returning true means first visit — only then
                    // does the node join the frontier.
                    if selected.insert(next_id.clone()) {
                        frontier.push_back((next_id, dist + 1));
                    }
                }
            }
        }
    } else {
        for node in &graph_file.nodes {
            selected.insert(node.id.clone());
        }
    }

    // Empty filter list means "keep everything".
    let type_filter: Vec<String> = node_types.iter().map(|t| t.to_lowercase()).collect();
    let has_filter = !type_filter.is_empty();
    let mut nodes: Vec<&Node> = graph_file
        .nodes
        .iter()
        .filter(|node| selected.contains(&node.id))
        .filter(|node| {
            // The focus node bypasses the type filter so the export always
            // contains the node the user asked about.
            if let Some(focus_id) = focus {
                if node.id == focus_id {
                    return true;
                }
            }
            !has_filter || type_filter.contains(&node.r#type.to_lowercase())
        })
        .collect();
    nodes.sort_by(|a, b| a.id.cmp(&b.id));

    // Keep only edges whose endpoints both survived node filtering.
    let node_set: HashSet<String> = nodes.iter().map(|node| node.id.clone()).collect();
    let mut edges: Vec<&Edge> = graph_file
        .edges
        .iter()
        .filter(|edge| node_set.contains(&edge.source_id) && node_set.contains(&edge.target_id))
        .collect();
    edges.sort_by(|a, b| {
        a.source_id
            .cmp(&b.source_id)
            .then_with(|| a.relation.cmp(&b.relation))
            .then_with(|| a.target_id.cmp(&b.target_id))
    });

    Ok((nodes, edges))
}
1699
/// Escape a value for use inside a double-quoted DOT string:
/// quotes become `\"` and literal newlines become `\n`.
fn escape_dot(value: &str) -> String {
    let quoted = value.replace('"', "\\\"");
    quoted.replace('\n', "\\n")
}
1703
/// Escape a value for a quoted Mermaid label: literal newlines become `\n`
/// and quotes become `\"` (the two passes are order-independent).
fn escape_mermaid(value: &str) -> String {
    value.replace('\n', "\\n").replace('"', "\\\"")
}
1707
/// Reduce an arbitrary id to a Mermaid-safe identifier: ASCII alphanumerics
/// and `_` pass through, everything else becomes `_`. An empty input (or
/// empty id) falls back to the literal `node`.
fn sanitize_mermaid_id(value: &str) -> String {
    let sanitized: String = value
        .chars()
        .map(|ch| {
            if ch.is_ascii_alphanumeric() || ch == '_' {
                ch
            } else {
                '_'
            }
        })
        .collect();
    if sanitized.is_empty() {
        "node".to_owned()
    } else {
        sanitized
    }
}
1723
/// Render `graph history`: the most recent backup snapshots, newest first.
///
/// `args.limit` caps how many snapshots appear; `total` always reports the
/// full backup count. With `--json`, a `GraphHistoryResponse` is emitted
/// instead of the plain-text listing.
pub(crate) fn render_graph_history(path: &Path, graph: &str, args: &HistoryArgs) -> Result<String> {
    let backups = list_graph_backups(path)?;
    let total = backups.len();
    // Backups come back oldest-first; reverse so the newest lead the list.
    let snapshots: Vec<(u64, PathBuf)> = backups.into_iter().rev().take(args.limit).collect();

    if args.json {
        let payload = GraphHistoryResponse {
            graph: graph.to_owned(),
            total,
            snapshots: snapshots
                .iter()
                .map(|(ts, backup_path)| GraphHistorySnapshot {
                    ts: *ts,
                    path: backup_path.display().to_string(),
                })
                .collect(),
        };
        let rendered =
            serde_json::to_string_pretty(&payload).context("failed to render history as JSON")?;
        return Ok(format!("{rendered}\n"));
    }

    let mut lines = vec![format!("= history {graph} ({total})")];
    for (ts, backup_path) in snapshots {
        lines.push(format!("- {ts} | {}", backup_path.display()));
    }
    Ok(format!("{}\n", lines.join("\n")))
}
1752
/// Render `graph timeline`: recent event-log entries with per-entry counts.
///
/// Entries are filtered to the inclusive `[since_ts_ms, until_ts_ms]` window
/// (either bound optional), then the newest `args.limit` are shown, newest
/// first. With `--json`, a `GraphTimelineResponse` is emitted instead.
pub(crate) fn render_graph_timeline(
    path: &Path,
    graph: &str,
    args: &TimelineArgs,
) -> Result<String> {
    let entries = event_log::read_log(path)?;
    let total = entries.len();
    let filtered: Vec<&event_log::EventLogEntry> = entries
        .iter()
        .filter(|entry| {
            // A missing bound is treated as unbounded on that side.
            let after_since = args
                .since_ts_ms
                .map(|since| entry.ts_ms >= since)
                .unwrap_or(true);
            let before_until = args
                .until_ts_ms
                .map(|until| entry.ts_ms <= until)
                .unwrap_or(true);
            after_since && before_until
        })
        .collect();
    // Reverse so the newest entries are shown, capped at the limit.
    let recent: Vec<&event_log::EventLogEntry> =
        filtered.into_iter().rev().take(args.limit).collect();

    if args.json {
        let payload = GraphTimelineResponse {
            graph: graph.to_owned(),
            total,
            filtered: recent.len(),
            since_ts_ms: args.since_ts_ms,
            until_ts_ms: args.until_ts_ms,
            entries: recent
                .iter()
                .map(|entry| GraphTimelineEntry {
                    ts_ms: entry.ts_ms,
                    action: entry.action.clone(),
                    detail: entry.detail.clone(),
                    node_count: entry.graph.nodes.len(),
                    edge_count: entry.graph.edges.len(),
                    note_count: entry.graph.notes.len(),
                })
                .collect(),
        };
        let rendered =
            serde_json::to_string_pretty(&payload).context("failed to render timeline as JSON")?;
        return Ok(format!("{rendered}\n"));
    }

    let mut lines = vec![format!("= timeline {graph} ({total})")];
    // Only print the range header when the user actually constrained it.
    if args.since_ts_ms.is_some() || args.until_ts_ms.is_some() {
        lines.push(format!(
            "range: {} -> {}",
            args.since_ts_ms
                .map(|value| value.to_string())
                .unwrap_or_else(|| "-inf".to_owned()),
            args.until_ts_ms
                .map(|value| value.to_string())
                .unwrap_or_else(|| "+inf".to_owned())
        ));
        lines.push(format!("showing: {}", recent.len()));
    }
    for entry in recent {
        let detail = entry
            .detail
            .as_deref()
            .map(|value| format!(" | {value}"))
            .unwrap_or_default();
        lines.push(format!(
            "- {} | {}{} | nodes: {} | edges: {} | notes: {}",
            entry.ts_ms,
            entry.action,
            detail,
            entry.graph.nodes.len(),
            entry.graph.edges.len(),
            entry.graph.notes.len()
        ));
    }
    Ok(format!("{}\n", lines.join("\n")))
}
1832
/// One backup snapshot row for the JSON output of `graph history`.
#[derive(Debug, Serialize)]
struct GraphHistorySnapshot {
    // Timestamp parsed from the backup filename (seconds; callers divide
    // millisecond inputs by 1000 before comparing against it).
    ts: u64,
    // Filesystem path of the .gz backup, rendered for display.
    path: String,
}
1838
/// JSON payload for `graph history --json`.
#[derive(Debug, Serialize)]
struct GraphHistoryResponse {
    // Graph name the history belongs to.
    graph: String,
    // Total number of backups on disk (before the display limit).
    total: usize,
    // Newest-first snapshots, capped at the requested limit.
    snapshots: Vec<GraphHistorySnapshot>,
}
1845
/// One event-log row for the JSON output of `graph timeline`.
#[derive(Debug, Serialize)]
struct GraphTimelineEntry {
    // Event timestamp in milliseconds since epoch.
    ts_ms: u64,
    // Machine-readable action label (e.g. "graph.merge").
    action: String,
    // Optional human-readable context recorded with the event.
    detail: Option<String>,
    // Graph size at this snapshot.
    node_count: usize,
    edge_count: usize,
    note_count: usize,
}
1855
/// JSON payload for `graph timeline --json`.
#[derive(Debug, Serialize)]
struct GraphTimelineResponse {
    // Graph name the timeline belongs to.
    graph: String,
    // Total event-log entries before filtering.
    total: usize,
    // Number of entries actually shown (after range filter and limit).
    filtered: usize,
    // The requested range bounds, echoed back for the caller.
    since_ts_ms: Option<u64>,
    until_ts_ms: Option<u64>,
    // Newest-first entries within the range, capped at the limit.
    entries: Vec<GraphTimelineEntry>,
}
1865
1866pub(crate) fn render_graph_diff_as_of(
1867 path: &Path,
1868 graph: &str,
1869 args: &DiffAsOfArgs,
1870) -> Result<String> {
1871 match resolve_temporal_source(path, args.source)? {
1872 TemporalSource::EventLog => render_graph_diff_as_of_event_log(path, graph, args),
1873 _ => render_graph_diff_as_of_backups(path, graph, args),
1874 }
1875}
1876
1877pub(crate) fn render_graph_diff_as_of_json(
1878 path: &Path,
1879 graph: &str,
1880 args: &DiffAsOfArgs,
1881) -> Result<String> {
1882 match resolve_temporal_source(path, args.source)? {
1883 TemporalSource::EventLog => render_graph_diff_as_of_event_log_json(path, graph, args),
1884 _ => render_graph_diff_as_of_backups_json(path, graph, args),
1885 }
1886}
1887
/// Diff two historical graph states reconstructed from gz backups.
///
/// Backup timestamps are in seconds, so both millisecond bounds are floored
/// via integer division before matching. Each side resolves to the newest
/// backup at or before its timestamp; the labels keep the original ms values.
fn render_graph_diff_as_of_backups(
    path: &Path,
    graph: &str,
    args: &DiffAsOfArgs,
) -> Result<String> {
    let backups = list_graph_backups(path)?;
    if backups.is_empty() {
        bail!("no backups found for graph: {graph}");
    }
    let from_ts = args.from_ts_ms / 1000;
    let to_ts = args.to_ts_ms / 1000;
    let from_backup = select_backup_at_or_before(&backups, from_ts)
        .ok_or_else(|| anyhow!("no backup at or before from_ts_ms={}", args.from_ts_ms))?;
    let to_backup = select_backup_at_or_before(&backups, to_ts)
        .ok_or_else(|| anyhow!("no backup at or before to_ts_ms={}", args.to_ts_ms))?;

    let from_graph = load_graph_from_backup(&from_backup.1)?;
    let to_graph = load_graph_from_backup(&to_backup.1)?;
    let left_label = format!("{graph}@{}", args.from_ts_ms);
    let right_label = format!("{graph}@{}", args.to_ts_ms);
    Ok(render_graph_diff_from_files(
        &left_label,
        &right_label,
        &from_graph,
        &to_graph,
    ))
}
1915
/// JSON twin of [`render_graph_diff_as_of_backups`]: identical backup
/// selection, but the diff is rendered via the JSON formatter.
fn render_graph_diff_as_of_backups_json(
    path: &Path,
    graph: &str,
    args: &DiffAsOfArgs,
) -> Result<String> {
    let backups = list_graph_backups(path)?;
    if backups.is_empty() {
        bail!("no backups found for graph: {graph}");
    }
    // Backups are keyed in seconds; floor the ms bounds before matching.
    let from_ts = args.from_ts_ms / 1000;
    let to_ts = args.to_ts_ms / 1000;
    let from_backup = select_backup_at_or_before(&backups, from_ts)
        .ok_or_else(|| anyhow!("no backup at or before from_ts_ms={}", args.from_ts_ms))?;
    let to_backup = select_backup_at_or_before(&backups, to_ts)
        .ok_or_else(|| anyhow!("no backup at or before to_ts_ms={}", args.to_ts_ms))?;

    let from_graph = load_graph_from_backup(&from_backup.1)?;
    let to_graph = load_graph_from_backup(&to_backup.1)?;
    let left_label = format!("{graph}@{}", args.from_ts_ms);
    let right_label = format!("{graph}@{}", args.to_ts_ms);
    Ok(render_graph_diff_json_from_files(
        &left_label,
        &right_label,
        &from_graph,
        &to_graph,
    ))
}
1943
/// Diff two historical graph states taken from event-log snapshots.
///
/// Each side resolves to the last log entry at or before its ms timestamp;
/// the entries' embedded graph snapshots are diffed directly.
fn render_graph_diff_as_of_event_log(
    path: &Path,
    graph: &str,
    args: &DiffAsOfArgs,
) -> Result<String> {
    let entries = event_log::read_log(path)?;
    if entries.is_empty() {
        bail!("no event log entries found for graph: {graph}");
    }
    let from_entry = select_event_at_or_before(&entries, args.from_ts_ms).ok_or_else(|| {
        anyhow!(
            "no event log entry at or before from_ts_ms={}",
            args.from_ts_ms
        )
    })?;
    let to_entry = select_event_at_or_before(&entries, args.to_ts_ms)
        .ok_or_else(|| anyhow!("no event log entry at or before to_ts_ms={}", args.to_ts_ms))?;

    let left_label = format!("{graph}@{}", args.from_ts_ms);
    let right_label = format!("{graph}@{}", args.to_ts_ms);
    Ok(render_graph_diff_from_files(
        &left_label,
        &right_label,
        &from_entry.graph,
        &to_entry.graph,
    ))
}
1971
/// JSON twin of [`render_graph_diff_as_of_event_log`]: identical entry
/// selection, but the diff is rendered via the JSON formatter.
fn render_graph_diff_as_of_event_log_json(
    path: &Path,
    graph: &str,
    args: &DiffAsOfArgs,
) -> Result<String> {
    let entries = event_log::read_log(path)?;
    if entries.is_empty() {
        bail!("no event log entries found for graph: {graph}");
    }
    let from_entry = select_event_at_or_before(&entries, args.from_ts_ms).ok_or_else(|| {
        anyhow!(
            "no event log entry at or before from_ts_ms={}",
            args.from_ts_ms
        )
    })?;
    let to_entry = select_event_at_or_before(&entries, args.to_ts_ms)
        .ok_or_else(|| anyhow!("no event log entry at or before to_ts_ms={}", args.to_ts_ms))?;

    let left_label = format!("{graph}@{}", args.from_ts_ms);
    let right_label = format!("{graph}@{}", args.to_ts_ms);
    Ok(render_graph_diff_json_from_files(
        &left_label,
        &right_label,
        &from_entry.graph,
        &to_entry.graph,
    ))
}
1999
2000fn resolve_temporal_source(path: &Path, source: TemporalSource) -> Result<TemporalSource> {
2001 if matches!(source, TemporalSource::Auto) {
2002 let has_events = event_log::has_log(path);
2003 return Ok(if has_events {
2004 TemporalSource::EventLog
2005 } else {
2006 TemporalSource::Backups
2007 });
2008 }
2009 Ok(source)
2010}
2011
2012fn select_event_at_or_before(
2013 entries: &[event_log::EventLogEntry],
2014 target_ts_ms: u64,
2015) -> Option<&event_log::EventLogEntry> {
2016 let mut selected = None;
2017 for entry in entries {
2018 if entry.ts_ms <= target_ts_ms {
2019 selected = Some(entry);
2020 }
2021 }
2022 selected
2023}
2024
/// Find the last backup (in slice order) with timestamp `<= target_ts`,
/// returning an owned (timestamp, path) pair.
///
/// Scanning from the back and taking the first match is equivalent to the
/// forward scan that keeps overwriting its candidate.
fn select_backup_at_or_before(
    backups: &[(u64, PathBuf)],
    target_ts: u64,
) -> Option<(u64, PathBuf)> {
    backups
        .iter()
        .rev()
        .find(|(ts, _)| *ts <= target_ts)
        .map(|(ts, path)| (*ts, path.clone()))
}
2037
2038fn load_graph_from_backup(path: &Path) -> Result<GraphFile> {
2039 let raw = read_gz_to_string(path)?;
2040 let graph: GraphFile = serde_json::from_str(&raw)
2041 .with_context(|| format!("failed to parse backup: {}", path.display()))?;
2042 Ok(graph)
2043}
2044
2045pub(crate) fn render_note_list(graph: &GraphFile, args: &NoteListArgs) -> String {
2046 let mut notes: Vec<&Note> = graph
2047 .notes
2048 .iter()
2049 .filter(|note| args.node.as_ref().is_none_or(|node| note.node_id == *node))
2050 .collect();
2051
2052 notes.sort_by(|a, b| {
2053 a.created_at
2054 .cmp(&b.created_at)
2055 .then_with(|| a.id.cmp(&b.id))
2056 });
2057
2058 let total = notes.len();
2059 let visible: Vec<&Note> = notes.into_iter().take(args.limit).collect();
2060
2061 let mut lines = vec![format!("= notes ({total})")];
2062 for note in &visible {
2063 let mut line = format!(
2064 "- {} | {} | {} | {}",
2065 note.id,
2066 note.node_id,
2067 note.created_at,
2068 truncate_note(&escape_cli_text(¬e.body), 80)
2069 );
2070 if !note.tags.is_empty() {
2071 line.push_str(" | tags: ");
2072 line.push_str(
2073 ¬e
2074 .tags
2075 .iter()
2076 .map(|tag| escape_cli_text(tag))
2077 .collect::<Vec<_>>()
2078 .join(", "),
2079 );
2080 }
2081 if !note.author.is_empty() {
2082 line.push_str(" | by: ");
2083 line.push_str(&escape_cli_text(¬e.author));
2084 }
2085 lines.push(line);
2086 }
2087 let omitted = total.saturating_sub(visible.len());
2088 if omitted > 0 {
2089 lines.push(format!("... {omitted} more notes omitted"));
2090 }
2091
2092 format!("{}\n", lines.join("\n"))
2093}
2094
2095pub(crate) fn build_note(graph: &GraphFile, args: NoteAddArgs) -> Result<Note> {
2096 if graph.node_by_id(&args.node_id).is_none() {
2097 bail!("node not found: {}", args.node_id);
2098 }
2099 let ts = now_ms();
2100 let id = args.id.unwrap_or_else(|| format!("note:{ts}"));
2101 let created_at = args.created_at.unwrap_or_else(|| ts.to_string());
2102 Ok(Note {
2103 id,
2104 node_id: args.node_id,
2105 body: args.text,
2106 tags: args.tag,
2107 author: args.author.unwrap_or_default(),
2108 created_at,
2109 provenance: args.provenance.unwrap_or_default(),
2110 source_files: args.source,
2111 })
2112}
2113
/// Truncate `value` to at most `max_len` characters, appending `...` when
/// anything was cut (the ellipsis counts toward the budget). Counting is
/// char-based, so multi-byte text is never split mid-character.
fn truncate_note(value: &str, max_len: usize) -> String {
    if value.chars().count() <= max_len {
        value.to_owned()
    } else {
        let kept: String = value.chars().take(max_len.saturating_sub(3)).collect();
        format!("{kept}...")
    }
}
2122
/// Escapes backslashes and control whitespace (`\n`, `\r`, `\t`) so a value
/// stays on a single CLI output line.
fn escape_cli_text(value: &str) -> String {
    value
        .chars()
        .fold(String::with_capacity(value.len()), |mut escaped, ch| {
            match ch {
                '\\' => escaped.push_str("\\\\"),
                '\n' => escaped.push_str("\\n"),
                '\r' => escaped.push_str("\\r"),
                '\t' => escaped.push_str("\\t"),
                other => escaped.push(other),
            }
            escaped
        })
}
2136
/// Milliseconds since the Unix epoch; 0 if the system clock reads before
/// the epoch (instead of panicking).
fn now_ms() -> u128 {
    use std::time::{SystemTime, UNIX_EPOCH};

    match SystemTime::now().duration_since(UNIX_EPOCH) {
        Ok(elapsed) => elapsed.as_millis(),
        Err(_) => 0,
    }
}
2145
2146pub(crate) fn map_find_mode(mode: CliFindMode) -> output::FindMode {
2147 match mode {
2148 CliFindMode::Fuzzy => output::FindMode::Fuzzy,
2149 CliFindMode::Bm25 => output::FindMode::Bm25,
2150 CliFindMode::Vector => output::FindMode::Fuzzy,
2151 }
2152}
2153
2154pub(crate) fn render_feedback_log(cwd: &Path, args: &FeedbackLogArgs) -> Result<String> {
2155 let path = cwd.join("kg-mcp.feedback.log");
2156 if !path.exists() {
2157 return Ok(String::from("= feedback-log\nempty: no entries yet\n"));
2158 }
2159
2160 let content = std::fs::read_to_string(&path)?;
2161 let mut entries: Vec<FeedbackLogEntry> = Vec::new();
2162 for line in content.lines() {
2163 if let Some(entry) = FeedbackLogEntry::parse(line) {
2164 if let Some(ref uid) = args.uid {
2165 if &entry.uid != uid {
2166 continue;
2167 }
2168 }
2169 if let Some(ref graph) = args.graph {
2170 if &entry.graph != graph {
2171 continue;
2172 }
2173 }
2174 entries.push(entry);
2175 }
2176 }
2177
2178 entries.reverse();
2179 let shown: Vec<&FeedbackLogEntry> = entries.iter().take(args.limit).collect();
2180
2181 let mut output = vec![String::from("= feedback-log")];
2182 output.push(format!("total_entries: {}", entries.len()));
2183 output.push(format!("showing: {}", shown.len()));
2184 output.push(String::from("recent_entries:"));
2185 for e in shown {
2186 let pick = e.pick.as_deref().unwrap_or("-");
2187 let selected = e.selected.as_deref().unwrap_or("-");
2188 let graph = if e.graph.is_empty() { "-" } else { &e.graph };
2189 let queries = if e.queries.is_empty() {
2190 "-"
2191 } else {
2192 &e.queries
2193 };
2194 output.push(format!(
2195 "- {} | {} | {} | pick={} | selected={} | graph={} | {}",
2196 e.ts_ms, e.uid, e.action, pick, selected, graph, queries
2197 ));
2198 }
2199
2200 Ok(format!("{}\n", output.join("\n")))
2201}
2202
2203pub(crate) fn handle_vector_command(
2204 path: &Path,
2205 _graph: &str,
2206 graph_file: &GraphFile,
2207 command: &VectorCommand,
2208 _cwd: &Path,
2209) -> Result<String> {
2210 match command {
2211 VectorCommand::Import(args) => {
2212 let vector_path = path
2213 .parent()
2214 .map(|p| p.join(".kg.vectors.json"))
2215 .unwrap_or_else(|| PathBuf::from(".kg.vectors.json"));
2216 let store =
2217 vectors::VectorStore::import_jsonl(std::path::Path::new(&args.input), graph_file)?;
2218 store.save(&vector_path)?;
2219 Ok(format!(
2220 "+ imported {} vectors (dim={}) to {}\n",
2221 store.vectors.len(),
2222 store.dimension,
2223 vector_path.display()
2224 ))
2225 }
2226 VectorCommand::Stats(_args) => {
2227 let vector_path = path
2228 .parent()
2229 .map(|p| p.join(".kg.vectors.json"))
2230 .unwrap_or_else(|| PathBuf::from(".kg.vectors.json"));
2231 if !vector_path.exists() {
2232 return Ok(String::from("= vectors\nnot initialized\n"));
2233 }
2234 let store = vectors::VectorStore::load(&vector_path)?;
2235 let node_ids: Vec<_> = store.vectors.keys().cloned().collect();
2236 let in_graph = node_ids
2237 .iter()
2238 .filter(|id| graph_file.node_by_id(id).is_some())
2239 .count();
2240 Ok(format!(
2241 "= vectors\ndimension: {}\ntotal: {}\nin_graph: {}\n",
2242 store.dimension,
2243 store.vectors.len(),
2244 in_graph
2245 ))
2246 }
2247 }
2248}
2249
2250fn render_feedback_summary(cwd: &Path, args: &FeedbackSummaryArgs) -> Result<String> {
2251 use std::collections::HashMap;
2252
2253 let path = cwd.join("kg-mcp.feedback.log");
2254 if !path.exists() {
2255 return Ok(String::from("= feedback-summary\nNo feedback yet.\n"));
2256 }
2257
2258 let content = std::fs::read_to_string(&path)?;
2259 let mut entries: Vec<FeedbackLogEntry> = Vec::new();
2260 for line in content.lines() {
2261 if let Some(entry) = FeedbackLogEntry::parse(line) {
2262 if let Some(ref graph) = args.graph {
2263 if &entry.graph != graph {
2264 continue;
2265 }
2266 }
2267 entries.push(entry);
2268 }
2269 }
2270
2271 entries.reverse();
2272 let _shown = entries.iter().take(args.limit).collect::<Vec<_>>();
2273
2274 let mut lines = vec![String::from("= feedback-summary")];
2275 lines.push(format!("Total entries: {}", entries.len()));
2276
2277 let mut by_action: HashMap<&str, usize> = HashMap::new();
2278 let mut nil_queries: Vec<&str> = Vec::new();
2279 let mut yes_count = 0;
2280 let mut no_count = 0;
2281 let mut pick_map: HashMap<&str, usize> = HashMap::new();
2282 let mut query_counts: HashMap<&str, usize> = HashMap::new();
2283
2284 for e in &entries {
2285 *by_action.entry(&e.action).or_insert(0) += 1;
2286
2287 match e.action.as_str() {
2288 "NIL" => {
2289 if !e.queries.is_empty() {
2290 nil_queries.push(&e.queries);
2291 }
2292 }
2293 "YES" => yes_count += 1,
2294 "NO" => no_count += 1,
2295 "PICK" => {
2296 if let Some(ref sel) = e.selected {
2297 *pick_map.entry(sel).or_insert(0) += 1;
2298 }
2299 }
2300 _ => {}
2301 }
2302
2303 if !e.queries.is_empty() {
2304 *query_counts.entry(&e.queries).or_insert(0) += 1;
2305 }
2306 }
2307
2308 lines.push(String::from("\n### By response"));
2309 lines.push(format!(
2310 "YES: {} ({:.0}%)",
2311 yes_count,
2312 if !entries.is_empty() {
2313 (yes_count as f64 / entries.len() as f64) * 100.0
2314 } else {
2315 0.0
2316 }
2317 ));
2318 lines.push(format!("NO: {}", no_count));
2319 lines.push(format!("PICK: {}", by_action.get("PICK").unwrap_or(&0)));
2320 lines.push(format!("NIL: {} (no results)", nil_queries.len()));
2321
2322 if !nil_queries.is_empty() {
2323 lines.push(String::from("\n### Brakujące node'y (NIL queries)"));
2324 for q in nil_queries.iter().take(10) {
2325 lines.push(format!("- \"{}\"", q));
2326 }
2327 if nil_queries.len() > 10 {
2328 lines.push(format!(" ... i {} więcej", nil_queries.len() - 10));
2329 }
2330 }
2331
2332 if !pick_map.is_empty() {
2333 lines.push(String::from("\n### Najczęściej wybierane node'y (PICK)"));
2334 let mut sorted: Vec<_> = pick_map.iter().collect();
2335 sorted.sort_by(|a, b| b.1.cmp(a.1));
2336 for (node, count) in sorted.iter().take(10) {
2337 lines.push(format!("- {} ({}x)", node, count));
2338 }
2339 }
2340
2341 if !query_counts.is_empty() {
2342 lines.push(String::from("\n### Top wyszukiwane terminy"));
2343 let mut sorted: Vec<_> = query_counts.iter().collect();
2344 sorted.sort_by(|a, b| b.1.cmp(a.1));
2345 for (query, count) in sorted.iter().take(10) {
2346 lines.push(format!("- \"{}\" ({})", query, count));
2347 }
2348 }
2349
2350 if yes_count == 0 && no_count == 0 && nil_queries.is_empty() {
2351 lines.push(String::from(
2352 "\n(Wpływy za mało na wnioski - potrzeba więcej feedbacku)",
2353 ));
2354 } else if yes_count > no_count * 3 {
2355 lines.push(String::from(
2356 "\n✓ Feedback pozytywny - wyszukiwania działają dobrze.",
2357 ));
2358 } else if no_count > yes_count {
2359 lines.push(String::from(
2360 "\n⚠ Dużo NO - sprawdź jakość aliasów i dopasowań.",
2361 ));
2362 }
2363
2364 Ok(format!("{}\n", lines.join("\n")))
2365}
2366
2367pub(crate) fn render_feedback_summary_for_graph(
2368 cwd: &Path,
2369 graph: &str,
2370 args: &FeedbackSummaryArgs,
2371) -> Result<String> {
2372 let mut args = args.clone();
2373 args.graph = Some(graph.to_string());
2374 render_feedback_summary(cwd, &args)
2375}
2376
/// Feedback-log aggregates included in the baseline report.
#[derive(Debug, Serialize)]
struct BaselineFeedbackMetrics {
    /// Total parsed feedback entries for the graph.
    entries: usize,
    /// Count of YES actions.
    yes: usize,
    /// Count of NO actions.
    no: usize,
    /// Count of PICK actions.
    pick: usize,
    /// Count of NIL actions (searches with no result).
    nil: usize,
    /// yes / entries (0.0 when there are no entries).
    yes_rate: f64,
    /// no / entries (0.0 when there are no entries).
    no_rate: f64,
    /// nil / entries (0.0 when there are no entries).
    nil_rate: f64,
}
2388
/// Usage/cost counters included in the baseline report.
#[derive(Debug, Serialize)]
struct BaselineCostMetrics {
    /// FIND operations counted from the graph's access log.
    find_operations: usize,
    /// Total feedback-log entries for the graph.
    feedback_events: usize,
    /// feedback_events per 1000 find_operations (0.0 when no FIND ops).
    feedback_events_per_1000_find_ops: f64,
    /// Always `None` for now — see `token_cost_note`.
    token_cost_estimate: Option<f64>,
    /// Explains why the token cost estimate is unavailable.
    token_cost_note: &'static str,
}
2397
/// Retrieval metrics computed from an optional golden-set evaluation file.
#[derive(Debug, Serialize)]
struct GoldenSetMetrics {
    /// Number of golden-set cases evaluated.
    cases: usize,
    /// Cases where any expected id appeared in the find results.
    hits_any: usize,
    /// Cases where an expected id was the top-ranked result.
    top1_hits: usize,
    /// hits_any / cases.
    hit_rate: f64,
    /// top1_hits / cases.
    top1_rate: f64,
    /// Mean reciprocal rank of the first expected hit per case.
    mrr: f64,
}
2407
/// Derived quality score components (see `compute_quality_score`).
#[derive(Debug, Serialize)]
struct BaselineQualityScore {
    /// Fraction of nodes that have a description (1.0 for empty graphs).
    description_coverage: f64,
    /// Fraction of nodes that have facts (1.0 for empty graphs).
    facts_coverage: f64,
    /// Duplicate pairs normalized by possible node pairs, clamped to [0, 1].
    duplicate_penalty: f64,
    /// Missing edges normalized by candidate edges, clamped to [0, 1].
    edge_gap_penalty: f64,
    /// Weighted blend of the above, scaled to 0..=100.
    score_0_100: f64,
}
2416
/// Full payload emitted by the `baseline` command (JSON or rendered text).
#[derive(Debug, Serialize)]
struct BaselineReport {
    /// Graph name the report was generated for.
    graph: String,
    /// Raw quality snapshot counters.
    quality: crate::analysis::QualitySnapshot,
    /// Derived quality score components.
    quality_score: BaselineQualityScore,
    /// Feedback-log aggregates.
    feedback: BaselineFeedbackMetrics,
    /// Usage/cost counters.
    cost: BaselineCostMetrics,
    /// Golden-set metrics; `None` when no golden set file was supplied.
    golden: Option<GoldenSetMetrics>,
}
2426
/// One golden-set evaluation case loaded from the JSON file passed via
/// `--golden`: a query plus the node ids considered correct answers.
#[derive(Debug, Deserialize)]
struct GoldenSetCase {
    /// Search query to run.
    query: String,
    /// Node ids counted as correct results for this query.
    expected: Vec<String>,
}
2432
2433fn parse_feedback_entries(cwd: &Path, graph_name: &str) -> Result<Vec<FeedbackLogEntry>> {
2434 let path = cwd.join("kg-mcp.feedback.log");
2435 if !path.exists() {
2436 return Ok(Vec::new());
2437 }
2438
2439 let content = std::fs::read_to_string(path)?;
2440 let mut entries = Vec::new();
2441 for line in content.lines() {
2442 if let Some(entry) = FeedbackLogEntry::parse(line) {
2443 if entry.graph == graph_name {
2444 entries.push(entry);
2445 }
2446 }
2447 }
2448 Ok(entries)
2449}
2450
2451fn parse_find_operations(graph_path: &Path) -> Result<usize> {
2452 let Some(path) = access_log::first_existing_access_log_path(graph_path) else {
2453 return Ok(0);
2454 };
2455
2456 let content = std::fs::read_to_string(path)?;
2457 let mut find_ops = 0usize;
2458 for line in content.lines() {
2459 let mut parts = line.split('\t');
2460 let _ts = parts.next();
2461 if let Some(op) = parts.next() {
2462 if op == "FIND" {
2463 find_ops += 1;
2464 }
2465 }
2466 }
2467 Ok(find_ops)
2468}
2469
2470fn compute_feedback_metrics(entries: &[FeedbackLogEntry]) -> BaselineFeedbackMetrics {
2471 let mut yes = 0usize;
2472 let mut no = 0usize;
2473 let mut pick = 0usize;
2474 let mut nil = 0usize;
2475 for entry in entries {
2476 match entry.action.as_str() {
2477 "YES" => yes += 1,
2478 "NO" => no += 1,
2479 "PICK" => pick += 1,
2480 "NIL" => nil += 1,
2481 _ => {}
2482 }
2483 }
2484 let total = entries.len() as f64;
2485 BaselineFeedbackMetrics {
2486 entries: entries.len(),
2487 yes,
2488 no,
2489 pick,
2490 nil,
2491 yes_rate: if total > 0.0 { yes as f64 / total } else { 0.0 },
2492 no_rate: if total > 0.0 { no as f64 / total } else { 0.0 },
2493 nil_rate: if total > 0.0 { nil as f64 / total } else { 0.0 },
2494 }
2495}
2496
2497fn compute_quality_score(snapshot: &crate::analysis::QualitySnapshot) -> BaselineQualityScore {
2498 let total_nodes = snapshot.total_nodes as f64;
2499 let description_coverage = if total_nodes > 0.0 {
2500 (snapshot
2501 .total_nodes
2502 .saturating_sub(snapshot.missing_descriptions)) as f64
2503 / total_nodes
2504 } else {
2505 1.0
2506 };
2507 let facts_coverage = if total_nodes > 0.0 {
2508 (snapshot.total_nodes.saturating_sub(snapshot.missing_facts)) as f64 / total_nodes
2509 } else {
2510 1.0
2511 };
2512
2513 let duplicate_penalty = if snapshot.total_nodes > 1 {
2514 let max_pairs = (snapshot.total_nodes * (snapshot.total_nodes - 1) / 2) as f64;
2515 (snapshot.duplicate_pairs as f64 / max_pairs).clamp(0.0, 1.0)
2516 } else {
2517 0.0
2518 };
2519
2520 let edge_candidates = snapshot.edge_gaps.total_candidates();
2521 let edge_gap_penalty = if edge_candidates > 0 {
2522 (snapshot.edge_gaps.total_missing() as f64 / edge_candidates as f64).clamp(0.0, 1.0)
2523 } else {
2524 0.0
2525 };
2526
2527 let score = 100.0
2528 * (0.35 * description_coverage
2529 + 0.35 * facts_coverage
2530 + 0.15 * (1.0 - duplicate_penalty)
2531 + 0.15 * (1.0 - edge_gap_penalty));
2532
2533 BaselineQualityScore {
2534 description_coverage,
2535 facts_coverage,
2536 duplicate_penalty,
2537 edge_gap_penalty,
2538 score_0_100: score,
2539 }
2540}
2541
2542fn eval_golden_set(graph: &GraphFile, args: &BaselineArgs) -> Result<Option<GoldenSetMetrics>> {
2543 let Some(path) = args.golden.as_ref() else {
2544 return Ok(None);
2545 };
2546
2547 let raw = std::fs::read_to_string(path)
2548 .with_context(|| format!("failed to read golden set: {path}"))?;
2549 let cases: Vec<GoldenSetCase> =
2550 serde_json::from_str(&raw).with_context(|| format!("invalid golden set JSON: {path}"))?;
2551
2552 if cases.is_empty() {
2553 return Ok(Some(GoldenSetMetrics {
2554 cases: 0,
2555 hits_any: 0,
2556 top1_hits: 0,
2557 hit_rate: 0.0,
2558 top1_rate: 0.0,
2559 mrr: 0.0,
2560 }));
2561 }
2562
2563 let mode = map_find_mode(args.mode);
2564 let mut hits_any = 0usize;
2565 let mut top1_hits = 0usize;
2566 let mut mrr_sum = 0.0;
2567
2568 for case in &cases {
2569 let results = output::find_nodes(
2570 graph,
2571 &case.query,
2572 args.find_limit,
2573 args.include_features,
2574 mode,
2575 );
2576
2577 let mut first_rank: Option<usize> = None;
2578 for (idx, node) in results.iter().enumerate() {
2579 if case.expected.iter().any(|id| id == &node.id) {
2580 first_rank = Some(idx + 1);
2581 break;
2582 }
2583 }
2584
2585 if let Some(rank) = first_rank {
2586 hits_any += 1;
2587 if rank == 1 {
2588 top1_hits += 1;
2589 }
2590 mrr_sum += 1.0 / rank as f64;
2591 }
2592 }
2593
2594 let total = cases.len() as f64;
2595 Ok(Some(GoldenSetMetrics {
2596 cases: cases.len(),
2597 hits_any,
2598 top1_hits,
2599 hit_rate: hits_any as f64 / total,
2600 top1_rate: top1_hits as f64 / total,
2601 mrr: mrr_sum / total,
2602 }))
2603}
2604
/// Renders the `baseline` command output: a composite report combining the
/// quality snapshot, feedback-log metrics, access-log cost counters, and
/// (optionally) golden-set retrieval metrics for one graph.
///
/// With `args.json` set, the report is pretty-printed JSON; otherwise it is
/// rendered in the CLI's line-oriented text format.
pub(crate) fn render_baseline_report(
    cwd: &Path,
    graph_name: &str,
    graph: &GraphFile,
    quality: &crate::analysis::QualitySnapshot,
    args: &BaselineArgs,
) -> Result<String> {
    // Feedback metrics come from the per-workspace feedback log.
    let feedback_entries = parse_feedback_entries(cwd, graph_name)?;
    let feedback = compute_feedback_metrics(&feedback_entries);

    // FIND-operation counts come from the graph's access-log sidecar.
    let graph_root = default_graph_root(cwd);
    let graph_path = resolve_graph_path(cwd, &graph_root, graph_name)?;
    let find_operations = parse_find_operations(&graph_path)?;

    let cost = BaselineCostMetrics {
        find_operations,
        feedback_events: feedback.entries,
        feedback_events_per_1000_find_ops: if find_operations > 0 {
            (feedback.entries as f64 / find_operations as f64) * 1000.0
        } else {
            0.0
        },
        token_cost_estimate: None,
        token_cost_note: "token cost unavailable in current logs (instrumentation pending)",
    };

    let quality_score = compute_quality_score(quality);
    let golden = eval_golden_set(graph, args)?;

    let report = BaselineReport {
        graph: graph_name.to_owned(),
        // Field-by-field copy of the snapshot. NOTE(review): presumably
        // QualitySnapshot does not implement Clone — confirm before
        // replacing this with `quality.clone()`.
        quality: crate::analysis::QualitySnapshot {
            total_nodes: quality.total_nodes,
            missing_descriptions: quality.missing_descriptions,
            missing_facts: quality.missing_facts,
            duplicate_pairs: quality.duplicate_pairs,
            edge_gaps: crate::analysis::EdgeGapSnapshot {
                datastore_candidates: quality.edge_gaps.datastore_candidates,
                datastore_missing_stored_in: quality.edge_gaps.datastore_missing_stored_in,
                process_candidates: quality.edge_gaps.process_candidates,
                process_missing_incoming: quality.edge_gaps.process_missing_incoming,
            },
        },
        quality_score,
        feedback,
        cost,
        golden,
    };

    // JSON mode: serialize the whole report and return early.
    if args.json {
        let rendered = serde_json::to_string_pretty(&report).unwrap_or_else(|_| "{}".to_owned());
        return Ok(format!("{rendered}\n"));
    }

    // Text mode: line-oriented sections (quality, feedback, cost, golden).
    let mut lines = vec![String::from("= baseline")];
    lines.push(format!("graph: {}", report.graph));
    lines.push(format!(
        "quality_score_0_100: {:.1}",
        report.quality_score.score_0_100
    ));
    lines.push(String::from("quality:"));
    lines.push(format!("- total_nodes: {}", report.quality.total_nodes));
    lines.push(format!(
        "- missing_descriptions: {} ({:.1}%)",
        report.quality.missing_descriptions,
        // coverage.mul_add(-100.0, 100.0) == 100 - coverage*100,
        // i.e. the MISSING percentage.
        report
            .quality_score
            .description_coverage
            .mul_add(-100.0, 100.0)
    ));
    lines.push(format!(
        "- missing_facts: {} ({:.1}%)",
        report.quality.missing_facts,
        report.quality_score.facts_coverage.mul_add(-100.0, 100.0)
    ));
    lines.push(format!(
        "- duplicate_pairs: {}",
        report.quality.duplicate_pairs
    ));
    lines.push(format!(
        "- edge_gaps: {} / {}",
        report.quality.edge_gaps.total_missing(),
        report.quality.edge_gaps.total_candidates()
    ));

    lines.push(String::from("feedback:"));
    lines.push(format!("- entries: {}", report.feedback.entries));
    lines.push(format!(
        "- YES/NO/NIL/PICK: {}/{}/{}/{}",
        report.feedback.yes, report.feedback.no, report.feedback.nil, report.feedback.pick
    ));
    lines.push(format!(
        "- yes_rate: {:.1}%",
        report.feedback.yes_rate * 100.0
    ));
    lines.push(format!(
        "- no_rate: {:.1}%",
        report.feedback.no_rate * 100.0
    ));

    lines.push(String::from("cost:"));
    lines.push(format!(
        "- find_operations: {}",
        report.cost.find_operations
    ));
    lines.push(format!(
        "- feedback_events: {}",
        report.cost.feedback_events
    ));
    lines.push(format!(
        "- feedback_events_per_1000_find_ops: {:.1}",
        report.cost.feedback_events_per_1000_find_ops
    ));
    lines.push(format!("- token_cost: {}", report.cost.token_cost_note));

    // Golden-set section only when a golden set was evaluated.
    if let Some(golden) = report.golden {
        lines.push(String::from("golden_set:"));
        lines.push(format!("- cases: {}", golden.cases));
        lines.push(format!("- hit_rate: {:.1}%", golden.hit_rate * 100.0));
        lines.push(format!("- top1_rate: {:.1}%", golden.top1_rate * 100.0));
        lines.push(format!("- mrr: {:.3}", golden.mrr));
    }

    Ok(format!("{}\n", lines.join("\n")))
}
2730
/// One parsed line of the `kg-mcp.feedback.log` file.
///
/// Lines are tab-separated `key=value` pairs; see [`FeedbackLogEntry::parse`].
#[derive(Debug, Clone)]
struct FeedbackLogEntry {
    /// Raw timestamp string (the `ts_ms` key — milliseconds per its name).
    ts_ms: String,
    /// Short user id that produced the feedback.
    uid: String,
    /// Feedback action, e.g. YES / NO / PICK / NIL.
    action: String,
    /// Optional pick value; `None` when the log had `-`.
    pick: Option<String>,
    /// Optional selected node id; `None` when the log had `-`.
    selected: Option<String>,
    /// Graph name; empty when the log had `-` or no `graph` key.
    graph: String,
    /// Associated query string(s); empty when `-` or absent.
    queries: String,
}
2741
2742impl FeedbackLogEntry {
2743 fn parse(line: &str) -> Option<Self> {
2744 let mut ts_ms: Option<String> = None;
2747 let mut uid: Option<String> = None;
2748 let mut action: Option<String> = None;
2749 let mut pick: Option<String> = None;
2750 let mut selected: Option<String> = None;
2751 let mut graph: Option<String> = None;
2752 let mut queries: Option<String> = None;
2753
2754 for part in line.split('\t') {
2755 let (k, v) = part.split_once('=')?;
2756 let v = v.trim();
2757 match k {
2758 "ts_ms" => ts_ms = Some(v.to_owned()),
2759 "uid" => uid = Some(v.to_owned()),
2760 "action" => action = Some(v.to_owned()),
2761 "pick" => {
2762 if v != "-" {
2763 pick = Some(v.to_owned());
2764 }
2765 }
2766 "selected" => {
2767 if v != "-" {
2768 selected = Some(v.to_owned());
2769 }
2770 }
2771 "graph" => {
2772 if v != "-" {
2773 graph = Some(v.to_owned());
2774 }
2775 }
2776 "queries" => {
2777 if v != "-" {
2778 queries = Some(v.to_owned());
2779 }
2780 }
2781 _ => {}
2782 }
2783 }
2784
2785 Some(Self {
2786 ts_ms: ts_ms?,
2787 uid: uid?,
2788 action: action?,
2789 pick,
2790 selected,
2791 graph: graph.unwrap_or_default(),
2792 queries: queries.unwrap_or_default(),
2793 })
2794 }
2795}
2796
2797pub fn default_graph_root(cwd: &Path) -> PathBuf {
2806 let home = std::env::var_os("HOME")
2807 .map(PathBuf::from)
2808 .or_else(|| std::env::var_os("USERPROFILE").map(PathBuf::from));
2809 graph_root_from(home.as_deref(), cwd)
2810}
2811
/// Builds `<base>/.kg/graphs` where `<base>` is the home directory when
/// known, otherwise `cwd`.
fn graph_root_from(home: Option<&Path>, cwd: &Path) -> PathBuf {
    home.unwrap_or(cwd).join(".kg").join("graphs")
}
2818
2819pub fn resolve_graph_path(cwd: &Path, graph_root: &Path, graph: &str) -> Result<PathBuf> {
2824 let store = graph_store(cwd, graph_root, false)?;
2825 store.resolve_graph_path(graph)
2826}
2827
2828pub fn feedback_nudge_percent(cwd: &Path) -> Result<u8> {
2830 Ok(config::KgConfig::discover(cwd)?
2831 .map(|(_, config)| config.nudge_percent())
2832 .unwrap_or(config::DEFAULT_NUDGE_PERCENT))
2833}
2834
/// Returns the workspace's short user uid.
///
/// Delegates to `config::ensure_user_short_uid`; the `ensure_` name
/// suggests it creates and persists a uid on first use — confirm in the
/// `config` module.
pub fn sidecar_user_short_uid(cwd: &Path) -> String {
    config::ensure_user_short_uid(cwd)
}
2839
/// Appends a feedback record to the graph's sidecar log.
///
/// The write result is discarded (`let _ =`) so a logging failure never
/// propagates to the caller — presumably intentional best-effort behavior.
pub fn append_kg_feedback(graph_path: &Path, user_short_uid: &str, node_id: &str, feedback: &str) {
    let _ = kg_sidecar::append_feedback_with_uid(graph_path, user_short_uid, node_id, feedback);
}
2844
2845pub(crate) fn render_check(graph: &GraphFile, cwd: &Path, args: &CheckArgs) -> String {
2850 let report = validate_graph(graph, cwd, args.deep, args.base_dir.as_deref());
2851 format_validation_report(
2852 "check",
2853 &report.errors,
2854 &report.warnings,
2855 args.errors_only,
2856 args.warnings_only,
2857 args.limit,
2858 )
2859}
2860
2861pub(crate) fn render_audit(graph: &GraphFile, cwd: &Path, args: &AuditArgs) -> String {
2862 let report = validate_graph(graph, cwd, args.deep, args.base_dir.as_deref());
2863 format_validation_report(
2864 "audit",
2865 &report.errors,
2866 &report.warnings,
2867 args.errors_only,
2868 args.warnings_only,
2869 args.limit,
2870 )
2871}
2872
/// Formats a validation report: a header, VALID/INVALID status (driven by
/// errors only), counts, and the error/warning lists each capped at `limit`.
/// `errors_only` suppresses the warning list; `warnings_only` suppresses
/// the error list.
fn format_validation_report(
    header: &str,
    errors: &[String],
    warnings: &[String],
    errors_only: bool,
    warnings_only: bool,
    limit: usize,
) -> String {
    let status = if errors.is_empty() { "VALID" } else { "INVALID" };
    let mut out = vec![
        format!("= {header}"),
        format!("status: {status}"),
        format!("errors: {}", errors.len()),
        format!("warnings: {}", warnings.len()),
    ];
    if !warnings_only {
        out.push("error-list:".to_owned());
        out.extend(errors.iter().take(limit).map(|error| format!("- {error}")));
    }
    if !errors_only {
        out.push("warning-list:".to_owned());
        out.extend(warnings.iter().take(limit).map(|warning| format!("- {warning}")));
    }
    format!("{}\n", out.join("\n"))
}
2906
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::tempdir;

    /// Loads the example graph fixture shipped next to the crate sources.
    fn fixture_graph() -> GraphFile {
        serde_json::from_str(include_str!("../graph-example-fridge.json")).expect("fixture graph")
    }

    /// Runs the CLI argument pipeline without exiting the process on error.
    fn exec_safe(args: &[&str], cwd: &Path) -> Result<String> {
        run_args_safe(args.iter().map(OsString::from), cwd)
    }

    /// Home directory wins for the graph root; cwd is only the fallback.
    #[test]
    fn graph_root_prefers_home_directory() {
        let cwd = Path::new("/tmp/workspace");
        let home = Path::new("/tmp/home");
        assert_eq!(
            graph_root_from(Some(home), cwd),
            PathBuf::from("/tmp/home/.kg/graphs")
        );
        assert_eq!(
            graph_root_from(None, cwd),
            PathBuf::from("/tmp/workspace/.kg/graphs")
        );
    }

    /// Smoke-checks the compact node rendering: title, aliases, and
    /// outgoing HAS edges all appear.
    #[test]
    fn get_renders_compact_symbolic_view() {
        let graph = fixture_graph();
        let node = graph.node_by_id("concept:refrigerator").expect("node");
        let rendered = output::render_node(&graph, node, false);
        assert!(rendered.contains("# concept:refrigerator | Lodowka"));
        assert!(rendered.contains("aka: Chlodziarka, Fridge"));
        assert!(rendered.contains("-> HAS | concept:cooling_chamber | Komora Chlodzenia"));
        assert!(rendered.contains("-> HAS | concept:temperature | Temperatura"));
    }

    /// `--help` exits via a clap error whose rendering carries the banner
    /// and the MVP command names.
    #[test]
    fn help_lists_mvp_commands() {
        let help = Cli::try_parse_from(["kg", "--help"]).expect_err("help exits");
        let rendered = help.to_string();
        assert!(rendered.contains("▓ ▄▄"));
        assert!(rendered.contains("create"));
        assert!(rendered.contains("list"));
        assert!(rendered.contains("feedback-log"));
        assert!(rendered.contains("fridge node"));
        assert!(rendered.contains("edge"));
        assert!(rendered.contains("quality"));
        assert!(rendered.contains("kg graph fridge stats"));
    }

    /// Parse failures surface as Err values, not process exits.
    #[test]
    fn run_args_safe_returns_error_instead_of_exiting() {
        let dir = tempdir().expect("tempdir");
        let err = exec_safe(&["kg", "create"], dir.path()).expect_err("parse error");
        let rendered = err.to_string();
        assert!(rendered.contains("required arguments were not provided"));
        assert!(rendered.contains("<GRAPH_NAME>"));
    }
}