1mod access_log;
2mod analysis;
3mod cli;
4mod config;
5mod event_log;
6mod export_html;
7mod graph;
8mod import_csv;
9mod import_markdown;
10mod index;
11mod init;
12mod kql;
13mod ops;
14pub mod output;
15mod schema;
16mod storage;
17mod validate;
18mod vectors;
19
20use graph::Note;
22pub use graph::{Edge, EdgeProperties, GraphFile, Metadata, Node, NodeProperties};
23pub use output::FindMode;
24
25use std::ffi::OsString;
26use std::path::{Path, PathBuf};
27
28use index::Bm25Index;
29
30use anyhow::{Context, Result, anyhow, bail};
31use clap::Parser;
32use cli::{
33 AddEdgeArgs, AddNodeArgs, AsOfArgs, AuditArgs, CheckArgs, Cli, Command, DiffAsOfArgs,
34 EdgeCommand, ExportDotArgs, ExportGraphmlArgs, ExportHtmlArgs, ExportJsonArgs, ExportMdArgs,
35 ExportMermaidArgs, FeedbackLogArgs, FeedbackSummaryArgs, FindMode as CliFindMode, GraphCommand,
36 HistoryArgs, ImportCsvArgs, ImportJsonArgs, ImportMarkdownArgs, KqlArgs, ListNodesArgs,
37 MergeStrategy, ModifyNodeArgs, NodeCommand, NoteAddArgs, NoteCommand, NoteListArgs,
38 QualityCommand, RemoveEdgeArgs, SplitArgs, TemporalSource, TimelineArgs, VectorCommand,
39};
40use serde::Serialize;
41use serde_json::Value;
42use storage::{GraphStore, graph_store, load_graph_index};
44
45use analysis::{
46 render_duplicates, render_duplicates_json, render_edge_gaps, render_edge_gaps_json,
47 render_missing_descriptions, render_missing_descriptions_json, render_missing_facts,
48 render_missing_facts_json, render_stats,
49};
50use ops::{add_edge, add_node, modify_node, remove_edge, remove_node};
51use schema::{GraphSchema, SchemaViolation};
52use validate::validate_graph;
53
54fn format_schema_violations(violations: &[SchemaViolation]) -> String {
59 let mut lines = Vec::new();
60 lines.push("schema violations:".to_owned());
61 for v in violations {
62 lines.push(format!(" - {}", v.message));
63 }
64 lines.join("\n")
65}
66
67fn bail_on_schema_violations(violations: &[SchemaViolation]) -> Result<()> {
68 if !violations.is_empty() {
69 anyhow::bail!("{}", format_schema_violations(violations));
70 }
71 Ok(())
72}
73
74fn validate_graph_with_schema(graph: &GraphFile, schema: &GraphSchema) -> Vec<SchemaViolation> {
75 let mut all_violations = Vec::new();
76 for node in &graph.nodes {
77 all_violations.extend(schema.validate_node_add(node));
78 }
79 let node_type_map: std::collections::HashMap<&str, &str> = graph
80 .nodes
81 .iter()
82 .map(|n| (n.id.as_str(), n.r#type.as_str()))
83 .collect();
84 for edge in &graph.edges {
85 if let (Some(src_type), Some(tgt_type)) = (
86 node_type_map.get(edge.source_id.as_str()),
87 node_type_map.get(edge.target_id.as_str()),
88 ) {
89 all_violations.extend(schema.validate_edge_add(
90 &edge.source_id,
91 src_type,
92 &edge.relation,
93 &edge.target_id,
94 tgt_type,
95 ));
96 }
97 }
98 all_violations.extend(schema.validate_uniqueness(&graph.nodes));
99 all_violations
100}
101
102pub fn run<I>(args: I, cwd: &Path) -> Result<()>
110where
111 I: IntoIterator<Item = OsString>,
112{
113 let rendered = run_args(args, cwd)?;
114 print!("{rendered}");
115 Ok(())
116}
117
118pub fn run_args<I>(args: I, cwd: &Path) -> Result<String>
122where
123 I: IntoIterator<Item = OsString>,
124{
125 let cli = Cli::parse_from(normalize_args(args));
126 let graph_root = default_graph_root(cwd);
127 execute(cli, cwd, &graph_root)
128}
129
130pub fn run_args_safe<I>(args: I, cwd: &Path) -> Result<String>
135where
136 I: IntoIterator<Item = OsString>,
137{
138 let cli = Cli::try_parse_from(normalize_args(args)).map_err(|err| anyhow!(err.to_string()))?;
139 let graph_root = default_graph_root(cwd);
140 execute(cli, cwd, &graph_root)
141}
142
/// Rewrites the argument list so that a bare graph name becomes an implicit
/// `graph` subcommand invocation: `kg mygraph stats` -> `kg graph mygraph stats`.
/// Arguments starting with `-` and known top-level subcommands pass through
/// unchanged, as does an argv with nothing after the binary name.
fn normalize_args<I>(args: I) -> Vec<OsString>
where
    I: IntoIterator<Item = OsString>,
{
    let collected: Vec<OsString> = args.into_iter().collect();
    if collected.len() <= 1 {
        return collected;
    }
    let first = collected[1].to_string_lossy();
    let is_top_level = first.starts_with('-')
        || matches!(
            first.as_ref(),
            "init"
                | "create"
                | "diff"
                | "merge"
                | "graph"
                | "list"
                | "feedback-log"
                | "feedback-summary"
        );
    if is_top_level {
        return collected;
    }
    // Anything else is treated as a graph name: splice in `graph` after argv[0].
    let mut normalized = vec![collected[0].clone(), OsString::from("graph")];
    normalized.extend(collected.into_iter().skip(1));
    normalized
}
174
/// Central command dispatcher: routes a parsed `Cli` to the matching handler
/// and returns the rendered output string. Mutating commands follow the same
/// sequence: mutate the in-memory `GraphFile`, persist via the store, then
/// record an event snapshot (order matters — do not reorder).
fn execute(cli: Cli, cwd: &Path, graph_root: &Path) -> Result<String> {
    match cli.command {
        Command::Init(args) => Ok(init::render_init(&args)),
        Command::Create { graph_name } => {
            let store = graph_store(cwd, graph_root)?;
            let path = store.create_graph(&graph_name)?;
            let graph_file = store.load_graph(&path)?;
            append_event_snapshot(&path, "graph.create", Some(graph_name.clone()), &graph_file)?;
            Ok(format!("+ created {}\n", path.display()))
        }
        Command::Diff { left, right, json } => {
            let store = graph_store(cwd, graph_root)?;
            if json {
                render_graph_diff_json(store.as_ref(), &left, &right)
            } else {
                render_graph_diff(store.as_ref(), &left, &right)
            }
        }
        Command::Merge {
            target,
            source,
            strategy,
        } => {
            let store = graph_store(cwd, graph_root)?;
            merge_graphs(store.as_ref(), &target, &source, strategy)
        }
        Command::List(args) => {
            let store = graph_store(cwd, graph_root)?;
            if args.json {
                render_graph_list_json(store.as_ref())
            } else {
                render_graph_list(store.as_ref(), args.full)
            }
        }
        Command::FeedbackLog(args) => render_feedback_log(cwd, &args),
        Command::Graph { graph, command } => {
            // All graph-scoped subcommands share one resolved path and one
            // loaded graph file; schema discovery failure is treated as
            // "no schema" (validation is skipped).
            let store = graph_store(cwd, graph_root)?;
            let path = store.resolve_graph_path(&graph)?;
            let mut graph_file = store.load_graph(&path)?;
            let schema = GraphSchema::discover(cwd).ok().flatten().map(|(_, s)| s);

            match command {
                GraphCommand::Node { command } => match command {
                    NodeCommand::Find {
                        queries,
                        limit,
                        include_features,
                        mode,
                        full,
                        json,
                        vector_query,
                    } => {
                        // Vector mode is handled separately and returns early:
                        // it searches the sidecar vector store, not the index.
                        if mode == cli::FindMode::Vector {
                            let result = if let Some(query_vec) = vector_query {
                                // Vector store lives next to the graph file.
                                let vector_path = path
                                    .parent()
                                    .map(|p| p.join(".kg.vectors.json"))
                                    .unwrap_or_else(|| PathBuf::from(".kg.vectors.json"));
                                if !vector_path.exists() {
                                    anyhow::bail!(
                                        "vector store not found. Run: kg {} vectors import --input <file.jsonl>",
                                        graph
                                    );
                                }
                                let store = vectors::VectorStore::load(&vector_path)?;
                                let node_ids: Vec<_> =
                                    graph_file.nodes.iter().map(|n| n.id.clone()).collect();
                                let results = store.search(&query_vec, &node_ids, limit, 0.0);
                                let mut lines =
                                    vec![format!("= vector-search ({} results)", results.len())];
                                for (node_id, score) in &results {
                                    if let Some(node) = graph_file.node_by_id(node_id) {
                                        lines.push(format!(
                                            "# {} | {} [{}] ({:.3})",
                                            node.id, node.name, node.r#type, score
                                        ));
                                    }
                                }
                                format!("{}\n", lines.join("\n"))
                            } else {
                                anyhow::bail!("--vector-query required for --mode vector")
                            };
                            return Ok(result);
                        }

                        // BM25 index is only loaded when that mode is chosen;
                        // a missing/broken index silently degrades to None.
                        let bm25_index = if mode == cli::FindMode::Bm25 {
                            load_graph_index(&path).ok().flatten()
                        } else {
                            None
                        };

                        // Time the search so the access log records latency.
                        let timer = access_log::Timer::new();
                        let results_count = output::count_find_results_with_index(
                            &graph_file,
                            &queries,
                            limit,
                            include_features,
                            map_find_mode(mode),
                            bm25_index.as_ref(),
                        );
                        let result = if json {
                            render_find_json_with_index(
                                &graph_file,
                                &queries,
                                limit,
                                include_features,
                                map_find_mode(mode),
                                bm25_index.as_ref(),
                            )
                        } else {
                            output::render_find_with_index(
                                &graph_file,
                                &queries,
                                limit,
                                include_features,
                                map_find_mode(mode),
                                full,
                                bm25_index.as_ref(),
                            )
                        };
                        let duration_ms = timer.elapsed_ms();

                        // Access-log failures are non-fatal: warn and continue.
                        for query in &queries {
                            let entry = access_log::AccessLogEntry::new(
                                query.clone(),
                                results_count,
                                duration_ms,
                            );
                            if let Err(e) = access_log::append_entry(&path, &entry) {
                                eprintln!("warning: failed to log access: {}", e);
                            }
                        }

                        Ok(result)
                    }

                    NodeCommand::Get {
                        id,
                        include_features,
                        full,
                        json,
                    } => {
                        let timer = access_log::Timer::new();
                        let node = graph_file
                            .node_by_id(&id)
                            .ok_or_else(|| anyhow!("node not found: {id}"))?;
                        // Feature nodes are opt-in, mirroring find's default.
                        if !include_features && node.r#type == "Feature" {
                            bail!("feature nodes are hidden by default; use --include-features");
                        }
                        let result = Ok(if json {
                            render_node_json(node)
                        } else {
                            output::render_node(&graph_file, node, full)
                        });

                        let duration_ms = timer.elapsed_ms();
                        let entry = access_log::AccessLogEntry::node_get(id.clone(), duration_ms);
                        if let Err(e) = access_log::append_entry(&path, &entry) {
                            eprintln!("warning: failed to log access: {}", e);
                        }

                        result
                    }

                    NodeCommand::Add(AddNodeArgs {
                        id,
                        node_type,
                        name,
                        description,
                        domain_area,
                        provenance,
                        confidence,
                        created_at,
                        fact,
                        alias,
                        source,
                    }) => {
                        let node = Node {
                            id,
                            r#type: node_type,
                            name,
                            properties: NodeProperties {
                                description,
                                domain_area,
                                provenance,
                                confidence,
                                created_at,
                                key_facts: fact,
                                alias,
                                ..NodeProperties::default()
                            },
                            source_files: source,
                        };
                        // Schema validation happens before mutation so a
                        // violating node is never written.
                        if let Some(schema) = schema {
                            let violations = schema.validate_node_add(&node);
                            bail_on_schema_violations(&violations)?;
                        }
                        add_node(&mut graph_file, node)?;
                        store.save_graph(&path, &graph_file)?;
                        append_event_snapshot(
                            &path,
                            "node.add",
                            Some(graph_file.nodes.last().expect("node").id.clone()),
                            &graph_file,
                        )?;
                        Ok(format!(
                            "+ node {}\n",
                            graph_file.nodes.last().expect("node").id
                        ))
                    }

                    NodeCommand::Modify(ModifyNodeArgs {
                        id,
                        node_type,
                        name,
                        description,
                        domain_area,
                        provenance,
                        confidence,
                        created_at,
                        fact,
                        alias,
                        source,
                    }) => {
                        modify_node(
                            &mut graph_file,
                            &id,
                            node_type.clone(),
                            name.clone(),
                            description.clone(),
                            domain_area.clone(),
                            provenance.clone(),
                            confidence,
                            created_at.clone(),
                            fact.clone(),
                            alias.clone(),
                            source.clone(),
                        )?;
                        // NOTE(review): validation runs AFTER the in-memory
                        // modify but before save, so a violation leaves the
                        // file on disk untouched.
                        if let Some(schema) = schema {
                            if let Some(node) = graph_file.node_by_id(&id) {
                                let violations = schema.validate_node_add(node);
                                bail_on_schema_violations(&violations)?;
                            }
                        }
                        store.save_graph(&path, &graph_file)?;
                        append_event_snapshot(&path, "node.modify", Some(id.clone()), &graph_file)?;
                        Ok(format!("~ node {id}\n"))
                    }

                    NodeCommand::Rename { from, to } => {
                        if graph_file.node_by_id(&to).is_some() {
                            bail!("node already exists: {to}");
                        }
                        let Some(node) = graph_file.node_by_id_mut(&from) else {
                            bail!("node not found: {from}");
                        };
                        node.id = to.clone();
                        // Rewrite every reference to the renamed node: edge
                        // endpoints and note attachments.
                        for edge in &mut graph_file.edges {
                            if edge.source_id == from {
                                edge.source_id = to.clone();
                            }
                            if edge.target_id == from {
                                edge.target_id = to.clone();
                            }
                        }
                        for note in &mut graph_file.notes {
                            if note.node_id == from {
                                note.node_id = to.clone();
                            }
                        }
                        store.save_graph(&path, &graph_file)?;
                        append_event_snapshot(
                            &path,
                            "node.rename",
                            Some(format!("{from} -> {to}")),
                            &graph_file,
                        )?;
                        Ok(format!("~ node {from} -> {to}\n"))
                    }

                    NodeCommand::Remove { id } => {
                        let removed_edges = remove_node(&mut graph_file, &id)?;
                        store.save_graph(&path, &graph_file)?;
                        append_event_snapshot(&path, "node.remove", Some(id.clone()), &graph_file)?;
                        Ok(format!("- node {id} ({removed_edges} edges removed)\n"))
                    }
                    NodeCommand::List(args) => Ok(if args.json {
                        render_node_list_json(&graph_file, &args)
                    } else {
                        render_node_list(&graph_file, &args)
                    }),
                },

                GraphCommand::Edge { command } => match command {
                    EdgeCommand::Add(AddEdgeArgs {
                        source_id,
                        relation,
                        target_id,
                        detail,
                    }) => {
                        // Edge schema validation only runs when both endpoint
                        // nodes exist; otherwise add_edge decides the outcome.
                        if let Some(schema) = schema {
                            let source_node = graph_file.node_by_id(&source_id);
                            let target_node = graph_file.node_by_id(&target_id);
                            if let (Some(src), Some(tgt)) = (source_node, target_node) {
                                let violations = schema.validate_edge_add(
                                    &source_id,
                                    &src.r#type,
                                    &relation,
                                    &target_id,
                                    &tgt.r#type,
                                );
                                bail_on_schema_violations(&violations)?;
                            }
                        }
                        add_edge(
                            &mut graph_file,
                            Edge {
                                source_id: source_id.clone(),
                                relation: relation.clone(),
                                target_id: target_id.clone(),
                                properties: EdgeProperties {
                                    detail,
                                    ..EdgeProperties::default()
                                },
                            },
                        )?;
                        store.save_graph(&path, &graph_file)?;
                        append_event_snapshot(
                            &path,
                            "edge.add",
                            Some(format!("{source_id} {relation} {target_id}")),
                            &graph_file,
                        )?;
                        Ok(format!("+ edge {source_id} {relation} {target_id}\n"))
                    }
                    EdgeCommand::Remove(RemoveEdgeArgs {
                        source_id,
                        relation,
                        target_id,
                    }) => {
                        remove_edge(&mut graph_file, &source_id, &relation, &target_id)?;
                        store.save_graph(&path, &graph_file)?;
                        append_event_snapshot(
                            &path,
                            "edge.remove",
                            Some(format!("{source_id} {relation} {target_id}")),
                            &graph_file,
                        )?;
                        Ok(format!("- edge {source_id} {relation} {target_id}\n"))
                    }
                },

                GraphCommand::Note { command } => match command {
                    NoteCommand::Add(args) => {
                        let note = build_note(&graph_file, args)?;
                        graph_file.notes.push(note.clone());
                        store.save_graph(&path, &graph_file)?;
                        append_event_snapshot(
                            &path,
                            "note.add",
                            Some(note.id.clone()),
                            &graph_file,
                        )?;
                        Ok(format!("+ note {}\n", note.id))
                    }
                    NoteCommand::List(args) => Ok(render_note_list(&graph_file, &args)),
                    NoteCommand::Remove { id } => {
                        // Removal is detected by comparing lengths before and
                        // after retain; zero removed means the id was unknown.
                        let before = graph_file.notes.len();
                        graph_file.notes.retain(|note| note.id != id);
                        let removed = before.saturating_sub(graph_file.notes.len());
                        if removed == 0 {
                            bail!("note not found: {id}");
                        }
                        store.save_graph(&path, &graph_file)?;
                        append_event_snapshot(&path, "note.remove", Some(id.clone()), &graph_file)?;
                        Ok(format!("- note {id}\n"))
                    }
                },

                GraphCommand::Stats(args) => Ok(render_stats(&graph_file, &args)),
                GraphCommand::Check(args) => Ok(render_check(&graph_file, cwd, &args)),
                GraphCommand::Audit(args) => Ok(render_audit(&graph_file, cwd, &args)),

                GraphCommand::Quality { command } => match command {
                    QualityCommand::MissingDescriptions(args) => Ok(if args.json {
                        render_missing_descriptions_json(&graph_file, &args)
                    } else {
                        render_missing_descriptions(&graph_file, &args)
                    }),
                    QualityCommand::MissingFacts(args) => Ok(if args.json {
                        render_missing_facts_json(&graph_file, &args)
                    } else {
                        render_missing_facts(&graph_file, &args)
                    }),
                    QualityCommand::Duplicates(args) => Ok(if args.json {
                        render_duplicates_json(&graph_file, &args)
                    } else {
                        render_duplicates(&graph_file, &args)
                    }),
                    QualityCommand::EdgeGaps(args) => Ok(if args.json {
                        render_edge_gaps_json(&graph_file, &args)
                    } else {
                        render_edge_gaps(&graph_file, &args)
                    }),
                },

                // Top-level aliases for the `quality` subcommands above —
                // presumably kept for backward compatibility; TODO confirm.
                GraphCommand::MissingDescriptions(args) => Ok(if args.json {
                    render_missing_descriptions_json(&graph_file, &args)
                } else {
                    render_missing_descriptions(&graph_file, &args)
                }),
                GraphCommand::MissingFacts(args) => Ok(if args.json {
                    render_missing_facts_json(&graph_file, &args)
                } else {
                    render_missing_facts(&graph_file, &args)
                }),
                GraphCommand::Duplicates(args) => Ok(if args.json {
                    render_duplicates_json(&graph_file, &args)
                } else {
                    render_duplicates(&graph_file, &args)
                }),
                GraphCommand::EdgeGaps(args) => Ok(if args.json {
                    render_edge_gaps_json(&graph_file, &args)
                } else {
                    render_edge_gaps(&graph_file, &args)
                }),

                GraphCommand::ExportHtml(ExportHtmlArgs { output, title }) => {
                    export_html::export_html(
                        &graph_file,
                        &graph,
                        export_html::ExportHtmlOptions {
                            output: output.as_deref(),
                            title: title.as_deref(),
                        },
                    )
                }

                GraphCommand::AccessLog(args) => {
                    Ok(access_log::read_log(&path, args.limit, args.show_empty)?)
                }

                GraphCommand::AccessStats(_) => Ok(access_log::log_stats(&path)?),
                GraphCommand::ImportCsv(args) => import_graph_csv(
                    &path,
                    &graph,
                    &mut graph_file,
                    store.as_ref(),
                    &args,
                    schema.as_ref(),
                ),
                GraphCommand::ImportMarkdown(args) => import_graph_markdown(
                    &path,
                    &graph,
                    &mut graph_file,
                    store.as_ref(),
                    &args,
                    schema.as_ref(),
                ),
                GraphCommand::Kql(KqlArgs { query, json }) => {
                    if json {
                        Ok(
                            serde_json::to_string_pretty(&kql::query(&graph_file, &query)?)
                                .unwrap_or_else(|_| "{}".to_owned()),
                        )
                    } else {
                        kql::render_query(&graph_file, &query)
                    }
                }
                GraphCommand::ExportJson(ExportJsonArgs { output }) => {
                    export_graph_json(&graph, &graph_file, output.as_deref())
                }
                GraphCommand::ImportJson(ImportJsonArgs { input }) => {
                    import_graph_json(&path, &graph, &input, store.as_ref())
                }
                GraphCommand::ExportDot(args) => export_graph_dot(&graph, &graph_file, &args),
                GraphCommand::ExportMermaid(args) => {
                    export_graph_mermaid(&graph, &graph_file, &args)
                }
                GraphCommand::ExportGraphml(args) => {
                    export_graph_graphml(&graph, &graph_file, &args)
                }
                GraphCommand::ExportMd(args) => export_graph_md(&graph, &graph_file, &args, cwd),
                GraphCommand::Split(args) => split_graph(&graph, &graph_file, &args),
                GraphCommand::Vector { command } => {
                    handle_vector_command(&path, &graph, &graph_file, &command, cwd)
                }
                GraphCommand::AsOf(args) => export_graph_as_of(&path, &graph, &args),
                GraphCommand::History(args) => Ok(render_graph_history(&path, &graph, &args)?),
                GraphCommand::Timeline(args) => Ok(render_graph_timeline(&path, &graph, &args)?),
                GraphCommand::DiffAsOf(args) => Ok(if args.json {
                    render_graph_diff_as_of_json(&path, &graph, &args)?
                } else {
                    render_graph_diff_as_of(&path, &graph, &args)?
                }),
                GraphCommand::FeedbackSummary(args) => {
                    Ok(render_feedback_summary_for_graph(cwd, &graph, &args)?)
                }
                GraphCommand::List(args) => Ok(if args.json {
                    render_node_list_json(&graph_file, &args)
                } else {
                    render_node_list(&graph_file, &args)
                }),
            }
        }
    }
}
687
688fn render_graph_list(store: &dyn GraphStore, full: bool) -> Result<String> {
689 let graphs = store.list_graphs()?;
690
691 let mut lines = vec![format!("= graphs ({})", graphs.len())];
692 for (name, path) in graphs {
693 if full {
694 lines.push(format!("- {name} | {}", path.display()));
695 } else {
696 lines.push(format!("- {name}"));
697 }
698 }
699 Ok(format!("{}\n", lines.join("\n")))
700}
701
/// One entry in the JSON graph listing: graph name and its on-disk path.
#[derive(Debug, Serialize)]
struct GraphListEntry {
    name: String,
    path: String,
}
707
/// Top-level JSON payload for `list --json`: `{ "graphs": [...] }`.
#[derive(Debug, Serialize)]
struct GraphListResponse {
    graphs: Vec<GraphListEntry>,
}
712
713fn render_graph_list_json(store: &dyn GraphStore) -> Result<String> {
714 let graphs = store.list_graphs()?;
715 let entries = graphs
716 .into_iter()
717 .map(|(name, path)| GraphListEntry {
718 name,
719 path: path.display().to_string(),
720 })
721 .collect();
722 let payload = GraphListResponse { graphs: entries };
723 Ok(serde_json::to_string_pretty(&payload).unwrap_or_else(|_| "{}".to_owned()))
724}
725
/// Per-query bucket in the `find --json` payload: the query string, the
/// number of matches, and the matched nodes themselves.
#[derive(Debug, Serialize)]
struct FindQueryResult {
    query: String,
    count: usize,
    nodes: Vec<Node>,
}
732
/// Top-level `find --json` payload: total hit count across all queries plus
/// one [`FindQueryResult`] per query.
#[derive(Debug, Serialize)]
struct FindResponse {
    total: usize,
    queries: Vec<FindQueryResult>,
}
738
739fn render_find_json_with_index(
740 graph: &GraphFile,
741 queries: &[String],
742 limit: usize,
743 include_features: bool,
744 mode: output::FindMode,
745 index: Option<&Bm25Index>,
746) -> String {
747 let mut total = 0usize;
748 let mut results = Vec::new();
749 for query in queries {
750 let nodes =
751 output::find_nodes_with_index(graph, query, limit, include_features, mode, index);
752 let count = nodes.len();
753 total += count;
754 results.push(FindQueryResult {
755 query: query.clone(),
756 count,
757 nodes,
758 });
759 }
760 let payload = FindResponse {
761 total,
762 queries: results,
763 };
764 serde_json::to_string_pretty(&payload).unwrap_or_else(|_| "{}".to_owned())
765}
766
/// JSON wrapper for `node get --json`, producing `{ "node": { ... } }`.
#[derive(Debug, Serialize)]
struct NodeGetResponse {
    node: Node,
}
771
772fn render_node_json(node: &Node) -> String {
773 let payload = NodeGetResponse { node: node.clone() };
774 serde_json::to_string_pretty(&payload).unwrap_or_else(|_| "{}".to_owned())
775}
776
/// JSON payload for `node list --json`: total node count (before any limit)
/// and the visible nodes.
#[derive(Debug, Serialize)]
struct NodeListResponse {
    total: usize,
    nodes: Vec<Node>,
}
782
783fn render_node_list_json(graph: &GraphFile, args: &ListNodesArgs) -> String {
784 let (total, visible) = collect_node_list(graph, args);
785 let nodes = visible.into_iter().cloned().collect();
786 let payload = NodeListResponse { total, nodes };
787 serde_json::to_string_pretty(&payload).unwrap_or_else(|_| "{}".to_owned())
788}
789
790fn render_graph_diff(store: &dyn GraphStore, left: &str, right: &str) -> Result<String> {
791 let left_path = store.resolve_graph_path(left)?;
792 let right_path = store.resolve_graph_path(right)?;
793 let left_graph = store.load_graph(&left_path)?;
794 let right_graph = store.load_graph(&right_path)?;
795 Ok(render_graph_diff_from_files(
796 left,
797 right,
798 &left_graph,
799 &right_graph,
800 ))
801}
802
803fn render_graph_diff_json(store: &dyn GraphStore, left: &str, right: &str) -> Result<String> {
804 let left_path = store.resolve_graph_path(left)?;
805 let right_path = store.resolve_graph_path(right)?;
806 let left_graph = store.load_graph(&left_path)?;
807 let right_graph = store.load_graph(&right_path)?;
808 Ok(render_graph_diff_json_from_files(
809 left,
810 right,
811 &left_graph,
812 &right_graph,
813 ))
814}
815
/// One field-level difference: the JSON path that changed plus the value on
/// each side.
#[derive(Debug, Serialize)]
struct DiffEntry {
    path: String,
    left: Value,
    right: Value,
}
822
/// All field-level differences for one changed entity (node/edge/note),
/// keyed by its id.
#[derive(Debug, Serialize)]
struct EntityDiff {
    id: String,
    diffs: Vec<DiffEntry>,
}
828
/// Top-level JSON payload for `diff --json`: the two graph names plus
/// added/removed/changed id lists for nodes, edges, and notes.
#[derive(Debug, Serialize)]
struct GraphDiffResponse {
    left: String,
    right: String,
    added_nodes: Vec<String>,
    removed_nodes: Vec<String>,
    changed_nodes: Vec<EntityDiff>,
    added_edges: Vec<String>,
    removed_edges: Vec<String>,
    changed_edges: Vec<EntityDiff>,
    added_notes: Vec<String>,
    removed_notes: Vec<String>,
    changed_notes: Vec<EntityDiff>,
}
843
/// Computes the structural diff between two already-loaded graphs and
/// serializes it as pretty-printed JSON. Edges are keyed by the string
/// "`source relation target`"; nodes and notes by id. "Changed" means the
/// entity exists on both sides but serializes to different JSON.
fn render_graph_diff_json_from_files(
    left: &str,
    right: &str,
    left_graph: &GraphFile,
    right_graph: &GraphFile,
) -> String {
    use std::collections::{HashMap, HashSet};

    // Id sets drive added/removed/changed classification; the maps retain
    // references to the full entities for field-level diffing.
    let left_nodes: HashSet<String> = left_graph.nodes.iter().map(|n| n.id.clone()).collect();
    let right_nodes: HashSet<String> = right_graph.nodes.iter().map(|n| n.id.clone()).collect();

    let left_node_map: HashMap<String, &Node> =
        left_graph.nodes.iter().map(|n| (n.id.clone(), n)).collect();
    let right_node_map: HashMap<String, &Node> = right_graph
        .nodes
        .iter()
        .map(|n| (n.id.clone(), n))
        .collect();

    let left_edges: HashSet<String> = left_graph
        .edges
        .iter()
        .map(|e| format!("{} {} {}", e.source_id, e.relation, e.target_id))
        .collect();
    let right_edges: HashSet<String> = right_graph
        .edges
        .iter()
        .map(|e| format!("{} {} {}", e.source_id, e.relation, e.target_id))
        .collect();

    let left_edge_map: HashMap<String, &Edge> = left_graph
        .edges
        .iter()
        .map(|e| (format!("{} {} {}", e.source_id, e.relation, e.target_id), e))
        .collect();
    let right_edge_map: HashMap<String, &Edge> = right_graph
        .edges
        .iter()
        .map(|e| (format!("{} {} {}", e.source_id, e.relation, e.target_id), e))
        .collect();

    let left_notes: HashSet<String> = left_graph.notes.iter().map(|n| n.id.clone()).collect();
    let right_notes: HashSet<String> = right_graph.notes.iter().map(|n| n.id.clone()).collect();

    let left_note_map: HashMap<String, &Note> =
        left_graph.notes.iter().map(|n| (n.id.clone(), n)).collect();
    let right_note_map: HashMap<String, &Note> = right_graph
        .notes
        .iter()
        .map(|n| (n.id.clone(), n))
        .collect();

    // Added = only on the right; removed = only on the left.
    let mut added_nodes: Vec<String> = right_nodes.difference(&left_nodes).cloned().collect();
    let mut removed_nodes: Vec<String> = left_nodes.difference(&right_nodes).cloned().collect();
    let mut added_edges: Vec<String> = right_edges.difference(&left_edges).cloned().collect();
    let mut removed_edges: Vec<String> = left_edges.difference(&right_edges).cloned().collect();
    let mut added_notes: Vec<String> = right_notes.difference(&left_notes).cloned().collect();
    let mut removed_notes: Vec<String> = left_notes.difference(&right_notes).cloned().collect();

    // Changed = present on both sides but not JSON-equal.
    let mut changed_nodes: Vec<String> = left_nodes
        .intersection(&right_nodes)
        .filter_map(|id| {
            let left_node = left_node_map.get(id.as_str())?;
            let right_node = right_node_map.get(id.as_str())?;
            if eq_serialized(*left_node, *right_node) {
                None
            } else {
                Some(id.clone())
            }
        })
        .collect();
    let mut changed_edges: Vec<String> = left_edges
        .intersection(&right_edges)
        .filter_map(|key| {
            let left_edge = left_edge_map.get(key.as_str())?;
            let right_edge = right_edge_map.get(key.as_str())?;
            if eq_serialized(*left_edge, *right_edge) {
                None
            } else {
                Some(key.clone())
            }
        })
        .collect();
    let mut changed_notes: Vec<String> = left_notes
        .intersection(&right_notes)
        .filter_map(|id| {
            let left_note = left_note_map.get(id.as_str())?;
            let right_note = right_note_map.get(id.as_str())?;
            if eq_serialized(*left_note, *right_note) {
                None
            } else {
                Some(id.clone())
            }
        })
        .collect();

    // Sort for deterministic output (HashSet iteration order is arbitrary).
    added_nodes.sort();
    removed_nodes.sort();
    added_edges.sort();
    removed_edges.sort();
    added_notes.sort();
    removed_notes.sort();
    changed_nodes.sort();
    changed_edges.sort();
    changed_notes.sort();

    // Expand each changed id into field-level diffs; an id missing from
    // either map (shouldn't happen) yields an empty diff list.
    let changed_nodes = changed_nodes
        .into_iter()
        .map(|id| EntityDiff {
            diffs: left_node_map
                .get(id.as_str())
                .zip(right_node_map.get(id.as_str()))
                .map(|(left_node, right_node)| diff_serialized_values_json(*left_node, *right_node))
                .unwrap_or_default(),
            id,
        })
        .collect();
    let changed_edges = changed_edges
        .into_iter()
        .map(|id| EntityDiff {
            diffs: left_edge_map
                .get(id.as_str())
                .zip(right_edge_map.get(id.as_str()))
                .map(|(left_edge, right_edge)| diff_serialized_values_json(*left_edge, *right_edge))
                .unwrap_or_default(),
            id,
        })
        .collect();
    let changed_notes = changed_notes
        .into_iter()
        .map(|id| EntityDiff {
            diffs: left_note_map
                .get(id.as_str())
                .zip(right_note_map.get(id.as_str()))
                .map(|(left_note, right_note)| diff_serialized_values_json(*left_note, *right_note))
                .unwrap_or_default(),
            id,
        })
        .collect();

    let payload = GraphDiffResponse {
        left: left.to_owned(),
        right: right.to_owned(),
        added_nodes,
        removed_nodes,
        changed_nodes,
        added_edges,
        removed_edges,
        changed_edges,
        added_notes,
        removed_notes,
        changed_notes,
    };
    serde_json::to_string_pretty(&payload).unwrap_or_else(|_| "{}".to_owned())
}
999
/// Computes the structural diff between two already-loaded graphs and
/// renders it as plain text: sections for added/removed/changed nodes,
/// edges, and notes, in that order. Uses the same classification rules as
/// the JSON renderer (edges keyed by "`source relation target`").
fn render_graph_diff_from_files(
    left: &str,
    right: &str,
    left_graph: &GraphFile,
    right_graph: &GraphFile,
) -> String {
    use std::collections::{HashMap, HashSet};

    // Id sets classify entities; maps retain references for detail lines.
    let left_nodes: HashSet<String> = left_graph.nodes.iter().map(|n| n.id.clone()).collect();
    let right_nodes: HashSet<String> = right_graph.nodes.iter().map(|n| n.id.clone()).collect();

    let left_node_map: HashMap<String, &Node> =
        left_graph.nodes.iter().map(|n| (n.id.clone(), n)).collect();
    let right_node_map: HashMap<String, &Node> = right_graph
        .nodes
        .iter()
        .map(|n| (n.id.clone(), n))
        .collect();

    let left_edges: HashSet<String> = left_graph
        .edges
        .iter()
        .map(|e| format!("{} {} {}", e.source_id, e.relation, e.target_id))
        .collect();
    let right_edges: HashSet<String> = right_graph
        .edges
        .iter()
        .map(|e| format!("{} {} {}", e.source_id, e.relation, e.target_id))
        .collect();

    let left_edge_map: HashMap<String, &Edge> = left_graph
        .edges
        .iter()
        .map(|e| (format!("{} {} {}", e.source_id, e.relation, e.target_id), e))
        .collect();
    let right_edge_map: HashMap<String, &Edge> = right_graph
        .edges
        .iter()
        .map(|e| (format!("{} {} {}", e.source_id, e.relation, e.target_id), e))
        .collect();

    let left_notes: HashSet<String> = left_graph.notes.iter().map(|n| n.id.clone()).collect();
    let right_notes: HashSet<String> = right_graph.notes.iter().map(|n| n.id.clone()).collect();

    let left_note_map: HashMap<String, &Note> =
        left_graph.notes.iter().map(|n| (n.id.clone(), n)).collect();
    let right_note_map: HashMap<String, &Note> = right_graph
        .notes
        .iter()
        .map(|n| (n.id.clone(), n))
        .collect();

    // Added = only on the right; removed = only on the left.
    let mut added_nodes: Vec<String> = right_nodes.difference(&left_nodes).cloned().collect();
    let mut removed_nodes: Vec<String> = left_nodes.difference(&right_nodes).cloned().collect();
    let mut added_edges: Vec<String> = right_edges.difference(&left_edges).cloned().collect();
    let mut removed_edges: Vec<String> = left_edges.difference(&right_edges).cloned().collect();
    let mut added_notes: Vec<String> = right_notes.difference(&left_notes).cloned().collect();
    let mut removed_notes: Vec<String> = left_notes.difference(&right_notes).cloned().collect();

    // Changed = present on both sides but not JSON-equal.
    let mut changed_nodes: Vec<String> = left_nodes
        .intersection(&right_nodes)
        .filter_map(|id| {
            let left_node = left_node_map.get(id.as_str())?;
            let right_node = right_node_map.get(id.as_str())?;
            if eq_serialized(*left_node, *right_node) {
                None
            } else {
                Some(id.clone())
            }
        })
        .collect();

    let mut changed_edges: Vec<String> = left_edges
        .intersection(&right_edges)
        .filter_map(|key| {
            let left_edge = left_edge_map.get(key.as_str())?;
            let right_edge = right_edge_map.get(key.as_str())?;
            if eq_serialized(*left_edge, *right_edge) {
                None
            } else {
                Some(key.clone())
            }
        })
        .collect();

    let mut changed_notes: Vec<String> = left_notes
        .intersection(&right_notes)
        .filter_map(|id| {
            let left_note = left_note_map.get(id.as_str())?;
            let right_note = right_note_map.get(id.as_str())?;
            if eq_serialized(*left_note, *right_note) {
                None
            } else {
                Some(id.clone())
            }
        })
        .collect();

    // Sort for deterministic output (HashSet iteration order is arbitrary).
    added_nodes.sort();
    removed_nodes.sort();
    added_edges.sort();
    removed_edges.sort();
    added_notes.sort();
    removed_notes.sort();
    changed_nodes.sort();
    changed_edges.sort();
    changed_notes.sort();

    let mut lines = vec![format!("= diff {left} -> {right}")];
    lines.push(format!("+ nodes ({})", added_nodes.len()));
    for id in added_nodes {
        lines.push(format!("+ node {id}"));
    }
    lines.push(format!("- nodes ({})", removed_nodes.len()));
    for id in removed_nodes {
        lines.push(format!("- node {id}"));
    }
    lines.push(format!("~ nodes ({})", changed_nodes.len()));
    for id in changed_nodes {
        // Field-level detail when both sides are available; a bare marker
        // line otherwise (should not happen for a changed id).
        if let (Some(left_node), Some(right_node)) = (
            left_node_map.get(id.as_str()),
            right_node_map.get(id.as_str()),
        ) {
            lines.extend(render_entity_diff_lines("node", &id, left_node, right_node));
        } else {
            lines.push(format!("~ node {id}"));
        }
    }
    lines.push(format!("+ edges ({})", added_edges.len()));
    for edge in added_edges {
        lines.push(format!("+ edge {edge}"));
    }
    lines.push(format!("- edges ({})", removed_edges.len()));
    for edge in removed_edges {
        lines.push(format!("- edge {edge}"));
    }
    lines.push(format!("~ edges ({})", changed_edges.len()));
    for edge in changed_edges {
        if let (Some(left_edge), Some(right_edge)) = (
            left_edge_map.get(edge.as_str()),
            right_edge_map.get(edge.as_str()),
        ) {
            lines.extend(render_entity_diff_lines(
                "edge", &edge, left_edge, right_edge,
            ));
        } else {
            lines.push(format!("~ edge {edge}"));
        }
    }
    lines.push(format!("+ notes ({})", added_notes.len()));
    for note_id in added_notes {
        lines.push(format!("+ note {note_id}"));
    }
    lines.push(format!("- notes ({})", removed_notes.len()));
    for note_id in removed_notes {
        lines.push(format!("- note {note_id}"));
    }
    lines.push(format!("~ notes ({})", changed_notes.len()));
    for note_id in changed_notes {
        if let (Some(left_note), Some(right_note)) = (
            left_note_map.get(note_id.as_str()),
            right_note_map.get(note_id.as_str()),
        ) {
            lines.extend(render_entity_diff_lines(
                "note", &note_id, left_note, right_note,
            ));
        } else {
            lines.push(format!("~ note {note_id}"));
        }
    }

    format!("{}\n", lines.join("\n"))
}
1173
1174fn eq_serialized<T: Serialize>(left: &T, right: &T) -> bool {
1175 match (serde_json::to_value(left), serde_json::to_value(right)) {
1176 (Ok(left_value), Ok(right_value)) => left_value == right_value,
1177 _ => false,
1178 }
1179}
1180
1181fn render_entity_diff_lines<T: Serialize>(
1182 kind: &str,
1183 id: &str,
1184 left: &T,
1185 right: &T,
1186) -> Vec<String> {
1187 let mut lines = Vec::new();
1188 lines.push(format!("~ {kind} {id}"));
1189 for diff in diff_serialized_values(left, right) {
1190 lines.push(format!(" ~ {diff}"));
1191 }
1192 lines
1193}
1194
1195fn diff_serialized_values<T: Serialize>(left: &T, right: &T) -> Vec<String> {
1196 match (serde_json::to_value(left), serde_json::to_value(right)) {
1197 (Ok(left_value), Ok(right_value)) => {
1198 let mut diffs = Vec::new();
1199 collect_value_diffs("", &left_value, &right_value, &mut diffs);
1200 diffs
1201 }
1202 _ => vec!["<serialization failed>".to_owned()],
1203 }
1204}
1205
1206fn diff_serialized_values_json<T: Serialize>(left: &T, right: &T) -> Vec<DiffEntry> {
1207 match (serde_json::to_value(left), serde_json::to_value(right)) {
1208 (Ok(left_value), Ok(right_value)) => {
1209 let mut diffs = Vec::new();
1210 collect_value_diffs_json("", &left_value, &right_value, &mut diffs);
1211 diffs
1212 }
1213 _ => Vec::new(),
1214 }
1215}
1216
1217fn collect_value_diffs_json(path: &str, left: &Value, right: &Value, out: &mut Vec<DiffEntry>) {
1218 if left == right {
1219 return;
1220 }
1221 match (left, right) {
1222 (Value::Object(left_obj), Value::Object(right_obj)) => {
1223 use std::collections::BTreeSet;
1224
1225 let mut keys: BTreeSet<&str> = BTreeSet::new();
1226 for key in left_obj.keys() {
1227 keys.insert(key.as_str());
1228 }
1229 for key in right_obj.keys() {
1230 keys.insert(key.as_str());
1231 }
1232 for key in keys {
1233 let left_value = left_obj.get(key).unwrap_or(&Value::Null);
1234 let right_value = right_obj.get(key).unwrap_or(&Value::Null);
1235 let next_path = if path.is_empty() {
1236 key.to_owned()
1237 } else {
1238 format!("{path}.{key}")
1239 };
1240 collect_value_diffs_json(&next_path, left_value, right_value, out);
1241 }
1242 }
1243 (Value::Array(_), Value::Array(_)) => {
1244 let label = if path.is_empty() {
1245 "<root>[]".to_owned()
1246 } else {
1247 format!("{path}[]")
1248 };
1249 out.push(DiffEntry {
1250 path: label,
1251 left: left.clone(),
1252 right: right.clone(),
1253 });
1254 }
1255 _ => {
1256 let label = if path.is_empty() { "<root>" } else { path };
1257 out.push(DiffEntry {
1258 path: label.to_owned(),
1259 left: left.clone(),
1260 right: right.clone(),
1261 });
1262 }
1263 }
1264}
1265
1266fn collect_value_diffs(path: &str, left: &Value, right: &Value, out: &mut Vec<String>) {
1267 if left == right {
1268 return;
1269 }
1270 match (left, right) {
1271 (Value::Object(left_obj), Value::Object(right_obj)) => {
1272 use std::collections::BTreeSet;
1273
1274 let mut keys: BTreeSet<&str> = BTreeSet::new();
1275 for key in left_obj.keys() {
1276 keys.insert(key.as_str());
1277 }
1278 for key in right_obj.keys() {
1279 keys.insert(key.as_str());
1280 }
1281 for key in keys {
1282 let left_value = left_obj.get(key).unwrap_or(&Value::Null);
1283 let right_value = right_obj.get(key).unwrap_or(&Value::Null);
1284 let next_path = if path.is_empty() {
1285 key.to_owned()
1286 } else {
1287 format!("{path}.{key}")
1288 };
1289 collect_value_diffs(&next_path, left_value, right_value, out);
1290 }
1291 }
1292 (Value::Array(_), Value::Array(_)) => {
1293 let label = if path.is_empty() {
1294 "<root>[]".to_owned()
1295 } else {
1296 format!("{path}[]")
1297 };
1298 out.push(format!(
1299 "{label}: {} -> {}",
1300 format_value(left),
1301 format_value(right)
1302 ));
1303 }
1304 _ => {
1305 let label = if path.is_empty() { "<root>" } else { path };
1306 out.push(format!(
1307 "{label}: {} -> {}",
1308 format_value(left),
1309 format_value(right)
1310 ));
1311 }
1312 }
1313}
1314
1315fn format_value(value: &Value) -> String {
1316 let mut rendered =
1317 serde_json::to_string(value).unwrap_or_else(|_| "<unserializable>".to_owned());
1318 rendered = rendered.replace('\n', "\\n");
1319 truncate_value(rendered, 160)
1320}
1321
/// Cap `value` at `limit` bytes, replacing the tail with "..." when it
/// is longer.
///
/// The cut position is backed off to the nearest UTF-8 char boundary:
/// `String::truncate` panics when the new length falls inside a
/// multi-byte character, so without this adjustment non-ASCII values
/// could crash the diff renderer.
fn truncate_value(mut value: String, limit: usize) -> String {
    if value.len() <= limit {
        return value;
    }
    // Reserve room for the "..." suffix, then walk back to a boundary.
    let mut cut = limit.saturating_sub(3);
    while !value.is_char_boundary(cut) {
        cut -= 1;
    }
    value.truncate(cut);
    value.push_str("...");
    value
}
1330
1331fn merge_graphs(
1332 store: &dyn GraphStore,
1333 target: &str,
1334 source: &str,
1335 strategy: MergeStrategy,
1336) -> Result<String> {
1337 use std::collections::HashMap;
1338
1339 let target_path = store.resolve_graph_path(target)?;
1340 let source_path = store.resolve_graph_path(source)?;
1341 let mut target_graph = store.load_graph(&target_path)?;
1342 let source_graph = store.load_graph(&source_path)?;
1343
1344 let mut node_index: HashMap<String, usize> = HashMap::new();
1345 for (idx, node) in target_graph.nodes.iter().enumerate() {
1346 node_index.insert(node.id.clone(), idx);
1347 }
1348
1349 let mut node_added = 0usize;
1350 let mut node_updated = 0usize;
1351 for node in &source_graph.nodes {
1352 if let Some(&idx) = node_index.get(&node.id) {
1353 if matches!(strategy, MergeStrategy::PreferNew) {
1354 target_graph.nodes[idx] = node.clone();
1355 node_updated += 1;
1356 }
1357 } else {
1358 target_graph.nodes.push(node.clone());
1359 node_index.insert(node.id.clone(), target_graph.nodes.len() - 1);
1360 node_added += 1;
1361 }
1362 }
1363
1364 let mut edge_index: HashMap<String, usize> = HashMap::new();
1365 for (idx, edge) in target_graph.edges.iter().enumerate() {
1366 let key = format!("{} {} {}", edge.source_id, edge.relation, edge.target_id);
1367 edge_index.insert(key, idx);
1368 }
1369
1370 let mut edge_added = 0usize;
1371 let mut edge_updated = 0usize;
1372 for edge in &source_graph.edges {
1373 let key = format!("{} {} {}", edge.source_id, edge.relation, edge.target_id);
1374 if let Some(&idx) = edge_index.get(&key) {
1375 if matches!(strategy, MergeStrategy::PreferNew) {
1376 target_graph.edges[idx] = edge.clone();
1377 edge_updated += 1;
1378 }
1379 } else {
1380 target_graph.edges.push(edge.clone());
1381 edge_index.insert(key, target_graph.edges.len() - 1);
1382 edge_added += 1;
1383 }
1384 }
1385
1386 let mut note_index: HashMap<String, usize> = HashMap::new();
1387 for (idx, note) in target_graph.notes.iter().enumerate() {
1388 note_index.insert(note.id.clone(), idx);
1389 }
1390
1391 let mut note_added = 0usize;
1392 let mut note_updated = 0usize;
1393 for note in &source_graph.notes {
1394 if let Some(&idx) = note_index.get(¬e.id) {
1395 if matches!(strategy, MergeStrategy::PreferNew) {
1396 target_graph.notes[idx] = note.clone();
1397 note_updated += 1;
1398 }
1399 } else {
1400 target_graph.notes.push(note.clone());
1401 note_index.insert(note.id.clone(), target_graph.notes.len() - 1);
1402 note_added += 1;
1403 }
1404 }
1405
1406 store.save_graph(&target_path, &target_graph)?;
1407 append_event_snapshot(
1408 &target_path,
1409 "graph.merge",
1410 Some(format!("{source} -> {target} ({strategy:?})")),
1411 &target_graph,
1412 )?;
1413
1414 let mut lines = vec![format!("+ merged {source} -> {target}")];
1415 lines.push(format!("nodes: +{node_added} ~{node_updated}"));
1416 lines.push(format!("edges: +{edge_added} ~{edge_updated}"));
1417 lines.push(format!("notes: +{note_added} ~{note_updated}"));
1418
1419 Ok(format!("{}\n", lines.join("\n")))
1420}
1421
1422fn export_graph_as_of(path: &Path, graph: &str, args: &AsOfArgs) -> Result<String> {
1423 match resolve_temporal_source(path, args.source)? {
1424 TemporalSource::EventLog => export_graph_as_of_event_log(path, graph, args),
1425 _ => export_graph_as_of_backups(path, graph, args),
1426 }
1427}
1428
1429fn export_graph_as_of_backups(path: &Path, graph: &str, args: &AsOfArgs) -> Result<String> {
1430 let backups = list_graph_backups(path)?;
1431 if backups.is_empty() {
1432 bail!("no backups found for graph: {graph}");
1433 }
1434 let target_ts = args.ts_ms / 1000;
1435 let mut selected = None;
1436 for (ts, backup_path) in backups {
1437 if ts <= target_ts {
1438 selected = Some((ts, backup_path));
1439 }
1440 }
1441 let Some((ts, backup_path)) = selected else {
1442 bail!("no backup at or before ts_ms={}", args.ts_ms);
1443 };
1444
1445 let output_path = args
1446 .output
1447 .clone()
1448 .unwrap_or_else(|| format!("{graph}.asof.{}.json", args.ts_ms));
1449 let raw = read_gz_to_string(&backup_path)?;
1450 std::fs::write(&output_path, raw)?;
1451 Ok(format!("+ exported {output_path} (as-of {ts})\n"))
1452}
1453
1454fn export_graph_as_of_event_log(path: &Path, graph: &str, args: &AsOfArgs) -> Result<String> {
1455 let entries = event_log::read_log(path)?;
1456 if entries.is_empty() {
1457 bail!("no event log entries found for graph: {graph}");
1458 }
1459 let selected = select_event_at_or_before(&entries, args.ts_ms)
1460 .ok_or_else(|| anyhow!("no event log entry at or before ts_ms={}", args.ts_ms))?;
1461 let output_path = args
1462 .output
1463 .clone()
1464 .unwrap_or_else(|| format!("{graph}.asof.{}.json", args.ts_ms));
1465 let mut snapshot = selected.graph.clone();
1466 snapshot.refresh_counts();
1467 let raw = serde_json::to_string_pretty(&snapshot).context("failed to serialize graph")?;
1468 std::fs::write(&output_path, raw)?;
1469 Ok(format!(
1470 "+ exported {output_path} (as-of {})\n",
1471 selected.ts_ms
1472 ))
1473}
1474
1475fn list_graph_backups(path: &Path) -> Result<Vec<(u64, PathBuf)>> {
1476 let parent = path
1477 .parent()
1478 .ok_or_else(|| anyhow!("missing parent directory"))?;
1479 let stem = path
1480 .file_stem()
1481 .and_then(|s| s.to_str())
1482 .ok_or_else(|| anyhow!("invalid graph filename"))?;
1483 let prefix = format!("{stem}.bck.");
1484 let suffix = ".gz";
1485
1486 let mut backups = Vec::new();
1487 for entry in std::fs::read_dir(parent)? {
1488 let entry = entry?;
1489 let name = entry.file_name();
1490 let name = name.to_string_lossy();
1491 if !name.starts_with(&prefix) || !name.ends_with(suffix) {
1492 continue;
1493 }
1494 let ts_part = &name[prefix.len()..name.len() - suffix.len()];
1495 if let Ok(ts) = ts_part.parse::<u64>() {
1496 backups.push((ts, entry.path()));
1497 }
1498 }
1499 backups.sort_by_key(|(ts, _)| *ts);
1500 Ok(backups)
1501}
1502
1503fn read_gz_to_string(path: &Path) -> Result<String> {
1504 use flate2::read::GzDecoder;
1505 use std::io::Read;
1506
1507 let data = std::fs::read(path)?;
1508 let mut decoder = GzDecoder::new(&data[..]);
1509 let mut out = String::new();
1510 decoder.read_to_string(&mut out)?;
1511 Ok(out)
1512}
1513
/// Append a full-graph snapshot to the event log next to the graph
/// file, tagged with an action name (e.g. "graph.merge") and an
/// optional human-readable detail string. Thin wrapper over
/// `event_log::append_snapshot` so call sites stay terse.
fn append_event_snapshot(
    path: &Path,
    action: &str,
    detail: Option<String>,
    graph: &GraphFile,
) -> Result<()> {
    event_log::append_snapshot(path, action, detail, graph)
}
1522
1523fn export_graph_json(graph: &str, graph_file: &GraphFile, output: Option<&str>) -> Result<String> {
1524 let output_path = output
1525 .map(|value| value.to_owned())
1526 .unwrap_or_else(|| format!("{graph}.export.json"));
1527 let raw = serde_json::to_string_pretty(graph_file).context("failed to serialize graph")?;
1528 std::fs::write(&output_path, raw)?;
1529 Ok(format!("+ exported {output_path}\n"))
1530}
1531
1532fn import_graph_json(
1533 path: &Path,
1534 graph: &str,
1535 input: &str,
1536 store: &dyn GraphStore,
1537) -> Result<String> {
1538 let raw = std::fs::read_to_string(input)
1539 .with_context(|| format!("failed to read import file: {input}"))?;
1540 let mut imported: GraphFile =
1541 serde_json::from_str(&raw).with_context(|| format!("invalid JSON: {input}"))?;
1542 imported.metadata.name = graph.to_owned();
1543 imported.refresh_counts();
1544 store.save_graph(path, &imported)?;
1545 append_event_snapshot(path, "graph.import", Some(input.to_owned()), &imported)?;
1546 Ok(format!("+ imported {input} -> {graph}\n"))
1547}
1548
1549fn import_graph_csv(
1550 path: &Path,
1551 graph: &str,
1552 graph_file: &mut GraphFile,
1553 store: &dyn GraphStore,
1554 args: &ImportCsvArgs,
1555 schema: Option<&GraphSchema>,
1556) -> Result<String> {
1557 if args.nodes.is_none() && args.edges.is_none() && args.notes.is_none() {
1558 bail!("expected at least one of --nodes/--edges/--notes");
1559 }
1560 let strategy = match args.strategy {
1561 MergeStrategy::PreferNew => import_csv::CsvStrategy::PreferNew,
1562 MergeStrategy::PreferOld => import_csv::CsvStrategy::PreferOld,
1563 };
1564 let summary = import_csv::import_csv_into_graph(
1565 graph_file,
1566 import_csv::CsvImportArgs {
1567 nodes_path: args.nodes.as_deref(),
1568 edges_path: args.edges.as_deref(),
1569 notes_path: args.notes.as_deref(),
1570 strategy,
1571 },
1572 )?;
1573 if let Some(schema) = schema {
1574 let all_violations = validate_graph_with_schema(graph_file, schema);
1575 bail_on_schema_violations(&all_violations)?;
1576 }
1577 store.save_graph(path, graph_file)?;
1578 append_event_snapshot(path, "graph.import-csv", None, graph_file)?;
1579 let mut lines = vec![format!("+ imported csv into {graph}")];
1580 lines.extend(import_csv::merge_summary_lines(&summary));
1581 Ok(format!("{}\n", lines.join("\n")))
1582}
1583
1584fn import_graph_markdown(
1585 path: &Path,
1586 graph: &str,
1587 graph_file: &mut GraphFile,
1588 store: &dyn GraphStore,
1589 args: &ImportMarkdownArgs,
1590 schema: Option<&GraphSchema>,
1591) -> Result<String> {
1592 let strategy = match args.strategy {
1593 MergeStrategy::PreferNew => import_markdown::MarkdownStrategy::PreferNew,
1594 MergeStrategy::PreferOld => import_markdown::MarkdownStrategy::PreferOld,
1595 };
1596 let summary = import_markdown::import_markdown_into_graph(
1597 graph_file,
1598 import_markdown::MarkdownImportArgs {
1599 path: &args.path,
1600 notes_as_nodes: args.notes_as_nodes,
1601 strategy,
1602 },
1603 )?;
1604 if let Some(schema) = schema {
1605 let all_violations = validate_graph_with_schema(graph_file, schema);
1606 bail_on_schema_violations(&all_violations)?;
1607 }
1608 store.save_graph(path, graph_file)?;
1609 append_event_snapshot(path, "graph.import-md", Some(args.path.clone()), graph_file)?;
1610 let mut lines = vec![format!("+ imported markdown into {graph}")];
1611 lines.extend(import_csv::merge_summary_lines(&summary));
1612 Ok(format!("{}\n", lines.join("\n")))
1613}
1614
1615fn export_graph_dot(graph: &str, graph_file: &GraphFile, args: &ExportDotArgs) -> Result<String> {
1616 let output_path = args
1617 .output
1618 .clone()
1619 .unwrap_or_else(|| format!("{graph}.dot"));
1620 let (nodes, edges) = select_subgraph(
1621 graph_file,
1622 args.focus.as_deref(),
1623 args.depth,
1624 &args.node_types,
1625 )?;
1626 let mut lines = Vec::new();
1627 lines.push("digraph kg {".to_owned());
1628 for node in &nodes {
1629 let label = format!("{}\\n{}", node.id, node.name);
1630 lines.push(format!(
1631 " \"{}\" [label=\"{}\"];",
1632 escape_dot(&node.id),
1633 escape_dot(&label)
1634 ));
1635 }
1636 for edge in &edges {
1637 lines.push(format!(
1638 " \"{}\" -> \"{}\" [label=\"{}\"];",
1639 escape_dot(&edge.source_id),
1640 escape_dot(&edge.target_id),
1641 escape_dot(&edge.relation)
1642 ));
1643 }
1644 lines.push("}".to_owned());
1645 std::fs::write(&output_path, format!("{}\n", lines.join("\n")))?;
1646 Ok(format!("+ exported {output_path}\n"))
1647}
1648
1649fn export_graph_mermaid(
1650 graph: &str,
1651 graph_file: &GraphFile,
1652 args: &ExportMermaidArgs,
1653) -> Result<String> {
1654 let output_path = args
1655 .output
1656 .clone()
1657 .unwrap_or_else(|| format!("{graph}.mmd"));
1658 let (nodes, edges) = select_subgraph(
1659 graph_file,
1660 args.focus.as_deref(),
1661 args.depth,
1662 &args.node_types,
1663 )?;
1664 let mut lines = Vec::new();
1665 lines.push("graph TD".to_owned());
1666 for node in &nodes {
1667 let label = format!("{}\\n{}", node.id, node.name);
1668 lines.push(format!(
1669 " {}[\"{}\"]",
1670 sanitize_mermaid_id(&node.id),
1671 escape_mermaid(&label)
1672 ));
1673 }
1674 for edge in &edges {
1675 lines.push(format!(
1676 " {} -- \"{}\" --> {}",
1677 sanitize_mermaid_id(&edge.source_id),
1678 escape_mermaid(&edge.relation),
1679 sanitize_mermaid_id(&edge.target_id)
1680 ));
1681 }
1682 std::fs::write(&output_path, format!("{}\n", lines.join("\n")))?;
1683 Ok(format!("+ exported {output_path}\n"))
1684}
1685
/// Write the (optionally focused and type-filtered) graph as GraphML.
///
/// Declares five data keys (d0-d2 for node name/type/description,
/// d3-d4 for edge relation/detail) and emits one `<node>`/`<edge>`
/// element per selected entity. Output path defaults to
/// `<graph>.graphml`. Text content is passed through `escape_xml`.
fn export_graph_graphml(
    graph: &str,
    graph_file: &GraphFile,
    args: &ExportGraphmlArgs,
) -> Result<String> {
    let output_path = args
        .output
        .clone()
        .unwrap_or_else(|| format!("{graph}.graphml"));
    let (nodes, edges) = select_subgraph(
        graph_file,
        args.focus.as_deref(),
        args.depth,
        &args.node_types,
    )?;

    // Fixed GraphML preamble: XML declaration, namespaces, and the key
    // declarations referenced by the <data> elements below.
    let mut lines = Vec::new();
    lines.push(r#"<?xml version="1.0" encoding="UTF-8"?>"#.to_string());
    lines.push(r#"<graphml xmlns="http://graphml.graphdrawing.org/xmlns" "#.to_string());
    lines.push(r#"    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance""#.to_string());
    lines.push(r#"    xsi:schemaLocation="http://graphml.graphdrawing.org/xmlns"#.to_string());
    lines.push(r#"    http://graphml.graphdrawing.org/xmlns/1.0/graphml.xsd">"#.to_string());
    lines.push(r#"  <key id="d0" for="node" attr.name="name" attr.type="string"/>"#.to_string());
    lines.push(r#"  <key id="d1" for="node" attr.name="type" attr.type="string"/>"#.to_string());
    lines.push(
        r#"  <key id="d2" for="node" attr.name="description" attr.type="string"/>"#.to_string(),
    );
    lines
        .push(r#"  <key id="d3" for="edge" attr.name="relation" attr.type="string"/>"#.to_string());
    lines.push(r#"  <key id="d4" for="edge" attr.name="detail" attr.type="string"/>"#.to_string());
    lines.push(format!(
        r#"  <graph id="{}" edgedefault="directed">"#,
        escape_xml(graph)
    ));

    for node in &nodes {
        lines.push(format!(r#"    <node id="{}">"#, escape_xml(&node.id)));
        lines.push(format!(
            r#"      <data key="d0">{}</data>"#,
            escape_xml(&node.name)
        ));
        lines.push(format!(
            r#"      <data key="d1">{}</data>"#,
            escape_xml(&node.r#type)
        ));
        lines.push(format!(
            r#"      <data key="d2">{}</data>"#,
            escape_xml(&node.properties.description)
        ));
        lines.push("    </node>".to_string());
    }

    for edge in &edges {
        lines.push(format!(
            r#"    <edge source="{}" target="{}">"#,
            escape_xml(&edge.source_id),
            escape_xml(&edge.target_id)
        ));
        lines.push(format!(
            r#"      <data key="d3">{}</data>"#,
            escape_xml(&edge.relation)
        ));
        lines.push(format!(
            r#"      <data key="d4">{}</data>"#,
            escape_xml(&edge.properties.detail)
        ));
        lines.push("    </edge>".to_string());
    }

    lines.push("  </graph>".to_string());
    lines.push("</graphml>".to_string());

    std::fs::write(&output_path, lines.join("\n"))?;
    Ok(format!("+ exported {output_path}\n"))
}
1761
/// Escape the five XML special characters for use in attribute values
/// and text content.
///
/// Fix: the replacement strings had been HTML-entity-decoded into the
/// characters themselves (e.g. `replace('&', "&")`), turning the whole
/// function into a no-op and producing malformed GraphML for any input
/// containing `<`, `>`, `&`, `"` or `'`. Restored the proper entities.
/// `&` must be escaped first so the other replacements are not
/// double-escaped.
fn escape_xml(s: &str) -> String {
    s.replace('&', "&amp;")
        .replace('<', "&lt;")
        .replace('>', "&gt;")
        .replace('"', "&quot;")
        .replace('\'', "&apos;")
}
1769
/// Export the (optionally focused and type-filtered) graph as a
/// directory of Markdown files: one file per node plus an `index.md`.
///
/// Each node file contains the node's name, id, type, description,
/// key facts, aliases, its incident relations rendered as wiki-links,
/// and a backlinks section. The output directory defaults to
/// `<graph>-md`. `_cwd` is currently unused.
fn export_graph_md(
    graph: &str,
    graph_file: &GraphFile,
    args: &ExportMdArgs,
    _cwd: &Path,
) -> Result<String> {
    let output_dir = args
        .output
        .clone()
        .unwrap_or_else(|| format!("{}-md", graph));

    let (nodes, edges) = select_subgraph(
        graph_file,
        args.focus.as_deref(),
        args.depth,
        &args.node_types,
    )?;

    std::fs::create_dir_all(&output_dir)?;

    let mut index_lines = format!("# {}\n\nNodes: {}\n\n## Index\n", graph, nodes.len());

    for node in &nodes {
        // Node id is sanitized so it is usable as a filename.
        let safe_name = sanitize_filename(&node.id);
        let filename = format!("{}.md", safe_name);
        let filepath = Path::new(&output_dir).join(&filename);

        let mut content = String::new();
        content.push_str(&format!("# {}\n\n", node.name));
        content.push_str(&format!("**ID:** `{}`\n\n", node.id));
        content.push_str(&format!("**Type:** {}\n\n", node.r#type));

        // Optional sections: only emitted when the node carries data.
        if !node.properties.description.is_empty() {
            content.push_str(&format!(
                "## Description\n\n{}\n\n",
                node.properties.description
            ));
        }

        if !node.properties.key_facts.is_empty() {
            content.push_str("## Facts\n\n");
            for fact in &node.properties.key_facts {
                content.push_str(&format!("- {}\n", fact));
            }
            content.push('\n');
        }

        if !node.properties.alias.is_empty() {
            content.push_str(&format!(
                "**Aliases:** {}\n\n",
                node.properties.alias.join(", ")
            ));
        }

        // Both outgoing and incoming edges, rendered as wiki-links.
        content.push_str("## Relations\n\n");
        for edge in &edges {
            if edge.source_id == node.id {
                content.push_str(&format!(
                    "- [[{}]] --({})--> [[{}]]\n",
                    node.id, edge.relation, edge.target_id
                ));
            } else if edge.target_id == node.id {
                content.push_str(&format!(
                    "- [[{}]] <--({})-- [[{}]]\n",
                    edge.source_id, edge.relation, node.id
                ));
            }
        }
        content.push('\n');

        // Backlinks: edges that point at this node.
        content.push_str("## Backlinks\n\n");
        let backlinks: Vec<_> = edges.iter().filter(|e| e.target_id == node.id).collect();
        if backlinks.is_empty() {
            content.push_str("_No backlinks_\n");
        } else {
            for edge in backlinks {
                content.push_str(&format!("- [[{}]] ({})\n", edge.source_id, edge.relation));
            }
        }

        std::fs::write(&filepath, content)?;

        index_lines.push_str(&format!(
            "- [[{}]] - {} [{}]\n",
            node.id, node.name, node.r#type
        ));
    }

    std::fs::write(Path::new(&output_dir).join("index.md"), index_lines)?;

    Ok(format!(
        "+ exported {}/ ({} nodes)\n",
        output_dir,
        nodes.len()
    ))
}
1866
/// Make an id safe for use as a filename: path separators, colons and
/// spaces become "_", and "&" becomes "and".
fn sanitize_filename(name: &str) -> String {
    let mut out = String::with_capacity(name.len());
    for ch in name.chars() {
        match ch {
            ':' | '/' | '\\' | ' ' => out.push('_'),
            '&' => out.push_str("and"),
            other => out.push(other),
        }
    }
    out
}
1870
1871fn split_graph(graph: &str, graph_file: &GraphFile, args: &SplitArgs) -> Result<String> {
1872 let output_dir = args
1873 .output
1874 .clone()
1875 .unwrap_or_else(|| format!("{}-split", graph));
1876
1877 let nodes_dir = Path::new(&output_dir).join("nodes");
1878 let edges_dir = Path::new(&output_dir).join("edges");
1879 let notes_dir = Path::new(&output_dir).join("notes");
1880 let meta_dir = Path::new(&output_dir).join("metadata");
1881
1882 std::fs::create_dir_all(&nodes_dir)?;
1883 std::fs::create_dir_all(&edges_dir)?;
1884 std::fs::create_dir_all(¬es_dir)?;
1885 std::fs::create_dir_all(&meta_dir)?;
1886
1887 let meta_json = serde_json::to_string_pretty(&graph_file.metadata)?;
1888 std::fs::write(meta_dir.join("metadata.json"), meta_json)?;
1889
1890 let mut node_count = 0;
1891 for node in &graph_file.nodes {
1892 let safe_id = sanitize_filename(&node.id);
1893 let filepath = nodes_dir.join(format!("{}.json", safe_id));
1894 let node_json = serde_json::to_string_pretty(node)?;
1895 std::fs::write(filepath, node_json)?;
1896 node_count += 1;
1897 }
1898
1899 let mut edge_count = 0;
1900 for edge in &graph_file.edges {
1901 let edge_key = format!(
1902 "{}___{}___{}",
1903 sanitize_filename(&edge.source_id),
1904 sanitize_filename(&edge.relation),
1905 sanitize_filename(&edge.target_id)
1906 );
1907 let filepath = edges_dir.join(format!("{}.json", edge_key));
1908 let edge_json = serde_json::to_string_pretty(edge)?;
1909 std::fs::write(filepath, edge_json)?;
1910 edge_count += 1;
1911 }
1912
1913 let mut note_count = 0;
1914 for note in &graph_file.notes {
1915 let safe_id = sanitize_filename(¬e.id);
1916 let filepath = notes_dir.join(format!("{}.json", safe_id));
1917 let note_json = serde_json::to_string_pretty(note)?;
1918 std::fs::write(filepath, note_json)?;
1919 note_count += 1;
1920 }
1921
1922 let manifest = format!(
1923 r#"# {} Split Manifest
1924
1925This directory contains a git-friendly split representation of the graph.
1926
1927## Structure
1928
1929- `metadata/metadata.json` - Graph metadata
1930- `nodes/` - One JSON file per node (filename = sanitized node id)
1931- `edges/` - One JSON file per edge (filename = source___relation___target)
1932- `notes/` - One JSON file per note
1933
1934## Stats
1935
1936- Nodes: {}
1937- Edges: {}
1938- Notes: {}
1939
1940## Usage
1941
1942To reassemble into a single JSON file, use `kg {} import-json`.
1943"#,
1944 graph, node_count, edge_count, note_count, graph
1945 );
1946 std::fs::write(Path::new(&output_dir).join("MANIFEST.md"), manifest)?;
1947
1948 Ok(format!(
1949 "+ split {} into {}/ (nodes: {}, edges: {}, notes: {})\n",
1950 graph, output_dir, node_count, edge_count, note_count
1951 ))
1952}
1953
1954fn select_subgraph<'a>(
1955 graph_file: &'a GraphFile,
1956 focus: Option<&'a str>,
1957 depth: usize,
1958 node_types: &'a [String],
1959) -> Result<(Vec<&'a Node>, Vec<&'a Edge>)> {
1960 use std::collections::{HashSet, VecDeque};
1961
1962 let mut selected: HashSet<String> = HashSet::new();
1963 if let Some(focus_id) = focus {
1964 if graph_file.node_by_id(focus_id).is_none() {
1965 bail!("focus node not found: {focus_id}");
1966 }
1967 selected.insert(focus_id.to_owned());
1968 let mut frontier = VecDeque::new();
1969 frontier.push_back((focus_id.to_owned(), 0usize));
1970 while let Some((current, dist)) = frontier.pop_front() {
1971 if dist >= depth {
1972 continue;
1973 }
1974 for edge in &graph_file.edges {
1975 let next = if edge.source_id == current {
1976 Some(edge.target_id.clone())
1977 } else if edge.target_id == current {
1978 Some(edge.source_id.clone())
1979 } else {
1980 None
1981 };
1982 if let Some(next_id) = next {
1983 if selected.insert(next_id.clone()) {
1984 frontier.push_back((next_id, dist + 1));
1985 }
1986 }
1987 }
1988 }
1989 } else {
1990 for node in &graph_file.nodes {
1991 selected.insert(node.id.clone());
1992 }
1993 }
1994
1995 let type_filter: Vec<String> = node_types.iter().map(|t| t.to_lowercase()).collect();
1996 let has_filter = !type_filter.is_empty();
1997 let mut nodes: Vec<&Node> = graph_file
1998 .nodes
1999 .iter()
2000 .filter(|node| selected.contains(&node.id))
2001 .filter(|node| {
2002 if let Some(focus_id) = focus {
2003 if node.id == focus_id {
2004 return true;
2005 }
2006 }
2007 !has_filter || type_filter.contains(&node.r#type.to_lowercase())
2008 })
2009 .collect();
2010 nodes.sort_by(|a, b| a.id.cmp(&b.id));
2011
2012 let node_set: HashSet<String> = nodes.iter().map(|node| node.id.clone()).collect();
2013 let mut edges: Vec<&Edge> = graph_file
2014 .edges
2015 .iter()
2016 .filter(|edge| node_set.contains(&edge.source_id) && node_set.contains(&edge.target_id))
2017 .collect();
2018 edges.sort_by(|a, b| {
2019 a.source_id
2020 .cmp(&b.source_id)
2021 .then_with(|| a.relation.cmp(&b.relation))
2022 .then_with(|| a.target_id.cmp(&b.target_id))
2023 });
2024
2025 Ok((nodes, edges))
2026}
2027
/// Escape double quotes and newlines for a DOT string literal.
fn escape_dot(value: &str) -> String {
    let mut out = String::with_capacity(value.len());
    for ch in value.chars() {
        match ch {
            '"' => out.push_str("\\\""),
            '\n' => out.push_str("\\n"),
            other => out.push(other),
        }
    }
    out
}
2031
/// Escape double quotes and newlines for a Mermaid label.
fn escape_mermaid(value: &str) -> String {
    value
        .chars()
        .map(|ch| match ch {
            '"' => "\\\"".to_owned(),
            '\n' => "\\n".to_owned(),
            other => other.to_string(),
        })
        .collect()
}
2035
/// Reduce an arbitrary id to Mermaid-safe characters.
///
/// Every character that is not ASCII-alphanumeric or '_' becomes '_'.
/// An empty input falls back to "node" so the generated Mermaid syntax
/// stays valid.
fn sanitize_mermaid_id(value: &str) -> String {
    let cleaned: String = value
        .chars()
        .map(|ch| {
            if ch.is_ascii_alphanumeric() || ch == '_' {
                ch
            } else {
                '_'
            }
        })
        .collect();
    if cleaned.is_empty() {
        "node".to_owned()
    } else {
        cleaned
    }
}
2051
2052fn render_graph_history(path: &Path, graph: &str, args: &HistoryArgs) -> Result<String> {
2053 let backups = list_graph_backups(path)?;
2054 let total = backups.len();
2055 let snapshots: Vec<(u64, PathBuf)> = backups.into_iter().rev().take(args.limit).collect();
2056
2057 if args.json {
2058 let payload = GraphHistoryResponse {
2059 graph: graph.to_owned(),
2060 total,
2061 snapshots: snapshots
2062 .iter()
2063 .map(|(ts, backup_path)| GraphHistorySnapshot {
2064 ts: *ts,
2065 path: backup_path.display().to_string(),
2066 })
2067 .collect(),
2068 };
2069 let rendered =
2070 serde_json::to_string_pretty(&payload).context("failed to render history as JSON")?;
2071 return Ok(format!("{rendered}\n"));
2072 }
2073
2074 let mut lines = vec![format!("= history {graph} ({total})")];
2075 for (ts, backup_path) in snapshots {
2076 lines.push(format!("- {ts} | {}", backup_path.display()));
2077 }
2078 Ok(format!("{}\n", lines.join("\n")))
2079}
2080
2081fn render_graph_timeline(path: &Path, graph: &str, args: &TimelineArgs) -> Result<String> {
2082 let entries = event_log::read_log(path)?;
2083 let total = entries.len();
2084 let filtered: Vec<&event_log::EventLogEntry> = entries
2085 .iter()
2086 .filter(|entry| {
2087 let after_since = args
2088 .since_ts_ms
2089 .map(|since| entry.ts_ms >= since)
2090 .unwrap_or(true);
2091 let before_until = args
2092 .until_ts_ms
2093 .map(|until| entry.ts_ms <= until)
2094 .unwrap_or(true);
2095 after_since && before_until
2096 })
2097 .collect();
2098 let recent: Vec<&event_log::EventLogEntry> =
2099 filtered.into_iter().rev().take(args.limit).collect();
2100
2101 if args.json {
2102 let payload = GraphTimelineResponse {
2103 graph: graph.to_owned(),
2104 total,
2105 filtered: recent.len(),
2106 since_ts_ms: args.since_ts_ms,
2107 until_ts_ms: args.until_ts_ms,
2108 entries: recent
2109 .iter()
2110 .map(|entry| GraphTimelineEntry {
2111 ts_ms: entry.ts_ms,
2112 action: entry.action.clone(),
2113 detail: entry.detail.clone(),
2114 node_count: entry.graph.nodes.len(),
2115 edge_count: entry.graph.edges.len(),
2116 note_count: entry.graph.notes.len(),
2117 })
2118 .collect(),
2119 };
2120 let rendered =
2121 serde_json::to_string_pretty(&payload).context("failed to render timeline as JSON")?;
2122 return Ok(format!("{rendered}\n"));
2123 }
2124
2125 let mut lines = vec![format!("= timeline {graph} ({total})")];
2126 if args.since_ts_ms.is_some() || args.until_ts_ms.is_some() {
2127 lines.push(format!(
2128 "range: {} -> {}",
2129 args.since_ts_ms
2130 .map(|value| value.to_string())
2131 .unwrap_or_else(|| "-inf".to_owned()),
2132 args.until_ts_ms
2133 .map(|value| value.to_string())
2134 .unwrap_or_else(|| "+inf".to_owned())
2135 ));
2136 lines.push(format!("showing: {}", recent.len()));
2137 }
2138 for entry in recent {
2139 let detail = entry
2140 .detail
2141 .as_deref()
2142 .map(|value| format!(" | {value}"))
2143 .unwrap_or_default();
2144 lines.push(format!(
2145 "- {} | {}{} | nodes: {} | edges: {} | notes: {}",
2146 entry.ts_ms,
2147 entry.action,
2148 detail,
2149 entry.graph.nodes.len(),
2150 entry.graph.edges.len(),
2151 entry.graph.notes.len()
2152 ));
2153 }
2154 Ok(format!("{}\n", lines.join("\n")))
2155}
2156
/// One backup snapshot row in the `history --json` payload (serialized by serde).
#[derive(Debug, Serialize)]
struct GraphHistorySnapshot {
    // Snapshot timestamp; backups elsewhere in this file are keyed in seconds
    // (ms / 1000) — TODO confirm the unit here matches.
    ts: u64,
    // Filesystem location of the backup file, rendered as a display string.
    path: String,
}
2162
/// JSON payload for the `history` command: every known snapshot of a graph.
#[derive(Debug, Serialize)]
struct GraphHistoryResponse {
    // Name of the graph the snapshots belong to.
    graph: String,
    // Total number of snapshots found (presumably before any limit — confirm at call site).
    total: usize,
    snapshots: Vec<GraphHistorySnapshot>,
}
2169
/// One event-log entry in the `timeline --json` payload.
#[derive(Debug, Serialize)]
struct GraphTimelineEntry {
    // Event timestamp in milliseconds.
    ts_ms: u64,
    // Action name recorded in the event log.
    action: String,
    // Optional free-form detail attached to the event.
    detail: Option<String>,
    // Graph size captured by the event's stored graph snapshot.
    node_count: usize,
    edge_count: usize,
    note_count: usize,
}
2179
/// JSON payload for the `timeline` command.
#[derive(Debug, Serialize)]
struct GraphTimelineResponse {
    graph: String,
    // Total entries in the event log before range filtering.
    total: usize,
    // Number of entries actually returned (after range filter and limit).
    filtered: usize,
    // Echo of the requested time range, when given.
    since_ts_ms: Option<u64>,
    until_ts_ms: Option<u64>,
    entries: Vec<GraphTimelineEntry>,
}
2189
2190fn render_graph_diff_as_of(path: &Path, graph: &str, args: &DiffAsOfArgs) -> Result<String> {
2191 match resolve_temporal_source(path, args.source)? {
2192 TemporalSource::EventLog => render_graph_diff_as_of_event_log(path, graph, args),
2193 _ => render_graph_diff_as_of_backups(path, graph, args),
2194 }
2195}
2196
2197fn render_graph_diff_as_of_json(path: &Path, graph: &str, args: &DiffAsOfArgs) -> Result<String> {
2198 match resolve_temporal_source(path, args.source)? {
2199 TemporalSource::EventLog => render_graph_diff_as_of_event_log_json(path, graph, args),
2200 _ => render_graph_diff_as_of_backups_json(path, graph, args),
2201 }
2202}
2203
2204fn render_graph_diff_as_of_backups(
2205 path: &Path,
2206 graph: &str,
2207 args: &DiffAsOfArgs,
2208) -> Result<String> {
2209 let backups = list_graph_backups(path)?;
2210 if backups.is_empty() {
2211 bail!("no backups found for graph: {graph}");
2212 }
2213 let from_ts = args.from_ts_ms / 1000;
2214 let to_ts = args.to_ts_ms / 1000;
2215 let from_backup = select_backup_at_or_before(&backups, from_ts)
2216 .ok_or_else(|| anyhow!("no backup at or before from_ts_ms={}", args.from_ts_ms))?;
2217 let to_backup = select_backup_at_or_before(&backups, to_ts)
2218 .ok_or_else(|| anyhow!("no backup at or before to_ts_ms={}", args.to_ts_ms))?;
2219
2220 let from_graph = load_graph_from_backup(&from_backup.1)?;
2221 let to_graph = load_graph_from_backup(&to_backup.1)?;
2222 let left_label = format!("{graph}@{}", args.from_ts_ms);
2223 let right_label = format!("{graph}@{}", args.to_ts_ms);
2224 Ok(render_graph_diff_from_files(
2225 &left_label,
2226 &right_label,
2227 &from_graph,
2228 &to_graph,
2229 ))
2230}
2231
2232fn render_graph_diff_as_of_backups_json(
2233 path: &Path,
2234 graph: &str,
2235 args: &DiffAsOfArgs,
2236) -> Result<String> {
2237 let backups = list_graph_backups(path)?;
2238 if backups.is_empty() {
2239 bail!("no backups found for graph: {graph}");
2240 }
2241 let from_ts = args.from_ts_ms / 1000;
2242 let to_ts = args.to_ts_ms / 1000;
2243 let from_backup = select_backup_at_or_before(&backups, from_ts)
2244 .ok_or_else(|| anyhow!("no backup at or before from_ts_ms={}", args.from_ts_ms))?;
2245 let to_backup = select_backup_at_or_before(&backups, to_ts)
2246 .ok_or_else(|| anyhow!("no backup at or before to_ts_ms={}", args.to_ts_ms))?;
2247
2248 let from_graph = load_graph_from_backup(&from_backup.1)?;
2249 let to_graph = load_graph_from_backup(&to_backup.1)?;
2250 let left_label = format!("{graph}@{}", args.from_ts_ms);
2251 let right_label = format!("{graph}@{}", args.to_ts_ms);
2252 Ok(render_graph_diff_json_from_files(
2253 &left_label,
2254 &right_label,
2255 &from_graph,
2256 &to_graph,
2257 ))
2258}
2259
2260fn render_graph_diff_as_of_event_log(
2261 path: &Path,
2262 graph: &str,
2263 args: &DiffAsOfArgs,
2264) -> Result<String> {
2265 let entries = event_log::read_log(path)?;
2266 if entries.is_empty() {
2267 bail!("no event log entries found for graph: {graph}");
2268 }
2269 let from_entry = select_event_at_or_before(&entries, args.from_ts_ms).ok_or_else(|| {
2270 anyhow!(
2271 "no event log entry at or before from_ts_ms={}",
2272 args.from_ts_ms
2273 )
2274 })?;
2275 let to_entry = select_event_at_or_before(&entries, args.to_ts_ms)
2276 .ok_or_else(|| anyhow!("no event log entry at or before to_ts_ms={}", args.to_ts_ms))?;
2277
2278 let left_label = format!("{graph}@{}", args.from_ts_ms);
2279 let right_label = format!("{graph}@{}", args.to_ts_ms);
2280 Ok(render_graph_diff_from_files(
2281 &left_label,
2282 &right_label,
2283 &from_entry.graph,
2284 &to_entry.graph,
2285 ))
2286}
2287
2288fn render_graph_diff_as_of_event_log_json(
2289 path: &Path,
2290 graph: &str,
2291 args: &DiffAsOfArgs,
2292) -> Result<String> {
2293 let entries = event_log::read_log(path)?;
2294 if entries.is_empty() {
2295 bail!("no event log entries found for graph: {graph}");
2296 }
2297 let from_entry = select_event_at_or_before(&entries, args.from_ts_ms).ok_or_else(|| {
2298 anyhow!(
2299 "no event log entry at or before from_ts_ms={}",
2300 args.from_ts_ms
2301 )
2302 })?;
2303 let to_entry = select_event_at_or_before(&entries, args.to_ts_ms)
2304 .ok_or_else(|| anyhow!("no event log entry at or before to_ts_ms={}", args.to_ts_ms))?;
2305
2306 let left_label = format!("{graph}@{}", args.from_ts_ms);
2307 let right_label = format!("{graph}@{}", args.to_ts_ms);
2308 Ok(render_graph_diff_json_from_files(
2309 &left_label,
2310 &right_label,
2311 &from_entry.graph,
2312 &to_entry.graph,
2313 ))
2314}
2315
2316fn resolve_temporal_source(path: &Path, source: TemporalSource) -> Result<TemporalSource> {
2317 if matches!(source, TemporalSource::Auto) {
2318 let has_events = event_log::has_log(path);
2319 return Ok(if has_events {
2320 TemporalSource::EventLog
2321 } else {
2322 TemporalSource::Backups
2323 });
2324 }
2325 Ok(source)
2326}
2327
2328fn select_event_at_or_before(
2329 entries: &[event_log::EventLogEntry],
2330 target_ts_ms: u64,
2331) -> Option<&event_log::EventLogEntry> {
2332 let mut selected = None;
2333 for entry in entries {
2334 if entry.ts_ms <= target_ts_ms {
2335 selected = Some(entry);
2336 }
2337 }
2338 selected
2339}
2340
/// Return the last backup (in slice order) whose timestamp is <= `target_ts`,
/// or `None` when every backup is newer than the target.
///
/// The original looped and cloned the `PathBuf` for *every* matching element;
/// this version finds the winner first and clones exactly once.
fn select_backup_at_or_before(
    backups: &[(u64, PathBuf)],
    target_ts: u64,
) -> Option<(u64, PathBuf)> {
    backups
        .iter()
        .filter(|(ts, _)| *ts <= target_ts)
        .last()
        .map(|(ts, path)| (*ts, path.clone()))
}
2353
2354fn load_graph_from_backup(path: &Path) -> Result<GraphFile> {
2355 let raw = read_gz_to_string(path)?;
2356 let graph: GraphFile = serde_json::from_str(&raw)
2357 .with_context(|| format!("failed to parse backup: {}", path.display()))?;
2358 Ok(graph)
2359}
2360
2361fn collect_node_list<'a>(graph: &'a GraphFile, args: &ListNodesArgs) -> (usize, Vec<&'a Node>) {
2362 let type_filter: Vec<String> = args.node_types.iter().map(|t| t.to_lowercase()).collect();
2363 let include_all_types = type_filter.is_empty();
2364
2365 let mut nodes: Vec<&Node> = graph
2366 .nodes
2367 .iter()
2368 .filter(|node| args.include_features || node.r#type != "Feature")
2369 .filter(|node| include_all_types || type_filter.contains(&node.r#type.to_lowercase()))
2370 .collect();
2371
2372 nodes.sort_by(|a, b| a.id.cmp(&b.id));
2373
2374 let total = nodes.len();
2375 let visible: Vec<&Node> = nodes.into_iter().take(args.limit).collect();
2376 (total, visible)
2377}
2378
2379fn render_node_list(graph: &GraphFile, args: &ListNodesArgs) -> String {
2380 let (total, visible) = collect_node_list(graph, args);
2381
2382 let mut lines = vec![format!("= nodes ({total})")];
2383 for node in visible {
2384 if args.full {
2385 lines.push(output::render_node(graph, node, true).trim_end().to_owned());
2386 } else {
2387 lines.push(format!("# {} | {} [{}]", node.id, node.name, node.r#type));
2388 }
2389 }
2390
2391 format!("{}\n", lines.join("\n"))
2392}
2393
2394fn render_note_list(graph: &GraphFile, args: &NoteListArgs) -> String {
2395 let mut notes: Vec<&Note> = graph
2396 .notes
2397 .iter()
2398 .filter(|note| args.node.as_ref().is_none_or(|node| note.node_id == *node))
2399 .collect();
2400
2401 notes.sort_by(|a, b| {
2402 a.created_at
2403 .cmp(&b.created_at)
2404 .then_with(|| a.id.cmp(&b.id))
2405 });
2406
2407 let total = notes.len();
2408 let visible = notes.into_iter().take(args.limit);
2409
2410 let mut lines = vec![format!("= notes ({total})")];
2411 for note in visible {
2412 let mut line = format!(
2413 "- {} | {} | {} | {}",
2414 note.id,
2415 note.node_id,
2416 note.created_at,
2417 truncate_note(¬e.body, 80)
2418 );
2419 if !note.tags.is_empty() {
2420 line.push_str(" | tags: ");
2421 line.push_str(¬e.tags.join(", "));
2422 }
2423 if !note.author.is_empty() {
2424 line.push_str(" | by: ");
2425 line.push_str(¬e.author);
2426 }
2427 lines.push(line);
2428 }
2429
2430 format!("{}\n", lines.join("\n"))
2431}
2432
2433fn build_note(graph: &GraphFile, args: NoteAddArgs) -> Result<Note> {
2434 if graph.node_by_id(&args.node_id).is_none() {
2435 bail!("node not found: {}", args.node_id);
2436 }
2437 let ts = now_ms();
2438 let id = args.id.unwrap_or_else(|| format!("note:{ts}"));
2439 let created_at = args.created_at.unwrap_or_else(|| ts.to_string());
2440 Ok(Note {
2441 id,
2442 node_id: args.node_id,
2443 body: args.text,
2444 tags: args.tag,
2445 author: args.author.unwrap_or_default(),
2446 created_at,
2447 provenance: args.provenance.unwrap_or_default(),
2448 source_files: args.source,
2449 })
2450}
2451
/// Clip `value` to at most `max_len` characters, appending `...` when clipped.
///
/// Counts Unicode scalar values, not bytes, so multi-byte text is never split
/// mid-character. Fix over the original: for tiny budgets (`max_len <= 3`) the
/// old code returned a bare `"..."` (3 chars), which could *exceed* `max_len`;
/// now the result never exceeds the budget.
fn truncate_note(value: &str, max_len: usize) -> String {
    if value.chars().count() <= max_len {
        return value.to_owned();
    }
    if max_len <= 3 {
        // No room for an ellipsis marker — hard cut instead.
        return value.chars().take(max_len).collect();
    }
    let kept: String = value.chars().take(max_len - 3).collect();
    format!("{kept}...")
}
2460
/// Current wall-clock time as milliseconds since the Unix epoch.
/// A clock set before the epoch degrades to 0 rather than panicking.
fn now_ms() -> u128 {
    use std::time::{SystemTime, UNIX_EPOCH};

    SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .map(|elapsed| elapsed.as_millis())
        .unwrap_or(0)
}
2469
/// Translate the CLI find-mode flag into the output module's `FindMode`.
///
/// NOTE(review): `Vector` maps to `Fuzzy` — `output::FindMode` has no vector
/// variant visible here, so this looks like a deliberate fallback; confirm
/// that vector search is handled elsewhere before relying on this mapping.
fn map_find_mode(mode: CliFindMode) -> output::FindMode {
    match mode {
        CliFindMode::Fuzzy => output::FindMode::Fuzzy,
        CliFindMode::Bm25 => output::FindMode::Bm25,
        CliFindMode::Vector => output::FindMode::Fuzzy,
    }
}
2477
2478fn render_feedback_log(cwd: &Path, args: &FeedbackLogArgs) -> Result<String> {
2479 let path = cwd.join("kg-mcp.feedback.log");
2480 if !path.exists() {
2481 return Ok(String::from("= feedback-log\nempty: no entries yet\n"));
2482 }
2483
2484 let content = std::fs::read_to_string(&path)?;
2485 let mut entries: Vec<FeedbackLogEntry> = Vec::new();
2486 for line in content.lines() {
2487 if let Some(entry) = FeedbackLogEntry::parse(line) {
2488 if let Some(ref uid) = args.uid {
2489 if &entry.uid != uid {
2490 continue;
2491 }
2492 }
2493 if let Some(ref graph) = args.graph {
2494 if &entry.graph != graph {
2495 continue;
2496 }
2497 }
2498 entries.push(entry);
2499 }
2500 }
2501
2502 entries.reverse();
2503 let shown: Vec<&FeedbackLogEntry> = entries.iter().take(args.limit).collect();
2504
2505 let mut output = vec![String::from("= feedback-log")];
2506 output.push(format!("total_entries: {}", entries.len()));
2507 output.push(format!("showing: {}", shown.len()));
2508 output.push(String::from("recent_entries:"));
2509 for e in shown {
2510 let pick = e.pick.as_deref().unwrap_or("-");
2511 let selected = e.selected.as_deref().unwrap_or("-");
2512 let graph = if e.graph.is_empty() { "-" } else { &e.graph };
2513 let queries = if e.queries.is_empty() {
2514 "-"
2515 } else {
2516 &e.queries
2517 };
2518 output.push(format!(
2519 "- {} | {} | {} | pick={} | selected={} | graph={} | {}",
2520 e.ts_ms, e.uid, e.action, pick, selected, graph, queries
2521 ));
2522 }
2523
2524 Ok(format!("{}\n", output.join("\n")))
2525}
2526
2527fn handle_vector_command(
2528 path: &Path,
2529 _graph: &str,
2530 graph_file: &GraphFile,
2531 command: &VectorCommand,
2532 _cwd: &Path,
2533) -> Result<String> {
2534 match command {
2535 VectorCommand::Import(args) => {
2536 let vector_path = path
2537 .parent()
2538 .map(|p| p.join(".kg.vectors.json"))
2539 .unwrap_or_else(|| PathBuf::from(".kg.vectors.json"));
2540 let store =
2541 vectors::VectorStore::import_jsonl(std::path::Path::new(&args.input), graph_file)?;
2542 store.save(&vector_path)?;
2543 Ok(format!(
2544 "+ imported {} vectors (dim={}) to {}\n",
2545 store.vectors.len(),
2546 store.dimension,
2547 vector_path.display()
2548 ))
2549 }
2550 VectorCommand::Stats(_args) => {
2551 let vector_path = path
2552 .parent()
2553 .map(|p| p.join(".kg.vectors.json"))
2554 .unwrap_or_else(|| PathBuf::from(".kg.vectors.json"));
2555 if !vector_path.exists() {
2556 return Ok(String::from("= vectors\nnot initialized\n"));
2557 }
2558 let store = vectors::VectorStore::load(&vector_path)?;
2559 let node_ids: Vec<_> = store.vectors.keys().cloned().collect();
2560 let in_graph = node_ids
2561 .iter()
2562 .filter(|id| graph_file.node_by_id(id).is_some())
2563 .count();
2564 Ok(format!(
2565 "= vectors\ndimension: {}\ntotal: {}\nin_graph: {}\n",
2566 store.dimension,
2567 store.vectors.len(),
2568 in_graph
2569 ))
2570 }
2571 }
2572}
2573
2574fn render_feedback_summary(cwd: &Path, args: &FeedbackSummaryArgs) -> Result<String> {
2575 use std::collections::HashMap;
2576
2577 let path = cwd.join("kg-mcp.feedback.log");
2578 if !path.exists() {
2579 return Ok(String::from("= feedback-summary\nNo feedback yet.\n"));
2580 }
2581
2582 let content = std::fs::read_to_string(&path)?;
2583 let mut entries: Vec<FeedbackLogEntry> = Vec::new();
2584 for line in content.lines() {
2585 if let Some(entry) = FeedbackLogEntry::parse(line) {
2586 if let Some(ref graph) = args.graph {
2587 if &entry.graph != graph {
2588 continue;
2589 }
2590 }
2591 entries.push(entry);
2592 }
2593 }
2594
2595 entries.reverse();
2596 let _shown = entries.iter().take(args.limit).collect::<Vec<_>>();
2597
2598 let mut lines = vec![String::from("= feedback-summary")];
2599 lines.push(format!("Total entries: {}", entries.len()));
2600
2601 let mut by_action: HashMap<&str, usize> = HashMap::new();
2602 let mut nil_queries: Vec<&str> = Vec::new();
2603 let mut yes_count = 0;
2604 let mut no_count = 0;
2605 let mut pick_map: HashMap<&str, usize> = HashMap::new();
2606 let mut query_counts: HashMap<&str, usize> = HashMap::new();
2607
2608 for e in &entries {
2609 *by_action.entry(&e.action).or_insert(0) += 1;
2610
2611 match e.action.as_str() {
2612 "NIL" => {
2613 if !e.queries.is_empty() {
2614 nil_queries.push(&e.queries);
2615 }
2616 }
2617 "YES" => yes_count += 1,
2618 "NO" => no_count += 1,
2619 "PICK" => {
2620 if let Some(ref sel) = e.selected {
2621 *pick_map.entry(sel).or_insert(0) += 1;
2622 }
2623 }
2624 _ => {}
2625 }
2626
2627 if !e.queries.is_empty() {
2628 *query_counts.entry(&e.queries).or_insert(0) += 1;
2629 }
2630 }
2631
2632 lines.push(String::from("\n### By response"));
2633 lines.push(format!(
2634 "YES: {} ({:.0}%)",
2635 yes_count,
2636 if !entries.is_empty() {
2637 (yes_count as f64 / entries.len() as f64) * 100.0
2638 } else {
2639 0.0
2640 }
2641 ));
2642 lines.push(format!("NO: {}", no_count));
2643 lines.push(format!("PICK: {}", by_action.get("PICK").unwrap_or(&0)));
2644 lines.push(format!("NIL: {} (no results)", nil_queries.len()));
2645
2646 if !nil_queries.is_empty() {
2647 lines.push(String::from("\n### BrakujÄ…ce node'y (NIL queries)"));
2648 for q in nil_queries.iter().take(10) {
2649 lines.push(format!("- \"{}\"", q));
2650 }
2651 if nil_queries.len() > 10 {
2652 lines.push(format!(" ... i {} więcej", nil_queries.len() - 10));
2653 }
2654 }
2655
2656 if !pick_map.is_empty() {
2657 lines.push(String::from("\n### Najczęściej wybierane node'y (PICK)"));
2658 let mut sorted: Vec<_> = pick_map.iter().collect();
2659 sorted.sort_by(|a, b| b.1.cmp(a.1));
2660 for (node, count) in sorted.iter().take(10) {
2661 lines.push(format!("- {} ({}x)", node, count));
2662 }
2663 }
2664
2665 if !query_counts.is_empty() {
2666 lines.push(String::from("\n### Top wyszukiwane terminy"));
2667 let mut sorted: Vec<_> = query_counts.iter().collect();
2668 sorted.sort_by(|a, b| b.1.cmp(a.1));
2669 for (query, count) in sorted.iter().take(10) {
2670 lines.push(format!("- \"{}\" ({})", query, count));
2671 }
2672 }
2673
2674 if yes_count == 0 && no_count == 0 && nil_queries.is_empty() {
2675 lines.push(String::from(
2676 "\n(Wpływy za mało na wnioski - potrzeba więcej feedbacku)",
2677 ));
2678 } else if yes_count > no_count * 3 {
2679 lines.push(String::from(
2680 "\n✓ Feedback pozytywny - wyszukiwania działają dobrze.",
2681 ));
2682 } else if no_count > yes_count {
2683 lines.push(String::from(
2684 "\n⚠Dużo NO - sprawdź jakość aliasów i dopasowań.",
2685 ));
2686 }
2687
2688 Ok(format!("{}\n", lines.join("\n")))
2689}
2690
2691fn render_feedback_summary_for_graph(
2692 cwd: &Path,
2693 graph: &str,
2694 args: &FeedbackSummaryArgs,
2695) -> Result<String> {
2696 let mut args = args.clone();
2697 args.graph = Some(graph.to_string());
2698 render_feedback_summary(cwd, &args)
2699}
2700
/// One parsed line of `kg-mcp.feedback.log` (tab-separated `key=value` fields).
#[derive(Debug, Clone)]
struct FeedbackLogEntry {
    // Kept as the raw string from the log, never parsed to a number here.
    ts_ms: String,
    uid: String,
    // Response kind; values observed in this file: YES, NO, PICK, NIL.
    action: String,
    // `None` when the log field held the "-" placeholder.
    pick: Option<String>,
    selected: Option<String>,
    // Empty string when the log field was absent or "-".
    graph: String,
    queries: String,
}
2711
2712impl FeedbackLogEntry {
2713 fn parse(line: &str) -> Option<Self> {
2714 let mut ts_ms: Option<String> = None;
2717 let mut uid: Option<String> = None;
2718 let mut action: Option<String> = None;
2719 let mut pick: Option<String> = None;
2720 let mut selected: Option<String> = None;
2721 let mut graph: Option<String> = None;
2722 let mut queries: Option<String> = None;
2723
2724 for part in line.split('\t') {
2725 let (k, v) = part.split_once('=')?;
2726 let v = v.trim();
2727 match k {
2728 "ts_ms" => ts_ms = Some(v.to_owned()),
2729 "uid" => uid = Some(v.to_owned()),
2730 "action" => action = Some(v.to_owned()),
2731 "pick" => {
2732 if v != "-" {
2733 pick = Some(v.to_owned());
2734 }
2735 }
2736 "selected" => {
2737 if v != "-" {
2738 selected = Some(v.to_owned());
2739 }
2740 }
2741 "graph" => {
2742 if v != "-" {
2743 graph = Some(v.to_owned());
2744 }
2745 }
2746 "queries" => {
2747 if v != "-" {
2748 queries = Some(v.to_owned());
2749 }
2750 }
2751 _ => {}
2752 }
2753 }
2754
2755 Some(Self {
2756 ts_ms: ts_ms?,
2757 uid: uid?,
2758 action: action?,
2759 pick,
2760 selected,
2761 graph: graph.unwrap_or_default(),
2762 queries: queries.unwrap_or_default(),
2763 })
2764 }
2765}
2766
2767pub fn default_graph_root(cwd: &Path) -> PathBuf {
2776 let home = std::env::var_os("HOME")
2777 .map(PathBuf::from)
2778 .or_else(|| std::env::var_os("USERPROFILE").map(PathBuf::from));
2779 graph_root_from(home.as_deref(), cwd)
2780}
2781
/// Build the graph root under `home` when available, otherwise under `cwd`.
fn graph_root_from(home: Option<&Path>, cwd: &Path) -> PathBuf {
    home.unwrap_or(cwd).join(".kg").join("graphs")
}
2788
2789pub fn resolve_graph_path(cwd: &Path, graph_root: &Path, graph: &str) -> Result<PathBuf> {
2794 let store = graph_store(cwd, graph_root)?;
2795 store.resolve_graph_path(graph)
2796}
2797
2798fn render_check(graph: &GraphFile, cwd: &Path, args: &CheckArgs) -> String {
2803 let report = validate_graph(graph, cwd, args.deep, args.base_dir.as_deref());
2804 format_validation_report(
2805 "check",
2806 &report.errors,
2807 &report.warnings,
2808 args.errors_only,
2809 args.warnings_only,
2810 args.limit,
2811 )
2812}
2813
2814fn render_audit(graph: &GraphFile, cwd: &Path, args: &AuditArgs) -> String {
2815 let report = validate_graph(graph, cwd, args.deep, args.base_dir.as_deref());
2816 format_validation_report(
2817 "audit",
2818 &report.errors,
2819 &report.warnings,
2820 args.errors_only,
2821 args.warnings_only,
2822 args.limit,
2823 )
2824}
2825
/// Format a validation report: header, VALID/INVALID status, counts, then the
/// error and warning lists (each suppressible and capped at `limit` items).
fn format_validation_report(
    header: &str,
    errors: &[String],
    warnings: &[String],
    errors_only: bool,
    warnings_only: bool,
    limit: usize,
) -> String {
    let status = if errors.is_empty() { "VALID" } else { "INVALID" };
    let mut out = vec![
        format!("= {header}"),
        format!("status: {status}"),
        format!("errors: {}", errors.len()),
        format!("warnings: {}", warnings.len()),
    ];
    if !warnings_only {
        out.push("error-list:".to_owned());
        out.extend(errors.iter().take(limit).map(|error| format!("- {error}")));
    }
    if !errors_only {
        out.push("warning-list:".to_owned());
        out.extend(
            warnings
                .iter()
                .take(limit)
                .map(|warning| format!("- {warning}")),
        );
    }
    format!("{}\n", out.join("\n"))
}
2859
2860#[cfg(test)]
2865mod tests {
2866 use super::*;
2867 use tempfile::tempdir;
2868
    /// Deserialize the bundled fridge example graph used as the test fixture.
    fn fixture_graph() -> GraphFile {
        serde_json::from_str(include_str!("../graph-example-fridge.json")).expect("fixture graph")
    }
2872
    /// Graph root used by tests: `<cwd>/.kg/graphs` (never the real $HOME root).
    fn test_graph_root(cwd: &Path) -> PathBuf {
        cwd.join(".kg").join("graphs")
    }
2876
    /// Write the fridge fixture graph into `dir` and return its path.
    fn write_fixture(dir: &Path) -> PathBuf {
        std::fs::create_dir_all(dir).expect("create graph root");
        let path = dir.join("fridge.json");
        std::fs::write(&path, include_str!("../graph-example-fridge.json")).expect("write fixture");
        path
    }
2883
    /// Write a `.kg.toml` config file with the given body into `cwd`.
    fn write_config(cwd: &Path, body: &str) {
        std::fs::write(cwd.join(".kg.toml"), body).expect("write config");
    }
2887
    /// Persist a (possibly mutated) graph back to disk for a test.
    fn write_graph(path: &Path, graph: &GraphFile) {
        graph.save(path).expect("save graph");
    }
2891
    /// Parse `args` via clap and execute the command against the test graph
    /// root, panicking on parse or execution failure.
    fn exec_ok(args: &[&str], cwd: &Path) -> String {
        let cli = Cli::try_parse_from(normalize_args(args.iter().map(OsString::from)))
            .expect("parse args");
        execute(cli, cwd, &test_graph_root(cwd)).expect("execute")
    }
2897
    /// Like `exec_ok` but returns the `Result` so tests can assert on errors.
    fn exec_safe(args: &[&str], cwd: &Path) -> Result<String> {
        run_args_safe(args.iter().map(OsString::from), cwd)
    }
2901
    /// `kg create` with the default (JSON) backend writes an empty graph file.
    #[test]
    fn create_graph_writes_empty_graph_file() {
        let dir = tempdir().expect("tempdir");
        let output = exec_ok(&["kg", "create", "fridge"], dir.path());
        assert!(output.contains("+ created"));
        assert!(output.contains(".kg/graphs/fridge.json"));
        let graph = GraphFile::load(&test_graph_root(dir.path()).join("fridge.json"))
            .expect("load created graph");
        assert_eq!(graph.metadata.name, "fridge");
        assert_eq!(graph.metadata.node_count, 0);
        assert!(graph.nodes.is_empty());
    }
2914
    /// With `backend = "redb"` in `.kg.toml`, `kg create` writes a `.db` file
    /// that loads back through the store abstraction.
    #[test]
    fn create_graph_writes_redb_file() {
        let dir = tempdir().expect("tempdir");
        write_config(dir.path(), "backend = \"redb\"\n");
        let output = exec_ok(&["kg", "create", "fridge"], dir.path());
        assert!(output.contains("+ created"));
        assert!(output.contains(".kg/graphs/fridge.db"));

        let store = graph_store(dir.path(), &test_graph_root(dir.path())).expect("graph store");
        let path = store.resolve_graph_path("fridge").expect("resolve graph");
        let graph = store.load_graph(&path).expect("load graph");
        assert_eq!(graph.metadata.name, "fridge");
        assert_eq!(graph.metadata.node_count, 0);
        assert!(graph.nodes.is_empty());
    }
2930
    /// `graph_root_from` uses the home directory when present, else the cwd.
    #[test]
    fn graph_root_prefers_home_directory() {
        let cwd = Path::new("/tmp/workspace");
        let home = Path::new("/tmp/home");
        assert_eq!(
            graph_root_from(Some(home), cwd),
            PathBuf::from("/tmp/home/.kg/graphs")
        );
        assert_eq!(
            graph_root_from(None, cwd),
            PathBuf::from("/tmp/workspace/.kg/graphs")
        );
    }
2944
    /// `node find` accepts several queries at once and reports each separately.
    #[test]
    fn find_supports_multiple_queries() {
        let dir = tempdir().expect("tempdir");
        write_fixture(&test_graph_root(dir.path()));
        let output = exec_ok(
            &["kg", "fridge", "node", "find", "lodowka", "smart"],
            dir.path(),
        );
        assert!(output.contains("? lodowka ("));
        assert!(output.contains("# concept:refrigerator | Lodowka"));
        assert!(output.contains("? smart ("));
        assert!(output.contains("# interface:smart_api | Smart Home API (REST)"));
    }
2958
    /// The KQL `node type=...` query restricts results to that node type.
    #[test]
    fn kql_filters_nodes_by_type() {
        let dir = tempdir().expect("tempdir");
        write_fixture(&test_graph_root(dir.path()));
        let output = exec_ok(&["kg", "fridge", "kql", "node type=Concept"], dir.path());
        assert!(output.contains("nodes:"));
        assert!(output.contains("concept:refrigerator"));
    }
2967
    /// A typo'd query ("smrt api") still fuzzy-matches the intended node and
    /// does not pull in unrelated nodes.
    #[test]
    fn find_uses_fuzzy_matching_for_imperfect_queries() {
        let dir = tempdir().expect("tempdir");
        write_fixture(&test_graph_root(dir.path()));
        let output = exec_ok(&["kg", "fridge", "node", "find", "smrt api"], dir.path());
        assert!(output.contains("? smrt api ("));
        assert!(output.contains("# interface:smart_api | Smart Home API (REST)"));
        assert!(!output.contains("# process:diagnostics | Autodiagnostyka"));
    }
2977
2978 #[test]
2979 fn list_graphs_shows_available_graph_names() {
2980 let dir = tempdir().expect("tempdir");
2981 write_fixture(&test_graph_root(dir.path()));
2982 write_fixture(&dir.path().join(".kg").join("graphs"));
2983 let output = exec_ok(&["kg", "list"], dir.path());
2984 assert!(output.contains("= graphs (1)"));
2985 assert!(output.contains("- fridge"));
2986 }
2987
    /// `list --type --limit` reports the total match count in the header even
    /// though the limit trims the visible rows.
    #[test]
    fn list_nodes_supports_type_filter_and_limit() {
        let dir = tempdir().expect("tempdir");
        write_fixture(&test_graph_root(dir.path()));
        let output = exec_ok(
            &["kg", "fridge", "list", "--type", "Process", "--limit", "1"],
            dir.path(),
        );
        assert!(output.contains("= nodes (3)"));
        assert!(output.contains("[Process]"));
        assert!(!output.contains("[Concept]"));
    }
3000
    /// `kg <graph> list` and `kg <graph> node list` produce identical output.
    #[test]
    fn node_list_subcommand_matches_graph_list() {
        let dir = tempdir().expect("tempdir");
        write_fixture(&test_graph_root(dir.path()));
        let graph_list = exec_ok(&["kg", "fridge", "list", "--limit", "5"], dir.path());
        let node_list = exec_ok(
            &["kg", "fridge", "node", "list", "--limit", "5"],
            dir.path(),
        );
        assert_eq!(graph_list, node_list);
    }
3012
3013 #[test]
3014 fn feedback_log_lists_recent_entries_and_supports_filters() {
3015 let dir = tempdir().expect("tempdir");
3016 let log_path = dir.path().join("kg-mcp.feedback.log");
3017 std::fs::write(
3018 &log_path,
3019 "ts_ms=1\tuid=aaaaaa\taction=YES\tpick=-\tselected=concept:refrigerator\tgraph=fridge\tqueries=lodowka\n\
3020ts_ms=2\tuid=bbbbbb\taction=NO\tpick=-\tselected=-\tgraph=fridge\tqueries=smart\n\
3021ts_ms=3\tuid=cccccc\taction=PICK\tpick=2\tselected=process:diagnostics\tgraph=fridge\tqueries=diag\n",
3022 )
3023 .expect("write feedback log");
3024
3025 let output = exec_ok(&["kg", "feedback-log", "--limit", "2"], dir.path());
3026 assert!(output.contains("= feedback-log"));
3027 assert!(output.contains("total_entries: 3"));
3028 assert!(output.contains("showing: 2"));
3029 assert!(output.contains("- 3 | cccccc | PICK"));
3031
3032 let filtered = exec_ok(
3033 &["kg", "feedback-log", "--uid", "aaaaaa", "--limit", "5"],
3034 dir.path(),
3035 );
3036 assert!(filtered.contains("total_entries: 1"));
3037 assert!(filtered.contains("uid=aaaaaa") == false);
3038 assert!(filtered.contains("aaaaaa"));
3039 assert!(!filtered.contains("bbbbbb"));
3040 }
3041
    /// `feedback-summary` aggregates YES/NO/NIL/PICK counts and the top lists
    /// from a mixed feedback log.
    #[test]
    fn feedback_summary_parses_and_aggregates_correctly() {
        let dir = tempdir().expect("tempdir");
        write_fixture(&test_graph_root(dir.path()));
        let log_path = dir.path().join("kg-mcp.feedback.log");
        std::fs::write(
            &log_path,
            "ts_ms=1\tuid=aaaaaa\taction=YES\tpick=-\tselected=concept:foo\tgraph=fridge\tqueries=foo\n\
ts_ms=2\tuid=bbbbbb\taction=YES\tpick=-\tselected=concept:bar\tgraph=fridge\tqueries=bar\n\
ts_ms=3\tuid=cccccc\taction=NO\tpick=-\tselected=-\tgraph=fridge\tqueries=baz\n\
ts_ms=4\tuid=dddddd\taction=NIL\tpick=-\tselected=-\tgraph=fridge\tqueries=missing\n\
ts_ms=5\tuid=eeeeee\taction=PICK\tpick=1\tselected=concept:foo\tgraph=fridge\tqueries=xyz\n",
        )
        .expect("write feedback log");

        let output = exec_ok(&["kg", "fridge", "feedback-summary"], dir.path());
        assert!(output.contains("= feedback-summary"));
        assert!(output.contains("YES: 2"));
        assert!(output.contains("NO: 1"));
        assert!(output.contains("NIL: 1"));
        // NOTE: this heading is asserted with the mojibake exactly as emitted.
        assert!(output.contains("BrakujÄ…ce node'y"));
        assert!(output.contains("missing"));
        assert!(output.contains("concept:foo"));
        assert!(output.contains("Top wyszukiwane"));
    }
3067
    /// `graph stats` with all breakdown flags prints every section header.
    #[test]
    fn graph_stats_reports_counts() {
        let dir = tempdir().expect("tempdir");
        write_fixture(&test_graph_root(dir.path()));
        let output = exec_ok(
            &[
                "kg",
                "graph",
                "fridge",
                "stats",
                "--by-type",
                "--by-relation",
                "--show-sources",
            ],
            dir.path(),
        );
        assert!(output.contains("= stats"));
        assert!(output.contains("nodes:"));
        assert!(output.contains("types:"));
        assert!(output.contains("relations:"));
        assert!(output.contains("sources:"));
    }
3090
    /// Clearing a node's description makes it show up under the
    /// `missing-descriptions` alias command.
    #[test]
    fn graph_missing_descriptions_alias_works() {
        let dir = tempdir().expect("tempdir");
        let path = write_fixture(&test_graph_root(dir.path()));
        let mut graph = GraphFile::load(&path).expect("load graph");
        graph
            .node_by_id_mut("concept:temperature")
            .expect("node")
            .properties
            .description
            .clear();
        write_graph(&path, &graph);
        let output = exec_ok(
            &[
                "kg",
                "graph",
                "fridge",
                "missing-descriptions",
                "--limit",
                "10",
            ],
            dir.path(),
        );
        assert!(output.contains("= missing-descriptions ("));
        assert!(output.contains("concept:temperature"));
    }
3117
    /// Clearing a node's key_facts makes it show up under
    /// `quality missing-facts`.
    #[test]
    fn graph_missing_facts_quality_command_works() {
        let dir = tempdir().expect("tempdir");
        let path = write_fixture(&test_graph_root(dir.path()));
        let mut graph = GraphFile::load(&path).expect("load graph");
        graph
            .node_by_id_mut("process:defrost")
            .expect("node")
            .properties
            .key_facts
            .clear();
        write_graph(&path, &graph);
        let output = exec_ok(
            &[
                "kg",
                "graph",
                "fridge",
                "quality",
                "missing-facts",
                "--limit",
                "10",
            ],
            dir.path(),
        );
        assert!(output.contains("= missing-facts ("));
        assert!(output.contains("process:defrost"));
    }
3145
    /// Adding a node whose name nearly matches an existing one is flagged by
    /// `quality duplicates` at the given similarity threshold.
    #[test]
    fn graph_duplicates_detects_similar_names() {
        let dir = tempdir().expect("tempdir");
        let path = write_fixture(&test_graph_root(dir.path()));
        let mut graph = GraphFile::load(&path).expect("load graph");
        graph.nodes.push(Node {
            id: "concept:smart_home_api".to_owned(),
            r#type: "Interface".to_owned(),
            name: "Smart Home API".to_owned(),
            properties: NodeProperties::default(),
            source_files: vec!["smart_home_integration.md".to_owned()],
        });
        graph.refresh_counts();
        write_graph(&path, &graph);
        let output = exec_ok(
            &[
                "kg",
                "graph",
                "fridge",
                "quality",
                "duplicates",
                "--threshold",
                "0.7",
            ],
            dir.path(),
        );
        assert!(output.contains("= duplicates ("));
        assert!(output.contains("interface:smart_api"));
        assert!(output.contains("concept:smart_home_api"));
    }
3176
    /// Unconnected DataStore/Process nodes are reported by `edge-gaps`.
    #[test]
    fn graph_edge_gaps_reports_structural_gaps() {
        let dir = tempdir().expect("tempdir");
        let path = write_fixture(&test_graph_root(dir.path()));
        let mut graph = GraphFile::load(&path).expect("load graph");
        graph.nodes.push(Node {
            id: "datastore:manual_cache".to_owned(),
            r#type: "DataStore".to_owned(),
            name: "Manual Cache".to_owned(),
            properties: NodeProperties::default(),
            source_files: vec!["manual.md".to_owned()],
        });
        graph.nodes.push(Node {
            id: "process:manual_sync".to_owned(),
            r#type: "Process".to_owned(),
            name: "Manual Sync".to_owned(),
            properties: NodeProperties::default(),
            source_files: vec!["manual.md".to_owned()],
        });
        graph.refresh_counts();
        write_graph(&path, &graph);
        let output = exec_ok(&["kg", "graph", "fridge", "edge-gaps"], dir.path());
        assert!(output.contains("datastore-missing-stored-in:"));
        assert!(output.contains("datastore:manual_cache"));
        assert!(output.contains("process:manual_sync"));
    }
3203
3204 #[test]
3205 fn graph_audit_reports_invalid_conditions() {
3206 let dir = tempdir().expect("tempdir");
3207 let path = write_fixture(&test_graph_root(dir.path()));
3208 let mut graph = GraphFile::load(&path).expect("load graph");
3209 graph.nodes.push(Node {
3210 id: "concept:refrigerator".to_owned(),
3211 r#type: "Concept".to_owned(),
3212 name: "Duplicate Refrigerator".to_owned(),
3213 properties: NodeProperties::default(),
3214 source_files: vec!["missing.md".to_owned()],
3215 });
3216 graph.refresh_counts();
3217 write_graph(&path, &graph);
3218 let output = exec_ok(
3219 &["kg", "graph", "fridge", "audit", "--deep", "--limit", "20"],
3220 dir.path(),
3221 );
3222 assert!(output.contains("= audit"));
3223 assert!(output.contains("status: INVALID"));
3224 assert!(output.contains("duplicate node id: concept:refrigerator"));
3225 assert!(output.contains("missing source file:"));
3226 }
3227
3228 #[test]
3229 fn graph_check_reports_validation_errors() {
3230 let dir = tempdir().expect("tempdir");
3231 let path = write_fixture(&test_graph_root(dir.path()));
3232 let mut graph = GraphFile::load(&path).expect("load graph");
3233 graph.nodes.push(Node {
3234 id: "bad-id".to_owned(),
3235 r#type: "WeirdType".to_owned(),
3236 name: String::new(),
3237 properties: NodeProperties {
3238 confidence: Some(1.5),
3239 ..NodeProperties::default()
3240 },
3241 source_files: Vec::new(),
3242 });
3243 graph.refresh_counts();
3244 write_graph(&path, &graph);
3245 let output = exec_ok(
3246 &["kg", "graph", "fridge", "check", "--limit", "20"],
3247 dir.path(),
3248 );
3249 assert!(output.contains("= check"));
3250 assert!(output.contains("status: INVALID"));
3251 assert!(output.contains("node bad-id has invalid type WeirdType"));
3252 assert!(output.contains("node id bad-id does not match prefix:snake_case"));
3253 assert!(output.contains("node bad-id missing name"));
3254 assert!(output.contains("node bad-id missing source_files"));
3255 assert!(output.contains("confidence out of range"));
3256 }
3257
3258 #[test]
3259 fn resolve_graph_path_uses_config_mapping() {
3260 let dir = tempdir().expect("tempdir");
3261 let mapped_dir = dir.path().join("mapped");
3262 write_fixture(&mapped_dir);
3263 write_config(dir.path(), "[graphs]\nfridge = \"mapped/fridge.json\"\n");
3264 let output = exec_ok(
3265 &["kg", "fridge", "node", "get", "concept:refrigerator"],
3266 dir.path(),
3267 );
3268 assert!(output.contains("# concept:refrigerator | Lodowka"));
3269 }
3270
3271 #[test]
3272 fn get_renders_compact_symbolic_view() {
3273 let graph = fixture_graph();
3274 let node = graph.node_by_id("concept:refrigerator").expect("node");
3275 let rendered = output::render_node(&graph, node, false);
3276 assert!(rendered.contains("# concept:refrigerator | Lodowka"));
3277 assert!(rendered.contains("aka: Chlodziarka, Fridge"));
3278 assert!(rendered.contains("-> HAS | concept:cooling_chamber | Komora Chlodzenia"));
3279 assert!(rendered.contains("-> HAS | concept:temperature | Temperatura"));
3280 }
3281
3282 #[test]
3283 fn add_persists_node_in_existing_graph() {
3284 let dir = tempdir().expect("tempdir");
3285 write_fixture(&test_graph_root(dir.path()));
3286 let output = exec_ok(
3287 &[
3288 "kg",
3289 "fridge",
3290 "node",
3291 "add",
3292 "concept:ice_maker",
3293 "--type",
3294 "Concept",
3295 "--name",
3296 "Kostkarka",
3297 "--description",
3298 "Automatyczna kostkarka do lodu",
3299 "--domain-area",
3300 "hardware",
3301 "--provenance",
3302 "manual",
3303 "--confidence",
3304 "0.9",
3305 "--created-at",
3306 "2026-03-20T01:00:00Z",
3307 "--fact",
3308 "Wytwarza kostki lodu co 2 godziny",
3309 "--alias",
3310 "Ice Maker",
3311 "--source",
3312 "instrukcja_obslugi.md",
3313 ],
3314 dir.path(),
3315 );
3316 assert_eq!(output, "+ node concept:ice_maker\n");
3317 let graph =
3318 GraphFile::load(&test_graph_root(dir.path()).join("fridge.json")).expect("load graph");
3319 let node = graph.node_by_id("concept:ice_maker").expect("new node");
3320 assert_eq!(node.properties.alias, vec!["Ice Maker"]);
3321 assert_eq!(node.properties.domain_area, "hardware");
3322 assert_eq!(node.properties.provenance, "manual");
3323 assert_eq!(node.properties.confidence, Some(0.9));
3324 assert_eq!(node.properties.created_at, "2026-03-20T01:00:00Z");
3325 assert_eq!(graph.metadata.node_count, graph.nodes.len());
3326 }
3327
3328 #[test]
3329 fn modify_updates_existing_node_without_duplicate_values() {
3330 let dir = tempdir().expect("tempdir");
3331 write_fixture(&test_graph_root(dir.path()));
3332 let output = exec_ok(
3333 &[
3334 "kg",
3335 "fridge",
3336 "node",
3337 "modify",
3338 "concept:temperature",
3339 "--name",
3340 "Temperatura Komory",
3341 "--domain-area",
3342 "sensing",
3343 "--provenance",
3344 "service_manual",
3345 "--confidence",
3346 "0.75",
3347 "--created-at",
3348 "2026-03-20T01:05:00Z",
3349 "--fact",
3350 "Alarm po 15 minutach odchylenia",
3351 "--fact",
3352 "Alarm po 15 minutach odchylenia",
3353 "--alias",
3354 "Temp",
3355 "--alias",
3356 "Temp",
3357 "--source",
3358 "panel_api.md",
3359 ],
3360 dir.path(),
3361 );
3362 assert_eq!(output, "~ node concept:temperature\n");
3363 let graph =
3364 GraphFile::load(&test_graph_root(dir.path()).join("fridge.json")).expect("load graph");
3365 let node = graph.node_by_id("concept:temperature").expect("node");
3366 assert_eq!(node.name, "Temperatura Komory");
3367 assert_eq!(node.properties.alias, vec!["Temp"]);
3368 assert_eq!(node.properties.domain_area, "sensing");
3369 assert_eq!(node.properties.provenance, "service_manual");
3370 assert_eq!(node.properties.confidence, Some(0.75));
3371 assert_eq!(node.properties.created_at, "2026-03-20T01:05:00Z");
3372 assert_eq!(
3373 node.properties
3374 .key_facts
3375 .iter()
3376 .filter(|f| f.as_str() == "Alarm po 15 minutach odchylenia")
3377 .count(),
3378 1
3379 );
3380 assert!(node.source_files.iter().any(|s| s == "panel_api.md"));
3381 }
3382
3383 #[test]
3384 fn remove_deletes_node_and_incident_edges() {
3385 let dir = tempdir().expect("tempdir");
3386 write_fixture(&test_graph_root(dir.path()));
3387 let output = exec_ok(
3388 &["kg", "fridge", "node", "remove", "process:defrost"],
3389 dir.path(),
3390 );
3391 assert_eq!(output, "- node process:defrost (3 edges removed)\n");
3392 let graph =
3393 GraphFile::load(&test_graph_root(dir.path()).join("fridge.json")).expect("load graph");
3394 assert!(graph.node_by_id("process:defrost").is_none());
3395 assert!(
3396 graph
3397 .edges
3398 .iter()
3399 .all(|e| e.source_id != "process:defrost" && e.target_id != "process:defrost")
3400 );
3401 }
3402
3403 #[test]
3404 fn edge_add_persists_new_edge() {
3405 let dir = tempdir().expect("tempdir");
3406 write_fixture(&test_graph_root(dir.path()));
3407 let output = exec_ok(
3408 &[
3409 "kg",
3410 "fridge",
3411 "edge",
3412 "add",
3413 "concept:refrigerator",
3414 "READS_FROM",
3415 "datastore:settings_storage",
3416 "--detail",
3417 "Lodowka odczytuje ustawienia z pamieci ustawien",
3418 ],
3419 dir.path(),
3420 );
3421 assert_eq!(
3422 output,
3423 "+ edge concept:refrigerator READS_FROM datastore:settings_storage\n"
3424 );
3425 let graph =
3426 GraphFile::load(&test_graph_root(dir.path()).join("fridge.json")).expect("load graph");
3427 assert!(graph.has_edge(
3428 "concept:refrigerator",
3429 "READS_FROM",
3430 "datastore:settings_storage"
3431 ));
3432 }
3433
3434 #[test]
3435 fn edge_remove_deletes_existing_edge() {
3436 let dir = tempdir().expect("tempdir");
3437 write_fixture(&test_graph_root(dir.path()));
3438 let output = exec_ok(
3439 &[
3440 "kg",
3441 "fridge",
3442 "edge",
3443 "remove",
3444 "concept:refrigerator",
3445 "HAS",
3446 "concept:temperature",
3447 ],
3448 dir.path(),
3449 );
3450 assert_eq!(
3451 output,
3452 "- edge concept:refrigerator HAS concept:temperature\n"
3453 );
3454 let graph =
3455 GraphFile::load(&test_graph_root(dir.path()).join("fridge.json")).expect("load graph");
3456 assert!(!graph.has_edge("concept:refrigerator", "HAS", "concept:temperature"));
3457 }
3458
3459 #[test]
3460 fn help_lists_mvp_commands() {
3461 let help = Cli::try_parse_from(["kg", "--help"]).expect_err("help exits");
3462 let rendered = help.to_string();
3463 assert!(rendered.contains("create"));
3464 assert!(rendered.contains("list"));
3465 assert!(rendered.contains("feedback-log"));
3466 assert!(rendered.contains("fridge node"));
3467 assert!(rendered.contains("edge"));
3468 assert!(rendered.contains("quality"));
3469 assert!(rendered.contains("kg graph fridge stats"));
3470 }
3471
3472 #[test]
3473 fn get_full_renders_new_properties() {
3474 let dir = tempdir().expect("tempdir");
3475 write_fixture(&test_graph_root(dir.path()));
3476 exec_ok(
3477 &[
3478 "kg",
3479 "fridge",
3480 "node",
3481 "modify",
3482 "concept:refrigerator",
3483 "--domain-area",
3484 "appliance",
3485 "--provenance",
3486 "user_import",
3487 "--confidence",
3488 "0.88",
3489 "--created-at",
3490 "2026-03-20T01:10:00Z",
3491 ],
3492 dir.path(),
3493 );
3494 let output = exec_ok(
3495 &[
3496 "kg",
3497 "fridge",
3498 "node",
3499 "get",
3500 "concept:refrigerator",
3501 "--full",
3502 ],
3503 dir.path(),
3504 );
3505 assert!(output.contains("domain_area: appliance"));
3506 assert!(output.contains("provenance: user_import"));
3507 assert!(output.contains("confidence: 0.88"));
3508 assert!(output.contains("created_at: 2026-03-20T01:10:00Z"));
3509 }
3510
3511 #[test]
3512 fn run_args_safe_returns_error_instead_of_exiting() {
3513 let dir = tempdir().expect("tempdir");
3514 let err = exec_safe(&["kg", "create"], dir.path()).expect_err("parse error");
3515 let rendered = err.to_string();
3516 assert!(rendered.contains("required arguments were not provided"));
3517 assert!(rendered.contains("<GRAPH_NAME>"));
3518 }
3519}