1mod access_log;
2mod analysis;
3mod app;
4mod cli;
5mod config;
6mod event_log;
7mod export_html;
8pub mod graph;
9mod import_csv;
10mod import_markdown;
11mod index;
12mod init;
13mod kg_sidecar;
14mod kql;
15mod ops;
16pub mod output;
17mod schema;
18mod storage;
19mod validate;
20mod vectors;
21
22pub use graph::{Edge, EdgeProperties, GraphFile, Metadata, Node, NodeProperties, Note};
24pub use output::FindMode;
25
26pub use validate::{
28 EDGE_TYPE_RULES, TYPE_TO_PREFIX, VALID_RELATIONS, VALID_TYPES, edge_type_rule,
29 format_edge_source_type_error, format_edge_target_type_error,
30};
31
32pub use index::Bm25Index;
34
35use std::ffi::OsString;
36use std::fmt::Write as _;
37use std::path::{Path, PathBuf};
38
39use anyhow::{Context, Result, anyhow, bail};
40use clap::Parser;
41use cli::{
42 AsOfArgs, AuditArgs, BaselineArgs, CheckArgs, Cli, Command, DiffAsOfArgs, ExportDotArgs,
43 ExportGraphmlArgs, ExportMdArgs, ExportMermaidArgs, FeedbackLogArgs, FeedbackSummaryArgs,
44 FindMode as CliFindMode, GraphCommand, HistoryArgs, ImportCsvArgs, ImportMarkdownArgs,
45 MergeStrategy, NoteAddArgs, NoteListArgs, SplitArgs, TemporalSource, TimelineArgs,
46 VectorCommand,
47};
48use serde::{Deserialize, Serialize};
49use serde_json::Value;
50use storage::{GraphStore, graph_store};
52
53use app::graph_node_edge::{GraphCommandContext, execute_edge, execute_node};
54use app::graph_note::{GraphNoteContext, execute_note};
55use app::graph_query_quality::{
56 execute_audit, execute_baseline, execute_check, execute_duplicates, execute_edge_gaps,
57 execute_feedback_log, execute_feedback_summary, execute_kql, execute_missing_descriptions,
58 execute_missing_facts, execute_quality, execute_stats,
59};
60use app::graph_transfer_temporal::{
61 GraphTransferContext, execute_access_log, execute_access_stats, execute_as_of,
62 execute_diff_as_of, execute_export_dot, execute_export_graphml, execute_export_html,
63 execute_export_json, execute_export_md, execute_export_mermaid, execute_history,
64 execute_import_csv, execute_import_json, execute_import_markdown, execute_split,
65 execute_timeline, execute_vector,
66};
67
68use schema::{GraphSchema, SchemaViolation};
69use validate::validate_graph;
70
71fn format_schema_violations(violations: &[SchemaViolation]) -> String {
76 let mut lines = Vec::new();
77 lines.push("schema violations:".to_owned());
78 for v in violations {
79 lines.push(format!(" - {}", v.message));
80 }
81 lines.join("\n")
82}
83
84pub(crate) fn bail_on_schema_violations(violations: &[SchemaViolation]) -> Result<()> {
85 if !violations.is_empty() {
86 anyhow::bail!("{}", format_schema_violations(violations));
87 }
88 Ok(())
89}
90
91fn validate_graph_with_schema(graph: &GraphFile, schema: &GraphSchema) -> Vec<SchemaViolation> {
92 let mut all_violations = Vec::new();
93 for node in &graph.nodes {
94 all_violations.extend(schema.validate_node_add(node));
95 }
96 let node_type_map: std::collections::HashMap<&str, &str> = graph
97 .nodes
98 .iter()
99 .map(|n| (n.id.as_str(), n.r#type.as_str()))
100 .collect();
101 for edge in &graph.edges {
102 if let (Some(src_type), Some(tgt_type)) = (
103 node_type_map.get(edge.source_id.as_str()),
104 node_type_map.get(edge.target_id.as_str()),
105 ) {
106 all_violations.extend(schema.validate_edge_add(
107 &edge.source_id,
108 src_type,
109 &edge.relation,
110 &edge.target_id,
111 tgt_type,
112 ));
113 }
114 }
115 all_violations.extend(schema.validate_uniqueness(&graph.nodes));
116 all_violations
117}
118
119pub fn run<I>(args: I, cwd: &Path) -> Result<()>
127where
128 I: IntoIterator<Item = OsString>,
129{
130 let rendered = run_args(args, cwd)?;
131 print!("{rendered}");
132 Ok(())
133}
134
135pub fn format_error_chain(err: &anyhow::Error) -> String {
136 let mut rendered = err.to_string();
137 let mut causes = err.chain().skip(1).peekable();
138 if causes.peek().is_some() {
139 rendered.push_str("\ncaused by:");
140 for cause in causes {
141 let _ = write!(rendered, "\n - {cause}");
142 }
143 }
144 rendered
145}
146
147pub fn run_args<I>(args: I, cwd: &Path) -> Result<String>
151where
152 I: IntoIterator<Item = OsString>,
153{
154 let cli = Cli::parse_from(normalize_args(args));
155 let graph_root = default_graph_root(cwd);
156 execute(cli, cwd, &graph_root)
157}
158
159pub fn run_args_safe<I>(args: I, cwd: &Path) -> Result<String>
164where
165 I: IntoIterator<Item = OsString>,
166{
167 let cli = Cli::try_parse_from(normalize_args(args)).map_err(|err| anyhow!(err.to_string()))?;
168 let graph_root = default_graph_root(cwd);
169 execute(cli, cwd, &graph_root)
170}
171
/// Insert an implicit `graph` subcommand after argv[0] unless the first user
/// argument is a flag or one of the known top-level subcommands. This lets
/// `tool node add ...` act as shorthand for `tool graph node add ...`.
fn normalize_args<I>(args: I) -> Vec<OsString>
where
    I: IntoIterator<Item = OsString>,
{
    let mut argv: Vec<OsString> = args.into_iter().collect();
    // Nothing after the program name: nothing to normalize.
    let Some(first_arg) = argv.get(1) else {
        return argv;
    };
    let first = first_arg.to_string_lossy();
    let is_top_level = first.starts_with('-')
        || matches!(
            first.as_ref(),
            "init"
                | "create"
                | "diff"
                | "merge"
                | "graph"
                | "list"
                | "feedback-log"
                | "feedback-summary"
        );
    if is_top_level {
        return argv;
    }
    argv.insert(1, OsString::from("graph"));
    argv
}
203
/// Dispatch a parsed CLI command and return the fully rendered output string.
///
/// Top-level commands (`init`, `create`, `diff`, `merge`, `list`,
/// `feedback-log`) are handled inline. Everything under `Command::Graph`
/// resolves and loads the named graph once, discovers an optional schema, and
/// forwards to the matching `execute_*` helper in `app`, passing a context
/// struct that bundles the mutable graph, its path, and the store.
fn execute(cli: Cli, cwd: &Path, graph_root: &Path) -> Result<String> {
    match cli.command {
        Command::Init(args) => Ok(init::render_init(&args)),
        Command::Create { graph_name } => {
            let store = graph_store(cwd, graph_root, false)?;
            let path = store.create_graph(&graph_name)?;
            let graph_file = store.load_graph(&path)?;
            // Record creation in the event log so temporal queries can see it.
            append_event_snapshot(&path, "graph.create", Some(graph_name.clone()), &graph_file)?;
            Ok(format!("+ created {}\n", path.display()))
        }
        Command::Diff { left, right, json } => {
            let store = graph_store(cwd, graph_root, false)?;
            if json {
                render_graph_diff_json(store.as_ref(), &left, &right)
            } else {
                render_graph_diff(store.as_ref(), &left, &right)
            }
        }
        Command::Merge {
            target,
            source,
            strategy,
        } => {
            let store = graph_store(cwd, graph_root, false)?;
            merge_graphs(store.as_ref(), &target, &source, strategy)
        }
        Command::List(args) => {
            let store = graph_store(cwd, graph_root, false)?;
            if args.json {
                render_graph_list_json(store.as_ref())
            } else {
                render_graph_list(store.as_ref(), args.full)
            }
        }
        Command::FeedbackLog(args) => execute_feedback_log(cwd, &args),
        Command::Graph {
            graph,
            legacy,
            command,
        } => {
            let store = graph_store(cwd, graph_root, legacy)?;
            let path = store.resolve_graph_path(&graph)?;
            let mut graph_file = store.load_graph(&path)?;
            // Schema is optional: discovery failure or absence both yield None.
            let schema = GraphSchema::discover(cwd).ok().flatten().map(|(_, s)| s);
            let user_short_uid = config::ensure_user_short_uid(cwd);

            match command {
                GraphCommand::Node { command } => execute_node(
                    command,
                    GraphCommandContext {
                        graph_name: &graph,
                        path: &path,
                        user_short_uid: &user_short_uid,
                        graph_file: &mut graph_file,
                        schema: schema.as_ref(),
                        store: store.as_ref(),
                    },
                ),

                GraphCommand::Edge { command } => execute_edge(
                    command,
                    GraphCommandContext {
                        graph_name: &graph,
                        path: &path,
                        user_short_uid: &user_short_uid,
                        graph_file: &mut graph_file,
                        schema: schema.as_ref(),
                        store: store.as_ref(),
                    },
                ),

                GraphCommand::Note { command } => execute_note(
                    command,
                    GraphNoteContext {
                        path: &path,
                        graph_file: &mut graph_file,
                        store: store.as_ref(),
                        _schema: schema.as_ref(),
                    },
                ),

                // Read-only inspection commands: operate on the loaded graph.
                GraphCommand::Stats(args) => Ok(execute_stats(&graph_file, &args)),
                GraphCommand::Check(args) => Ok(execute_check(&graph_file, cwd, &args)),
                GraphCommand::Audit(args) => Ok(execute_audit(&graph_file, cwd, &args)),

                GraphCommand::Quality { command } => Ok(execute_quality(command, &graph_file)),

                GraphCommand::MissingDescriptions(args) => {
                    Ok(execute_missing_descriptions(&graph_file, &args))
                }
                GraphCommand::MissingFacts(args) => Ok(execute_missing_facts(&graph_file, &args)),
                GraphCommand::Duplicates(args) => Ok(execute_duplicates(&graph_file, &args)),
                GraphCommand::EdgeGaps(args) => Ok(execute_edge_gaps(&graph_file, &args)),

                GraphCommand::ExportHtml(args) => execute_export_html(&graph, &graph_file, args),

                GraphCommand::AccessLog(args) => execute_access_log(&path, args),

                GraphCommand::AccessStats(_) => execute_access_stats(&path),
                // Import/export commands get the richer transfer context (cwd
                // included) because they read or write files outside the store.
                GraphCommand::ImportCsv(args) => execute_import_csv(
                    GraphTransferContext {
                        cwd,
                        graph_name: &graph,
                        path: &path,
                        graph_file: &mut graph_file,
                        schema: schema.as_ref(),
                        store: store.as_ref(),
                    },
                    args,
                ),
                GraphCommand::ImportMarkdown(args) => execute_import_markdown(
                    GraphTransferContext {
                        cwd,
                        graph_name: &graph,
                        path: &path,
                        graph_file: &mut graph_file,
                        schema: schema.as_ref(),
                        store: store.as_ref(),
                    },
                    args,
                ),
                GraphCommand::Kql(args) => execute_kql(&graph_file, args),
                GraphCommand::ExportJson(args) => execute_export_json(&graph, &graph_file, args),
                GraphCommand::ImportJson(args) => {
                    execute_import_json(&path, &graph, store.as_ref(), args)
                }
                GraphCommand::ExportDot(args) => execute_export_dot(&graph, &graph_file, args),
                GraphCommand::ExportMermaid(args) => {
                    execute_export_mermaid(&graph, &graph_file, args)
                }
                GraphCommand::ExportGraphml(args) => {
                    execute_export_graphml(&graph, &graph_file, args)
                }
                GraphCommand::ExportMd(args) => execute_export_md(
                    GraphTransferContext {
                        cwd,
                        graph_name: &graph,
                        path: &path,
                        graph_file: &mut graph_file,
                        schema: schema.as_ref(),
                        store: store.as_ref(),
                    },
                    args,
                ),
                GraphCommand::Split(args) => execute_split(&graph, &graph_file, args),
                GraphCommand::Vector { command } => execute_vector(
                    GraphTransferContext {
                        cwd,
                        graph_name: &graph,
                        path: &path,
                        graph_file: &mut graph_file,
                        schema: schema.as_ref(),
                        store: store.as_ref(),
                    },
                    command,
                ),
                // Temporal commands work from the on-disk path (event log /
                // backups), not the in-memory graph.
                GraphCommand::AsOf(args) => execute_as_of(&path, &graph, args),
                GraphCommand::History(args) => execute_history(&path, &graph, args),
                GraphCommand::Timeline(args) => execute_timeline(&path, &graph, args),
                GraphCommand::DiffAsOf(args) => execute_diff_as_of(&path, &graph, args),
                GraphCommand::FeedbackSummary(args) => {
                    Ok(execute_feedback_summary(cwd, &graph, &args)?)
                }
                GraphCommand::Baseline(args) => {
                    Ok(execute_baseline(cwd, &graph, &graph_file, &args)?)
                }
            }
        }
    }
}
379
380fn render_graph_list(store: &dyn GraphStore, full: bool) -> Result<String> {
381 let graphs = store.list_graphs()?;
382
383 let mut lines = vec![format!("= graphs ({})", graphs.len())];
384 for (name, path) in graphs {
385 if full {
386 lines.push(format!("- {name} | {}", path.display()));
387 } else {
388 lines.push(format!("- {name}"));
389 }
390 }
391 Ok(format!("{}\n", lines.join("\n")))
392}
393
/// One graph in the JSON `list` output: its name and on-disk path.
#[derive(Debug, Serialize)]
struct GraphListEntry {
    name: String,
    path: String,
}

/// Top-level payload for the JSON `list` output.
#[derive(Debug, Serialize)]
struct GraphListResponse {
    graphs: Vec<GraphListEntry>,
}
404
405fn render_graph_list_json(store: &dyn GraphStore) -> Result<String> {
406 let graphs = store.list_graphs()?;
407 let entries = graphs
408 .into_iter()
409 .map(|(name, path)| GraphListEntry {
410 name,
411 path: path.display().to_string(),
412 })
413 .collect();
414 let payload = GraphListResponse { graphs: entries };
415 Ok(serde_json::to_string_pretty(&payload).unwrap_or_else(|_| "{}".to_owned()))
416}
417
/// JSON result for a single find query: the query text, its match count,
/// and the matched nodes (capped by the caller's limit).
#[derive(Debug, Serialize)]
struct FindQueryResult {
    query: String,
    count: usize,
    nodes: Vec<Node>,
}

/// Top-level JSON payload for `find`: total matches across all queries plus
/// the per-query breakdown.
#[derive(Debug, Serialize)]
struct FindResponse {
    total: usize,
    queries: Vec<FindQueryResult>,
}
430
431pub(crate) fn render_find_json_with_index(
432 graph: &GraphFile,
433 queries: &[String],
434 limit: usize,
435 mode: output::FindMode,
436 index: Option<&Bm25Index>,
437) -> String {
438 let mut total = 0usize;
439 let mut results = Vec::new();
440 for query in queries {
441 let (count, nodes) =
442 output::find_nodes_and_total_with_index(graph, query, limit, true, mode, index);
443 total += count;
444 results.push(FindQueryResult {
445 query: query.clone(),
446 count,
447 nodes,
448 });
449 }
450 let payload = FindResponse {
451 total,
452 queries: results,
453 };
454 serde_json::to_string_pretty(&payload).unwrap_or_else(|_| "{}".to_owned())
455}
456
/// JSON wrapper for a single-node response (`{"node": {...}}`).
#[derive(Debug, Serialize)]
struct NodeGetResponse {
    node: Node,
}
461
462pub(crate) fn render_node_json(node: &Node) -> String {
463 let payload = NodeGetResponse { node: node.clone() };
464 serde_json::to_string_pretty(&payload).unwrap_or_else(|_| "{}".to_owned())
465}
466
467fn render_graph_diff(store: &dyn GraphStore, left: &str, right: &str) -> Result<String> {
468 let left_path = store.resolve_graph_path(left)?;
469 let right_path = store.resolve_graph_path(right)?;
470 let left_graph = store.load_graph(&left_path)?;
471 let right_graph = store.load_graph(&right_path)?;
472 Ok(render_graph_diff_from_files(
473 left,
474 right,
475 &left_graph,
476 &right_graph,
477 ))
478}
479
480fn render_graph_diff_json(store: &dyn GraphStore, left: &str, right: &str) -> Result<String> {
481 let left_path = store.resolve_graph_path(left)?;
482 let right_path = store.resolve_graph_path(right)?;
483 let left_graph = store.load_graph(&left_path)?;
484 let right_graph = store.load_graph(&right_path)?;
485 Ok(render_graph_diff_json_from_files(
486 left,
487 right,
488 &left_graph,
489 &right_graph,
490 ))
491}
492
/// One field-level difference: the dotted JSON path plus the left/right values.
#[derive(Debug, Serialize)]
struct DiffEntry {
    path: String,
    left: Value,
    right: Value,
}

/// All field-level differences for one changed entity, keyed by its id
/// (node/note id, or "source relation target" for edges).
#[derive(Debug, Serialize)]
struct EntityDiff {
    id: String,
    diffs: Vec<DiffEntry>,
}

/// Top-level JSON payload for `diff --json`: added/removed ids and detailed
/// per-entity diffs for nodes, edges, and notes.
#[derive(Debug, Serialize)]
struct GraphDiffResponse {
    left: String,
    right: String,
    added_nodes: Vec<String>,
    removed_nodes: Vec<String>,
    changed_nodes: Vec<EntityDiff>,
    added_edges: Vec<String>,
    removed_edges: Vec<String>,
    changed_edges: Vec<EntityDiff>,
    added_notes: Vec<String>,
    removed_notes: Vec<String>,
    changed_notes: Vec<EntityDiff>,
}
520
/// Compute the structural diff between two already-loaded graphs and render
/// it as pretty-printed JSON.
///
/// Identity keys: node and note ids; edges use the composite
/// "source relation target" string. Added/removed are set differences,
/// changed entities are those whose serialized JSON differs, and all output
/// lists are sorted for stable output. Serialization failure degrades to
/// `"{}"`.
fn render_graph_diff_json_from_files(
    left: &str,
    right: &str,
    left_graph: &GraphFile,
    right_graph: &GraphFile,
) -> String {
    use std::collections::{HashMap, HashSet};

    // Id sets for membership tests; id -> entity maps for field-level diffs.
    let left_nodes: HashSet<String> = left_graph.nodes.iter().map(|n| n.id.clone()).collect();
    let right_nodes: HashSet<String> = right_graph.nodes.iter().map(|n| n.id.clone()).collect();

    let left_node_map: HashMap<String, &Node> =
        left_graph.nodes.iter().map(|n| (n.id.clone(), n)).collect();
    let right_node_map: HashMap<String, &Node> = right_graph
        .nodes
        .iter()
        .map(|n| (n.id.clone(), n))
        .collect();

    let left_edges: HashSet<String> = left_graph
        .edges
        .iter()
        .map(|e| format!("{} {} {}", e.source_id, e.relation, e.target_id))
        .collect();
    let right_edges: HashSet<String> = right_graph
        .edges
        .iter()
        .map(|e| format!("{} {} {}", e.source_id, e.relation, e.target_id))
        .collect();

    let left_edge_map: HashMap<String, &Edge> = left_graph
        .edges
        .iter()
        .map(|e| (format!("{} {} {}", e.source_id, e.relation, e.target_id), e))
        .collect();
    let right_edge_map: HashMap<String, &Edge> = right_graph
        .edges
        .iter()
        .map(|e| (format!("{} {} {}", e.source_id, e.relation, e.target_id), e))
        .collect();

    let left_notes: HashSet<String> = left_graph.notes.iter().map(|n| n.id.clone()).collect();
    let right_notes: HashSet<String> = right_graph.notes.iter().map(|n| n.id.clone()).collect();

    let left_note_map: HashMap<String, &Note> =
        left_graph.notes.iter().map(|n| (n.id.clone(), n)).collect();
    let right_note_map: HashMap<String, &Note> = right_graph
        .notes
        .iter()
        .map(|n| (n.id.clone(), n))
        .collect();

    // Set differences give the added/removed id lists.
    let mut added_nodes: Vec<String> = right_nodes.difference(&left_nodes).cloned().collect();
    let mut removed_nodes: Vec<String> = left_nodes.difference(&right_nodes).cloned().collect();
    let mut added_edges: Vec<String> = right_edges.difference(&left_edges).cloned().collect();
    let mut removed_edges: Vec<String> = left_edges.difference(&right_edges).cloned().collect();
    let mut added_notes: Vec<String> = right_notes.difference(&left_notes).cloned().collect();
    let mut removed_notes: Vec<String> = left_notes.difference(&right_notes).cloned().collect();

    // Entities present on both sides are "changed" when their serialized
    // JSON representations differ.
    let mut changed_nodes: Vec<String> = left_nodes
        .intersection(&right_nodes)
        .filter_map(|id| {
            let left_node = left_node_map.get(id.as_str())?;
            let right_node = right_node_map.get(id.as_str())?;
            if eq_serialized(*left_node, *right_node) {
                None
            } else {
                Some(id.clone())
            }
        })
        .collect();
    let mut changed_edges: Vec<String> = left_edges
        .intersection(&right_edges)
        .filter_map(|key| {
            let left_edge = left_edge_map.get(key.as_str())?;
            let right_edge = right_edge_map.get(key.as_str())?;
            if eq_serialized(*left_edge, *right_edge) {
                None
            } else {
                Some(key.clone())
            }
        })
        .collect();
    let mut changed_notes: Vec<String> = left_notes
        .intersection(&right_notes)
        .filter_map(|id| {
            let left_note = left_note_map.get(id.as_str())?;
            let right_note = right_note_map.get(id.as_str())?;
            if eq_serialized(*left_note, *right_note) {
                None
            } else {
                Some(id.clone())
            }
        })
        .collect();

    // Sort everything so the JSON output is deterministic.
    added_nodes.sort();
    removed_nodes.sort();
    added_edges.sort();
    removed_edges.sort();
    added_notes.sort();
    removed_notes.sort();
    changed_nodes.sort();
    changed_edges.sort();
    changed_notes.sort();

    // Expand each changed id into its field-level diff entries.
    let changed_nodes = changed_nodes
        .into_iter()
        .map(|id| EntityDiff {
            diffs: left_node_map
                .get(id.as_str())
                .zip(right_node_map.get(id.as_str()))
                .map(|(left_node, right_node)| diff_serialized_values_json(*left_node, *right_node))
                .unwrap_or_default(),
            id,
        })
        .collect();
    let changed_edges = changed_edges
        .into_iter()
        .map(|id| EntityDiff {
            diffs: left_edge_map
                .get(id.as_str())
                .zip(right_edge_map.get(id.as_str()))
                .map(|(left_edge, right_edge)| diff_serialized_values_json(*left_edge, *right_edge))
                .unwrap_or_default(),
            id,
        })
        .collect();
    let changed_notes = changed_notes
        .into_iter()
        .map(|id| EntityDiff {
            diffs: left_note_map
                .get(id.as_str())
                .zip(right_note_map.get(id.as_str()))
                .map(|(left_note, right_note)| diff_serialized_values_json(*left_note, *right_note))
                .unwrap_or_default(),
            id,
        })
        .collect();

    let payload = GraphDiffResponse {
        left: left.to_owned(),
        right: right.to_owned(),
        added_nodes,
        removed_nodes,
        changed_nodes,
        added_edges,
        removed_edges,
        changed_edges,
        added_notes,
        removed_notes,
        changed_notes,
    };
    serde_json::to_string_pretty(&payload).unwrap_or_else(|_| "{}".to_owned())
}
676
/// Compute the structural diff between two already-loaded graphs and render
/// it as plain text (`+`/`-`/`~` prefixed sections for nodes, edges, notes).
///
/// Same identity and change rules as the JSON variant: node/note ids and
/// "source relation target" edge keys; "changed" means the serialized JSON
/// differs; all lists are sorted for stable output.
fn render_graph_diff_from_files(
    left: &str,
    right: &str,
    left_graph: &GraphFile,
    right_graph: &GraphFile,
) -> String {
    use std::collections::{HashMap, HashSet};

    // Id sets for membership tests; id -> entity maps for field-level diffs.
    let left_nodes: HashSet<String> = left_graph.nodes.iter().map(|n| n.id.clone()).collect();
    let right_nodes: HashSet<String> = right_graph.nodes.iter().map(|n| n.id.clone()).collect();

    let left_node_map: HashMap<String, &Node> =
        left_graph.nodes.iter().map(|n| (n.id.clone(), n)).collect();
    let right_node_map: HashMap<String, &Node> = right_graph
        .nodes
        .iter()
        .map(|n| (n.id.clone(), n))
        .collect();

    let left_edges: HashSet<String> = left_graph
        .edges
        .iter()
        .map(|e| format!("{} {} {}", e.source_id, e.relation, e.target_id))
        .collect();
    let right_edges: HashSet<String> = right_graph
        .edges
        .iter()
        .map(|e| format!("{} {} {}", e.source_id, e.relation, e.target_id))
        .collect();

    let left_edge_map: HashMap<String, &Edge> = left_graph
        .edges
        .iter()
        .map(|e| (format!("{} {} {}", e.source_id, e.relation, e.target_id), e))
        .collect();
    let right_edge_map: HashMap<String, &Edge> = right_graph
        .edges
        .iter()
        .map(|e| (format!("{} {} {}", e.source_id, e.relation, e.target_id), e))
        .collect();

    let left_notes: HashSet<String> = left_graph.notes.iter().map(|n| n.id.clone()).collect();
    let right_notes: HashSet<String> = right_graph.notes.iter().map(|n| n.id.clone()).collect();

    let left_note_map: HashMap<String, &Note> =
        left_graph.notes.iter().map(|n| (n.id.clone(), n)).collect();
    let right_note_map: HashMap<String, &Note> = right_graph
        .notes
        .iter()
        .map(|n| (n.id.clone(), n))
        .collect();

    // Set differences give the added/removed id lists.
    let mut added_nodes: Vec<String> = right_nodes.difference(&left_nodes).cloned().collect();
    let mut removed_nodes: Vec<String> = left_nodes.difference(&right_nodes).cloned().collect();
    let mut added_edges: Vec<String> = right_edges.difference(&left_edges).cloned().collect();
    let mut removed_edges: Vec<String> = left_edges.difference(&right_edges).cloned().collect();
    let mut added_notes: Vec<String> = right_notes.difference(&left_notes).cloned().collect();
    let mut removed_notes: Vec<String> = left_notes.difference(&right_notes).cloned().collect();

    // Entities present on both sides are "changed" when their serialized
    // JSON representations differ.
    let mut changed_nodes: Vec<String> = left_nodes
        .intersection(&right_nodes)
        .filter_map(|id| {
            let left_node = left_node_map.get(id.as_str())?;
            let right_node = right_node_map.get(id.as_str())?;
            if eq_serialized(*left_node, *right_node) {
                None
            } else {
                Some(id.clone())
            }
        })
        .collect();

    let mut changed_edges: Vec<String> = left_edges
        .intersection(&right_edges)
        .filter_map(|key| {
            let left_edge = left_edge_map.get(key.as_str())?;
            let right_edge = right_edge_map.get(key.as_str())?;
            if eq_serialized(*left_edge, *right_edge) {
                None
            } else {
                Some(key.clone())
            }
        })
        .collect();

    let mut changed_notes: Vec<String> = left_notes
        .intersection(&right_notes)
        .filter_map(|id| {
            let left_note = left_note_map.get(id.as_str())?;
            let right_note = right_note_map.get(id.as_str())?;
            if eq_serialized(*left_note, *right_note) {
                None
            } else {
                Some(id.clone())
            }
        })
        .collect();

    // Sort everything so the rendered diff is deterministic.
    added_nodes.sort();
    removed_nodes.sort();
    added_edges.sort();
    removed_edges.sort();
    added_notes.sort();
    removed_notes.sort();
    changed_nodes.sort();
    changed_edges.sort();
    changed_notes.sort();

    let mut lines = vec![format!("= diff {left} -> {right}")];
    lines.push(format!("+ nodes ({})", added_nodes.len()));
    for id in added_nodes {
        lines.push(format!("+ node {id}"));
    }
    lines.push(format!("- nodes ({})", removed_nodes.len()));
    for id in removed_nodes {
        lines.push(format!("- node {id}"));
    }
    lines.push(format!("~ nodes ({})", changed_nodes.len()));
    for id in changed_nodes {
        // Both lookups should succeed for a changed id; fall back to a bare
        // header line if either side is unexpectedly missing.
        if let (Some(left_node), Some(right_node)) = (
            left_node_map.get(id.as_str()),
            right_node_map.get(id.as_str()),
        ) {
            lines.extend(render_entity_diff_lines("node", &id, left_node, right_node));
        } else {
            lines.push(format!("~ node {id}"));
        }
    }
    lines.push(format!("+ edges ({})", added_edges.len()));
    for edge in added_edges {
        lines.push(format!("+ edge {edge}"));
    }
    lines.push(format!("- edges ({})", removed_edges.len()));
    for edge in removed_edges {
        lines.push(format!("- edge {edge}"));
    }
    lines.push(format!("~ edges ({})", changed_edges.len()));
    for edge in changed_edges {
        if let (Some(left_edge), Some(right_edge)) = (
            left_edge_map.get(edge.as_str()),
            right_edge_map.get(edge.as_str()),
        ) {
            lines.extend(render_entity_diff_lines(
                "edge", &edge, left_edge, right_edge,
            ));
        } else {
            lines.push(format!("~ edge {edge}"));
        }
    }
    lines.push(format!("+ notes ({})", added_notes.len()));
    for note_id in added_notes {
        lines.push(format!("+ note {note_id}"));
    }
    lines.push(format!("- notes ({})", removed_notes.len()));
    for note_id in removed_notes {
        lines.push(format!("- note {note_id}"));
    }
    lines.push(format!("~ notes ({})", changed_notes.len()));
    for note_id in changed_notes {
        if let (Some(left_note), Some(right_note)) = (
            left_note_map.get(note_id.as_str()),
            right_note_map.get(note_id.as_str()),
        ) {
            lines.extend(render_entity_diff_lines(
                "note", &note_id, left_note, right_note,
            ));
        } else {
            lines.push(format!("~ note {note_id}"));
        }
    }

    format!("{}\n", lines.join("\n"))
}
850
851fn eq_serialized<T: Serialize>(left: &T, right: &T) -> bool {
852 match (serde_json::to_value(left), serde_json::to_value(right)) {
853 (Ok(left_value), Ok(right_value)) => left_value == right_value,
854 _ => false,
855 }
856}
857
858fn render_entity_diff_lines<T: Serialize>(
859 kind: &str,
860 id: &str,
861 left: &T,
862 right: &T,
863) -> Vec<String> {
864 let mut lines = Vec::new();
865 lines.push(format!("~ {kind} {id}"));
866 for diff in diff_serialized_values(left, right) {
867 lines.push(format!(" ~ {diff}"));
868 }
869 lines
870}
871
872fn diff_serialized_values<T: Serialize>(left: &T, right: &T) -> Vec<String> {
873 match (serde_json::to_value(left), serde_json::to_value(right)) {
874 (Ok(left_value), Ok(right_value)) => {
875 let mut diffs = Vec::new();
876 collect_value_diffs("", &left_value, &right_value, &mut diffs);
877 diffs
878 }
879 _ => vec!["<serialization failed>".to_owned()],
880 }
881}
882
883fn diff_serialized_values_json<T: Serialize>(left: &T, right: &T) -> Vec<DiffEntry> {
884 match (serde_json::to_value(left), serde_json::to_value(right)) {
885 (Ok(left_value), Ok(right_value)) => {
886 let mut diffs = Vec::new();
887 collect_value_diffs_json("", &left_value, &right_value, &mut diffs);
888 diffs
889 }
890 _ => Vec::new(),
891 }
892}
893
894fn collect_value_diffs_json(path: &str, left: &Value, right: &Value, out: &mut Vec<DiffEntry>) {
895 if left == right {
896 return;
897 }
898 match (left, right) {
899 (Value::Object(left_obj), Value::Object(right_obj)) => {
900 use std::collections::BTreeSet;
901
902 let mut keys: BTreeSet<&str> = BTreeSet::new();
903 for key in left_obj.keys() {
904 keys.insert(key.as_str());
905 }
906 for key in right_obj.keys() {
907 keys.insert(key.as_str());
908 }
909 for key in keys {
910 let left_value = left_obj.get(key).unwrap_or(&Value::Null);
911 let right_value = right_obj.get(key).unwrap_or(&Value::Null);
912 let next_path = if path.is_empty() {
913 key.to_owned()
914 } else {
915 format!("{path}.{key}")
916 };
917 collect_value_diffs_json(&next_path, left_value, right_value, out);
918 }
919 }
920 (Value::Array(_), Value::Array(_)) => {
921 let label = if path.is_empty() {
922 "<root>[]".to_owned()
923 } else {
924 format!("{path}[]")
925 };
926 out.push(DiffEntry {
927 path: label,
928 left: left.clone(),
929 right: right.clone(),
930 });
931 }
932 _ => {
933 let label = if path.is_empty() { "<root>" } else { path };
934 out.push(DiffEntry {
935 path: label.to_owned(),
936 left: left.clone(),
937 right: right.clone(),
938 });
939 }
940 }
941}
942
943fn collect_value_diffs(path: &str, left: &Value, right: &Value, out: &mut Vec<String>) {
944 if left == right {
945 return;
946 }
947 match (left, right) {
948 (Value::Object(left_obj), Value::Object(right_obj)) => {
949 use std::collections::BTreeSet;
950
951 let mut keys: BTreeSet<&str> = BTreeSet::new();
952 for key in left_obj.keys() {
953 keys.insert(key.as_str());
954 }
955 for key in right_obj.keys() {
956 keys.insert(key.as_str());
957 }
958 for key in keys {
959 let left_value = left_obj.get(key).unwrap_or(&Value::Null);
960 let right_value = right_obj.get(key).unwrap_or(&Value::Null);
961 let next_path = if path.is_empty() {
962 key.to_owned()
963 } else {
964 format!("{path}.{key}")
965 };
966 collect_value_diffs(&next_path, left_value, right_value, out);
967 }
968 }
969 (Value::Array(_), Value::Array(_)) => {
970 let label = if path.is_empty() {
971 "<root>[]".to_owned()
972 } else {
973 format!("{path}[]")
974 };
975 out.push(format!(
976 "{label}: {} -> {}",
977 format_value(left),
978 format_value(right)
979 ));
980 }
981 _ => {
982 let label = if path.is_empty() { "<root>" } else { path };
983 out.push(format!(
984 "{label}: {} -> {}",
985 format_value(left),
986 format_value(right)
987 ));
988 }
989 }
990}
991
992fn format_value(value: &Value) -> String {
993 let mut rendered =
994 serde_json::to_string(value).unwrap_or_else(|_| "<unserializable>".to_owned());
995 rendered = rendered.replace('\n', "\\n");
996 truncate_value(rendered, 160)
997}
998
/// Truncate `value` to at most `limit` bytes, replacing the removed tail with
/// `"..."` when truncation occurs. Values already within the limit are
/// returned unchanged.
///
/// The cut point is backed off to the nearest UTF-8 char boundary: the
/// previous implementation called `String::truncate` at a raw byte offset,
/// which panics when that offset lands inside a multi-byte character (easily
/// reachable since diff values may contain non-ASCII JSON content).
///
/// Note: for `limit < 3` the result is the bare `"..."` suffix (3 bytes),
/// matching the previous behavior for ASCII input.
fn truncate_value(mut value: String, limit: usize) -> String {
    if value.len() <= limit {
        return value;
    }
    // Reserve 3 bytes for the ellipsis, then back off to a char boundary so
    // truncate() cannot panic on multi-byte UTF-8 sequences.
    let mut cut = limit.saturating_sub(3);
    while cut > 0 && !value.is_char_boundary(cut) {
        cut -= 1;
    }
    value.truncate(cut);
    value.push_str("...");
    value
}
1007
/// Merge `source` into `target` and persist the result.
///
/// Nodes and notes are keyed by id; edges by the composite
/// "source relation target" string. Entities missing from the target are
/// always appended; entities present in both are overwritten only under
/// `MergeStrategy::PreferNew` (otherwise the target's copy wins). The merged
/// graph is saved, a `graph.merge` event snapshot is appended, and a summary
/// of added (+) and updated (~) counts is returned.
fn merge_graphs(
    store: &dyn GraphStore,
    target: &str,
    source: &str,
    strategy: MergeStrategy,
) -> Result<String> {
    use std::collections::HashMap;

    let target_path = store.resolve_graph_path(target)?;
    let source_path = store.resolve_graph_path(source)?;
    let mut target_graph = store.load_graph(&target_path)?;
    let source_graph = store.load_graph(&source_path)?;

    // Node id -> position in target_graph.nodes, for O(1) collision checks.
    let mut node_index: HashMap<String, usize> = HashMap::new();
    for (idx, node) in target_graph.nodes.iter().enumerate() {
        node_index.insert(node.id.clone(), idx);
    }

    let mut node_added = 0usize;
    let mut node_updated = 0usize;
    for node in &source_graph.nodes {
        if let Some(&idx) = node_index.get(&node.id) {
            // Collision: only overwrite when the strategy prefers new data.
            if matches!(strategy, MergeStrategy::PreferNew) {
                target_graph.nodes[idx] = node.clone();
                node_updated += 1;
            }
        } else {
            target_graph.nodes.push(node.clone());
            node_index.insert(node.id.clone(), target_graph.nodes.len() - 1);
            node_added += 1;
        }
    }

    // Edge key -> position; edges are identified by source/relation/target.
    let mut edge_index: HashMap<String, usize> = HashMap::new();
    for (idx, edge) in target_graph.edges.iter().enumerate() {
        let key = format!("{} {} {}", edge.source_id, edge.relation, edge.target_id);
        edge_index.insert(key, idx);
    }

    let mut edge_added = 0usize;
    let mut edge_updated = 0usize;
    for edge in &source_graph.edges {
        let key = format!("{} {} {}", edge.source_id, edge.relation, edge.target_id);
        if let Some(&idx) = edge_index.get(&key) {
            if matches!(strategy, MergeStrategy::PreferNew) {
                target_graph.edges[idx] = edge.clone();
                edge_updated += 1;
            }
        } else {
            target_graph.edges.push(edge.clone());
            edge_index.insert(key, target_graph.edges.len() - 1);
            edge_added += 1;
        }
    }

    // Note id -> position, same merge rules as nodes.
    let mut note_index: HashMap<String, usize> = HashMap::new();
    for (idx, note) in target_graph.notes.iter().enumerate() {
        note_index.insert(note.id.clone(), idx);
    }

    let mut note_added = 0usize;
    let mut note_updated = 0usize;
    for note in &source_graph.notes {
        if let Some(&idx) = note_index.get(&note.id) {
            if matches!(strategy, MergeStrategy::PreferNew) {
                target_graph.notes[idx] = note.clone();
                note_updated += 1;
            }
        } else {
            target_graph.notes.push(note.clone());
            note_index.insert(note.id.clone(), target_graph.notes.len() - 1);
            note_added += 1;
        }
    }

    store.save_graph(&target_path, &target_graph)?;
    // Record the merge in the event log so temporal queries can see it.
    append_event_snapshot(
        &target_path,
        "graph.merge",
        Some(format!("{source} -> {target} ({strategy:?})")),
        &target_graph,
    )?;

    let mut lines = vec![format!("+ merged {source} -> {target}")];
    lines.push(format!("nodes: +{node_added} ~{node_updated}"));
    lines.push(format!("edges: +{edge_added} ~{edge_updated}"));
    lines.push(format!("notes: +{note_added} ~{note_updated}"));

    Ok(format!("{}\n", lines.join("\n")))
}
1098
1099pub(crate) fn export_graph_as_of(path: &Path, graph: &str, args: &AsOfArgs) -> Result<String> {
1100 match resolve_temporal_source(path, args.source)? {
1101 TemporalSource::EventLog => export_graph_as_of_event_log(path, graph, args),
1102 _ => export_graph_as_of_backups(path, graph, args),
1103 }
1104}
1105
1106fn export_graph_as_of_backups(path: &Path, graph: &str, args: &AsOfArgs) -> Result<String> {
1107 let backups = list_graph_backups(path)?;
1108 if backups.is_empty() {
1109 bail!("no backups found for graph: {graph}");
1110 }
1111 let target_ts = args.ts_ms / 1000;
1112 let mut selected = None;
1113 for (ts, backup_path) in backups {
1114 if ts <= target_ts {
1115 selected = Some((ts, backup_path));
1116 }
1117 }
1118 let Some((ts, backup_path)) = selected else {
1119 bail!("no backup at or before ts_ms={}", args.ts_ms);
1120 };
1121
1122 let output_path = args
1123 .output
1124 .clone()
1125 .unwrap_or_else(|| format!("{graph}.asof.{}.json", args.ts_ms));
1126 let raw = read_gz_to_string(&backup_path)?;
1127 std::fs::write(&output_path, raw)?;
1128 Ok(format!("+ exported {output_path} (as-of {ts})\n"))
1129}
1130
1131fn export_graph_as_of_event_log(path: &Path, graph: &str, args: &AsOfArgs) -> Result<String> {
1132 let entries = event_log::read_log(path)?;
1133 if entries.is_empty() {
1134 bail!("no event log entries found for graph: {graph}");
1135 }
1136 let selected = select_event_at_or_before(&entries, args.ts_ms)
1137 .ok_or_else(|| anyhow!("no event log entry at or before ts_ms={}", args.ts_ms))?;
1138 let output_path = args
1139 .output
1140 .clone()
1141 .unwrap_or_else(|| format!("{graph}.asof.{}.json", args.ts_ms));
1142 let mut snapshot = selected.graph.clone();
1143 snapshot.refresh_counts();
1144 let raw = serde_json::to_string_pretty(&snapshot).context("failed to serialize graph")?;
1145 std::fs::write(&output_path, raw)?;
1146 Ok(format!(
1147 "+ exported {output_path} (as-of {})\n",
1148 selected.ts_ms
1149 ))
1150}
1151
1152fn list_graph_backups(path: &Path) -> Result<Vec<(u64, PathBuf)>> {
1153 let parent = path
1154 .parent()
1155 .ok_or_else(|| anyhow!("missing parent directory"))?;
1156 let stem = path
1157 .file_stem()
1158 .and_then(|s| s.to_str())
1159 .ok_or_else(|| anyhow!("invalid graph filename"))?;
1160 let prefix = format!("{stem}.bck.");
1161 let suffix = ".gz";
1162
1163 let mut backups = Vec::new();
1164 for entry in std::fs::read_dir(parent)? {
1165 let entry = entry?;
1166 let name = entry.file_name();
1167 let name = name.to_string_lossy();
1168 if !name.starts_with(&prefix) || !name.ends_with(suffix) {
1169 continue;
1170 }
1171 let ts_part = &name[prefix.len()..name.len() - suffix.len()];
1172 if let Ok(ts) = ts_part.parse::<u64>() {
1173 backups.push((ts, entry.path()));
1174 }
1175 }
1176 backups.sort_by_key(|(ts, _)| *ts);
1177 Ok(backups)
1178}
1179
1180fn read_gz_to_string(path: &Path) -> Result<String> {
1181 use flate2::read::GzDecoder;
1182 use std::io::Read;
1183
1184 let data = std::fs::read(path)?;
1185 let mut decoder = GzDecoder::new(&data[..]);
1186 let mut out = String::new();
1187 decoder.read_to_string(&mut out)?;
1188 Ok(out)
1189}
1190
/// Append a full snapshot of `graph` to the event log at `path`, tagged with
/// an `action` label (e.g. "graph.merge") and an optional detail string.
/// Delegates directly to [`event_log::append_snapshot`].
pub(crate) fn append_event_snapshot(
    path: &Path,
    action: &str,
    detail: Option<String>,
    graph: &GraphFile,
) -> Result<()> {
    event_log::append_snapshot(path, action, detail, graph)
}
1199
1200pub(crate) fn export_graph_json(
1201 graph: &str,
1202 graph_file: &GraphFile,
1203 output: Option<&str>,
1204) -> Result<String> {
1205 let output_path = output
1206 .map(|value| value.to_owned())
1207 .unwrap_or_else(|| format!("{graph}.export.json"));
1208 let raw = serde_json::to_string_pretty(graph_file).context("failed to serialize graph")?;
1209 std::fs::write(&output_path, raw)?;
1210 Ok(format!("+ exported {output_path}\n"))
1211}
1212
1213pub(crate) fn import_graph_json(
1214 path: &Path,
1215 graph: &str,
1216 input: &str,
1217 store: &dyn GraphStore,
1218) -> Result<String> {
1219 let raw = std::fs::read_to_string(input)
1220 .with_context(|| format!("failed to read import file: {input}"))?;
1221 let mut imported: GraphFile =
1222 serde_json::from_str(&raw).with_context(|| format!("invalid JSON: {input}"))?;
1223 imported.metadata.name = graph.to_owned();
1224 imported.refresh_counts();
1225 store.save_graph(path, &imported)?;
1226 append_event_snapshot(path, "graph.import", Some(input.to_owned()), &imported)?;
1227 Ok(format!("+ imported {input} -> {graph}\n"))
1228}
1229
1230pub(crate) fn import_graph_csv(
1231 path: &Path,
1232 graph: &str,
1233 graph_file: &mut GraphFile,
1234 store: &dyn GraphStore,
1235 args: &ImportCsvArgs,
1236 schema: Option<&GraphSchema>,
1237) -> Result<String> {
1238 if args.nodes.is_none() && args.edges.is_none() && args.notes.is_none() {
1239 bail!("expected at least one of --nodes/--edges/--notes");
1240 }
1241 let strategy = match args.strategy {
1242 MergeStrategy::PreferNew => import_csv::CsvStrategy::PreferNew,
1243 MergeStrategy::PreferOld => import_csv::CsvStrategy::PreferOld,
1244 };
1245 let summary = import_csv::import_csv_into_graph(
1246 graph_file,
1247 import_csv::CsvImportArgs {
1248 nodes_path: args.nodes.as_deref(),
1249 edges_path: args.edges.as_deref(),
1250 notes_path: args.notes.as_deref(),
1251 strategy,
1252 },
1253 )?;
1254 if let Some(schema) = schema {
1255 let all_violations = validate_graph_with_schema(graph_file, schema);
1256 bail_on_schema_violations(&all_violations)?;
1257 }
1258 store.save_graph(path, graph_file)?;
1259 append_event_snapshot(path, "graph.import-csv", None, graph_file)?;
1260 let mut lines = vec![format!("+ imported csv into {graph}")];
1261 lines.extend(import_csv::merge_summary_lines(&summary));
1262 Ok(format!("{}\n", lines.join("\n")))
1263}
1264
1265pub(crate) fn import_graph_markdown(
1266 path: &Path,
1267 graph: &str,
1268 graph_file: &mut GraphFile,
1269 store: &dyn GraphStore,
1270 args: &ImportMarkdownArgs,
1271 schema: Option<&GraphSchema>,
1272) -> Result<String> {
1273 let strategy = match args.strategy {
1274 MergeStrategy::PreferNew => import_markdown::MarkdownStrategy::PreferNew,
1275 MergeStrategy::PreferOld => import_markdown::MarkdownStrategy::PreferOld,
1276 };
1277 let summary = import_markdown::import_markdown_into_graph(
1278 graph_file,
1279 import_markdown::MarkdownImportArgs {
1280 path: &args.path,
1281 notes_as_nodes: args.notes_as_nodes,
1282 strategy,
1283 },
1284 )?;
1285 if let Some(schema) = schema {
1286 let all_violations = validate_graph_with_schema(graph_file, schema);
1287 bail_on_schema_violations(&all_violations)?;
1288 }
1289 store.save_graph(path, graph_file)?;
1290 append_event_snapshot(path, "graph.import-md", Some(args.path.clone()), graph_file)?;
1291 let mut lines = vec![format!("+ imported markdown into {graph}")];
1292 lines.extend(import_csv::merge_summary_lines(&summary));
1293 Ok(format!("{}\n", lines.join("\n")))
1294}
1295
1296pub(crate) fn export_graph_dot(
1297 graph: &str,
1298 graph_file: &GraphFile,
1299 args: &ExportDotArgs,
1300) -> Result<String> {
1301 let output_path = args
1302 .output
1303 .clone()
1304 .unwrap_or_else(|| format!("{graph}.dot"));
1305 let (nodes, edges) = select_subgraph(
1306 graph_file,
1307 args.focus.as_deref(),
1308 args.depth,
1309 &args.node_types,
1310 )?;
1311 let mut lines = Vec::new();
1312 lines.push("digraph kg {".to_owned());
1313 for node in &nodes {
1314 let label = format!("{}\\n{}", node.id, node.name);
1315 lines.push(format!(
1316 " \"{}\" [label=\"{}\"];",
1317 escape_dot(&node.id),
1318 escape_dot(&label)
1319 ));
1320 }
1321 for edge in &edges {
1322 lines.push(format!(
1323 " \"{}\" -> \"{}\" [label=\"{}\"];",
1324 escape_dot(&edge.source_id),
1325 escape_dot(&edge.target_id),
1326 escape_dot(&edge.relation)
1327 ));
1328 }
1329 lines.push("}".to_owned());
1330 std::fs::write(&output_path, format!("{}\n", lines.join("\n")))?;
1331 Ok(format!("+ exported {output_path}\n"))
1332}
1333
1334pub(crate) fn export_graph_mermaid(
1335 graph: &str,
1336 graph_file: &GraphFile,
1337 args: &ExportMermaidArgs,
1338) -> Result<String> {
1339 let output_path = args
1340 .output
1341 .clone()
1342 .unwrap_or_else(|| format!("{graph}.mmd"));
1343 let (nodes, edges) = select_subgraph(
1344 graph_file,
1345 args.focus.as_deref(),
1346 args.depth,
1347 &args.node_types,
1348 )?;
1349 let mut lines = Vec::new();
1350 lines.push("graph TD".to_owned());
1351 for node in &nodes {
1352 let label = format!("{}\\n{}", node.id, node.name);
1353 lines.push(format!(
1354 " {}[\"{}\"]",
1355 sanitize_mermaid_id(&node.id),
1356 escape_mermaid(&label)
1357 ));
1358 }
1359 for edge in &edges {
1360 lines.push(format!(
1361 " {} -- \"{}\" --> {}",
1362 sanitize_mermaid_id(&edge.source_id),
1363 escape_mermaid(&edge.relation),
1364 sanitize_mermaid_id(&edge.target_id)
1365 ));
1366 }
1367 std::fs::write(&output_path, format!("{}\n", lines.join("\n")))?;
1368 Ok(format!("+ exported {output_path}\n"))
1369}
1370
/// Render the (optionally focused / type-filtered) graph as GraphML and write
/// it to `args.output` (default `<graph>.graphml`).
///
/// Keys d0-d2 carry node name/type/description; d3-d4 carry edge
/// relation/detail. All attribute values pass through `escape_xml`.
pub(crate) fn export_graph_graphml(
    graph: &str,
    graph_file: &GraphFile,
    args: &ExportGraphmlArgs,
) -> Result<String> {
    let output_path = args
        .output
        .clone()
        .unwrap_or_else(|| format!("{graph}.graphml"));
    let (nodes, edges) = select_subgraph(
        graph_file,
        args.focus.as_deref(),
        args.depth,
        &args.node_types,
    )?;

    // Fixed header: XML declaration, graphml namespace, and key declarations.
    let mut lines = Vec::new();
    lines.push(r#"<?xml version="1.0" encoding="UTF-8"?>"#.to_string());
    lines.push(r#"<graphml xmlns="http://graphml.graphdrawing.org/xmlns" "#.to_string());
    lines.push(r#" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance""#.to_string());
    lines.push(r#" xsi:schemaLocation="http://graphml.graphdrawing.org/xmlns"#.to_string());
    lines.push(r#" http://graphml.graphdrawing.org/xmlns/1.0/graphml.xsd">"#.to_string());
    lines.push(r#" <key id="d0" for="node" attr.name="name" attr.type="string"/>"#.to_string());
    lines.push(r#" <key id="d1" for="node" attr.name="type" attr.type="string"/>"#.to_string());
    lines.push(
        r#" <key id="d2" for="node" attr.name="description" attr.type="string"/>"#.to_string(),
    );
    lines
        .push(r#" <key id="d3" for="edge" attr.name="relation" attr.type="string"/>"#.to_string());
    lines.push(r#" <key id="d4" for="edge" attr.name="detail" attr.type="string"/>"#.to_string());
    lines.push(format!(
        r#" <graph id="{}" edgedefault="directed">"#,
        escape_xml(graph)
    ));

    // One <node> element per selected node, with d0/d1/d2 data children.
    for node in &nodes {
        lines.push(format!(r#" <node id="{}">"#, escape_xml(&node.id)));
        lines.push(format!(
            r#" <data key="d0">{}</data>"#,
            escape_xml(&node.name)
        ));
        lines.push(format!(
            r#" <data key="d1">{}</data>"#,
            escape_xml(&node.r#type)
        ));
        lines.push(format!(
            r#" <data key="d2">{}</data>"#,
            escape_xml(&node.properties.description)
        ));
        lines.push(" </node>".to_string());
    }

    // One <edge> element per surviving edge, with d3/d4 data children.
    for edge in &edges {
        lines.push(format!(
            r#" <edge source="{}" target="{}">"#,
            escape_xml(&edge.source_id),
            escape_xml(&edge.target_id)
        ));
        lines.push(format!(
            r#" <data key="d3">{}</data>"#,
            escape_xml(&edge.relation)
        ));
        lines.push(format!(
            r#" <data key="d4">{}</data>"#,
            escape_xml(&edge.properties.detail)
        ));
        lines.push(" </edge>".to_string());
    }

    lines.push(" </graph>".to_string());
    lines.push("</graphml>".to_string());

    // NOTE: written without a trailing newline, unlike the other exporters.
    std::fs::write(&output_path, lines.join("\n"))?;
    Ok(format!("+ exported {output_path}\n"))
}
1446
/// Escape the five XML special characters so arbitrary text is safe inside
/// GraphML attribute values and element content.
///
/// `&` must be escaped first so the ampersands introduced by the later
/// replacements are not double-escaped. The original replacement strings had
/// been corrupted (entities decoded back to the literal characters, making
/// the function a no-op and `'"' -> """` invalid Rust); this restores the
/// standard entities.
fn escape_xml(s: &str) -> String {
    s.replace('&', "&amp;")
        .replace('<', "&lt;")
        .replace('>', "&gt;")
        .replace('"', "&quot;")
        .replace('\'', "&#39;")
}
1454
/// Export the (optionally focused / type-filtered) graph as a directory of
/// Markdown files — one file per node plus an `index.md` — using `[[wiki]]`
/// links between nodes. Returns a one-line summary message.
pub(crate) fn export_graph_md(
    graph: &str,
    graph_file: &GraphFile,
    args: &ExportMdArgs,
    _cwd: &Path,
) -> Result<String> {
    let output_dir = args
        .output
        .clone()
        .unwrap_or_else(|| format!("{}-md", graph));

    let (nodes, edges) = select_subgraph(
        graph_file,
        args.focus.as_deref(),
        args.depth,
        &args.node_types,
    )?;

    std::fs::create_dir_all(&output_dir)?;

    // Accumulates the index.md body; one bullet appended per exported node.
    let mut index_lines = format!("# {}\n\nNodes: {}\n\n## Index\n", graph, nodes.len());

    for node in &nodes {
        // Node id doubles as the filename after sanitization.
        let safe_name = sanitize_filename(&node.id);
        let filename = format!("{}.md", safe_name);
        let filepath = Path::new(&output_dir).join(&filename);

        let mut content = String::new();
        content.push_str(&format!("# {}\n\n", node.name));
        content.push_str(&format!("**ID:** `{}`\n\n", node.id));
        content.push_str(&format!("**Type:** {}\n\n", node.r#type));

        if !node.properties.description.is_empty() {
            content.push_str(&format!(
                "## Description\n\n{}\n\n",
                node.properties.description
            ));
        }

        if !node.properties.key_facts.is_empty() {
            content.push_str("## Facts\n\n");
            for fact in &node.properties.key_facts {
                content.push_str(&format!("- {}\n", fact));
            }
            content.push('\n');
        }

        if !node.properties.alias.is_empty() {
            content.push_str(&format!(
                "**Aliases:** {}\n\n",
                node.properties.alias.join(", ")
            ));
        }

        // Relations lists both outgoing and incoming edges touching this node.
        content.push_str("## Relations\n\n");
        for edge in &edges {
            if edge.source_id == node.id {
                content.push_str(&format!(
                    "- [[{}]] --({})--> [[{}]]\n",
                    node.id, edge.relation, edge.target_id
                ));
            } else if edge.target_id == node.id {
                // NOTE(review): for an incoming edge (source -> node) this renders
                // "[[source]] <--(rel)-- [[node]]", whose arrow reads as
                // node -> source — apparently inverted; confirm intended.
                content.push_str(&format!(
                    "- [[{}]] <--({})-- [[{}]]\n",
                    edge.source_id, edge.relation, node.id
                ));
            }
        }
        content.push('\n');

        // Backlinks repeats incoming edges in a dedicated section.
        content.push_str("## Backlinks\n\n");
        let backlinks: Vec<_> = edges.iter().filter(|e| e.target_id == node.id).collect();
        if backlinks.is_empty() {
            content.push_str("_No backlinks_\n");
        } else {
            for edge in backlinks {
                content.push_str(&format!("- [[{}]] ({})\n", edge.source_id, edge.relation));
            }
        }

        std::fs::write(&filepath, content)?;

        index_lines.push_str(&format!(
            "- [[{}]] - {} [{}]\n",
            node.id, node.name, node.r#type
        ));
    }

    std::fs::write(Path::new(&output_dir).join("index.md"), index_lines)?;

    Ok(format!(
        "+ exported {}/ ({} nodes)\n",
        output_dir,
        nodes.len()
    ))
}
1551
/// Make a node/note id safe for use as a filename: path separators, colons
/// and spaces become `_`, and `&` becomes the word "and".
fn sanitize_filename(name: &str) -> String {
    let mut out = String::with_capacity(name.len());
    for ch in name.chars() {
        match ch {
            ':' | '/' | '\\' | ' ' => out.push('_'),
            '&' => out.push_str("and"),
            other => out.push(other),
        }
    }
    out
}
1555
1556pub(crate) fn split_graph(graph: &str, graph_file: &GraphFile, args: &SplitArgs) -> Result<String> {
1557 let output_dir = args
1558 .output
1559 .clone()
1560 .unwrap_or_else(|| format!("{}-split", graph));
1561
1562 let nodes_dir = Path::new(&output_dir).join("nodes");
1563 let edges_dir = Path::new(&output_dir).join("edges");
1564 let notes_dir = Path::new(&output_dir).join("notes");
1565 let meta_dir = Path::new(&output_dir).join("metadata");
1566
1567 std::fs::create_dir_all(&nodes_dir)?;
1568 std::fs::create_dir_all(&edges_dir)?;
1569 std::fs::create_dir_all(¬es_dir)?;
1570 std::fs::create_dir_all(&meta_dir)?;
1571
1572 let meta_json = serde_json::to_string_pretty(&graph_file.metadata)?;
1573 std::fs::write(meta_dir.join("metadata.json"), meta_json)?;
1574
1575 let mut node_count = 0;
1576 for node in &graph_file.nodes {
1577 let safe_id = sanitize_filename(&node.id);
1578 let filepath = nodes_dir.join(format!("{}.json", safe_id));
1579 let node_json = serde_json::to_string_pretty(node)?;
1580 std::fs::write(filepath, node_json)?;
1581 node_count += 1;
1582 }
1583
1584 let mut edge_count = 0;
1585 for edge in &graph_file.edges {
1586 let edge_key = format!(
1587 "{}___{}___{}",
1588 sanitize_filename(&edge.source_id),
1589 sanitize_filename(&edge.relation),
1590 sanitize_filename(&edge.target_id)
1591 );
1592 let filepath = edges_dir.join(format!("{}.json", edge_key));
1593 let edge_json = serde_json::to_string_pretty(edge)?;
1594 std::fs::write(filepath, edge_json)?;
1595 edge_count += 1;
1596 }
1597
1598 let mut note_count = 0;
1599 for note in &graph_file.notes {
1600 let safe_id = sanitize_filename(¬e.id);
1601 let filepath = notes_dir.join(format!("{}.json", safe_id));
1602 let note_json = serde_json::to_string_pretty(note)?;
1603 std::fs::write(filepath, note_json)?;
1604 note_count += 1;
1605 }
1606
1607 let manifest = format!(
1608 r#"# {} Split Manifest
1609
1610This directory contains a git-friendly split representation of the graph.
1611
1612## Structure
1613
1614- `metadata/metadata.json` - Graph metadata
1615- `nodes/` - One JSON file per node (filename = sanitized node id)
1616- `edges/` - One JSON file per edge (filename = source___relation___target)
1617- `notes/` - One JSON file per note
1618
1619## Stats
1620
1621- Nodes: {}
1622- Edges: {}
1623- Notes: {}
1624
1625## Usage
1626
1627To reassemble into a single JSON file, use `kg {} import-json`.
1628"#,
1629 graph, node_count, edge_count, note_count, graph
1630 );
1631 std::fs::write(Path::new(&output_dir).join("MANIFEST.md"), manifest)?;
1632
1633 Ok(format!(
1634 "+ split {} into {}/ (nodes: {}, edges: {}, notes: {})\n",
1635 graph, output_dir, node_count, edge_count, note_count
1636 ))
1637}
1638
/// Select the subgraph to export.
///
/// With a `focus` node: BFS outward up to `depth` hops, treating edges as
/// undirected. Without one: start from all nodes. Then drop nodes whose type
/// (case-insensitive) is not in `node_types` — the focus node is always kept —
/// and keep only edges whose endpoints both survive. Results are sorted so
/// exports are deterministic.
///
/// Errors if `focus` names a node that does not exist.
fn select_subgraph<'a>(
    graph_file: &'a GraphFile,
    focus: Option<&'a str>,
    depth: usize,
    node_types: &'a [String],
) -> Result<(Vec<&'a Node>, Vec<&'a Edge>)> {
    use std::collections::{HashSet, VecDeque};

    let mut selected: HashSet<String> = HashSet::new();
    if let Some(focus_id) = focus {
        if graph_file.node_by_id(focus_id).is_none() {
            bail!("focus node not found: {focus_id}");
        }
        selected.insert(focus_id.to_owned());
        let mut frontier = VecDeque::new();
        frontier.push_back((focus_id.to_owned(), 0usize));
        while let Some((current, dist)) = frontier.pop_front() {
            // Nodes at the depth limit are kept but not expanded further.
            if dist >= depth {
                continue;
            }
            for edge in &graph_file.edges {
                // Traverse the edge in either direction (undirected BFS).
                let next = if edge.source_id == current {
                    Some(edge.target_id.clone())
                } else if edge.target_id == current {
                    Some(edge.source_id.clone())
                } else {
                    None
                };
                if let Some(next_id) = next {
                    // insert() returning true means first visit — enqueue it.
                    if selected.insert(next_id.clone()) {
                        frontier.push_back((next_id, dist + 1));
                    }
                }
            }
        }
    } else {
        for node in &graph_file.nodes {
            selected.insert(node.id.clone());
        }
    }

    // Case-insensitive type filter; an empty list means "keep everything".
    let type_filter: Vec<String> = node_types.iter().map(|t| t.to_lowercase()).collect();
    let has_filter = !type_filter.is_empty();
    let mut nodes: Vec<&Node> = graph_file
        .nodes
        .iter()
        .filter(|node| selected.contains(&node.id))
        .filter(|node| {
            // The focus node is exempt from the type filter.
            if let Some(focus_id) = focus {
                if node.id == focus_id {
                    return true;
                }
            }
            !has_filter || type_filter.contains(&node.r#type.to_lowercase())
        })
        .collect();
    nodes.sort_by(|a, b| a.id.cmp(&b.id));

    // Only keep edges whose both endpoints survived node selection.
    let node_set: HashSet<String> = nodes.iter().map(|node| node.id.clone()).collect();
    let mut edges: Vec<&Edge> = graph_file
        .edges
        .iter()
        .filter(|edge| node_set.contains(&edge.source_id) && node_set.contains(&edge.target_id))
        .collect();
    edges.sort_by(|a, b| {
        a.source_id
            .cmp(&b.source_id)
            .then_with(|| a.relation.cmp(&b.relation))
            .then_with(|| a.target_id.cmp(&b.target_id))
    });

    Ok((nodes, edges))
}
1712
/// Escape quotes and newlines for embedding inside a DOT string literal.
fn escape_dot(value: &str) -> String {
    let mut escaped = String::with_capacity(value.len());
    for ch in value.chars() {
        match ch {
            '"' => escaped.push_str("\\\""),
            '\n' => escaped.push_str("\\n"),
            other => escaped.push(other),
        }
    }
    escaped
}
1716
/// Escape quotes and newlines for embedding inside a Mermaid label.
fn escape_mermaid(value: &str) -> String {
    value
        .chars()
        .map(|ch| match ch {
            '"' => "\\\"".to_owned(),
            '\n' => "\\n".to_owned(),
            other => other.to_string(),
        })
        .collect()
}
1720
/// Turn an arbitrary node id into a Mermaid-safe identifier: ASCII
/// alphanumerics and `_` pass through, everything else becomes `_`.
/// An empty input maps to the literal "node".
fn sanitize_mermaid_id(value: &str) -> String {
    let sanitized: String = value
        .chars()
        .map(|ch| {
            if ch.is_ascii_alphanumeric() || ch == '_' {
                ch
            } else {
                '_'
            }
        })
        .collect();
    if sanitized.is_empty() {
        "node".to_owned()
    } else {
        sanitized
    }
}
1736
1737pub(crate) fn render_graph_history(path: &Path, graph: &str, args: &HistoryArgs) -> Result<String> {
1738 let backups = list_graph_backups(path)?;
1739 let total = backups.len();
1740 let snapshots: Vec<(u64, PathBuf)> = backups.into_iter().rev().take(args.limit).collect();
1741
1742 if args.json {
1743 let payload = GraphHistoryResponse {
1744 graph: graph.to_owned(),
1745 total,
1746 snapshots: snapshots
1747 .iter()
1748 .map(|(ts, backup_path)| GraphHistorySnapshot {
1749 ts: *ts,
1750 path: backup_path.display().to_string(),
1751 })
1752 .collect(),
1753 };
1754 let rendered =
1755 serde_json::to_string_pretty(&payload).context("failed to render history as JSON")?;
1756 return Ok(format!("{rendered}\n"));
1757 }
1758
1759 let mut lines = vec![format!("= history {graph} ({total})")];
1760 for (ts, backup_path) in snapshots {
1761 lines.push(format!("- {ts} | {}", backup_path.display()));
1762 }
1763 Ok(format!("{}\n", lines.join("\n")))
1764}
1765
/// Render the event-log timeline for a graph — optionally bounded to the
/// inclusive `[since_ts_ms, until_ts_ms]` window and truncated to the newest
/// `args.limit` entries — as plain text or pretty JSON.
pub(crate) fn render_graph_timeline(
    path: &Path,
    graph: &str,
    args: &TimelineArgs,
) -> Result<String> {
    let entries = event_log::read_log(path)?;
    let total = entries.len();
    // Keep entries inside the optional time window; absent bounds pass everything.
    let filtered: Vec<&event_log::EventLogEntry> = entries
        .iter()
        .filter(|entry| {
            let after_since = args
                .since_ts_ms
                .map(|since| entry.ts_ms >= since)
                .unwrap_or(true);
            let before_until = args
                .until_ts_ms
                .map(|until| entry.ts_ms <= until)
                .unwrap_or(true);
            after_since && before_until
        })
        .collect();
    // Newest-first, capped at the requested limit.
    let recent: Vec<&event_log::EventLogEntry> =
        filtered.into_iter().rev().take(args.limit).collect();

    if args.json {
        let payload = GraphTimelineResponse {
            graph: graph.to_owned(),
            total,
            // NOTE(review): `filtered` reports the shown (post-limit) count,
            // not the full window match count — confirm intended.
            filtered: recent.len(),
            since_ts_ms: args.since_ts_ms,
            until_ts_ms: args.until_ts_ms,
            entries: recent
                .iter()
                .map(|entry| GraphTimelineEntry {
                    ts_ms: entry.ts_ms,
                    action: entry.action.clone(),
                    detail: entry.detail.clone(),
                    node_count: entry.graph.nodes.len(),
                    edge_count: entry.graph.edges.len(),
                    note_count: entry.graph.notes.len(),
                })
                .collect(),
        };
        let rendered =
            serde_json::to_string_pretty(&payload).context("failed to render timeline as JSON")?;
        return Ok(format!("{rendered}\n"));
    }

    let mut lines = vec![format!("= timeline {graph} ({total})")];
    // Only show the range banner when the caller actually bounded the window.
    if args.since_ts_ms.is_some() || args.until_ts_ms.is_some() {
        lines.push(format!(
            "range: {} -> {}",
            args.since_ts_ms
                .map(|value| value.to_string())
                .unwrap_or_else(|| "-inf".to_owned()),
            args.until_ts_ms
                .map(|value| value.to_string())
                .unwrap_or_else(|| "+inf".to_owned())
        ));
        lines.push(format!("showing: {}", recent.len()));
    }
    for entry in recent {
        let detail = entry
            .detail
            .as_deref()
            .map(|value| format!(" | {value}"))
            .unwrap_or_default();
        lines.push(format!(
            "- {} | {}{} | nodes: {} | edges: {} | notes: {}",
            entry.ts_ms,
            entry.action,
            detail,
            entry.graph.nodes.len(),
            entry.graph.edges.len(),
            entry.graph.notes.len()
        ));
    }
    Ok(format!("{}\n", lines.join("\n")))
}
1845
/// One backup row in the JSON output of `render_graph_history`.
#[derive(Debug, Serialize)]
struct GraphHistorySnapshot {
    /// Backup timestamp parsed from the backup filename (seconds — elsewhere
    /// compared against `ts_ms / 1000`).
    ts: u64,
    /// Filesystem path of the gzipped backup file.
    path: String,
}
1851
/// Top-level JSON payload of `render_graph_history`.
#[derive(Debug, Serialize)]
struct GraphHistoryResponse {
    /// Graph name the history belongs to.
    graph: String,
    /// Total number of backups found (before the display limit is applied).
    total: usize,
    /// Newest-first snapshots, capped at the requested limit.
    snapshots: Vec<GraphHistorySnapshot>,
}
1858
/// One event row in the JSON output of `render_graph_timeline`.
#[derive(Debug, Serialize)]
struct GraphTimelineEntry {
    /// Event timestamp in milliseconds (copied from the event log entry).
    ts_ms: u64,
    /// Action label, e.g. "graph.merge" or "graph.import-csv".
    action: String,
    /// Optional free-form detail recorded with the event.
    detail: Option<String>,
    /// Node count of the graph snapshot attached to the event.
    node_count: usize,
    /// Edge count of the graph snapshot attached to the event.
    edge_count: usize,
    /// Note count of the graph snapshot attached to the event.
    note_count: usize,
}
1868
/// Top-level JSON payload of `render_graph_timeline`.
#[derive(Debug, Serialize)]
struct GraphTimelineResponse {
    /// Graph name the timeline belongs to.
    graph: String,
    /// Total number of event log entries before any filtering.
    total: usize,
    /// Number of entries actually emitted (after window filter and limit).
    filtered: usize,
    /// Inclusive lower bound of the requested window, if any.
    since_ts_ms: Option<u64>,
    /// Inclusive upper bound of the requested window, if any.
    until_ts_ms: Option<u64>,
    /// The emitted entries, newest first.
    entries: Vec<GraphTimelineEntry>,
}
1878
1879pub(crate) fn render_graph_diff_as_of(
1880 path: &Path,
1881 graph: &str,
1882 args: &DiffAsOfArgs,
1883) -> Result<String> {
1884 match resolve_temporal_source(path, args.source)? {
1885 TemporalSource::EventLog => render_graph_diff_as_of_event_log(path, graph, args),
1886 _ => render_graph_diff_as_of_backups(path, graph, args),
1887 }
1888}
1889
1890pub(crate) fn render_graph_diff_as_of_json(
1891 path: &Path,
1892 graph: &str,
1893 args: &DiffAsOfArgs,
1894) -> Result<String> {
1895 match resolve_temporal_source(path, args.source)? {
1896 TemporalSource::EventLog => render_graph_diff_as_of_event_log_json(path, graph, args),
1897 _ => render_graph_diff_as_of_backups_json(path, graph, args),
1898 }
1899}
1900
1901fn render_graph_diff_as_of_backups(
1902 path: &Path,
1903 graph: &str,
1904 args: &DiffAsOfArgs,
1905) -> Result<String> {
1906 let backups = list_graph_backups(path)?;
1907 if backups.is_empty() {
1908 bail!("no backups found for graph: {graph}");
1909 }
1910 let from_ts = args.from_ts_ms / 1000;
1911 let to_ts = args.to_ts_ms / 1000;
1912 let from_backup = select_backup_at_or_before(&backups, from_ts)
1913 .ok_or_else(|| anyhow!("no backup at or before from_ts_ms={}", args.from_ts_ms))?;
1914 let to_backup = select_backup_at_or_before(&backups, to_ts)
1915 .ok_or_else(|| anyhow!("no backup at or before to_ts_ms={}", args.to_ts_ms))?;
1916
1917 let from_graph = load_graph_from_backup(&from_backup.1)?;
1918 let to_graph = load_graph_from_backup(&to_backup.1)?;
1919 let left_label = format!("{graph}@{}", args.from_ts_ms);
1920 let right_label = format!("{graph}@{}", args.to_ts_ms);
1921 Ok(render_graph_diff_from_files(
1922 &left_label,
1923 &right_label,
1924 &from_graph,
1925 &to_graph,
1926 ))
1927}
1928
1929fn render_graph_diff_as_of_backups_json(
1930 path: &Path,
1931 graph: &str,
1932 args: &DiffAsOfArgs,
1933) -> Result<String> {
1934 let backups = list_graph_backups(path)?;
1935 if backups.is_empty() {
1936 bail!("no backups found for graph: {graph}");
1937 }
1938 let from_ts = args.from_ts_ms / 1000;
1939 let to_ts = args.to_ts_ms / 1000;
1940 let from_backup = select_backup_at_or_before(&backups, from_ts)
1941 .ok_or_else(|| anyhow!("no backup at or before from_ts_ms={}", args.from_ts_ms))?;
1942 let to_backup = select_backup_at_or_before(&backups, to_ts)
1943 .ok_or_else(|| anyhow!("no backup at or before to_ts_ms={}", args.to_ts_ms))?;
1944
1945 let from_graph = load_graph_from_backup(&from_backup.1)?;
1946 let to_graph = load_graph_from_backup(&to_backup.1)?;
1947 let left_label = format!("{graph}@{}", args.from_ts_ms);
1948 let right_label = format!("{graph}@{}", args.to_ts_ms);
1949 Ok(render_graph_diff_json_from_files(
1950 &left_label,
1951 &right_label,
1952 &from_graph,
1953 &to_graph,
1954 ))
1955}
1956
1957fn render_graph_diff_as_of_event_log(
1958 path: &Path,
1959 graph: &str,
1960 args: &DiffAsOfArgs,
1961) -> Result<String> {
1962 let entries = event_log::read_log(path)?;
1963 if entries.is_empty() {
1964 bail!("no event log entries found for graph: {graph}");
1965 }
1966 let from_entry = select_event_at_or_before(&entries, args.from_ts_ms).ok_or_else(|| {
1967 anyhow!(
1968 "no event log entry at or before from_ts_ms={}",
1969 args.from_ts_ms
1970 )
1971 })?;
1972 let to_entry = select_event_at_or_before(&entries, args.to_ts_ms)
1973 .ok_or_else(|| anyhow!("no event log entry at or before to_ts_ms={}", args.to_ts_ms))?;
1974
1975 let left_label = format!("{graph}@{}", args.from_ts_ms);
1976 let right_label = format!("{graph}@{}", args.to_ts_ms);
1977 Ok(render_graph_diff_from_files(
1978 &left_label,
1979 &right_label,
1980 &from_entry.graph,
1981 &to_entry.graph,
1982 ))
1983}
1984
1985fn render_graph_diff_as_of_event_log_json(
1986 path: &Path,
1987 graph: &str,
1988 args: &DiffAsOfArgs,
1989) -> Result<String> {
1990 let entries = event_log::read_log(path)?;
1991 if entries.is_empty() {
1992 bail!("no event log entries found for graph: {graph}");
1993 }
1994 let from_entry = select_event_at_or_before(&entries, args.from_ts_ms).ok_or_else(|| {
1995 anyhow!(
1996 "no event log entry at or before from_ts_ms={}",
1997 args.from_ts_ms
1998 )
1999 })?;
2000 let to_entry = select_event_at_or_before(&entries, args.to_ts_ms)
2001 .ok_or_else(|| anyhow!("no event log entry at or before to_ts_ms={}", args.to_ts_ms))?;
2002
2003 let left_label = format!("{graph}@{}", args.from_ts_ms);
2004 let right_label = format!("{graph}@{}", args.to_ts_ms);
2005 Ok(render_graph_diff_json_from_files(
2006 &left_label,
2007 &right_label,
2008 &from_entry.graph,
2009 &to_entry.graph,
2010 ))
2011}
2012
2013fn resolve_temporal_source(path: &Path, source: TemporalSource) -> Result<TemporalSource> {
2014 if matches!(source, TemporalSource::Auto) {
2015 let has_events = event_log::has_log(path);
2016 return Ok(if has_events {
2017 TemporalSource::EventLog
2018 } else {
2019 TemporalSource::Backups
2020 });
2021 }
2022 Ok(source)
2023}
2024
2025fn select_event_at_or_before(
2026 entries: &[event_log::EventLogEntry],
2027 target_ts_ms: u64,
2028) -> Option<&event_log::EventLogEntry> {
2029 let mut selected = None;
2030 for entry in entries {
2031 if entry.ts_ms <= target_ts_ms {
2032 selected = Some(entry);
2033 }
2034 }
2035 selected
2036}
2037
/// Return the last `(ts, path)` backup whose timestamp is at or before
/// `target_ts`, cloning the pair, or `None` if every backup is newer.
fn select_backup_at_or_before(
    backups: &[(u64, PathBuf)],
    target_ts: u64,
) -> Option<(u64, PathBuf)> {
    backups
        .iter()
        .filter(|(ts, _)| *ts <= target_ts)
        .last()
        .map(|(ts, path)| (*ts, path.clone()))
}
2050
2051fn load_graph_from_backup(path: &Path) -> Result<GraphFile> {
2052 let raw = read_gz_to_string(path)?;
2053 let graph: GraphFile = serde_json::from_str(&raw)
2054 .with_context(|| format!("failed to parse backup: {}", path.display()))?;
2055 Ok(graph)
2056}
2057
2058pub(crate) fn render_note_list(graph: &GraphFile, args: &NoteListArgs) -> String {
2059 let mut notes: Vec<&Note> = graph
2060 .notes
2061 .iter()
2062 .filter(|note| args.node.as_ref().is_none_or(|node| note.node_id == *node))
2063 .collect();
2064
2065 notes.sort_by(|a, b| {
2066 a.created_at
2067 .cmp(&b.created_at)
2068 .then_with(|| a.id.cmp(&b.id))
2069 });
2070
2071 let total = notes.len();
2072 let visible: Vec<&Note> = notes.into_iter().take(args.limit).collect();
2073
2074 let mut lines = vec![format!("= notes ({total})")];
2075 for note in &visible {
2076 let mut line = format!(
2077 "- {} | {} | {} | {}",
2078 note.id,
2079 note.node_id,
2080 note.created_at,
2081 truncate_note(&escape_cli_text(¬e.body), 80)
2082 );
2083 if !note.tags.is_empty() {
2084 line.push_str(" | tags: ");
2085 line.push_str(
2086 ¬e
2087 .tags
2088 .iter()
2089 .map(|tag| escape_cli_text(tag))
2090 .collect::<Vec<_>>()
2091 .join(", "),
2092 );
2093 }
2094 if !note.author.is_empty() {
2095 line.push_str(" | by: ");
2096 line.push_str(&escape_cli_text(¬e.author));
2097 }
2098 lines.push(line);
2099 }
2100 let omitted = total.saturating_sub(visible.len());
2101 if omitted > 0 {
2102 lines.push(format!("... {omitted} more notes omitted"));
2103 }
2104
2105 format!("{}\n", lines.join("\n"))
2106}
2107
2108pub(crate) fn build_note(graph: &GraphFile, args: NoteAddArgs) -> Result<Note> {
2109 if graph.node_by_id(&args.node_id).is_none() {
2110 bail!("node not found: {}", args.node_id);
2111 }
2112 let ts = now_ms();
2113 let id = args.id.unwrap_or_else(|| format!("note:{ts}"));
2114 let created_at = args.created_at.unwrap_or_else(|| ts.to_string());
2115 Ok(Note {
2116 id,
2117 node_id: args.node_id,
2118 body: args.text,
2119 tags: args.tag,
2120 author: args.author.unwrap_or_default(),
2121 created_at,
2122 provenance: args.provenance.unwrap_or_default(),
2123 source_files: args.source,
2124 })
2125}
2126
/// Truncates `value` to at most `max_len` characters, appending `...` when
/// content was cut.
///
/// Counts Unicode scalar values (`chars`), not bytes, so multi-byte text is
/// never split mid-character. When `max_len` is too small to fit the
/// ellipsis (< 3), the value is hard-truncated without a suffix — the
/// previous implementation could return a 3-character `"..."` even for
/// `max_len` of 1 or 2, exceeding the requested limit.
fn truncate_note(value: &str, max_len: usize) -> String {
    if value.chars().count() <= max_len {
        return value.to_owned();
    }
    if max_len < 3 {
        // No room for the "..." marker; keep the hard character cap.
        return value.chars().take(max_len).collect();
    }
    let kept: String = value.chars().take(max_len - 3).collect();
    format!("{kept}...")
}
2135
/// Escapes control characters so a note field renders on a single CLI line.
///
/// Backslash, newline, carriage return, and tab are replaced by their
/// two-character escape sequences; every other character passes through
/// unchanged.
fn escape_cli_text(value: &str) -> String {
    value
        .chars()
        .fold(String::with_capacity(value.len()), |mut out, ch| {
            match ch {
                '\\' => out.push_str("\\\\"),
                '\n' => out.push_str("\\n"),
                '\r' => out.push_str("\\r"),
                '\t' => out.push_str("\\t"),
                other => out.push(other),
            }
            out
        })
}
2149
/// Milliseconds since the Unix epoch; 0 if the system clock reads earlier
/// than the epoch.
fn now_ms() -> u128 {
    use std::time::{SystemTime, UNIX_EPOCH};

    match SystemTime::now().duration_since(UNIX_EPOCH) {
        Ok(elapsed) => elapsed.as_millis(),
        Err(_) => 0,
    }
}
2158
/// Maps the CLI-facing find mode onto the output layer's mode.
///
/// `Vector` maps to fuzzy matching — presumably a fallback until
/// vector-backed search exists on the output side; confirm before relying
/// on distinct vector-mode behavior here.
pub(crate) fn map_find_mode(mode: CliFindMode) -> output::FindMode {
    match mode {
        CliFindMode::Fuzzy => output::FindMode::Fuzzy,
        CliFindMode::Bm25 => output::FindMode::Bm25,
        // Fallback: no vector variant exists in output::FindMode.
        CliFindMode::Vector => output::FindMode::Fuzzy,
    }
}
2166
2167pub(crate) fn render_feedback_log(cwd: &Path, args: &FeedbackLogArgs) -> Result<String> {
2168 let path = cwd.join("kg-mcp.feedback.log");
2169 if !path.exists() {
2170 return Ok(String::from("= feedback-log\nempty: no entries yet\n"));
2171 }
2172
2173 let content = std::fs::read_to_string(&path)?;
2174 let mut entries: Vec<FeedbackLogEntry> = Vec::new();
2175 for line in content.lines() {
2176 if let Some(entry) = FeedbackLogEntry::parse(line) {
2177 if let Some(ref uid) = args.uid {
2178 if &entry.uid != uid {
2179 continue;
2180 }
2181 }
2182 if let Some(ref graph) = args.graph {
2183 if &entry.graph != graph {
2184 continue;
2185 }
2186 }
2187 entries.push(entry);
2188 }
2189 }
2190
2191 entries.reverse();
2192 let shown: Vec<&FeedbackLogEntry> = entries.iter().take(args.limit).collect();
2193
2194 let mut output = vec![String::from("= feedback-log")];
2195 output.push(format!("total_entries: {}", entries.len()));
2196 output.push(format!("showing: {}", shown.len()));
2197 output.push(String::from("recent_entries:"));
2198 for e in shown {
2199 let pick = e.pick.as_deref().unwrap_or("-");
2200 let selected = e.selected.as_deref().unwrap_or("-");
2201 let graph = if e.graph.is_empty() { "-" } else { &e.graph };
2202 let queries = if e.queries.is_empty() {
2203 "-"
2204 } else {
2205 &e.queries
2206 };
2207 output.push(format!(
2208 "- {} | {} | {} | pick={} | selected={} | graph={} | {}",
2209 e.ts_ms, e.uid, e.action, pick, selected, graph, queries
2210 ));
2211 }
2212
2213 Ok(format!("{}\n", output.join("\n")))
2214}
2215
2216pub(crate) fn handle_vector_command(
2217 path: &Path,
2218 _graph: &str,
2219 graph_file: &GraphFile,
2220 command: &VectorCommand,
2221 _cwd: &Path,
2222) -> Result<String> {
2223 match command {
2224 VectorCommand::Import(args) => {
2225 let vector_path = path
2226 .parent()
2227 .map(|p| p.join(".kg.vectors.json"))
2228 .unwrap_or_else(|| PathBuf::from(".kg.vectors.json"));
2229 let store =
2230 vectors::VectorStore::import_jsonl(std::path::Path::new(&args.input), graph_file)?;
2231 store.save(&vector_path)?;
2232 Ok(format!(
2233 "+ imported {} vectors (dim={}) to {}\n",
2234 store.vectors.len(),
2235 store.dimension,
2236 vector_path.display()
2237 ))
2238 }
2239 VectorCommand::Stats(_args) => {
2240 let vector_path = path
2241 .parent()
2242 .map(|p| p.join(".kg.vectors.json"))
2243 .unwrap_or_else(|| PathBuf::from(".kg.vectors.json"));
2244 if !vector_path.exists() {
2245 return Ok(String::from("= vectors\nnot initialized\n"));
2246 }
2247 let store = vectors::VectorStore::load(&vector_path)?;
2248 let node_ids: Vec<_> = store.vectors.keys().cloned().collect();
2249 let in_graph = node_ids
2250 .iter()
2251 .filter(|id| graph_file.node_by_id(id).is_some())
2252 .count();
2253 Ok(format!(
2254 "= vectors\ndimension: {}\ntotal: {}\nin_graph: {}\n",
2255 store.dimension,
2256 store.vectors.len(),
2257 in_graph
2258 ))
2259 }
2260 }
2261}
2262
2263fn render_feedback_summary(cwd: &Path, args: &FeedbackSummaryArgs) -> Result<String> {
2264 use std::collections::HashMap;
2265
2266 let path = cwd.join("kg-mcp.feedback.log");
2267 if !path.exists() {
2268 return Ok(String::from("= feedback-summary\nNo feedback yet.\n"));
2269 }
2270
2271 let content = std::fs::read_to_string(&path)?;
2272 let mut entries: Vec<FeedbackLogEntry> = Vec::new();
2273 for line in content.lines() {
2274 if let Some(entry) = FeedbackLogEntry::parse(line) {
2275 if let Some(ref graph) = args.graph {
2276 if &entry.graph != graph {
2277 continue;
2278 }
2279 }
2280 entries.push(entry);
2281 }
2282 }
2283
2284 entries.reverse();
2285 let _shown = entries.iter().take(args.limit).collect::<Vec<_>>();
2286
2287 let mut lines = vec![String::from("= feedback-summary")];
2288 lines.push(format!("Total entries: {}", entries.len()));
2289
2290 let mut by_action: HashMap<&str, usize> = HashMap::new();
2291 let mut nil_queries: Vec<&str> = Vec::new();
2292 let mut yes_count = 0;
2293 let mut no_count = 0;
2294 let mut pick_map: HashMap<&str, usize> = HashMap::new();
2295 let mut query_counts: HashMap<&str, usize> = HashMap::new();
2296
2297 for e in &entries {
2298 *by_action.entry(&e.action).or_insert(0) += 1;
2299
2300 match e.action.as_str() {
2301 "NIL" => {
2302 if !e.queries.is_empty() {
2303 nil_queries.push(&e.queries);
2304 }
2305 }
2306 "YES" => yes_count += 1,
2307 "NO" => no_count += 1,
2308 "PICK" => {
2309 if let Some(ref sel) = e.selected {
2310 *pick_map.entry(sel).or_insert(0) += 1;
2311 }
2312 }
2313 _ => {}
2314 }
2315
2316 if !e.queries.is_empty() {
2317 *query_counts.entry(&e.queries).or_insert(0) += 1;
2318 }
2319 }
2320
2321 lines.push(String::from("\n### By response"));
2322 lines.push(format!(
2323 "YES: {} ({:.0}%)",
2324 yes_count,
2325 if !entries.is_empty() {
2326 (yes_count as f64 / entries.len() as f64) * 100.0
2327 } else {
2328 0.0
2329 }
2330 ));
2331 lines.push(format!("NO: {}", no_count));
2332 lines.push(format!("PICK: {}", by_action.get("PICK").unwrap_or(&0)));
2333 lines.push(format!("NIL: {} (no results)", nil_queries.len()));
2334
2335 if !nil_queries.is_empty() {
2336 lines.push(String::from("\n### Brakujące node'y (NIL queries)"));
2337 for q in nil_queries.iter().take(10) {
2338 lines.push(format!("- \"{}\"", q));
2339 }
2340 if nil_queries.len() > 10 {
2341 lines.push(format!(" ... i {} więcej", nil_queries.len() - 10));
2342 }
2343 }
2344
2345 if !pick_map.is_empty() {
2346 lines.push(String::from("\n### Najczęściej wybierane node'y (PICK)"));
2347 let mut sorted: Vec<_> = pick_map.iter().collect();
2348 sorted.sort_by(|a, b| b.1.cmp(a.1));
2349 for (node, count) in sorted.iter().take(10) {
2350 lines.push(format!("- {} ({}x)", node, count));
2351 }
2352 }
2353
2354 if !query_counts.is_empty() {
2355 lines.push(String::from("\n### Top wyszukiwane terminy"));
2356 let mut sorted: Vec<_> = query_counts.iter().collect();
2357 sorted.sort_by(|a, b| b.1.cmp(a.1));
2358 for (query, count) in sorted.iter().take(10) {
2359 lines.push(format!("- \"{}\" ({})", query, count));
2360 }
2361 }
2362
2363 if yes_count == 0 && no_count == 0 && nil_queries.is_empty() {
2364 lines.push(String::from(
2365 "\n(Wpływy za mało na wnioski - potrzeba więcej feedbacku)",
2366 ));
2367 } else if yes_count > no_count * 3 {
2368 lines.push(String::from(
2369 "\n✓ Feedback pozytywny - wyszukiwania działają dobrze.",
2370 ));
2371 } else if no_count > yes_count {
2372 lines.push(String::from(
2373 "\n⚠ Dużo NO - sprawdź jakość aliasów i dopasowań.",
2374 ));
2375 }
2376
2377 Ok(format!("{}\n", lines.join("\n")))
2378}
2379
2380pub(crate) fn render_feedback_summary_for_graph(
2381 cwd: &Path,
2382 graph: &str,
2383 args: &FeedbackSummaryArgs,
2384) -> Result<String> {
2385 let mut args = args.clone();
2386 args.graph = Some(graph.to_string());
2387 render_feedback_summary(cwd, &args)
2388}
2389
/// Aggregated YES/NO/PICK/NIL tallies over the feedback log.
#[derive(Debug, Serialize)]
struct BaselineFeedbackMetrics {
    // Total feedback entries considered for this graph.
    entries: usize,
    yes: usize,
    no: usize,
    pick: usize,
    nil: usize,
    // Per-action share of `entries`; 0.0 when there are no entries.
    yes_rate: f64,
    no_rate: f64,
    nil_rate: f64,
}

/// Rough usage/cost figures derived from the access log.
#[derive(Debug, Serialize)]
struct BaselineCostMetrics {
    find_operations: usize,
    feedback_events: usize,
    feedback_events_per_1000_find_ops: f64,
    // Always None at present; see `token_cost_note` for the explanation.
    token_cost_estimate: Option<f64>,
    token_cost_note: &'static str,
}

/// Retrieval metrics evaluated against an optional golden query set.
#[derive(Debug, Serialize)]
struct GoldenSetMetrics {
    cases: usize,
    // Cases where any expected id appeared anywhere in the results.
    hits_any: usize,
    // Cases where an expected id ranked first.
    top1_hits: usize,
    hit_rate: f64,
    top1_rate: f64,
    // Mean reciprocal rank across all cases.
    mrr: f64,
}

/// Components feeding the aggregate 0-100 quality score.
#[derive(Debug, Serialize)]
struct BaselineQualityScore {
    description_coverage: f64,
    facts_coverage: f64,
    duplicate_penalty: f64,
    edge_gap_penalty: f64,
    score_0_100: f64,
}

/// Full payload of the `baseline` command; rendered as pretty JSON when
/// the --json flag is set, otherwise as a plain-text report.
#[derive(Debug, Serialize)]
struct BaselineReport {
    graph: String,
    quality: crate::analysis::QualitySnapshot,
    quality_score: BaselineQualityScore,
    feedback: BaselineFeedbackMetrics,
    cost: BaselineCostMetrics,
    // Present only when a golden-set file was supplied on the CLI.
    golden: Option<GoldenSetMetrics>,
}

/// One golden-set case: a query plus the node ids considered correct hits.
#[derive(Debug, Deserialize)]
struct GoldenSetCase {
    query: String,
    expected: Vec<String>,
}
2445
2446fn parse_feedback_entries(cwd: &Path, graph_name: &str) -> Result<Vec<FeedbackLogEntry>> {
2447 let path = cwd.join("kg-mcp.feedback.log");
2448 if !path.exists() {
2449 return Ok(Vec::new());
2450 }
2451
2452 let content = std::fs::read_to_string(path)?;
2453 let mut entries = Vec::new();
2454 for line in content.lines() {
2455 if let Some(entry) = FeedbackLogEntry::parse(line) {
2456 if entry.graph == graph_name {
2457 entries.push(entry);
2458 }
2459 }
2460 }
2461 Ok(entries)
2462}
2463
2464fn parse_find_operations(graph_path: &Path) -> Result<usize> {
2465 let Some(path) = access_log::first_existing_access_log_path(graph_path) else {
2466 return Ok(0);
2467 };
2468
2469 let content = std::fs::read_to_string(path)?;
2470 let mut find_ops = 0usize;
2471 for line in content.lines() {
2472 let mut parts = line.split('\t');
2473 let _ts = parts.next();
2474 if let Some(op) = parts.next() {
2475 if op == "FIND" {
2476 find_ops += 1;
2477 }
2478 }
2479 }
2480 Ok(find_ops)
2481}
2482
2483fn compute_feedback_metrics(entries: &[FeedbackLogEntry]) -> BaselineFeedbackMetrics {
2484 let mut yes = 0usize;
2485 let mut no = 0usize;
2486 let mut pick = 0usize;
2487 let mut nil = 0usize;
2488 for entry in entries {
2489 match entry.action.as_str() {
2490 "YES" => yes += 1,
2491 "NO" => no += 1,
2492 "PICK" => pick += 1,
2493 "NIL" => nil += 1,
2494 _ => {}
2495 }
2496 }
2497 let total = entries.len() as f64;
2498 BaselineFeedbackMetrics {
2499 entries: entries.len(),
2500 yes,
2501 no,
2502 pick,
2503 nil,
2504 yes_rate: if total > 0.0 { yes as f64 / total } else { 0.0 },
2505 no_rate: if total > 0.0 { no as f64 / total } else { 0.0 },
2506 nil_rate: if total > 0.0 { nil as f64 / total } else { 0.0 },
2507 }
2508}
2509
/// Derives a single 0-100 quality score from a quality snapshot.
///
/// Weights: 35% description coverage, 35% facts coverage, 15% absence of
/// duplicates, 15% absence of edge gaps. Degenerate graphs (no nodes / no
/// edge candidates) are treated as fully covered with zero penalties.
fn compute_quality_score(snapshot: &crate::analysis::QualitySnapshot) -> BaselineQualityScore {
    let total_nodes = snapshot.total_nodes as f64;
    // Share of nodes that DO have a description (1.0 for an empty graph).
    let description_coverage = if total_nodes > 0.0 {
        (snapshot
            .total_nodes
            .saturating_sub(snapshot.missing_descriptions)) as f64
            / total_nodes
    } else {
        1.0
    };
    // Share of nodes that DO have facts (1.0 for an empty graph).
    let facts_coverage = if total_nodes > 0.0 {
        (snapshot.total_nodes.saturating_sub(snapshot.missing_facts)) as f64 / total_nodes
    } else {
        1.0
    };

    // Duplicate pairs normalized by the maximum possible pair count
    // n*(n-1)/2, clamped into [0, 1].
    let duplicate_penalty = if snapshot.total_nodes > 1 {
        let max_pairs = (snapshot.total_nodes * (snapshot.total_nodes - 1) / 2) as f64;
        (snapshot.duplicate_pairs as f64 / max_pairs).clamp(0.0, 1.0)
    } else {
        0.0
    };

    // Missing edges normalized by the number of candidate slots.
    let edge_candidates = snapshot.edge_gaps.total_candidates();
    let edge_gap_penalty = if edge_candidates > 0 {
        (snapshot.edge_gaps.total_missing() as f64 / edge_candidates as f64).clamp(0.0, 1.0)
    } else {
        0.0
    };

    // Weighted blend of coverages and inverted penalties, scaled to 0-100.
    let score = 100.0
        * (0.35 * description_coverage
            + 0.35 * facts_coverage
            + 0.15 * (1.0 - duplicate_penalty)
            + 0.15 * (1.0 - edge_gap_penalty));

    BaselineQualityScore {
        description_coverage,
        facts_coverage,
        duplicate_penalty,
        edge_gap_penalty,
        score_0_100: score,
    }
}
2554
2555fn eval_golden_set(graph: &GraphFile, args: &BaselineArgs) -> Result<Option<GoldenSetMetrics>> {
2556 let Some(path) = args.golden.as_ref() else {
2557 return Ok(None);
2558 };
2559
2560 let raw = std::fs::read_to_string(path)
2561 .with_context(|| format!("failed to read golden set: {path}"))?;
2562 let cases: Vec<GoldenSetCase> =
2563 serde_json::from_str(&raw).with_context(|| format!("invalid golden set JSON: {path}"))?;
2564
2565 if cases.is_empty() {
2566 return Ok(Some(GoldenSetMetrics {
2567 cases: 0,
2568 hits_any: 0,
2569 top1_hits: 0,
2570 hit_rate: 0.0,
2571 top1_rate: 0.0,
2572 mrr: 0.0,
2573 }));
2574 }
2575
2576 let mode = map_find_mode(args.mode);
2577 let mut hits_any = 0usize;
2578 let mut top1_hits = 0usize;
2579 let mut mrr_sum = 0.0;
2580
2581 for case in &cases {
2582 let results = output::find_nodes(
2583 graph,
2584 &case.query,
2585 args.find_limit,
2586 args.include_features,
2587 mode,
2588 );
2589
2590 let mut first_rank: Option<usize> = None;
2591 for (idx, node) in results.iter().enumerate() {
2592 if case.expected.iter().any(|id| id == &node.id) {
2593 first_rank = Some(idx + 1);
2594 break;
2595 }
2596 }
2597
2598 if let Some(rank) = first_rank {
2599 hits_any += 1;
2600 if rank == 1 {
2601 top1_hits += 1;
2602 }
2603 mrr_sum += 1.0 / rank as f64;
2604 }
2605 }
2606
2607 let total = cases.len() as f64;
2608 Ok(Some(GoldenSetMetrics {
2609 cases: cases.len(),
2610 hits_any,
2611 top1_hits,
2612 hit_rate: hits_any as f64 / total,
2613 top1_rate: top1_hits as f64 / total,
2614 mrr: mrr_sum / total,
2615 }))
2616}
2617
/// Assembles and renders the `baseline` report for one graph: quality
/// snapshot + derived score, feedback metrics, access-log cost figures,
/// and optional golden-set retrieval metrics.
///
/// Output is pretty-printed JSON when `args.json` is set, otherwise a
/// newline-joined plain-text report.
///
/// # Errors
/// Propagates feedback-log parsing, graph-path resolution, access-log
/// reading, and golden-set evaluation failures.
pub(crate) fn render_baseline_report(
    cwd: &Path,
    graph_name: &str,
    graph: &GraphFile,
    quality: &crate::analysis::QualitySnapshot,
    args: &BaselineArgs,
) -> Result<String> {
    let feedback_entries = parse_feedback_entries(cwd, graph_name)?;
    let feedback = compute_feedback_metrics(&feedback_entries);

    // FIND-operation count comes from the access log next to the graph.
    let graph_root = default_graph_root(cwd);
    let graph_path = resolve_graph_path(cwd, &graph_root, graph_name)?;
    let find_operations = parse_find_operations(&graph_path)?;

    let cost = BaselineCostMetrics {
        find_operations,
        feedback_events: feedback.entries,
        feedback_events_per_1000_find_ops: if find_operations > 0 {
            (feedback.entries as f64 / find_operations as f64) * 1000.0
        } else {
            0.0
        },
        token_cost_estimate: None,
        token_cost_note: "token cost unavailable in current logs (instrumentation pending)",
    };

    let quality_score = compute_quality_score(quality);
    let golden = eval_golden_set(graph, args)?;

    let report = BaselineReport {
        graph: graph_name.to_owned(),
        // Rebuilt field-by-field from the borrowed snapshot; presumably
        // QualitySnapshot does not derive Clone — confirm before replacing
        // this with `quality.clone()`.
        quality: crate::analysis::QualitySnapshot {
            total_nodes: quality.total_nodes,
            missing_descriptions: quality.missing_descriptions,
            missing_facts: quality.missing_facts,
            duplicate_pairs: quality.duplicate_pairs,
            edge_gaps: crate::analysis::EdgeGapSnapshot {
                datastore_candidates: quality.edge_gaps.datastore_candidates,
                datastore_missing_stored_in: quality.edge_gaps.datastore_missing_stored_in,
                process_candidates: quality.edge_gaps.process_candidates,
                process_missing_incoming: quality.edge_gaps.process_missing_incoming,
            },
        },
        quality_score,
        feedback,
        cost,
        golden,
    };

    if args.json {
        let rendered = serde_json::to_string_pretty(&report).unwrap_or_else(|_| "{}".to_owned());
        return Ok(format!("{rendered}\n"));
    }

    let mut lines = vec![String::from("= baseline")];
    lines.push(format!("graph: {}", report.graph));
    lines.push(format!(
        "quality_score_0_100: {:.1}",
        report.quality_score.score_0_100
    ));
    lines.push(String::from("quality:"));
    lines.push(format!("- total_nodes: {}", report.quality.total_nodes));
    lines.push(format!(
        "- missing_descriptions: {} ({:.1}%)",
        report.quality.missing_descriptions,
        // coverage * -100 + 100 == (1 - coverage) * 100, i.e. missing %.
        report
            .quality_score
            .description_coverage
            .mul_add(-100.0, 100.0)
    ));
    lines.push(format!(
        "- missing_facts: {} ({:.1}%)",
        report.quality.missing_facts,
        report.quality_score.facts_coverage.mul_add(-100.0, 100.0)
    ));
    lines.push(format!(
        "- duplicate_pairs: {}",
        report.quality.duplicate_pairs
    ));
    lines.push(format!(
        "- edge_gaps: {} / {}",
        report.quality.edge_gaps.total_missing(),
        report.quality.edge_gaps.total_candidates()
    ));

    lines.push(String::from("feedback:"));
    lines.push(format!("- entries: {}", report.feedback.entries));
    lines.push(format!(
        "- YES/NO/NIL/PICK: {}/{}/{}/{}",
        report.feedback.yes, report.feedback.no, report.feedback.nil, report.feedback.pick
    ));
    lines.push(format!(
        "- yes_rate: {:.1}%",
        report.feedback.yes_rate * 100.0
    ));
    lines.push(format!(
        "- no_rate: {:.1}%",
        report.feedback.no_rate * 100.0
    ));

    lines.push(String::from("cost:"));
    lines.push(format!(
        "- find_operations: {}",
        report.cost.find_operations
    ));
    lines.push(format!(
        "- feedback_events: {}",
        report.cost.feedback_events
    ));
    lines.push(format!(
        "- feedback_events_per_1000_find_ops: {:.1}",
        report.cost.feedback_events_per_1000_find_ops
    ));
    lines.push(format!("- token_cost: {}", report.cost.token_cost_note));

    // Golden-set section only appears when a golden file was evaluated.
    if let Some(golden) = report.golden {
        lines.push(String::from("golden_set:"));
        lines.push(format!("- cases: {}", golden.cases));
        lines.push(format!("- hit_rate: {:.1}%", golden.hit_rate * 100.0));
        lines.push(format!("- top1_rate: {:.1}%", golden.top1_rate * 100.0));
        lines.push(format!("- mrr: {:.3}", golden.mrr));
    }

    Ok(format!("{}\n", lines.join("\n")))
}
2743
/// One parsed line of the `kg-mcp.feedback.log` file.
///
/// Lines are tab-separated `key=value` pairs; a literal `-` marks an
/// absent value.
#[derive(Debug, Clone)]
struct FeedbackLogEntry {
    ts_ms: String,
    uid: String,
    action: String,
    pick: Option<String>,
    selected: Option<String>,
    graph: String,
    queries: String,
}

impl FeedbackLogEntry {
    /// Parses a single log line.
    ///
    /// Returns `None` when any tab-separated field lacks `=` or when one of
    /// the required keys (`ts_ms`, `uid`, `action`) is missing. Values are
    /// trimmed, `-` is treated as "not set", and unknown keys are ignored.
    fn parse(line: &str) -> Option<Self> {
        // "-" is the logger's placeholder for an absent value.
        let present = |value: &str| (value != "-").then(|| value.to_owned());

        let mut ts_ms: Option<String> = None;
        let mut uid: Option<String> = None;
        let mut action: Option<String> = None;
        let mut pick: Option<String> = None;
        let mut selected: Option<String> = None;
        let mut graph: Option<String> = None;
        let mut queries: Option<String> = None;

        for field in line.split('\t') {
            let (key, value) = field.split_once('=')?;
            let value = value.trim();
            match key {
                "ts_ms" => ts_ms = Some(value.to_owned()),
                "uid" => uid = Some(value.to_owned()),
                "action" => action = Some(value.to_owned()),
                // `.or(...)` keeps an earlier real value when a later
                // duplicate key carries the "-" placeholder — identical to
                // the original assign-only-when-present behavior.
                "pick" => pick = present(value).or(pick),
                "selected" => selected = present(value).or(selected),
                "graph" => graph = present(value).or(graph),
                "queries" => queries = present(value).or(queries),
                _ => {}
            }
        }

        Some(Self {
            ts_ms: ts_ms?,
            uid: uid?,
            action: action?,
            pick,
            selected,
            graph: graph.unwrap_or_default(),
            queries: queries.unwrap_or_default(),
        })
    }
}
2809
/// Returns the default graph root: `$HOME/.kg/graphs` (`%USERPROFILE%` on
/// Windows), falling back to `<cwd>/.kg/graphs` when neither variable is
/// set.
pub fn default_graph_root(cwd: &Path) -> PathBuf {
    // HOME is preferred; USERPROFILE is the Windows equivalent.
    let home = ["HOME", "USERPROFILE"]
        .into_iter()
        .find_map(|key| std::env::var_os(key))
        .map(PathBuf::from);
    graph_root_from(home.as_deref(), cwd)
}

/// Joins `.kg/graphs` onto `home` when available, otherwise onto `cwd`.
fn graph_root_from(home: Option<&Path>, cwd: &Path) -> PathBuf {
    home.unwrap_or(cwd).join(".kg").join("graphs")
}
2831
/// Resolves `graph` to its on-disk path through the graph store.
///
/// # Errors
/// Propagates store-construction and lookup failures (e.g. unknown graph).
pub fn resolve_graph_path(cwd: &Path, graph_root: &Path, graph: &str) -> Result<PathBuf> {
    let store = graph_store(cwd, graph_root, false)?;
    store.resolve_graph_path(graph)
}
2840
/// Returns the configured feedback-nudge percentage, or the compiled-in
/// default when no kg config is discovered under `cwd`.
pub fn feedback_nudge_percent(cwd: &Path) -> Result<u8> {
    Ok(config::KgConfig::discover(cwd)?
        .map(|(_, config)| config.nudge_percent())
        .unwrap_or(config::DEFAULT_NUDGE_PERCENT))
}
2847
/// Returns the short uid identifying this user in sidecar/feedback logs,
/// creating one via the config layer if needed.
pub fn sidecar_user_short_uid(cwd: &Path) -> String {
    config::ensure_user_short_uid(cwd)
}
2852
/// Appends a feedback record to the graph's sidecar log.
///
/// Errors from the sidecar append are discarded (`let _ =`), so feedback
/// logging can never fail the primary command.
pub fn append_kg_feedback(graph_path: &Path, user_short_uid: &str, node_id: &str, feedback: &str) {
    let _ = kg_sidecar::append_feedback_with_uid(graph_path, user_short_uid, node_id, feedback);
}
2857
/// Validates `graph` (optionally with deep checks rooted at
/// `args.base_dir`) and renders the result under the `check` header.
/// Error/warning filtering and the list cap come from `args`.
pub(crate) fn render_check(graph: &GraphFile, cwd: &Path, args: &CheckArgs) -> String {
    let report = validate_graph(graph, cwd, args.deep, args.base_dir.as_deref());
    format_validation_report(
        "check",
        &report.errors,
        &report.warnings,
        args.errors_only,
        args.warnings_only,
        args.limit,
    )
}
2873
/// Validates `graph` exactly like `check` but renders under the `audit`
/// header; shares validation and formatting with [`render_check`].
pub(crate) fn render_audit(graph: &GraphFile, cwd: &Path, args: &AuditArgs) -> String {
    let report = validate_graph(graph, cwd, args.deep, args.base_dir.as_deref());
    format_validation_report(
        "audit",
        &report.errors,
        &report.warnings,
        args.errors_only,
        args.warnings_only,
        args.limit,
    )
}
2885
/// Formats a validation report: status line (VALID when there are no
/// errors), error/warning counts, then the two lists each capped at
/// `limit`. `errors_only` suppresses the warning list and `warnings_only`
/// suppresses the error list (both set suppresses both lists).
fn format_validation_report(
    header: &str,
    errors: &[String],
    warnings: &[String],
    errors_only: bool,
    warnings_only: bool,
    limit: usize,
) -> String {
    let status = if errors.is_empty() { "VALID" } else { "INVALID" };
    let mut out = vec![
        format!("= {header}"),
        format!("status: {status}"),
        format!("errors: {}", errors.len()),
        format!("warnings: {}", warnings.len()),
    ];
    if !warnings_only {
        out.push("error-list:".to_owned());
        out.extend(errors.iter().take(limit).map(|error| format!("- {error}")));
    }
    if !errors_only {
        out.push("warning-list:".to_owned());
        out.extend(warnings.iter().take(limit).map(|warning| format!("- {warning}")));
    }
    format!("{}\n", out.join("\n"))
}
2919
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::tempdir;

    // Deserializes the bundled example graph used as a rendering fixture.
    fn fixture_graph() -> GraphFile {
        serde_json::from_str(include_str!("../graph-example-fridge.json")).expect("fixture graph")
    }

    // Invokes the CLI entry point, returning parse errors instead of
    // exiting the process.
    fn exec_safe(args: &[&str], cwd: &Path) -> Result<String> {
        run_args_safe(args.iter().map(OsString::from), cwd)
    }

    #[test]
    fn graph_root_prefers_home_directory() {
        // With a home dir, the root lives under it; otherwise under cwd.
        let cwd = Path::new("/tmp/workspace");
        let home = Path::new("/tmp/home");
        assert_eq!(
            graph_root_from(Some(home), cwd),
            PathBuf::from("/tmp/home/.kg/graphs")
        );
        assert_eq!(
            graph_root_from(None, cwd),
            PathBuf::from("/tmp/workspace/.kg/graphs")
        );
    }

    #[test]
    fn get_renders_compact_symbolic_view() {
        let graph = fixture_graph();
        let node = graph.node_by_id("concept:refrigerator").expect("node");
        let rendered = output::render_node(&graph, node, false);
        // Header line, alias list, and outgoing HAS edges must all appear
        // in the compact symbolic format.
        assert!(rendered.contains("# concept:refrigerator | Lodowka"));
        assert!(rendered.contains("aka: Chlodziarka, Fridge"));
        assert!(rendered.contains("-> HAS | concept:cooling_chamber | Komora Chlodzenia"));
        assert!(rendered.contains("-> HAS | concept:temperature | Temperatura"));
    }

    #[test]
    fn help_lists_mvp_commands() {
        // clap surfaces --help as an Err whose Display is the help text.
        let help = Cli::try_parse_from(["kg", "--help"]).expect_err("help exits");
        let rendered = help.to_string();
        assert!(!rendered.contains("▓ ▄▄"));
        assert!(rendered.contains("create"));
        assert!(rendered.contains("list"));
        assert!(rendered.contains("feedback-log"));
        assert!(rendered.contains("fridge node"));
        assert!(rendered.contains("edge"));
        assert!(rendered.contains("quality"));
        assert!(rendered.contains("kg graph fridge stats"));
    }

    #[test]
    fn run_args_safe_returns_error_instead_of_exiting() {
        // Missing required args must come back as an Err, not a process
        // exit, so the MCP embedding stays alive.
        let dir = tempdir().expect("tempdir");
        let err = exec_safe(&["kg", "create"], dir.path()).expect_err("parse error");
        let rendered = err.to_string();
        assert!(rendered.contains("required arguments were not provided"));
        assert!(rendered.contains("<GRAPH_NAME>"));
    }
}
2984}