//! GraphRAG CLI library entry point.
//!
//! Exposes [`run`] so the `graphrag` meta-crate (and tests) can invoke the
//! full CLI without going through a subprocess.

pub mod action;
pub mod app;
pub mod commands;
pub mod config;
pub mod handlers;
pub mod mode;
pub mod query_history;
pub mod theme;
pub mod tui;
pub mod ui;
pub mod workspace;

use app::App;
use clap::{Parser, Subcommand};
use color_eyre::eyre::Result;
use std::path::PathBuf;
// ──────────────────────────────────────────────────────────────────────────────
// CLI types
// ──────────────────────────────────────────────────────────────────────────────

// Top-level CLI definition. The `///` docs on each field double as clap help
// text, so their wording is user-visible in `graphrag --help`.
#[derive(Parser)]
#[command(name = "graphrag")]
#[command(version, about = "Modern Terminal UI for GraphRAG operations", long_about = None)]
#[command(author = "GraphRAG Contributors")]
pub struct Cli {
    /// Configuration file path
    #[arg(short, long, value_name = "FILE")]
    pub config: Option<PathBuf>,

    /// Workspace name
    #[arg(short, long)]
    pub workspace: Option<String>,

    /// Enable debug logging
    #[arg(short, long)]
    pub debug: bool,

    /// Output format: text (default) or json (for scripting/CI)
    #[arg(long, default_value = "text", value_parser = ["text", "json"])]
    pub format: String,

    // When absent, `run` falls back to the interactive TUI.
    #[command(subcommand)]
    pub command: Option<Commands>,
}

52#[derive(Subcommand)]
53pub enum Commands {
54    /// Start interactive TUI (default)
55    Tui,
56
57    /// Interactive setup wizard - creates graphrag.toml with guided configuration
58    Setup {
59        /// Template to use: general, legal, medical, financial, technical
60        #[arg(short, long)]
61        template: Option<String>,
62
63        /// Output path for configuration file
64        #[arg(short, long, default_value = "./graphrag.toml")]
65        output: PathBuf,
66    },
67
68    /// Validate a configuration file (TOML or JSON5)
69    Validate {
70        /// Path to the configuration file to validate
71        config_file: PathBuf,
72    },
73
74    /// Initialize GraphRAG with configuration (deprecated: prefer TUI with /config)
75    Init {
76        /// Configuration file path
77        config: PathBuf,
78    },
79
80    /// Load a document into the knowledge graph (deprecated: prefer TUI with /load)
81    Load {
82        /// Document file path
83        document: PathBuf,
84
85        /// Configuration file (required if not already initialized)
86        #[arg(short, long)]
87        config: Option<PathBuf>,
88    },
89
90    /// Execute a query (deprecated: prefer TUI)
91    Query {
92        /// Query text
93        query: String,
94
95        /// Configuration file (required if not already initialized)
96        #[arg(short, long)]
97        config: Option<PathBuf>,
98    },
99
100    /// List entities in the knowledge graph (deprecated: prefer TUI with /entities)
101    Entities {
102        /// Filter by name or type
103        filter: Option<String>,
104
105        /// Configuration file
106        #[arg(short, long)]
107        config: Option<PathBuf>,
108    },
109
110    /// Configuration file
111    Stats {
112        /// Configuration file
113        #[arg(short, long)]
114        config: Option<PathBuf>,
115    },
116
117    /// Run full E2E benchmark (Init -> Load -> Query) in memory
118    Bench {
119        /// Configuration file
120        #[arg(short, long)]
121        config: PathBuf,
122
123        /// Book text file
124        #[arg(short, long)]
125        book: PathBuf,
126
127        /// Pipe-separated list of questions e.g. "Q1?|Q2?"
128        #[arg(short, long)]
129        questions: String,
130    },
131
132    /// Workspace management commands
133    Workspace {
134        #[command(subcommand)]
135        action: WorkspaceCommands,
136    },
137}
138
// Subcommands under `graphrag workspace`. As above, the `///` docs are the
// user-visible clap help text.
#[derive(Subcommand)]
pub enum WorkspaceCommands {
    /// List all workspaces
    List,

    /// Create a new workspace
    Create { name: String },

    /// Show workspace information
    Info { id: String },

    /// Delete a workspace
    Delete { id: String },
}

// ──────────────────────────────────────────────────────────────────────────────
// Public entry point
// ──────────────────────────────────────────────────────────────────────────────

/// Run the full GraphRAG CLI. Called by both the `graphrag-cli` binary and
/// the `graphrag` meta-crate binary.
///
/// Installs the terminal-restoring panic hook, parses the command line, then
/// dispatches to the selected subcommand (defaulting to the interactive TUI
/// when none is given). Errors bubble up as `color_eyre` reports.
pub async fn run() -> Result<()> {
    // Install the panic hook first so a panic anywhere below (including
    // inside the TUI, with the terminal in raw mode) still restores the screen.
    install_panic_hook();

    let cli = Cli::parse();

    color_eyre::install()?;

    match cli.command {
        // Default path: interactive TUI (logging goes to a file, not stderr).
        Some(Commands::Tui) | None => {
            run_tui(cli.config, cli.workspace).await?;
        },
        Some(Commands::Setup { template, output }) => {
            run_setup_wizard(template, output).await?;
        },
        Some(Commands::Validate { config_file }) => {
            setup_logging(cli.debug)?;
            run_validate(&config_file, &cli.format)?;
        },
        // The deprecated one-shot commands below all follow the same shape:
        // stderr deprecation notice, fresh handler, initialize from config,
        // perform one operation, print result as text or JSON per --format.
        Some(Commands::Init { config }) => {
            setup_logging(cli.debug)?;
            eprintln!(
                "⚠️  `init` is deprecated. Prefer: graphrag tui --config {}",
                config.display()
            );

            let handler = handlers::graphrag::GraphRAGHandler::new();
            let cfg = load_config_from_file(&config).await?;
            handler.initialize(cfg).await?;

            if cli.format == "json" {
                println!(
                    "{}",
                    serde_json::json!({"status": "initialized", "config": config.display().to_string()})
                );
            } else {
                println!("✅ GraphRAG initialized with config: {}", config.display());
            }
        },
        Some(Commands::Load { document, config }) => {
            setup_logging(cli.debug)?;
            eprintln!(
                "⚠️  `load` is deprecated. Prefer: graphrag tui, then /load {}",
                document.display()
            );

            let handler = handlers::graphrag::GraphRAGHandler::new();
            // Without --config, fall back to ./graphrag.toml in the CWD.
            let config_path = config.unwrap_or_else(|| PathBuf::from("./graphrag.toml"));
            let cfg = load_config_from_file(&config_path).await?;
            handler.initialize(cfg).await?;
            let result = handler.load_document_with_options(&document, false).await?;

            if cli.format == "json" {
                println!(
                    "{}",
                    serde_json::json!({"status": "loaded", "document": document.display().to_string(), "details": result})
                );
            } else {
                println!("✅ {}", result);
            }
        },
        Some(Commands::Query { query, config }) => {
            setup_logging(cli.debug)?;
            eprintln!(
                "⚠️  `query` is deprecated. Prefer: graphrag tui, then /query {}",
                query
            );

            let handler = handlers::graphrag::GraphRAGHandler::new();
            let config_path = config.unwrap_or_else(|| PathBuf::from("./graphrag.toml"));
            let cfg = load_config_from_file(&config_path).await?;
            handler.initialize(cfg).await?;

            // Raw results accompany the answer so JSON consumers get sources.
            let (answer, raw_results) = handler.query_with_raw(&query).await?;

            if cli.format == "json" {
                println!(
                    "{}",
                    serde_json::json!({"query": query, "answer": answer, "sources": raw_results})
                );
            } else {
                println!("📝 Query: {}\n", query);
                println!("💡 Answer:\n{}\n", answer);
                if !raw_results.is_empty() {
                    println!("📚 Sources:");
                    for (i, src) in raw_results.iter().enumerate() {
                        println!("   {}. {}", i + 1, src);
                    }
                }
            }
        },
        Some(Commands::Entities { filter, config }) => {
            setup_logging(cli.debug)?;
            eprintln!("⚠️  `entities` is deprecated. Prefer: graphrag tui, then /entities");

            let handler = handlers::graphrag::GraphRAGHandler::new();
            let config_path = config.unwrap_or_else(|| PathBuf::from("./graphrag.toml"));
            let cfg = load_config_from_file(&config_path).await?;
            handler.initialize(cfg).await?;
            let entities = handler.get_entities(filter.as_deref()).await?;

            if cli.format == "json" {
                let json_entities: Vec<serde_json::Value> = entities
                    .iter()
                    .map(|e| serde_json::json!({"name": e.name, "type": e.entity_type}))
                    .collect();
                println!(
                    "{}",
                    serde_json::json!({"entities": json_entities, "count": entities.len()})
                );
            } else {
                println!("📊 Entities ({} found):\n", entities.len());
                for entity in &entities {
                    println!("   • {} [{}]", entity.name, entity.entity_type);
                }
            }
        },
        Some(Commands::Stats { config }) => {
            setup_logging(cli.debug)?;
            eprintln!("⚠️  `stats` is deprecated. Prefer: graphrag tui, then /stats");

            let handler = handlers::graphrag::GraphRAGHandler::new();
            let config_path = config.unwrap_or_else(|| PathBuf::from("./graphrag.toml"));
            let cfg = load_config_from_file(&config_path).await?;
            handler.initialize(cfg).await?;

            if let Some(stats) = handler.get_stats().await {
                if cli.format == "json" {
                    println!(
                        "{}",
                        serde_json::json!({
                            "entities": stats.entities,
                            "relationships": stats.relationships,
                            "documents": stats.documents,
                            "chunks": stats.chunks,
                        })
                    );
                } else {
                    println!("📊 Knowledge Graph Statistics:");
                    println!("   Entities:      {}", stats.entities);
                    println!("   Relationships: {}", stats.relationships);
                    println!("   Documents:     {}", stats.documents);
                    println!("   Chunks:        {}", stats.chunks);
                }
            } else if cli.format == "json" {
                println!(
                    "{}",
                    serde_json::json!({"error": "No knowledge graph built yet"})
                );
            } else {
                println!("⚠️  No knowledge graph built yet. Load documents first.");
            }
        },
        Some(Commands::Bench {
            config,
            book,
            questions,
        }) => {
            if !cli.debug {
                // NOTE(review): `setup_logging` constructs an explicit EnvFilter
                // and does not read RUST_LOG, so this env var only affects other
                // readers of it (if any) — confirm it is still needed.
                std::env::set_var("RUST_LOG", "error");
            }
            setup_logging(cli.debug)?;

            // Questions arrive as a single pipe-separated string: "Q1?|Q2?".
            let q_vec: Vec<String> = questions.split('|').map(|s| s.to_string()).collect();
            handlers::bench::run_benchmark(&config, &book, q_vec).await?;
        },
        Some(Commands::Workspace { action }) => {
            setup_logging(cli.debug)?;
            handle_workspace_commands(action).await?;
        },
    }

    Ok(())
}

// ──────────────────────────────────────────────────────────────────────────────
// Internal helpers
// ──────────────────────────────────────────────────────────────────────────────

/// Load and parse a GraphRAG configuration file, delegating to the
/// crate-local `config` module. Thin wrapper shared by the deprecated
/// one-shot subcommands in [`run`].
async fn load_config_from_file(path: &std::path::Path) -> Result<graphrag_core::Config> {
    config::load_config(path).await
}

342fn run_validate(config_file: &std::path::Path, format: &str) -> Result<()> {
343    use graphrag_core::config::json5_loader::{detect_config_format, ConfigFormat};
344    use graphrag_core::config::setconfig::SetConfig;
345
346    if !config_file.exists() {
347        if format == "json" {
348            println!(
349                "{}",
350                serde_json::json!({"valid": false, "error": format!("File not found: {}", config_file.display())})
351            );
352        } else {
353            println!("❌ File not found: {}", config_file.display());
354        }
355        return Ok(());
356    }
357
358    let fmt = match detect_config_format(config_file) {
359        Some(f) => f,
360        None => {
361            if format == "json" {
362                println!(
363                    "{}",
364                    serde_json::json!({"valid": false, "error": "Unsupported file format. Use .toml, .json, or .json5"})
365                );
366            } else {
367                println!("❌ Unsupported file format. Use .toml, .json, or .json5");
368            }
369            return Ok(());
370        },
371    };
372
373    let content = std::fs::read_to_string(config_file)
374        .map_err(|e| color_eyre::eyre::eyre!("Cannot read file: {}", e))?;
375
376    let result: std::result::Result<SetConfig, String> = match fmt {
377        ConfigFormat::Toml => toml::from_str(&content).map_err(|e| format!("{}", e)),
378        ConfigFormat::Json => serde_json::from_str(&content).map_err(|e| format!("{}", e)),
379        ConfigFormat::Json5 => {
380            #[cfg(feature = "json5-support")]
381            {
382                json5::from_str(&content).map_err(|e| format!("{}", e))
383            }
384            #[cfg(not(feature = "json5-support"))]
385            {
386                Err("JSON5 support not enabled".to_string())
387            }
388        },
389        ConfigFormat::Yaml => Err("YAML support not enabled".to_string()),
390    };
391
392    match result {
393        Ok(set_config) => {
394            let config = set_config.to_graphrag_config();
395            if format == "json" {
396                println!(
397                    "{}",
398                    serde_json::json!({
399                        "valid": true,
400                        "format": format!("{:?}", fmt),
401                        "approach": set_config.mode.approach,
402                        "ollama_enabled": config.ollama.enabled,
403                        "chunk_size": config.chunk_size,
404                    })
405                );
406            } else {
407                println!("✅ Configuration is valid!");
408                println!("   Format:    {:?}", fmt);
409                println!("   Approach:  {}", set_config.mode.approach);
410                println!(
411                    "   Ollama:    {}",
412                    if config.ollama.enabled { "enabled" } else { "disabled" }
413                );
414                println!("   Chunk size: {}", config.chunk_size);
415            }
416        },
417        Err(err) => {
418            if format == "json" {
419                println!("{}", serde_json::json!({"valid": false, "error": err}));
420            } else {
421                println!("❌ Invalid configuration:\n   {}", err);
422            }
423        },
424    }
425
426    Ok(())
427}
428
429async fn run_tui(config_path: Option<PathBuf>, workspace: Option<String>) -> Result<()> {
430    setup_tui_logging()?;
431    let mut app = App::new(config_path, workspace)?;
432    app.run().await?;
433    Ok(())
434}
435
436async fn handle_workspace_commands(action: WorkspaceCommands) -> Result<()> {
437    let workspace_manager = workspace::WorkspaceManager::new()?;
438
439    match action {
440        WorkspaceCommands::List => {
441            let workspaces = workspace_manager.list_workspaces().await?;
442
443            if workspaces.is_empty() {
444                println!("No workspaces found.");
445                println!("\nCreate a workspace with: graphrag workspace create <name>");
446            } else {
447                println!("Available workspaces:\n");
448                for ws in workspaces {
449                    println!("  📁 {} ({})", ws.name, ws.id);
450                    println!("     Created: {}", ws.created_at.format("%Y-%m-%d %H:%M:%S"));
451                    println!(
452                        "     Last accessed: {}",
453                        ws.last_accessed.format("%Y-%m-%d %H:%M:%S")
454                    );
455                    if let Some(ref cfg) = ws.config_path {
456                        println!("     Config: {}", cfg.display());
457                    }
458                    println!();
459                }
460            }
461        },
462        WorkspaceCommands::Create { name } => {
463            let workspace = workspace_manager.create_workspace(name.clone()).await?;
464            println!("✅ Workspace created successfully!");
465            println!("   Name: {}", workspace.name);
466            println!("   ID:   {}", workspace.id);
467            println!("\nUse it with: graphrag tui --workspace {}", workspace.id);
468        },
469        WorkspaceCommands::Info { id } => {
470            match workspace_manager.load_metadata(&id).await {
471                Ok(workspace) => {
472                    println!("Workspace Information:\n");
473                    println!("  Name: {}", workspace.name);
474                    println!("  ID:   {}", workspace.id);
475                    println!(
476                        "  Created: {}",
477                        workspace.created_at.format("%Y-%m-%d %H:%M:%S")
478                    );
479                    println!(
480                        "  Last accessed: {}",
481                        workspace.last_accessed.format("%Y-%m-%d %H:%M:%S")
482                    );
483                    if let Some(ref cfg) = workspace.config_path {
484                        println!("  Config: {}", cfg.display());
485                    }
486
487                    let history_path = workspace_manager.query_history_path(&id);
488                    if history_path.exists() {
489                        if let Ok(history) =
490                            query_history::QueryHistory::load(&history_path).await
491                        {
492                            println!("\n  Total queries: {}", history.total_queries());
493                        }
494                    }
495                },
496                Err(e) => {
497                    eprintln!("❌ Error loading workspace: {}", e);
498                    eprintln!("\nList available workspaces with: graphrag workspace list");
499                },
500            }
501        },
502        WorkspaceCommands::Delete { id } => {
503            workspace_manager.delete_workspace(&id).await?;
504            println!("✅ Workspace deleted: {}", id);
505        },
506    }
507
508    Ok(())
509}
510
/// Interactive `graphrag setup` wizard.
///
/// Prompts for a use-case template (unless `template` was given on the
/// command line), LLM/Ollama settings, and an output directory, then writes
/// a generated `graphrag.toml` to `output` (asking before overwriting).
async fn run_setup_wizard(template: Option<String>, output: PathBuf) -> Result<()> {
    use dialoguer::{theme::ColorfulTheme, Confirm, Input, Select};
    use std::fs;

    let theme = ColorfulTheme::default();

    println!(
        "\n{}",
        "╔════════════════════════════════════════════════════════════╗\n\
         ║           GraphRAG Configuration Setup Wizard              ║\n\
         ╚════════════════════════════════════════════════════════════╝"
    );
    println!();

    // A --template flag skips the use-case prompt entirely. Note the flag
    // value is not validated here; unknown names fall through to the
    // "general" defaults inside `generate_config`.
    let use_case = if let Some(ref t) = template {
        t.clone()
    } else {
        let options = vec![
            "General purpose - Mixed documents, articles (Recommended)",
            "Legal documents - Contracts, agreements, regulations",
            "Medical documents - Clinical notes, patient records",
            "Financial documents - Reports, SEC filings, analysis",
            "Technical documentation - API docs, code documentation",
        ];

        let selection = Select::with_theme(&theme)
            .with_prompt("Select your use case")
            .items(&options)
            .default(0)
            .interact()?;

        // Map the menu index back to the template keyword.
        match selection {
            0 => "general",
            1 => "legal",
            2 => "medical",
            3 => "financial",
            4 => "technical",
            _ => "general",
        }
        .to_string()
    };

    println!("\n   Selected template: {}\n", use_case);

    let llm_options = vec![
        "Local Ollama (Recommended - free, private, runs locally)",
        "No LLM (Pattern-based extraction only, faster but less accurate)",
    ];

    let llm_selection = Select::with_theme(&theme)
        .with_prompt("Select LLM provider")
        .items(&llm_options)
        .default(0)
        .interact()?;

    let ollama_enabled = llm_selection == 0;

    // Defaults used when Ollama is disabled (they still appear in the
    // generated file's [ollama] section).
    let mut ollama_host = "localhost".to_string();
    let mut ollama_port: u16 = 11434;
    let mut chat_model = "llama3.2:3b".to_string();

    if ollama_enabled {
        println!("\n   Ollama Configuration:");

        ollama_host = Input::with_theme(&theme)
            .with_prompt("   Ollama host")
            .default("localhost".to_string())
            .interact_text()?;

        let port_str: String = Input::with_theme(&theme)
            .with_prompt("   Ollama port")
            .default("11434".to_string())
            .interact_text()?;

        // Unparseable port input silently falls back to the default 11434.
        ollama_port = port_str.parse().unwrap_or(11434);

        chat_model = Input::with_theme(&theme)
            .with_prompt("   Chat model")
            .default("llama3.2:3b".to_string())
            .interact_text()?;
    }

    let output_dir: String = Input::with_theme(&theme)
        .with_prompt("Output directory for graph data")
        .default("./graphrag-output".to_string())
        .interact_text()?;

    println!("\n   Generating configuration...\n");

    let config_content = generate_config(
        &use_case,
        ollama_enabled,
        &ollama_host,
        ollama_port,
        &chat_model,
        &output_dir,
    );

    // Never clobber an existing file without explicit confirmation.
    if output.exists() {
        let overwrite = Confirm::with_theme(&theme)
            .with_prompt(format!(
                "File {} already exists. Overwrite?",
                output.display()
            ))
            .default(false)
            .interact()?;

        if !overwrite {
            println!("\n   Setup cancelled.");
            return Ok(());
        }
    }

    fs::write(&output, config_content)?;

    println!("   ✅ Configuration saved to: {}\n", output.display());
    println!("╔════════════════════════════════════════════════════════════╗");
    println!("║                     Next Steps                             ║");
    println!("╠════════════════════════════════════════════════════════════╣");
    println!("║  1. Start the TUI:                                         ║");
    println!("║     graphrag tui --config {}                         ║", output.display());
    println!("║                                                            ║");
    println!("║  2. Load a document in the TUI:                            ║");
    println!("║     /load path/to/your/document.txt                        ║");
    println!("║                                                            ║");
    println!("║  3. Query your knowledge graph:                            ║");
    println!("║     Type your question and press Enter                     ║");
    println!("╚════════════════════════════════════════════════════════════╝");

    if ollama_enabled {
        println!(
            "\n   💡 Tip: Make sure Ollama is running at {}:{}",
            ollama_host, ollama_port
        );
        println!("      Start it with: ollama serve");
        println!("      Pull model with: ollama pull {}", chat_model);
    }

    Ok(())
}

/// Render a complete `graphrag.toml` for the chosen template.
///
/// Template-specific knobs (entity vocabulary, extraction approach, chunk
/// size, gleaning) are selected here; everything else in the emitted file is
/// a fixed default. Chunk overlap is always one fifth of the chunk size, and
/// unknown `use_case` values fall back to the general-purpose settings.
fn generate_config(
    use_case: &str,
    ollama_enabled: bool,
    ollama_host: &str,
    ollama_port: u16,
    chat_model: &str,
    output_dir: &str,
) -> String {
    // Per-template entity vocabulary, expressed as a TOML array literal.
    let type_list = match use_case {
        "legal" => {
            r#"["PARTY", "PERSON", "ORGANIZATION", "DATE", "MONETARY_VALUE", "JURISDICTION", "CLAUSE_TYPE", "OBLIGATION"]"#
        },
        "medical" => {
            r#"["PATIENT", "DIAGNOSIS", "MEDICATION", "PROCEDURE", "SYMPTOM", "LAB_VALUE", "PROVIDER", "DATE"]"#
        },
        "financial" => {
            r#"["COMPANY", "TICKER", "PERSON", "MONETARY_VALUE", "PERCENTAGE", "DATE", "METRIC", "INDUSTRY"]"#
        },
        "technical" => {
            r#"["FUNCTION", "CLASS", "MODULE", "API_ENDPOINT", "PARAMETER", "VERSION", "DEPENDENCY"]"#
        },
        _ => r#"["PERSON", "ORGANIZATION", "LOCATION", "DATE", "EVENT"]"#,
    };

    let extraction_approach = match use_case {
        "legal" | "medical" => "semantic",
        "technical" => "algorithmic",
        _ => "hybrid",
    };

    let tokens_per_chunk = match use_case {
        "legal" => 500,
        "medical" => 750,
        "technical" => 600,
        "financial" => 1200,
        _ => 1000,
    };
    let overlap_tokens = tokens_per_chunk / 5;

    // Multi-round gleaning only applies when an LLM is available, and only
    // for the high-precision domains.
    let gleaning = match use_case {
        "legal" | "medical" | "financial" => ollama_enabled,
        _ => false,
    };

    format!(
        r#"# GraphRAG Configuration
# Generated by: graphrag setup
# Template: {use_case}
# ===================================================

output_dir = "{output_dir}"
approach = "{approach}"

# Text chunking settings
chunk_size = {chunk_size}
chunk_overlap = {overlap}

# Retrieval settings
top_k_results = 10
similarity_threshold = 0.7

[embeddings]
backend = "{embedding_backend}"
dimension = 384
fallback_to_hash = true
batch_size = 32

[entities]
min_confidence = 0.7
entity_types = {entity_types}
use_gleaning = {use_gleaning}
max_gleaning_rounds = 3

[graph]
max_connections = 10
similarity_threshold = 0.8
extract_relationships = true
relationship_confidence_threshold = 0.5

[graph.traversal]
max_depth = 3
max_paths = 10
use_edge_weights = true
min_relationship_strength = 0.3

[retrieval]
top_k = 10
search_algorithm = "cosine"

[parallel]
enabled = true
num_threads = 0
min_batch_size = 10

[ollama]
enabled = {ollama_enabled}
host = "{ollama_host}"
port = {ollama_port}
chat_model = "{chat_model}"
embedding_model = "nomic-embed-text"
timeout_seconds = 30
enable_caching = true

[auto_save]
enabled = false
interval_seconds = 300
max_versions = 5
"#,
        use_case = use_case,
        output_dir = output_dir,
        approach = extraction_approach,
        chunk_size = tokens_per_chunk,
        overlap = overlap_tokens,
        embedding_backend = if ollama_enabled { "ollama" } else { "hash" },
        entity_types = type_list,
        use_gleaning = gleaning,
        ollama_enabled = ollama_enabled,
        ollama_host = ollama_host,
        ollama_port = ollama_port,
        chat_model = chat_model,
    )
}

771/// Restore the terminal on panic (called at the top of [`run`]).
772pub fn install_panic_hook() {
773    let original_hook = std::panic::take_hook();
774    std::panic::set_hook(Box::new(move |panic_info| {
775        let _ = crossterm::execute!(std::io::stderr(), crossterm::terminal::LeaveAlternateScreen);
776        let _ = crossterm::terminal::disable_raw_mode();
777        original_hook(panic_info);
778    }));
779}
780
781fn setup_logging(debug: bool) -> Result<()> {
782    use tracing_subscriber::EnvFilter;
783
784    let filter = if debug {
785        EnvFilter::new("graphrag_cli=debug,graphrag_core=debug")
786    } else {
787        EnvFilter::new("graphrag_cli=info,graphrag_core=info")
788    };
789
790    tracing_subscriber::fmt()
791        .with_env_filter(filter)
792        .with_writer(std::io::stderr)
793        .with_target(false)
794        .with_file(true)
795        .with_line_number(true)
796        .init();
797
798    Ok(())
799}
800
/// Configure file-based logging for TUI mode.
///
/// The TUI owns the terminal, so logs go to
/// `<data_local_dir>/graphrag-cli/logs/graphrag-cli.log` (appending across
/// sessions) at warn level, without ANSI color codes.
fn setup_tui_logging() -> Result<()> {
    use std::fs::OpenOptions;
    use std::sync::Arc;
    use tracing_subscriber::EnvFilter;

    // Fall back to the current directory when no platform data dir exists.
    let log_dir = dirs::data_local_dir()
        .unwrap_or_else(|| PathBuf::from("."))
        .join("graphrag-cli")
        .join("logs");

    std::fs::create_dir_all(&log_dir)?;

    let log_file = log_dir.join("graphrag-cli.log");
    // Append so earlier sessions' logs are preserved.
    let file = OpenOptions::new().create(true).append(true).open(log_file)?;

    let filter = EnvFilter::new("graphrag_cli=warn,graphrag_core=warn");

    tracing_subscriber::fmt()
        .with_env_filter(filter)
        .with_writer(Arc::new(file))
        .with_target(false)
        .with_file(false)
        .with_line_number(false)
        // Plain text: the log is read from a file, not a color terminal.
        .with_ansi(false)
        .init();

    Ok(())
}