Skip to main content

tldr_cli/commands/
context.rs

//! Context command - Build LLM context
//!
//! Generates token-efficient LLM context from an entry point.
//! Auto-routes through daemon when available for ~35x speedup.

6use std::path::PathBuf;
7
8use anyhow::Result;
9use clap::Args;
10
11use tldr_core::types::RelevantContext;
12use tldr_core::{get_relevant_context, Language};
13
14use crate::commands::daemon_router::{params_with_entry_depth, try_daemon_route};
15use crate::output::{OutputFormat, OutputWriter};
16
/// Build LLM-ready context from entry point
// Parsed by clap's derive macro; the `///` comments double as `--help` text,
// so they are left exactly as written.
#[derive(Debug, Args)]
pub struct ContextArgs {
    /// Entry point function name
    pub entry: String,

    /// Project root directory (default: current directory)
    #[arg(long, short = 'p', default_value = ".")]
    pub project: PathBuf,

    /// Programming language
    // When omitted, `run` auto-detects from the project directory and falls
    // back to Python.
    #[arg(long, short = 'l')]
    pub lang: Option<Language>,

    /// Maximum traversal depth
    #[arg(long, short = 'd', default_value = "3")]
    pub depth: usize,

    /// Include function docstrings
    #[arg(long)]
    pub include_docstrings: bool,

    /// Filter to functions in this file (for disambiguating common names like "render")
    #[arg(long)]
    pub file: Option<PathBuf>,
}
43
44impl ContextArgs {
45    /// Run the context command
46    pub fn run(&self, format: OutputFormat, quiet: bool) -> Result<()> {
47        let writer = OutputWriter::new(format, quiet);
48
49        // Determine language (auto-detect from directory, default to Python)
50        let language = self
51            .lang
52            .unwrap_or_else(|| Language::from_directory(&self.project).unwrap_or(Language::Python));
53
54        // Try daemon first for cached result
55        if let Some(context) = try_daemon_route::<RelevantContext>(
56            &self.project,
57            "context",
58            params_with_entry_depth(&self.entry, Some(self.depth)),
59        ) {
60            // Output based on format
61            if writer.is_text() {
62                // Use the built-in LLM string format
63                let text = context.to_llm_string();
64                writer.write_text(&text)?;
65                return Ok(());
66            } else {
67                writer.write(&context)?;
68                return Ok(());
69            }
70        }
71
72        // Fallback to direct compute
73        writer.progress(&format!(
74            "Building context for {} (depth={})...",
75            self.entry, self.depth
76        ));
77
78        // Get relevant context
79        let context = get_relevant_context(
80            &self.project,
81            &self.entry,
82            self.depth,
83            language,
84            self.include_docstrings,
85            self.file.as_deref(),
86        )?;
87
88        // Output based on format
89        if writer.is_text() {
90            // Use the built-in LLM string format
91            let text = context.to_llm_string();
92            writer.write_text(&text)?;
93        } else {
94            writer.write(&context)?;
95        }
96
97        Ok(())
98    }
99}