1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
//! Context command - Build LLM context
//!
//! Generates token-efficient LLM context from an entry point.
//! Auto-routes through daemon when available for ~35x speedup.
use std::path::PathBuf;
use anyhow::Result;
use clap::Args;
use tldr_core::types::RelevantContext;
use tldr_core::{get_relevant_context, Language};
use crate::commands::daemon_router::{params_with_entry_depth, try_daemon_route};
use crate::output::{OutputFormat, OutputWriter};
/// Build LLM-ready context from entry point
// NOTE: the `///` comments on each field below are rendered by clap as the
// CLI `--help` text — they are runtime-visible behavior, not just docs.
#[derive(Debug, Args)]
pub struct ContextArgs {
/// Entry point function name
pub entry: String,
/// Project root directory (default: current directory)
#[arg(long, short = 'p', default_value = ".")]
pub project: PathBuf,
/// Programming language
// Optional: when absent, `run` auto-detects from the project directory
// and falls back to Python (see `ContextArgs::run`).
#[arg(long, short = 'l')]
pub lang: Option<Language>,
/// Maximum traversal depth
// Parsed from the string default "3" by clap's value parser.
#[arg(long, short = 'd', default_value = "3")]
pub depth: usize,
/// Include function docstrings
#[arg(long)]
pub include_docstrings: bool,
/// Filter to functions in this file (for disambiguating common names like "render")
#[arg(long)]
pub file: Option<PathBuf>,
}
impl ContextArgs {
/// Run the context command
pub fn run(&self, format: OutputFormat, quiet: bool) -> Result<()> {
let writer = OutputWriter::new(format, quiet);
// Determine language (auto-detect from directory, default to Python)
let language = self
.lang
.unwrap_or_else(|| Language::from_directory(&self.project).unwrap_or(Language::Python));
// Try daemon first for cached result
if let Some(context) = try_daemon_route::<RelevantContext>(
&self.project,
"context",
params_with_entry_depth(&self.entry, Some(self.depth)),
) {
// Output based on format
if writer.is_text() {
// Use the built-in LLM string format
let text = context.to_llm_string();
writer.write_text(&text)?;
return Ok(());
} else {
writer.write(&context)?;
return Ok(());
}
}
// Fallback to direct compute
writer.progress(&format!(
"Building context for {} (depth={})...",
self.entry, self.depth
));
// Get relevant context
let context = get_relevant_context(
&self.project,
&self.entry,
self.depth,
language,
self.include_docstrings,
self.file.as_deref(),
)?;
// Output based on format
if writer.is_text() {
// Use the built-in LLM string format
let text = context.to_llm_string();
writer.write_text(&text)?;
} else {
writer.write(&context)?;
}
Ok(())
}
}