// st/ai_output.rs
// AI Output Discipline Module - Omni's Efficiency Manifesto Implementation
// When AI_TOOLS=1, all non-JSON output goes to stderr to keep stdout clean
// This enables perfect JSON parsing for AI consumers

/// True when Smart Tree should treat its consumer as an AI client.
///
/// Any of `AI_TOOLS`, `MCP_MODE`, or strict mode (`ST_AI_STRICT`) being set
/// in the environment flips output discipline to "JSON on stdout, everything
/// else on stderr". The environment is re-read on every call.
pub fn is_ai_mode() -> bool {
    ["AI_TOOLS", "MCP_MODE"]
        .iter()
        .any(|name| std::env::var(name).is_ok())
        || is_strict_mode()
}

/// True when `ST_AI_STRICT` is set: JSON-only output, nothing extra.
pub fn is_strict_mode() -> bool {
    std::env::var("ST_AI_STRICT").is_ok()
}
14
/// Print to stdout or stderr based on AI mode
///
/// In AI mode every non-JSON byte is diverted to stderr so stdout stays
/// machine-parseable; otherwise this behaves exactly like `print!`.
/// `is_ai_mode()` reads the environment, so routing is decided per call.
#[macro_export]
macro_rules! ai_print {
    ($($arg:tt)*) => {
        if $crate::ai_output::is_ai_mode() {
            eprint!($($arg)*);
        } else {
            print!($($arg)*);
        }
    };
}
26
/// Println to stdout or stderr based on AI mode
///
/// Mirrors `ai_print!` but appends a newline; the zero-argument arm
/// matches `println!()` so a bare newline routes the same way.
#[macro_export]
macro_rules! ai_println {
    // No arguments: emit just a newline on the mode-appropriate stream.
    () => {
        if $crate::ai_output::is_ai_mode() {
            eprintln!();
        } else {
            println!();
        }
    };
    // Formatted output, same stream-routing rule.
    ($($arg:tt)*) => {
        if $crate::ai_output::is_ai_mode() {
            eprintln!($($arg)*);
        } else {
            println!($($arg)*);
        }
    };
}
45
/// Configuration for AI-optimized output
///
/// Defaults are environment-dependent: the `Default` impl selects
/// AI-friendly settings when `is_ai_mode()` is true.
#[derive(Debug, Clone)]
pub struct AiOutputConfig {
    pub mode: String,             // output format name, e.g. "summary-ai" or "classic"
    pub compress: bool,           // whether payloads should be compressed
    pub no_emoji: bool,           // suppress emoji in output when true
    pub path_mode: String,        // path rendering, e.g. "relative" or "off"
    pub deterministic_sort: bool, // stable ordering for reproducible output
    pub include_digest: bool,     // attach a content digest to responses
    pub max_depth: Option<usize>, // traversal depth cap; `None` = unlimited
}
57
impl Default for AiOutputConfig {
    /// Environment-sensitive defaults: deterministic, digest-bearing summary
    /// output for AI consumers; the classic interactive view for humans.
    fn default() -> Self {
        if is_ai_mode() {
            // Omni's recommended defaults for AI consumption
            Self {
                mode: "summary-ai".to_string(),
                compress: false, // No compression by default - only after probe success
                no_emoji: is_strict_mode(), // Strict mode suppresses emoji; relaxed AI mode keeps it
                path_mode: "relative".to_string(),
                deterministic_sort: true, // stable ordering so repeated calls diff cleanly
                include_digest: true,
                max_depth: Some(5), // Reasonable default for overview
            }
        } else {
            // Human-friendly defaults
            Self {
                mode: "classic".to_string(),
                compress: false,
                no_emoji: false,
                path_mode: "off".to_string(),
                deterministic_sort: false,
                include_digest: false,
                max_depth: None,
            }
        }
    }
}
85
86/// Generate a cache key for a given path and configuration
87/// This enables AI clients to short-circuit repeated calls
88pub fn generate_cache_key(path: &str, config: &AiOutputConfig) -> String {
89    use std::collections::hash_map::DefaultHasher;
90    use std::hash::{Hash, Hasher};
91
92    let mut hasher = DefaultHasher::new();
93    path.hash(&mut hasher);
94    format!("{:?}", config).hash(&mut hasher);
95
96    let hash = hasher.finish();
97    format!("st_cache_{:016x}", hash)
98}
99
/// Standardized error response for AI self-correction
///
/// Serialized to JSON so an AI client can read the `hint`/`example`
/// and retry with corrected arguments.
#[derive(serde::Serialize)]
pub struct AiError {
    pub code: String,                 // stable machine code, e.g. "INVALID_ARG"
    pub message: String,              // description of what went wrong
    pub classification: ErrorClass,   // coarse category matching `code`
    pub hint: String,                 // actionable advice for fixing the call
    pub example: Option<String>,      // optional corrected-request example
    pub expected: Option<String>,     // optional expected value/format
}

/// Coarse error categories mirrored by `AiError::code`.
#[derive(serde::Serialize, Debug)]
pub enum ErrorClass {
    InvalidArg, // built via `AiError::invalid_arg`
    Security,   // built via `AiError::security`
    Resource,
    Timeout,
    TooLarge, // built via `AiError::too_large`
    Paginate,
}
120
121impl AiError {
122    pub fn invalid_arg(message: &str, hint: &str, example: Option<&str>) -> Self {
123        Self {
124            code: "INVALID_ARG".to_string(),
125            message: message.to_string(),
126            classification: ErrorClass::InvalidArg,
127            hint: hint.to_string(),
128            example: example.map(String::from),
129            expected: None,
130        }
131    }
132
133    pub fn security(message: &str, hint: &str) -> Self {
134        Self {
135            code: "SECURITY".to_string(),
136            message: message.to_string(),
137            classification: ErrorClass::Security,
138            hint: hint.to_string(),
139            example: None,
140            expected: None,
141        }
142    }
143
144    pub fn too_large(message: &str, hint: &str) -> Self {
145        Self {
146            code: "TOO_LARGE".to_string(),
147            message: message.to_string(),
148            classification: ErrorClass::TooLarge,
149            hint: format!("Use pagination: {}", hint),
150            example: Some("add 'limit: 100, cursor: \"next_page\"' to your request".to_string()),
151            expected: None,
152        }
153    }
154}
155
/// Response wrapper with usage stats and next best calls
///
/// Envelope returned to AI clients: primary payload plus cache key,
/// optional digest, scan statistics, and suggested follow-up calls.
#[derive(serde::Serialize)]
pub struct AiResponse<T> {
    pub data: T,                        // the primary payload
    pub cache_key: String,              // from `generate_cache_key`
    pub digest: Option<String>,         // set via `with_digest` when available
    pub usage: Usage,                   // scan statistics
    pub next_best_calls: Vec<NextCall>, // suggested follow-up invocations
    pub decorations: Option<Decorations>, // visual hints, kept out of `data`
    pub watermark: Option<Watermark>,   // session/context metadata
}

/// Visual hints separated from primary data
#[derive(serde::Serialize, Debug, Clone)]
pub struct Decorations {
    pub emoji_hint: Option<String>, // suggested emoji decoration
    pub color: Option<String>,      // suggested color
    pub intensity: Option<u8>,      // suggested intensity level
}

/// Context watermark for AI clients
#[derive(serde::Serialize, Debug, Clone)]
pub struct Watermark {
    pub mode: ModeInfo,               // active output-mode flags
    pub compression: CompressionInfo, // compression negotiation state
    pub lane: Option<String>,
    pub next_lanes: Vec<String>,
    pub dir_digest: Option<String>,
    pub args_fingerprint: Option<String>,
}

/// Snapshot of the output-mode flags (see `is_ai_mode`/`is_strict_mode`).
#[derive(serde::Serialize, Debug, Clone)]
pub struct ModeInfo {
    pub strict: bool,   // ST_AI_STRICT set
    pub ai_tools: bool, // AI_TOOLS/MCP_MODE set
    pub emoji: bool,    // emoji enabled in output
}

/// Compression negotiation state for the session.
#[derive(serde::Serialize, Debug, Clone)]
pub struct CompressionInfo {
    pub default: bool,   // compressed by default
    pub supported: bool, // client proved it can decode
    pub active: bool,    // compression currently applied
}

/// Compression capability probe
///
/// The same token encoded three ways; the client echoes back which
/// encodings it could decode (see `CompressionCapabilities::from_echo`).
#[derive(serde::Serialize, Debug)]
pub struct CompressionProbe {
    pub plain: String,       // the token as-is
    pub base64: String,      // base64-encoded token
    pub zlib_base64: String, // zlib-compressed then base64-encoded token
    pub instructions: String, // tells the client how to acknowledge
}
209
impl Default for CompressionProbe {
    // The probe payload is constant, so `Default` just delegates to `new()`.
    fn default() -> Self {
        Self::new()
    }
}
215
216impl CompressionProbe {
217    pub fn new() -> Self {
218        Self {
219            plain: "PING".to_string(),
220            base64: "UElORw==".to_string(), // base64("PING")
221            zlib_base64: "eJwLy0xPVgIACR0DEQ==".to_string(), // base64(zlib("PING"))
222            instructions: "Call server_info again with echo: 'BASE64_OK' and/or 'ZLIB_OK' if you could decode.".to_string(),
223        }
224    }
225}
226
/// Session compression capabilities
///
/// Populated from the client's echo reply to a `CompressionProbe`;
/// `Default` yields an un-probed session with every capability off.
#[derive(Debug, Clone, Default)]
pub struct CompressionCapabilities {
    pub base64: bool, // client decoded the base64 probe
    pub zlib: bool,   // client decoded the zlib+base64 probe
    pub probed: bool, // a probe round-trip has completed
}
234
235impl CompressionCapabilities {
236    pub fn from_echo(echo: &str) -> Self {
237        Self {
238            base64: echo.contains("BASE64_OK"),
239            zlib: echo.contains("ZLIB_OK"),
240            probed: true,
241        }
242    }
243
244    pub fn should_compress(&self) -> bool {
245        // Only compress if explicitly supported and not disabled
246        if std::env::var("MCP_NO_COMPRESS").is_ok() {
247            return false;
248        }
249        self.probed && (self.base64 || self.zlib)
250    }
251}
252
/// Scan statistics attached to every `AiResponse`.
#[derive(serde::Serialize)]
pub struct Usage {
    pub file_count: usize,    // files visited during the scan
    pub bytes_scanned: usize, // total bytes read
    pub elapsed_ms: u64,      // wall-clock duration of the operation
}

/// A suggested follow-up tool invocation for the AI client.
#[derive(serde::Serialize)]
pub struct NextCall {
    pub tool: String,            // tool name to invoke
    pub args: serde_json::Value, // ready-to-send argument object
    pub tip: String,             // why this call is worth making
}
266
267impl<T> AiResponse<T> {
268    pub fn new(data: T, path: &str, config: &AiOutputConfig) -> Self {
269        Self {
270            data,
271            cache_key: generate_cache_key(path, config),
272            digest: None, // Set by caller if available
273            usage: Usage {
274                file_count: 0,
275                bytes_scanned: 0,
276                elapsed_ms: 0,
277            },
278            next_best_calls: vec![],
279            decorations: None,
280            watermark: None,
281        }
282    }
283
284    pub fn with_digest(mut self, digest: String) -> Self {
285        self.digest = Some(digest);
286        self
287    }
288
289    pub fn with_usage(mut self, file_count: usize, bytes_scanned: usize, elapsed_ms: u64) -> Self {
290        self.usage = Usage {
291            file_count,
292            bytes_scanned,
293            elapsed_ms,
294        };
295        self
296    }
297
298    pub fn suggest_next(mut self, tool: &str, args: serde_json::Value, tip: &str) -> Self {
299        self.next_best_calls.push(NextCall {
300            tool: tool.to_string(),
301            args,
302            tip: tip.to_string(),
303        });
304        self
305    }
306}
307
/// Pagination support for list-style operations
#[derive(Debug, Clone, serde::Deserialize)]
pub struct PaginationParams {
    pub limit: Option<usize>,   // max items per page; `None` = unlimited
    pub cursor: Option<String>, // opaque continuation token from a prior page
    pub fields: Option<Vec<String>>, // Field selector for token reduction
}
315
316impl Default for PaginationParams {
317    fn default() -> Self {
318        Self {
319            limit: if is_ai_mode() { Some(100) } else { None },
320            cursor: None,
321            fields: None,
322        }
323    }
324}
325
326/// Ensure all output follows Omni's discipline
327pub fn setup_ai_output() {
328    if is_ai_mode() {
329        // Ensure panic messages go to stderr
330        std::panic::set_hook(Box::new(|info| {
331            eprintln!("Smart Tree panic: {}", info);
332        }));
333
334        // Log that we're in AI mode (to stderr!)
335        eprintln!("# Smart Tree running in AI mode - JSON on stdout, logs on stderr");
336    }
337}