/// True when output should be tailored for AI consumers.
///
/// Any of the `AI_TOOLS`, `MCP_MODE`, or `ST_AI_STRICT` environment
/// variables being set (to any value) enables AI mode.
pub fn is_ai_mode() -> bool {
    ["AI_TOOLS", "MCP_MODE"]
        .iter()
        .any(|var| std::env::var(var).is_ok())
        || is_strict_mode()
}

/// True when strict AI mode is requested via the `ST_AI_STRICT`
/// environment variable. Strict mode implies AI mode (see [`is_ai_mode`]).
pub fn is_strict_mode() -> bool {
    std::env::var("ST_AI_STRICT").is_ok()
}
14
/// Prints without a trailing newline, routing to stderr when AI mode is
/// active so stdout stays reserved for machine-readable output, and to
/// stdout otherwise. Accepts the same format arguments as `print!`.
#[macro_export]
macro_rules! ai_print {
    ($($arg:tt)*) => {
        if $crate::ai_output::is_ai_mode() {
            eprint!($($arg)*);
        } else {
            print!($($arg)*);
        }
    };
}
26
/// Prints a line, routing to stderr when AI mode is active so stdout stays
/// reserved for machine-readable output, and to stdout otherwise.
/// Mirrors `println!`, including the zero-argument form for a bare newline.
#[macro_export]
macro_rules! ai_println {
    () => {
        if $crate::ai_output::is_ai_mode() {
            eprintln!();
        } else {
            println!();
        }
    };
    ($($arg:tt)*) => {
        if $crate::ai_output::is_ai_mode() {
            eprintln!($($arg)*);
        } else {
            println!($($arg)*);
        }
    };
}
45
/// Resolved output settings for a single invocation.
///
/// Defaults depend on the runtime mode: the `Default` impl keys off
/// `is_ai_mode()` / `is_strict_mode()`.
#[derive(Debug, Clone)]
pub struct AiOutputConfig {
    // Output format name: "summary-ai" in AI mode, "classic" otherwise.
    pub mode: String,
    // Transport compression flag; defaults to false in both modes.
    pub compress: bool,
    // Suppress emoji; defaults to strict-mode status in AI mode.
    pub no_emoji: bool,
    // Path rendering style: "relative" in AI mode, "off" otherwise.
    pub path_mode: String,
    // Sort entries deterministically (stable across runs); AI-mode default.
    pub deterministic_sort: bool,
    // Attach a content digest to responses; AI-mode default.
    pub include_digest: bool,
    // Traversal depth cap: Some(5) in AI mode, None in classic mode.
    pub max_depth: Option<usize>,
}
57
58impl Default for AiOutputConfig {
59 fn default() -> Self {
60 if is_ai_mode() {
61 Self {
63 mode: "summary-ai".to_string(),
64 compress: false, no_emoji: is_strict_mode(), path_mode: "relative".to_string(),
67 deterministic_sort: true,
68 include_digest: true,
69 max_depth: Some(5), }
71 } else {
72 Self {
74 mode: "classic".to_string(),
75 compress: false,
76 no_emoji: false,
77 path_mode: "off".to_string(),
78 deterministic_sort: false,
79 include_digest: false,
80 max_depth: None,
81 }
82 }
83 }
84}
85
86pub fn generate_cache_key(path: &str, config: &AiOutputConfig) -> String {
89 use std::collections::hash_map::DefaultHasher;
90 use std::hash::{Hash, Hasher};
91
92 let mut hasher = DefaultHasher::new();
93 path.hash(&mut hasher);
94 format!("{:?}", config).hash(&mut hasher);
95
96 let hash = hasher.finish();
97 format!("st_cache_{:016x}", hash)
98}
99
/// Structured, serializable error returned to AI callers.
#[derive(serde::Serialize)]
pub struct AiError {
    // Stable machine-readable code: "INVALID_ARG", "SECURITY", "TOO_LARGE", ...
    pub code: String,
    // Human-readable description of what went wrong.
    pub message: String,
    // Coarse category for programmatic handling.
    pub classification: ErrorClass,
    // Actionable guidance for correcting the call.
    pub hint: String,
    // Optional concrete example of a corrected call.
    pub example: Option<String>,
    // Optional expected value/format; never populated by the constructors
    // in this file — presumably set by callers elsewhere.
    pub expected: Option<String>,
}
110
/// Coarse error categories carried in `AiError::classification`.
#[derive(serde::Serialize, Debug)]
pub enum ErrorClass {
    // Malformed or unsupported argument.
    InvalidArg,
    // Request rejected on security grounds.
    Security,
    // Resource-related failure.
    Resource,
    // Operation timed out.
    Timeout,
    // Result exceeds size limits; pagination advised.
    TooLarge,
    // Caller should paginate the request.
    Paginate,
}
120
121impl AiError {
122 pub fn invalid_arg(message: &str, hint: &str, example: Option<&str>) -> Self {
123 Self {
124 code: "INVALID_ARG".to_string(),
125 message: message.to_string(),
126 classification: ErrorClass::InvalidArg,
127 hint: hint.to_string(),
128 example: example.map(String::from),
129 expected: None,
130 }
131 }
132
133 pub fn security(message: &str, hint: &str) -> Self {
134 Self {
135 code: "SECURITY".to_string(),
136 message: message.to_string(),
137 classification: ErrorClass::Security,
138 hint: hint.to_string(),
139 example: None,
140 expected: None,
141 }
142 }
143
144 pub fn too_large(message: &str, hint: &str) -> Self {
145 Self {
146 code: "TOO_LARGE".to_string(),
147 message: message.to_string(),
148 classification: ErrorClass::TooLarge,
149 hint: format!("Use pagination: {}", hint),
150 example: Some("add 'limit: 100, cursor: \"next_page\"' to your request".to_string()),
151 expected: None,
152 }
153 }
154}
155
/// Envelope wrapping tool output `data` with caching and usage metadata.
#[derive(serde::Serialize)]
pub struct AiResponse<T> {
    // The tool-specific payload.
    pub data: T,
    // Key derived from path + config via `generate_cache_key`.
    pub cache_key: String,
    // Optional content digest (set via `with_digest`).
    pub digest: Option<String>,
    // Scan statistics (zeroed by `new`, set via `with_usage`).
    pub usage: Usage,
    // Suggested follow-up tool calls (added via `suggest_next`).
    pub next_best_calls: Vec<NextCall>,
    // Optional presentation hints.
    pub decorations: Option<Decorations>,
    // Optional mode/compression watermark.
    pub watermark: Option<Watermark>,
}
167
/// Optional presentation hints attached to a response.
#[derive(serde::Serialize, Debug, Clone)]
pub struct Decorations {
    // Suggested emoji — NOTE(review): semantics assumed from name; confirm with consumers.
    pub emoji_hint: Option<String>,
    // Suggested color — presumably a name or hex string; verify against renderers.
    pub color: Option<String>,
    // Intensity level (0-255 by type; exact scale not established here).
    pub intensity: Option<u8>,
}
175
/// Metadata stamp describing how a response was produced.
#[derive(serde::Serialize, Debug, Clone)]
pub struct Watermark {
    // Which AI/strict/emoji flags were active.
    pub mode: ModeInfo,
    // Compression negotiation state.
    pub compression: CompressionInfo,
    // Current processing lane, if any — NOTE(review): lane semantics defined elsewhere.
    pub lane: Option<String>,
    // Candidate lanes for follow-up calls.
    pub next_lanes: Vec<String>,
    // Optional digest of the scanned directory.
    pub dir_digest: Option<String>,
    // Optional fingerprint of the request arguments.
    pub args_fingerprint: Option<String>,
}
186
/// Snapshot of the mode flags (see `is_ai_mode` / `is_strict_mode`).
#[derive(serde::Serialize, Debug, Clone)]
pub struct ModeInfo {
    // Strict AI mode active (`ST_AI_STRICT`).
    pub strict: bool,
    // AI tools mode active (`AI_TOOLS`).
    pub ai_tools: bool,
    // Emoji output enabled.
    pub emoji: bool,
}
193
/// Compression negotiation state reported in the watermark.
#[derive(serde::Serialize, Debug, Clone)]
pub struct CompressionInfo {
    // Whether compression is on by default.
    pub default: bool,
    // Whether the client has proven it can decode (see `CompressionCapabilities`).
    pub supported: bool,
    // Whether compression was actually applied to this response.
    pub active: bool,
}
200
/// Handshake payload used to discover a client's decoding abilities.
///
/// The client echoes back markers for whatever it managed to decode;
/// `CompressionCapabilities::from_echo` interprets the reply.
#[derive(serde::Serialize, Debug)]
pub struct CompressionProbe {
    // Uncompressed reference string ("PING").
    pub plain: String,
    // Base64 encoding of `plain`.
    pub base64: String,
    // Base64 of a zlib stream — presumably compressing `plain`; confirm if regenerating.
    pub zlib_base64: String,
    // Tells the client how to acknowledge successful decoding.
    pub instructions: String,
}
209
impl Default for CompressionProbe {
    /// Delegates to [`CompressionProbe::new`], the canonical probe payload.
    fn default() -> Self {
        Self::new()
    }
}
215
216impl CompressionProbe {
217 pub fn new() -> Self {
218 Self {
219 plain: "PING".to_string(),
220 base64: "UElORw==".to_string(), zlib_base64: "eJwLy0xPVgIACR0DEQ==".to_string(), instructions: "Call server_info again with echo: 'BASE64_OK' and/or 'ZLIB_OK' if you could decode.".to_string(),
223 }
224 }
225}
226
/// Which transport compression schemes the client has proven it can decode.
///
/// Starts out un-probed (all `false` via `Default`); filled in from the
/// client's echo reply to the probe handshake.
#[derive(Debug, Clone, Default)]
pub struct CompressionCapabilities {
    pub base64: bool,
    pub zlib: bool,
    pub probed: bool,
}

impl CompressionCapabilities {
    /// Parses the client's echo string: the `BASE64_OK` / `ZLIB_OK`
    /// markers switch on the matching capability. Marks the handshake
    /// as completed regardless of what was decoded.
    pub fn from_echo(echo: &str) -> Self {
        let base64 = echo.contains("BASE64_OK");
        let zlib = echo.contains("ZLIB_OK");
        Self {
            base64,
            zlib,
            probed: true,
        }
    }

    /// Compression is used only after a successful probe showed at least
    /// one scheme works, and never while the `MCP_NO_COMPRESS`
    /// environment variable is set (manual kill switch).
    pub fn should_compress(&self) -> bool {
        let disabled = std::env::var("MCP_NO_COMPRESS").is_ok();
        !disabled && self.probed && (self.base64 || self.zlib)
    }
}
252
/// Scan statistics reported alongside a response.
#[derive(serde::Serialize)]
pub struct Usage {
    // Number of files covered by the scan.
    pub file_count: usize,
    // Total bytes read during the scan.
    pub bytes_scanned: usize,
    // Wall-clock time spent, in milliseconds.
    pub elapsed_ms: u64,
}
259
/// A suggested follow-up tool invocation.
#[derive(serde::Serialize)]
pub struct NextCall {
    // Name of the tool to invoke.
    pub tool: String,
    // JSON arguments to pass to the call.
    pub args: serde_json::Value,
    // Why this call is suggested.
    pub tip: String,
}
266
267impl<T> AiResponse<T> {
268 pub fn new(data: T, path: &str, config: &AiOutputConfig) -> Self {
269 Self {
270 data,
271 cache_key: generate_cache_key(path, config),
272 digest: None, usage: Usage {
274 file_count: 0,
275 bytes_scanned: 0,
276 elapsed_ms: 0,
277 },
278 next_best_calls: vec![],
279 decorations: None,
280 watermark: None,
281 }
282 }
283
284 pub fn with_digest(mut self, digest: String) -> Self {
285 self.digest = Some(digest);
286 self
287 }
288
289 pub fn with_usage(mut self, file_count: usize, bytes_scanned: usize, elapsed_ms: u64) -> Self {
290 self.usage = Usage {
291 file_count,
292 bytes_scanned,
293 elapsed_ms,
294 };
295 self
296 }
297
298 pub fn suggest_next(mut self, tool: &str, args: serde_json::Value, tip: &str) -> Self {
299 self.next_best_calls.push(NextCall {
300 tool: tool.to_string(),
301 args,
302 tip: tip.to_string(),
303 });
304 self
305 }
306}
307
308#[derive(Debug, Clone, serde::Deserialize)]
310pub struct PaginationParams {
311 pub limit: Option<usize>,
312 pub cursor: Option<String>,
313 pub fields: Option<Vec<String>>, }
315
316impl Default for PaginationParams {
317 fn default() -> Self {
318 Self {
319 limit: if is_ai_mode() { Some(100) } else { None },
320 cursor: None,
321 fields: None,
322 }
323 }
324}
325
326pub fn setup_ai_output() {
328 if is_ai_mode() {
329 std::panic::set_hook(Box::new(|info| {
331 eprintln!("Smart Tree panic: {}", info);
332 }));
333
334 eprintln!("# Smart Tree running in AI mode - JSON on stdout, logs on stderr");
336 }
337}