1use async_trait::async_trait;
2use bamboo_agent_core::{Tool, ToolError, ToolExecutionContext, ToolResult};
3use serde::Deserialize;
4use serde_json::json;
5use std::path::Path;
6
7use super::read_tracker;
8
/// Device paths the tool refuses to read: they either block forever
/// (ttys, stdin, fd/0) or produce unbounded output (/dev/zero, /dev/random),
/// so an open-ended read would hang or exhaust memory.
const BLOCKED_DEVICE_PATHS: &[&str] = &[
    "/dev/zero",
    "/dev/random",
    "/dev/urandom",
    "/dev/full",
    "/dev/stdin",
    "/dev/tty",
    "/dev/console",
    "/dev/stdout",
    "/dev/stderr",
    "/dev/fd/0",
    "/dev/fd/1",
    "/dev/fd/2",
];
23
/// Deserialized arguments for the Read tool (see `parameters_schema`).
#[derive(Debug, Deserialize)]
struct ReadArgs {
    /// Absolute path to the file or directory to read.
    file_path: String,
    /// 0-based line (or directory-entry) offset to start from; `None` means 0.
    #[serde(default)]
    offset: Option<usize>,
    /// Maximum number of lines/entries to return; `None` means everything.
    #[serde(default)]
    limit: Option<usize>,
}
32
33pub struct ReadTool;
34
35impl ReadTool {
36 pub fn new() -> Self {
37 Self
38 }
39
40 fn is_blocked_device_path(path: &Path) -> bool {
41 let display = path.to_string_lossy();
42 if BLOCKED_DEVICE_PATHS
43 .iter()
44 .any(|blocked| display == *blocked)
45 {
46 return true;
47 }
48
49 display.starts_with("/proc/")
50 && (display.ends_with("/fd/0")
51 || display.ends_with("/fd/1")
52 || display.ends_with("/fd/2"))
53 }
54}
55
56impl Default for ReadTool {
57 fn default() -> Self {
58 Self::new()
59 }
60}
61
/// Clamp an `(offset, limit)` window into `[0, total]`.
///
/// Returns `(start, end)` with `start <= end <= total`; a `None` limit
/// means "to the end", and an oversized offset/limit saturates safely.
fn slice_bounds(total: usize, offset: usize, limit: Option<usize>) -> (usize, usize) {
    let start = if offset < total { offset } else { total };
    let end = match limit {
        Some(count) => total.min(start.saturating_add(count)),
        None => total,
    };
    (start, end)
}
69
/// Build the `[TRUNCATED] ...` follow-up hint when `end` stops short of `total`.
///
/// Returns `None` when the whole range was shown. `noun` names the unit
/// ("lines" or "entries"); `limit` is echoed back so the caller can repeat
/// the same page size on the next call.
fn continuation_hint(
    noun: &str,
    start: usize,
    end: usize,
    total: usize,
    limit: Option<usize>,
) -> Option<String> {
    if end >= total {
        return None;
    }

    // Only mention a limit when the caller actually supplied one.
    let limit_suffix = limit
        .map(|value| format!(", limit={value}"))
        .unwrap_or_default();

    let message = if end > start {
        format!(
            "[TRUNCATED] Showing {noun} {}-{end} of {total}. Continue with offset={end}{limit_suffix}",
            start + 1
        )
    } else {
        // Zero items in the window (e.g. limit=0): still point at the next offset.
        format!("[TRUNCATED] No {noun} returned. Continue with offset={end}{limit_suffix}")
    };

    Some(message)
}
98
99fn render_file_with_line_numbers(content: &str, offset: usize, limit: Option<usize>) -> String {
100 let lines: Vec<&str> = content.lines().collect();
101 let (start, end) = slice_bounds(lines.len(), offset, limit);
102
103 let mut rendered = lines[start..end]
104 .iter()
105 .enumerate()
106 .map(|(idx, line)| format!("{:>6}\t{}", start + idx + 1, line))
107 .collect::<Vec<_>>()
108 .join("\n");
109
110 if let Some(hint) = continuation_hint("lines", start, end, lines.len(), limit) {
111 if !rendered.is_empty() {
112 rendered.push('\n');
113 }
114 rendered.push_str(&hint);
115 }
116
117 rendered
118}
119
120fn render_directory_entries(entries: &[String], offset: usize, limit: Option<usize>) -> String {
121 let (start, end) = slice_bounds(entries.len(), offset, limit);
122 let mut rendered = entries[start..end]
123 .iter()
124 .enumerate()
125 .map(|(idx, entry)| format!("{:>6}\t{}", start + idx + 1, entry))
126 .collect::<Vec<_>>()
127 .join("\n");
128
129 if let Some(hint) = continuation_hint("entries", start, end, entries.len(), limit) {
130 if !rendered.is_empty() {
131 rendered.push('\n');
132 }
133 rendered.push_str(&hint);
134 }
135
136 rendered
137}
138
#[async_trait]
impl Tool for ReadTool {
    // Stable tool identifier used for registration and dispatch.
    fn name(&self) -> &str {
        "Read"
    }

    // Model-facing guidance on when and how to use this tool.
    fn description(&self) -> &str {
        "Read a local file or directory with line-numbered output (supports offset/limit). Use this before Edit/Write on existing files. Safe for text files and directories; binary files are omitted and blocking device paths are rejected."
    }

    // The tool never modifies the filesystem.
    fn mutability(&self) -> crate::ToolMutability {
        crate::ToolMutability::ReadOnly
    }

    // Read-only and stateless, so concurrent invocations are safe.
    fn concurrency_safe(&self) -> bool {
        true
    }

    // JSON Schema for the arguments; must stay in sync with `ReadArgs`.
    fn parameters_schema(&self) -> serde_json::Value {
        json!({
            "type": "object",
            "properties": {
                "file_path": {
                    "type": "string",
                    "description": "The absolute path to the file or directory to read"
                },
                "offset": {
                    "type": "number",
                    "description": "The line offset to start reading from. Omit when you want the full file or directory listing."
                },
                "limit": {
                    "type": "number",
                    "description": "The maximum number of lines or directory entries to read. Omit for the full result when safe."
                }
            },
            "required": ["file_path"],
            "additionalProperties": false
        })
    }

    // Context-free entry point: delegates with a minimal context
    // (presumably no session id, so no read-tracking occurs on this
    // path — confirm against ToolExecutionContext::none).
    async fn execute(&self, args: serde_json::Value) -> Result<ToolResult, ToolError> {
        self.execute_with_context(args, ToolExecutionContext::none("Read"))
            .await
    }

    /// Read a file or a directory.
    ///
    /// The (trimmed) path must be absolute and must not be a blocking or
    /// unbounded device path. Directories are listed sorted with `/`
    /// appended to subdirectory names; files are rendered with line
    /// numbers. Both honor `offset`/`limit`. Files containing a NUL byte
    /// are treated as binary and their content is omitted.
    async fn execute_with_context(
        &self,
        args: serde_json::Value,
        ctx: ToolExecutionContext<'_>,
    ) -> Result<ToolResult, ToolError> {
        let parsed: ReadArgs = serde_json::from_value(args)
            .map_err(|e| ToolError::InvalidArguments(format!("Invalid Read args: {}", e)))?;

        // Trim stray whitespace, then validate before touching the filesystem.
        let path = Path::new(parsed.file_path.trim());
        if !path.is_absolute() {
            return Err(ToolError::InvalidArguments(
                "file_path must be an absolute path".to_string(),
            ));
        }
        if Self::is_blocked_device_path(path) {
            return Err(ToolError::InvalidArguments(format!(
                "Refusing to read blocking or unbounded device path: {}",
                path.display()
            )));
        }

        let metadata = tokio::fs::metadata(path)
            .await
            .map_err(|e| ToolError::Execution(format!("Failed to read path: {}", e)))?;

        if metadata.is_dir() {
            // Directory branch: collect entry names, append '/' to
            // subdirectories, and sort for deterministic output.
            let mut dir = tokio::fs::read_dir(path)
                .await
                .map_err(|e| ToolError::Execution(format!("Failed to read directory: {}", e)))?;
            let mut entries = Vec::new();
            while let Some(entry) = dir
                .next_entry()
                .await
                .map_err(|e| ToolError::Execution(format!("Failed to iterate directory: {}", e)))?
            {
                let mut name = entry.file_name().to_string_lossy().to_string();
                if entry
                    .file_type()
                    .await
                    .map_err(|e| ToolError::Execution(format!("Failed to inspect entry: {}", e)))?
                    .is_dir()
                {
                    name.push('/');
                }
                entries.push(name);
            }
            entries.sort();

            let rendered =
                render_directory_entries(&entries, parsed.offset.unwrap_or(0), parsed.limit);
            return Ok(ToolResult {
                success: true,
                result: rendered,
                display_preference: Some("Collapsible".to_string()),
            });
        }

        let bytes = tokio::fs::read(path)
            .await
            .map_err(|e| ToolError::Execution(format!("Failed to read file: {}", e)))?;

        // Mark the file as read BEFORE the binary early-return, so even a
        // binary read satisfies the session's read-before-write gate (the
        // unit test `binary_read_still_marks_file_as_read_for_session_write_gate`
        // relies on this ordering).
        if let Some(session_id) = ctx.session_id {
            read_tracker::mark_read(session_id, parsed.file_path.trim()).await;
        }

        // Binary heuristic: any NUL byte means we omit the content.
        if bytes.contains(&0) {
            return Ok(ToolResult {
                success: true,
                result: "[Binary file omitted]".to_string(),
                display_preference: Some("Collapsible".to_string()),
            });
        }

        // Lossy UTF-8: invalid sequences become U+FFFD instead of erroring.
        let content = String::from_utf8_lossy(&bytes).to_string();
        let rendered =
            render_file_with_line_numbers(&content, parsed.offset.unwrap_or(0), parsed.limit);

        Ok(ToolResult {
            success: true,
            result: rendered,
            display_preference: Some("Collapsible".to_string()),
        })
    }
}
268
#[cfg(test)]
mod tests {
    use super::*;
    use crate::tools::WriteTool;
    use serde_json::json;

    // A binary file read (content omitted) must still register the path as
    // read for the session, so a subsequent Write to the same path succeeds.
    #[tokio::test]
    async fn binary_read_still_marks_file_as_read_for_session_write_gate() {
        let file = tempfile::NamedTempFile::new().unwrap();
        // NUL byte makes the content register as binary.
        tokio::fs::write(file.path(), vec![0_u8, 1, 2, 3])
            .await
            .unwrap();
        let file_path = file.path().to_string_lossy().to_string();
        let ctx = ToolExecutionContext {
            session_id: Some("session_binary_read"),
            tool_call_id: "call_1",
            event_tx: None,
            available_tool_schemas: None,
        };

        let read_tool = ReadTool::new();
        let read_result = read_tool
            .execute_with_context(json!({ "file_path": file_path }), ctx)
            .await
            .unwrap();
        assert!(read_result.success);
        assert!(read_result.result.contains("Binary file omitted"));

        // The write must succeed because the read above marked the path.
        let write_tool = WriteTool::new();
        let write_result = write_tool
            .execute_with_context(
                json!({
                    "file_path": file.path(),
                    "content": "now text"
                }),
                ctx,
            )
            .await
            .unwrap();
        assert!(write_result.success);
    }

    // Directory listings honor offset/limit, suffix subdirectories with '/',
    // and emit a truncation hint when entries remain past the window.
    #[tokio::test]
    async fn read_directory_supports_offset_limit_and_marks_subdirs() {
        let dir = tempfile::tempdir().unwrap();
        tokio::fs::create_dir_all(dir.path().join("b-dir"))
            .await
            .unwrap();
        tokio::fs::write(dir.path().join("a.txt"), "a")
            .await
            .unwrap();
        tokio::fs::write(dir.path().join("c.txt"), "c")
            .await
            .unwrap();

        // Sorted entries are [a.txt, b-dir/, c.txt]; offset=1 limit=1
        // selects only "b-dir/" and leaves "c.txt" truncated.
        let tool = ReadTool::new();
        let result = tool
            .execute(json!({
                "file_path": dir.path(),
                "offset": 1,
                "limit": 1
            }))
            .await
            .unwrap();

        assert!(result.success);
        assert!(result.result.contains("b-dir/"));
        assert!(result.result.contains("TRUNCATED"));
    }

    // Truncated file reads include a continuation hint with the next offset.
    #[tokio::test]
    async fn read_file_adds_continuation_hint_when_truncated() {
        let file = tempfile::NamedTempFile::new().unwrap();
        tokio::fs::write(file.path(), "l1\nl2\nl3\n").await.unwrap();

        let tool = ReadTool::new();
        let result = tool
            .execute(json!({
                "file_path": file.path(),
                "offset": 0,
                "limit": 1
            }))
            .await
            .unwrap();

        assert!(result.success);
        assert!(result.result.contains("l1"));
        assert!(result.result.contains("Continue with offset=1"));
    }

    // Blocking device paths such as /dev/stdin are rejected up front as
    // InvalidArguments, before any filesystem access.
    #[tokio::test]
    async fn read_rejects_blocking_device_paths() {
        let tool = ReadTool::new();
        let result = tool
            .execute(json!({
                "file_path": "/dev/stdin"
            }))
            .await;

        let error = result.expect_err("device path should be rejected");
        assert!(matches!(error, ToolError::InvalidArguments(_)));
        assert!(error
            .to_string()
            .contains("Refusing to read blocking or unbounded device path"));
    }
}
374}