agentzero_tools/
glob_search.rs1use agentzero_core::{Tool, ToolContext, ToolResult};
2use anyhow::{anyhow, Context};
3use async_trait::async_trait;
4use serde::Deserialize;
5use std::path::{Component, Path, PathBuf};
6
/// Maximum number of results returned when the caller omits `limit`
/// (also used when `limit` is explicitly 0 — see `execute`).
const DEFAULT_LIMIT: usize = 100;
8
/// Deserialized JSON arguments for the `glob_search` tool.
#[derive(Debug, Deserialize)]
struct GlobSearchInput {
    /// Glob pattern to match, e.g. `**/*.rs` (required).
    pattern: String,
    /// Optional subdirectory (relative to the workspace root) to search in.
    #[serde(default)]
    path: Option<String>,
    /// Maximum number of results to return; defaults to `DEFAULT_LIMIT`.
    #[serde(default = "default_limit")]
    limit: usize,
}
17
/// serde default provider for `GlobSearchInput::limit`.
fn default_limit() -> usize {
    DEFAULT_LIMIT
}
21
/// Tool that searches the workspace for files matching a glob pattern.
///
/// Stateless unit struct: all inputs arrive through the `Tool::execute` call.
#[derive(Debug, Default, Clone, Copy)]
pub struct GlobSearchTool;
24
25impl GlobSearchTool {
26 fn resolve_base(
27 input_path: Option<&str>,
28 workspace_root: &str,
29 allowed_root: &Path,
30 ) -> anyhow::Result<PathBuf> {
31 let base = match input_path {
32 Some(p) if !p.trim().is_empty() => {
33 let rel = Path::new(p);
34 if rel.is_absolute() {
35 return Err(anyhow!("absolute paths are not allowed"));
36 }
37 if rel.components().any(|c| matches!(c, Component::ParentDir)) {
38 return Err(anyhow!("path traversal is not allowed"));
39 }
40 Path::new(workspace_root).join(rel)
41 }
42 _ => PathBuf::from(workspace_root),
43 };
44
45 let canonical = base
46 .canonicalize()
47 .with_context(|| format!("unable to resolve search base: {}", base.display()))?;
48 let canonical_root = allowed_root
49 .canonicalize()
50 .context("unable to resolve allowed root")?;
51 if !canonical.starts_with(&canonical_root) {
52 return Err(anyhow!("search path is outside allowed root"));
53 }
54 Ok(canonical)
55 }
56}
57
58#[async_trait]
59impl Tool for GlobSearchTool {
60 fn name(&self) -> &'static str {
61 "glob_search"
62 }
63
64 fn description(&self) -> &'static str {
65 "Search for files matching a glob pattern within the workspace. Returns a list of matching file paths."
66 }
67
68 fn input_schema(&self) -> Option<serde_json::Value> {
69 Some(serde_json::json!({
70 "type": "object",
71 "properties": {
72 "pattern": {
73 "type": "string",
74 "description": "Glob pattern to match (e.g. \"**/*.rs\", \"src/*.ts\")"
75 },
76 "path": {
77 "type": "string",
78 "description": "Subdirectory to search within (optional, defaults to workspace root)"
79 },
80 "limit": {
81 "type": "integer",
82 "description": "Maximum number of results to return (default: 100)"
83 }
84 },
85 "required": ["pattern"]
86 }))
87 }
88
89 async fn execute(&self, input: &str, ctx: &ToolContext) -> anyhow::Result<ToolResult> {
90 let request: GlobSearchInput = serde_json::from_str(input)
91 .context("glob_search expects JSON: {\"pattern\", \"path\"?, \"limit\"?}")?;
92
93 if request.pattern.trim().is_empty() {
94 return Err(anyhow!("pattern must not be empty"));
95 }
96
97 let workspace_root = PathBuf::from(&ctx.workspace_root);
98 let base = Self::resolve_base(
99 request.path.as_deref(),
100 &ctx.workspace_root,
101 &workspace_root,
102 )?;
103
104 let full_pattern = base.join(&request.pattern);
105 let pattern_str = full_pattern.to_string_lossy().to_string();
106
107 let entries = glob::glob(&pattern_str)
108 .with_context(|| format!("invalid glob pattern: {}", request.pattern))?;
109
110 let canonical_root = workspace_root
111 .canonicalize()
112 .context("unable to resolve workspace root")?;
113
114 let limit = if request.limit == 0 {
115 DEFAULT_LIMIT
116 } else {
117 request.limit
118 };
119
120 let mut results = Vec::new();
121 for entry in entries {
122 if results.len() >= limit {
123 break;
124 }
125 match entry {
126 Ok(path) => {
127 if let Ok(canonical) = path.canonicalize() {
129 if canonical.starts_with(&canonical_root) {
130 let relative = canonical
131 .strip_prefix(&canonical_root)
132 .unwrap_or(&canonical);
133 results.push(relative.to_string_lossy().to_string());
134 }
135 }
136 }
137 Err(_) => continue,
138 }
139 }
140
141 results.sort();
142
143 if results.is_empty() {
144 return Ok(ToolResult {
145 output: "no matches found".to_string(),
146 });
147 }
148
149 let truncated = results.len() >= limit;
150 let mut output = results.join("\n");
151 if truncated {
152 output.push_str(&format!("\n<truncated at {} results>", limit));
153 }
154
155 Ok(ToolResult { output })
156 }
157}
158
#[cfg(test)]
mod tests {
    use super::GlobSearchTool;
    use agentzero_core::{Tool, ToolContext};
    use std::fs;
    use std::path::PathBuf;
    use std::sync::atomic::{AtomicU64, Ordering};
    use std::time::{SystemTime, UNIX_EPOCH};

    static TEMP_COUNTER: AtomicU64 = AtomicU64::new(0);

    /// Create a fresh, uniquely named scratch directory for a single test.
    /// Uniqueness comes from pid + wall-clock nanos + a process-wide counter.
    fn temp_dir() -> PathBuf {
        let stamp = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .expect("clock")
            .as_nanos();
        let seq = TEMP_COUNTER.fetch_add(1, Ordering::Relaxed);
        let pid = std::process::id();
        let dir =
            std::env::temp_dir().join(format!("agentzero-glob-search-{pid}-{stamp}-{seq}"));
        fs::create_dir_all(&dir).expect("temp dir should be created");
        dir
    }

    #[tokio::test]
    async fn glob_search_finds_matching_files() {
        let ws = temp_dir();
        for name in ["foo.rs", "bar.rs", "baz.txt"] {
            fs::write(ws.join(name), "").unwrap();
        }

        let ctx = ToolContext::new(ws.to_string_lossy().to_string());
        let out = GlobSearchTool
            .execute(r#"{"pattern": "*.rs"}"#, &ctx)
            .await
            .expect("glob should succeed");

        assert!(out.output.contains("foo.rs"));
        assert!(out.output.contains("bar.rs"));
        assert!(!out.output.contains("baz.txt"));
        fs::remove_dir_all(ws).ok();
    }

    #[tokio::test]
    async fn glob_search_respects_limit() {
        let ws = temp_dir();
        for i in 0..10 {
            fs::write(ws.join(format!("file{i}.txt")), "").unwrap();
        }

        let ctx = ToolContext::new(ws.to_string_lossy().to_string());
        let out = GlobSearchTool
            .execute(r#"{"pattern": "*.txt", "limit": 3}"#, &ctx)
            .await
            .expect("glob should succeed");

        assert!(out.output.contains("truncated at 3"));
        fs::remove_dir_all(ws).ok();
    }

    #[tokio::test]
    async fn glob_search_no_matches() {
        let ws = temp_dir();

        let ctx = ToolContext::new(ws.to_string_lossy().to_string());
        let out = GlobSearchTool
            .execute(r#"{"pattern": "*.nonexistent"}"#, &ctx)
            .await
            .expect("no matches should succeed");

        assert!(out.output.contains("no matches"));
        fs::remove_dir_all(ws).ok();
    }

    #[tokio::test]
    async fn glob_search_rejects_empty_pattern_negative_path() {
        let ws = temp_dir();

        let ctx = ToolContext::new(ws.to_string_lossy().to_string());
        let failure = GlobSearchTool
            .execute(r#"{"pattern": ""}"#, &ctx)
            .await
            .expect_err("empty pattern should fail");

        assert!(failure.to_string().contains("pattern must not be empty"));
        fs::remove_dir_all(ws).ok();
    }

    #[tokio::test]
    async fn glob_search_rejects_path_traversal_negative_path() {
        let ws = temp_dir();

        let ctx = ToolContext::new(ws.to_string_lossy().to_string());
        let failure = GlobSearchTool
            .execute(r#"{"pattern": "*.txt", "path": "../"}"#, &ctx)
            .await
            .expect_err("path traversal should be denied");

        assert!(failure.to_string().contains("path traversal"));
        fs::remove_dir_all(ws).ok();
    }

    #[tokio::test]
    async fn glob_search_recursive_pattern() {
        let ws = temp_dir();
        let nested_dir = ws.join("sub");
        fs::create_dir_all(&nested_dir).unwrap();
        fs::write(ws.join("top.rs"), "").unwrap();
        fs::write(nested_dir.join("nested.rs"), "").unwrap();

        let ctx = ToolContext::new(ws.to_string_lossy().to_string());
        let out = GlobSearchTool
            .execute(r#"{"pattern": "**/*.rs"}"#, &ctx)
            .await
            .expect("recursive glob should succeed");

        assert!(out.output.contains("top.rs"));
        assert!(out.output.contains("nested.rs"));
        fs::remove_dir_all(ws).ok();
    }
}