1use std::cmp::Reverse;
2use std::fs;
3use std::io;
4use std::path::{Path, PathBuf};
5use std::time::Instant;
6
7use glob::Pattern;
8use regex::RegexBuilder;
9use serde::{Deserialize, Serialize};
10use walkdir::WalkDir;
11
12#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
13pub struct TextFilePayload {
14 #[serde(rename = "filePath")]
15 pub file_path: String,
16 pub content: String,
17 #[serde(rename = "numLines")]
18 pub num_lines: usize,
19 #[serde(rename = "startLine")]
20 pub start_line: usize,
21 #[serde(rename = "totalLines")]
22 pub total_lines: usize,
23}
24
/// Envelope for a successful [`read_file`] call.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct ReadFileOutput {
    /// Discriminator tag; [`read_file`] always sets this to `"text"`.
    #[serde(rename = "type")]
    pub kind: String,
    /// The file slice that was read.
    pub file: TextFilePayload,
}
31
32#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
33pub struct StructuredPatchHunk {
34 #[serde(rename = "oldStart")]
35 pub old_start: usize,
36 #[serde(rename = "oldLines")]
37 pub old_lines: usize,
38 #[serde(rename = "newStart")]
39 pub new_start: usize,
40 #[serde(rename = "newLines")]
41 pub new_lines: usize,
42 pub lines: Vec<String>,
43}
44
45#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
46pub struct WriteFileOutput {
47 #[serde(rename = "type")]
48 pub kind: String,
49 #[serde(rename = "filePath")]
50 pub file_path: String,
51 pub content: String,
52 #[serde(rename = "structuredPatch")]
53 pub structured_patch: Vec<StructuredPatchHunk>,
54 #[serde(rename = "originalFile")]
55 pub original_file: Option<String>,
56 #[serde(rename = "gitDiff")]
57 pub git_diff: Option<serde_json::Value>,
58}
59
60#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
61pub struct EditFileOutput {
62 #[serde(rename = "filePath")]
63 pub file_path: String,
64 #[serde(rename = "oldString")]
65 pub old_string: String,
66 #[serde(rename = "newString")]
67 pub new_string: String,
68 #[serde(rename = "originalFile")]
69 pub original_file: String,
70 #[serde(rename = "structuredPatch")]
71 pub structured_patch: Vec<StructuredPatchHunk>,
72 #[serde(rename = "userModified")]
73 pub user_modified: bool,
74 #[serde(rename = "replaceAll")]
75 pub replace_all: bool,
76 #[serde(rename = "gitDiff")]
77 pub git_diff: Option<serde_json::Value>,
78}
79
80#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
81pub struct GlobSearchOutput {
82 #[serde(rename = "durationMs")]
83 pub duration_ms: u128,
84 #[serde(rename = "numFiles")]
85 pub num_files: usize,
86 pub filenames: Vec<String>,
87 pub truncated: bool,
88}
89
/// Input for [`grep_search`]; the serde renames are the literal JSON keys,
/// mirroring grep/ripgrep CLI flags (e.g. `"-B"`, `"-n"`).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct GrepSearchInput {
    /// Regular expression to search for.
    pub pattern: String,
    /// File or directory to search; defaults to the current working directory.
    pub path: Option<String>,
    /// Optional glob filter applied to candidate file paths.
    pub glob: Option<String>,
    /// `"files_with_matches"` (default), `"content"`, or `"count"`.
    #[serde(rename = "output_mode")]
    pub output_mode: Option<String>,
    /// Lines of leading context in content mode (grep `-B`).
    #[serde(rename = "-B")]
    pub before: Option<usize>,
    /// Lines of trailing context in content mode (grep `-A`).
    #[serde(rename = "-A")]
    pub after: Option<usize>,
    /// Lines of symmetric context (grep `-C`).
    #[serde(rename = "-C")]
    pub context_short: Option<usize>,
    /// Alternative spelling of `-C`; takes precedence when both are set.
    pub context: Option<usize>,
    /// Prefix content output with line numbers (grep `-n`); defaults to true.
    #[serde(rename = "-n")]
    pub line_numbers: Option<bool>,
    /// Case-insensitive matching (grep `-i`).
    #[serde(rename = "-i")]
    pub case_insensitive: Option<bool>,
    /// Required file extension (without the dot), e.g. `"rs"`.
    #[serde(rename = "type")]
    pub file_type: Option<String>,
    /// Maximum number of result entries to return.
    pub head_limit: Option<usize>,
    /// Number of leading result entries to skip.
    pub offset: Option<usize>,
    /// Lets `.` in the pattern match newlines.
    pub multiline: Option<bool>,
}
114
115#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
116pub struct GrepSearchOutput {
117 pub mode: Option<String>,
118 #[serde(rename = "numFiles")]
119 pub num_files: usize,
120 pub filenames: Vec<String>,
121 pub content: Option<String>,
122 #[serde(rename = "numLines")]
123 pub num_lines: Option<usize>,
124 #[serde(rename = "numMatches")]
125 pub num_matches: Option<usize>,
126 #[serde(rename = "appliedLimit")]
127 pub applied_limit: Option<usize>,
128 #[serde(rename = "appliedOffset")]
129 pub applied_offset: Option<usize>,
130}
131
132pub fn read_file(
133 path: &str,
134 offset: Option<usize>,
135 limit: Option<usize>,
136) -> io::Result<ReadFileOutput> {
137 let absolute_path = normalize_path(path)?;
138 let content = fs::read_to_string(&absolute_path)?;
139 let lines: Vec<&str> = content.lines().collect();
140 let start_index = offset.unwrap_or(0).min(lines.len());
141 let end_index = limit.map_or(lines.len(), |limit| {
142 start_index.saturating_add(limit).min(lines.len())
143 });
144 let selected = lines[start_index..end_index].join("\n");
145
146 Ok(ReadFileOutput {
147 kind: String::from("text"),
148 file: TextFilePayload {
149 file_path: absolute_path.to_string_lossy().into_owned(),
150 content: selected,
151 num_lines: end_index.saturating_sub(start_index),
152 start_line: start_index.saturating_add(1),
153 total_lines: lines.len(),
154 },
155 })
156}
157
158pub fn write_file(path: &str, content: &str) -> io::Result<WriteFileOutput> {
159 let absolute_path = normalize_path_allow_missing(path)?;
160 let original_file = fs::read_to_string(&absolute_path).ok();
161 if let Some(parent) = absolute_path.parent() {
162 fs::create_dir_all(parent)?;
163 }
164 fs::write(&absolute_path, content)?;
165
166 Ok(WriteFileOutput {
167 kind: if original_file.is_some() {
168 String::from("update")
169 } else {
170 String::from("create")
171 },
172 file_path: absolute_path.to_string_lossy().into_owned(),
173 content: content.to_owned(),
174 structured_patch: make_patch(original_file.as_deref().unwrap_or(""), content),
175 original_file,
176 git_diff: None,
177 })
178}
179
180pub fn edit_file(
181 path: &str,
182 old_string: &str,
183 new_string: &str,
184 replace_all: bool,
185) -> io::Result<EditFileOutput> {
186 let absolute_path = normalize_path(path)?;
187 let original_file = fs::read_to_string(&absolute_path)?;
188 if old_string == new_string {
189 return Err(io::Error::new(
190 io::ErrorKind::InvalidInput,
191 "old_string and new_string must differ",
192 ));
193 }
194 if !original_file.contains(old_string) {
195 return Err(io::Error::new(
196 io::ErrorKind::NotFound,
197 "old_string not found in file",
198 ));
199 }
200
201 let updated = if replace_all {
202 original_file.replace(old_string, new_string)
203 } else {
204 original_file.replacen(old_string, new_string, 1)
205 };
206 fs::write(&absolute_path, &updated)?;
207
208 Ok(EditFileOutput {
209 file_path: absolute_path.to_string_lossy().into_owned(),
210 old_string: old_string.to_owned(),
211 new_string: new_string.to_owned(),
212 original_file: original_file.clone(),
213 structured_patch: make_patch(&original_file, &updated),
214 user_modified: false,
215 replace_all,
216 git_diff: None,
217 })
218}
219
220pub fn glob_search(pattern: &str, path: Option<&str>) -> io::Result<GlobSearchOutput> {
221 let started = Instant::now();
222 let base_dir = path
223 .map(normalize_path)
224 .transpose()?
225 .unwrap_or(std::env::current_dir()?);
226 let search_pattern = if Path::new(pattern).is_absolute() {
227 pattern.to_owned()
228 } else {
229 base_dir.join(pattern).to_string_lossy().into_owned()
230 };
231
232 let mut matches = Vec::new();
233 let entries = glob::glob(&search_pattern)
234 .map_err(|error| io::Error::new(io::ErrorKind::InvalidInput, error.to_string()))?;
235 for entry in entries.flatten() {
236 if entry.is_file() {
237 matches.push(entry);
238 }
239 }
240
241 matches.sort_by_key(|path| {
242 fs::metadata(path)
243 .and_then(|metadata| metadata.modified())
244 .ok()
245 .map(Reverse)
246 });
247
248 let truncated = matches.len() > 100;
249 let filenames = matches
250 .into_iter()
251 .take(100)
252 .map(|path| path.to_string_lossy().into_owned())
253 .collect::<Vec<_>>();
254
255 Ok(GlobSearchOutput {
256 duration_ms: started.elapsed().as_millis(),
257 num_files: filenames.len(),
258 filenames,
259 truncated,
260 })
261}
262
/// Search files under `input.path` (default: the current working directory)
/// for `input.pattern`, approximating a subset of grep/ripgrep behavior.
///
/// Output modes (`input.output_mode`):
/// * `"files_with_matches"` (default) — only `filenames` is populated.
/// * `"content"` — matching lines (plus optional context) in `content`.
/// * `"count"` — total match count in `num_matches`.
///
/// # Errors
/// Returns `InvalidInput` for an unparseable regex or glob, plus any I/O
/// error from resolving `path` or walking the directory tree.
pub fn grep_search(input: &GrepSearchInput) -> io::Result<GrepSearchOutput> {
    let base_path = input
        .path
        .as_deref()
        .map(normalize_path)
        .transpose()?
        .unwrap_or(std::env::current_dir()?);

    // NOTE(review): `multiline` only makes `.` match newlines inside the
    // pattern; matching below is still performed line-by-line, so a pattern
    // spanning lines can never match — confirm this is intended.
    let regex = RegexBuilder::new(&input.pattern)
        .case_insensitive(input.case_insensitive.unwrap_or(false))
        .dot_matches_new_line(input.multiline.unwrap_or(false))
        .build()
        .map_err(|error| io::Error::new(io::ErrorKind::InvalidInput, error.to_string()))?;

    let glob_filter = input
        .glob
        .as_deref()
        .map(Pattern::new)
        .transpose()
        .map_err(|error| io::Error::new(io::ErrorKind::InvalidInput, error.to_string()))?;
    let file_type = input.file_type.as_deref();
    let output_mode = input
        .output_mode
        .clone()
        .unwrap_or_else(|| String::from("files_with_matches"));
    // The long-form `context` wins over `-C`; both default to 0.
    let context = input.context.or(input.context_short).unwrap_or(0);

    let mut filenames = Vec::new();
    let mut content_lines = Vec::new();
    let mut total_matches = 0usize;

    for file_path in collect_search_files(&base_path)? {
        if !matches_optional_filters(&file_path, glob_filter.as_ref(), file_type) {
            continue;
        }

        // Unreadable / non-UTF-8 files are silently skipped.
        let Ok(file_contents) = fs::read_to_string(&file_path) else {
            continue;
        };

        if output_mode == "count" {
            // Count mode tallies every match, not just matching lines.
            let count = regex.find_iter(&file_contents).count();
            if count > 0 {
                filenames.push(file_path.to_string_lossy().into_owned());
                total_matches += count;
            }
            continue;
        }

        let lines: Vec<&str> = file_contents.lines().collect();
        let mut matched_lines = Vec::new();
        for (index, line) in lines.iter().enumerate() {
            if regex.is_match(line) {
                total_matches += 1;
                matched_lines.push(index);
            }
        }

        if matched_lines.is_empty() {
            continue;
        }

        filenames.push(file_path.to_string_lossy().into_owned());
        if output_mode == "content" {
            // Each match emits its own context window; windows of nearby
            // matches are NOT merged, so a line may appear more than once
            // (unlike grep's merged hunks).
            for index in matched_lines {
                // Explicit -B / -A override the symmetric context.
                let start = index.saturating_sub(input.before.unwrap_or(context));
                let end = (index + input.after.unwrap_or(context) + 1).min(lines.len());
                for (current, line) in lines.iter().enumerate().take(end).skip(start) {
                    // Line-number prefixes default to on (grep -n style).
                    let prefix = if input.line_numbers.unwrap_or(true) {
                        format!("{}:{}:", file_path.to_string_lossy(), current + 1)
                    } else {
                        format!("{}:", file_path.to_string_lossy())
                    };
                    content_lines.push(format!("{prefix}{line}"));
                }
            }
        }
    }

    // Page the filename list; in content mode the same head_limit/offset is
    // also applied — independently — to the content lines below.
    let (filenames, applied_limit, applied_offset) =
        apply_limit(filenames, input.head_limit, input.offset);
    let content_output = if output_mode == "content" {
        let (lines, limit, offset) = apply_limit(content_lines, input.head_limit, input.offset);
        return Ok(GrepSearchOutput {
            mode: Some(output_mode),
            num_files: filenames.len(),
            filenames,
            num_lines: Some(lines.len()),
            content: Some(lines.join("\n")),
            num_matches: None,
            applied_limit: limit,
            applied_offset: offset,
        });
    } else {
        None
    };

    Ok(GrepSearchOutput {
        mode: Some(output_mode.clone()),
        num_files: filenames.len(),
        filenames,
        content: content_output,
        num_lines: None,
        num_matches: (output_mode == "count").then_some(total_matches),
        applied_limit,
        applied_offset,
    })
}
371
372fn collect_search_files(base_path: &Path) -> io::Result<Vec<PathBuf>> {
373 if base_path.is_file() {
374 return Ok(vec![base_path.to_path_buf()]);
375 }
376
377 let mut files = Vec::new();
378 for entry in WalkDir::new(base_path) {
379 let entry = entry.map_err(|error| io::Error::other(error.to_string()))?;
380 if entry.file_type().is_file() {
381 files.push(entry.path().to_path_buf());
382 }
383 }
384 Ok(files)
385}
386
387fn matches_optional_filters(
388 path: &Path,
389 glob_filter: Option<&Pattern>,
390 file_type: Option<&str>,
391) -> bool {
392 if let Some(glob_filter) = glob_filter {
393 let path_string = path.to_string_lossy();
394 if !glob_filter.matches(&path_string) && !glob_filter.matches_path(path) {
395 return false;
396 }
397 }
398
399 if let Some(file_type) = file_type {
400 let extension = path.extension().and_then(|extension| extension.to_str());
401 if extension != Some(file_type) {
402 return false;
403 }
404 }
405
406 true
407}
408
/// Drop `offset` leading items, then cap the remainder at `limit`
/// (default 250; a limit of 0 disables capping entirely).
///
/// Returns the surviving items together with the limit and offset that
/// actually took effect (`None` when they had no effect).
fn apply_limit<T>(
    items: Vec<T>,
    limit: Option<usize>,
    offset: Option<usize>,
) -> (Vec<T>, Option<usize>, Option<usize>) {
    let skip = offset.unwrap_or(0);
    let applied_offset = if skip > 0 { Some(skip) } else { None };
    let mut remaining: Vec<T> = items.into_iter().skip(skip).collect();

    let cap = limit.unwrap_or(250);
    if cap == 0 {
        // Zero is treated as "unlimited".
        return (remaining, None, applied_offset);
    }

    let applied_limit = if remaining.len() > cap { Some(cap) } else { None };
    remaining.truncate(cap);
    (remaining, applied_limit, applied_offset)
}
429
430fn make_patch(original: &str, updated: &str) -> Vec<StructuredPatchHunk> {
431 let mut lines = Vec::new();
432 for line in original.lines() {
433 lines.push(format!("-{line}"));
434 }
435 for line in updated.lines() {
436 lines.push(format!("+{line}"));
437 }
438
439 vec![StructuredPatchHunk {
440 old_start: 1,
441 old_lines: original.lines().count(),
442 new_start: 1,
443 new_lines: updated.lines().count(),
444 lines,
445 }]
446}
447
/// Resolve `path` to a canonical absolute path.
///
/// Relative paths are joined onto the current working directory first.
/// Fails when the target does not exist, since canonicalization requires
/// an existing path.
fn normalize_path(path: &str) -> io::Result<PathBuf> {
    let raw = Path::new(path);
    let absolute = if raw.is_absolute() {
        raw.to_path_buf()
    } else {
        std::env::current_dir()?.join(raw)
    };
    absolute.canonicalize()
}
456
/// Like [`normalize_path`] but tolerates a missing target.
///
/// When the path itself cannot be canonicalized, the parent directory is
/// canonicalized instead (best-effort) and the file name re-attached, so a
/// not-yet-created file still resolves to a stable absolute path.
fn normalize_path_allow_missing(path: &str) -> io::Result<PathBuf> {
    let raw = Path::new(path);
    let candidate = if raw.is_absolute() {
        raw.to_path_buf()
    } else {
        std::env::current_dir()?.join(raw)
    };

    match candidate.canonicalize() {
        Ok(canonical) => Ok(canonical),
        Err(_) => match (candidate.parent(), candidate.file_name()) {
            (Some(parent), Some(name)) => {
                let canonical_parent = parent
                    .canonicalize()
                    .unwrap_or_else(|_| parent.to_path_buf());
                Ok(canonical_parent.join(name))
            }
            // No usable parent/name (e.g. root or a `..` tail): return the
            // absolute candidate unchanged.
            _ => Ok(candidate),
        },
    }
}
479
480#[cfg(test)]
481mod tests {
482 use std::time::{SystemTime, UNIX_EPOCH};
483
484 use super::{edit_file, glob_search, grep_search, read_file, write_file, GrepSearchInput};
485
    // Build a unique path in the OS temp directory; the nanosecond
    // timestamp keeps repeated or concurrent test runs from colliding on
    // the same file name.
    fn temp_path(name: &str) -> std::path::PathBuf {
        let unique = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .expect("time should move forward")
            .as_nanos();
        std::env::temp_dir().join(format!("wraith-native-{name}-{unique}"))
    }
493
    #[test]
    fn reads_and_writes_files() {
        let path = temp_path("read-write.txt");
        // A first write to a fresh path must report "create", not "update".
        let write_output = write_file(path.to_string_lossy().as_ref(), "one\ntwo\nthree")
            .expect("write should succeed");
        assert_eq!(write_output.kind, "create");

        // offset/limit form a 0-based line window: skip 1 line, take 1.
        let read_output = read_file(path.to_string_lossy().as_ref(), Some(1), Some(1))
            .expect("read should succeed");
        assert_eq!(read_output.file.content, "two");
    }
505
    #[test]
    fn edits_file_contents() {
        let path = temp_path("edit.txt");
        write_file(path.to_string_lossy().as_ref(), "alpha beta alpha")
            .expect("initial write should succeed");
        // replace_all = true must be echoed back in the output payload.
        let output = edit_file(path.to_string_lossy().as_ref(), "alpha", "omega", true)
            .expect("edit should succeed");
        assert!(output.replace_all);
    }
515
    #[test]
    fn globs_and_greps_directory() {
        // Set up a one-file directory tree to search.
        let dir = temp_path("search-dir");
        std::fs::create_dir_all(&dir).expect("directory should be created");
        let file = dir.join("demo.rs");
        write_file(
            file.to_string_lossy().as_ref(),
            "fn main() {\n println!(\"hello\");\n}\n",
        )
        .expect("file write should succeed");

        // Relative glob pattern is anchored at the provided base directory.
        let globbed = glob_search("**/*.rs", Some(dir.to_string_lossy().as_ref()))
            .expect("glob should succeed");
        assert_eq!(globbed.num_files, 1);

        // Content-mode grep with line numbers enabled; every optional field
        // is spelled out so the test doubles as a construction example.
        let grep_output = grep_search(&GrepSearchInput {
            pattern: String::from("hello"),
            path: Some(dir.to_string_lossy().into_owned()),
            glob: Some(String::from("**/*.rs")),
            output_mode: Some(String::from("content")),
            before: None,
            after: None,
            context_short: None,
            context: None,
            line_numbers: Some(true),
            case_insensitive: Some(false),
            file_type: None,
            head_limit: Some(10),
            offset: Some(0),
            multiline: Some(false),
        })
        .expect("grep should succeed");
        assert!(grep_output.content.unwrap_or_default().contains("hello"));
    }
550}