1use crate::error::LuaError;
26use mlua::{Function, Lua, LuaSerdeExt, Table};
27use orcs_runtime::sandbox::SandboxPolicy;
28use std::path::Path;
29use std::sync::Arc;
30
31pub(crate) fn tool_read(path: &str, sandbox: &dyn SandboxPolicy) -> Result<(String, u64), String> {
35 let canonical = sandbox.validate_read(path).map_err(|e| e.to_string())?;
36
37 let metadata =
38 std::fs::metadata(&canonical).map_err(|e| format!("cannot read metadata: {path} ({e})"))?;
39
40 if !metadata.is_file() {
41 return Err(format!("not a file: {path}"));
42 }
43
44 let size = metadata.len();
45 let content =
46 std::fs::read_to_string(&canonical).map_err(|e| format!("read failed: {path} ({e})"))?;
47
48 Ok((content, size))
49}
50
51pub(crate) fn tool_write(
57 path: &str,
58 content: &str,
59 sandbox: &dyn SandboxPolicy,
60) -> Result<usize, String> {
61 let target = sandbox.validate_write(path).map_err(|e| e.to_string())?;
62
63 let parent = target
65 .parent()
66 .ok_or_else(|| format!("cannot determine parent directory: {path}"))?;
67 std::fs::create_dir_all(parent).map_err(|e| format!("cannot create parent directory: {e}"))?;
68
69 let bytes = content.len();
70
71 let mut temp = tempfile::NamedTempFile::new_in(parent)
74 .map_err(|e| format!("temp file creation failed: {path} ({e})"))?;
75
76 use std::io::Write;
77 temp.write_all(content.as_bytes())
78 .map_err(|e| format!("write failed: {path} ({e})"))?;
79
80 temp.persist(&target)
81 .map_err(|e| format!("rename failed: {path} ({e})"))?;
82
83 Ok(bytes)
84}
85
/// A single line that matched a grep pattern.
#[derive(Debug)]
pub(crate) struct GrepMatch {
    /// 1-based line number within the searched file.
    pub(crate) line_number: usize,
    /// Full text of the matching line (no trailing newline).
    pub(crate) line: String,
}
92
/// Maximum directory recursion depth for grep; deeper subtrees are skipped
/// (logged at debug level) rather than treated as an error.
const MAX_GREP_DEPTH: usize = 32;

/// Hard cap on the total number of matches collected by one grep call.
const MAX_GREP_MATCHES: usize = 10_000;
99pub(crate) fn tool_grep(
106 pattern: &str,
107 path: &str,
108 sandbox: &dyn SandboxPolicy,
109) -> Result<Vec<GrepMatch>, String> {
110 let re = regex::Regex::new(pattern).map_err(|e| format!("invalid regex: {pattern} ({e})"))?;
111
112 let canonical = sandbox.validate_read(path).map_err(|e| e.to_string())?;
113 let mut matches = Vec::new();
114
115 let sandbox_root = sandbox.root();
116 if canonical.is_file() {
117 grep_file(&re, &canonical, &mut matches)?;
118 } else if canonical.is_dir() {
119 grep_dir(&re, &canonical, sandbox_root, &mut matches, 0)?;
120 } else {
121 return Err(format!("not a file or directory: {path}"));
122 }
123
124 Ok(matches)
125}
126
127fn grep_file(re: ®ex::Regex, path: &Path, matches: &mut Vec<GrepMatch>) -> Result<(), String> {
128 let content =
129 std::fs::read_to_string(path).map_err(|e| format!("read failed: {:?} ({e})", path))?;
130
131 for (i, line) in content.lines().enumerate() {
132 if matches.len() >= MAX_GREP_MATCHES {
133 break;
134 }
135 if re.is_match(line) {
136 matches.push(GrepMatch {
137 line_number: i + 1,
138 line: line.to_string(),
139 });
140 }
141 }
142
143 Ok(())
144}
145
146fn grep_dir(
155 re: ®ex::Regex,
156 dir: &Path,
157 sandbox_root: &Path,
158 matches: &mut Vec<GrepMatch>,
159 depth: usize,
160) -> Result<(), String> {
161 if depth > MAX_GREP_DEPTH {
162 tracing::debug!("grep: max depth ({MAX_GREP_DEPTH}) reached at {:?}", dir);
163 return Ok(());
164 }
165 if matches.len() >= MAX_GREP_MATCHES {
166 return Ok(());
167 }
168
169 let entries =
170 std::fs::read_dir(dir).map_err(|e| format!("cannot read directory: {:?} ({e})", dir))?;
171
172 for entry in entries.flatten() {
173 if matches.len() >= MAX_GREP_MATCHES {
174 break;
175 }
176
177 let path = entry.path();
178
179 let canonical = match path.canonicalize() {
181 Ok(c) if c.starts_with(sandbox_root) => c,
182 _ => continue, };
184
185 if canonical.is_file() {
186 let is_binary = {
188 use std::io::Read;
189 match std::fs::File::open(&canonical) {
190 Ok(mut file) => {
191 let mut buf = [0u8; 512];
192 match file.read(&mut buf) {
193 Ok(n) => buf[..n].contains(&0),
194 Err(_) => true, }
196 }
197 Err(_) => true, }
199 };
200 if is_binary {
201 continue;
202 }
203 if let Err(e) = grep_file(re, &canonical, matches) {
204 tracing::debug!("grep: skip {:?}: {e}", canonical);
205 }
206 } else if canonical.is_dir() {
207 if let Err(e) = grep_dir(re, &canonical, sandbox_root, matches, depth + 1) {
208 tracing::debug!("grep: skip dir {:?}: {e}", canonical);
209 }
210 }
211 }
212
213 Ok(())
214}
215
216pub(crate) fn tool_glob(
221 pattern: &str,
222 dir: Option<&str>,
223 sandbox: &dyn SandboxPolicy,
224) -> Result<Vec<String>, String> {
225 if pattern.contains("..") {
227 return Err("glob pattern must not contain '..'".to_string());
228 }
229
230 let full_pattern = match dir {
231 Some(d) => {
232 let base = sandbox.validate_read(d).map_err(|e| e.to_string())?;
233 if !base.is_dir() {
234 return Err(format!("not a directory: {d}"));
235 }
236 format!("{}/{pattern}", base.display())
237 }
238 None => {
239 format!("{}/{pattern}", sandbox.root().display())
240 }
241 };
242
243 let paths =
244 glob::glob(&full_pattern).map_err(|e| format!("invalid glob pattern: {pattern} ({e})"))?;
245
246 let sandbox_root = sandbox.root();
247 let mut results = Vec::new();
248 for entry in paths.flatten() {
249 if let Ok(canonical) = entry.canonicalize() {
251 if canonical.starts_with(sandbox_root) {
252 results.push(canonical.display().to_string());
253 }
254 }
255 }
256
257 results.sort();
258 Ok(results)
259}
260
261pub(crate) fn tool_mkdir(path: &str, sandbox: &dyn SandboxPolicy) -> Result<(), String> {
265 let target = sandbox.validate_write(path).map_err(|e| e.to_string())?;
266 std::fs::create_dir_all(&target).map_err(|e| format!("mkdir failed: {path} ({e})"))
267}
268
269pub(crate) fn tool_remove(path: &str, sandbox: &dyn SandboxPolicy) -> Result<(), String> {
274 sandbox.validate_write(path).map_err(|e| e.to_string())?;
276 let canonical = sandbox.validate_read(path).map_err(|e| e.to_string())?;
278
279 if canonical.is_file() {
280 std::fs::remove_file(&canonical).map_err(|e| format!("remove failed: {path} ({e})"))
281 } else if canonical.is_dir() {
282 std::fs::remove_dir_all(&canonical).map_err(|e| format!("remove failed: {path} ({e})"))
283 } else {
284 Err(format!("not found: {path}"))
285 }
286}
287
288pub(crate) fn tool_mv(src: &str, dst: &str, sandbox: &dyn SandboxPolicy) -> Result<(), String> {
293 let src_canonical = sandbox.validate_read(src).map_err(|e| e.to_string())?;
294 let dst_target = sandbox.validate_write(dst).map_err(|e| e.to_string())?;
295
296 if let Some(parent) = dst_target.parent() {
298 std::fs::create_dir_all(parent)
299 .map_err(|e| format!("cannot create parent directory: {e}"))?;
300 }
301
302 std::fs::rename(&src_canonical, &dst_target)
303 .map_err(|e| format!("mv failed: {src} -> {dst} ({e})"))
304}
305
/// One filesystem entry produced by `tool_scan_dir`.
pub(crate) struct ScanEntry {
    /// Full path of the entry (lossy UTF-8).
    pub path: String,
    /// Path relative to the scanned base directory (lossy UTF-8).
    pub relative: String,
    /// Whether the entry is a directory.
    pub is_dir: bool,
    /// Size in bytes per metadata; 0 when metadata was unavailable.
    pub size: u64,
    /// Modification time as seconds since the Unix epoch; 0 when unknown.
    pub modified: u64,
}
318
319pub(crate) fn tool_scan_dir(
321 path: &str,
322 recursive: bool,
323 exclude: &[String],
324 include: &[String],
325 max_depth: Option<usize>,
326 sandbox: &dyn SandboxPolicy,
327) -> Result<Vec<ScanEntry>, String> {
328 let base = sandbox.validate_read(path).map_err(|e| e.to_string())?;
329
330 if !base.is_dir() {
331 return Err(format!("not a directory: {path}"));
332 }
333
334 let exclude_set = build_glob_set(exclude)?;
335 let include_set = if include.is_empty() {
336 None
337 } else {
338 Some(build_glob_set(include)?)
339 };
340
341 let mut walker = walkdir::WalkDir::new(&base);
342 if !recursive {
343 walker = walker.max_depth(1);
344 } else if let Some(depth) = max_depth {
345 walker = walker.max_depth(depth);
346 }
347
348 let mut entries = Vec::new();
349 for entry in walker.into_iter().filter_map(|e| e.ok()) {
350 if entry.path() == base {
351 continue;
352 }
353
354 let relative = entry
355 .path()
356 .strip_prefix(&base)
357 .unwrap_or(entry.path())
358 .to_string_lossy()
359 .to_string();
360
361 if exclude_set.is_match(&relative) {
362 continue;
363 }
364
365 let is_dir = entry.file_type().is_dir();
366
367 if !is_dir {
368 if let Some(ref inc) = include_set {
369 if !inc.is_match(&relative) {
370 continue;
371 }
372 }
373 }
374
375 let metadata = entry.metadata().ok();
376 let size = metadata.as_ref().map_or(0, |m| m.len());
377 let modified = metadata
378 .and_then(|m| m.modified().ok())
379 .and_then(|t| t.duration_since(std::time::UNIX_EPOCH).ok())
380 .map_or(0, |d| d.as_secs());
381
382 entries.push(ScanEntry {
383 path: entry.path().to_string_lossy().to_string(),
384 relative,
385 is_dir,
386 size,
387 modified,
388 });
389 }
390
391 Ok(entries)
392}
393
394fn build_glob_set(patterns: &[String]) -> Result<globset::GlobSet, String> {
395 let mut builder = globset::GlobSetBuilder::new();
396 for pattern in patterns {
397 let glob =
398 globset::Glob::new(pattern).map_err(|e| format!("invalid glob '{pattern}': {e}"))?;
399 builder.add(glob);
400 }
401 builder
402 .build()
403 .map_err(|e| format!("glob set build error: {e}"))
404}
405
/// Result of parsing optional frontmatter from a document.
pub(crate) struct FrontmatterResult {
    /// Parsed frontmatter as JSON, or `None` when no frontmatter was found.
    pub frontmatter: Option<serde_json::Value>,
    /// Document body with any frontmatter block (and leading newlines) stripped.
    pub body: String,
    /// Source format of the frontmatter: `"yaml"`, `"toml"`, or `None`.
    pub format: Option<String>,
}
414
415pub(crate) fn tool_parse_frontmatter_str(content: &str) -> Result<FrontmatterResult, String> {
419 let trimmed = content.trim_start();
420
421 if let Some(rest) = trimmed.strip_prefix("---") {
422 if let Some(end_idx) = rest.find("\n---") {
424 let yaml_str = &rest[..end_idx];
425 let body_start = end_idx + 4; let body = rest[body_start..].trim_start_matches('\n').to_string();
427
428 let value: serde_json::Value =
429 serde_yaml::from_str(yaml_str).map_err(|e| format!("YAML parse error: {e}"))?;
430
431 Ok(FrontmatterResult {
432 frontmatter: Some(value),
433 body,
434 format: Some("yaml".to_string()),
435 })
436 } else {
437 Ok(FrontmatterResult {
438 frontmatter: None,
439 body: content.to_string(),
440 format: None,
441 })
442 }
443 } else if let Some(rest) = trimmed.strip_prefix("+++") {
444 if let Some(end_idx) = rest.find("\n+++") {
446 let toml_str = &rest[..end_idx];
447 let body_start = end_idx + 4;
448 let body = rest[body_start..].trim_start_matches('\n').to_string();
449
450 let toml_value: toml::Value = toml_str
451 .parse()
452 .map_err(|e| format!("TOML parse error: {e}"))?;
453 let json_value = toml_to_json(toml_value);
454
455 Ok(FrontmatterResult {
456 frontmatter: Some(json_value),
457 body,
458 format: Some("toml".to_string()),
459 })
460 } else {
461 Ok(FrontmatterResult {
462 frontmatter: None,
463 body: content.to_string(),
464 format: None,
465 })
466 }
467 } else {
468 Ok(FrontmatterResult {
469 frontmatter: None,
470 body: content.to_string(),
471 format: None,
472 })
473 }
474}
475
476pub(crate) fn tool_parse_frontmatter(
478 path: &str,
479 sandbox: &dyn SandboxPolicy,
480) -> Result<FrontmatterResult, String> {
481 let canonical = sandbox.validate_read(path).map_err(|e| e.to_string())?;
482 let content =
483 std::fs::read_to_string(&canonical).map_err(|e| format!("read failed: {path} ({e})"))?;
484 tool_parse_frontmatter_str(&content)
485}
486
487pub(crate) fn tool_parse_toml(content: &str) -> Result<serde_json::Value, String> {
491 let toml_value: toml::Value = content
492 .parse()
493 .map_err(|e| format!("TOML parse error: {e}"))?;
494 Ok(toml_to_json(toml_value))
495}
496
497fn toml_to_json(value: toml::Value) -> serde_json::Value {
498 match value {
499 toml::Value::String(s) => serde_json::Value::String(s),
500 toml::Value::Integer(i) => serde_json::json!(i),
501 toml::Value::Float(f) => serde_json::json!(f),
502 toml::Value::Boolean(b) => serde_json::Value::Bool(b),
503 toml::Value::Datetime(d) => serde_json::Value::String(d.to_string()),
504 toml::Value::Array(arr) => {
505 serde_json::Value::Array(arr.into_iter().map(toml_to_json).collect())
506 }
507 toml::Value::Table(map) => {
508 let obj = map.into_iter().map(|(k, v)| (k, toml_to_json(v))).collect();
509 serde_json::Value::Object(obj)
510 }
511 }
512}
513
/// Partition of input paths by whether they matched any glob pattern.
pub(crate) struct GlobMatchResult {
    /// Paths matching at least one pattern, in input order.
    pub matched: Vec<String>,
    /// Paths matching none of the patterns, in input order.
    pub unmatched: Vec<String>,
}
521
522pub(crate) fn tool_glob_match(
524 patterns: &[String],
525 paths: &[String],
526) -> Result<GlobMatchResult, String> {
527 let glob_set = build_glob_set(patterns)?;
528
529 let mut matched = Vec::new();
530 let mut unmatched = Vec::new();
531
532 for path in paths {
533 if glob_set.is_match(path) {
534 matched.push(path.clone());
535 } else {
536 unmatched.push(path.clone());
537 }
538 }
539
540 Ok(GlobMatchResult { matched, unmatched })
541}
542
543pub(crate) fn tool_load_lua(
553 lua: &Lua,
554 content: &str,
555 source_name: &str,
556) -> Result<mlua::Value, String> {
557 let env = lua
559 .create_table()
560 .map_err(|e| format!("env creation failed: {e}"))?;
561
562 let globals = lua.globals();
563
564 let safe_globals = [
566 "table",
567 "string",
568 "math",
569 "pairs",
570 "ipairs",
571 "next",
572 "type",
573 "tostring",
574 "tonumber",
575 "select",
576 "unpack",
577 "error",
578 "pcall",
579 "xpcall",
580 "rawget",
581 "rawset",
582 "rawequal",
583 "rawlen",
584 "setmetatable",
585 "getmetatable",
586 ];
587
588 for name in &safe_globals {
589 if let Ok(val) = globals.get::<mlua::Value>(*name) {
590 env.set(*name, val)
591 .map_err(|e| format!("env.{name}: {e}"))?;
592 }
593 }
594
595 let src = source_name.to_string();
597 let print_fn = lua
598 .create_function(move |_, args: mlua::MultiValue| {
599 let parts: Vec<String> = args.iter().map(|v| format!("{v:?}")).collect();
600 tracing::info!(source = %src, "[lua-sandbox] {}", parts.join("\t"));
601 Ok(())
602 })
603 .map_err(|e| format!("print fn: {e}"))?;
604 env.set("print", print_fn)
605 .map_err(|e| format!("env.print: {e}"))?;
606
607 let chunk = lua.load(content).set_name(source_name);
609
610 chunk
611 .set_environment(env)
612 .eval::<mlua::Value>()
613 .map_err(|e| format!("{source_name}: {e}"))
614}
615
/// Registers all tool functions on the global `orcs` Lua table.
///
/// Each tool closure captures its own `Arc` clone of the sandbox policy.
/// Most tools return a result table with an `ok` boolean plus either payload
/// fields or an `error` string; `scan_dir` and `parse_toml` instead raise a
/// Lua runtime error on failure.
/// NOTE(review): that error-reporting inconsistency (table-style vs raised
/// error) looks unintentional — confirm against the Lua-side callers.
pub fn register_tool_functions(lua: &Lua, sandbox: Arc<dyn SandboxPolicy>) -> Result<(), LuaError> {
    let orcs_table: Table = lua.globals().get("orcs")?;

    // orcs.read(path) -> { ok, content, size | error }
    let sb = Arc::clone(&sandbox);
    let read_fn = lua.create_function(move |lua, path: String| {
        let result = lua.create_table()?;
        match tool_read(&path, sb.as_ref()) {
            Ok((content, size)) => {
                result.set("ok", true)?;
                result.set("content", content)?;
                result.set("size", size)?;
            }
            Err(e) => {
                result.set("ok", false)?;
                result.set("error", e)?;
            }
        }
        Ok(result)
    })?;
    orcs_table.set("read", read_fn)?;

    // orcs.write(path, content) -> { ok, bytes_written | error }
    let sb = Arc::clone(&sandbox);
    let write_fn = lua.create_function(move |lua, (path, content): (String, String)| {
        let result = lua.create_table()?;
        match tool_write(&path, &content, sb.as_ref()) {
            Ok(bytes) => {
                result.set("ok", true)?;
                result.set("bytes_written", bytes)?;
            }
            Err(e) => {
                result.set("ok", false)?;
                result.set("error", e)?;
            }
        }
        Ok(result)
    })?;
    orcs_table.set("write", write_fn)?;

    // orcs.grep(pattern, path) -> { ok, matches = {{line_number, line}...}, count | error }
    let sb = Arc::clone(&sandbox);
    let grep_fn = lua.create_function(move |lua, (pattern, path): (String, String)| {
        let result = lua.create_table()?;
        match tool_grep(&pattern, &path, sb.as_ref()) {
            Ok(grep_matches) => {
                let matches_table = lua.create_table()?;
                for (i, m) in grep_matches.iter().enumerate() {
                    let entry = lua.create_table()?;
                    entry.set("line_number", m.line_number)?;
                    entry.set("line", m.line.as_str())?;
                    matches_table.set(i + 1, entry)?;
                }
                result.set("ok", true)?;
                result.set("matches", matches_table)?;
                result.set("count", grep_matches.len())?;
            }
            Err(e) => {
                result.set("ok", false)?;
                result.set("error", e)?;
            }
        }
        Ok(result)
    })?;
    orcs_table.set("grep", grep_fn)?;

    // orcs.glob(pattern[, dir]) -> { ok, files, count | error }
    let sb = Arc::clone(&sandbox);
    let glob_fn = lua.create_function(move |lua, (pattern, dir): (String, Option<String>)| {
        let result = lua.create_table()?;
        match tool_glob(&pattern, dir.as_deref(), sb.as_ref()) {
            Ok(files) => {
                let files_table = lua.create_table()?;
                for (i, f) in files.iter().enumerate() {
                    files_table.set(i + 1, f.as_str())?;
                }
                result.set("ok", true)?;
                result.set("files", files_table)?;
                result.set("count", files.len())?;
            }
            Err(e) => {
                result.set("ok", false)?;
                result.set("error", e)?;
            }
        }
        Ok(result)
    })?;
    orcs_table.set("glob", glob_fn)?;

    // orcs.mkdir(path) -> { ok | error }
    let sb = Arc::clone(&sandbox);
    let mkdir_fn = lua.create_function(move |lua, path: String| {
        let result = lua.create_table()?;
        match tool_mkdir(&path, sb.as_ref()) {
            Ok(()) => result.set("ok", true)?,
            Err(e) => {
                result.set("ok", false)?;
                result.set("error", e)?;
            }
        }
        Ok(result)
    })?;
    orcs_table.set("mkdir", mkdir_fn)?;

    // orcs.remove(path) -> { ok | error }
    let sb = Arc::clone(&sandbox);
    let remove_fn = lua.create_function(move |lua, path: String| {
        let result = lua.create_table()?;
        match tool_remove(&path, sb.as_ref()) {
            Ok(()) => result.set("ok", true)?,
            Err(e) => {
                result.set("ok", false)?;
                result.set("error", e)?;
            }
        }
        Ok(result)
    })?;
    orcs_table.set("remove", remove_fn)?;

    // orcs.mv(src, dst) -> { ok | error }
    let sb = Arc::clone(&sandbox);
    let mv_fn = lua.create_function(move |lua, (src, dst): (String, String)| {
        let result = lua.create_table()?;
        match tool_mv(&src, &dst, sb.as_ref()) {
            Ok(()) => result.set("ok", true)?,
            Err(e) => {
                result.set("ok", false)?;
                result.set("error", e)?;
            }
        }
        Ok(result)
    })?;
    orcs_table.set("mv", mv_fn)?;

    // orcs.scan_dir{path, recursive?, max_depth?, exclude?, include?} ->
    // array of entry tables; raises a Lua error on failure.
    let sb = Arc::clone(&sandbox);
    let scan_dir_fn = lua.create_function(move |lua, config: Table| {
        let path: String = config.get("path")?;
        let recursive: bool = config.get("recursive").unwrap_or(true);
        let max_depth: Option<usize> = config.get("max_depth").ok();

        let exclude: Vec<String> = config
            .get::<Table>("exclude")
            .map(|t| {
                t.sequence_values::<String>()
                    .filter_map(|v| v.ok())
                    .collect()
            })
            .unwrap_or_default();

        let include: Vec<String> = config
            .get::<Table>("include")
            .map(|t| {
                t.sequence_values::<String>()
                    .filter_map(|v| v.ok())
                    .collect()
            })
            .unwrap_or_default();

        match tool_scan_dir(&path, recursive, &exclude, &include, max_depth, sb.as_ref()) {
            Ok(entries) => {
                let result = lua.create_table()?;
                for (i, entry) in entries.iter().enumerate() {
                    let t = lua.create_table()?;
                    t.set("path", entry.path.as_str())?;
                    t.set("relative", entry.relative.as_str())?;
                    t.set("is_dir", entry.is_dir)?;
                    t.set("size", entry.size)?;
                    t.set("modified", entry.modified)?;
                    result.set(i + 1, t)?;
                }
                Ok(result)
            }
            Err(e) => Err(mlua::Error::RuntimeError(e)),
        }
    })?;
    orcs_table.set("scan_dir", scan_dir_fn)?;

    // orcs.parse_frontmatter(path) -> { frontmatter, body, format } or { ok = false, error }
    let sb = Arc::clone(&sandbox);
    let parse_fm_fn =
        lua.create_function(move |lua, path: String| {
            match tool_parse_frontmatter(&path, sb.as_ref()) {
                Ok(result) => frontmatter_result_to_lua(lua, result),
                Err(e) => {
                    let t = lua.create_table()?;
                    t.set("ok", false)?;
                    t.set("error", e)?;
                    Ok(t)
                }
            }
        })?;
    orcs_table.set("parse_frontmatter", parse_fm_fn)?;

    // orcs.parse_frontmatter_str(content) -> same shape as parse_frontmatter
    let parse_fm_str_fn = lua.create_function(move |lua, content: String| {
        match tool_parse_frontmatter_str(&content) {
            Ok(result) => frontmatter_result_to_lua(lua, result),
            Err(e) => {
                let t = lua.create_table()?;
                t.set("ok", false)?;
                t.set("error", e)?;
                Ok(t)
            }
        }
    })?;
    orcs_table.set("parse_frontmatter_str", parse_fm_str_fn)?;

    // orcs.parse_toml(content) -> Lua value; raises a Lua error on failure.
    let parse_toml_fn =
        lua.create_function(
            move |lua, content: String| match tool_parse_toml(&content) {
                Ok(value) => lua.to_value(&value).map_err(|e| {
                    mlua::Error::RuntimeError(format!("TOML to Lua conversion failed: {e}"))
                }),
                Err(e) => Err(mlua::Error::RuntimeError(e)),
            },
        )?;
    orcs_table.set("parse_toml", parse_toml_fn)?;

    // orcs.glob_match(patterns, paths) -> { matched, unmatched }
    let glob_match_fn =
        lua.create_function(move |lua, (patterns_tbl, paths_tbl): (Table, Table)| {
            let patterns: Vec<String> = patterns_tbl
                .sequence_values::<String>()
                .filter_map(|v| v.ok())
                .collect();
            let paths: Vec<String> = paths_tbl
                .sequence_values::<String>()
                .filter_map(|v| v.ok())
                .collect();

            match tool_glob_match(&patterns, &paths) {
                Ok(result) => {
                    let t = lua.create_table()?;

                    let matched = lua.create_table()?;
                    for (i, m) in result.matched.iter().enumerate() {
                        matched.set(i + 1, m.as_str())?;
                    }
                    t.set("matched", matched)?;

                    let unmatched = lua.create_table()?;
                    for (i, u) in result.unmatched.iter().enumerate() {
                        unmatched.set(i + 1, u.as_str())?;
                    }
                    t.set("unmatched", unmatched)?;

                    Ok(t)
                }
                Err(e) => Err(mlua::Error::RuntimeError(e)),
            }
        })?;
    orcs_table.set("glob_match", glob_match_fn)?;

    // orcs.load_lua(content[, source_name]) -> evaluated value (sandboxed env)
    let load_lua_fn = lua.create_function(
        move |lua, (content, source_name): (String, Option<String>)| {
            let name = source_name.as_deref().unwrap_or("(eval)");
            tool_load_lua(lua, &content, name).map_err(mlua::Error::RuntimeError)
        },
    )?;
    orcs_table.set("load_lua", load_lua_fn)?;

    tracing::debug!(
        "Registered orcs tool functions: read, write, grep, glob, mkdir, remove, mv, scan_dir, parse_frontmatter, parse_toml, glob_match, load_lua (sandbox_root={})",
        sandbox.root().display()
    );
    Ok(())
}
896
897fn frontmatter_result_to_lua(lua: &Lua, result: FrontmatterResult) -> Result<Table, mlua::Error> {
899 let t = lua.create_table()?;
900 match result.frontmatter {
901 Some(fm) => {
902 let lua_fm = lua.to_value(&fm)?;
903 t.set("frontmatter", lua_fm)?;
904 }
905 None => t.set("frontmatter", mlua::Value::Nil)?,
906 }
907 t.set("body", result.body)?;
908 match result.format {
909 Some(f) => t.set("format", f)?,
910 None => t.set("format", mlua::Value::Nil)?,
911 }
912 Ok(t)
913}
914
/// Lua app-data consulted by the tool hook dispatcher; when absent, hook
/// dispatch becomes a no-op.
pub(crate) struct ToolHookContext {
    /// Shared registry of hooks to dispatch to.
    pub(crate) registry: orcs_hook::SharedHookRegistry,
    /// Component on whose behalf tool calls are made.
    pub(crate) component_id: orcs_types::ComponentId,
}
926
/// Names on the `orcs` table that get wrapped with pre/post hook dispatch by
/// `wrap_tools_with_hooks`. Names absent from the table are skipped.
const HOOKABLE_TOOLS: &[&str] = &[
    "read",
    "write",
    "grep",
    "glob",
    "mkdir",
    "remove",
    "mv",
    "scan_dir",
    "parse_frontmatter",
];
939
/// Wraps each hookable `orcs.*` tool in a Lua shim that dispatches
/// `ToolPreExecute` / `ToolPostExecute` hooks around the original call.
///
/// Hook semantics (see the Lua shim below):
/// * a non-nil "pre" result short-circuits the tool entirely;
/// * a non-nil "post" result replaces the tool's return value;
/// * otherwise the original result is returned unchanged.
pub(crate) fn wrap_tools_with_hooks(lua: &Lua) -> Result<(), LuaError> {
    let orcs_table: Table = lua.globals().get("orcs")?;

    // orcs._dispatch_tool_hook(phase, tool_name, args) — bridges Lua tool
    // calls into the Rust hook registry. Returns nil when there is nothing
    // to override.
    let dispatch_fn = lua.create_function(
        |lua, (phase, tool_name, args_val): (String, String, mlua::Value)| {
            // No ToolHookContext in app data => hooks disabled; no-op.
            let (registry, component_id) = {
                let ctx = lua.app_data_ref::<ToolHookContext>();
                let Some(ctx) = ctx else {
                    return Ok(mlua::Value::Nil);
                };
                (
                    std::sync::Arc::clone(&ctx.registry),
                    ctx.component_id.clone(),
                )
            };

            // Unknown phases are ignored rather than treated as errors.
            let point = match phase.as_str() {
                "pre" => orcs_hook::HookPoint::ToolPreExecute,
                "post" => orcs_hook::HookPoint::ToolPostExecute,
                _ => return Ok(mlua::Value::Nil),
            };

            let args_json: serde_json::Value =
                lua.from_value(args_val).unwrap_or(serde_json::Value::Null);

            let payload = serde_json::json!({
                "tool": tool_name,
                "args": args_json,
            });

            // For "post" we keep a copy so we can tell whether a Continue
            // hook actually mutated the payload.
            let original_payload = if phase == "post" {
                Some(payload.clone())
            } else {
                None
            };

            let hook_ctx = orcs_hook::HookContext::new(
                point,
                component_id.clone(),
                orcs_types::ChannelId::new(),
                orcs_types::Principal::System,
                0,
                payload,
            );

            // Dispatch under the read lock; recover from poisoning rather
            // than propagating a panic into Lua.
            let action = {
                let guard = registry.read().unwrap_or_else(|poisoned| {
                    tracing::warn!("hook registry lock poisoned, using inner value");
                    poisoned.into_inner()
                });
                guard.dispatch(point, &component_id, None, hook_ctx)
            };

            match action {
                orcs_hook::HookAction::Abort { reason } => {
                    let result = lua.create_table()?;
                    result.set("ok", false)?;
                    result.set("error", format!("blocked by hook: {reason}"))?;
                    Ok(mlua::Value::Table(result))
                }
                orcs_hook::HookAction::Skip(val) | orcs_hook::HookAction::Replace(val) => {
                    lua.to_value(&val)
                }
                orcs_hook::HookAction::Continue(ctx) => {
                    // Only a post-phase payload mutation overrides the result.
                    if let Some(original) = original_payload {
                        if ctx.payload != original {
                            lua.to_value(&ctx.payload)
                        } else {
                            Ok(mlua::Value::Nil)
                        }
                    } else {
                        Ok(mlua::Value::Nil)
                    }
                }
            }
        },
    )?;
    orcs_table.set("_dispatch_tool_hook", dispatch_fn)?;

    for &name in HOOKABLE_TOOLS {
        // Skip tools not present on the table (nothing to wrap).
        if orcs_table.get::<Function>(name).is_err() {
            continue;
        }

        let wrap_code = format!(
            r#"
            do
                local _orig = orcs.{name}
                orcs.{name} = function(...)
                    local pre = orcs._dispatch_tool_hook("pre", "{name}", {{...}})
                    if pre ~= nil then return pre end
                    local result = _orig(...)
                    local post = orcs._dispatch_tool_hook("post", "{name}", result)
                    if post ~= nil then return post end
                    return result
                end
            end
            "#,
        );

        lua.load(&wrap_code)
            .exec()
            .map_err(|e| LuaError::InvalidScript(format!("failed to wrap tool '{name}': {e}")))?;
    }

    tracing::debug!("Wrapped tools with hook dispatch: {:?}", HOOKABLE_TOOLS);
    Ok(())
}
1083
#[cfg(test)]
mod tests {
    use super::*;
    use orcs_runtime::sandbox::ProjectSandbox;
    use std::fs;
    use std::path::PathBuf;

    /// Creates a fresh project sandbox rooted in a temp directory.
    /// Returns the `TempDir` guard (must stay alive for the test's
    /// duration), the canonicalized root, and the sandbox policy.
    fn test_sandbox() -> (tempfile::TempDir, PathBuf, Arc<dyn SandboxPolicy>) {
        let td = tempfile::tempdir().expect("should create temp directory");
        let root = td
            .path()
            .canonicalize()
            .expect("should canonicalize temp dir path");
        let sandbox = ProjectSandbox::new(&root).expect("should create project sandbox");
        (td, root, Arc::new(sandbox))
    }
1102
    // --- tool_read ---

    #[test]
    fn read_existing_file() {
        let (_td, root, sandbox) = test_sandbox();
        let file = root.join("test.txt");
        fs::write(&file, "hello world").expect("should write test file");

        let (content, size) = tool_read(
            file.to_str().expect("path should be valid UTF-8"),
            sandbox.as_ref(),
        )
        .expect("should read existing file");
        assert_eq!(content, "hello world");
        assert_eq!(size, 11);
    }

    #[test]
    fn read_nonexistent_file() {
        let (_td, _root, sandbox) = test_sandbox();
        let result = tool_read("nonexistent.txt", sandbox.as_ref());
        assert!(result.is_err());
    }

    #[test]
    fn read_directory_fails() {
        let (_td, root, sandbox) = test_sandbox();
        let sub = root.join("subdir");
        fs::create_dir_all(&sub).expect("should create subdirectory");

        let result = tool_read(
            sub.to_str().expect("path should be valid UTF-8"),
            sandbox.as_ref(),
        );
        assert!(result.is_err());
        assert!(result
            .expect_err("should fail for directory")
            .contains("not a file"));
    }

    #[test]
    fn read_outside_root_rejected() {
        let (_td, _root, sandbox) = test_sandbox();
        let result = tool_read("/etc/hosts", sandbox.as_ref());
        assert!(result.is_err());
        assert!(result
            .expect_err("should deny access outside root")
            .contains("access denied"));
    }

    // --- tool_write ---

    #[test]
    fn write_new_file() {
        let (_td, root, sandbox) = test_sandbox();
        let file = root.join("new.txt");

        let bytes = tool_write(
            file.to_str().expect("path should be valid UTF-8"),
            "new content",
            sandbox.as_ref(),
        )
        .expect("should write new file");
        assert_eq!(bytes, 11);
        assert_eq!(
            fs::read_to_string(&file).expect("should read written file"),
            "new content"
        );
    }

    #[test]
    fn write_overwrites_existing() {
        let (_td, root, sandbox) = test_sandbox();
        let file = root.join("existing.txt");
        fs::write(&file, "old").expect("should write initial file");

        tool_write(
            file.to_str().expect("path should be valid UTF-8"),
            "new",
            sandbox.as_ref(),
        )
        .expect("should overwrite existing file");
        assert_eq!(
            fs::read_to_string(&file).expect("should read overwritten file"),
            "new"
        );
    }

    #[test]
    fn write_creates_parent_dirs() {
        let (_td, root, sandbox) = test_sandbox();
        let file = root.join("sub/dir/file.txt");

        tool_write(
            file.to_str().expect("path should be valid UTF-8"),
            "nested",
            sandbox.as_ref(),
        )
        .expect("should write file with parent dir creation");
        assert_eq!(
            fs::read_to_string(&file).expect("should read nested file"),
            "nested"
        );
    }

    #[test]
    fn write_atomic_no_temp_leftover() {
        let (_td, root, sandbox) = test_sandbox();
        let file = root.join("atomic.txt");

        tool_write(
            file.to_str().expect("path should be valid UTF-8"),
            "content",
            sandbox.as_ref(),
        )
        .expect("should write file atomically");

        // NOTE(review): NamedTempFile uses randomized names, so checking one
        // fixed candidate path is a weak leftover check — consider asserting
        // the directory contains only "atomic.txt" instead.
        let temp = file.with_extension("tmp.orcs");
        assert!(!temp.exists());
    }

    #[test]
    fn write_outside_root_rejected() {
        let (_td, _root, sandbox) = test_sandbox();
        let result = tool_write("/etc/evil.txt", "bad", sandbox.as_ref());
        assert!(result.is_err());
        assert!(result
            .expect_err("should deny write outside root")
            .contains("access denied"));
    }
1234
    // --- tool_grep ---

    #[test]
    fn grep_finds_matches() {
        let (_td, root, sandbox) = test_sandbox();
        let file = root.join("search.txt");
        fs::write(&file, "line one\nline two\nthird line").expect("should write search file");

        let matches = tool_grep(
            "line",
            file.to_str().expect("path should be valid UTF-8"),
            sandbox.as_ref(),
        )
        .expect("should find grep matches");
        assert_eq!(matches.len(), 3);
        assert_eq!(matches[0].line_number, 1);
        assert_eq!(matches[0].line, "line one");
    }

    #[test]
    fn grep_regex_pattern() {
        let (_td, root, sandbox) = test_sandbox();
        let file = root.join("regex.txt");
        fs::write(&file, "foo123\nbar456\nfoo789").expect("should write regex test file");

        let matches = tool_grep(
            r"foo\d+",
            file.to_str().expect("path should be valid UTF-8"),
            sandbox.as_ref(),
        )
        .expect("should find regex matches");
        assert_eq!(matches.len(), 2);
    }

    #[test]
    fn grep_no_matches() {
        let (_td, root, sandbox) = test_sandbox();
        let file = root.join("empty.txt");
        fs::write(&file, "nothing here").expect("should write test file");

        let matches = tool_grep(
            "nonexistent",
            file.to_str().expect("path should be valid UTF-8"),
            sandbox.as_ref(),
        )
        .expect("should return empty matches without error");
        assert!(matches.is_empty());
    }

    #[test]
    fn grep_invalid_regex() {
        let (_td, root, sandbox) = test_sandbox();
        let file = root.join("test.txt");
        fs::write(&file, "content").expect("should write test file");

        let result = tool_grep(
            "[invalid",
            file.to_str().expect("path should be valid UTF-8"),
            sandbox.as_ref(),
        );
        assert!(result.is_err());
        assert!(result
            .expect_err("should fail for invalid regex")
            .contains("invalid regex"));
    }

    #[test]
    fn grep_directory_recursive() {
        let (_td, root, sandbox) = test_sandbox();
        let sub = root.join("sub");
        fs::create_dir_all(&sub).expect("should create subdirectory");

        fs::write(root.join("a.txt"), "target line\nother").expect("should write a.txt");
        fs::write(sub.join("b.txt"), "no match\ntarget here").expect("should write b.txt");

        let matches = tool_grep(
            "target",
            root.to_str().expect("path should be valid UTF-8"),
            sandbox.as_ref(),
        )
        .expect("should find recursive grep matches");
        assert_eq!(matches.len(), 2);
    }

    #[test]
    fn grep_outside_root_rejected() {
        let (_td, _root, sandbox) = test_sandbox();
        let result = tool_grep("pattern", "/etc", sandbox.as_ref());
        assert!(result.is_err());
        assert!(result
            .expect_err("should deny grep outside root")
            .contains("access denied"));
    }

    // --- tool_glob ---

    #[test]
    fn glob_finds_files() {
        let (_td, root, sandbox) = test_sandbox();
        fs::write(root.join("a.txt"), "").expect("should write a.txt");
        fs::write(root.join("b.txt"), "").expect("should write b.txt");
        fs::write(root.join("c.rs"), "").expect("should write c.rs");

        let files = tool_glob(
            "*.txt",
            Some(root.to_str().expect("path should be valid UTF-8")),
            sandbox.as_ref(),
        )
        .expect("should find txt files via glob");
        assert_eq!(files.len(), 2);
    }

    #[test]
    fn glob_recursive() {
        let (_td, root, sandbox) = test_sandbox();
        let sub = root.join("sub");
        fs::create_dir_all(&sub).expect("should create subdirectory");
        fs::write(root.join("top.rs"), "").expect("should write top.rs");
        fs::write(sub.join("nested.rs"), "").expect("should write nested.rs");

        let files = tool_glob(
            "**/*.rs",
            Some(root.to_str().expect("path should be valid UTF-8")),
            sandbox.as_ref(),
        )
        .expect("should find rs files recursively");
        assert_eq!(files.len(), 2);
    }

    #[test]
    fn glob_no_matches() {
        let (_td, root, sandbox) = test_sandbox();
        let files = tool_glob(
            "*.xyz",
            Some(root.to_str().expect("path should be valid UTF-8")),
            sandbox.as_ref(),
        )
        .expect("should return empty matches for no-match glob");
        assert!(files.is_empty());
    }

    #[test]
    fn glob_invalid_pattern() {
        let (_td, root, sandbox) = test_sandbox();
        let result = tool_glob(
            "[invalid",
            Some(root.to_str().expect("path should be valid UTF-8")),
            sandbox.as_ref(),
        );
        assert!(result.is_err());
    }
1386
1387 #[test]
1388 fn glob_outside_root_rejected() {
1389 let (_td, _root, sandbox) = test_sandbox();
1390 let result = tool_glob("*", Some("/etc"), sandbox.as_ref());
1391 assert!(result.is_err());
1392 assert!(result
1393 .expect_err("should deny glob outside root")
1394 .contains("access denied"));
1395 }
1396
1397 #[test]
1398 fn glob_rejects_dotdot_in_pattern() {
1399 let (_td, _root, sandbox) = test_sandbox();
1400 let result = tool_glob("../../**/*", None, sandbox.as_ref());
1401 assert!(result.is_err());
1402 assert!(
1403 result
1404 .expect_err("should reject dotdot pattern")
1405 .contains("'..'"),
1406 "expected dotdot rejection"
1407 );
1408 }
1409
1410 #[test]
1413 fn grep_respects_depth_limit() {
1414 let (_td, root, sandbox) = test_sandbox();
1415
1416 let mut deep = root.clone();
1418 for i in 0..35 {
1419 deep = deep.join(format!("d{i}"));
1420 }
1421 fs::create_dir_all(&deep).expect("should create deep directory structure");
1422 fs::write(deep.join("deep.txt"), "needle").expect("should write deep file");
1423
1424 fs::write(root.join("shallow.txt"), "needle").expect("should write shallow file");
1426
1427 let matches = tool_grep(
1428 "needle",
1429 root.to_str().expect("path should be valid UTF-8"),
1430 sandbox.as_ref(),
1431 )
1432 .expect("should grep respecting depth limit");
1433 assert_eq!(matches.len(), 1);
1435 }
1436
1437 #[test]
1440 fn register_tools_in_lua() {
1441 let (_td, _root, sandbox) = test_sandbox();
1442 let lua = Lua::new();
1443 let orcs = lua.create_table().expect("should create orcs table");
1444 lua.globals()
1445 .set("orcs", orcs)
1446 .expect("should set orcs global");
1447
1448 register_tool_functions(&lua, sandbox).expect("should register tool functions");
1449
1450 let orcs: Table = lua
1451 .globals()
1452 .get("orcs")
1453 .expect("should get orcs table back");
1454 assert!(orcs.get::<mlua::Function>("read").is_ok());
1455 assert!(orcs.get::<mlua::Function>("write").is_ok());
1456 assert!(orcs.get::<mlua::Function>("grep").is_ok());
1457 assert!(orcs.get::<mlua::Function>("glob").is_ok());
1458 }
1459
1460 #[test]
1461 fn lua_read_file() {
1462 let (_td, root, sandbox) = test_sandbox();
1463 let file = root.join("lua_read.txt");
1464 fs::write(&file, "lua content").expect("should write lua read test file");
1465
1466 let lua = Lua::new();
1467 let orcs = lua.create_table().expect("should create orcs table");
1468 lua.globals()
1469 .set("orcs", orcs)
1470 .expect("should set orcs global");
1471 register_tool_functions(&lua, sandbox).expect("should register tool functions");
1472
1473 let code = format!(
1474 r#"return orcs.read("{}")"#,
1475 file.display().to_string().replace('\\', "\\\\")
1476 );
1477 let result: Table = lua.load(&code).eval().expect("should eval lua read");
1478 assert!(result.get::<bool>("ok").expect("should have ok field"));
1479 assert_eq!(
1480 result
1481 .get::<String>("content")
1482 .expect("should have content field"),
1483 "lua content"
1484 );
1485 assert_eq!(
1486 result.get::<u64>("size").expect("should have size field"),
1487 11
1488 );
1489 }
1490
1491 #[test]
1492 fn lua_write_file() {
1493 let (_td, root, sandbox) = test_sandbox();
1494 let file = root.join("lua_write.txt");
1495
1496 let lua = Lua::new();
1497 let orcs = lua.create_table().expect("should create orcs table");
1498 lua.globals()
1499 .set("orcs", orcs)
1500 .expect("should set orcs global");
1501 register_tool_functions(&lua, sandbox).expect("should register tool functions");
1502
1503 let code = format!(
1504 r#"return orcs.write("{}", "written from lua")"#,
1505 file.display().to_string().replace('\\', "\\\\")
1506 );
1507 let result: Table = lua.load(&code).eval().expect("should eval lua write");
1508 assert!(result.get::<bool>("ok").expect("should have ok field"));
1509 assert_eq!(
1510 fs::read_to_string(&file).expect("should read lua-written file"),
1511 "written from lua"
1512 );
1513 }
1514
1515 #[test]
1516 fn lua_grep_file() {
1517 let (_td, root, sandbox) = test_sandbox();
1518 let file = root.join("lua_grep.txt");
1519 fs::write(&file, "alpha\nbeta\nalpha_two").expect("should write grep test file");
1520
1521 let lua = Lua::new();
1522 let orcs = lua.create_table().expect("should create orcs table");
1523 lua.globals()
1524 .set("orcs", orcs)
1525 .expect("should set orcs global");
1526 register_tool_functions(&lua, sandbox).expect("should register tool functions");
1527
1528 let code = format!(
1529 r#"return orcs.grep("alpha", "{}")"#,
1530 file.display().to_string().replace('\\', "\\\\")
1531 );
1532 let result: Table = lua.load(&code).eval().expect("should eval lua grep");
1533 assert!(result.get::<bool>("ok").expect("should have ok field"));
1534 assert_eq!(
1535 result
1536 .get::<usize>("count")
1537 .expect("should have count field"),
1538 2
1539 );
1540 }
1541
1542 #[test]
1543 fn lua_glob_files() {
1544 let (_td, root, sandbox) = test_sandbox();
1545 fs::write(root.join("a.lua"), "").expect("should write a.lua");
1546 fs::write(root.join("b.lua"), "").expect("should write b.lua");
1547
1548 let lua = Lua::new();
1549 let orcs = lua.create_table().expect("should create orcs table");
1550 lua.globals()
1551 .set("orcs", orcs)
1552 .expect("should set orcs global");
1553 register_tool_functions(&lua, sandbox).expect("should register tool functions");
1554
1555 let code = format!(
1556 r#"return orcs.glob("*.lua", "{}")"#,
1557 root.display().to_string().replace('\\', "\\\\")
1558 );
1559 let result: Table = lua.load(&code).eval().expect("should eval lua glob");
1560 assert!(result.get::<bool>("ok").expect("should have ok field"));
1561 assert_eq!(
1562 result
1563 .get::<usize>("count")
1564 .expect("should have count field"),
1565 2
1566 );
1567 }
1568
1569 #[test]
1570 fn lua_read_nonexistent_returns_error() {
1571 let (_td, _root, sandbox) = test_sandbox();
1572 let lua = Lua::new();
1573 let orcs = lua.create_table().expect("should create orcs table");
1574 lua.globals()
1575 .set("orcs", orcs)
1576 .expect("should set orcs global");
1577 register_tool_functions(&lua, sandbox).expect("should register tool functions");
1578
1579 let result: Table = lua
1580 .load(r#"return orcs.read("nonexistent_file_xyz.txt")"#)
1581 .eval()
1582 .expect("should eval lua read for nonexistent file");
1583 assert!(!result.get::<bool>("ok").expect("should have ok field"));
1584 assert!(result.get::<String>("error").is_ok());
1585 }
1586
1587 #[test]
1588 fn lua_read_outside_sandbox_returns_error() {
1589 let (_td, _root, sandbox) = test_sandbox();
1590 let lua = Lua::new();
1591 let orcs = lua.create_table().expect("should create orcs table");
1592 lua.globals()
1593 .set("orcs", orcs)
1594 .expect("should set orcs global");
1595 register_tool_functions(&lua, sandbox).expect("should register tool functions");
1596
1597 let result: Table = lua
1598 .load(r#"return orcs.read("/etc/hosts")"#)
1599 .eval()
1600 .expect("should eval lua read for outside sandbox");
1601 assert!(!result.get::<bool>("ok").expect("should have ok field"));
1602 let error = result
1603 .get::<String>("error")
1604 .expect("should have error field");
1605 assert!(
1606 error.contains("access denied"),
1607 "expected 'access denied', got: {error}"
1608 );
1609 }
1610
1611 #[cfg(unix)]
1614 mod symlink_tests {
1615 use super::*;
1616 use std::os::unix::fs::symlink;
1617
1618 #[test]
1619 fn glob_skips_symlink_outside_sandbox() {
1620 let (_td, root, sandbox) = test_sandbox();
1621 let outside = tempfile::tempdir().expect("should create outside temp dir");
1622 let outside_canon = outside
1623 .path()
1624 .canonicalize()
1625 .expect("should canonicalize outside path");
1626 fs::write(outside_canon.join("leaked.txt"), "secret")
1627 .expect("should write leaked file");
1628 symlink(&outside_canon, root.join("escape")).expect("should create escape symlink");
1629 fs::write(root.join("ok.txt"), "safe").expect("should write ok file");
1630
1631 let files =
1632 tool_glob("**/*.txt", None, sandbox.as_ref()).expect("should glob without error");
1633 for f in &files {
1634 assert!(!f.contains("leaked"), "leaked file found: {f}");
1635 }
1636 assert_eq!(files.len(), 1, "only ok.txt should be found");
1637 }
1638
1639 #[test]
1640 fn grep_dir_skips_symlink_outside_sandbox() {
1641 let (_td, root, sandbox) = test_sandbox();
1642 let outside = tempfile::tempdir().expect("should create outside temp dir");
1643 let outside_canon = outside
1644 .path()
1645 .canonicalize()
1646 .expect("should canonicalize outside path");
1647 fs::write(outside_canon.join("secret.txt"), "password123")
1648 .expect("should write secret file");
1649 symlink(&outside_canon, root.join("escape")).expect("should create escape symlink");
1650 fs::write(root.join("ok.txt"), "password123").expect("should write ok file");
1651
1652 let matches = tool_grep(
1653 "password",
1654 root.to_str().expect("path should be valid UTF-8"),
1655 sandbox.as_ref(),
1656 )
1657 .expect("should grep without error");
1658 assert_eq!(matches.len(), 1, "symlinked outside file should be skipped");
1660 }
1661
1662 #[test]
1663 fn write_via_symlink_escape_rejected() {
1664 let (_td, root, sandbox) = test_sandbox();
1665 let outside = tempfile::tempdir().expect("should create outside temp dir");
1666 let outside_canon = outside
1667 .path()
1668 .canonicalize()
1669 .expect("should canonicalize outside path");
1670 symlink(&outside_canon, root.join("escape")).expect("should create escape symlink");
1671
1672 let result = tool_write(
1673 root.join("escape/evil.txt")
1674 .to_str()
1675 .expect("path should be valid UTF-8"),
1676 "evil",
1677 sandbox.as_ref(),
1678 );
1679 assert!(
1680 result.is_err(),
1681 "write via symlink escape should be rejected"
1682 );
1683 }
1684
1685 #[test]
1686 fn read_via_symlink_escape_rejected() {
1687 let (_td, root, sandbox) = test_sandbox();
1688 let outside = tempfile::tempdir().expect("should create outside temp dir");
1689 let outside_canon = outside
1690 .path()
1691 .canonicalize()
1692 .expect("should canonicalize outside path");
1693 fs::write(outside_canon.join("secret.txt"), "secret")
1694 .expect("should write secret file");
1695 symlink(&outside_canon, root.join("escape")).expect("should create escape symlink");
1696
1697 let result = tool_read(
1698 root.join("escape/secret.txt")
1699 .to_str()
1700 .expect("path should be valid UTF-8"),
1701 sandbox.as_ref(),
1702 );
1703 assert!(
1704 result.is_err(),
1705 "read via symlink escape should be rejected"
1706 );
1707 }
1708 }
1709
    /// Tests for the tool/hook integration: `wrap_tools_with_hooks` wraps each
    /// registered tool so `ToolPreExecute` hooks run before the tool body (and
    /// may abort or skip it) and `ToolPostExecute` hooks run after it (and may
    /// replace its result).
    mod tool_hook_tests {
        use super::*;
        use orcs_hook::{HookPoint, HookRegistry};
        use orcs_types::ComponentId;

        /// Build a Lua state with sandboxed tools registered and wrapped by
        /// the hook dispatcher.
        ///
        /// Returns the Lua state, the shared hook registry (tests add their
        /// hooks to it after setup), and the `TempDir` guard that keeps the
        /// sandbox root alive for the duration of the test.
        fn setup_lua_with_hooks() -> (Lua, orcs_hook::SharedHookRegistry, tempfile::TempDir) {
            let td = tempfile::tempdir().expect("should create temp dir for hooks");
            let root = td
                .path()
                .canonicalize()
                .expect("should canonicalize hook test root");
            let sandbox: Arc<dyn SandboxPolicy> =
                Arc::new(ProjectSandbox::new(&root).expect("should create hook sandbox"));

            let lua = Lua::new();
            let orcs = lua.create_table().expect("should create orcs table");
            lua.globals()
                .set("orcs", orcs)
                .expect("should set orcs global");
            register_tool_functions(&lua, sandbox).expect("should register tool functions");

            let registry = std::sync::Arc::new(std::sync::RwLock::new(HookRegistry::new()));
            let comp_id = ComponentId::builtin("test");

            // The wrapper reads this context from Lua app data at dispatch time;
            // without it, hooks are bypassed (see no_context_tools_work_normally).
            lua.set_app_data(ToolHookContext {
                registry: std::sync::Arc::clone(&registry),
                component_id: comp_id,
            });

            wrap_tools_with_hooks(&lua).expect("should wrap tools with hooks");

            (lua, registry, td)
        }

        /// Wrapping installs the internal dispatch entry point on `orcs`.
        #[test]
        fn dispatch_function_registered() {
            let (lua, _registry, _td) = setup_lua_with_hooks();
            let orcs: Table = lua.globals().get("orcs").expect("should get orcs table");
            assert!(orcs.get::<Function>("_dispatch_tool_hook").is_ok());
        }

        /// With an empty registry, wrapped tools behave exactly like the
        /// unwrapped ones.
        #[test]
        fn tools_work_normally_without_hooks() {
            let (lua, _registry, td) = setup_lua_with_hooks();
            let root = td.path().canonicalize().expect("should canonicalize root");
            fs::write(root.join("test.txt"), "hello").expect("should write test file");

            let code = format!(
                r#"return orcs.read("{}")"#,
                root.join("test.txt")
                    .display()
                    .to_string()
                    .replace('\\', "\\\\")
            );
            let result: Table = lua
                .load(&code)
                .eval()
                .expect("should eval read without hooks");
            assert!(result.get::<bool>("ok").expect("should have ok field"));
            assert_eq!(
                result
                    .get::<String>("content")
                    .expect("should have content field"),
                "hello"
            );
        }

        /// An aborting pre-execute hook turns the read into an error result
        /// carrying the hook's reason.
        #[test]
        fn pre_hook_abort_blocks_read() {
            let (lua, registry, td) = setup_lua_with_hooks();
            let root = td.path().canonicalize().expect("should canonicalize root");
            fs::write(root.join("secret.txt"), "top secret").expect("should write secret file");

            // Scope the write lock so it is released before the Lua call runs.
            {
                let mut guard = registry.write().expect("should acquire write lock");
                guard.register(Box::new(orcs_hook::testing::MockHook::aborter(
                    "block-read",
                    "*::*",
                    HookPoint::ToolPreExecute,
                    "access denied by policy",
                )));
            }

            let code = format!(
                r#"return orcs.read("{}")"#,
                root.join("secret.txt")
                    .display()
                    .to_string()
                    .replace('\\', "\\\\")
            );
            let result: Table = lua
                .load(&code)
                .eval()
                .expect("should eval read with abort hook");
            assert!(!result.get::<bool>("ok").expect("should have ok field"));
            let error = result
                .get::<String>("error")
                .expect("should have error field");
            assert!(
                error.contains("blocked by hook"),
                "expected 'blocked by hook', got: {error}"
            );
            assert!(error.contains("access denied by policy"));
        }

        /// A skipping pre-execute hook short-circuits the tool and returns the
        /// hook's custom value instead of reading the real file.
        #[test]
        fn pre_hook_skip_returns_custom_value() {
            let (lua, registry, td) = setup_lua_with_hooks();
            let root = td.path().canonicalize().expect("should canonicalize root");
            fs::write(root.join("real.txt"), "real content").expect("should write real file");

            {
                let mut guard = registry.write().expect("should acquire write lock");
                guard.register(Box::new(orcs_hook::testing::MockHook::skipper(
                    "skip-read",
                    "*::*",
                    HookPoint::ToolPreExecute,
                    serde_json::json!({"ok": true, "content": "cached", "size": 6}),
                )));
            }

            let code = format!(
                r#"return orcs.read("{}")"#,
                root.join("real.txt")
                    .display()
                    .to_string()
                    .replace('\\', "\\\\")
            );
            let result: Table = lua
                .load(&code)
                .eval()
                .expect("should eval read with skip hook");
            assert!(result.get::<bool>("ok").expect("should have ok field"));
            // "cached" comes from the hook, not the file on disk.
            assert_eq!(
                result
                    .get::<String>("content")
                    .expect("should have content field"),
                "cached"
            );
        }

        /// A pass-through pre-execute hook lets the tool run unchanged.
        #[test]
        fn pre_hook_continue_allows_tool() {
            let (lua, registry, td) = setup_lua_with_hooks();
            let root = td.path().canonicalize().expect("should canonicalize root");
            fs::write(root.join("allowed.txt"), "allowed content")
                .expect("should write allowed file");

            {
                let mut guard = registry.write().expect("should acquire write lock");
                guard.register(Box::new(orcs_hook::testing::MockHook::pass_through(
                    "pass-read",
                    "*::*",
                    HookPoint::ToolPreExecute,
                )));
            }

            let code = format!(
                r#"return orcs.read("{}")"#,
                root.join("allowed.txt")
                    .display()
                    .to_string()
                    .replace('\\', "\\\\")
            );
            let result: Table = lua
                .load(&code)
                .eval()
                .expect("should eval read with continue hook");
            assert!(result.get::<bool>("ok").expect("should have ok field"));
            assert_eq!(
                result
                    .get::<String>("content")
                    .expect("should have content field"),
                "allowed content"
            );
        }

        /// A replacing post-execute hook substitutes its own result for the
        /// tool's real output.
        #[test]
        fn post_hook_replace_changes_result() {
            let (lua, registry, td) = setup_lua_with_hooks();
            let root = td.path().canonicalize().expect("should canonicalize root");
            fs::write(root.join("original.txt"), "original").expect("should write original file");

            {
                let mut guard = registry.write().expect("should acquire write lock");
                guard.register(Box::new(orcs_hook::testing::MockHook::replacer(
                    "replace-result",
                    "*::*",
                    HookPoint::ToolPostExecute,
                    serde_json::json!({"ok": true, "content": "replaced", "size": 8}),
                )));
            }

            let code = format!(
                r#"return orcs.read("{}")"#,
                root.join("original.txt")
                    .display()
                    .to_string()
                    .replace('\\', "\\\\")
            );
            let result: Table = lua
                .load(&code)
                .eval()
                .expect("should eval read with replace hook");
            assert!(result.get::<bool>("ok").expect("should have ok field"));
            assert_eq!(
                result
                    .get::<String>("content")
                    .expect("should have content field"),
                "replaced"
            );
        }

        /// A pass-through post-execute hook observes without changing the
        /// tool's result.
        #[test]
        fn post_hook_continue_preserves_result() {
            let (lua, registry, td) = setup_lua_with_hooks();
            let root = td.path().canonicalize().expect("should canonicalize root");
            fs::write(root.join("keep.txt"), "keep this").expect("should write keep file");

            {
                let mut guard = registry.write().expect("should acquire write lock");
                guard.register(Box::new(orcs_hook::testing::MockHook::pass_through(
                    "observe-only",
                    "*::*",
                    HookPoint::ToolPostExecute,
                )));
            }

            let code = format!(
                r#"return orcs.read("{}")"#,
                root.join("keep.txt")
                    .display()
                    .to_string()
                    .replace('\\', "\\\\")
            );
            let result: Table = lua
                .load(&code)
                .eval()
                .expect("should eval read with observe hook");
            assert!(result.get::<bool>("ok").expect("should have ok field"));
            assert_eq!(
                result
                    .get::<String>("content")
                    .expect("should have content field"),
                "keep this"
            );
        }

        /// An aborting pre-execute hook also blocks writes — and the target
        /// file must not be created as a side effect.
        #[test]
        fn pre_hook_abort_blocks_write() {
            let (lua, registry, td) = setup_lua_with_hooks();
            let root = td.path().canonicalize().expect("should canonicalize root");

            {
                let mut guard = registry.write().expect("should acquire write lock");
                guard.register(Box::new(orcs_hook::testing::MockHook::aborter(
                    "block-write",
                    "*::*",
                    HookPoint::ToolPreExecute,
                    "writes disabled",
                )));
            }

            let code = format!(
                r#"return orcs.write("{}", "evil")"#,
                root.join("blocked.txt")
                    .display()
                    .to_string()
                    .replace('\\', "\\\\")
            );
            let result: Table = lua
                .load(&code)
                .eval()
                .expect("should eval write with abort hook");
            assert!(!result.get::<bool>("ok").expect("should have ok field"));
            let error = result
                .get::<String>("error")
                .expect("should have error field");
            assert!(error.contains("writes disabled"));

            // The abort must have prevented the file from being written.
            assert!(!root.join("blocked.txt").exists());
        }

        /// Hook payloads carry the tool name and its arguments, so hooks can
        /// make per-tool decisions.
        #[test]
        fn hooks_receive_tool_name_in_payload() {
            let (lua, registry, td) = setup_lua_with_hooks();
            let root = td.path().canonicalize().expect("should canonicalize root");
            fs::write(root.join("check.txt"), "data").expect("should write check file");

            {
                let mut guard = registry.write().expect("should acquire write lock");
                guard.register(Box::new(orcs_hook::testing::MockHook::modifier(
                    "check-tool",
                    "*::*",
                    HookPoint::ToolPreExecute,
                    |ctx| {
                        // Asserting inside the hook: a failure here panics the
                        // dispatch and fails the test.
                        assert!(ctx.payload.get("tool").is_some());
                        assert!(ctx.payload.get("args").is_some());
                    },
                )));
            }

            let code = format!(
                r#"return orcs.read("{}")"#,
                root.join("check.txt")
                    .display()
                    .to_string()
                    .replace('\\', "\\\\")
            );
            let result: Table = lua
                .load(&code)
                .eval()
                .expect("should eval read with modifier hook");
            assert!(result.get::<bool>("ok").expect("should have ok field"));
        }

        /// Wrapping without a `ToolHookContext` in app data leaves the tools
        /// fully functional — hook dispatch is simply bypassed.
        #[test]
        fn no_context_tools_work_normally() {
            let td = tempfile::tempdir().expect("should create temp dir");
            let root = td.path().canonicalize().expect("should canonicalize root");
            let sandbox: Arc<dyn SandboxPolicy> =
                Arc::new(ProjectSandbox::new(&root).expect("should create sandbox"));

            let lua = Lua::new();
            let orcs = lua.create_table().expect("should create orcs table");
            lua.globals()
                .set("orcs", orcs)
                .expect("should set orcs global");
            register_tool_functions(&lua, sandbox).expect("should register tool functions");

            // Note: no lua.set_app_data(ToolHookContext { .. }) here.
            wrap_tools_with_hooks(&lua).expect("should wrap tools with hooks");

            fs::write(root.join("nocontext.txt"), "works").expect("should write nocontext file");

            let code = format!(
                r#"return orcs.read("{}")"#,
                root.join("nocontext.txt")
                    .display()
                    .to_string()
                    .replace('\\', "\\\\")
            );
            let result: Table = lua
                .load(&code)
                .eval()
                .expect("should eval read without hook context");
            assert!(result.get::<bool>("ok").expect("should have ok field"));
            assert_eq!(
                result
                    .get::<String>("content")
                    .expect("should have content field"),
                "works"
            );
        }

        /// Abort hooks apply to glob too — all tools route through dispatch.
        #[test]
        fn pre_hook_abort_blocks_glob() {
            let (lua, registry, td) = setup_lua_with_hooks();
            let root = td.path().canonicalize().expect("should canonicalize root");
            fs::write(root.join("a.txt"), "").expect("should write test file");

            {
                let mut guard = registry.write().expect("should acquire write lock");
                guard.register(Box::new(orcs_hook::testing::MockHook::aborter(
                    "block-glob",
                    "*::*",
                    HookPoint::ToolPreExecute,
                    "glob not allowed",
                )));
            }

            let code = format!(
                r#"return orcs.glob("*.txt", "{}")"#,
                root.display().to_string().replace('\\', "\\\\")
            );
            let result: Table = lua
                .load(&code)
                .eval()
                .expect("should eval glob with abort hook");
            assert!(!result.get::<bool>("ok").expect("should have ok field"));
            let error = result
                .get::<String>("error")
                .expect("should have error field");
            assert!(error.contains("glob not allowed"));
        }
    }
2102
2103 }