1use anyhow::{Context, Result};
2use regex::Regex;
3use serde::Deserialize;
4use std::fs;
5use std::path::Path;
6use std::sync::LazyLock;
7
8fn never_match_regex() -> Regex {
13 Regex::new("$^").unwrap_or_else(|_| unreachable!("$^ is valid"))
14}
15
/// Matches a YAML "continuation" line: a newline, indentation, a bare `:`
/// and more whitespace (e.g. "\n  : more text"). Such lines are folded back
/// into the preceding scalar before deserialization.
static YAML_CONTINUATION_RE: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r"\n(\s+):(\s+)").unwrap_or_else(|_| never_match_regex()));

/// Captures (group 1) the body between the leading `---` fences of SKILL.md.
/// `(?s)` lets `.` span newlines; `.*?` keeps the match to the first fence pair.
static YAML_FRONT_MATTER_RE: LazyLock<Regex> = LazyLock::new(|| {
    Regex::new(r"(?s)^---\s*\n(.*?)\n---").unwrap_or_else(|_| never_match_regex())
});

/// Captures (group 1) the inner pattern of each `Bash(...)` entry in an
/// `allowed-tools` declaration, e.g. "agent-browser:*".
static ALLOWED_TOOLS_RE: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r"Bash\(([^)]+)\)").unwrap_or_else(|_| never_match_regex()));
/// Raw deserialization target for the YAML front matter of SKILL.md.
///
/// Every field is optional/defaulted so partially-filled front matter still
/// deserializes; validation (e.g. a non-empty `name`) happens after parsing.
#[derive(Deserialize, Debug, Clone, Default)]
#[allow(dead_code)]
struct FrontMatter {
    /// Skill name; empty when absent (rejected later by the parser).
    #[serde(default)]
    pub name: String,

    /// Human-readable description of the skill.
    #[serde(default)]
    pub description: Option<String>,

    /// License string, if declared.
    #[serde(default)]
    pub license: Option<String>,

    /// Free-text runtime requirements, e.g. "Requires Python 3.x, network".
    #[serde(default)]
    pub compatibility: Option<String>,

    /// Explicit entry-point path relative to the skill directory.
    #[serde(default)]
    pub entry_point: Option<String>,

    /// Arbitrary extra metadata (e.g. `version`, `capabilities`, `openclaw`).
    #[serde(default)]
    pub metadata: Option<serde_json::Value>,

    /// Tool allowlist in Claude style, e.g. "Bash(agent-browser:*)".
    #[serde(default, rename = "allowed-tools")]
    pub allowed_tools: Option<String>,

    /// Whether the skill asks for elevated permissions.
    #[serde(default, rename = "requires_elevated_permissions")]
    pub requires_elevated_permissions: Option<bool>,

    /// Explicit capability tags; inferred from keywords when empty.
    #[serde(default)]
    pub capabilities: Vec<String>,
}
73
/// One `Bash(<pattern>)` entry parsed from an `allowed-tools` declaration.
#[derive(Debug, Clone)]
pub struct BashToolPattern {
    /// Command name before the first ':' (e.g. "agent-browser" from
    /// "agent-browser:*").
    pub command_prefix: String,
    /// The full inner pattern exactly as written, e.g. "agent-browser:*".
    pub raw_pattern: String,
}
83
84pub fn parse_allowed_tools(raw: &str) -> Vec<BashToolPattern> {
91 let mut patterns = Vec::new();
92
93 for cap in ALLOWED_TOOLS_RE.captures_iter(raw) {
94 if let Some(inner) = cap.get(1) {
95 let pattern_str = inner.as_str().trim();
96 let command_prefix = if let Some(idx) = pattern_str.find(':') {
100 pattern_str[..idx].trim().to_string()
101 } else {
102 pattern_str.to_string()
103 };
104
105 if !command_prefix.is_empty() {
106 patterns.push(BashToolPattern {
107 command_prefix,
108 raw_pattern: pattern_str.to_string(),
109 });
110 }
111 }
112 }
113
114 patterns
115}
116
/// Resolved, validated metadata for one skill, derived from SKILL.md front
/// matter plus filesystem detection.
#[derive(Debug, Clone)]
pub struct SkillMetadata {
    /// Skill name (guaranteed non-empty by the parser).
    pub name: String,

    /// Script path relative to the skill dir; "" for bash-tool-only skills.
    pub entry_point: String,

    /// Runtime language ("python" / "node" / "bash") when inferable.
    pub language: Option<String>,

    /// Human-readable description from the front matter.
    pub description: Option<String>,

    /// Version string taken from `metadata.version`, when present.
    pub version: Option<String>,

    /// Compatibility text, possibly augmented with OpenClaw requirements.
    pub compatibility: Option<String>,

    /// Network policy inferred from the compatibility text.
    pub network: NetworkPolicy,

    /// Packages from a matching `.skilllite.lock`, when available.
    pub resolved_packages: Option<Vec<String>>,

    /// Raw `allowed-tools` declaration from the front matter.
    pub allowed_tools: Option<String>,

    /// Whether the skill declared elevated-permission needs.
    pub requires_elevated_permissions: bool,

    /// Capability tags (explicit, from metadata, or keyword-inferred).
    pub capabilities: Vec<String>,
}
159
160impl SkillMetadata {
161 pub fn is_bash_tool_skill(&self) -> bool {
166 self.allowed_tools.is_some() && self.entry_point.is_empty()
167 }
168
169 pub fn get_bash_patterns(&self) -> Vec<BashToolPattern> {
172 match &self.allowed_tools {
173 Some(raw) => parse_allowed_tools(raw),
174 None => Vec::new(),
175 }
176 }
177
178 pub fn uses_playwright(&self) -> bool {
180 if let Some(ref packages) = self.resolved_packages {
181 if packages
182 .iter()
183 .any(|p| p.to_lowercase().trim() == "playwright")
184 {
185 return true;
186 }
187 }
188 if let Some(ref compat) = self.compatibility {
189 if compat.to_lowercase().contains("playwright") {
190 return true;
191 }
192 }
193 false
194 }
195}
196
/// Network access policy inferred from the compatibility string.
/// Default is fully disabled with no outbound destinations.
#[derive(Debug, Clone, Default)]
pub struct NetworkPolicy {
    /// Whether outbound network access is granted at all.
    pub enabled: bool,

    /// Allowed outbound destinations; `["*"]` means unrestricted.
    pub outbound: Vec<String>,
}
207
208fn parse_compatibility_for_network(compatibility: Option<&str>) -> NetworkPolicy {
215 let Some(compat) = compatibility else {
216 return NetworkPolicy::default();
217 };
218
219 let compat_lower = compat.to_lowercase();
220
221 let needs_network = compat_lower.contains("network")
223 || compat_lower.contains("internet")
224 || compat_lower.contains("http")
225 || compat_lower.contains("api")
226 || compat_lower.contains("web")
227 || compat_lower.contains("网络")
229 || compat_lower.contains("联网")
230 || compat_lower.contains("网页")
231 || compat_lower.contains("在线");
232
233 if needs_network {
234 NetworkPolicy {
235 enabled: true,
236 outbound: vec!["*".to_string()],
239 }
240 } else {
241 NetworkPolicy::default()
242 }
243}
244
/// Guess the runtime language from the compatibility text.
///
/// Checked in priority order: Python first, then Node (node / javascript /
/// typescript), then bash/shell. Returns `None` when nothing matches or no
/// text was given.
fn parse_compatibility_for_language(compatibility: Option<&str>) -> Option<String> {
    let lowered = compatibility?.to_lowercase();

    let lang = if lowered.contains("python") {
        "python"
    } else if ["node", "javascript", "typescript"]
        .iter()
        .any(|kw| lowered.contains(kw))
    {
        "node"
    } else if lowered.contains("bash") || lowered.contains("shell") {
        "bash"
    } else {
        return None;
    };

    Some(lang.to_string())
}
267
/// Detect the skill's entry point by directory convention.
///
/// Priority: `scripts/main.{py,js,ts,sh}`, then `scripts/index.*`, then — if
/// exactly one non-test, non-hidden script remains in `scripts/` — that file.
/// Returns `None` when `scripts/` is missing or the choice is ambiguous.
fn detect_entry_point(skill_dir: &Path) -> Option<String> {
    let scripts_dir = skill_dir.join("scripts");
    if !scripts_dir.exists() {
        return None;
    }

    // Conventional entry names, most specific first.
    for stem in ["main", "index"] {
        for ext in [".py", ".js", ".ts", ".sh"] {
            let file_name = format!("{}{}", stem, ext);
            if scripts_dir.join(&file_name).exists() {
                return Some(format!("scripts/{}", file_name));
            }
        }
    }

    // Fall back: collect every plausible script and accept a unique result.
    let mut candidates: Vec<String> = Vec::new();
    if let Ok(entries) = fs::read_dir(&scripts_dir) {
        for entry in entries.flatten() {
            let path = entry.path();
            let is_script = path
                .extension()
                .and_then(|e| e.to_str())
                .is_some_and(|e| matches!(e, "py" | "js" | "ts" | "sh"));
            if !is_script {
                continue;
            }
            let name = path
                .file_name()
                .unwrap_or_default()
                .to_string_lossy()
                .to_string();
            // Skip test files, Python package markers and hidden files.
            let excluded = name.starts_with("test_")
                || name.ends_with("_test.py")
                || name == "__init__.py"
                || name.starts_with('.');
            if !excluded {
                candidates.push(format!("scripts/{}", name));
            }
        }
    }

    match candidates.as_slice() {
        [only] => Some(only.clone()),
        _ => None,
    }
}
320
/// Map an entry-point file suffix to its runtime language.
/// Unknown or missing suffixes yield `None`.
fn detect_language_from_entry_point(entry_point: &str) -> Option<String> {
    [
        (".py", "python"),
        (".js", "node"),
        (".ts", "node"),
        (".sh", "bash"),
    ]
    .iter()
    .find(|(suffix, _)| entry_point.ends_with(suffix))
    .map(|(_, lang)| (*lang).to_string())
}
333
334pub fn parse_skill_metadata(skill_dir: &Path) -> Result<SkillMetadata> {
336 let skill_md_path = skill_dir.join("SKILL.md");
337
338 if !skill_md_path.exists() {
339 anyhow::bail!("SKILL.md not found in directory: {}", skill_dir.display());
340 }
341
342 let content = fs::read_to_string(&skill_md_path)
343 .with_context(|| format!("Failed to read SKILL.md: {}", skill_md_path.display()))?;
344
345 extract_yaml_front_matter_with_detection(&content, skill_dir)
346}
347
348fn merge_openclaw_requires(
351 compat: Option<&str>,
352 metadata: Option<&serde_json::Value>,
353) -> Option<String> {
354 let openclaw = metadata
355 .and_then(|m| m.get("openclaw"))
356 .and_then(|o| o.get("requires"));
357 let Some(openclaw) = openclaw else {
358 return compat.map(String::from);
359 };
360
361 let mut adds = Vec::new();
362 if let Some(bins) = openclaw.get("bins").and_then(|v| v.as_array()) {
363 let s: Vec<_> = bins.iter().filter_map(|b| b.as_str()).collect();
364 if !s.is_empty() {
365 adds.push(format!("Requires bins: {}", s.join(", ")));
366 }
367 }
368 if let Some(env) = openclaw.get("env").and_then(|v| v.as_array()) {
369 let s: Vec<_> = env.iter().filter_map(|e| e.as_str()).collect();
370 if !s.is_empty() {
371 adds.push(format!("Requires env: {}", s.join(", ")));
372 }
373 }
374 if adds.is_empty() {
375 return compat.map(String::from);
376 }
377 let base = compat.unwrap_or("");
378 let merged = if base.is_empty() {
379 adds.join(". ")
380 } else {
381 format!("{}. {}", base, adds.join(". "))
382 };
383 Some(merged)
384}
385
/// Infer capability tags by keyword-scanning the compatibility text, skill
/// name and description (all lowercased into one haystack).
///
/// Returns a sorted, de-duplicated tag list.
fn infer_capabilities_from_compatibility(
    compatibility: &str,
    name: &str,
    description: &str,
) -> Vec<String> {
    // (keyword, tag) pairs; several keywords may map to the same tag.
    const RULES: [(&str, &str); 18] = [
        ("python", "python"),
        ("network", "web"),
        ("网络", "web"),
        ("http", "web"),
        ("internet", "web"),
        ("node.js", "node"),
        ("nodejs", "node"),
        ("playwright", "browser"),
        ("agent-browser", "browser"),
        ("chromium", "browser"),
        ("browser", "browser"),
        ("pandas", "data"),
        ("numpy", "data"),
        ("data-analysis", "data"),
        ("calculator", "calc"),
        ("计算", "calc"),
        ("arithmetic", "calc"),
        ("math", "calc"),
    ];

    let haystack = format!("{} {} {}", compatibility, name, description).to_lowercase();

    let mut tags: Vec<String> = RULES
        .iter()
        .filter(|(keyword, _)| haystack.contains(keyword))
        .map(|(_, tag)| (*tag).to_string())
        .collect();
    tags.sort();
    tags.dedup();
    tags
}
429
/// Test-only convenience: parse front matter without any filesystem probing
/// (no entry-point detection, no lock-file lookup).
#[cfg(test)]
fn extract_yaml_front_matter(content: &str) -> Result<SkillMetadata> {
    extract_yaml_front_matter_impl(content, None)
}
435
/// Parse front matter and additionally probe `skill_dir` to validate or
/// auto-detect the entry point and read the package lock file.
fn extract_yaml_front_matter_with_detection(
    content: &str,
    skill_dir: &Path,
) -> Result<SkillMetadata> {
    extract_yaml_front_matter_impl(content, Some(skill_dir))
}
443
/// Collapse YAML continuation lines ("\n  : text") into a single space so a
/// value wrapped across lines deserializes as one scalar.
fn normalize_yaml_continuation_lines(yaml: &str) -> String {
    YAML_CONTINUATION_RE.replace_all(yaml, " ").to_string()
}
449
/// Core front-matter parser shared by the production and test entry points.
///
/// Extracts the YAML between the `---` fences, deserializes it, then derives
/// the remaining [`SkillMetadata`] fields (entry point, language, network
/// policy, resolved packages, capabilities). When `skill_dir` is `Some`,
/// filesystem probing validates/detects the entry point and reads the lock
/// file; with `None` those steps are skipped.
///
/// # Errors
/// Fails when no front matter is present, the YAML does not deserialize, or
/// the mandatory `name` field is empty.
fn extract_yaml_front_matter_impl(
    content: &str,
    skill_dir: Option<&Path>,
) -> Result<SkillMetadata> {
    let captures = YAML_FRONT_MATTER_RE
        .captures(content)
        .ok_or_else(|| anyhow::anyhow!("No YAML front matter found in SKILL.md"))?;

    let yaml_content = captures
        .get(1)
        .ok_or_else(|| anyhow::anyhow!("Failed to extract YAML content"))?
        .as_str();

    // Fold continuation lines so multi-line scalars survive deserialization.
    let yaml_content = normalize_yaml_continuation_lines(yaml_content);

    let front_matter: FrontMatter =
        serde_yaml::from_str(&yaml_content).with_context(|| "Failed to parse YAML front matter")?;

    // Entry point: prefer an explicit `entry_point` that actually exists on
    // disk; otherwise fall back to directory-convention detection.
    let mut entry_point = String::new();
    if let Some(dir) = skill_dir {
        if let Some(ref ep) = front_matter.entry_point {
            let ep = ep.trim();
            if !ep.is_empty() && dir.join(ep).is_file() {
                entry_point = ep.to_string();
            }
        }
        if entry_point.is_empty() {
            if let Some(detected) = detect_entry_point(dir) {
                entry_point = detected;
            }
        }
    }

    // Fold OpenClaw `metadata.openclaw.requires` hints into the compatibility
    // string so all downstream inference sees a single source of truth.
    let compatibility = merge_openclaw_requires(
        front_matter.compatibility.as_deref(),
        front_matter.metadata.as_ref(),
    );

    // Language: compatibility keywords win over entry-point file suffix.
    let language = parse_compatibility_for_language(compatibility.as_deref())
        .or_else(|| detect_language_from_entry_point(&entry_point));

    let network = parse_compatibility_for_network(compatibility.as_deref());

    // Lock-file packages only count when the lock matches this compatibility.
    let resolved_packages =
        skill_dir.and_then(|dir| read_lock_file_packages(dir, compatibility.as_deref()));

    let requires_elevated = front_matter.requires_elevated_permissions.unwrap_or(false);

    // Capability priority: explicit `capabilities` list, then a non-empty
    // `metadata.capabilities` array, then keyword inference.
    let capabilities = if !front_matter.capabilities.is_empty() {
        front_matter.capabilities.clone()
    } else {
        front_matter
            .metadata
            .as_ref()
            .and_then(|m| m.get("capabilities"))
            .and_then(|v| v.as_array())
            .map(|arr| {
                arr.iter()
                    .filter_map(|v| v.as_str().map(String::from))
                    .collect()
            })
            .filter(|v: &Vec<String>| !v.is_empty())
            .unwrap_or_else(|| {
                infer_capabilities_from_compatibility(
                    compatibility.as_deref().unwrap_or(""),
                    &front_matter.name,
                    front_matter.description.as_deref().unwrap_or(""),
                )
            })
    };

    let metadata = SkillMetadata {
        name: front_matter.name.clone(),
        entry_point,
        language,
        description: front_matter.description.clone(),
        version: front_matter
            .metadata
            .as_ref()
            .and_then(|m| m.get("version"))
            .and_then(|v| v.as_str())
            .map(|s| s.to_string()),
        compatibility,
        network,
        resolved_packages,
        allowed_tools: front_matter.allowed_tools.clone(),
        requires_elevated_permissions: requires_elevated,
        capabilities,
    };

    // `name` is the only hard-required field.
    if metadata.name.is_empty() {
        anyhow::bail!("Skill name is required in SKILL.md");
    }

    Ok(metadata)
}
559
560fn read_lock_file_packages(skill_dir: &Path, compatibility: Option<&str>) -> Option<Vec<String>> {
565 let lock_path = skill_dir.join(".skilllite.lock");
566 let content = fs::read_to_string(&lock_path).ok()?;
567 let lock: serde_json::Value = serde_json::from_str(&content).ok()?;
568
569 use sha2::{Digest, Sha256};
571 let mut hasher = Sha256::new();
572 hasher.update(compatibility.unwrap_or("").as_bytes());
573 let current_hash = hex::encode(hasher.finalize());
574
575 if lock.get("compatibility_hash")?.as_str()? != current_hash {
576 return None; }
578
579 let arr = lock.get("resolved_packages")?.as_array()?;
580 let packages: Vec<String> = arr
581 .iter()
582 .filter_map(|v| v.as_str().map(String::from))
583 .collect();
584
585 if packages.is_empty() {
586 None
587 } else {
588 Some(packages)
589 }
590}
591
592pub fn detect_language(skill_dir: &Path, metadata: &SkillMetadata) -> String {
598 if let Some(ref lang) = metadata.language {
600 return lang.clone();
601 }
602
603 if metadata.entry_point.ends_with(".py") {
605 return "python".to_string();
606 }
607
608 if metadata.entry_point.ends_with(".js") || metadata.entry_point.ends_with(".ts") {
609 return "node".to_string();
610 }
611
612 if metadata.entry_point.ends_with(".sh") {
613 return "bash".to_string();
614 }
615
616 let scripts_dir = skill_dir.join("scripts");
618 if scripts_dir.exists() {
619 if let Ok(entries) = std::fs::read_dir(&scripts_dir) {
620 for entry in entries.flatten() {
621 let path = entry.path();
622 if let Some(ext) = path.extension() {
623 match ext.to_string_lossy().as_ref() {
624 "py" => return "python".to_string(),
625 "js" | "ts" => return "node".to_string(),
626 "sh" => return "bash".to_string(),
627 _ => {}
628 }
629 }
630 }
631 }
632 }
633
634 "python".to_string()
636}
637
// Unit tests. Front-matter fixtures are inline raw strings; the
// filesystem-dependent tests build throwaway skill dirs with `tempfile`.
#[cfg(test)]
mod tests {
    use super::*;

    // Keyword inference draws from compatibility, name and description.
    #[test]
    fn test_infer_capabilities_from_compatibility() {
        let caps = infer_capabilities_from_compatibility(
            "Requires Python 3.x, network access",
            "test",
            "",
        );
        assert!(caps.contains(&"python".to_string()));
        assert!(caps.contains(&"web".to_string()));

        let caps = infer_capabilities_from_compatibility("", "calculator", "");
        assert_eq!(caps, vec!["calc"]);

        let caps = infer_capabilities_from_compatibility("", "foo", "basic arithmetic operations");
        assert_eq!(caps, vec!["calc"]);
    }

    // A "\n : ..." continuation line must be folded into the description.
    #[test]
    fn test_parse_yaml_continuation_lines() {
        let content = r#"---
name: agent-browser
description: Browser automation CLI for AI agents.
 : Requires Node.js with agent-browser Use when the user needs to interact with websites.
allowed-tools: Bash(agent-browser:*)
---
"#;
        let metadata =
            extract_yaml_front_matter(content).expect("continuation lines should be normalized");
        assert_eq!(metadata.name, "agent-browser");
        assert!(metadata
            .description
            .as_ref()
            .expect("test skill has description")
            .contains("Browser automation CLI"));
        assert!(metadata
            .description
            .as_ref()
            .expect("test skill has description")
            .contains("Requires Node.js"));
    }

    // Compatibility text drives language, network policy and capabilities.
    #[test]
    fn test_parse_yaml_front_matter_with_compatibility() {
        let content = r#"---
name: test-skill
description: A test skill for testing
compatibility: Requires Python 3.x with requests library, network access
---

# Test Skill

This is a test skill.
"#;

        let metadata =
            extract_yaml_front_matter(content).expect("test YAML parsing should succeed");
        assert_eq!(metadata.name, "test-skill");
        assert_eq!(metadata.language, Some("python".to_string()));
        assert!(metadata.network.enabled);
        assert_eq!(metadata.network.outbound, vec!["*"]);
        assert!(metadata.capabilities.contains(&"python".to_string()));
        assert!(metadata.capabilities.contains(&"web".to_string()));
    }

    // English and Chinese network keywords enable the policy; others don't.
    #[test]
    fn test_parse_compatibility_for_network() {
        assert!(parse_compatibility_for_network(Some("Requires network access")).enabled);
        assert!(parse_compatibility_for_network(Some("Requires internet")).enabled);
        assert!(parse_compatibility_for_network(Some("Requires http client")).enabled);
        assert!(parse_compatibility_for_network(Some("Requires API access")).enabled);
        assert!(parse_compatibility_for_network(Some("Requires web access")).enabled);

        assert!(parse_compatibility_for_network(Some("需网络权限")).enabled);
        assert!(parse_compatibility_for_network(Some("Python 3.x,需网络权限")).enabled);
        assert!(parse_compatibility_for_network(Some("需要联网")).enabled);
        assert!(parse_compatibility_for_network(Some("需要网页访问")).enabled);
        assert!(parse_compatibility_for_network(Some("在线服务")).enabled);

        assert!(!parse_compatibility_for_network(Some("Requires git, docker")).enabled);
        assert!(!parse_compatibility_for_network(Some("Requires Python 3.x")).enabled);
        assert!(!parse_compatibility_for_network(None).enabled);
    }

    #[test]
    fn test_parse_compatibility_for_language() {
        assert_eq!(
            parse_compatibility_for_language(Some("Requires Python 3.x")),
            Some("python".to_string())
        );
        assert_eq!(
            parse_compatibility_for_language(Some("Requires Node.js")),
            Some("node".to_string())
        );
        assert_eq!(
            parse_compatibility_for_language(Some("Requires JavaScript")),
            Some("node".to_string())
        );
        assert_eq!(
            parse_compatibility_for_language(Some("Requires bash")),
            Some("bash".to_string())
        );
        assert_eq!(
            parse_compatibility_for_language(Some("Requires git, docker")),
            None
        );
        assert_eq!(parse_compatibility_for_language(None), None);
    }

    // Without network keywords the default (disabled) policy applies.
    #[test]
    fn test_default_network_policy() {
        let content = r#"---
name: simple-skill
description: A simple skill
---
"#;

        let metadata =
            extract_yaml_front_matter(content).expect("test YAML parsing should succeed");
        assert!(!metadata.network.enabled);
        assert!(metadata.network.outbound.is_empty());
    }

    #[test]
    fn test_parse_allowed_tools_single() {
        let patterns = parse_allowed_tools("Bash(agent-browser:*)");
        assert_eq!(patterns.len(), 1);
        assert_eq!(patterns[0].command_prefix, "agent-browser");
        assert_eq!(patterns[0].raw_pattern, "agent-browser:*");
    }

    #[test]
    fn test_parse_allowed_tools_multiple() {
        let patterns = parse_allowed_tools("Bash(agent-browser:*), Bash(npm:*)");
        assert_eq!(patterns.len(), 2);
        assert_eq!(patterns[0].command_prefix, "agent-browser");
        assert_eq!(patterns[1].command_prefix, "npm");
    }

    // Non-Bash tool names (Read, Edit) are ignored.
    #[test]
    fn test_parse_allowed_tools_mixed() {
        let patterns = parse_allowed_tools("Read, Edit, Bash(mycli:*)");
        assert_eq!(patterns.len(), 1);
        assert_eq!(patterns[0].command_prefix, "mycli");
    }

    // A colon-free pattern is its own command prefix.
    #[test]
    fn test_parse_allowed_tools_no_colon() {
        let patterns = parse_allowed_tools("Bash(simple-tool)");
        assert_eq!(patterns.len(), 1);
        assert_eq!(patterns[0].command_prefix, "simple-tool");
    }

    #[test]
    fn test_parse_allowed_tools_empty() {
        let patterns = parse_allowed_tools("Read, Edit");
        assert!(patterns.is_empty());
    }

    // allowed-tools with no entry point makes a "bash tool" skill.
    #[test]
    fn test_bash_tool_skill_yaml() {
        let content = r#"---
name: agent-browser
description: Headless browser automation for AI agents
allowed-tools: Bash(agent-browser:*)
---

# Agent Browser

Use agent-browser CLI to automate web browsing.
"#;

        let metadata =
            extract_yaml_front_matter(content).expect("bash tool skill YAML should parse");
        assert_eq!(metadata.name, "agent-browser");
        assert!(metadata.entry_point.is_empty());
        assert_eq!(
            metadata.allowed_tools,
            Some("Bash(agent-browser:*)".to_string())
        );
        assert!(metadata.is_bash_tool_skill());

        let patterns = metadata.get_bash_patterns();
        assert_eq!(patterns.len(), 1);
        assert_eq!(patterns[0].command_prefix, "agent-browser");
    }

    // No allowed-tools means not a bash-tool skill, even without scripts.
    #[test]
    fn test_not_bash_tool_skill_with_entry_point() {
        let content = r#"---
name: regular-skill
description: A regular skill with scripts
compatibility: Requires Python 3.x
---
"#;
        let metadata = extract_yaml_front_matter(content).expect("regular skill YAML should parse");
        assert!(!metadata.is_bash_tool_skill());
    }

    // openclaw.requires bins/env are folded into compatibility text.
    #[test]
    fn test_openclaw_metadata_merge() {
        let content = r#"---
name: nano-banana-pro
description: Generate or edit images via Gemini 3 Pro Image
metadata:
  openclaw:
    requires:
      bins: [uv]
      env: [GEMINI_API_KEY]
      config: [browser.enabled]
    primaryEnv: GEMINI_API_KEY
---
"#;
        let metadata =
            extract_yaml_front_matter(content).expect("OpenClaw format YAML should parse");
        assert_eq!(metadata.name, "nano-banana-pro");
        assert_eq!(
            metadata.compatibility.as_deref(),
            Some("Requires bins: uv. Requires env: GEMINI_API_KEY")
        );
    }

    // Existing compatibility text is kept and the merged parts appended.
    #[test]
    fn test_openclaw_metadata_merge_with_base_compatibility() {
        let content = r#"---
name: test-skill
description: Test
compatibility: Requires Python 3.x
metadata:
  openclaw:
    requires:
      bins: [uv]
      env: [API_KEY]
---
"#;
        let metadata = extract_yaml_front_matter(content)
            .expect("OpenClaw format with base compat should parse");
        assert_eq!(
            metadata.compatibility.as_deref(),
            Some("Requires Python 3.x. Requires bins: uv. Requires env: API_KEY")
        );
        assert_eq!(metadata.language, Some("python".to_string()));
    }

    // An explicit entry_point that exists on disk is used verbatim.
    #[test]
    fn test_entry_point_from_front_matter() {
        let dir = tempfile::tempdir().expect("temp dir");
        let skill_dir = dir.path();
        std::fs::create_dir_all(skill_dir.join("scripts")).expect("create scripts");
        std::fs::write(skill_dir.join("scripts/entry.py"), "").expect("write entry.py");
        let content = r#"---
name: my-skill
entry_point: scripts/entry.py
---

# Doc
"#;
        std::fs::write(skill_dir.join("SKILL.md"), content).expect("write SKILL.md");
        let meta = parse_skill_metadata(skill_dir).expect("parse skill metadata");
        assert_eq!(meta.entry_point, "scripts/entry.py");
    }

    // Without an explicit entry_point, scripts/main.* is auto-detected.
    #[test]
    fn test_entry_point_no_explicit_uses_directory_convention() {
        let dir = tempfile::tempdir().expect("temp dir");
        let skill_dir = dir.path();
        std::fs::create_dir_all(skill_dir.join("scripts")).expect("create scripts");
        std::fs::write(skill_dir.join("scripts/main.py"), "").expect("write main.py");
        let content = r#"---
name: my-skill
---
"#;
        std::fs::write(skill_dir.join("SKILL.md"), content).expect("write SKILL.md");
        let meta = parse_skill_metadata(skill_dir).expect("parse skill metadata");
        assert_eq!(meta.entry_point, "scripts/main.py");
    }
}