use std::collections::HashMap;
use std::fs;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use serde::Deserialize;
use super::manifest::{SkillSource, SkillsSource};
/// A skill compiled into the binary: a static key name plus the full
/// markdown source (frontmatter + body) as embedded at build time.
#[derive(Debug, Clone)]
pub struct BundledSkill {
    // Must match the `name` field inside `body`'s frontmatter; enforced in
    // `Registry::finalise`.
    pub name: &'static str,
    pub body: &'static str,
}
/// Parsed YAML frontmatter of a skill file.
///
/// Only `name` and `description` are required (enforced in `parse_skill`);
/// every other field has a default.
#[derive(Debug, Clone, Deserialize)]
pub struct SkillFrontmatter {
    /// Unique skill name; must be non-empty.
    #[serde(default)]
    pub name: String,
    /// Human-readable summary; must be non-empty.
    #[serde(default)]
    pub description: String,
    /// Optional compatibility map, e.g. `mcp_methods: ">=0.3.35"` (see the
    /// generated template in `render_skill_template`).
    #[serde(default)]
    pub applies_to: Option<HashMap<String, String>>,
    /// Tool names this skill references.
    #[serde(default)]
    pub references_tools: Vec<String>,
    /// Tool-argument references, e.g. `grep.pattern`.
    #[serde(default)]
    pub references_arguments: Vec<String>,
    /// Graph-property references, e.g. `Function.module`.
    #[serde(default)]
    pub references_properties: Vec<String>,
    /// Whether the skill may be hinted automatically; defaults to `true`.
    #[serde(default = "default_auto_inject_hint")]
    pub auto_inject_hint: bool,
    /// Optional activation predicate; absent means "always active".
    #[serde(default)]
    pub applies_when: Option<AppliesWhen>,
}

// Hand-written so that `SkillFrontmatter::default()` agrees with the serde
// defaults: the previously derived `Default` produced
// `auto_inject_hint = false`, contradicting `default_auto_inject_hint()`
// (= true) used during deserialization when the field is omitted.
impl Default for SkillFrontmatter {
    fn default() -> Self {
        Self {
            name: String::new(),
            description: String::new(),
            applies_to: None,
            references_tools: Vec::new(),
            references_arguments: Vec::new(),
            references_properties: Vec::new(),
            auto_inject_hint: default_auto_inject_hint(),
            applies_when: None,
        }
    }
}

/// Serde default for `auto_inject_hint`.
fn default_auto_inject_hint() -> bool {
    true
}
/// Conjunction of activation predicates: in
/// `ResolvedRegistry::activation_for` the skill is active only when every
/// clause that is present evaluates to `Satisfied`.
///
/// This type only derives `Deserialize`, so the former
/// `skip_serializing_if` attributes were dead configuration and have been
/// removed.
#[derive(Debug, Clone, Default, Deserialize, PartialEq, Eq)]
pub struct AppliesWhen {
    /// Graph contains at least one of these node types.
    #[serde(default)]
    pub graph_has_node_type: Option<Vec<String>>,
    /// Graph contains a specific property on a specific node type.
    #[serde(default)]
    pub graph_has_property: Option<GraphPropertyCheck>,
    /// The named tool is registered.
    #[serde(default)]
    pub tool_registered: Option<String>,
    /// The named extension key is present and truthy.
    #[serde(default)]
    pub extension_enabled: Option<String>,
}
/// Target of a `graph_has_property` clause: a property name on a node type.
#[derive(Debug, Clone, Deserialize, PartialEq, Eq)]
pub struct GraphPropertyCheck {
    pub node_type: String,
    pub prop_name: String,
}
/// Borrowed view of a single `applies_when` clause, passed to
/// `SkillPredicateEvaluator::evaluate` and `dispatch_clause`.
#[derive(Debug)]
pub enum PredicateClause<'a> {
    /// At least one of these node types exists in the graph.
    GraphHasNodeType(&'a [String]),
    /// The property `prop_name` exists on nodes of `node_type`.
    GraphHasProperty {
        node_type: &'a str,
        prop_name: &'a str,
    },
    /// The named tool is registered.
    ToolRegistered(&'a str),
    /// The named extension key is enabled.
    ExtensionEnabled(&'a str),
}
/// Result of evaluating one predicate clause.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum PredicateOutcome {
    /// The clause holds.
    Satisfied,
    /// The clause was checked and does not hold.
    Unsatisfied,
    /// No evaluator available (or it abstained). `activation_for` treats
    /// this the same as `Unsatisfied` for activation purposes.
    Unknown,
}
/// Outcome of evaluating a skill's `applies_when` block: the overall
/// activation flag plus a per-clause (label, outcome) breakdown for
/// diagnostics.
#[derive(Debug, Clone, Default)]
pub struct SkillActivation {
    pub active: bool,
    pub clauses: Vec<(String, PredicateOutcome)>,
}
/// Pluggable oracle for predicate clauses this module cannot answer locally
/// (the graph-shape clauses). Returning `None` means "cannot tell", which
/// surfaces as `PredicateOutcome::Unknown`.
pub trait SkillPredicateEvaluator: Send + Sync {
    fn evaluate(&self, clause: &PredicateClause<'_>) -> Option<bool>;
}
/// Which layer a resolved skill came from. Precedence in
/// `Registry::finalise` is project > domain pack > bundled.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum SkillProvenance {
    /// `<manifest-stem>.skills/` directory next to the manifest.
    Project,
    /// A directory declared in the manifest's skills sources.
    DomainPack(PathBuf),
    /// Compiled into the binary.
    Bundled,
}
/// A fully parsed skill: frontmatter, markdown body, and origin layer.
#[derive(Debug, Clone)]
pub struct Skill {
    pub frontmatter: SkillFrontmatter,
    pub body: String,
    pub provenance: SkillProvenance,
}
impl Skill {
    /// Skill name, taken from the parsed frontmatter.
    pub fn name(&self) -> &str {
        self.frontmatter.name.as_str()
    }

    /// Skill description, taken from the parsed frontmatter.
    pub fn description(&self) -> &str {
        self.frontmatter.description.as_str()
    }
}
/// Errors produced while loading, parsing, or resolving skills.
#[derive(Debug)]
pub enum SkillError {
    /// Filesystem failure at `path`.
    Io {
        path: PathBuf,
        source: std::io::Error,
    },
    /// File does not begin with a `---` frontmatter delimiter.
    MissingFrontmatter { path: PathBuf },
    /// Frontmatter is not valid YAML.
    InvalidFrontmatter { path: PathBuf, message: String },
    /// A required frontmatter field (`name` or `description`) is
    /// missing or empty.
    MissingRequiredField { path: PathBuf, field: &'static str },
    /// File exceeds `HARD_SIZE_LIMIT_BYTES`.
    SkillTooLarge {
        path: PathBuf,
        bytes: usize,
        limit: usize,
    },
    /// A configured skills path is not an existing directory.
    PathNotFound { raw: String, resolved: PathBuf },
    /// A compiled-in skill failed to parse or its frontmatter name
    /// mismatched its bundled key.
    BundledSkillInvalid { name: &'static str, message: String },
}
// Human-readable, single-line rendering per variant; paths are rendered via
// `Path::display()`.
impl std::fmt::Display for SkillError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            SkillError::Io { path, source } => {
                write!(f, "skill I/O error at {}: {source}", path.display())
            }
            SkillError::MissingFrontmatter { path } => write!(
                f,
                "skill at {} is missing the `---` YAML frontmatter delimiter at the start of the file",
                path.display()
            ),
            SkillError::InvalidFrontmatter { path, message } => {
                write!(
                    f,
                    "skill frontmatter at {} is not valid YAML: {message}",
                    path.display()
                )
            }
            SkillError::MissingRequiredField { path, field } => write!(
                f,
                "skill at {} is missing required frontmatter field `{field}`",
                path.display()
            ),
            SkillError::SkillTooLarge {
                path,
                bytes,
                limit,
            } => write!(
                f,
                "skill at {} is {bytes} bytes; exceeds the {limit} byte hard limit",
                path.display()
            ),
            SkillError::PathNotFound { raw, resolved } => write!(
                f,
                "skill path {raw:?} (resolved to {}) does not exist or is not a directory",
                resolved.display()
            ),
            SkillError::BundledSkillInvalid { name, message } => write!(
                f,
                "bundled skill `{name}` is malformed: {message}"
            ),
        }
    }
}
impl std::error::Error for SkillError {
    /// Only the `Io` variant wraps an underlying error.
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        if let SkillError::Io { source, .. } = self {
            Some(source)
        } else {
            None
        }
    }
}
/// Per-skill size above which a warning is logged (advisory only).
pub const SOFT_SIZE_LIMIT_BYTES: usize = 4 * 1024;
/// Per-skill size above which loading fails with `SkillTooLarge`.
pub const HARD_SIZE_LIMIT_BYTES: usize = 16 * 1024;
/// Combined resolved-body budget; exceeding it only warns (see `finalise`).
pub const SESSION_TOTAL_LIMIT_BYTES: usize = 64 * 1024;
/// Splits `content` into `(frontmatter, body)` at the YAML `---` fences.
///
/// The document must begin with `---\n` (or `---\r\n`). The closing fence is
/// the first `---` that both starts a line and is followed by a line break
/// (or end of input); a `---` embedded mid-line is skipped. Returns `None`
/// when either fence is absent.
fn split_frontmatter(content: &str) -> Option<(&str, &str)> {
    let rest = content
        .strip_prefix("---\n")
        .or_else(|| content.strip_prefix("---\r\n"))?;
    let mut from = 0;
    while let Some(found) = rest[from..].find("---") {
        let pos = from + found;
        let starts_line = pos == 0 || rest.as_bytes().get(pos - 1) == Some(&b'\n');
        let tail = &rest[pos + 3..];
        let ends_line = tail.is_empty() || tail.starts_with('\n') || tail.starts_with("\r\n");
        if starts_line && ends_line {
            // Skip the fence itself plus its trailing newline, if any.
            let skip = if tail.starts_with("\r\n") {
                5
            } else if tail.starts_with('\n') {
                4
            } else {
                3
            };
            return Some((&rest[..pos], &rest[pos + skip..]));
        }
        from = pos + 3;
    }
    None
}
/// Parses a skill document into its frontmatter and markdown body.
///
/// `path` is used for error reporting only.
///
/// # Errors
/// `MissingFrontmatter` when the `---` fences are absent,
/// `InvalidFrontmatter` when the YAML fails to parse, and
/// `MissingRequiredField` when `name` or `description` is empty.
pub fn parse_skill(content: &str, path: &Path) -> Result<(SkillFrontmatter, String), SkillError> {
    let Some((frontmatter_str, body)) = split_frontmatter(content) else {
        return Err(SkillError::MissingFrontmatter {
            path: path.to_path_buf(),
        });
    };
    let frontmatter: SkillFrontmatter = serde_yaml::from_str(frontmatter_str)
        .map_err(|e| SkillError::InvalidFrontmatter {
            path: path.to_path_buf(),
            message: e.to_string(),
        })?;
    // `name` is validated before `description`, matching the original
    // reporting order.
    let missing = if frontmatter.name.is_empty() {
        Some("name")
    } else if frontmatter.description.is_empty() {
        Some("description")
    } else {
        None
    };
    if let Some(field) = missing {
        return Err(SkillError::MissingRequiredField {
            path: path.to_path_buf(),
            field,
        });
    }
    Ok((frontmatter, body.to_owned()))
}
/// Reads and parses one skill file from disk.
///
/// The size limits are checked against the byte length of the whole file,
/// frontmatter included: above `HARD_SIZE_LIMIT_BYTES` loading fails, above
/// `SOFT_SIZE_LIMIT_BYTES` a warning is logged and loading continues.
///
/// # Errors
/// `SkillError::Io` on read failure, `SkillError::SkillTooLarge` over the
/// hard limit, plus any error from `parse_skill`.
pub fn load_skill_from_file(path: &Path, provenance: SkillProvenance) -> Result<Skill, SkillError> {
    let content = fs::read_to_string(path).map_err(|e| SkillError::Io {
        path: path.to_path_buf(),
        source: e,
    })?;
    if content.len() > HARD_SIZE_LIMIT_BYTES {
        return Err(SkillError::SkillTooLarge {
            path: path.to_path_buf(),
            bytes: content.len(),
            limit: HARD_SIZE_LIMIT_BYTES,
        });
    }
    // Soft limit is advisory only: warn and carry on.
    if content.len() > SOFT_SIZE_LIMIT_BYTES {
        tracing::warn!(
            path = %path.display(),
            bytes = content.len(),
            soft_limit = SOFT_SIZE_LIMIT_BYTES,
            "skill exceeds the soft size limit; consider splitting"
        );
    }
    let (frontmatter, body) = parse_skill(&content, path)?;
    Ok(Skill {
        frontmatter,
        body,
        provenance,
    })
}
/// Non-fatal problem encountered while loading a skills directory; the
/// offending file is skipped and loading continues.
#[derive(Debug, Clone)]
pub struct ParseWarning {
    pub path: PathBuf,
    pub error: String,
}
/// Loads every markdown skill file directly inside `dir` (non-recursive).
///
/// A missing directory is not an error: returns empty lists. Individual
/// entries or files that fail are skipped, logged, and surfaced as
/// `ParseWarning`s so one broken skill does not block the rest of the layer.
///
/// # Errors
/// Returns `SkillError::Io` only when the directory itself cannot be read.
pub fn load_skills_from_dir(
    dir: &Path,
    provenance: SkillProvenance,
) -> Result<(Vec<Skill>, Vec<ParseWarning>), SkillError> {
    if !dir.is_dir() {
        return Ok((Vec::new(), Vec::new()));
    }
    let entries = fs::read_dir(dir).map_err(|e| SkillError::Io {
        path: dir.to_path_buf(),
        source: e,
    })?;
    let mut skills = Vec::new();
    let mut warnings = Vec::new();
    for entry in entries {
        let entry = match entry {
            Ok(e) => e,
            Err(e) => {
                tracing::warn!(
                    dir = %dir.display(),
                    error = %e,
                    "failed to read directory entry; skipping"
                );
                warnings.push(ParseWarning {
                    path: dir.to_path_buf(),
                    error: format!("failed to read directory entry: {e}"),
                });
                continue;
            }
        };
        let path = entry.path();
        // Case-insensitive extension check, consistent with
        // `resolve_template_dest`; previously `e == "md"` silently skipped
        // `.MD`/`.Md` files written on case-insensitive filesystems.
        let is_markdown = path
            .extension()
            .map(|e| e.eq_ignore_ascii_case("md"))
            .unwrap_or(false);
        if is_markdown {
            match load_skill_from_file(&path, provenance.clone()) {
                Ok(skill) => skills.push(skill),
                Err(e) => {
                    tracing::warn!(
                        path = %path.display(),
                        error = %e,
                        "failed to load skill; skipping"
                    );
                    warnings.push(ParseWarning {
                        path: path.clone(),
                        error: e.to_string(),
                    });
                }
            }
        }
    }
    Ok((skills, warnings))
}
/// Resolves a manifest-declared skills path.
///
/// Absolute paths pass through unchanged; a `~/` prefix expands against the
/// `HOME` environment variable when it is set; everything else (including
/// `~/` with no `HOME`) is joined onto `manifest_dir`.
pub fn resolve_skill_path(raw: &str, manifest_dir: &Path) -> PathBuf {
    let candidate = Path::new(raw);
    if candidate.is_absolute() {
        return candidate.to_path_buf();
    }
    match raw.strip_prefix("~/") {
        Some(tail) => match std::env::var_os("HOME") {
            Some(home) => PathBuf::from(home).join(tail),
            // No HOME: fall back to manifest-relative resolution.
            None => manifest_dir.join(raw),
        },
        None => manifest_dir.join(raw),
    }
}
/// Conventional project skills directory for a manifest:
/// `<stem>.skills` next to the manifest file (e.g. `x.yaml` -> `x.skills`).
/// Falls back to the stem `manifest` when the path has no file stem.
pub fn project_skills_dir(yaml_path: &Path) -> PathBuf {
    let base = match yaml_path.file_stem() {
        Some(stem) => stem.to_string_lossy().into_owned(),
        None => String::from("manifest"),
    };
    yaml_path
        .parent()
        .unwrap_or(Path::new("."))
        .join(format!("{base}.skills"))
}
/// The framework's own bundled skills, delegated to
/// `server::bundled_skills_index`.
pub fn library_bundled_skills() -> Vec<BundledSkill> {
    crate::server::bundled_skills_index::library_bundled_skills()
}
/// Renders a starter skill document for `name`/`description`: valid required
/// frontmatter, commented-out optional fields, and a TODO-marked body
/// skeleton. The output round-trips through `parse_skill` (tested below).
//
// Note: the `\` line continuations inside the literal strip the source
// indentation, so every template line is emitted flush-left.
pub fn render_skill_template(name: &str, description: &str) -> String {
    format!(
        "---\n\
        name: {name}\n\
        description: {description}\n\
        # Optional mcp-methods extension fields (uncomment as needed):\n\
        # applies_to:\n\
        # mcp_methods: \">=0.3.35\"\n\
        # references_tools:\n\
        # - {name}\n\
        # references_arguments:\n\
        # - {name}.<arg_name>\n\
        # auto_inject_hint: true\n\
        ---\n\
        \n\
        # `{name}` methodology\n\
        \n\
        ## Overview\n\
        \n\
        <TODO: 2–3 sentences. What this skill enables, when to reach for it,\n\
        what comes before and after it in the typical workflow.>\n\
        \n\
        ## Quick Reference\n\
        \n\
        | Task | Approach |\n\
        |---|---|\n\
        | <TODO: common task A> | <TODO: one-line pattern> |\n\
        | <TODO: common task B> | <TODO: one-line pattern> |\n\
        \n\
        ## <TODO: Major topic>\n\
        \n\
        <TODO: concrete prose, code blocks, examples.>\n\
        \n\
        ## Common Pitfalls\n\
        \n\
        ❌ <TODO: specific anti-pattern, framed as a behaviour to avoid>\n\
        \n\
        ✅ <TODO: positive guidance, often a heuristic>\n\
        \n\
        ## When `{name}` is the wrong tool\n\
        \n\
        - **<TODO: scenario>** — use <other tool> because <reason>.\n"
    )
}
/// Renders and writes a skill template to `dest` (see
/// `resolve_template_dest` for how the final filename is chosen), creating
/// missing parent directories. Refuses to overwrite an existing file.
///
/// # Errors
/// `SkillError::Io` when the destination already exists
/// (`ErrorKind::AlreadyExists`), or on directory-creation/write failure.
pub fn write_skill_template(
    dest: &Path,
    name: &str,
    description: &str,
) -> Result<PathBuf, SkillError> {
    let target = resolve_template_dest(dest, name);
    if target.exists() {
        return Err(SkillError::Io {
            path: target.clone(),
            source: std::io::Error::new(
                std::io::ErrorKind::AlreadyExists,
                "destination already exists; delete it before re-running",
            ),
        });
    }
    // Create intermediate directories, but only when there is a real,
    // not-yet-existing parent component.
    match target.parent() {
        Some(parent) if !parent.as_os_str().is_empty() && !parent.exists() => {
            fs::create_dir_all(parent).map_err(|e| SkillError::Io {
                path: parent.to_path_buf(),
                source: e,
            })?;
        }
        _ => {}
    }
    let rendered = render_skill_template(name, description);
    fs::write(&target, rendered).map_err(|e| SkillError::Io {
        path: target.clone(),
        source: e,
    })?;
    Ok(target)
}
/// Chooses the concrete file path for a new skill template: an existing
/// directory or any path without a `.md`-ish extension gets `<name>.md`
/// appended; a path already ending in `.md` (any case) is used verbatim.
fn resolve_template_dest(dest: &Path, name: &str) -> PathBuf {
    let filename = format!("{name}.md");
    if dest.is_dir() {
        return dest.join(filename);
    }
    let is_markdown = matches!(
        dest.extension(),
        Some(ext) if ext.eq_ignore_ascii_case("md")
    );
    if is_markdown {
        dest.to_path_buf()
    } else {
        dest.join(filename)
    }
}
/// Builder for the layered skill set. Configure layers with the builder
/// methods, then call `finalise` to resolve precedence.
#[derive(Default)]
pub struct Registry {
    /// Compiled-in candidates; only visible after the manifest opts in.
    bundled: Vec<BundledSkill>,
    /// Root directory layers as (resolved path, raw manifest string).
    root_dirs: Vec<(PathBuf, String)>,
    /// Whether the manifest opted into the bundled layer.
    root_includes_bundled: bool,
    /// Auto-detected `<manifest-stem>.skills/` directory, if present.
    project_dir: Option<PathBuf>,
    /// Optional oracle for graph-shape predicate clauses.
    evaluator: Option<Arc<dyn SkillPredicateEvaluator>>,
}
// Manual `Debug`: `dyn SkillPredicateEvaluator` carries no `Debug` bound, so
// the evaluator is rendered as an opaque placeholder.
impl std::fmt::Debug for Registry {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("Registry")
            .field("bundled", &self.bundled)
            .field("root_dirs", &self.root_dirs)
            .field("root_includes_bundled", &self.root_includes_bundled)
            .field("project_dir", &self.project_dir)
            .field(
                "evaluator",
                &self
                    .evaluator
                    .as_ref()
                    .map(|_| "<dyn SkillPredicateEvaluator>"),
            )
            .finish()
    }
}
impl Registry {
    /// Creates an empty registry (no bundled skills, no layers).
    pub fn new() -> Self {
        Self::default()
    }

    /// Installs the evaluator consulted for predicate clauses this module
    /// cannot answer locally (the graph-shape clauses). Builder-style.
    pub fn with_predicate_evaluator(
        mut self,
        evaluator: impl SkillPredicateEvaluator + 'static,
    ) -> Self {
        self.evaluator = Some(Arc::new(evaluator));
        self
    }

    /// Adds one compiled-in skill candidate. It only becomes visible when
    /// the manifest opts into the bundled layer (see `layer_dirs`).
    pub fn add_bundled(mut self, skill: BundledSkill) -> Self {
        self.bundled.push(skill);
        self
    }

    /// Adds several compiled-in skill candidates at once.
    pub fn add_bundled_many(mut self, skills: impl IntoIterator<Item = BundledSkill>) -> Self {
        self.bundled.extend(skills);
        self
    }

    /// Adds the framework's own bundled skills as candidates.
    pub fn merge_framework_defaults(self) -> Self {
        let defaults = library_bundled_skills();
        self.add_bundled_many(defaults)
    }

    /// Applies the manifest's skills configuration.
    ///
    /// `Disabled` switches the bundled layer off. `Sources` enables the
    /// bundled layer when `SkillSource::Bundled` is listed and records each
    /// `SkillSource::Path` (resolved relative to the manifest's directory)
    /// as a root directory layer, in declaration order.
    ///
    /// # Errors
    /// `SkillError::PathNotFound` when a declared path is not an existing
    /// directory.
    pub fn layer_dirs(
        mut self,
        source: &SkillsSource,
        yaml_path: &Path,
    ) -> Result<Self, SkillError> {
        let manifest_dir = yaml_path.parent().unwrap_or_else(|| Path::new("."));
        match source {
            SkillsSource::Disabled => {
                self.root_includes_bundled = false;
            }
            SkillsSource::Sources(sources) => {
                for src in sources {
                    match src {
                        SkillSource::Bundled => {
                            self.root_includes_bundled = true;
                        }
                        SkillSource::Path(raw) => {
                            let resolved = resolve_skill_path(raw, manifest_dir);
                            if !resolved.is_dir() {
                                return Err(SkillError::PathNotFound {
                                    raw: raw.clone(),
                                    resolved,
                                });
                            }
                            // Keep the raw string alongside the resolved
                            // path: the raw form is what the manifest
                            // declared.
                            self.root_dirs.push((resolved, raw.clone()));
                        }
                    }
                }
            }
        }
        Ok(self)
    }

    /// Enables the project layer when `<manifest-stem>.skills/` exists next
    /// to the manifest file.
    pub fn auto_detect_project_layer(mut self, yaml_path: &Path) -> Self {
        let candidate = project_skills_dir(yaml_path);
        if candidate.is_dir() {
            self.project_dir = Some(candidate);
        }
        self
    }

    /// Resolves all layers into a flat name -> skill map.
    ///
    /// Precedence (lowest to highest): bundled, root directories (earlier
    /// declarations beat later ones), project directory. Name collisions
    /// across layers are logged with the winning provenance.
    ///
    /// # Errors
    /// `SkillError::BundledSkillInvalid` when a bundled skill fails to parse
    /// or its frontmatter name differs from its bundled key; I/O errors from
    /// reading layer directories are propagated.
    pub fn finalise(self) -> Result<ResolvedRegistry, SkillError> {
        let Self {
            bundled,
            root_dirs,
            root_includes_bundled,
            project_dir,
            evaluator,
        } = self;
        // Bundled skills participate only when the manifest opted in.
        let mut bundled_skills: Vec<Skill> = Vec::with_capacity(bundled.len());
        if root_includes_bundled {
            for b in &bundled {
                // Synthetic path used purely for error messages.
                let path = PathBuf::from(format!("<bundled:{}>", b.name));
                let (frontmatter, body) =
                    parse_skill(b.body, &path).map_err(|e| SkillError::BundledSkillInvalid {
                        name: b.name,
                        message: e.to_string(),
                    })?;
                if frontmatter.name != b.name {
                    return Err(SkillError::BundledSkillInvalid {
                        name: b.name,
                        message: format!(
                            "frontmatter name {:?} does not match the bundled key {:?}",
                            frontmatter.name, b.name
                        ),
                    });
                }
                bundled_skills.push(Skill {
                    frontmatter,
                    body,
                    provenance: SkillProvenance::Bundled,
                });
            }
        }
        let mut parse_warnings: Vec<ParseWarning> = Vec::new();
        let mut root_skills_per_dir: Vec<Vec<Skill>> = Vec::with_capacity(root_dirs.len());
        for (resolved, _raw) in &root_dirs {
            let provenance = SkillProvenance::DomainPack(resolved.clone());
            let (skills, warnings) = load_skills_from_dir(resolved, provenance)?;
            parse_warnings.extend(warnings);
            root_skills_per_dir.push(skills);
        }
        let project_skills: Vec<Skill> = match &project_dir {
            Some(dir) => {
                let (skills, warnings) = load_skills_from_dir(dir, SkillProvenance::Project)?;
                parse_warnings.extend(warnings);
                skills
            }
            None => Vec::new(),
        };
        // Later inserts overwrite earlier ones, so insertion order encodes
        // precedence: bundled first (weakest), then root dirs in REVERSE so
        // the first-declared directory is inserted last and wins among root
        // layers, then project last (strongest).
        let mut resolved: HashMap<String, Skill> = HashMap::new();
        let mut collisions: HashMap<String, Vec<SkillProvenance>> = HashMap::new();
        for skill in &bundled_skills {
            let name = skill.name().to_string();
            collisions
                .entry(name.clone())
                .or_default()
                .push(skill.provenance.clone());
            resolved.insert(name, skill.clone());
        }
        for skills in root_skills_per_dir.iter().rev() {
            for skill in skills {
                let name = skill.name().to_string();
                collisions
                    .entry(name.clone())
                    .or_default()
                    .push(skill.provenance.clone());
                resolved.insert(name, skill.clone());
            }
        }
        for skill in &project_skills {
            let name = skill.name().to_string();
            collisions
                .entry(name.clone())
                .or_default()
                .push(skill.provenance.clone());
            resolved.insert(name, skill.clone());
        }
        // Log every name that appeared in more than one layer, with the
        // provenance that won.
        for (name, candidates) in &collisions {
            if candidates.len() > 1 {
                let winner = resolved
                    .get(name)
                    .map(|s| format_provenance(&s.provenance))
                    .unwrap_or_else(|| "<none>".to_string());
                let all_candidates: Vec<String> =
                    candidates.iter().map(format_provenance).collect();
                tracing::info!(
                    skill = %name,
                    candidates = ?all_candidates,
                    winner = %winner,
                    "skill resolved across multiple layers"
                );
            }
        }
        // Advisory budget check on the combined body sizes; warn only.
        let total_bytes: usize = resolved.values().map(|s| s.body.len()).sum();
        if total_bytes > SESSION_TOTAL_LIMIT_BYTES {
            tracing::warn!(
                total_bytes,
                limit = SESSION_TOTAL_LIMIT_BYTES,
                skill_count = resolved.len(),
                "total resolved skill body size exceeds session limit; \
                consider trimming or splitting skills"
            );
        }
        Ok(ResolvedRegistry {
            skills: resolved,
            evaluator,
            parse_warnings,
        })
    }
}
/// Short human-readable label for a provenance, used in collision logs.
fn format_provenance(p: &SkillProvenance) -> String {
    match p {
        SkillProvenance::DomainPack(path) => format!("pack:{}", path.display()),
        SkillProvenance::Project => String::from("project"),
        SkillProvenance::Bundled => String::from("bundled"),
    }
}
/// Output of `Registry::finalise`: the final name -> skill map after layer
/// precedence, plus the warnings collected along the way.
#[derive(Default)]
pub struct ResolvedRegistry {
    skills: HashMap<String, Skill>,
    // Crate-visible so other server code can reuse the evaluator.
    pub(crate) evaluator: Option<Arc<dyn SkillPredicateEvaluator>>,
    parse_warnings: Vec<ParseWarning>,
}
// Manual `Debug` because `dyn SkillPredicateEvaluator` is not `Debug`; the
// evaluator is rendered as an opaque placeholder.
impl std::fmt::Debug for ResolvedRegistry {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("ResolvedRegistry")
            .field("skills", &self.skills)
            .field(
                "evaluator",
                &self
                    .evaluator
                    .as_ref()
                    .map(|_| "<dyn SkillPredicateEvaluator>"),
            )
            // Previously omitted: unlike `Registry`'s Debug (which prints
            // every field), warning state was invisible in debug output.
            .field("parse_warnings", &self.parse_warnings)
            .finish()
    }
}
impl ResolvedRegistry {
    /// All resolved skill names, sorted alphabetically.
    pub fn skill_names(&self) -> Vec<String> {
        let mut names: Vec<String> = self.skills.keys().cloned().collect();
        names.sort();
        names
    }

    /// Looks up a skill by name.
    pub fn get(&self, name: &str) -> Option<&Skill> {
        self.skills.get(name)
    }

    /// Iterates over all resolved (name, skill) pairs in arbitrary order.
    pub fn iter(&self) -> impl Iterator<Item = (&String, &Skill)> {
        self.skills.iter()
    }

    /// Number of resolved skills.
    pub fn len(&self) -> usize {
        self.skills.len()
    }

    /// True when no skills resolved.
    pub fn is_empty(&self) -> bool {
        self.skills.is_empty()
    }

    /// Warnings collected while loading the layer directories.
    pub fn parse_warnings(&self) -> &[ParseWarning] {
        &self.parse_warnings
    }

    /// Evaluates a skill's `applies_when` block.
    ///
    /// A skill without `applies_when` is unconditionally active. Otherwise
    /// every clause that is present is evaluated, and the skill is active
    /// only when all of them come back `Satisfied` (so `Unknown` also
    /// deactivates). The per-clause outcomes are returned for diagnostics.
    pub fn activation_for(
        &self,
        skill: &Skill,
        registered_tools: &std::collections::HashSet<String>,
        extensions: &serde_json::Map<String, serde_json::Value>,
    ) -> SkillActivation {
        let Some(applies_when) = skill.frontmatter.applies_when.as_ref() else {
            return SkillActivation {
                active: true,
                clauses: Vec::new(),
            };
        };
        let mut clauses = Vec::new();
        let mut all_satisfied = true;
        if let Some(types) = applies_when.graph_has_node_type.as_ref() {
            let clause = PredicateClause::GraphHasNodeType(types);
            let outcome = self.dispatch_clause(&clause, registered_tools, extensions);
            if outcome != PredicateOutcome::Satisfied {
                all_satisfied = false;
            }
            clauses.push((format!("graph_has_node_type: {types:?}"), outcome));
        }
        if let Some(prop) = applies_when.graph_has_property.as_ref() {
            let clause = PredicateClause::GraphHasProperty {
                node_type: &prop.node_type,
                prop_name: &prop.prop_name,
            };
            let outcome = self.dispatch_clause(&clause, registered_tools, extensions);
            if outcome != PredicateOutcome::Satisfied {
                all_satisfied = false;
            }
            clauses.push((
                format!("graph_has_property: {}.{}", prop.node_type, prop.prop_name),
                outcome,
            ));
        }
        if let Some(tool) = applies_when.tool_registered.as_ref() {
            let clause = PredicateClause::ToolRegistered(tool);
            let outcome = self.dispatch_clause(&clause, registered_tools, extensions);
            if outcome != PredicateOutcome::Satisfied {
                all_satisfied = false;
            }
            clauses.push((format!("tool_registered: {tool}"), outcome));
        }
        if let Some(key) = applies_when.extension_enabled.as_ref() {
            let clause = PredicateClause::ExtensionEnabled(key);
            let outcome = self.dispatch_clause(&clause, registered_tools, extensions);
            if outcome != PredicateOutcome::Satisfied {
                all_satisfied = false;
            }
            clauses.push((format!("extension_enabled: {key}"), outcome));
        }
        SkillActivation {
            active: all_satisfied,
            clauses,
        }
    }

    /// Resolves a single clause.
    ///
    /// Tool and extension clauses are answered locally from the supplied
    /// sets; the graph clauses fall through to the optional evaluator and
    /// come back `Unknown` when no evaluator is installed or it abstains.
    fn dispatch_clause(
        &self,
        clause: &PredicateClause<'_>,
        registered_tools: &std::collections::HashSet<String>,
        extensions: &serde_json::Map<String, serde_json::Value>,
    ) -> PredicateOutcome {
        match clause {
            PredicateClause::ToolRegistered(name) => {
                return if registered_tools.contains(*name) {
                    PredicateOutcome::Satisfied
                } else {
                    PredicateOutcome::Unsatisfied
                };
            }
            PredicateClause::ExtensionEnabled(key) => {
                // "Truthy" = key present, value neither null nor `false`.
                let truthy = extensions
                    .get(*key)
                    .map(|v| !v.is_null() && v != &serde_json::Value::Bool(false))
                    .unwrap_or(false);
                return if truthy {
                    PredicateOutcome::Satisfied
                } else {
                    PredicateOutcome::Unsatisfied
                };
            }
            // Graph clauses: handled below via the evaluator.
            _ => {}
        }
        match self.evaluator.as_ref().and_then(|e| e.evaluate(clause)) {
            Some(true) => PredicateOutcome::Satisfied,
            Some(false) => PredicateOutcome::Unsatisfied,
            None => PredicateOutcome::Unknown,
        }
    }
}
#[cfg(test)]
mod tests {
use super::*;
use std::io::Write;
// Writes `content` to `<dir>/<name>.md` and returns the path.
fn write_skill(dir: &Path, name: &str, content: &str) -> PathBuf {
    let path = dir.join(format!("{name}.md"));
    let mut f = fs::File::create(&path).unwrap();
    f.write_all(content.as_bytes()).unwrap();
    path
}
// Smallest valid skill document: required frontmatter plus a stub body.
fn minimal_skill(name: &str) -> String {
    format!(
        "---\nname: {name}\ndescription: A test skill named {name}.\n---\n\n# {name}\n\nBody.\n"
    )
}
// Happy path: fields parsed, body preserved (including the leading blank
// line after the closing fence), auto_inject_hint defaults to true.
#[test]
fn parse_frontmatter_basic() {
    let content = "---\nname: foo\ndescription: A foo skill.\n---\n\nBody here.\n";
    let path = PathBuf::from("test.md");
    let (fm, body) = parse_skill(content, &path).unwrap();
    assert_eq!(fm.name, "foo");
    assert_eq!(fm.description, "A foo skill.");
    assert_eq!(body, "\nBody here.\n");
    assert!(fm.auto_inject_hint, "auto_inject_hint defaults to true");
}
// A file without the leading `---` fence is rejected.
#[test]
fn parse_frontmatter_missing_delimiters_rejected() {
    let content = "name: foo\ndescription: bar\n";
    let path = PathBuf::from("test.md");
    let err = parse_skill(content, &path).unwrap_err();
    assert!(matches!(err, SkillError::MissingFrontmatter { .. }));
}
// Malformed YAML in the frontmatter surfaces as InvalidFrontmatter.
#[test]
fn parse_frontmatter_invalid_yaml_rejected() {
    let content = "---\nname: foo\n bad: yaml: nesting\n---\nbody\n";
    let path = PathBuf::from("test.md");
    let err = parse_skill(content, &path).unwrap_err();
    assert!(matches!(err, SkillError::InvalidFrontmatter { .. }));
}
// `name` is required.
#[test]
fn parse_frontmatter_missing_name_rejected() {
    let content = "---\ndescription: bar\n---\nbody\n";
    let path = PathBuf::from("test.md");
    let err = parse_skill(content, &path).unwrap_err();
    assert!(matches!(
        err,
        SkillError::MissingRequiredField { field: "name", .. }
    ));
}
// `description` is required.
#[test]
fn parse_frontmatter_missing_description_rejected() {
    let content = "---\nname: foo\n---\nbody\n";
    let path = PathBuf::from("test.md");
    let err = parse_skill(content, &path).unwrap_err();
    assert!(matches!(
        err,
        SkillError::MissingRequiredField {
            field: "description",
            ..
        }
    ));
}
// Exercises every optional frontmatter field in one document.
#[test]
fn parse_frontmatter_all_optional_fields() {
    let content = "---\n\
        name: foo\n\
        description: Full surface.\n\
        references_tools: [grep, list_source]\n\
        references_arguments: [grep.pattern]\n\
        references_properties: [Function.module]\n\
        auto_inject_hint: false\n\
        applies_to:\n mcp_methods: \">=0.3.35\"\n\
        ---\n\
        Body.\n";
    let path = PathBuf::from("test.md");
    let (fm, _) = parse_skill(content, &path).unwrap();
    assert_eq!(fm.references_tools, vec!["grep", "list_source"]);
    assert_eq!(fm.references_arguments, vec!["grep.pattern"]);
    assert_eq!(fm.references_properties, vec!["Function.module"]);
    assert!(!fm.auto_inject_hint);
    assert_eq!(
        fm.applies_to.unwrap().get("mcp_methods"),
        Some(&">=0.3.35".to_string())
    );
}
// Round-trips a minimal skill through the filesystem loader.
#[test]
fn load_skill_from_file_basic() {
    let dir = tempfile::tempdir().unwrap();
    let path = write_skill(dir.path(), "foo", &minimal_skill("foo"));
    let skill = load_skill_from_file(&path, SkillProvenance::Project).unwrap();
    assert_eq!(skill.name(), "foo");
    assert_eq!(skill.provenance, SkillProvenance::Project);
}
// Files over HARD_SIZE_LIMIT_BYTES are rejected outright.
#[test]
fn load_skill_too_large_rejected() {
    let dir = tempfile::tempdir().unwrap();
    let big_body = "x".repeat(HARD_SIZE_LIMIT_BYTES + 100);
    let content = format!("---\nname: big\ndescription: too big.\n---\n{big_body}");
    let path = write_skill(dir.path(), "big", &content);
    let err = load_skill_from_file(&path, SkillProvenance::Project).unwrap_err();
    assert!(matches!(err, SkillError::SkillTooLarge { .. }));
}
// Only top-level markdown files are picked up; other extensions and
// subdirectories are ignored (the walk is non-recursive).
#[test]
fn load_skills_from_dir_walks_markdown_only() {
    let dir = tempfile::tempdir().unwrap();
    write_skill(dir.path(), "a", &minimal_skill("a"));
    write_skill(dir.path(), "b", &minimal_skill("b"));
    fs::write(dir.path().join("readme.txt"), "not a skill").unwrap();
    let sub = dir.path().join("sub");
    fs::create_dir(&sub).unwrap();
    write_skill(&sub, "c", &minimal_skill("c"));
    let (skills, warnings) =
        load_skills_from_dir(dir.path(), SkillProvenance::Project).unwrap();
    assert_eq!(skills.len(), 2);
    assert!(warnings.is_empty());
    let mut names: Vec<&str> = skills.iter().map(|s| s.name()).collect();
    names.sort();
    assert_eq!(names, vec!["a", "b"]);
}
// A nonexistent directory is not an error: empty results.
#[test]
fn load_skills_from_dir_missing_returns_empty() {
    let dir = tempfile::tempdir().unwrap();
    let nonexistent = dir.path().join("does-not-exist");
    let (skills, warnings) =
        load_skills_from_dir(&nonexistent, SkillProvenance::Project).unwrap();
    assert!(skills.is_empty());
    assert!(warnings.is_empty());
}
// One broken file (invalid YAML: nested mapping value) becomes a warning
// and does not block the good file.
#[test]
fn load_skills_from_dir_surfaces_yaml_parse_failure_as_warning() {
    let dir = tempfile::tempdir().unwrap();
    write_skill(dir.path(), "good", &minimal_skill("good"));
    write_skill(
        dir.path(),
        "broken",
        "---\nname: broken\ndescription: First clause: second clause\n---\n# body\n",
    );
    let (skills, warnings) =
        load_skills_from_dir(dir.path(), SkillProvenance::Project).unwrap();
    assert_eq!(skills.len(), 1, "the good skill should still load");
    assert_eq!(skills[0].name(), "good");
    assert_eq!(
        warnings.len(),
        1,
        "the broken file should surface as a warning"
    );
    assert!(warnings[0].path.ends_with("broken.md"));
    assert!(!warnings[0].error.is_empty());
}
// Warnings from the auto-detected project layer are carried through
// `finalise` into `ResolvedRegistry::parse_warnings`.
#[test]
fn resolved_registry_parse_warnings_propagated_from_project_layer() {
    let dir = tempfile::tempdir().unwrap();
    let yaml = dir.path().join("test_mcp.yaml");
    fs::write(&yaml, "name: t\nskills: true\n").unwrap();
    let skills_dir = dir.path().join("test_mcp.skills");
    fs::create_dir(&skills_dir).unwrap();
    write_skill(&skills_dir, "good", &minimal_skill("good"));
    write_skill(
        &skills_dir,
        "broken",
        "---\nname: broken\ndescription: bad\nstill in frontmatter\n",
    );
    let registry = Registry::new()
        .auto_detect_project_layer(&yaml)
        .finalise()
        .unwrap();
    assert_eq!(registry.len(), 1, "good skill resolved");
    assert!(registry.get("good").is_some());
    let warnings = registry.parse_warnings();
    assert_eq!(warnings.len(), 1);
    assert!(warnings[0].path.ends_with("broken.md"));
}
// Relative paths (with or without `./`) are joined onto the manifest dir.
#[test]
fn resolve_skill_path_relative() {
    let manifest_dir = Path::new("/a/b");
    assert_eq!(
        resolve_skill_path("./skills", manifest_dir),
        PathBuf::from("/a/b/./skills")
    );
    assert_eq!(
        resolve_skill_path("skills", manifest_dir),
        PathBuf::from("/a/b/skills")
    );
}
// Absolute paths pass through unchanged.
#[test]
fn resolve_skill_path_absolute() {
    let manifest_dir = Path::new("/a/b");
    assert_eq!(
        resolve_skill_path("/abs/skills", manifest_dir),
        PathBuf::from("/abs/skills")
    );
}
// `~/` expands against HOME. NOTE(review): mutating HOME is process-global;
// this can race with other tests reading the variable.
#[test]
fn resolve_skill_path_home_relative() {
    let manifest_dir = Path::new("/a/b");
    unsafe {
        std::env::set_var("HOME", "/home/test");
    }
    assert_eq!(
        resolve_skill_path("~/skills", manifest_dir),
        PathBuf::from("/home/test/skills")
    );
}
// `<stem>.yaml` maps to a sibling `<stem>.skills` directory.
#[test]
fn project_skills_dir_naming() {
    assert_eq!(
        project_skills_dir(Path::new("/a/b/legal_mcp.yaml")),
        PathBuf::from("/a/b/legal_mcp.skills")
    );
    assert_eq!(
        project_skills_dir(Path::new("workspace_mcp.yaml")),
        PathBuf::from("workspace_mcp.skills")
    );
}
// `skills: disabled` with no project dir resolves to nothing.
#[test]
fn registry_disabled_resolves_empty() {
    let dir = tempfile::tempdir().unwrap();
    let yaml = dir.path().join("test_mcp.yaml");
    fs::write(&yaml, "name: x\n").unwrap();
    let registry = Registry::new()
        .layer_dirs(&SkillsSource::Disabled, &yaml)
        .unwrap()
        .auto_detect_project_layer(&yaml)
        .finalise()
        .unwrap();
    assert!(registry.is_empty());
}
// Bundled candidates are invisible unless `SkillSource::Bundled` is listed.
// (Box::leak fabricates the 'static body; leaking is fine in a test.)
#[test]
fn registry_add_bundled_only_visible_when_opted_in() {
    let dir = tempfile::tempdir().unwrap();
    let yaml = dir.path().join("test_mcp.yaml");
    fs::write(&yaml, "name: x\n").unwrap();
    let bundled = BundledSkill {
        name: "foo",
        body: Box::leak(minimal_skill("foo").into_boxed_str()),
    };
    let registry = Registry::new()
        .add_bundled(bundled.clone())
        .layer_dirs(&SkillsSource::Disabled, &yaml)
        .unwrap()
        .finalise()
        .unwrap();
    assert!(registry.is_empty(), "disabled must short-circuit bundled");
    let registry = Registry::new()
        .add_bundled(bundled)
        .layer_dirs(&SkillsSource::Sources(vec![SkillSource::Bundled]), &yaml)
        .unwrap()
        .finalise()
        .unwrap();
    assert_eq!(registry.len(), 1);
    assert!(registry.get("foo").is_some());
    assert_eq!(
        registry.get("foo").unwrap().provenance,
        SkillProvenance::Bundled
    );
}
// Project layer beats bundled for the same name.
#[test]
fn registry_three_layer_resolution_project_wins_over_bundled() {
    let dir = tempfile::tempdir().unwrap();
    let yaml = dir.path().join("test_mcp.yaml");
    fs::write(&yaml, "name: x\n").unwrap();
    let bundled = BundledSkill {
        name: "foo",
        body: "---\nname: foo\ndescription: from bundled.\n---\nbundled body\n",
    };
    let project_dir = dir.path().join("test_mcp.skills");
    fs::create_dir(&project_dir).unwrap();
    fs::write(
        project_dir.join("foo.md"),
        "---\nname: foo\ndescription: from project.\n---\nproject body\n",
    )
    .unwrap();
    let registry = Registry::new()
        .add_bundled(bundled)
        .layer_dirs(&SkillsSource::Sources(vec![SkillSource::Bundled]), &yaml)
        .unwrap()
        .auto_detect_project_layer(&yaml)
        .finalise()
        .unwrap();
    assert_eq!(registry.len(), 1);
    let skill = registry.get("foo").unwrap();
    assert_eq!(skill.description(), "from project.");
    assert_eq!(skill.provenance, SkillProvenance::Project);
}
// Among root directory layers, the first-declared directory wins.
#[test]
fn registry_root_layer_first_declaration_wins() {
    let dir = tempfile::tempdir().unwrap();
    let yaml = dir.path().join("test_mcp.yaml");
    fs::write(&yaml, "name: x\n").unwrap();
    let primary = dir.path().join("primary");
    fs::create_dir(&primary).unwrap();
    fs::write(
        primary.join("foo.md"),
        "---\nname: foo\ndescription: from primary.\n---\nprimary body\n",
    )
    .unwrap();
    let secondary = dir.path().join("secondary");
    fs::create_dir(&secondary).unwrap();
    fs::write(
        secondary.join("foo.md"),
        "---\nname: foo\ndescription: from secondary.\n---\nsecondary body\n",
    )
    .unwrap();
    let registry = Registry::new()
        .layer_dirs(
            &SkillsSource::Sources(vec![
                SkillSource::Path("./primary".into()),
                SkillSource::Path("./secondary".into()),
            ]),
            &yaml,
        )
        .unwrap()
        .finalise()
        .unwrap();
    assert_eq!(registry.len(), 1);
    assert_eq!(registry.get("foo").unwrap().description(), "from primary.");
}
// A declared path that is not a directory fails fast in `layer_dirs`.
#[test]
fn registry_root_layer_nonexistent_path_rejected() {
    let dir = tempfile::tempdir().unwrap();
    let yaml = dir.path().join("test_mcp.yaml");
    fs::write(&yaml, "name: x\n").unwrap();
    let err = Registry::new()
        .layer_dirs(
            &SkillsSource::Sources(vec![SkillSource::Path("./does-not-exist".into())]),
            &yaml,
        )
        .unwrap_err();
    assert!(matches!(err, SkillError::PathNotFound { .. }));
}
// An empty sources list still allows the project layer to resolve.
#[test]
fn registry_empty_list_opts_in_without_root_sources() {
    let dir = tempfile::tempdir().unwrap();
    let yaml = dir.path().join("test_mcp.yaml");
    fs::write(&yaml, "name: x\n").unwrap();
    let project_dir = dir.path().join("test_mcp.skills");
    fs::create_dir(&project_dir).unwrap();
    fs::write(project_dir.join("only.md"), minimal_skill("only")).unwrap();
    let registry = Registry::new()
        .layer_dirs(&SkillsSource::Sources(vec![]), &yaml)
        .unwrap()
        .auto_detect_project_layer(&yaml)
        .finalise()
        .unwrap();
    assert_eq!(registry.len(), 1);
    assert_eq!(
        registry.get("only").unwrap().provenance,
        SkillProvenance::Project
    );
}
// A bundled skill whose frontmatter name differs from its key is rejected
// at finalise time.
#[test]
fn registry_bundled_name_mismatch_rejected_at_finalise() {
    let dir = tempfile::tempdir().unwrap();
    let yaml = dir.path().join("test_mcp.yaml");
    fs::write(&yaml, "name: x\n").unwrap();
    let bundled = BundledSkill {
        name: "foo",
        body: Box::leak(
            "---\nname: bar\ndescription: mismatch.\n---\nbody\n"
                .to_string()
                .into_boxed_str(),
        ),
    };
    let err = Registry::new()
        .add_bundled(bundled)
        .layer_dirs(&SkillsSource::Sources(vec![SkillSource::Bundled]), &yaml)
        .unwrap()
        .finalise()
        .unwrap_err();
    assert!(matches!(err, SkillError::BundledSkillInvalid { .. }));
}
// The framework ships at least one default bundled skill.
#[test]
fn registry_library_bundled_skills_returns_vec() {
    let skills = library_bundled_skills();
    assert!(
        !skills.is_empty(),
        "library_bundled_skills should return framework defaults from Phase 1d onward"
    );
}
// `skill_names` returns names in sorted order regardless of load order.
#[test]
fn registry_skill_names_sorted() {
    let dir = tempfile::tempdir().unwrap();
    let yaml = dir.path().join("test_mcp.yaml");
    fs::write(&yaml, "name: x\n").unwrap();
    let pack = dir.path().join("pack");
    fs::create_dir(&pack).unwrap();
    fs::write(pack.join("zeta.md"), minimal_skill("zeta")).unwrap();
    fs::write(pack.join("alpha.md"), minimal_skill("alpha")).unwrap();
    fs::write(pack.join("mu.md"), minimal_skill("mu")).unwrap();
    let registry = Registry::new()
        .layer_dirs(
            &SkillsSource::Sources(vec![SkillSource::Path("./pack".into())]),
            &yaml,
        )
        .unwrap()
        .finalise()
        .unwrap();
    assert_eq!(registry.skill_names(), vec!["alpha", "mu", "zeta"]);
}
// The generated template must itself pass `parse_skill`.
#[test]
fn render_skill_template_is_parse_valid() {
    let body = render_skill_template("custom_method", "A test description for the skill.");
    let (fm, _body) =
        parse_skill(&body, &PathBuf::from("test.md")).expect("rendered template must parse");
    assert_eq!(fm.name, "custom_method");
    assert_eq!(fm.description, "A test description for the skill.");
}
// The skill name is interpolated into the markdown headings.
#[test]
fn render_skill_template_substitutes_name_into_body_headings() {
    let body = render_skill_template("my_skill", "desc");
    assert!(body.contains("# `my_skill` methodology"));
    assert!(body.contains("## When `my_skill` is the wrong tool"));
}
// An existing directory destination gets `<name>.md` appended.
#[test]
fn write_skill_template_writes_into_directory() {
    let dir = tempfile::tempdir().unwrap();
    let dest = write_skill_template(dir.path(), "alpha", "First skill.").unwrap();
    assert_eq!(dest, dir.path().join("alpha.md"));
    let content = fs::read_to_string(&dest).unwrap();
    assert!(content.contains("name: alpha"));
}
#[test]
fn write_skill_template_writes_to_explicit_md_path() {
    // An explicit `.md` destination is used verbatim rather than being
    // treated as a directory to place `<name>.md` into.
    let tmp = tempfile::tempdir().unwrap();
    let target = tmp.path().join("renamed.md");
    let written = write_skill_template(&target, "alpha", "First skill.").unwrap();
    assert_eq!(written, target);
    assert!(target.is_file());
}
#[test]
fn write_skill_template_creates_missing_parents() {
    // Missing intermediate directories are created on demand.
    let tmp = tempfile::tempdir().unwrap();
    let deep = tmp.path().join("a/b/c");
    let written = write_skill_template(&deep, "alpha", "First skill.").unwrap();
    assert_eq!(written, deep.join("alpha.md"));
    assert!(written.is_file());
}
#[test]
fn write_skill_template_refuses_to_overwrite() {
    // An existing file must be reported as an I/O error and left untouched.
    let tmp = tempfile::tempdir().unwrap();
    let existing = tmp.path().join("alpha.md");
    fs::write(&existing, "existing").unwrap();
    let err = write_skill_template(tmp.path(), "alpha", "Replace me?").unwrap_err();
    assert!(matches!(err, SkillError::Io { .. }));
    assert_eq!(fs::read_to_string(&existing).unwrap(), "existing");
}
#[test]
fn write_skill_template_round_trips_through_registry() {
    // A skill written via the template helper into the conventional
    // `<manifest>.skills` directory should be picked up by project-layer
    // auto-detection and resolve through the registry.
    let tmp = tempfile::tempdir().unwrap();
    let manifest = tmp.path().join("test_mcp.yaml");
    fs::write(&manifest, "name: t\nskills: true\n").unwrap();
    write_skill_template(
        &tmp.path().join("test_mcp.skills"),
        "custom_method",
        "Project-layer skill body.",
    )
    .unwrap();
    let registry = Registry::new()
        .auto_detect_project_layer(&manifest)
        .finalise()
        .unwrap();
    let resolved = registry
        .get("custom_method")
        .expect("template should resolve");
    assert_eq!(resolved.description(), "Project-layer skill body.");
}
/// Builds a bundled `Skill` whose frontmatter carries the given
/// `applies_when` YAML fragment (the fragment supplies its own indentation
/// under the `applies_when:` key).
fn skill_with_applies_when(applies_when_yaml: &str) -> Skill {
    let source = format!(
        "---\nname: gated\ndescription: A gated skill.\napplies_when:\n{applies_when_yaml}\n---\n\nBody.\n"
    );
    let (frontmatter, body) = parse_skill(&source, &PathBuf::from("gated.md")).unwrap();
    Skill {
        frontmatter,
        body,
        provenance: SkillProvenance::Bundled,
    }
}
// Each recognised `applies_when` key parses into its own `AppliesWhen`
// field: node-type list, tool name, extension name, and the nested
// property check. (The `\x20` escapes pin YAML indentation across the
// string-literal line continuations.)
#[test]
fn applies_when_parses_map_shape() {
    let skill = skill_with_applies_when(
        " graph_has_node_type: [Function, Class]\n\
        \x20 tool_registered: cypher_query\n\
        \x20 extension_enabled: csv_http_server\n\
        \x20 graph_has_property:\n\
        \x20 node_type: Function\n\
        \x20 prop_name: module",
    );
    let applies = skill.frontmatter.applies_when.unwrap();
    assert_eq!(
        applies.graph_has_node_type.as_deref(),
        Some(["Function".to_string(), "Class".to_string()].as_slice())
    );
    assert_eq!(applies.tool_registered.as_deref(), Some("cypher_query"));
    assert_eq!(
        applies.extension_enabled.as_deref(),
        Some("csv_http_server")
    );
    assert_eq!(
        applies.graph_has_property,
        Some(GraphPropertyCheck {
            node_type: "Function".to_string(),
            prop_name: "module".to_string(),
        })
    );
}
#[test]
fn applies_when_absent_means_always_active() {
    // A skill without an `applies_when` block is unconditionally active
    // and produces no predicate clauses.
    let source = "---\nname: ungated\ndescription: An ungated skill.\n---\n\nBody.\n";
    let (frontmatter, body) = parse_skill(source, &PathBuf::from("ungated.md")).unwrap();
    let ungated = Skill {
        frontmatter,
        body,
        provenance: SkillProvenance::Bundled,
    };
    let activation = ResolvedRegistry::default().activation_for(
        &ungated,
        &std::collections::HashSet::new(),
        &serde_json::Map::new(),
    );
    assert!(activation.active);
    assert!(activation.clauses.is_empty());
}
#[test]
fn tool_registered_predicate_dispatches_in_framework() {
    // `tool_registered` is resolved by the framework itself against the
    // registered-tool set; no external evaluator is involved.
    let skill = skill_with_applies_when(" tool_registered: cypher_query");
    let registry = ResolvedRegistry::default();
    let mut registered = std::collections::HashSet::new();
    let before = registry.activation_for(&skill, &registered, &serde_json::Map::new());
    assert!(!before.active);
    assert_eq!(before.clauses[0].1, PredicateOutcome::Unsatisfied);
    registered.insert("cypher_query".to_string());
    let after = registry.activation_for(&skill, &registered, &serde_json::Map::new());
    assert!(after.active);
    assert_eq!(after.clauses[0].1, PredicateOutcome::Satisfied);
}
#[test]
fn extension_enabled_predicate_dispatches_in_framework() {
    // `extension_enabled` consults the extension map: a missing key,
    // `false`, or `null` all leave the skill inactive, while `true` or an
    // object value count as enabled.
    let skill = skill_with_applies_when(" extension_enabled: csv_http_server");
    let registry = ResolvedRegistry::default();
    let tools = std::collections::HashSet::new();
    let mut extensions = serde_json::Map::new();
    // Key absent entirely.
    assert!(!registry.activation_for(&skill, &tools, &extensions).active);
    // Each value below overwrites the previous entry for the same key.
    for (value, expected) in [
        (serde_json::json!(false), false),
        (serde_json::Value::Null, false),
        (serde_json::json!(true), true),
        (serde_json::json!({"enabled": true}), true),
    ] {
        extensions.insert("csv_http_server".to_string(), value);
        assert_eq!(
            registry.activation_for(&skill, &tools, &extensions).active,
            expected
        );
    }
}
/// Test double for `SkillPredicateEvaluator` with a single canned answer.
struct StubEvaluator {
    // Whether the simulated graph should be treated as containing
    // `Function` nodes (see the `evaluate` impl below).
    has_function: bool,
}
impl SkillPredicateEvaluator for StubEvaluator {
    /// Answers only `graph_has_node_type` clauses; every other clause kind
    /// is reported as unknown (`None`).
    fn evaluate(&self, clause: &PredicateClause<'_>) -> Option<bool> {
        if let PredicateClause::GraphHasNodeType(node_types) = clause {
            let mentions_function = node_types.iter().any(|ty| ty == "Function");
            Some(mentions_function && self.has_function)
        } else {
            None
        }
    }
}
#[test]
fn graph_predicate_dispatches_via_evaluator() {
    // Graph predicates are delegated to the registered evaluator; its
    // yes/no answer decides both the clause outcome and overall activation.
    let skill = skill_with_applies_when(" graph_has_node_type: [Function, Class]");
    let no_tools = std::collections::HashSet::new();
    let no_extensions = serde_json::Map::new();
    for (has_function, expected_outcome) in [
        (true, PredicateOutcome::Satisfied),
        (false, PredicateOutcome::Unsatisfied),
    ] {
        let registry = Registry::new()
            .with_predicate_evaluator(StubEvaluator { has_function })
            .finalise()
            .unwrap();
        let activation = registry.activation_for(&skill, &no_tools, &no_extensions);
        assert_eq!(activation.active, has_function);
        assert_eq!(activation.clauses[0].1, expected_outcome);
    }
}
#[test]
fn graph_predicate_unknown_without_evaluator_means_inactive() {
    // Without a registered evaluator a graph predicate cannot be resolved:
    // the clause reads `Unknown` and the skill stays inactive.
    let skill = skill_with_applies_when(" graph_has_node_type: [Function]");
    let activation = ResolvedRegistry::default().activation_for(
        &skill,
        &std::collections::HashSet::new(),
        &serde_json::Map::new(),
    );
    assert!(!activation.active);
    assert_eq!(activation.clauses[0].1, PredicateOutcome::Unknown);
}
// Clauses in `applies_when` are conjunctive: every predicate must hold
// before the skill activates.
#[test]
fn multiple_predicates_all_must_be_satisfied() {
    let skill = skill_with_applies_when(
        " graph_has_node_type: [Function]\n\
        \x20 tool_registered: cypher_query",
    );
    // The graph clause is satisfied via the stub evaluator...
    let registry = Registry::new()
        .with_predicate_evaluator(StubEvaluator { has_function: true })
        .finalise()
        .unwrap();
    let mut tools = std::collections::HashSet::new();
    let extensions = serde_json::Map::new();
    // ...but with no tool registered the conjunction still fails.
    assert!(!registry.activation_for(&skill, &tools, &extensions).active);
    tools.insert("cypher_query".to_string());
    // Once both clauses hold, the skill becomes active.
    assert!(registry.activation_for(&skill, &tools, &extensions).active);
}
}