use crate::skill::metadata::SkillMetadata;
use anyhow::Result;
use sha2::{Digest, Sha256};
use std::path::Path;
/// Outcome of dependency detection for a skill directory: which runtime
/// ecosystem the packages belong to, the package names themselves, and a
/// digest over them suitable for cache keying (see `get_cache_key`).
#[derive(Debug, Clone)]
pub struct DependencyInfo {
    /// Runtime ecosystem the packages belong to (Python, Node, or none).
    pub dep_type: DependencyType,
    /// Detected package names; empty when no dependencies were found.
    pub packages: Vec<String>,
    /// Hex SHA-256 digest of the sorted package list; empty string when
    /// there are no packages.
    pub content_hash: String,
}
/// Runtime ecosystem a skill's dependencies belong to.
///
/// Fieldless, so it derives `Copy` (cheap pass-by-value) and the full
/// `Eq`/`Hash` set, allowing use as a map key. Deriving `PartialEq`
/// without `Eq` on a type with total equality is a clippy lint
/// (`derive_partial_eq_without_eq`); all additions are backward-compatible.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum DependencyType {
    /// Python package dependencies.
    Python,
    /// Node.js package dependencies.
    Node,
    /// No detectable dependencies.
    None,
}
/// Detects the dependency set for a skill from its metadata.
///
/// Package sources, in priority order:
/// 1. `metadata.resolved_packages`, when present;
/// 2. known package names mined from the free-form compatibility string;
/// 3. command prefixes from `allowed_tools` as a last resort.
///
/// The ecosystem (`dep_type`) comes from `detect_language`, except in the
/// allowed-tools fallback.
/// NOTE(review): that fallback hard-codes `DependencyType::Node` regardless
/// of the detected language — confirm this is intentional.
pub fn detect_dependencies(_skill_dir: &Path, metadata: &SkillMetadata) -> Result<DependencyInfo> {
    let language = crate::skill::metadata::detect_language(_skill_dir, metadata);

    // Prefer explicitly resolved packages; otherwise mine the compatibility text.
    let packages = match &metadata.resolved_packages {
        Some(resolved) => resolved.clone(),
        None => parse_compatibility_for_packages(metadata.compatibility.as_deref()),
    };

    if packages.is_empty() {
        // Fallback: treat allowed-tool command prefixes as the package list.
        if let Some(allowed) = &metadata.allowed_tools {
            let patterns = crate::skill::metadata::parse_allowed_tools(allowed);
            if !patterns.is_empty() {
                let prefixes: Vec<String> =
                    patterns.iter().map(|p| p.command_prefix.clone()).collect();
                let content_hash = compute_packages_hash(&prefixes);
                return Ok(DependencyInfo {
                    dep_type: DependencyType::Node,
                    packages: prefixes,
                    content_hash,
                });
            }
        }
        // Nothing found anywhere: empty, hash-less result.
        return Ok(DependencyInfo {
            dep_type: DependencyType::None,
            packages: Vec::new(),
            content_hash: String::new(),
        });
    }

    let dep_type = match language.as_str() {
        "python" => DependencyType::Python,
        "node" => DependencyType::Node,
        _ => DependencyType::None,
    };
    let content_hash = compute_packages_hash(&packages);
    Ok(DependencyInfo {
        dep_type,
        packages,
        content_hash,
    })
}
pub fn parse_compatibility_for_packages(compatibility: Option<&str>) -> Vec<String> {
let Some(compat) = compatibility else {
return vec![];
};
let known_python_packages = [
"requests",
"pandas",
"numpy",
"scipy",
"matplotlib",
"seaborn",
"sklearn",
"scikit-learn",
"tensorflow",
"torch",
"pytorch",
"flask",
"django",
"fastapi",
"aiohttp",
"httpx",
"beautifulsoup",
"bs4",
"lxml",
"selenium",
"html2text",
"pillow",
"opencv",
"cv2",
"pyyaml",
"yaml",
"sqlalchemy",
"psycopg2",
"pymysql",
"redis",
"pymongo",
"pyodps",
"boto3",
"google-cloud",
"azure",
"oss2",
"pytest",
"unittest",
"mock",
"click",
"argparse",
"typer",
"pydantic",
"dataclasses",
"attrs",
"jinja2",
"mako",
"celery",
"rq",
"cryptography",
"jwt",
"passlib",
"playwright",
"openpyxl",
"pyarrow",
"polars",
"duckdb",
"openai",
"anthropic",
"langchain",
"langgraph",
"llama-index",
"aiofiles",
"tenacity",
"orjson",
"ujson",
];
let known_node_packages = [
"axios",
"node-fetch",
"got",
"express",
"koa",
"fastify",
"hapi",
"lodash",
"underscore",
"ramda",
"moment",
"dayjs",
"date-fns",
"cheerio",
"puppeteer",
"playwright",
"@playwright/test",
"mongoose",
"sequelize",
"knex",
"prisma",
"ioredis",
"aws-sdk",
"googleapis",
"openai",
"@anthropic-ai/sdk",
"jest",
"mocha",
"chai",
"commander",
"yargs",
"inquirer",
"chalk",
"ora",
"boxen",
"dotenv",
"jsonwebtoken",
"bcrypt",
"crypto-js",
"socket.io",
"ws",
"sharp",
"jimp",
];
let compat_lower = compat.to_lowercase();
let mut packages = Vec::new();
for pkg in known_python_packages.iter() {
let pkg_lower = pkg.to_lowercase();
if is_word_match(&compat_lower, &pkg_lower) {
packages.push(pkg.to_string());
}
}
for pkg in known_node_packages.iter() {
let pkg_lower = pkg.to_lowercase();
if is_word_match(&compat_lower, &pkg_lower) {
packages.push(pkg.to_string());
}
}
packages
}
/// Returns `true` when `word` occurs in `text` as a whole word, i.e. the
/// characters immediately before and after the occurrence (if any) are not
/// alphanumeric. Comparison is per `char`, so it is Unicode-aware; every
/// start position is examined, matching the original sliding-window scan.
fn is_word_match(text: &str, word: &str) -> bool {
    let needle: Vec<char> = word.chars().collect();
    let haystack: Vec<char> = text.chars().collect();
    let n = needle.len();
    // saturating_sub keeps the single start=0 probe when text is shorter
    // than word (it simply fails the starts_with check), as before.
    (0..=haystack.len().saturating_sub(n)).any(|start| {
        haystack[start..].starts_with(&needle)
            && (start == 0 || !haystack[start - 1].is_alphanumeric())
            && haystack
                .get(start + n)
                .map_or(true, |c| !c.is_alphanumeric())
    })
}
/// Produces a hex-encoded SHA-256 digest of a package list.
///
/// Names are sorted first so the digest is independent of input order, and
/// each name is newline-terminated so adjacent names stay delimited in the
/// hashed byte stream.
fn compute_packages_hash(packages: &[String]) -> String {
    let mut names: Vec<&str> = packages.iter().map(String::as_str).collect();
    // sort_unstable is fine: equal strings are indistinguishable, so the
    // resulting order (and digest) matches a stable sort.
    names.sort_unstable();
    let digest = names
        .into_iter()
        .fold(Sha256::new(), |mut hasher, name| {
            hasher.update(name.as_bytes());
            hasher.update(b"\n");
            hasher
        })
        .finalize();
    hex::encode(digest)
}
/// Validates a skill's dependencies. Currently a no-op that always returns
/// `Ok(())` — both arguments are unused.
/// NOTE(review): presumably a placeholder for real validation (e.g. checking
/// the declared packages are resolvable); confirm before relying on it.
pub fn validate_dependencies(_skill_dir: &Path, _metadata: &SkillMetadata) -> Result<()> {
    Ok(())
}
/// Builds a short cache key for a dependency set.
///
/// Returns `"none"` for `DependencyType::None`; otherwise `"<prefix>-none"`
/// when the content hash is empty, or `"<prefix>-<first 16 hash chars>"`
/// where the prefix is `"py"` or `"node"`.
///
/// Fixes over the previous version: the duplicated Python/Node arms are
/// collapsed into one prefix lookup, and truncation takes the first 16
/// *characters* instead of byte-slicing — identical output for the usual
/// ASCII hex hashes, but it can never panic on a non-char-boundary the way
/// `&s[..16]` could if `content_hash` (a public field) held non-ASCII data.
pub fn get_cache_key(dep_info: &DependencyInfo) -> String {
    let prefix = match dep_info.dep_type {
        DependencyType::Python => "py",
        DependencyType::Node => "node",
        // No ecosystem means no hash to encode — short-circuit.
        DependencyType::None => return "none".to_string(),
    };
    if dep_info.content_hash.is_empty() {
        format!("{prefix}-none")
    } else {
        let short: String = dep_info.content_hash.chars().take(16).collect();
        format!("{prefix}-{short}")
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Every Python package named in the compatibility string is detected.
    #[test]
    fn test_parse_compatibility_for_common_python_packages() {
        let packages = parse_compatibility_for_packages(Some(
            "Requires Python 3.x with pyodps, pyarrow, polars, openai and langchain",
        ));
        for expected in ["pyodps", "pyarrow", "polars", "openai", "langchain"] {
            assert!(packages.contains(&expected.to_string()));
        }
    }

    /// Scoped npm names (containing `@` and `/`) match as whole words too.
    #[test]
    fn test_parse_compatibility_for_common_node_packages() {
        let packages = parse_compatibility_for_packages(Some(
            "Requires Node.js with openai, @anthropic-ai/sdk, and @playwright/test",
        ));
        for expected in ["openai", "@anthropic-ai/sdk", "@playwright/test"] {
            assert!(packages.contains(&expected.to_string()));
        }
    }
}