use std::collections::HashMap;
use std::fs;
use std::path::{Path, PathBuf};
use serde_yaml::Value as YamlValue;
use walkdir::WalkDir;
use wax::{CandidatePath, Glob, Pattern};
use crate::config::WorkspaceBundle;
use crate::error::{AugentError, Result};
use crate::platform::{MergeStrategy, Platform, TransformRule};
use crate::progress::ProgressDisplay;
use crate::resolver::ResolvedBundle;
use crate::universal;
/// Bundle subdirectories scanned recursively for installable resources.
const RESOURCE_DIRS: &[&str] = &["commands", "rules", "agents", "skills", "root"];
/// Well-known files at the bundle root installed as "root" resources.
const RESOURCE_FILES: &[&str] = &["mcp.jsonc", "AGENTS.md"];
/// A single installable file found inside a bundle directory.
#[derive(Debug, Clone)]
pub struct DiscoveredResource {
    /// Path relative to the bundle root (e.g. `commands/foo.md`).
    pub bundle_path: PathBuf,
    /// Absolute path of the source file on disk.
    pub absolute_path: PathBuf,
    /// Resource category; one of `RESOURCE_DIRS` ("commands", "rules", ...).
    pub resource_type: String,
}
/// Record of where one bundle source file ended up in the workspace.
#[derive(Debug, Clone)]
pub struct InstalledFile {
    /// Bundle-relative source path, '/'-separated.
    pub bundle_path: String,
    /// Resource category of the source file.
    pub resource_type: String,
    /// Workspace-relative paths the file was written to (accumulated
    /// across platforms/rules).
    pub target_paths: Vec<String>,
}
/// Installs resolved bundles into a workspace for one or more platforms.
pub struct Installer<'a> {
    /// Root directory of the target workspace.
    workspace_root: &'a Path,
    /// Platforms resources are installed for.
    platforms: Vec<Platform>,
    /// Accumulated install records, keyed by bundle-relative source path.
    installed_files: HashMap<String, InstalledFile>,
    /// When true, planned actions are printed instead of files written.
    dry_run: bool,
    /// Optional progress reporter.
    progress: Option<&'a mut ProgressDisplay>,
    /// Leaf skill directories of the bundle currently being installed;
    /// set for the duration of `install_bundle` and cleared afterwards.
    pub(super) leaf_skill_dirs: Option<std::collections::HashSet<String>>,
}
/// One planned copy/merge of a bundle source file to a workspace target.
#[derive(Debug, Clone)]
struct PendingInstallation {
    /// Absolute path of the source file.
    source_path: PathBuf,
    /// Absolute destination path in the workspace.
    target_path: PathBuf,
    /// How this source is combined with existing/other content at the target.
    merge_strategy: MergeStrategy,
    /// Bundle-relative source path, '/'-separated.
    bundle_path: String,
    /// Resource category of the source file.
    resource_type: String,
}
impl<'a> Installer<'a> {
#[allow(dead_code)] pub fn new(workspace_root: &'a Path, platforms: Vec<Platform>) -> Self {
Self {
workspace_root,
platforms,
installed_files: HashMap::new(),
dry_run: false,
progress: None,
leaf_skill_dirs: None,
}
}
pub fn new_with_dry_run(
workspace_root: &'a Path,
platforms: Vec<Platform>,
dry_run: bool,
) -> Self {
Self {
workspace_root,
platforms,
installed_files: HashMap::new(),
dry_run,
progress: None,
leaf_skill_dirs: None,
}
}
/// Creates an installer with explicit dry-run and progress settings.
pub fn new_with_progress(
    workspace_root: &'a Path,
    platforms: Vec<Platform>,
    dry_run: bool,
    progress: Option<&'a mut ProgressDisplay>,
) -> Self {
    Self {
        workspace_root,
        platforms,
        dry_run,
        progress,
        installed_files: HashMap::new(),
        leaf_skill_dirs: None,
    }
}
/// Walks a bundle directory and returns every installable file: all
/// regular files under the known resource subdirectories, plus the
/// well-known bundle-root files (treated as "root" resources).
pub fn discover_resources(bundle_path: &Path) -> Result<Vec<DiscoveredResource>> {
    let mut resources = Vec::new();
    for dir_name in RESOURCE_DIRS {
        let dir_path = bundle_path.join(dir_name);
        if !dir_path.is_dir() {
            continue;
        }
        // Unreadable entries are silently skipped; symlinks are followed.
        let files = WalkDir::new(&dir_path)
            .follow_links(true)
            .into_iter()
            .filter_map(|entry| entry.ok())
            .filter(|entry| entry.file_type().is_file());
        for entry in files {
            let relative = entry
                .path()
                .strip_prefix(bundle_path)
                .unwrap_or(entry.path())
                .to_path_buf();
            resources.push(DiscoveredResource {
                bundle_path: relative,
                absolute_path: entry.path().to_path_buf(),
                resource_type: (*dir_name).to_string(),
            });
        }
    }
    for file_name in RESOURCE_FILES {
        let file_path = bundle_path.join(file_name);
        if file_path.is_file() {
            resources.push(DiscoveredResource {
                bundle_path: PathBuf::from(file_name),
                absolute_path: file_path,
                resource_type: "root".to_string(),
            });
        }
    }
    Ok(resources)
}
/// Validates SKILL.md frontmatter against the spec: `name` must be
/// 1-64 characters of `[a-z0-9-]` with no leading, trailing, or doubled
/// hyphens, and `description` must be non-empty (trimmed) and at most
/// 1024 characters.
#[allow(dead_code)]
fn validate_skill_frontmatter_spec(content: &str, _parent_dir_name: &str) -> bool {
    let fm = match universal::parse_frontmatter_and_body(content) {
        Some((fm, _)) => fm,
        None => return false,
    };
    let (name, description) = match (
        universal::get_str(&fm, "name"),
        universal::get_str(&fm, "description"),
    ) {
        (Some(n), Some(d)) => (n, d),
        _ => return false,
    };
    let chars_ok = name
        .chars()
        .all(|c| c.is_ascii_lowercase() || c.is_ascii_digit() || c == '-');
    let name_ok = !name.is_empty()
        && name.len() <= 64
        && chars_ok
        && !name.starts_with('-')
        && !name.ends_with('-')
        && !name.contains("--");
    if !name_ok {
        return false;
    }
    let desc = description.trim();
    !desc.is_empty() && desc.len() <= 1024
}
/// Filters "skills" resources down to files that live inside a leaf
/// skill directory (a directory with a SKILL.md and no nested skill
/// below it). Files sitting directly under `skills/` with no
/// subdirectory are dropped; non-skill resources pass through untouched.
fn filter_skills_resources(resources: Vec<DiscoveredResource>) -> Vec<DiscoveredResource> {
    const SKILLS_PREFIX: &str = "skills/";
    const SKILL_MD_NAME: &str = "SKILL.md";
    // Every directory that directly contains a SKILL.md marker.
    let skill_dirs: std::collections::HashSet<String> = resources
        .iter()
        .filter(|res| res.resource_type == "skills")
        .filter(|res| {
            res.bundle_path.file_name().and_then(|n| n.to_str()) == Some(SKILL_MD_NAME)
        })
        .filter_map(|res| {
            res.bundle_path
                .parent()
                .map(|p| p.to_string_lossy().replace('\\', "/"))
        })
        .collect();
    // Keep only the deepest (leaf) skill directories.
    let leaves: std::collections::HashSet<String> = skill_dirs
        .iter()
        .filter(|dir| {
            let nested_prefix = format!("{}/", dir);
            !skill_dirs
                .iter()
                .any(|other| *other != **dir && other.starts_with(&nested_prefix))
        })
        .cloned()
        .collect();
    resources
        .into_iter()
        .filter(|res| {
            if res.resource_type != "skills" {
                return true;
            }
            let path = res.bundle_path.to_string_lossy().replace('\\', "/");
            if !path.starts_with(SKILLS_PREFIX) {
                return true;
            }
            let rest = path.trim_start_matches(SKILLS_PREFIX);
            if !rest.contains('/') {
                // A file directly under skills/ is not part of any skill.
                return false;
            }
            leaves
                .iter()
                .any(|leaf| path == *leaf || path.starts_with(&format!("{}/", leaf)))
        })
        .collect()
}
/// Collects the deepest directories under `skills/` that contain a
/// SKILL.md marker file, as '/'-separated bundle-relative paths.
fn compute_leaf_skill_dirs(
    resources: &[DiscoveredResource],
) -> std::collections::HashSet<String> {
    const SKILLS_PREFIX: &str = "skills/";
    const SKILL_MD_NAME: &str = "SKILL.md";
    let mut skill_dirs = std::collections::HashSet::new();
    for res in resources {
        if res.resource_type != "skills" {
            continue;
        }
        if res.bundle_path.file_name().and_then(|n| n.to_str()) != Some(SKILL_MD_NAME) {
            continue;
        }
        if let Some(parent) = res.bundle_path.parent() {
            let dir = parent.to_string_lossy().replace('\\', "/");
            if dir.starts_with(SKILLS_PREFIX) {
                skill_dirs.insert(dir);
            }
        }
    }
    // Drop any directory that has another skill directory nested inside it.
    skill_dirs
        .iter()
        .filter(|dir| {
            let nested_prefix = format!("{}/", dir);
            !skill_dirs
                .iter()
                .any(|other| *other != **dir && other.starts_with(&nested_prefix))
        })
        .cloned()
        .collect()
}
/// Installs a single resolved bundle into the workspace.
///
/// Discovers resources, filters skills to leaf skill directories,
/// resolves every (resource, platform) pair into a pending installation,
/// executes them grouped by destination path, and records the resulting
/// source -> targets mapping in the returned `WorkspaceBundle`.
pub fn install_bundle(&mut self, bundle: &ResolvedBundle) -> Result<WorkspaceBundle> {
    let resources =
        Self::filter_skills_resources(Self::discover_resources(&bundle.source_path)?);
    // apply_transform_rule needs the leaf skill dirs while this bundle
    // installs; cleared again before returning.
    self.leaf_skill_dirs = Some(Self::compute_leaf_skill_dirs(&resources));
    let pending_installations = self.collect_pending_installations(&resources, bundle)?;
    let grouped_by_target = self.group_by_target(&pending_installations);
    let mut workspace_bundle = WorkspaceBundle::new(&bundle.name);
    for (ref target_path, ref installations) in grouped_by_target {
        if let Some(ref mut progress) = self.progress {
            let relative = target_path
                .strip_prefix(self.workspace_root)
                .unwrap_or(target_path);
            progress.update_file(&relative.to_string_lossy());
        }
        let _installed = self.execute_installations(target_path, installations)?;
    }
    // Gather, per unique bundle source path, the full set of target paths
    // recorded by execute_installations (one entry per platform/rule).
    let mut source_to_targets: std::collections::HashMap<String, Vec<String>> =
        std::collections::HashMap::new();
    let mut bundle_source_paths: std::collections::HashSet<String> =
        std::collections::HashSet::new();
    for installation in &pending_installations {
        bundle_source_paths.insert(installation.bundle_path.clone());
    }
    for source_path in bundle_source_paths {
        if let Some(installed_file) = self.installed_files.get(&source_path) {
            source_to_targets.insert(source_path, installed_file.target_paths.clone());
        }
    }
    for (source_path, target_paths) in source_to_targets {
        workspace_bundle.add_file(source_path, target_paths);
    }
    self.leaf_skill_dirs = None;
    Ok(workspace_bundle)
}
/// Installs every bundle in order, driving bundle- and file-level
/// progress when a progress display is attached.
///
/// The total file count is precomputed (discovery + skills filtering per
/// bundle, times the platform count) only when progress is enabled,
/// since it costs an extra directory walk per bundle.
pub fn install_bundles(&mut self, bundles: &[ResolvedBundle]) -> Result<Vec<WorkspaceBundle>> {
    let mut workspace_bundles = Vec::new();
    let total_bundles = bundles.len();
    let total_files = if self.progress.is_some() {
        bundles
            .iter()
            .map(|b| {
                // Discovery errors count as zero files here; the real
                // error surfaces later inside install_bundle.
                Self::discover_resources(&b.source_path)
                    .map(|resources| {
                        let filtered = Self::filter_skills_resources(resources);
                        filtered.len() * self.platforms.len()
                    })
                    .unwrap_or(0)
            })
            .sum()
    } else {
        0
    };
    if let Some(ref mut progress) = self.progress {
        if total_files > 0 {
            progress.init_file_progress(total_files as u64);
        }
    }
    for (idx, bundle) in bundles.iter().enumerate() {
        if let Some(ref mut progress) = self.progress {
            progress.update_bundle(&bundle.name, idx + 1, total_bundles);
        }
        let workspace_bundle = self.install_bundle(bundle)?;
        if let Some(ref mut progress) = self.progress {
            progress.inc_bundle();
        }
        workspace_bundles.push(workspace_bundle);
    }
    Ok(workspace_bundles)
}
/// Expands discovered resources into concrete (source, target, strategy)
/// installations for every configured platform.
///
/// "root" resources are special-cased: they are matched against each
/// platform's transform rules, and when no rule matches on any platform
/// they fall back to a single Replace-install at the workspace root.
fn collect_pending_installations(
    &self,
    resources: &[DiscoveredResource],
    _bundle: &ResolvedBundle,
) -> Result<Vec<PendingInstallation>> {
    let mut pending = Vec::new();
    for resource in resources {
        if resource.resource_type == "root" {
            // Drop the leading "root/" so rules and targets see the
            // workspace-relative name (no-op for RESOURCE_FILES entries,
            // whose bundle_path has no "root" prefix).
            let relative_path = resource
                .bundle_path
                .strip_prefix("root")
                .unwrap_or(&resource.bundle_path);
            let mut found_rule = false;
            for platform in &self.platforms {
                if let Some(rule) = self.find_transform_rule(platform, relative_path) {
                    let target = self.apply_transform_rule(rule, relative_path);
                    pending.push(PendingInstallation {
                        source_path: resource.absolute_path.clone(),
                        target_path: target,
                        merge_strategy: rule.merge,
                        bundle_path: resource.bundle_path.to_string_lossy().replace('\\', "/"),
                        resource_type: resource.resource_type.clone(),
                    });
                    found_rule = true;
                }
            }
            if !found_rule {
                // No platform claimed the file: install it verbatim at
                // the workspace root.
                let target = self.workspace_root.join(relative_path);
                pending.push(PendingInstallation {
                    source_path: resource.absolute_path.clone(),
                    target_path: target,
                    merge_strategy: MergeStrategy::Replace,
                    bundle_path: resource.bundle_path.to_string_lossy().replace('\\', "/"),
                    resource_type: resource.resource_type.clone(),
                });
            }
        } else {
            for platform in &self.platforms {
                if let Some((target_path, merge_strategy)) =
                    self.get_target_path_and_strategy(resource, platform)?
                {
                    pending.push(PendingInstallation {
                        source_path: resource.absolute_path.clone(),
                        target_path,
                        merge_strategy,
                        bundle_path: resource.bundle_path.to_string_lossy().replace('\\', "/"),
                        resource_type: resource.resource_type.clone(),
                    });
                }
            }
        }
    }
    Ok(pending)
}
/// Resolves the target path and merge strategy for one resource on one
/// platform: a matching transform rule wins; otherwise the file lands
/// under the platform directory with Replace semantics.
fn get_target_path_and_strategy(
    &self,
    resource: &DiscoveredResource,
    platform: &Platform,
) -> Result<Option<(PathBuf, MergeStrategy)>> {
    let resolved = match self.find_transform_rule(platform, &resource.bundle_path) {
        Some(rule) => (
            self.apply_transform_rule(rule, &resource.bundle_path),
            rule.merge,
        ),
        None => (
            platform
                .directory_path(self.workspace_root)
                .join(&resource.bundle_path),
            MergeStrategy::Replace,
        ),
    };
    Ok(Some(resolved))
}
/// Groups pending installations by destination path so several sources
/// aimed at the same file can be merged in one pass.
fn group_by_target(
    &self,
    installations: &[PendingInstallation],
) -> Vec<(PathBuf, Vec<PendingInstallation>)> {
    let mut by_target: HashMap<PathBuf, Vec<PendingInstallation>> = HashMap::new();
    for inst in installations.iter().cloned() {
        by_target
            .entry(inst.target_path.clone())
            .or_default()
            .push(inst);
    }
    by_target.into_iter().collect()
}
/// Performs (or, in dry-run mode, prints) all installations that share a
/// single target path, then records the outcome in `installed_files`.
///
/// Returns an error when called with an empty installation list.
fn execute_installations(
    &mut self,
    target_path: &Path,
    installations: &[PendingInstallation],
) -> Result<InstalledFile> {
    if installations.is_empty() {
        return Err(AugentError::FileReadFailed {
            path: target_path.display().to_string(),
            reason: "No installations to execute".to_string(),
        });
    }
    if !self.dry_run {
        if installations.len() == 1 {
            let installation = &installations[0];
            self.apply_merge_and_copy(
                &installation.source_path,
                target_path,
                &installation.merge_strategy,
            )?;
        } else {
            // Several sources collapse onto one target: merge them.
            self.merge_multiple_installations(target_path, installations)?;
        }
    } else {
        if installations.len() == 1 {
            let installation = &installations[0];
            let relative = target_path
                .strip_prefix(self.workspace_root)
                .unwrap_or(target_path);
            println!(
                " Would install: {} -> {}",
                installation.bundle_path,
                relative.display()
            );
        } else {
            let relative = target_path
                .strip_prefix(self.workspace_root)
                .unwrap_or(target_path);
            println!(
                " Would merge {} files -> {}",
                installations.len(),
                relative.display()
            );
        }
    }
    // Gemini command markdown is actually written as .toml, so record the
    // real on-disk path rather than the planned .md one.
    let actual_target_path = if self.is_gemini_command_file(target_path) {
        target_path.with_extension("toml")
    } else {
        target_path.to_path_buf()
    };
    let relative = actual_target_path
        .strip_prefix(self.workspace_root)
        .unwrap_or(&actual_target_path);
    let target_paths = vec![relative.to_string_lossy().to_string()];
    let resource_type = installations[0].resource_type.clone();
    let bundle_path = installations[0].bundle_path.clone();
    let installed = InstalledFile {
        bundle_path: bundle_path.clone(),
        resource_type: resource_type.clone(),
        target_paths: target_paths.clone(),
    };
    // Accumulate target paths across calls: the same bundle source may be
    // installed to several targets (one per platform).
    self.installed_files
        .entry(bundle_path.clone())
        .and_modify(|existing| {
            for target in &target_paths {
                if !existing.target_paths.contains(target) {
                    existing.target_paths.push(target.clone());
                }
            }
        })
        .or_insert_with(|| InstalledFile {
            bundle_path: bundle_path.clone(),
            resource_type,
            target_paths: target_paths.clone(),
        });
    Ok(installed)
}
/// Writes several sources to one target according to the first source's
/// merge strategy. No-op for an empty list or in dry-run mode.
fn merge_multiple_installations(
    &self,
    target_path: &Path,
    installations: &[PendingInstallation],
) -> Result<()> {
    let first = match installations.first() {
        Some(first) if !self.dry_run => first,
        _ => return Ok(()),
    };
    match &first.merge_strategy {
        // Replace semantics: the last source wins outright.
        MergeStrategy::Replace => {
            let last = installations.last().unwrap();
            self.apply_merge_and_copy(&last.source_path, target_path, &first.merge_strategy)
        }
        MergeStrategy::Shallow | MergeStrategy::Deep => {
            self.merge_multiple_json_files(target_path, installations, &first.merge_strategy)
        }
        MergeStrategy::Composite => self.merge_multiple_text_files(target_path, installations),
    }
}
/// Merges several JSON/JSONC sources into one target file.
///
/// Starts from the existing target contents (or an empty object), then
/// folds each source in order with the shallow/deep strategy. JSONC
/// comments are stripped before parsing; the output is plain
/// pretty-printed JSON (comments are not preserved).
fn merge_multiple_json_files(
    &self,
    target_path: &Path,
    installations: &[PendingInstallation],
    strategy: &MergeStrategy,
) -> Result<()> {
    let mut result_value: serde_json::Value = if target_path.exists() {
        let existing_content =
            fs::read_to_string(target_path).map_err(|e| AugentError::FileReadFailed {
                path: target_path.display().to_string(),
                reason: e.to_string(),
            })?;
        let existing_json = strip_jsonc_comments(&existing_content);
        serde_json::from_str(&existing_json).map_err(|e| AugentError::ConfigParseFailed {
            path: target_path.display().to_string(),
            reason: e.to_string(),
        })?
    } else {
        serde_json::json!({})
    };
    for installation in installations {
        let source_content = fs::read_to_string(&installation.source_path).map_err(|e| {
            AugentError::FileReadFailed {
                path: installation.source_path.display().to_string(),
                reason: e.to_string(),
            }
        })?;
        let source_json = strip_jsonc_comments(&source_content);
        let source_value: serde_json::Value =
            serde_json::from_str(&source_json).map_err(|e| AugentError::ConfigParseFailed {
                path: installation.source_path.display().to_string(),
                reason: e.to_string(),
            })?;
        match strategy {
            MergeStrategy::Shallow => {
                shallow_merge(&mut result_value, &source_value);
            }
            MergeStrategy::Deep => {
                deep_merge(&mut result_value, &source_value);
            }
            // Replace/Composite strategies never route through here.
            _ => {}
        }
    }
    let result = serde_json::to_string_pretty(&result_value).map_err(|e| {
        AugentError::ConfigParseFailed {
            path: target_path.display().to_string(),
            reason: e.to_string(),
        }
    })?;
    if let Some(parent) = target_path.parent() {
        fs::create_dir_all(parent).map_err(|e| AugentError::FileWriteFailed {
            path: parent.display().to_string(),
            reason: e.to_string(),
        })?;
    }
    fs::write(target_path, result).map_err(|e| AugentError::FileWriteFailed {
        path: target_path.display().to_string(),
        reason: e.to_string(),
    })?;
    Ok(())
}
/// Concatenates several text sources (Composite strategy) into the
/// target, separated by an HTML-comment marker. Existing target content
/// is kept and appended to; OpenCode metadata files get their
/// frontmatter converted before being appended.
fn merge_multiple_text_files(
    &self,
    target_path: &Path,
    installations: &[PendingInstallation],
) -> Result<()> {
    let mut result = if target_path.exists() {
        fs::read_to_string(target_path).map_err(|e| AugentError::FileReadFailed {
            path: target_path.display().to_string(),
            reason: e.to_string(),
        })?
    } else {
        String::new()
    };
    for installation in installations {
        let mut source_content =
            fs::read_to_string(&installation.source_path).map_err(|e| {
                AugentError::FileReadFailed {
                    path: installation.source_path.display().to_string(),
                    reason: e.to_string(),
                }
            })?;
        if self.is_opencode_metadata_file(target_path) {
            // Best effort: on conversion failure the raw content is used.
            if let Ok(converted) = self.convert_opencode_frontmatter_only(&source_content) {
                source_content = converted;
            }
        }
        if !result.is_empty() {
            result.push_str("\n\n<!-- Augent: merged content below -->\n\n");
        }
        result.push_str(&source_content);
    }
    if let Some(parent) = target_path.parent() {
        fs::create_dir_all(parent).map_err(|e| AugentError::FileWriteFailed {
            path: parent.display().to_string(),
            reason: e.to_string(),
        })?;
    }
    fs::write(target_path, result).map_err(|e| AugentError::FileWriteFailed {
        path: target_path.display().to_string(),
        reason: e.to_string(),
    })?;
    Ok(())
}
/// Finds the first transform rule on `platform` whose `from` glob
/// matches the resource path; a rule whose `from` fails to parse as a
/// glob falls back to exact string comparison.
fn find_transform_rule<'b>(
    &self,
    platform: &'b Platform,
    resource_path: &Path,
) -> Option<&'b TransformRule> {
    let normalized = resource_path.to_string_lossy().replace('\\', "/");
    let candidate = CandidatePath::from(normalized.as_str());
    platform
        .transforms
        .iter()
        .find(|rule| match Glob::new(&rule.from) {
            Ok(glob) => glob.matched(&candidate).is_some(),
            Err(_) => rule.from == normalized,
        })
}
/// Maps a bundle-relative resource path to an absolute workspace target
/// using the transform rule's `to` template.
///
/// Template features handled, in order:
/// - `{name}`: replaced by the leaf skill directory's final component
///   (for skills), else the file stem.
/// - `**`: replaced by the path relative to the rule's `from` prefix
///   (or relative to the leaf skill directory for `{name}` skill rules),
///   with the original extension stripped when the rule sets one and the
///   text after `**` looks like a file pattern.
/// - `*`: replaced by the file stem.
/// - `rule.extension`: finally forces the target file extension.
fn apply_transform_rule(&self, rule: &TransformRule, resource_path: &Path) -> PathBuf {
    let path_str = resource_path.to_string_lossy().replace('\\', "/");
    // Leaf skill directory containing this path, if any. leaf_skill_dirs
    // is only populated while install_bundle runs.
    let skill_root: Option<&str> = if path_str.starts_with("skills/")
        && self.leaf_skill_dirs.as_ref().is_some_and(|dirs| {
            dirs.iter()
                .any(|d| path_str == d.as_str() || path_str.starts_with(&format!("{}/", d)))
        }) {
        self.leaf_skill_dirs.as_ref().and_then(|dirs| {
            dirs.iter()
                .find(|dir| {
                    path_str == dir.as_str() || path_str.starts_with(&format!("{}/", dir))
                })
                .map(String::as_str)
        })
    } else {
        None
    };
    let mut target = rule.to.clone();
    if target.contains("{name}") {
        let name = if path_str.starts_with("skills/") {
            // Prefer the leaf skill dir's last component; fall back to
            // the first component after "skills/", then the file stem.
            skill_root
                .and_then(|root| root.split('/').next_back().map(String::from))
                .unwrap_or_else(|| {
                    path_str
                        .trim_start_matches("skills/")
                        .split('/')
                        .next()
                        .map(String::from)
                        .unwrap_or_else(|| {
                            resource_path
                                .file_stem()
                                .map(|s| s.to_string_lossy().into_owned())
                                .unwrap_or_default()
                        })
                })
        } else {
            resource_path
                .file_stem()
                .map(|s| s.to_string_lossy().into_owned())
                .unwrap_or_default()
        };
        if !name.is_empty() {
            target = target.replace("{name}", &name);
        }
    }
    // The path portion later substituted for "**".
    let relative_part = if rule.to.contains("{name}") {
        if let Some(root) = skill_root {
            path_str
                .strip_prefix(root)
                .unwrap_or(&path_str)
                .trim_start_matches('/')
                .to_string()
        } else {
            self.extract_relative_part(&rule.from, &path_str)
        }
    } else {
        self.extract_relative_part(&rule.from, &path_str)
    };
    if target.contains("**") {
        if let Some(pos) = target.find("**") {
            let prefix = &target[..pos];
            let suffix = if pos + 2 < target.len() {
                &target[pos + 2..]
            } else {
                ""
            };
            // When the rule re-extends the file and the suffix carries a
            // file pattern, substitute the relative path with its
            // original extension removed.
            let relative_to_use =
                if rule.extension.is_some() && (suffix.contains('.') || suffix.contains('*')) {
                    let rel_path = PathBuf::from(&relative_part);
                    if let Some(stem) = rel_path.file_stem() {
                        if let Some(parent) = rel_path.parent() {
                            if parent.as_os_str().is_empty() {
                                stem.to_string_lossy().to_string()
                            } else {
                                format!(
                                    "{}/{}",
                                    parent.to_string_lossy().replace('\\', "/"),
                                    stem.to_string_lossy()
                                )
                            }
                        } else {
                            stem.to_string_lossy().to_string()
                        }
                    } else {
                        relative_part.clone()
                    }
                } else {
                    relative_part.clone()
                };
            if suffix.starts_with('/') {
                let suffix_clean = suffix.strip_prefix('/').unwrap_or(suffix);
                // A suffix naming a file pattern is dropped; a plain
                // directory suffix is appended after the relative part.
                if suffix_clean.contains('.') || suffix_clean.contains('*') {
                    target = format!("{}{}", prefix, relative_to_use);
                } else {
                    target = format!("{}{}/{}", prefix, relative_to_use, suffix_clean);
                }
            } else if !suffix.is_empty() {
                target = format!("{}{}{}", prefix, relative_to_use, suffix);
            } else {
                target = format!("{}{}", prefix, relative_to_use);
            }
        }
    } else if target.contains('*') {
        // A single '*' substitutes just the file stem.
        if let Some(stem) = resource_path.file_stem() {
            target = target.replace('*', &stem.to_string_lossy());
        }
    }
    if let Some(ref ext) = rule.extension {
        // Force the configured extension, replacing any existing one.
        let target_path = PathBuf::from(&target.replace('\\', "/"));
        if let Some(file_stem) = target_path.file_stem() {
            let new_filename = format!("{}.{}", file_stem.to_string_lossy(), ext);
            if let Some(parent) = target_path.parent() {
                target = parent
                    .join(&new_filename)
                    .to_string_lossy()
                    .replace('\\', "/");
            } else {
                target = new_filename;
            }
        } else {
            target = format!("{}.{}", target, ext);
        }
    }
    let target_path = PathBuf::from(&target.replace('\\', "/"));
    self.workspace_root.join(target_path)
}
/// Returns the part of `path` following the pattern's literal prefix
/// (the text before the first '*'); when the prefix does not match,
/// falls back to the file name, then to the whole path.
fn extract_relative_part(&self, pattern: &str, path: &str) -> String {
    let literal_prefix = &pattern[..pattern.find('*').unwrap_or(pattern.len())];
    match path.strip_prefix(literal_prefix) {
        Some(rest) => rest.trim_start_matches('/').to_string(),
        None => PathBuf::from(path)
            .file_name()
            .map(|name| name.to_string_lossy().to_string())
            .unwrap_or_else(|| path.to_string()),
    }
}
/// Copies or merges `source` into `target` per the strategy. Skipped
/// entirely in dry-run mode; a missing target is always a plain copy.
fn apply_merge_and_copy(
    &self,
    source: &Path,
    target: &Path,
    strategy: &MergeStrategy,
) -> Result<()> {
    if self.dry_run {
        return Ok(());
    }
    if let Some(parent) = target.parent() {
        fs::create_dir_all(parent).map_err(|e| AugentError::FileWriteFailed {
            path: parent.display().to_string(),
            reason: e.to_string(),
        })?;
    }
    if !target.exists() {
        return self.copy_file(source, target);
    }
    match strategy {
        MergeStrategy::Replace => self.copy_file(source, target),
        MergeStrategy::Shallow | MergeStrategy::Deep => {
            self.merge_json_files(source, target, strategy)
        }
        MergeStrategy::Composite => self.merge_text_files(source, target),
    }
}
/// Copies `source` to `target`, applying platform-specific content
/// conversions for text resources; binary and non-resource files are
/// copied byte-for-byte.
fn copy_file(&self, source: &Path, target: &Path) -> Result<()> {
    if self.is_platform_resource_file(target) && !Self::is_likely_binary_file(source) {
        let content = fs::read_to_string(source).map_err(|e| AugentError::FileReadFailed {
            path: source.display().to_string(),
            reason: e.to_string(),
        })?;
        let known: Vec<String> = self.platforms.iter().map(|p| p.id.clone()).collect();
        // Files with frontmatter get platform-specific key merging; the
        // converted content is written directly and we return early.
        if let Some((fm, body)) = universal::parse_frontmatter_and_body(&content) {
            if let Some(pid) = self.platform_id_from_target(target) {
                let merged = universal::merge_frontmatter_for_platform(&fm, pid, &known);
                if self.is_gemini_command_file(target) {
                    return self.convert_gemini_command_from_merged(&merged, &body, target);
                }
                return self.write_merged_frontmatter_markdown(&merged, &body, target);
            }
        }
        // No frontmatter: still honor per-platform file-format quirks.
        if self.is_gemini_command_file(target) {
            return self.convert_markdown_to_toml(source, target);
        }
        if self.is_opencode_metadata_file(target) {
            return self.convert_opencode_frontmatter(source, target);
        }
    }
    if let Some(parent) = target.parent() {
        fs::create_dir_all(parent).map_err(|e| AugentError::FileWriteFailed {
            path: parent.display().to_string(),
            reason: e.to_string(),
        })?;
    }
    fs::copy(source, target).map_err(|e| AugentError::FileWriteFailed {
        path: target.display().to_string(),
        reason: e.to_string(),
    })?;
    Ok(())
}
/// Identifies which configured platform owns `target` by checking
/// whether it lives under that platform's workspace directory.
fn platform_id_from_target(&self, target: &Path) -> Option<&str> {
    self.platforms
        .iter()
        .find(|platform| target.starts_with(self.workspace_root.join(&platform.directory)))
        .map(|platform| platform.id.as_str())
}
/// True when `target` sits under a platform directory AND inside one of
/// the known resource subdirectories (and so may need content
/// conversion before writing).
///
/// Fix: the path string is normalized to '/' separators first — the
/// previous `contains("/...")` checks could never match native Windows
/// paths, which use '\\'. This mirrors the normalization done elsewhere
/// in this module (e.g. find_transform_rule).
fn is_platform_resource_file(&self, target: &Path) -> bool {
    if self.platform_id_from_target(target).is_none() {
        return false;
    }
    let path_str = target.to_string_lossy().replace('\\', "/");
    const RESOURCE_MARKERS: &[&str] = &[
        "/commands/",
        "/rules/",
        "/agents/",
        "/skills/",
        "/workflows/",
        "/prompts/",
        "/instructions/",
        "/guidelines",
        "/droids/",
        "/steering/",
    ];
    RESOURCE_MARKERS
        .iter()
        .any(|marker| path_str.contains(marker))
}
/// Serializes merged frontmatter back into a `---`-fenced markdown file
/// at `target`, creating parent directories as needed.
fn write_merged_frontmatter_markdown(
    &self,
    merged: &YamlValue,
    body: &str,
    target: &Path,
) -> Result<()> {
    let yaml = universal::serialize_to_yaml(merged);
    let yaml = yaml.trim_end();
    // An empty mapping still gets an (empty) frontmatter fence.
    let out = match yaml {
        "" | "{}" => format!("---\n---\n\n{}", body),
        fm => format!("---\n{}\n---\n\n{}", fm, body),
    };
    if let Some(parent) = target.parent() {
        fs::create_dir_all(parent).map_err(|e| AugentError::FileWriteFailed {
            path: parent.display().to_string(),
            reason: e.to_string(),
        })?;
    }
    fs::write(target, out).map_err(|e| AugentError::FileWriteFailed {
        path: target.display().to_string(),
        reason: e.to_string(),
    })?;
    Ok(())
}
/// Heuristic: treats well-known binary file extensions
/// (case-insensitive) as content that must not be read as UTF-8 text.
fn is_likely_binary_file(path: &Path) -> bool {
    const BINARY_EXTENSIONS: &[&str] = &[
        "zip", "pdf", "png", "jpg", "jpeg", "gif", "webp", "ico", "woff", "woff2", "ttf",
        "otf", "eot", "mp3", "mp4", "webm", "avi", "mov", "exe", "dll", "so", "dylib", "bin",
    ];
    path.extension()
        .and_then(|e| e.to_str())
        .map(|ext| BINARY_EXTENSIONS.contains(&ext.to_lowercase().as_str()))
        .unwrap_or(false)
}
/// True for markdown files destined for Gemini's commands directory;
/// these are converted to TOML instead of being copied verbatim.
///
/// Fix: separators are normalized to '/' first so the check also matches
/// native Windows paths, consistent with the rest of this module.
fn is_gemini_command_file(&self, target: &Path) -> bool {
    let path_str = target.to_string_lossy().replace('\\', "/");
    path_str.contains(".gemini/commands/") && path_str.ends_with(".md")
}
/// True for markdown files destined for OpenCode's commands, agents, or
/// skills directories; their frontmatter is rewritten on install.
///
/// Fix: separators are normalized to '/' first so the check also matches
/// native Windows paths, consistent with the rest of this module.
fn is_opencode_metadata_file(&self, target: &Path) -> bool {
    let path_str = target.to_string_lossy().replace('\\', "/");
    if !path_str.ends_with(".md") {
        return false;
    }
    path_str.contains(".opencode/commands/")
        || path_str.contains(".opencode/agents/")
        || path_str.contains(".opencode/skills/")
}
/// Writes a Gemini command TOML file (`description` + `prompt`) from
/// already-merged frontmatter; the target's extension becomes `.toml`.
///
/// NOTE(review): a multi-line body is emitted inside a TOML `"""` block
/// without escaping — a body containing `"""` would produce invalid
/// TOML. Confirm command bodies cannot contain that sequence.
fn convert_gemini_command_from_merged(
    &self,
    merged: &YamlValue,
    body: &str,
    target: &Path,
) -> Result<()> {
    let description = universal::get_str(merged, "description");
    let mut toml_content = String::new();
    if let Some(desc) = description {
        toml_content.push_str(&format!(
            "description = {}\n",
            self.escape_toml_string(&desc)
        ));
    }
    // Multi-line prompts use a TOML multi-line basic string; single-line
    // prompts are escaped into a regular basic string.
    let is_multiline = body.contains('\n');
    if is_multiline {
        toml_content.push_str(&format!("prompt = \"\"\"\n{}\"\"\"\n", body));
    } else {
        toml_content.push_str(&format!("prompt = {}\n", self.escape_toml_string(body)));
    }
    let toml_target = target.with_extension("toml");
    if let Some(parent) = toml_target.parent() {
        fs::create_dir_all(parent).map_err(|e| AugentError::FileWriteFailed {
            path: parent.display().to_string(),
            reason: e.to_string(),
        })?;
    }
    fs::write(&toml_target, toml_content).map_err(|e| AugentError::FileWriteFailed {
        path: toml_target.display().to_string(),
        reason: e.to_string(),
    })?;
    Ok(())
}
/// Converts a markdown command file into Gemini's TOML format by
/// splitting it into a frontmatter `description` and a prompt body;
/// the target's extension becomes `.toml`.
///
/// NOTE(review): a multi-line prompt is emitted inside a TOML `"""`
/// block without escaping — a prompt containing `"""` would produce
/// invalid TOML. Confirm command bodies cannot contain that sequence.
fn convert_markdown_to_toml(&self, source: &Path, target: &Path) -> Result<()> {
    let content = fs::read_to_string(source).map_err(|e| AugentError::FileReadFailed {
        path: source.display().to_string(),
        reason: e.to_string(),
    })?;
    let (description, prompt) = self.extract_description_and_prompt(&content);
    let mut toml_content = String::new();
    if let Some(desc) = description {
        toml_content.push_str(&format!(
            "description = {}\n",
            self.escape_toml_string(&desc)
        ));
    }
    let is_multiline = prompt.contains('\n');
    if is_multiline {
        toml_content.push_str(&format!("prompt = \"\"\"\n{}\"\"\"\n", prompt));
    } else {
        toml_content.push_str(&format!("prompt = {}\n", self.escape_toml_string(&prompt)));
    }
    let toml_target = target.with_extension("toml");
    if let Some(parent) = toml_target.parent() {
        fs::create_dir_all(parent).map_err(|e| AugentError::FileWriteFailed {
            path: parent.display().to_string(),
            reason: e.to_string(),
        })?;
    }
    fs::write(&toml_target, toml_content).map_err(|e| AugentError::FileWriteFailed {
        path: toml_target.display().to_string(),
        reason: e.to_string(),
    })?;
    Ok(())
}
/// Splits markdown content into an optional frontmatter `description`
/// and the prompt body. Content without a complete `---` fence is
/// returned whole as the prompt.
fn extract_description_and_prompt(&self, content: &str) -> (Option<String>, String) {
    let lines: Vec<&str> = content.lines().collect();
    if lines.len() < 3 || lines[0] != "---" {
        return (None, content.to_string());
    }
    match lines[1..].iter().position(|line| *line == "---") {
        Some(rel) => {
            // `rel` is relative to lines[1..]; the closing fence sits at
            // absolute index rel + 1.
            let close = rel + 1;
            let frontmatter: String = lines[1..close].join("\n");
            let description = self.extract_description_from_frontmatter(&frontmatter);
            (description, lines[close + 1..].join("\n"))
        }
        None => (None, content.to_string()),
    }
}
/// Pulls the value of a `description:` (or `description =`) line out of
/// raw frontmatter text, stripping surrounding single or double quotes.
fn extract_description_from_frontmatter(&self, frontmatter: &str) -> Option<String> {
    for raw in frontmatter.lines() {
        let line = raw.trim();
        if !(line.starts_with("description:") || line.starts_with("description =")) {
            continue;
        }
        // Split on whichever separator appears first; ':' wins over '='.
        let sep = line.find(':').or_else(|| line.find('='));
        if let Some(idx) = sep {
            let value = line[idx + 1..]
                .trim()
                .trim_start_matches('"')
                .trim_start_matches('\'')
                .trim_end_matches('"')
                .trim_end_matches('\'');
            return Some(value.to_string());
        }
    }
    None
}
/// Converts a markdown file's frontmatter into OpenCode's expected shape
/// based on which `.opencode` subdirectory the target lives in; any
/// other destination is copied verbatim.
///
/// Fix: the destination directory is created up front. The per-kind
/// converters call `fs::write` directly, and the caller (copy_file)
/// only creates the parent on its plain-copy path — which is never
/// reached from here — so writes into a fresh directory used to fail.
/// The subdirectory checks also normalize separators so they match on
/// Windows, consistent with the rest of this module.
fn convert_opencode_frontmatter(&self, source: &Path, target: &Path) -> Result<()> {
    let content = fs::read_to_string(source).map_err(|e| AugentError::FileReadFailed {
        path: source.display().to_string(),
        reason: e.to_string(),
    })?;
    if let Some(parent) = target.parent() {
        fs::create_dir_all(parent).map_err(|e| AugentError::FileWriteFailed {
            path: parent.display().to_string(),
            reason: e.to_string(),
        })?;
    }
    let path_str = target.to_string_lossy().replace('\\', "/");
    if path_str.contains(".opencode/skills/") {
        self.convert_opencode_skill(&content, target)?;
    } else if path_str.contains(".opencode/commands/") {
        self.convert_opencode_command(&content, target)?;
    } else if path_str.contains(".opencode/agents/") {
        self.convert_opencode_agent(&content, target)?;
    } else {
        fs::copy(source, target).map_err(|e| AugentError::FileWriteFailed {
            path: target.display().to_string(),
            reason: e.to_string(),
        })?;
    }
    Ok(())
}
/// Normalizes a skill file's frontmatter for OpenCode, keeping only the
/// keys it understands (name, description, license, compatibility,
/// metadata) and defaulting `name` to the target file stem.
///
/// NOTE(review): frontmatter is parsed line-by-line with `key: value`
/// splitting, so nested YAML structures would be flattened — confirm
/// skill frontmatter is always flat key/value pairs.
fn convert_opencode_skill(&self, content: &str, target: &Path) -> Result<()> {
    let lines: Vec<&str> = content.lines().collect();
    // Split off the `---`-fenced frontmatter, if present; end_idx is
    // relative to lines[1..].
    let (frontmatter, body) = if lines.len() >= 3 && lines[0].eq("---") {
        if let Some(end_idx) = lines[1..].iter().position(|line| line.eq(&"---")) {
            let fm = lines[1..end_idx + 1].join("\n");
            let body_content = lines[end_idx + 2..].join("\n");
            (Some(fm), body_content)
        } else {
            (None, content.to_string())
        }
    } else {
        (None, content.to_string())
    };
    if frontmatter.is_none() {
        // No frontmatter to rewrite: pass the content through unchanged.
        fs::write(target, body).map_err(|e| AugentError::FileWriteFailed {
            path: target.display().to_string(),
            reason: e.to_string(),
        })?;
        return Ok(());
    }
    let mut new_frontmatter = String::new();
    let mut frontmatter_map = std::collections::HashMap::new();
    if let Some(fm) = &frontmatter {
        for line in fm.lines() {
            let line = line.trim();
            if let Some((key, value)) = line.split_once(':') {
                let key = key.trim();
                let value = value.trim().trim_start_matches('"').trim_end_matches('"');
                frontmatter_map.insert(key.to_string(), value.to_string());
            }
        }
    }
    new_frontmatter.push_str("---\n");
    // `name` is always emitted; it falls back to the file stem, then to
    // the literal "unknown".
    let name = frontmatter_map
        .get("name")
        .map(|s| s.as_str())
        .or_else(|| target.file_stem().and_then(|s| s.to_str()))
        .unwrap_or("unknown");
    new_frontmatter.push_str(&format!("name: {}\n", name));
    if let Some(desc) = frontmatter_map.get("description") {
        new_frontmatter.push_str(&format!("description: {}\n", desc));
    }
    if let Some(license) = frontmatter_map.get("license") {
        new_frontmatter.push_str(&format!("license: {}\n", license));
    }
    if let Some(compatibility) = frontmatter_map.get("compatibility") {
        new_frontmatter.push_str(&format!("compatibility: {}\n", compatibility));
    }
    if frontmatter_map.contains_key("metadata") {
        if let Some(meta) = frontmatter_map.get("metadata") {
            new_frontmatter.push_str(&format!("metadata: {}\n", meta));
        }
    }
    new_frontmatter.push_str("---\n\n");
    fs::write(target, format!("{}{}", new_frontmatter, body)).map_err(|e| {
        AugentError::FileWriteFailed {
            path: target.display().to_string(),
            reason: e.to_string(),
        }
    })?;
    Ok(())
}
/// Rewrites a command file for OpenCode: a minimal frontmatter with only
/// the description (when present), followed by the prompt body.
fn convert_opencode_command(&self, content: &str, target: &Path) -> Result<()> {
    let (description, prompt) = self.extract_description_and_prompt(content);
    let header = match description {
        Some(desc) => format!("---\ndescription: {}\n---\n\n", desc),
        None => String::new(),
    };
    fs::write(target, format!("{}{}", header, prompt)).map_err(|e| {
        AugentError::FileWriteFailed {
            path: target.display().to_string(),
            reason: e.to_string(),
        }
    })?;
    Ok(())
}
/// Rewrites an agent file for OpenCode: a minimal frontmatter with only
/// the description (when present), followed by the prompt body.
fn convert_opencode_agent(&self, content: &str, target: &Path) -> Result<()> {
    let (description, prompt) = self.extract_description_and_prompt(content);
    let mut output = String::new();
    if let Some(desc) = description {
        output.push_str(&format!("---\ndescription: {}\n---\n\n", desc));
    }
    output.push_str(&prompt);
    fs::write(target, output).map_err(|e| AugentError::FileWriteFailed {
        path: target.display().to_string(),
        reason: e.to_string(),
    })?;
    Ok(())
}
/// Re-emits a file's frontmatter (key: value per line, quotes stripped)
/// followed by its body, without writing to disk.
///
/// Fix: keys are emitted in their original source order with
/// last-occurrence-wins semantics. The previous implementation iterated
/// a `HashMap`, whose order is randomized per process, making the
/// output non-deterministic across runs.
fn convert_opencode_frontmatter_only(&self, content: &str) -> Result<String> {
    let lines: Vec<&str> = content.lines().collect();
    // Split off the `---`-fenced frontmatter, if present; end_idx is
    // relative to lines[1..].
    let (frontmatter, body) = if lines.len() >= 3 && lines[0].eq("---") {
        if let Some(end_idx) = lines[1..].iter().position(|line| line.eq(&"---")) {
            let fm = lines[1..end_idx + 1].join("\n");
            let body_content = lines[end_idx + 2..].join("\n");
            (Some(fm), body_content)
        } else {
            (None, content.to_string())
        }
    } else {
        (None, content.to_string())
    };
    // Ordered key/value pairs; a repeated key updates in place so the
    // last value wins (matching the old HashMap insert semantics).
    let mut pairs: Vec<(String, String)> = Vec::new();
    if let Some(fm) = &frontmatter {
        for line in fm.lines() {
            let line = line.trim();
            if let Some((key, value)) = line.split_once(':') {
                let key = key.trim();
                let value = value.trim().trim_start_matches('"').trim_end_matches('"');
                match pairs.iter_mut().find(|(k, _)| k == key) {
                    Some(entry) => entry.1 = value.to_string(),
                    None => pairs.push((key.to_string(), value.to_string())),
                }
            }
        }
    }
    let mut new_frontmatter = String::from("---\n");
    for (key, value) in &pairs {
        new_frontmatter.push_str(&format!("{}: {}\n", key, value));
    }
    new_frontmatter.push_str("---\n\n");
    Ok(format!("{}{}", new_frontmatter, body))
}
/// Escapes `s` as a TOML basic (double-quoted) string literal, including
/// the surrounding quotes.
///
/// Backslash, double quote, and common whitespace controls use their short
/// escapes; any other C0 control character is emitted as `\u00XX`.
///
/// Fix: control characters were previously escaped as `\xXX`, which is not
/// a valid TOML escape sequence — TOML basic strings only permit
/// `\b \t \n \f \r \" \\ \uXXXX \UXXXXXXXX` — so the output could not be
/// parsed back. `\u{:04X}` produces spec-compliant escapes.
fn escape_toml_string(&self, s: &str) -> String {
    // +2 for the surrounding quotes; escapes may grow it further.
    let mut escaped = String::with_capacity(s.len() + 2);
    for c in s.chars() {
        match c {
            '\\' => escaped.push_str("\\\\"),
            '"' => escaped.push_str("\\\""),
            '\n' => escaped.push_str("\\n"),
            '\r' => escaped.push_str("\\r"),
            '\t' => escaped.push_str("\\t"),
            '\x00'..='\x08' | '\x0B' | '\x0C' | '\x0E'..='\x1F' => {
                escaped.push_str(&format!("\\u{:04X}", c as u32));
            }
            _ => escaped.push(c),
        }
    }
    format!("\"{}\"", escaped)
}
/// Merges the JSON(C) file at `source` into the one at `target` using
/// `strategy`, rewriting `target` as pretty-printed JSON.
///
/// Both files have JSONC comments stripped before parsing. Strategies
/// other than `Shallow`/`Deep` leave the parsed target value untouched
/// (it is still re-serialized and written back).
///
/// # Errors
/// `FileReadFailed` / `ConfigParseFailed` for either input,
/// `FileWriteFailed` for the output.
fn merge_json_files(
    &self,
    source: &Path,
    target: &Path,
    strategy: &MergeStrategy,
) -> Result<()> {
    // Read + comment-strip + parse one JSONC file.
    let load = |path: &Path| -> Result<serde_json::Value> {
        let raw = fs::read_to_string(path).map_err(|e| AugentError::FileReadFailed {
            path: path.display().to_string(),
            reason: e.to_string(),
        })?;
        serde_json::from_str(&strip_jsonc_comments(&raw)).map_err(|e| {
            AugentError::ConfigParseFailed {
                path: path.display().to_string(),
                reason: e.to_string(),
            }
        })
    };
    let source_value = load(source)?;
    let mut target_value = load(target)?;
    match strategy {
        MergeStrategy::Shallow => shallow_merge(&mut target_value, &source_value),
        MergeStrategy::Deep => deep_merge(&mut target_value, &source_value),
        _ => {}
    }
    let result = serde_json::to_string_pretty(&target_value).map_err(|e| {
        AugentError::ConfigParseFailed {
            path: target.display().to_string(),
            reason: e.to_string(),
        }
    })?;
    fs::write(target, result).map_err(|e| AugentError::FileWriteFailed {
        path: target.display().to_string(),
        reason: e.to_string(),
    })?;
    Ok(())
}
/// Appends the contents of `source` below `target`, separated by an HTML
/// comment marker, then rewrites `target` with the combined text.
///
/// # Errors
/// `FileReadFailed` if either file cannot be read, `FileWriteFailed` if
/// the merged result cannot be written back.
fn merge_text_files(&self, source: &Path, target: &Path) -> Result<()> {
    let read = |path: &Path| -> Result<String> {
        fs::read_to_string(path).map_err(|e| AugentError::FileReadFailed {
            path: path.display().to_string(),
            reason: e.to_string(),
        })
    };
    let incoming = read(source)?;
    let existing = read(target)?;
    // Trim trailing whitespace on the existing text so the marker sits
    // exactly one blank line below it.
    let merged = format!(
        "{}\n\n<!-- Augent: merged content below -->\n\n{}",
        existing.trim_end(),
        incoming
    );
    fs::write(target, merged).map_err(|e| AugentError::FileWriteFailed {
        path: target.display().to_string(),
        reason: e.to_string(),
    })?;
    Ok(())
}
/// Borrows the map of files this installer has recorded as installed.
/// (Key semantics are determined where entries are inserted, elsewhere
/// in this installer.)
pub fn installed_files(&self) -> &HashMap<String, InstalledFile> {
&self.installed_files
}
}
/// Strips `//` line comments and `/* ... */` block comments from JSONC
/// text, leaving string contents untouched (so `"http://x"` survives).
///
/// Line comments keep their terminating newline; block comments are
/// removed entirely, including any newlines inside them.
///
/// Fix: string termination was previously detected by checking whether the
/// preceding character was a backslash, which misreads strings ending in an
/// escaped backslash (e.g. `"C:\\"`) as unterminated and then preserves
/// everything after them — including real comments. A proper escape-state
/// flag handles any run of backslashes correctly.
fn strip_jsonc_comments(content: &str) -> String {
    let mut result = String::new();
    let mut in_string = false;
    // True when the previous in-string character was an unconsumed '\'.
    let mut escaped = false;
    let mut in_single_comment = false;
    let mut in_multi_comment = false;
    let chars: Vec<char> = content.chars().collect();
    let len = chars.len();
    let mut i = 0;
    while i < len {
        let c = chars[i];
        let next = chars.get(i + 1).copied();
        if in_single_comment {
            if c == '\n' {
                in_single_comment = false;
                result.push(c);
            }
        } else if in_multi_comment {
            if c == '*' && next == Some('/') {
                in_multi_comment = false;
                i += 1; // consume the '/'
            }
        } else if in_string {
            result.push(c);
            if escaped {
                // This char is consumed by the escape, whatever it is.
                escaped = false;
            } else if c == '\\' {
                escaped = true;
            } else if c == '"' {
                in_string = false;
            }
        } else {
            match (c, next) {
                ('/', Some('/')) => {
                    in_single_comment = true;
                    i += 1; // consume the second '/'
                }
                ('/', Some('*')) => {
                    in_multi_comment = true;
                    i += 1; // consume the '*'
                }
                ('"', _) => {
                    in_string = true;
                    result.push(c);
                }
                _ => {
                    result.push(c);
                }
            }
        }
        i += 1;
    }
    result
}
/// Shallow merge: every top-level key of `source` overwrites (or adds to)
/// the corresponding key in `target`; nested objects are replaced
/// wholesale, not merged. Does nothing unless both values are objects.
fn shallow_merge(target: &mut serde_json::Value, source: &serde_json::Value) {
    let src = match source.as_object() {
        Some(obj) => obj,
        None => return,
    };
    if let Some(dst) = target.as_object_mut() {
        for (key, value) in src {
            dst.insert(key.clone(), value.clone());
        }
    }
}
/// Deep merge: when both values are objects, merge key-by-key and recurse
/// into keys present on both sides; in every other case `target` is
/// replaced by a clone of `source`.
fn deep_merge(target: &mut serde_json::Value, source: &serde_json::Value) {
    use serde_json::Value::Object;
    match (target, source) {
        (Object(dst), Object(src)) => {
            for (key, incoming) in src {
                if let Some(existing) = dst.get_mut(key) {
                    deep_merge(existing, incoming);
                } else {
                    dst.insert(key.clone(), incoming.clone());
                }
            }
        }
        (other, replacement) => *other = replacement.clone(),
    }
}
#[cfg(test)]
mod tests {
    //! Unit tests covering resource discovery, skills filtering, JSONC
    //! comment stripping, JSON merge strategies, and transform rules.
    use super::*;
    use tempfile::TempDir;

    /// Shared helper for the `filter_skills_resources` tests (previously
    /// copy-pasted into three tests): builds a `DiscoveredResource`, typing
    /// anything under `skills/` as a skill and everything else as a command.
    fn make_resource(bundle_path: &str, absolute: &Path) -> DiscoveredResource {
        DiscoveredResource {
            bundle_path: PathBuf::from(bundle_path),
            absolute_path: absolute.to_path_buf(),
            resource_type: if bundle_path.starts_with("skills/") {
                "skills".to_string()
            } else {
                "commands".to_string()
            },
        }
    }

    #[test]
    fn test_discover_resources_empty() {
        let temp = TempDir::new_in(crate::temp::temp_dir_base()).unwrap();
        let resources = Installer::discover_resources(temp.path()).unwrap();
        assert!(resources.is_empty());
    }

    #[test]
    fn test_discover_resources_commands() {
        let temp = TempDir::new_in(crate::temp::temp_dir_base()).unwrap();
        let commands_dir = temp.path().join("commands");
        fs::create_dir(&commands_dir).unwrap();
        fs::write(commands_dir.join("debug.md"), "# Debug command").unwrap();
        fs::write(commands_dir.join("test.md"), "# Test command").unwrap();
        let resources = Installer::discover_resources(temp.path()).unwrap();
        assert_eq!(resources.len(), 2);
        assert!(
            resources
                .iter()
                .any(|r| r.bundle_path == Path::new("commands/debug.md"))
        );
        assert!(
            resources
                .iter()
                .any(|r| r.bundle_path == Path::new("commands/test.md"))
        );
    }

    #[test]
    fn test_discover_resources_root_files() {
        let temp = TempDir::new_in(crate::temp::temp_dir_base()).unwrap();
        fs::write(temp.path().join("AGENTS.md"), "# Agents").unwrap();
        fs::write(temp.path().join("mcp.jsonc"), "{}").unwrap();
        let resources = Installer::discover_resources(temp.path()).unwrap();
        assert_eq!(resources.len(), 2);
    }

    #[test]
    fn test_filter_skills_resources() {
        let temp = TempDir::new_in(crate::temp::temp_dir_base()).unwrap();
        let base = temp.path();
        let valid_skill_md =
            "---\nname: valid-skill\ndescription: A valid skill for testing.\n---\n\nBody.";
        fs::write(base.join("b.md"), valid_skill_md).unwrap();
        let resources = vec![
            make_resource("skills/web-design-guidelines.zip", &base.join("a.zip")),
            make_resource("skills/valid-skill/SKILL.md", &base.join("b.md")),
            make_resource("skills/valid-skill/metadata.json", &base.join("c.json")),
            make_resource("skills/metadata-only/metadata.json", &base.join("d.json")),
            make_resource("commands/debug.md", &base.join("e.md")),
        ];
        let filtered = Installer::filter_skills_resources(resources);
        let paths: Vec<_> = filtered
            .iter()
            .map(|r| r.bundle_path.to_string_lossy().into_owned())
            .collect();
        assert!(
            !paths.contains(&"skills/web-design-guidelines.zip".to_string()),
            "standalone file in skills/ should be skipped"
        );
        assert!(
            !paths.contains(&"skills/metadata-only/metadata.json".to_string()),
            "skill dir without SKILL.md should be skipped"
        );
        assert!(
            paths.contains(&"skills/valid-skill/SKILL.md".to_string()),
            "skill dir with valid SKILL.md should keep SKILL.md"
        );
        assert!(
            paths.contains(&"skills/valid-skill/metadata.json".to_string()),
            "skill dir with valid SKILL.md should keep metadata.json"
        );
        assert!(
            paths.contains(&"commands/debug.md".to_string()),
            "non-skills resources should be unchanged"
        );
        assert_eq!(filtered.len(), 3);
    }

    #[test]
    fn test_filter_skills_resources_nested_skill_dir() {
        let temp = TempDir::new_in(crate::temp::temp_dir_base()).unwrap();
        let base = temp.path();
        let nested_skill_md =
            "---\nname: vercel-deploy\ndescription: Deploy to Vercel.\n---\n\nBody.";
        fs::write(base.join("nested.md"), nested_skill_md).unwrap();
        let resources = vec![
            make_resource(
                "skills/claude.ai/vercel-deploy-claimable/SKILL.md",
                &base.join("nested.md"),
            ),
            make_resource(
                "skills/claude.ai/vercel-deploy-claimable/scripts/deploy.sh",
                &base.join("deploy.sh"),
            ),
            make_resource(
                "skills/claude.ai/vercel-deploy-claimable.zip",
                &base.join("a.zip"),
            ),
        ];
        let filtered = Installer::filter_skills_resources(resources);
        let paths: Vec<_> = filtered
            .iter()
            .map(|r| r.bundle_path.to_string_lossy().into_owned())
            .collect();
        assert!(
            paths.contains(&"skills/claude.ai/vercel-deploy-claimable/SKILL.md".to_string()),
            "nested skill dir with valid SKILL.md should keep SKILL.md"
        );
        assert!(
            paths.contains(
                &"skills/claude.ai/vercel-deploy-claimable/scripts/deploy.sh".to_string()
            ),
            "nested skill dir should keep files under it"
        );
        assert!(
            !paths.contains(&"skills/claude.ai/vercel-deploy-claimable.zip".to_string()),
            "zip file in skills/ (not under a skill dir) should be skipped"
        );
        assert_eq!(filtered.len(), 2);
    }

    #[test]
    fn test_filter_skills_resources_leaf_only_parent_and_child_have_skill_md() {
        let temp = TempDir::new_in(crate::temp::temp_dir_base()).unwrap();
        let base = temp.path();
        let resources = vec![
            make_resource("skills/claude.ai/SKILL.md", &base.join("parent.md")),
            make_resource(
                "skills/claude.ai/vercel-deploy-claimable/SKILL.md",
                &base.join("leaf.md"),
            ),
            make_resource(
                "skills/claude.ai/vercel-deploy-claimable/scripts/deploy.sh",
                &base.join("deploy.sh"),
            ),
        ];
        let filtered = Installer::filter_skills_resources(resources);
        let paths: Vec<_> = filtered
            .iter()
            .map(|r| r.bundle_path.to_string_lossy().into_owned())
            .collect();
        assert!(
            paths.contains(&"skills/claude.ai/vercel-deploy-claimable/SKILL.md".to_string()),
            "leaf skill dir should keep SKILL.md"
        );
        assert!(
            paths.contains(
                &"skills/claude.ai/vercel-deploy-claimable/scripts/deploy.sh".to_string()
            ),
            "leaf skill dir should keep files under it"
        );
        assert!(
            !paths.contains(&"skills/claude.ai/SKILL.md".to_string()),
            "parent dir with SKILL.md should be skipped when child also has SKILL.md (leaf-only)"
        );
        assert_eq!(filtered.len(), 2);
    }

    #[test]
    fn test_strip_jsonc_comments() {
        let jsonc = r#"{
// This is a comment
"key": "value",
/* Multi-line
comment */
"key2": "value2"
}"#;
        let json = strip_jsonc_comments(jsonc);
        let parsed: serde_json::Value = serde_json::from_str(&json).unwrap();
        assert_eq!(parsed["key"], "value");
        assert_eq!(parsed["key2"], "value2");
    }

    #[test]
    fn test_shallow_merge() {
        let mut target: serde_json::Value = serde_json::json!({
            "a": 1,
            "b": {"nested": true}
        });
        let source: serde_json::Value = serde_json::json!({
            "b": {"different": true},
            "c": 3
        });
        shallow_merge(&mut target, &source);
        assert_eq!(target["a"], 1);
        assert_eq!(target["b"]["different"], true);
        // Shallow merge replaces nested objects wholesale.
        assert!(target["b"].get("nested").is_none());
        assert_eq!(target["c"], 3);
    }

    #[test]
    fn test_deep_merge() {
        let mut target: serde_json::Value = serde_json::json!({
            "a": 1,
            "b": {"nested": true, "keep": "this"}
        });
        let source: serde_json::Value = serde_json::json!({
            "b": {"different": true},
            "c": 3
        });
        deep_merge(&mut target, &source);
        assert_eq!(target["a"], 1);
        // Deep merge preserves existing nested keys while adding new ones.
        assert_eq!(target["b"]["nested"], true);
        assert_eq!(target["b"]["keep"], "this");
        assert_eq!(target["b"]["different"], true);
        assert_eq!(target["c"], 3);
    }

    #[test]
    fn test_pattern_matches() {
        // `Glob`, `CandidatePath`, and the `Pattern` trait come in via
        // `use super::*` from the file-level wax import.
        assert!(
            Glob::new("commands/*.md")
                .unwrap()
                .matched(&CandidatePath::from("commands/debug.md"))
                .is_some()
        );
        assert!(
            Glob::new("commands/**/*.md")
                .unwrap()
                .matched(&CandidatePath::from("commands/sub/debug.md"))
                .is_some()
        );
        assert!(
            Glob::new("AGENTS.md")
                .unwrap()
                .matched(&CandidatePath::from("AGENTS.md"))
                .is_some()
        );
        assert!(
            Glob::new("commands/*.md")
                .unwrap()
                .matched(&CandidatePath::from("rules/debug.md"))
                .is_none()
        );
    }

    #[test]
    fn test_install_resource_no_platforms() {
        let temp = TempDir::new_in(crate::temp::temp_dir_base()).unwrap();
        let mut installer = Installer::new(temp.path(), vec![]);
        let bundle = ResolvedBundle {
            name: "test-bundle".to_string(),
            dependency: None,
            source_path: temp.path().to_path_buf(),
            resolved_sha: None,
            resolved_ref: None,
            git_source: None,
            config: None,
        };
        let result = installer.install_bundle(&bundle);
        assert!(result.is_ok());
        let workspace_bundle = result.unwrap();
        assert_eq!(workspace_bundle.name, "test-bundle");
    }

    #[test]
    fn test_copy_file() {
        let temp = TempDir::new_in(crate::temp::temp_dir_base()).unwrap();
        let installer = Installer::new(temp.path(), vec![]);
        let source = temp.path().join("source.txt");
        let target = temp.path().join("target.txt");
        fs::write(&source, "test content").unwrap();
        let result = installer.copy_file(&source, &target);
        assert!(result.is_ok());
        assert!(target.exists());
        assert_eq!(fs::read_to_string(&target).unwrap(), "test content");
    }

    #[test]
    fn test_deep_merge_new_keys() {
        let mut target: serde_json::Value = serde_json::json!({
            "a": 1
        });
        let source: serde_json::Value = serde_json::json!({
            "b": 2,
            "c": 3
        });
        deep_merge(&mut target, &source);
        assert_eq!(target["a"], 1);
        assert_eq!(target["b"], 2);
        assert_eq!(target["c"], 3);
    }

    #[test]
    fn test_find_transform_rule_no_match() {
        let platform = Platform::new("test", "Test", ".test");
        let installer = Installer::new(Path::new("/test"), vec![platform.clone()]);
        let resource = DiscoveredResource {
            bundle_path: PathBuf::from("other/test.md"),
            absolute_path: PathBuf::from("/test/other/test.md"),
            resource_type: "commands".to_string(),
        };
        let rule = installer.find_transform_rule(&platform, &resource.bundle_path);
        assert!(rule.is_none());
    }

    #[test]
    fn test_apply_transform_rule_single_wildcard_with_extension() {
        let installer = Installer::new(Path::new("/workspace"), vec![]);
        let single_wildcard_rule =
            TransformRule::new("rules/*.md", ".cursor/rules/*.mdc").with_extension("mdc");
        let format_resource = PathBuf::from("rules/format.md");
        let result = installer.apply_transform_rule(&single_wildcard_rule, &format_resource);
        assert_eq!(
            result,
            PathBuf::from("/workspace/.cursor/rules/format.mdc"),
            "Single wildcard should be replaced with filename stem before extension"
        );
    }

    #[test]
    fn test_apply_transform_rule_double_wildcard_with_extension() {
        let installer = Installer::new(Path::new("/workspace"), vec![]);
        let double_wildcard_rule =
            TransformRule::new("rules/**/*.md", ".cursor/rules/**/*.mdc").with_extension("mdc");
        let format_resource = PathBuf::from("rules/format.md");
        let result = installer.apply_transform_rule(&double_wildcard_rule, &format_resource);
        assert_eq!(
            result,
            PathBuf::from("/workspace/.cursor/rules/format.mdc"),
            "Double wildcard should be replaced correctly before extension"
        );
    }

    #[test]
    fn test_apply_transform_rule_nested_path_double_wildcard_with_extension() {
        let installer = Installer::new(Path::new("/workspace"), vec![]);
        let nested_rule =
            TransformRule::new("rules/**/*.md", ".cursor/rules/**/*.mdc").with_extension("mdc");
        let nested_resource = PathBuf::from("rules/subdir/nested.md");
        let result = installer.apply_transform_rule(&nested_rule, &nested_resource);
        assert_eq!(
            result,
            PathBuf::from("/workspace/.cursor/rules/subdir/nested.mdc"),
            "Nested path should be preserved with correct extension"
        );
    }

    #[test]
    fn test_apply_transform_rule_name_placeholder_with_extension() {
        let installer = Installer::new(Path::new("/workspace"), vec![]);
        let name_placeholder_rule =
            TransformRule::new("rules/{name}.md", ".cursor/rules/{name}.mdc").with_extension("mdc");
        let debug_resource = PathBuf::from("rules/debug.md");
        let result = installer.apply_transform_rule(&name_placeholder_rule, &debug_resource);
        assert_eq!(
            result,
            PathBuf::from("/workspace/.cursor/rules/debug.mdc"),
            "Name placeholder should be replaced correctly"
        );
    }
}