#![cfg(not(windows))]
use crate::common::wt_command;
use ansi_str::AnsiStr;
use ansi_to_html::convert as ansi_to_html;
use regex::Regex;
use std::fs;
use std::path::Path;
use std::sync::LazyLock;
// Matches README/docs auto-generated blocks: capture 1 = source id (a CLI
// help command, a docs-section reference, or a snapshot path), capture 2 =
// the currently embedded body.
static MARKER_PATTERN: LazyLock<Regex> = LazyLock::new(|| {
    Regex::new(
        r"(?s)<!-- ⚠️ AUTO-GENERATED from ([^\n]+?) — edit [^\n]+ to update -->\n+([\s\S]*?)\n*<!-- END AUTO-GENERATED -->",
    )
    .unwrap()
});
// A literal `[…m]` SGR sequence as stored in snapshot files (no ESC byte).
static ANSI_LITERAL_REGEX: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"\[[0-9;]*m").unwrap());
// Docs blocks generated as HTML inside a `{% terminal() %}` shortcode;
// capture 1 = snapshot file name, capture 2 = rendered body.
static DOCS_SNAPSHOT_MARKER_PATTERN: LazyLock<Regex> = LazyLock::new(|| {
    Regex::new(
        r#"(?s)<!-- ⚠️ AUTO-GENERATED-HTML from ([^\s]+\.snap) — edit source to update -->\n+\{% terminal\([^)]*\) %\}\n(.*?)\{% end %\}\n+<!-- END AUTO-GENERATED -->"#,
    )
    .unwrap()
});
// Test-harness placeholders normalized for published output.
static HASH_REGEX: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"\[HASH\]").unwrap());
static TMPDIR_BRANCH_REGEX: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r"\[TMPDIR\]/repo\.([^\s/]+)").unwrap());
static TMPDIR_MAIN_REGEX: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r"\[TMPDIR\]/repo(\s|$)").unwrap());
static REPO_REGEX: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"\[REPO\]").unwrap());
// `_REPO_` with an optional `.branch` suffix (capture 2 = branch name).
static REPO_UNDERSCORE_REGEX: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r"_REPO_(\.([a-zA-Z0-9_-]+))?").unwrap());
// Marker pairs delimiting the config docs embedded in src/cli/mod.rs.
static USER_CONFIG_PATTERN: LazyLock<Regex> = LazyLock::new(|| {
    Regex::new(r"(?s)<!-- USER_CONFIG_START -->\n(.*?)\n<!-- USER_CONFIG_END -->").unwrap()
});
static PROJECT_CONFIG_PATTERN: LazyLock<Regex> = LazyLock::new(|| {
    Regex::new(r"(?s)<!-- PROJECT_CONFIG_START -->\n(.*?)\n<!-- PROJECT_CONFIG_END -->").unwrap()
});
// Marker pairs wrapping the LLM commit-template examples quoted in the docs;
// capture 1/2 keep the markers themselves so the interior can be replaced.
static DEFAULT_TEMPLATE_PATTERN: LazyLock<Regex> = LazyLock::new(|| {
    Regex::new(r"(?s)(<!-- DEFAULT_TEMPLATE_START -->\n).*?(<!-- DEFAULT_TEMPLATE_END -->)")
        .unwrap()
});
static SQUASH_TEMPLATE_PATTERN: LazyLock<Regex> = LazyLock::new(|| {
    Regex::new(
        r"(?s)(<!-- DEFAULT_SQUASH_TEMPLATE_START -->\n).*?(<!-- DEFAULT_SQUASH_TEMPLATE_END -->)",
    )
    .unwrap()
});
// `const DEFAULT_TEMPLATE: &str = r#"…"#;` declarations in src/llm.rs
// (capture 1 = constant name, capture 2 = raw-string body).
static RUST_RAW_STRING_PATTERN: LazyLock<Regex> = LazyLock::new(|| {
    Regex::new(r##"(?s)const (DEFAULT_TEMPLATE|DEFAULT_SQUASH_TEMPLATE): &str = r#"(.*?)"#;"##)
        .unwrap()
});
// Zola-internal links `[text](@/page.md#anchor)`; link text may contain
// backticked spans.
static ZOLA_LINK_PATTERN: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r"\[((?:`[^`]*`|[^\]`])+)\]\(@/([^)#]+)\.md(#[^)]*)?\)").unwrap());
// Detects any `](@/….md` left over — presumably used elsewhere to assert
// that no internal links survive transformation.
static UNTRANSFORMED_ZOLA_LINK_PATTERN: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r"\]\(@/[^)]+\.md").unwrap());
// `{% rawcode() %}…{% end %}` shortcode bodies.
static ZOLA_RAWCODE_PATTERN: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r"(?s)\{% rawcode\(\) %\}(.*?)\{% end %\}").unwrap());
// Demo <figure> blocks; capture 1 = asset file name, capture 2 = alt text.
static ZOLA_FIGURE_PATTERN: LazyLock<Regex> = LazyLock::new(|| {
    Regex::new(
        r#"(?s)<figure class="demo">\s*<picture>.*?<img src="/assets/([^"]+)" alt="([^"]*)"[^>]*>.*?</picture>.*?</figure>"#,
    )
    .unwrap()
});
/// How a marker's regenerated content is wrapped when written back.
enum OutputFormat {
    DocsHtml,
    Unwrapped,
}

/// Kind of auto-generated marker, inferred from the marker's id string.
#[derive(Clone, Copy)]
enum MarkerType {
    Snapshot,
    Help,
    Section,
}

impl MarkerType {
    /// Classify a marker id: backtick-quoted ids are CLI help commands, ids
    /// containing `#` reference a docs section, anything else is a snapshot.
    fn from_id(id: &str) -> Self {
        let backticked = id.starts_with('`') && id.ends_with('`');
        match (backticked, id.contains('#')) {
            (true, _) => Self::Help,
            (false, true) => Self::Section,
            (false, false) => Self::Snapshot,
        }
    }
    /// Output wrapping for this marker kind. Snapshot markers never occur in
    /// the README, so that arm is unreachable.
    fn output_format(&self) -> OutputFormat {
        match self {
            Self::Help | Self::Section => OutputFormat::Unwrapped,
            Self::Snapshot => unreachable!("README has no snapshot markers"),
        }
    }
    /// Extract the comparable inner text from the currently embedded body.
    fn extract_inner(&self, content: &str) -> String {
        if matches!(self, Self::Snapshot) {
            unreachable!("README has no snapshot markers");
        }
        content.to_string()
    }
}
fn parse_snapshot_raw(content: &str) -> String {
let content = if content.starts_with("---") {
let parts: Vec<&str> = content.splitn(3, "---").collect();
if parts.len() >= 3 {
parts[2].trim().to_string()
} else {
content.to_string()
}
} else {
content.to_string()
};
if content.contains("----- stdout -----") {
let stderr = extract_section(&content, "----- stderr -----\n", "----- ");
if !stderr.is_empty() {
return stderr;
}
let stdout = extract_section(&content, "----- stdout -----\n", "----- stderr -----");
return stdout; }
content
}
fn extract_section(content: &str, start_marker: &str, end_marker: &str) -> String {
if let Some(start) = content.find(start_marker) {
let after_header = &content[start + start_marker.len()..];
if let Some(end) = after_header.find(end_marker) {
after_header[..end].trim_end().to_string()
} else {
after_header.trim_end().to_string()
}
} else {
String::new()
}
}
/// Reconstruct the CLI invocation recorded in a snapshot's YAML front matter
/// (`program:` plus the `args:` list). Returns `None` when there is no front
/// matter or no `program:` entry.
fn extract_command_from_snapshot(content: &str) -> Option<String> {
    if !content.starts_with("---") {
        return None;
    }
    let mut pieces = content.splitn(3, "---");
    pieces.next()?; // empty chunk before the first `---`
    let yaml = pieces.next()?;
    pieces.next()?; // body must exist (mirrors the original `len >= 3` check)
    let program_line = yaml.lines().find(|l| l.trim().starts_with("program:"))?;
    let program = program_line.trim().strip_prefix("program:").unwrap().trim();
    let args_section = &yaml[yaml.find("args:")?..];
    // Collect the `- item` lines immediately following `args:`.
    let args: Vec<&str> = args_section
        .lines()
        .skip(1)
        .take_while(|l| l.trim().starts_with("- "))
        .map(|l| l.trim().strip_prefix("- ").unwrap().trim_matches('"'))
        .collect();
    Some(if args.is_empty() {
        program.to_string()
    } else {
        format!("{} {}", program, args.join(" "))
    })
}
fn replace_placeholders(content: &str) -> String {
let content = HASH_REGEX.replace_all(content, "a1b2c3d");
let content = TMPDIR_BRANCH_REGEX.replace_all(&content, "../repo.$1");
let content = TMPDIR_MAIN_REGEX.replace_all(&content, "../repo$1");
let content = REPO_REGEX.replace_all(&content, "../repo");
REPO_UNDERSCORE_REGEX
.replace_all(&content, |caps: ®ex::Captures| {
if let Some(branch) = caps.get(2) {
format!("repo.{}", branch.as_str())
} else {
"repo".to_string()
}
})
.into_owned()
}
fn format_replacement(id: &str, content: &str, format: &OutputFormat) -> String {
match format {
OutputFormat::DocsHtml => {
let cmd_re = Regex::new(r#"^<span class="cmd">([^<]+)</span>"#).unwrap();
let cmd_attr = cmd_re
.captures(content)
.map(|c| format!(r#"cmd="{}""#, c.get(1).unwrap().as_str()))
.unwrap_or_default();
format!(
"<!-- ⚠️ AUTO-GENERATED-HTML from {} — edit source to update -->\n\n{{% terminal({}) %}}\n{}\n{{% end %}}\n\n<!-- END AUTO-GENERATED -->",
id, cmd_attr, content
)
}
OutputFormat::Unwrapped => {
format!(
"<!-- ⚠️ AUTO-GENERATED from {} — edit source to update -->\n\n{}\n\n<!-- END AUTO-GENERATED -->",
id, content
)
}
}
}
/// Run `get_replacement(id, current)` for every marker matched by `pattern`
/// in `content` and splice in any changed sections.
///
/// Returns `(updated text, number of changed sections, total markers)` on
/// success, or all generation errors. Replacements are applied back-to-front
/// so the recorded byte offsets stay valid while `result` is edited.
fn update_section(
    content: &str,
    pattern: &Regex,
    format: OutputFormat,
    get_replacement: impl Fn(&str, &str) -> Result<String, String>,
) -> Result<(String, usize, usize), Vec<String>> {
    let mut result = content.to_string();
    let mut errors = Vec::new();
    let mut updated = 0;
    let matches: Vec<_> = pattern
        .captures_iter(content)
        .map(|cap| {
            let full_match = cap.get(0).unwrap();
            let id = cap.get(1).unwrap().as_str().to_string();
            let current = trim_lines(cap.get(2).unwrap().as_str());
            (full_match.start(), full_match.end(), id, current)
        })
        .collect();
    let total = matches.len();
    for (start, end, id, current) in matches.into_iter().rev() {
        // Fix: argument was corrupted to `¤t` (HTML-entity mangling of
        // `&current`), which does not compile.
        let expected = match get_replacement(&id, &current) {
            Ok(content) => content,
            Err(e) => {
                errors.push(format!("❌ {}: {}", id, e));
                continue;
            }
        };
        if current != expected {
            let replacement = format_replacement(&id, &expected, &format);
            result.replace_range(start..end, &replacement);
            updated += 1;
        }
    }
    if errors.is_empty() {
        Ok((result, updated, total))
    } else {
        Err(errors)
    }
}
// `<!-- wt … -->` placeholder followed by a bash fence showing the command;
// capture 1 = placeholder id, capture 2 = the command as displayed.
static COMMAND_PLACEHOLDER_PATTERN: LazyLock<Regex> = LazyLock::new(|| {
    Regex::new(r"<!-- (wt [^>]+) -->\n```bash\n(?:\$ )?(wt [^\n]+)\n```").unwrap()
});
/// Map a README command placeholder to the insta snapshot file holding its
/// recorded output; `None` for commands that have no snapshot registered.
fn command_to_snapshot(command: &str) -> Option<&'static str> {
    const SNAPSHOTS: [(&str, &str); 4] = [
        (
            "wt list",
            "integration__integration_tests__list__readme_example_list.snap",
        ),
        (
            "wt list --full",
            "integration__integration_tests__list__readme_example_list_full.snap",
        ),
        (
            "wt list --branches --full",
            "integration__integration_tests__list__readme_example_list_branches.snap",
        ),
        (
            "wt list (markers)",
            "integration__integration_tests__list__readme_example_list_marker.snap",
        ),
    ];
    SNAPSHOTS
        .iter()
        .find(|(cmd, _)| *cmd == command)
        .map(|&(_, snap)| snap)
}
/// How `expand_command_placeholders` renders snapshot output:
/// `Html` for the Zola docs site, `Plain` for plain-text targets.
enum ExpandMode {
    Html,
    Plain,
}
/// Expand `<!-- wt … -->` + bash-fence placeholders into the recorded
/// snapshot output, either as a docs `{% terminal %}` block (`Html`) or a
/// plain fenced block (`Plain`).
///
/// Placeholders without a registered snapshot are left untouched. Missing
/// snapshot files are collected and reported together; read failures abort
/// immediately.
fn expand_command_placeholders(
    content: &str,
    snapshots_dir: &Path,
    mode: ExpandMode,
) -> Result<String, String> {
    let mut result = content.to_string();
    let mut errors = Vec::new();
    for cap in COMMAND_PLACEHOLDER_PATTERN.captures_iter(content) {
        let full_match = cap.get(0).unwrap().as_str();
        let placeholder_id = cap.get(1).unwrap().as_str();
        let display_cmd = cap.get(2).unwrap().as_str();
        // No snapshot registered for this command: keep the placeholder.
        let Some(snapshot_name) = command_to_snapshot(placeholder_id) else {
            continue;
        };
        let snapshot_path = snapshots_dir.join(snapshot_name);
        if !snapshot_path.exists() {
            errors.push(format!(
                "Snapshot file not found: {} (for command '{}')",
                snapshot_path.display(),
                placeholder_id
            ));
            continue;
        }
        let snapshot_content = fs::read_to_string(&snapshot_path)
            .map_err(|e| format!("Failed to read {}: {}", snapshot_path.display(), e))?;
        let replacement = match mode {
            ExpandMode::Html => {
                let html = parse_snapshot_content_for_docs(&snapshot_content)?;
                let normalized = encode_leading_spaces(&trim_lines(&html));
                format!(
                    "<!-- ⚠️ AUTO-GENERATED from tests/snapshots/{snapshot_name} — edit source to update -->\n\n\
                     {{% terminal(cmd=\"{display_cmd}\") %}}\n\
                     {normalized}\n\
                     {{% end %}}\n\n\
                     <!-- END AUTO-GENERATED -->",
                )
            }
            ExpandMode::Plain => {
                let plain = trim_lines(&parse_snapshot_content_for_skill(&snapshot_content));
                format!("```\n$ {display_cmd}\n{plain}\n```")
            }
        };
        // Replace by exact text (not offsets): each placeholder is unique.
        result = result.replace(full_match, &replacement);
    }
    if !errors.is_empty() {
        return Err(errors.join("\n"));
    }
    Ok(result)
}
fn literal_to_escape(text: &str) -> String {
ANSI_LITERAL_REGEX
.replace_all(text, |caps: ®ex::Captures| {
let code = caps.get(0).unwrap().as_str();
format!("\x1b{code}")
})
.to_string()
}
/// Trim trailing whitespace from every line and drop trailing blank lines.
fn trim_lines(content: &str) -> String {
    let mut out = String::with_capacity(content.len());
    for line in content.lines() {
        out.push_str(line.trim_end());
        out.push('\n');
    }
    // Cut back to the last non-whitespace character (also removes the final
    // '\n' added above).
    out.truncate(out.trim_end().len());
    out
}
/// HTML-encode the first line's leading indentation as `&nbsp;` entities so
/// HTML rendering inside the terminal shortcode does not collapse it.
///
/// Fix: the replacement literal had been mangled to a plain space (HTML-entity
/// decoding of `&nbsp;`), turning the whole function into a no-op.
fn encode_leading_spaces(content: &str) -> String {
    let first_line = content.lines().next().unwrap_or("");
    // Byte length of leading whitespace; assumes ASCII spaces in snapshot
    // output — TODO confirm tabs never occur here.
    let leading = first_line.len() - first_line.trim_start().len();
    if leading == 0 {
        return content.to_string();
    }
    format!("{}{}", "&nbsp;".repeat(leading), &content[leading..])
}
/// Render a snapshot's output as HTML for the docs site: strip front matter,
/// normalize placeholders, realize ANSI escapes, make each line's styling
/// self-contained, then convert to HTML and clean up the markup.
fn parse_snapshot_content_for_docs(content: &str) -> Result<String, String> {
    let content = parse_snapshot_raw(content);
    let content = replace_placeholders(&content);
    let content = literal_to_escape(&content);
    let content = ensure_line_resets(&content);
    let html = ansi_to_html(&content).map_err(|e| format!("ANSI conversion failed: {e}"))?;
    Ok(clean_ansi_html(&html))
}
/// Render a snapshot's output as plain text: same normalization as the HTML
/// path, but all ANSI styling is stripped instead of converted.
fn parse_snapshot_content_for_skill(content: &str) -> String {
    let content = parse_snapshot_raw(content);
    let content = replace_placeholders(&content);
    let content = literal_to_escape(&content);
    content.ansi_strip().into_owned()
}
/// Terminate every line with an ANSI reset; styles do not carry over to the
/// next line.
fn ensure_line_resets(ansi: &str) -> String {
    ensure_line_resets_impl(ansi, false)
}
/// Like `ensure_line_resets`, but re-applies the styles still open at the end
/// of one line to the start of the next before resetting.
fn ensure_line_resets_with_carry(ansi: &str) -> String {
    ensure_line_resets_impl(ansi, true)
}
/// Shared implementation for the `ensure_line_resets*` helpers.
///
/// HTML conversion is done line by line, so each line must be
/// self-contained: every emitted line ends with a reset, and with
/// `carry_styles` the SGR codes still open at the end of the previous line
/// are re-emitted at the start of the next.
fn ensure_line_resets_impl(ansi: &str, carry_styles: bool) -> String {
    const RESET: &str = "\x1b[0m";
    // Any SGR escape; capture 1 = parameter bytes (empty or `0` = reset).
    static SGR_RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"\x1b\[([0-9;]*)m").unwrap());
    let lines: Vec<&str> = ansi.lines().collect();
    let mut result = Vec::with_capacity(lines.len());
    // SGR sequences still in effect at the end of the previous line.
    let mut active_styles: Vec<String> = Vec::new();
    for line in lines {
        // Re-open carried styles at the start of this line when requested.
        let line = if !carry_styles || active_styles.is_empty() {
            line.to_string()
        } else {
            let prefix: String = active_styles.iter().map(|s| s.as_str()).collect();
            format!("{prefix}{line}")
        };
        // Recompute which styles this line leaves open: a reset clears the
        // list, any other SGR code is recorded in order of appearance.
        active_styles.clear();
        for cap in SGR_RE.captures_iter(&line) {
            let params = &cap[1];
            if params.is_empty() || params == "0" {
                active_styles.clear();
            } else {
                active_styles.push(format!("\x1b[{params}m"));
            }
        }
        // Ensure the line ends with an explicit reset.
        if line.ends_with(RESET) {
            result.push(line);
        } else {
            result.push(format!("{line}{RESET}"));
        }
    }
    result.join("\n")
}
/// Post-process `ansi_to_html` output: drop stray ESC bytes and empty
/// elements, then swap the inline color styles for the site's short CSS
/// classes (d = dim, g = green, r = red, c = cyan).
fn clean_ansi_html(html: &str) -> String {
    static EMPTY_SPAN_REGEX: LazyLock<Regex> =
        LazyLock::new(|| Regex::new(r"<span[^>]*></span>").unwrap());
    let mut out = html.replace('\x1b', "").replace("<b></b>", "");
    out = EMPTY_SPAN_REGEX.replace_all(&out, "").to_string();
    let swaps = [
        ("<span style='opacity:0.67'>", "<span class=d>"),
        ("<span style='color:var(--green,#0a0)'>", "<span class=g>"),
        ("<span style='color:var(--red,#a00)'>", "<span class=r>"),
        ("<span style='color:var(--cyan,#0aa)'>", "<span class=c>"),
    ];
    for (from, to) in swaps {
        out = out.replace(from, to);
    }
    out
}
// A `## `/`### Command reference` heading followed by a plain fenced block of
// ANSI-colored text; capture 1 = heading plus blank line, capture 2 = body.
static COMMAND_REF_BLOCK_PATTERN: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r"(?s)(###? Command reference\n\n)```\n(.*?)\n```").unwrap());
/// Replace each `Command reference` fenced block (ANSI text) with a
/// `{% terminal() %}` shortcode containing its HTML rendering.
/// Matches are replaced back-to-front so the recorded offsets stay valid.
fn convert_command_reference_to_html(content: &str) -> Result<String, String> {
    let mut result = content.to_string();
    let matches: Vec<_> = COMMAND_REF_BLOCK_PATTERN
        .captures_iter(content)
        .map(|cap| {
            let full_match = cap.get(0).unwrap();
            let header = cap.get(1).unwrap().as_str();
            let code_content = cap.get(2).unwrap().as_str();
            (full_match.start(), full_match.end(), header, code_content)
        })
        .collect();
    for (start, end, header, code_content) in matches.into_iter().rev() {
        // Carry variant: styles that span multiple lines survive conversion.
        let with_resets = ensure_line_resets_with_carry(code_content);
        let html =
            ansi_to_html(&with_resets).map_err(|e| format!("ANSI conversion failed: {e}"))?;
        let clean_html = clean_ansi_html(&html);
        let trimmed_html = trim_lines(&clean_html);
        let replacement = format!("{header}{{% terminal() %}}\n{trimmed_html}\n{{% end %}}");
        result.replace_range(start..end, &replacement);
    }
    Ok(result)
}
/// Run the `wt … --help-md` command named by a Help marker id and massage its
/// markdown for embedding: the first line's " - " becomes an em dash, the
/// synopsis is wrapped in a code fence, and sections after the first `## `
/// heading get demoted one level.
fn help_output(id: &str, project_root: &Path) -> Result<String, String> {
    // Marker ids are backtick-quoted commands, e.g. `wt list --help-md`.
    let command = id.trim_matches('`');
    let args: Vec<&str> = command.split_whitespace().collect();
    if args.is_empty() {
        return Err("Empty command".to_string());
    }
    if args.first() != Some(&"wt") {
        return Err(format!("Command must start with 'wt': {}", command));
    }
    if args.last() != Some(&"--help-md") {
        return Err(format!("Command must end with '--help-md': {}", command));
    }
    // NO_COLOR keeps the captured help free of ANSI escapes.
    let output = wt_command()
        .env("NO_COLOR", "1")
        .args(&args[1..])
        .current_dir(project_root)
        .output()
        .map_err(|e| format!("Failed to run command: {}", e))?;
    let stdout = String::from_utf8_lossy(&output.stdout);
    let stderr = String::from_utf8_lossy(&output.stderr);
    // Fall back to stderr when the command printed nothing to stdout.
    let help_output = if !stdout.is_empty() {
        stdout.to_string()
    } else {
        stderr.to_string()
    };
    // Normalize per-line trailing whitespace and surrounding blank lines.
    let help_output = help_output
        .lines()
        .map(|line| line.trim_end())
        .collect::<Vec<_>>()
        .join("\n")
        .trim()
        .to_string();
    let result = if let Some(first_newline) = help_output.find('\n') {
        let (first_line, rest) = help_output.split_at(first_newline);
        // "wt list - …" → "wt list — …" (typographic dash, first line only).
        let first_line = first_line.replacen(" - ", " — ", 1);
        if let Some(header_pos) = rest.find("\n## ") {
            // Fence the synopsis; keep the prose docs below with headings
            // pushed one level deeper.
            let (synopsis, docs) = rest.split_at(header_pos);
            let docs = docs.trim_start_matches('\n');
            let docs = increase_heading_levels(docs);
            format!("```\n{}{}\n```\n\n{}", first_line, synopsis, docs)
        } else {
            format!("```\n{}{}\n```", first_line, rest)
        }
    } else {
        // Single-line help: no fencing needed.
        help_output.replacen(" - ", " — ", 1)
    };
    Ok(result)
}
/// Demote every markdown heading by one level (`##` → `###`), leaving the
/// contents of fenced code blocks untouched.
fn increase_heading_levels(content: &str) -> String {
    let mut inside_fence = false;
    content
        .lines()
        .map(|line| {
            if line.trim_start().starts_with("```") {
                inside_fence = !inside_fence;
                line.to_string()
            } else if !inside_fence && line.starts_with('#') {
                format!("#{}", line)
            } else {
                line.to_string()
            }
        })
        .collect::<Vec<_>>()
        .join("\n")
}
/// Pull the `DEFAULT_TEMPLATE` / `DEFAULT_SQUASH_TEMPLATE` raw-string
/// constants out of src/llm.rs source text, keyed by constant name.
fn extract_templates(content: &str) -> std::collections::HashMap<String, String> {
    let mut templates = std::collections::HashMap::new();
    for cap in RUST_RAW_STRING_PATTERN.captures_iter(content) {
        templates.insert(
            cap.get(1).unwrap().as_str().to_string(),
            cap.get(2).unwrap().as_str().to_string(),
        );
    }
    templates
}
/// Extract the markdown section whose `## ` or `### ` heading slugifies to
/// `anchor`.
///
/// `anchor` may be a range `start..end`: the result then spans from the
/// `start` heading through the whole section that begins at the `end`
/// heading (i.e. up to the next `## ` after it, or end of file).
fn extract_section_by_anchor(content: &str, anchor: &str) -> Option<String> {
    let (start_anchor, end_anchor) = if let Some((start, end)) = anchor.split_once("..") {
        (start, Some(end))
    } else {
        (anchor, None)
    };
    let lines: Vec<&str> = content.lines().collect();
    // Index of the heading line matching the start anchor.
    let start_idx = lines.iter().position(|line| {
        line.strip_prefix("## ")
            .or_else(|| line.strip_prefix("### "))
            .is_some_and(|text| heading_to_anchor(text) == start_anchor)
    })?;
    let end_idx = if let Some(end_anchor) = end_anchor {
        // Locate the end heading strictly after the start heading…
        let end_heading_idx = lines.iter().skip(start_idx + 1).position(|line| {
            line.strip_prefix("## ")
                .or_else(|| line.strip_prefix("### "))
                .is_some_and(|text| heading_to_anchor(text) == end_anchor)
        })? + start_idx
            + 1;
        // …then include its whole section: stop at the next `## ` heading
        // (or end of file).
        lines
            .iter()
            .skip(end_heading_idx + 1)
            .position(|line| line.starts_with("## "))
            .map(|i| i + end_heading_idx + 1)
            .unwrap_or(lines.len())
    } else {
        // No explicit end: the section runs to the next top-level `## `.
        lines
            .iter()
            .skip(start_idx + 1)
            .position(|line| line.starts_with("## "))
            .map(|i| i + start_idx + 1)
            .unwrap_or(lines.len())
    };
    let section = lines[start_idx..end_idx].join("\n").trim().to_string();
    Some(section)
}
/// Slugify a heading's text into its anchor: lowercase, with runs of
/// non-alphanumeric characters collapsed into single dashes.
fn heading_to_anchor(heading: &str) -> String {
    heading
        .to_lowercase()
        .split(|c: char| !c.is_alphanumeric())
        .filter(|segment| !segment.is_empty())
        .collect::<Vec<_>>()
        .join("-")
}
// Generated-HTML terminal blocks (optionally preceded by a bash fence);
// capture 1 = the shortcode body. Used below to convert docs pages back into
// plain console fences for GitHub.
static TERMINAL_MARKER_PATTERN: LazyLock<Regex> = LazyLock::new(|| {
    Regex::new(
        r#"(?s)(?:```bash\n[^\n]+\n```\n+)?<!-- ⚠️ AUTO-GENERATED-HTML from [^\n]+ -->\n+\{% terminal\([^)]*\) %\}\n(.*?)\{% end %\}\n+<!-- END AUTO-GENERATED -->"#,
    )
    .unwrap()
});
/// Strip terminal HTML down to plain text: `<span class="cmd">…</span>`
/// becomes a `$ ` prompt line, all remaining tags are removed, and common
/// HTML entities are decoded.
///
/// Fix: the entity-decoding chain had been mangled into no-op replaces
/// (`"&" -> "&"`, etc.) by HTML-entity decoding of this source file; the
/// original `&amp;`/`&lt;`/`&gt;`/`&quot;`/`&#39;` literals are restored.
fn strip_html(content: &str) -> String {
    let cmd_pattern = Regex::new(r#"<span class="cmd">([^<]*)</span>"#).unwrap();
    let result = cmd_pattern.replace_all(content, "$ $1");
    let tag_pattern = Regex::new(r"<[^>]+>").unwrap();
    let result = tag_pattern.replace_all(&result, "");
    // Decode `&amp;` last so sequences like `&amp;lt;` are not double-decoded.
    result
        .replace("&lt;", "<")
        .replace("&gt;", ">")
        .replace("&quot;", "\"")
        .replace("&#39;", "'")
        .replace("&amp;", "&")
}
fn transform_zola_to_github(content: &str) -> String {
let content = ZOLA_LINK_PATTERN
.replace_all(content, |caps: ®ex::Captures| {
let text = caps.get(1).unwrap().as_str();
let page = caps.get(2).unwrap().as_str();
let anchor = caps.get(3).map_or("", |m| m.as_str());
format!("[{text}](https://worktrunk.dev/{page}/{anchor})")
})
.into_owned();
let content = ZOLA_RAWCODE_PATTERN
.replace_all(&content, |caps: ®ex::Captures| {
let inner = caps.get(1).unwrap().as_str();
format!("<pre>{}</pre>", inner)
})
.into_owned();
let content = TERMINAL_MARKER_PATTERN
.replace_all(&content, |caps: ®ex::Captures| {
let inner = caps.get(1).unwrap().as_str();
let plain = strip_html(inner);
format!("```console\n{}\n```", plain)
})
.into_owned();
let content = ZOLA_TERMINAL_SELF_CLOSING_PATTERN
.replace_all(&content, |caps: ®ex::Captures| {
cmd_to_bash_block(caps.get(1).map_or("", |m| m.as_str()), "", false)
})
.into_owned();
ZOLA_FIGURE_PATTERN
.replace_all(&content, |caps: ®ex::Captures| {
let filename = caps.get(1).unwrap().as_str();
let alt = caps.get(2).unwrap().as_str();
format!(
""
)
})
.into_owned()
}
/// Resolve a `path#anchor` marker id to the named section of a docs file,
/// rewritten as GitHub-flavored markdown for the README.
fn docs_section_for_readme(id: &str, project_root: &Path) -> Result<String, String> {
    let (path, anchor) = id
        .split_once('#')
        .ok_or_else(|| format!("Invalid section ID (missing #): {}", id))?;
    let docs_path = project_root.join(path);
    let content = fs::read_to_string(&docs_path)
        .map_err(|e| format!("Failed to read {}: {}", docs_path.display(), e))?;
    let section = extract_section_by_anchor(&content, anchor)
        .ok_or_else(|| format!("Section '{}' not found in {}", anchor, docs_path.display()))?;
    // Fix: argument was corrupted to `§ion` (HTML-entity mangling of
    // `&section`), which does not compile.
    Ok(transform_zola_to_github(&section))
}
/// Generate the expected content for a README marker based on its kind:
/// CLI help output for Help markers, a transformed docs section for Section
/// markers. Snapshot markers never appear in the README.
fn generate_readme_content(
    id: &str,
    _current_content: &str,
    project_root: &Path,
) -> Result<String, String> {
    let kind = MarkerType::from_id(id);
    match kind {
        MarkerType::Help => help_output(id, project_root),
        MarkerType::Section => {
            let section = docs_section_for_readme(id, project_root)?;
            Ok(trim_lines(&section))
        }
        MarkerType::Snapshot => unreachable!("README has no snapshot markers"),
    }
}
/// Sync every auto-generated marker in the README against its source (CLI
/// help or a docs section).
///
/// Returns `(new content, changed count, total markers)` on success, or the
/// list of generation errors. Replacements run back-to-front so the recorded
/// byte offsets stay valid while `result` is edited.
fn sync_readme_markers(
    readme_content: &str,
    project_root: &Path,
) -> Result<(String, usize, usize), Vec<String>> {
    let mut result = readme_content.to_string();
    let mut errors = Vec::new();
    let mut updated = 0;
    let matches: Vec<_> = MARKER_PATTERN
        .captures_iter(readme_content)
        .map(|cap| {
            let full_match = cap.get(0).unwrap();
            let id = cap.get(1).unwrap().as_str().trim().to_string();
            let current = cap.get(2).unwrap().as_str().to_string();
            (full_match.start(), full_match.end(), id, current)
        })
        .collect();
    let total = matches.len();
    for (start, end, id, current_with_wrapper) in matches.into_iter().rev() {
        let marker_type = MarkerType::from_id(&id);
        // Fix: the `&current_with_wrapper` / `&current_inner` arguments had
        // been corrupted to `¤t…` (HTML-entity mangling), breaking the build.
        let current_inner = marker_type.extract_inner(&current_with_wrapper);
        let expected = match generate_readme_content(&id, &current_with_wrapper, project_root) {
            Ok(content) => content,
            Err(e) => {
                errors.push(format!("❌ {}: {}", id, e));
                continue;
            }
        };
        // Compare modulo trailing whitespace to avoid needless rewrites.
        if trim_lines(&current_inner) != trim_lines(&expected) {
            let replacement = format_replacement(&id, &expected, &marker_type.output_format());
            result.replace_range(start..end, &replacement);
            updated += 1;
        }
    }
    if errors.is_empty() {
        Ok((result, updated, total))
    } else {
        Err(errors)
    }
}
/// Turn a markdown config-docs section into a commented TOML example: fence
/// lines are dropped, links are rewritten to `text (url)` form, every
/// remaining line is prefixed with `# `, and trailing empty comment lines
/// are trimmed.
///
/// Fix: removed the write-only `in_code_block` flag — it was toggled but
/// never read, since fenced content is commented out just like prose.
fn transform_config_source_to_toml(source: &str) -> String {
    let mut result = Vec::new();
    for line in source.lines() {
        // Fence markers are dropped; the fenced TOML itself still gets the
        // `# ` prefix below, so examples survive as commented-out config.
        if line.trim().starts_with("```") {
            continue;
        }
        let line = convert_markdown_links_for_config(line);
        if line.is_empty() {
            result.push(String::from("#"));
        } else {
            result.push(format!("# {}", line));
        }
    }
    // Trim trailing blank comment lines.
    while result.last().is_some_and(|l| l == "#" || l.is_empty()) {
        result.pop();
    }
    result.join("\n")
}
fn convert_markdown_links_for_config(line: &str) -> String {
use regex::Regex;
use std::sync::LazyLock;
static MARKDOWN_LINK: LazyLock<Regex> =
LazyLock::new(|| Regex::new(r"\[([^\]]+)\]\(([^)]+)\)").unwrap());
MARKDOWN_LINK
.replace_all(line, |caps: ®ex::Captures| {
let text = &caps[1];
let url = &caps[2];
let url = if let Some(path) = url.strip_prefix("@/") {
let (page, anchor) = match path.split_once('#') {
Some((p, a)) => (p.trim_end_matches(".md"), Some(a)),
None => (path.trim_end_matches(".md"), None),
};
match anchor {
Some(a) => format!("https://worktrunk.dev/{page}/#{a}"),
None => format!("https://worktrunk.dev/{page}/"),
}
} else {
url.to_string()
};
format!("{text} ({url})")
})
.to_string()
}
/// Extract capture group 1 of `pattern` from the src/cli/mod.rs source text,
/// panicking with `label` when the marker pair is missing.
fn extract_config_section(cli_mod_content: &str, pattern: &Regex, label: &str) -> String {
    match pattern.captures(cli_mod_content).and_then(|cap| cap.get(1)) {
        Some(section) => section.as_str().to_string(),
        None => panic!("{label} markers not found in src/cli/mod.rs"),
    }
}
/// Regenerate `example_path` from the docs section selected by `pattern` in
/// src/cli/mod.rs; on drift, write the regenerated file and panic so the
/// change gets committed.
fn assert_config_example_in_sync(
    cli_mod_content: &str,
    pattern: &Regex,
    marker_label: &str,
    example_path: &Path,
) {
    let source = extract_config_section(cli_mod_content, pattern, marker_label);
    let expected = trim_lines(&transform_config_source_to_toml(&source));
    let current = fs::read_to_string(example_path)
        .unwrap_or_else(|e| panic!("Failed to read {}: {}", example_path.display(), e));
    // Fix: argument was corrupted to `¤t` (HTML-entity mangling of
    // `&current`), which does not compile.
    let current = trim_lines(&current);
    if current != expected {
        fs::write(example_path, format!("{}\n", expected)).unwrap();
        panic!(
            "{} out of sync with {} section in src/cli/mod.rs. \
             Run tests locally and commit the changes.",
            example_path.file_name().unwrap().to_string_lossy(),
            marker_label,
        );
    }
}
/// The user-config docs in src/cli/mod.rs are the source of truth for
/// dev/config.example.toml; regenerate and fail when they drift.
#[test]
fn test_config_source_generates_example_toml() {
    let project_root = Path::new(env!("CARGO_MANIFEST_DIR"));
    let cli_mod_content = fs::read_to_string(project_root.join("src/cli/mod.rs"))
        .unwrap_or_else(|e| panic!("Failed to read src/cli/mod.rs: {e}"));
    assert_config_example_in_sync(
        &cli_mod_content,
        &USER_CONFIG_PATTERN,
        "USER_CONFIG_START/END",
        &project_root.join("dev/config.example.toml"),
    );
}
/// The project-config docs in src/cli/mod.rs are the source of truth for
/// dev/wt.example.toml; regenerate and fail when they drift.
#[test]
fn test_project_config_source_generates_example_toml() {
    let project_root = Path::new(env!("CARGO_MANIFEST_DIR"));
    let cli_mod_content = fs::read_to_string(project_root.join("src/cli/mod.rs"))
        .unwrap_or_else(|e| panic!("Failed to read src/cli/mod.rs: {e}"));
    assert_config_example_in_sync(
        &cli_mod_content,
        &PROJECT_CONFIG_PATTERN,
        "PROJECT_CONFIG_START/END",
        &project_root.join("dev/wt.example.toml"),
    );
}
/// Every non-hook, non-scalar key in the UserConfig schema must appear as a
/// documented `[section]` in the user-config docs; deprecated section names
/// must NOT appear.
#[test]
fn test_config_docs_include_all_sections() {
    use std::collections::HashSet;
    use strum::IntoEnumIterator;
    use worktrunk::config::{DEPRECATED_SECTION_KEYS, valid_user_config_keys};
    use worktrunk::git::HookType;
    let project_root = Path::new(env!("CARGO_MANIFEST_DIR"));
    let cli_mod_path = project_root.join("src/cli/mod.rs");
    let cli_mod_content = fs::read_to_string(&cli_mod_path).unwrap();
    let user_config_content =
        extract_config_section(&cli_mod_content, &USER_CONFIG_PATTERN, "USER_CONFIG");
    let all_keys = valid_user_config_keys();
    // Hook keys are documented separately, not as config sections.
    let hook_keys: HashSet<String> = HookType::iter()
        .map(|h| h.to_string())
        .chain(std::iter::once("post-create".to_string()))
        .collect();
    // Scalar top-level keys that are not `[section]`s either.
    let non_section_keys: HashSet<&str> = [
        "worktree-path",
        "skip-shell-integration-prompt",
        "skip-commit-generation-prompt",
    ]
    .into();
    let section_keys: Vec<&String> = all_keys
        .iter()
        .filter(|k| !hook_keys.contains(*k) && !non_section_keys.contains(k.as_str()))
        .collect();
    // Fix: loop expression was corrupted to `§ion_keys` (HTML-entity
    // mangling of `&section_keys`), which does not compile.
    for key in &section_keys {
        if DEPRECATED_SECTION_KEYS
            .iter()
            .any(|d| d.key == key.as_str())
        {
            let header = format!("[{key}]");
            assert!(
                !user_config_content.contains(&header),
                "Deprecated section `{header}` should not appear in user config docs.\n\
                 Use the new section name instead."
            );
        } else {
            // Accept either a plain `[key]` table or a nested `[key.…]` one.
            let header = format!("[{key}]");
            let nested = format!("[{key}.");
            assert!(
                user_config_content.contains(&header) || user_config_content.contains(&nested),
                "Config section `[{key}]` (from UserConfig schema) is missing from user \
                 config docs in src/cli/mod.rs.\nAll config sections must be documented between \
                 USER_CONFIG_START/END markers."
            );
        }
    }
}
/// Every non-hook key in the ProjectConfig schema must appear as a documented
/// `[section]` in the project-config docs; deprecated section names must NOT
/// appear, and the hooks heading must be present.
#[test]
fn test_project_config_docs_include_all_sections() {
    use std::collections::HashSet;
    use strum::IntoEnumIterator;
    use worktrunk::config::{DEPRECATED_SECTION_KEYS, valid_project_config_keys};
    use worktrunk::git::HookType;
    let project_root = Path::new(env!("CARGO_MANIFEST_DIR"));
    let cli_mod_path = project_root.join("src/cli/mod.rs");
    let cli_mod_content = fs::read_to_string(&cli_mod_path).unwrap();
    let project_config_content =
        extract_config_section(&cli_mod_content, &PROJECT_CONFIG_PATTERN, "PROJECT_CONFIG");
    let all_keys = valid_project_config_keys();
    // Hook keys are documented under `## Hooks`, not as config sections.
    let hook_keys: HashSet<String> = HookType::iter()
        .map(|h| h.to_string())
        .chain(std::iter::once("post-create".to_string()))
        .collect();
    let section_keys: Vec<&String> = all_keys
        .iter()
        .filter(|k| !hook_keys.contains(*k))
        .collect();
    // Fix: loop expression was corrupted to `§ion_keys` (HTML-entity
    // mangling of `&section_keys`), which does not compile.
    for key in &section_keys {
        if DEPRECATED_SECTION_KEYS
            .iter()
            .any(|d| d.key == key.as_str())
        {
            let header = format!("[{key}]");
            assert!(
                !project_config_content.contains(&header),
                "Deprecated section `{header}` should not appear in project config docs.\n\
                 Use the new section name instead."
            );
        } else {
            // Accept either a plain `[key]` table or a nested `[key.…]` one.
            let header = format!("[{key}]");
            let nested = format!("[{key}.");
            assert!(
                project_config_content.contains(&header)
                    || project_config_content.contains(&nested),
                "Config section `[{key}]` (from ProjectConfig schema) is missing from project \
                 config docs in src/cli/mod.rs.\nAll config sections must be documented between \
                 PROJECT_CONFIG_START/END markers."
            );
        }
    }
    assert!(
        project_config_content.contains("## Hooks"),
        "Hooks section heading missing from project config docs.\n\
         Expected `## Hooks` between PROJECT_CONFIG_START/END markers."
    );
}
/// Every `command = "…"` example in dev/config.example.toml must also appear
/// in docs/content/llm-commits.md.
#[test]
fn test_llm_docs_commands_match_config_example() {
    let project_root = Path::new(env!("CARGO_MANIFEST_DIR"));
    let config_example = fs::read_to_string(project_root.join("dev/config.example.toml")).unwrap();
    let llm_docs = fs::read_to_string(project_root.join("docs/content/llm-commits.md")).unwrap();
    // Example commands are commented out in the TOML; strip the `# ` prefix
    // and parse each `command = …` line as a one-entry TOML table so quoting
    // is handled properly.
    let config_commands: Vec<String> = config_example
        .lines()
        .filter_map(|line| line.strip_prefix("# "))
        .filter(|line| line.starts_with("command = "))
        .filter_map(|line| {
            let table: toml::Table = toml::from_str(line).ok()?;
            Some(table["command"].as_str()?.to_string())
        })
        .collect();
    // In the docs the same lines appear uncommented.
    let doc_commands: Vec<String> = llm_docs
        .lines()
        .filter(|line| line.starts_with("command = "))
        .filter_map(|line| {
            let table: toml::Table = toml::from_str(line).ok()?;
            Some(table["command"].as_str()?.to_string())
        })
        .collect();
    // Sanity check that the parsing above actually found the examples.
    assert!(
        config_commands.len() >= 2,
        "Expected at least 2 tool commands in config.example.toml, found {}",
        config_commands.len()
    );
    for cmd in &config_commands {
        assert!(
            doc_commands.contains(cmd),
            "Command from config.example.toml not found in docs/content/llm-commits.md:\n {cmd}\n\
             Update llm-commits.md to match the config example (source of truth: dev/config.example.toml, \
             generated from src/cli/mod.rs)."
        );
    }
}
/// LLM commands hard-coded in Taskfile.yaml must match the commands shown in
/// dev/config.example.toml (the source of truth, generated from
/// src/cli/mod.rs).
#[test]
fn test_taskfile_llm_commands_match_config_example() {
    let project_root = Path::new(env!("CARGO_MANIFEST_DIR"));
    let config_example = fs::read_to_string(project_root.join("dev/config.example.toml")).unwrap();
    let taskfile = fs::read_to_string(project_root.join("Taskfile.yaml")).unwrap();
    // tool name -> command string, parsed from the commented example TOML;
    // `# ### Tool …` headings name the tool the following commands belong to.
    let mut config_commands = std::collections::HashMap::new();
    let mut current_tool: Option<String> = None;
    for line in config_example.lines() {
        if let Some(heading) = line.strip_prefix("# ### ") {
            // First word of the heading is the tool name.
            current_tool = heading.split_whitespace().next().map(|s| s.to_lowercase());
        } else if let Some(cmd_line) = line.strip_prefix("# ")
            && cmd_line.starts_with("command = ")
            && let Some(ref tool) = current_tool
            && let Ok(table) = toml::from_str::<toml::Table>(cmd_line)
            && let Some(cmd) = table.get("command").and_then(|v| v.as_str())
        {
            config_commands.insert(tool.clone(), cmd.to_string());
        }
    }
    // Taskfile entries look like: COMMANDS["tool"]='single-quoted command'.
    let taskfile_re = Regex::new(r#"COMMANDS\["(\w+)"\]=(.*)"#).unwrap();
    let taskfile_commands: std::collections::HashMap<String, String> = taskfile
        .lines()
        .filter_map(|line| {
            let caps = taskfile_re.captures(line.trim())?;
            let tool = caps[1].to_string();
            let raw = &caps[2];
            // Undo shell quoting: '"'"' is how a single quote is embedded
            // inside a single-quoted shell string.
            let unescaped = raw.replace("'\"'\"'", "'");
            let cmd = unescaped
                .strip_prefix('\'')?
                .strip_suffix('\'')?
                .to_string();
            Some((tool, cmd))
        })
        .collect();
    let mut checked = 0;
    for (tool, taskfile_cmd) in &taskfile_commands {
        if let Some(config_cmd) = config_commands.get(tool.as_str()) {
            assert_eq!(
                config_cmd, taskfile_cmd,
                "Command mismatch for '{tool}'.\n\
                 Config example: {config_cmd}\n\
                 Taskfile: {taskfile_cmd}\n\
                 Update Taskfile.yaml to match dev/config.example.toml (source of truth)."
            );
            checked += 1;
        }
    }
    // Guard against silently comparing nothing if tool names drift apart.
    assert!(
        checked >= 1,
        "No overlapping tools between config.example.toml and Taskfile.yaml"
    );
}
/// The TOML-fenced template examples in the src/cli/mod.rs config docs must
/// match the actual DEFAULT_TEMPLATE / DEFAULT_SQUASH_TEMPLATE constants in
/// src/llm.rs; rewrites src/cli/mod.rs and fails when they drift.
#[test]
fn test_config_source_templates_are_in_sync() {
    let project_root = Path::new(env!("CARGO_MANIFEST_DIR"));
    let llm_rs_path = project_root.join("src/llm.rs");
    let cli_mod_path = project_root.join("src/cli/mod.rs");
    let llm_content = fs::read_to_string(&llm_rs_path).unwrap();
    let cli_mod_content = fs::read_to_string(&cli_mod_path).unwrap();
    let templates = extract_templates(&llm_content);
    assert!(
        templates.contains_key("DEFAULT_TEMPLATE"),
        "DEFAULT_TEMPLATE not found in src/llm.rs"
    );
    assert!(
        templates.contains_key("DEFAULT_SQUASH_TEMPLATE"),
        "DEFAULT_SQUASH_TEMPLATE not found in src/llm.rs"
    );
    let mut updated_content = cli_mod_content.clone();
    let mut updated_count = 0;
    let mut replace_template = |pattern: &Regex, name: &str, key: &str| {
        // The clone keeps the captures' borrow separate from the mutation of
        // `updated_content` below.
        if let Some(cap) = pattern.captures(&updated_content.clone()) {
            let full_match = cap.get(0).unwrap();
            let prefix = cap.get(1).unwrap().as_str();
            let suffix = cap.get(2).unwrap().as_str();
            let template = templates
                .get(name)
                .unwrap_or_else(|| panic!("{name} not found in src/llm.rs"));
            // Raw string: the generated TOML block is column-0 on purpose.
            let replacement = format!(
                r#"{prefix}```toml
[commit.generation]
{key} = """
{template}
"""
```
{suffix}"#
            );
            if full_match.as_str() != replacement {
                updated_content = updated_content.replace(full_match.as_str(), &replacement);
                updated_count += 1;
            }
        }
    };
    replace_template(&DEFAULT_TEMPLATE_PATTERN, "DEFAULT_TEMPLATE", "template");
    replace_template(
        &SQUASH_TEMPLATE_PATTERN,
        "DEFAULT_SQUASH_TEMPLATE",
        "squash-template",
    );
    if updated_count > 0 {
        fs::write(&cli_mod_path, &updated_content).unwrap();
        panic!(
            "Templates out of sync: updated {} section(s) in src/cli/mod.rs. \
             Run tests locally and commit the changes.",
            updated_count
        );
    }
}
/// Sync all Help-type (`wt … --help-md`) markers in `file_path` against live
/// CLI output, writing the file back when anything changed. Returns how many
/// sections were rewritten, or the collected generation errors.
fn sync_help_markers(file_path: &Path, project_root: &Path) -> Result<usize, Vec<String>> {
    let content = fs::read_to_string(file_path)
        .map_err(|e| vec![format!("Failed to read {}: {}", file_path.display(), e)])?;
    let mut result = content.clone();
    let mut errors = Vec::new();
    let mut updated = 0;
    // Collect spans first; replacements run back-to-front below so the
    // recorded byte offsets stay valid while `result` is edited.
    let matches: Vec<_> = MARKER_PATTERN
        .captures_iter(&content)
        .filter_map(|cap| {
            let id = cap.get(1).unwrap().as_str().trim();
            if matches!(MarkerType::from_id(id), MarkerType::Help) {
                let full_match = cap.get(0).unwrap();
                let current = cap.get(2).unwrap().as_str();
                Some((
                    full_match.start(),
                    full_match.end(),
                    id.to_string(),
                    current.to_string(),
                ))
            } else {
                None
            }
        })
        .collect();
    for (start, end, id, current) in matches.into_iter().rev() {
        let expected = match help_output(&id, project_root) {
            Ok(content) => content,
            Err(e) => {
                errors.push(format!("❌ {}: {}", id, e));
                continue;
            }
        };
        // Fix: argument was corrupted to `¤t` (HTML-entity mangling of
        // `&current`), which does not compile. Compare modulo trailing
        // whitespace to avoid churn.
        if trim_lines(&current) != trim_lines(&expected) {
            let replacement = format_replacement(&id, &expected, &OutputFormat::Unwrapped);
            result.replace_range(start..end, &replacement);
            updated += 1;
        }
    }
    if !errors.is_empty() {
        return Err(errors);
    }
    if updated > 0 {
        fs::write(file_path, &result).unwrap();
    }
    Ok(updated)
}
/// Regenerates README marker sections and fails if anything was stale.
#[test]
fn test_readme_examples_are_in_sync() {
    let project_root = Path::new(env!("CARGO_MANIFEST_DIR"));
    let readme_path = project_root.join("README.md");
    let readme_content = fs::read_to_string(&readme_path).unwrap();
    // Any capture failure is fatal; report all collected errors at once.
    let (updated_content, updated_count, total_count) =
        match sync_readme_markers(&readme_content, project_root) {
            Ok(result) => result,
            Err(errors) => panic!(
                "README examples are out of sync:\n\n{}\n",
                errors.join("\n")
            ),
        };
    assert!(total_count != 0, "No README markers found in README.md");
    if updated_count > 0 {
        // Persist the regenerated content so a local run leaves a committable diff.
        fs::write(&readme_path, &updated_content).unwrap();
        panic!(
            "README out of sync: updated {} of {} section(s). \
             Run tests locally and commit the changes.",
            updated_count, total_count
        );
    }
}
/// Regenerates help-marker sections in docs/content/commands.md and fails if
/// anything was stale. Skips silently when the page doesn't exist.
#[test]
fn test_docs_commands_are_in_sync() {
    let project_root = Path::new(env!("CARGO_MANIFEST_DIR"));
    let commands_path = project_root.join("docs/content/commands.md");
    if !commands_path.exists() {
        return;
    }
    let updated_count = sync_help_markers(&commands_path, project_root)
        .unwrap_or_else(|errors| {
            panic!("Docs commands are out of sync:\n\n{}\n", errors.join("\n"))
        });
    assert!(
        updated_count == 0,
        "Docs commands out of sync: updated {} section(s) in {}. \
         Run tests locally and commit the changes.",
        updated_count,
        commands_path.display()
    );
}
/// Synchronizes `{% terminal %}` snapshot sections in `doc_path` with the
/// HTML rendering of the referenced `.snap` files.
///
/// A missing doc file is treated as "nothing to do". Returns the number of
/// sections rewritten (the file is updated in place on change).
fn sync_docs_snapshots(doc_path: &Path, project_root: &Path) -> Result<usize, Vec<String>> {
    if !doc_path.exists() {
        return Ok(0);
    }
    let content = fs::read_to_string(doc_path)
        .map_err(|e| vec![format!("Failed to read {}: {}", doc_path.display(), e)])?;
    // `?` propagates errors directly instead of re-wrapping them in a match;
    // the closure can borrow `project_root` (a shared reference) as-is.
    let (new_content, updated_count, _total_count) = update_section(
        &content,
        &DOCS_SNAPSHOT_MARKER_PATTERN,
        OutputFormat::DocsHtml,
        |snap_path, _current_content| {
            let full_path = project_root.join(snap_path);
            let raw = fs::read_to_string(&full_path)
                .map_err(|e| format!("Failed to read {}: {}", full_path.display(), e))?;
            // Recover the originating command (if recorded) so it can be shown
            // as a styled prompt line above the rendered output.
            let command = extract_command_from_snapshot(&raw);
            let html_content = parse_snapshot_content_for_docs(&raw)?;
            let normalized = trim_lines(&html_content);
            Ok(match command {
                Some(cmd) => format!("<span class=\"cmd\">{}</span>\n{}", cmd, normalized),
                None => normalized,
            })
        },
    )?;
    if updated_count > 0 {
        fs::write(doc_path, &new_content).unwrap();
    }
    Ok(updated_count)
}
/// Regenerates terminal-snapshot sections across the quickstart-style docs
/// pages and fails if anything was stale or unreadable.
#[test]
fn test_docs_quickstart_examples_are_in_sync() {
    let project_root = Path::new(env!("CARGO_MANIFEST_DIR"));
    let doc_files = [
        "docs/content/worktrunk.md",
        "docs/content/claude-code.md",
        "docs/content/tips-patterns.md",
    ];
    let mut all_errors = Vec::new();
    let mut total_updated = 0;
    for doc_file in doc_files {
        match sync_docs_snapshots(&project_root.join(doc_file), project_root) {
            Ok(n) => total_updated += n,
            Err(errs) => all_errors.extend(errs),
        }
    }
    assert!(
        all_errors.is_empty(),
        "Docs examples are out of sync:\n\n{}\n",
        all_errors.join("\n")
    );
    assert!(
        total_updated == 0,
        "Docs examples out of sync: updated {} section(s). \
         Run tests locally and commit the changes.",
        total_updated
    );
}
fn sync_frontmatter_description(content: &str, description: &str) -> String {
static DESC_PATTERN: LazyLock<Regex> =
LazyLock::new(|| Regex::new(r#"(?m)^description\s*=\s*"[^"]*""#).unwrap());
let new_field = format!(r#"description = "{}""#, description.replace('"', r#"\""#));
if !content.starts_with("+++\n") {
return content.to_string();
}
if DESC_PATTERN.is_match(content) {
DESC_PATTERN
.replace(content, new_field.as_str())
.to_string()
} else {
static TITLE_PATTERN: LazyLock<Regex> =
LazyLock::new(|| Regex::new(r#"(?m)^(title\s*=\s*"[^"]*")\n"#).unwrap());
TITLE_PATTERN
.replace(content, |caps: ®ex::Captures| {
format!("{}\n{}\n", &caps[1], new_field)
})
.to_string()
}
}
/// Subcommands that have a dedicated `docs/content/<cmd>.md` page generated
/// from `wt <cmd> --help-page` (and a matching skill reference page).
const COMMAND_PAGES: &[&str] = &[
    "switch", "list", "merge", "remove", "config", "step", "hook",
];
/// Regenerates each command page in `docs/content/` from `wt <cmd> --help-page`.
///
/// For every command in [`COMMAND_PAGES`]: captures the help page, normalizes
/// it, expands snapshot placeholders, converts it to HTML, refreshes the
/// frontmatter `description` from `--help-description`, and splices the
/// result into the page's AUTO-GENERATED region.
/// Returns `(errors, updated_files)`.
fn sync_command_pages(project_root: &Path) -> (Vec<String>, Vec<String>) {
    let mut errors = Vec::new();
    let mut updated_files = Vec::new();
    for cmd in COMMAND_PAGES {
        let doc_path = project_root.join(format!("docs/content/{}.md", cmd));
        if !doc_path.exists() {
            errors.push(format!("Missing command page: {}", doc_path.display()));
            continue;
        }
        let output = wt_command()
            .args([cmd, "--help-page"])
            .current_dir(project_root)
            .output()
            .expect("Failed to run wt --help-page");
        if !output.status.success() {
            errors.push(format!(
                "'wt {} --help-page' failed (exit {}): {}",
                cmd,
                output.status.code().unwrap_or(-1),
                String::from_utf8_lossy(&output.stderr)
            ));
            continue;
        }
        // Normalize trailing whitespace per line before further processing.
        let generated: String = String::from_utf8_lossy(&output.stdout)
            .lines()
            .map(|line| line.trim_end())
            .collect::<Vec<_>>()
            .join("\n");
        if generated.trim().is_empty() {
            errors.push(format!(
                "Empty output from 'wt {} --help-page': {}",
                cmd,
                String::from_utf8_lossy(&output.stderr)
            ));
            continue;
        }
        let snapshots_dir = project_root.join("tests/snapshots");
        let generated =
            match expand_command_placeholders(&generated, &snapshots_dir, ExpandMode::Html) {
                Ok(expanded) => expanded,
                Err(e) => {
                    errors.push(format!(
                        "Failed to expand placeholders for '{}': {}",
                        cmd, e
                    ));
                    continue;
                }
            };
        let generated = match convert_command_reference_to_html(&generated) {
            Ok(converted) => converted,
            Err(e) => {
                errors.push(format!(
                    "Failed to convert command reference for '{}': {}",
                    cmd, e
                ));
                continue;
            }
        };
        // Short one-line description for the page frontmatter.
        let desc_output = wt_command()
            .args([cmd, "--help-description"])
            .current_dir(project_root)
            .output()
            .expect("Failed to run wt --help-description");
        let description = String::from_utf8_lossy(&desc_output.stdout)
            .trim()
            .to_string();
        let current = fs::read_to_string(&doc_path)
            .unwrap_or_else(|e| panic!("Failed to read {}: {}", doc_path.display(), e));
        let new_content = if !description.is_empty() {
            sync_frontmatter_description(&current, &description)
        } else {
            current.clone()
        };
        // The marker pattern embeds the command name, so it has to be compiled
        // per iteration; COMMAND_PAGES is small, so the cost is negligible.
        let marker_pattern = Regex::new(&format!(
            r"(?s)<!-- ⚠️ AUTO-GENERATED from `wt {} --help-page`[^>]*-->.*?<!-- END AUTO-GENERATED from `wt {} --help-page` -->",
            cmd, cmd
        )).unwrap();
        let new_content = if let Some(m) = marker_pattern.find(&new_content) {
            let before = &new_content[..m.start()];
            let after = &new_content[m.end()..];
            format!("{}{}{}", before, generated.trim(), after)
        } else {
            errors.push(format!(
                "No AUTO-GENERATED region found in {}. \
                 Ensure file has marker region for `wt {} --help-page`.",
                doc_path.display(),
                cmd
            ));
            continue;
        };
        if current != new_content {
            fs::write(&doc_path, &new_content)
                .unwrap_or_else(|e| panic!("Failed to write {}: {}", doc_path.display(), e));
            updated_files.push(format!("docs/content/{}.md", cmd));
        }
    }
    (errors, updated_files)
}
// Zola `+++ ... +++` frontmatter block at the top of a docs page.
static ZOLA_FRONTMATTER_PATTERN: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r"(?s)^\+\+\+\n(.*?)\n\+\+\+\n*").unwrap());
// `title = "..."` field inside the frontmatter.
static ZOLA_TITLE_PATTERN: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r#"title\s*=\s*"([^"]+)""#).unwrap());
// `{% terminal(cmd="...") %} body {% end %}` shortcode with a body; the cmd
// argument is optional (group 1), body is group 2.
static ZOLA_TERMINAL_BODY_PATTERN: LazyLock<Regex> = LazyLock::new(|| {
    Regex::new(r#"(?s)\{%\s*terminal\((?:cmd="([^"]*)"\s*)?\)\s*%\}\n?(.*?)\{%\s*end\s*%\}"#)
        .unwrap()
});
// Self-closing `{{ terminal(cmd="...") }}` shortcode (no body).
static ZOLA_TERMINAL_SELF_CLOSING_PATTERN: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r#"\{\{ terminal\(cmd="([^"]*)"\) \}\}"#).unwrap());
// `{{ experimental() }}` badge shortcode.
static ZOLA_EXPERIMENTAL_SHORTCODE: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r"\{\{\s*experimental\(\)\s*\}\}").unwrap());
// Opening or closing AUTO-GENERATED marker comment plus trailing newlines.
static AUTO_GENERATED_MARKER_PATTERN: LazyLock<Regex> = LazyLock::new(|| {
    Regex::new(r"<!-- ⚠️ AUTO-GENERATED[^>]*-->\n*|<!-- END AUTO-GENERATED[^>]*-->\n*").unwrap()
});
// Entire HTML `<figure>...</figure>` elements (removed from skill output).
static HTML_FIGURE_PATTERN: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r"(?s)<figure[^>]*>.*?</figure>\n*").unwrap());
// A styled command-line span including its trailing newline (for removal).
static SPAN_CMD_PATTERN: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r#"<span class="cmd">[^<]*</span>\n?"#).unwrap());
// A styled command-line span capturing its text (for `$ `-prefix rewriting).
static SPAN_CMD_TO_DOLLAR: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r#"<span class="cmd">([^<]*)</span>"#).unwrap());
/// Renders a terminal shortcode's command (plus optional output body) as a
/// fenced bash code block. `cmd` holds one or more lines separated by `|||`;
/// blank segments become empty lines, `#` comments are emitted verbatim, and
/// other lines optionally get a `$ ` prompt prefix. Any styled cmd spans in
/// `body` are stripped before it is appended.
fn cmd_to_bash_block(cmd: &str, body: &str, with_prompt: bool) -> String {
    let mut block = String::from("```bash\n");
    for segment in cmd.split("|||") {
        match segment {
            "" => block.push('\n'),
            comment if comment.starts_with('#') => {
                block.push_str(comment);
                block.push('\n');
            }
            command => {
                if with_prompt {
                    block.push_str("$ ");
                }
                block.push_str(command);
                block.push('\n');
            }
        }
    }
    let stripped = SPAN_CMD_PATTERN.replace_all(body, "");
    if !stripped.is_empty() {
        block.push_str(&stripped);
        // Ensure the closing fence lands on its own line.
        if !stripped.ends_with('\n') {
            block.push('\n');
        }
    }
    block.push_str("```");
    block
}
fn transform_docs_for_skill(content: &str) -> String {
let title = ZOLA_FRONTMATTER_PATTERN
.captures(content)
.and_then(|caps| caps.get(1))
.and_then(|fm| ZOLA_TITLE_PATTERN.captures(fm.as_str()))
.and_then(|caps| caps.get(1))
.map(|m| m.as_str().to_string());
let content = ZOLA_FRONTMATTER_PATTERN.replace(content, "");
let content = ZOLA_TERMINAL_BODY_PATTERN.replace_all(&content, |caps: ®ex::Captures| {
let body = caps.get(2).map_or("", |m| m.as_str());
match caps.get(1) {
Some(cmd) => cmd_to_bash_block(cmd.as_str(), body, !body.trim().is_empty()),
None if body.contains(r#"<span class="cmd">"#) => {
let converted = SPAN_CMD_TO_DOLLAR.replace_all(body, "$$ $1");
format!("```bash\n{converted}```")
}
None => strip_html(body),
}
});
let content =
ZOLA_TERMINAL_SELF_CLOSING_PATTERN.replace_all(&content, |caps: ®ex::Captures| {
cmd_to_bash_block(caps.get(1).map_or("", |m| m.as_str()), "", false)
});
let content = ZOLA_RAWCODE_PATTERN.replace_all(&content, "$1");
let content = content
.replace("__WT_OPEN2__", "{{")
.replace("__WT_CLOSE2__", "}}")
.replace("__WT_QUOT__", "\"");
let content = AUTO_GENERATED_MARKER_PATTERN.replace_all(&content, "");
let content = HTML_FIGURE_PATTERN.replace_all(&content, "");
let content = ZOLA_EXPERIMENTAL_SHORTCODE.replace_all(&content, "[experimental]");
let content = content.replace(
"<span class=\"badge-experimental\"></span>",
"[experimental]",
);
let content = if let Some(title) = title {
format!("# {}\n\n{}", title, content.trim())
} else {
content.trim().to_string()
};
finalize_skill_content(&content)
}
/// Removes a markdown section: the line starting with `heading` plus all
/// following lines up to (not including) the next heading of the same or
/// shallower level. Content without the heading is returned unchanged.
fn remove_section(content: &str, heading: &str) -> String {
    let all: Vec<&str> = content.lines().collect();
    let level = heading.chars().take_while(|&c| c == '#').count();
    let Some(start) = all.iter().position(|l| l.starts_with(heading)) else {
        return content.to_string();
    };
    // First later line that is a heading at the same or a shallower level; if
    // none, the section runs to the end of the document.
    let end = all[start + 1..]
        .iter()
        .position(|l| {
            let depth = l.chars().take_while(|&c| c == '#').count();
            depth > 0 && depth <= level
        })
        .map_or(all.len(), |i| start + 1 + i);
    all[..start]
        .iter()
        .chain(all[end..].iter())
        .copied()
        .collect::<Vec<_>>()
        .join("\n")
}
/// Rewrites `$`-prefixed console code blocks in every markdown file under
/// `docs/content/` into terminal shortcodes, writing changed files back and
/// returning their paths relative to `project_root`.
fn convert_console_blocks_in_docs(project_root: &Path) -> Vec<String> {
    let docs_dir = project_root.join("docs/content");
    let mut touched = Vec::new();
    for entry in fs::read_dir(&docs_dir).unwrap() {
        let path = entry.unwrap().path();
        if !path.extension().is_some_and(|e| e == "md") {
            continue;
        }
        let original = fs::read_to_string(&path).unwrap();
        let rewritten = worktrunk::docs::convert_dollar_console_to_terminal(&original);
        if rewritten != original {
            fs::write(&path, &rewritten).unwrap();
            let rel = path.strip_prefix(project_root).unwrap_or(&path);
            touched.push(rel.display().to_string());
        }
    }
    touched
}
/// Mirrors every public `docs/content/*.md` page into
/// `skills/worktrunk/reference/`: command pages are regenerated from
/// `wt <cmd> --help-page --plain`, all other pages are transformed from the
/// docs source. Stale files are rewritten on disk.
/// Returns `(errors, updated_files)`.
fn sync_skill_files(project_root: &Path) -> (Vec<String>, Vec<String>) {
    let mut errors = Vec::new();
    let mut updated_files = Vec::new();
    let docs_dir = project_root.join("docs/content");
    let skill_dir = project_root.join("skills/worktrunk/reference");
    // Collect eligible page names (skip underscore-prefixed partials), sorted
    // for deterministic processing order.
    let mut entries: Vec<_> = fs::read_dir(&docs_dir)
        .unwrap_or_else(|e| panic!("Failed to read {}: {}", docs_dir.display(), e))
        .filter_map(|entry| {
            let entry = entry.ok()?;
            let name = entry.file_name().to_string_lossy().to_string();
            if name.ends_with(".md") && !name.starts_with('_') {
                Some(name)
            } else {
                None
            }
        })
        .collect();
    entries.sort();
    for name in &entries {
        let skill_file = skill_dir.join(name);
        let cmd_name = name.trim_end_matches(".md");
        let expected = if COMMAND_PAGES.contains(&cmd_name) {
            match generate_skill_from_help(cmd_name, project_root) {
                Ok(content) => content,
                Err(e) => {
                    errors.push(e);
                    continue;
                }
            }
        } else {
            let docs_file = docs_dir.join(name);
            if !docs_file.exists() {
                errors.push(format!("Missing docs file: {}", docs_file.display()));
                continue;
            }
            let docs_content = fs::read_to_string(&docs_file)
                .unwrap_or_else(|e| panic!("Failed to read {}: {}", docs_file.display(), e));
            transform_docs_for_skill(&docs_content)
        };
        // Compare with trailing whitespace normalized on both sides.
        let expected = trim_lines(&expected);
        let current = if skill_file.exists() {
            fs::read_to_string(&skill_file)
                .unwrap_or_else(|e| panic!("Failed to read {}: {}", skill_file.display(), e))
        } else {
            String::new()
        };
        let current = trim_lines(&current);
        if current != expected {
            if let Some(parent) = skill_file.parent() {
                fs::create_dir_all(parent).unwrap_or_else(|e| {
                    panic!("Failed to create directory {}: {}", parent.display(), e)
                });
            }
            fs::write(&skill_file, format!("{}\n", expected))
                .unwrap_or_else(|e| panic!("Failed to write {}: {}", skill_file.display(), e));
            updated_files.push(format!("skills/worktrunk/reference/{name}"));
        }
    }
    (errors, updated_files)
}
/// Builds the skill reference page for `cmd` from
/// `wt <cmd> --help-page --plain`, expanding snapshot placeholders and
/// normalizing the result for skill output.
fn generate_skill_from_help(cmd: &str, project_root: &Path) -> Result<String, String> {
    let output = wt_command()
        .args([cmd, "--help-page", "--plain"])
        .current_dir(project_root)
        .output()
        .expect("Failed to run wt --help-page --plain");
    if !output.status.success() {
        return Err(format!(
            "'wt {} --help-page --plain' failed (exit {}): {}",
            cmd,
            output.status.code().unwrap_or(-1),
            String::from_utf8_lossy(&output.stderr)
        ));
    }
    let page = String::from_utf8_lossy(&output.stdout).into_owned();
    if page.trim().is_empty() {
        return Err(format!(
            "Empty output from 'wt {} --help-page --plain': {}",
            cmd,
            String::from_utf8_lossy(&output.stderr)
        ));
    }
    let snapshots_dir = project_root.join("tests/snapshots");
    let expanded = expand_command_placeholders(&page, &snapshots_dir, ExpandMode::Plain)?;
    Ok(finalize_skill_content(&expanded))
}
fn finalize_skill_content(content: &str) -> String {
let content = ZOLA_LINK_PATTERN
.replace_all(content, |caps: ®ex::Captures| {
let text = caps.get(1).unwrap().as_str();
let page = caps.get(2).unwrap().as_str();
let anchor = caps.get(3).map_or("", |m| m.as_str());
format!("[{text}](https://worktrunk.dev/{page}/{anchor})")
})
.into_owned();
if let Some(m) = UNTRANSFORMED_ZOLA_LINK_PATTERN.find(&content) {
let snippet_start = content[..m.start()].rfind('\n').map_or(0, |i| i + 1);
let snippet_end = content[m.end()..]
.find('\n')
.map_or(content.len(), |i| m.end() + i);
panic!(
"ZOLA_LINK_PATTERN failed to transform a Zola internal link in skill content — \
likely an unsupported character in the link text. Offending line:\n{}",
&content[snippet_start..snippet_end]
);
}
let content = remove_section(&content, "## See also");
content
.lines()
.fold((Vec::new(), false), |(mut acc, prev_blank), line| {
let is_blank = line.trim().is_empty();
if !(is_blank && prev_blank) {
acc.push(line);
}
(acc, is_blank)
})
.0
.join("\n")
}
/// Validates the `.well-known/agent-skills/worktrunk` symlink and regenerates
/// `index.json` with the current SKILL.md description and sha256 digest.
/// Returns the list of files it rewrote (empty when already up to date).
fn sync_well_known_skills(project_root: &Path) -> Vec<String> {
    let mut updated_files = Vec::new();
    let well_known_dir = project_root.join("docs/static/.well-known/agent-skills");
    let symlink_path = well_known_dir.join("worktrunk");
    // Relative target: four levels up from the symlink back to the repo root.
    let expected_target = Path::new("../../../../skills/worktrunk");
    match fs::read_link(&symlink_path) {
        Ok(target) => {
            assert_eq!(
                target,
                expected_target,
                "Symlink at {} points to {:?}, expected {:?}",
                symlink_path.display(),
                target,
                expected_target
            );
        }
        Err(_) => {
            // read_link fails for missing paths and for regular files/dirs alike.
            panic!(
                "Expected symlink at {} → {:?}, but it doesn't exist or isn't a symlink",
                symlink_path.display(),
                expected_target
            );
        }
    }
    // Read SKILL.md through the symlink.
    let skill_md_path = symlink_path.join("SKILL.md");
    let skill_md_content = fs::read_to_string(&skill_md_path)
        .unwrap_or_else(|e| panic!("Failed to read {}: {}", skill_md_path.display(), e));
    // Content-address the skill file (raw bytes, not the lossy string above)
    // so consumers can detect drift.
    let digest = {
        use sha2::{Digest, Sha256};
        let file_bytes = fs::read(&skill_md_path)
            .unwrap_or_else(|e| panic!("Failed to read {}: {}", skill_md_path.display(), e));
        let hash = Sha256::digest(&file_bytes);
        let hex: String = hash.iter().map(|b| format!("{b:02x}")).collect();
        format!("sha256:{hex}")
    };
    // Pull the `description:` line out of the SKILL.md `---` YAML frontmatter;
    // falls back to an empty string if absent.
    let description = skill_md_content
        .strip_prefix("---\n")
        .and_then(|rest| rest.split_once("\n---"))
        .and_then(|(frontmatter, _)| {
            frontmatter
                .lines()
                .find(|line| line.starts_with("description:"))
                .map(|line| line.trim_start_matches("description:").trim().to_string())
        })
        .unwrap_or_default();
    // `description` is JSON-encoded (quoted/escaped) via serde_json before
    // being interpolated, so the generated file is always valid JSON.
    let index_json = format!(
        "{{\n \"$schema\": \"https://schemas.agentskills.io/discovery/0.2.0/schema.json\",\n \"skills\": [\n {{\n \"name\": \"worktrunk\",\n \"type\": \"skill-md\",\n \"description\": {description},\n \"url\": \"./worktrunk/SKILL.md\",\n \"digest\": \"{digest}\"\n }}\n ]\n}}\n",
        description = serde_json::to_string(&description).unwrap(),
    );
    let index_dst = well_known_dir.join("index.json");
    // A missing index reads as empty, which forces a (re)write below.
    let current_index = fs::read_to_string(&index_dst).unwrap_or_default();
    if current_index != index_json {
        fs::write(&index_dst, &index_json)
            .unwrap_or_else(|e| panic!("Failed to write {}: {}", index_dst.display(), e));
        updated_files.push("docs/static/.well-known/agent-skills/index.json".to_string());
    }
    updated_files
}
/// Runs all doc/skill sync passes in order and fails if any pass reported
/// errors or had to rewrite files.
#[test]
fn test_command_pages_and_skill_files_are_in_sync() {
    let project_root = Path::new(env!("CARGO_MANIFEST_DIR"));
    // Order matters: command pages first, then console-block conversion,
    // then the skill mirror and the .well-known index.
    let (cmd_errors, cmd_files) = sync_command_pages(project_root);
    let console_files = convert_console_blocks_in_docs(project_root);
    let (skill_errors, skill_files) = sync_skill_files(project_root);
    let well_known_files = sync_well_known_skills(project_root);
    let mut all_errors = cmd_errors;
    all_errors.extend(skill_errors);
    let mut all_files = cmd_files;
    all_files.extend(console_files);
    all_files.extend(skill_files);
    all_files.extend(well_known_files);
    assert!(
        all_errors.is_empty(),
        "Sync errors:\n\n{}\n",
        all_errors.join("\n")
    );
    assert!(
        all_files.is_empty(),
        "Files out of sync (updated):\n {}\n\nRun tests locally and commit the changes.",
        all_files.join("\n ")
    );
}
/// Verifies that the `## Template variables` markdown table in
/// `src/cli/mod.rs` lists exactly the variables declared by the constants in
/// `src/config/expansion.rs`, grouped by kind.
#[test]
fn test_template_variables_table_matches_constants() {
    use std::collections::{BTreeMap, BTreeSet};
    use strum::IntoEnumIterator;
    use worktrunk::config::{
        ACTIVE_VARS, ALIAS_ARGS_KEY, DEPRECATED_TEMPLATE_VARS, EXEC_BASE_VARS, REPO_VARS,
        ValidationScope, vars_available_in,
    };
    use worktrunk::git::HookType;
    let cli_path = Path::new(env!("CARGO_MANIFEST_DIR")).join("src/cli/mod.rs");
    let content = fs::read_to_string(&cli_path).unwrap();
    // Slice out the section between `## Template variables` and the next `## `.
    let heading = "\n## Template variables\n";
    let start = content
        .find(heading)
        .expect("`## Template variables` heading missing in src/cli/mod.rs");
    let rest = &content[start + heading.len()..];
    let end = rest.find("\n## ").unwrap_or(rest.len());
    let section = &rest[..end];
    // Matches `{{ variable_name }}` placeholders inside table cells.
    let var_re = Regex::new(r"\{\{\s*([a-zA-Z_][a-zA-Z0-9_.<>]*)\s*\}\}").unwrap();
    // kind -> set of variable names actually listed in the table.
    let mut actual: BTreeMap<String, BTreeSet<String>> = BTreeMap::new();
    // The Kind column is only filled on the first row of each group, so the
    // last non-empty value is carried forward.
    let mut current_kind: Option<String> = None;
    for line in section.lines() {
        // Only table rows; skip the `|---|` separator row.
        if !line.starts_with("| ") || line.starts_with("|---") {
            continue;
        }
        let cells: Vec<&str> = line.split('|').map(str::trim).collect();
        if cells.len() < 4 {
            continue;
        }
        let kind_cell = cells[1];
        let var_cell = cells[2];
        // Skip the header row.
        if kind_cell == "Kind" {
            continue;
        }
        if !kind_cell.is_empty() {
            current_kind = Some(kind_cell.to_string());
        }
        let Some(kind) = current_kind.as_ref() else {
            continue;
        };
        if let Some(cap) = var_re.captures(var_cell) {
            let name = cap[1].to_string();
            actual.entry(kind.clone()).or_default().insert(name);
        }
    }
    // kind -> set of variable names derived from the source-of-truth constants.
    let mut expected: BTreeMap<String, BTreeSet<String>> = BTreeMap::new();
    expected.insert(
        "active".into(),
        ACTIVE_VARS.iter().map(|s| s.to_string()).collect(),
    );
    expected.insert(
        "repo".into(),
        REPO_VARS.iter().map(|s| s.to_string()).collect(),
    );
    // "exec" kind = base exec vars plus hook metadata and alias args.
    let mut exec: BTreeSet<String> = EXEC_BASE_VARS.iter().map(|s| s.to_string()).collect();
    exec.insert("hook_type".into());
    exec.insert("hook_name".into());
    exec.insert(ALIAS_ARGS_KEY.to_string());
    expected.insert("exec".into(), exec);
    expected.insert("user".into(), BTreeSet::from(["vars.<key>".to_string()]));
    // Everything already covered by the base/deprecated sets; hook-scope
    // variables outside these (and outside the infra/args names) count as the
    // "operation" kind.
    let base: BTreeSet<&&str> = ACTIVE_VARS
        .iter()
        .chain(REPO_VARS.iter())
        .chain(EXEC_BASE_VARS.iter())
        .chain(DEPRECATED_TEMPLATE_VARS.iter())
        .collect();
    let infra_and_args: BTreeSet<&str> = ["hook_type", "hook_name", ALIAS_ARGS_KEY].into();
    let mut operation: BTreeSet<String> = BTreeSet::new();
    for ht in HookType::iter() {
        for v in vars_available_in(ValidationScope::Hook(ht)) {
            if !base.contains(&v) && !infra_and_args.contains(v) {
                operation.insert(v.to_string());
            }
        }
    }
    expected.insert("operation".into(), operation);
    assert_eq!(
        actual, expected,
        "`## Template variables` table in src/cli/mod.rs drifted from \
        constants in src/config/expansion.rs. Update the table or the constants."
    );
}
/// Ensures the hook help page carries the yellow-styled approval prompt span.
#[test]
fn test_approval_prompt_styled_in_hook_page() {
    let project_root = Path::new(env!("CARGO_MANIFEST_DIR"));
    let output = wt_command()
        .args(["hook", "--help-page"])
        .current_dir(project_root)
        .output()
        .expect("Failed to run wt hook --help-page");
    let page = String::from_utf8_lossy(&output.stdout);
    let has_styled_prompt = page.contains(r#"class="y""#);
    assert!(
        has_styled_prompt,
        "hook --help-page should contain styled approval prompt (class=\"y\" for yellow ▲). \
        If cli/mod.rs approval example changed, update the replacement in help.rs post_process_for_html()."
    );
}