use std::{fmt::Write, fs, path::Path};
use html_escape;
use super::process::process_safe;
/// Runs a CSS selector against `document` and returns the matched nodes.
///
/// A failing selector is logged as a warning and produces an empty list
/// instead of propagating the error.
fn safe_select(
    document: &kuchikikiki::NodeRef,
    selector: &str,
) -> Vec<kuchikikiki::NodeRef> {
    document.select(selector).map_or_else(
        |e| {
            log::warn!("DOM selector '{selector}' failed: {e:?}");
            Vec::new()
        },
        |matches| matches.map(|m| m.as_node().clone()).collect(),
    )
}
#[cfg(feature = "gfm")]
#[must_use]
/// Hook for GFM-specific markdown preprocessing.
///
/// Currently a pass-through: the input is returned unchanged.
pub fn apply_gfm_extensions(markdown: &str) -> String {
    String::from(markdown)
}
/// Upper bound on nested `{=include=}` expansion; guards against cycles in
/// the include graph (enforced by `process_file_includes`).
const MAX_INCLUDE_DEPTH: usize = 8;
#[cfg(feature = "nixpkgs")]
/// Returns `true` when `path` is safe to include: relative, free of
/// backslashes, and without any `..` components that could escape the base
/// directory.
fn is_safe_path(path: &str, _base_dir: &Path) -> bool {
    let candidate = Path::new(path);
    !candidate.is_absolute()
        && !path.contains('\\')
        && candidate
            .components()
            .all(|component| !matches!(component, std::path::Component::ParentDir))
}
#[cfg(feature = "nixpkgs")]
/// Extracts the value of an `html:into-file=` attribute from an include
/// fence opener, if present.
///
/// The value runs up to the first space; when no space follows, the rest of
/// the line (trimmed) is taken.
fn parse_include_directive(line: &str) -> Option<String> {
    const KEY: &str = "html:into-file=";
    let value_start = line.find(KEY)? + KEY.len();
    let value = &line[value_start..];
    match value.find(' ') {
        Some(end) => Some(value[..end].to_string()),
        None => Some(value.trim().to_string()),
    }
}
#[cfg(feature = "nixpkgs")]
#[allow(
    clippy::needless_pass_by_value,
    reason = "Owned value needed for cloning in loop"
)]
/// Expands a `{=include=}` listing: each non-empty line of `listing` is a
/// path relative to `base_dir` whose contents are read, recursively
/// expanded, and appended to the returned string.
///
/// Every successfully included file (and any nested include that normalizes
/// under `base_dir`) is recorded in `included_files`, tagged with
/// `custom_output` (cloned per file). Unsafe paths are silently skipped;
/// unreadable files leave an HTML comment marker instead of failing the run.
///
/// # Errors
///
/// Propagates the recursion-depth error from `process_file_includes`.
fn read_includes(
    listing: &str,
    base_dir: &Path,
    custom_output: Option<String>,
    included_files: &mut Vec<crate::types::IncludedFile>,
    depth: usize,
) -> Result<String, String> {
    let mut result = String::new();
    for line in listing.lines() {
        let trimmed = line.trim();
        // Skip blanks and anything absolute / backslashed / parent-escaping.
        if trimmed.is_empty() || !is_safe_path(trimmed, base_dir) {
            continue;
        }
        let full_path = base_dir.join(trimmed);
        log::info!("Including file: {}", full_path.display());
        match fs::read_to_string(&full_path) {
            Ok(content) => {
                // Nested includes resolve relative to the included file's own
                // directory, with the depth counter bumped.
                let file_dir = full_path.parent().unwrap_or(base_dir);
                let (processed_content, nested_includes) =
                    process_file_includes(&content, file_dir, depth + 1)?;
                result.push_str(&processed_content);
                if !processed_content.ends_with('\n') {
                    result.push('\n');
                }
                included_files.push(crate::types::IncludedFile {
                    path: trimmed.to_string(),
                    custom_output: custom_output.clone(),
                });
                // Re-anchor nested include paths under `base_dir`; nested
                // files that don't normalize under it are not recorded.
                for nested in nested_includes {
                    let nested_full_path = file_dir.join(&nested.path);
                    if let Ok(normalized_path) = nested_full_path.strip_prefix(base_dir) {
                        included_files.push(crate::types::IncludedFile {
                            path: normalized_path.to_string_lossy().to_string(),
                            custom_output: nested.custom_output,
                        });
                    }
                }
            },
            Err(_) => {
                // Best-effort: leave a breadcrumb in the output rather than
                // aborting the entire include pass.
                let _ = writeln!(
                    result,
                    "<!-- ndg: could not include file: {} -->",
                    full_path.display()
                );
            },
        }
    }
    Ok(result)
}
#[cfg(feature = "nixpkgs")]
/// Expands nixpkgs-style `{=include=}` fenced blocks in `markdown`.
///
/// Returns the expanded markdown plus the list of files that were included
/// (directly or transitively). Include fences inside other code blocks are
/// left untouched.
///
/// # Errors
///
/// Fails once `depth` reaches `MAX_INCLUDE_DEPTH`, which indicates a cycle
/// in the include graph.
pub fn process_file_includes(
    markdown: &str,
    base_dir: &std::path::Path,
    depth: usize,
) -> Result<(String, Vec<crate::types::IncludedFile>), String> {
    if depth >= MAX_INCLUDE_DEPTH {
        return Err(format!(
            "Maximum include recursion depth ({MAX_INCLUDE_DEPTH}) exceeded. This \
             likely indicates a cycle in file includes."
        ));
    }
    let mut output = String::new();
    let mut lines = markdown.lines();
    let mut fence_tracker = crate::utils::codeblock::FenceTracker::new();
    let mut all_included_files: Vec<crate::types::IncludedFile> = Vec::new();
    while let Some(line) = lines.next() {
        let trimmed = line.trim_start();
        // Only treat the directive as such outside fenced code blocks.
        if !fence_tracker.in_code_block() && trimmed.starts_with("```{=include=}") {
            // Optional `html:into-file=` attribute on the opening fence.
            let custom_output = parse_include_directive(trimmed);
            // Gather the path listing up to the closing fence.
            let mut include_listing = String::new();
            for next_line in lines.by_ref() {
                if next_line.trim_start().starts_with("```") {
                    break;
                }
                include_listing.push_str(next_line);
                include_listing.push('\n');
            }
            // `read_includes` bumps the depth when it recurses.
            let included = read_includes(
                &include_listing,
                base_dir,
                custom_output,
                &mut all_included_files,
                depth,
            )?;
            output.push_str(&included);
            continue;
        }
        fence_tracker = fence_tracker.process_line(line);
        output.push_str(line);
        output.push('\n');
    }
    Ok((output, all_included_files))
}
#[cfg(any(feature = "nixpkgs", feature = "ndg-flavored"))]
#[must_use]
#[allow(
    clippy::implicit_hasher,
    reason = "Standard HashMap/HashSet sufficient for this use case"
)]
/// Scans `content` for inline role markup (`` {role}`body` ``) and replaces
/// each occurrence with HTML via `format_role_markup`, leaving backtick and
/// tilde code spans untouched.
///
/// `manpage_urls`, `auto_link_options`, and `valid_options` are forwarded to
/// `format_role_markup` to control manpage/option link generation.
pub fn process_role_markup(
    content: &str,
    manpage_urls: Option<&std::collections::HashMap<String, String>>,
    auto_link_options: bool,
    valid_options: Option<&std::collections::HashSet<String>>,
) -> String {
    let mut result = String::new();
    let mut chars = content.chars().peekable();
    let mut tracker = crate::utils::codeblock::InlineTracker::new();
    while let Some(ch) = chars.next() {
        if ch == '`' {
            let (new_tracker, tick_count) = tracker.process_backticks(&mut chars);
            tracker = new_tracker;
            result.push_str(&"`".repeat(tick_count));
            continue;
        }
        if ch == '~' && chars.peek() == Some(&'~') {
            let (new_tracker, tilde_count) = tracker.process_tildes(&mut chars);
            tracker = new_tracker;
            result.push_str(&"~".repeat(tilde_count));
            continue;
        }
        if ch == '\n' {
            tracker = tracker.process_newline();
            result.push(ch);
            continue;
        }
        if ch == '{' && !tracker.in_any_code() {
            // Collect the remainder once (no intermediate Vec<char>) so the
            // role parser can run on a lookahead copy.
            let remaining_str: String = chars.clone().collect();
            let mut temp_chars = remaining_str.chars().peekable();
            if let Some(role_markup) = parse_role_markup(
                &mut temp_chars,
                manpage_urls,
                auto_link_options,
                valid_options,
            ) {
                let remaining_after_parse: String = temp_chars.collect();
                // BUG FIX: the length difference is in *bytes*, but `chars`
                // advances one *character* per `next()`. Convert the byte
                // count to a character count, otherwise multi-byte content in
                // a role body caused text after the role to be skipped.
                let consumed_bytes = remaining_str.len() - remaining_after_parse.len();
                let consumed_chars = remaining_str[..consumed_bytes].chars().count();
                for _ in 0..consumed_chars {
                    chars.next();
                }
                result.push_str(&role_markup);
            } else {
                result.push(ch);
            }
        } else {
            result.push(ch);
        }
    }
    result
}
/// Attempts to parse `` role}`body` `` from `chars` (the opening `{` has
/// already been consumed by the caller).
///
/// On success the matched characters are consumed and the rendered HTML is
/// returned; on failure `None` is returned (the caller treats the `{` as a
/// literal). Empty bodies are rejected for every role except `manpage`.
fn parse_role_markup(
    chars: &mut std::iter::Peekable<std::str::Chars>,
    manpage_urls: Option<&std::collections::HashMap<String, String>>,
    auto_link_options: bool,
    valid_options: Option<&std::collections::HashSet<String>>,
) -> Option<String> {
    // Role name: one or more ASCII lowercase letters.
    let mut role_name = String::new();
    while let Some(ch) = chars.next_if(char::is_ascii_lowercase) {
        role_name.push(ch);
    }
    if role_name.is_empty() {
        return None;
    }
    // The name must be followed immediately by "}`".
    chars.next_if_eq(&'}')?;
    chars.next_if_eq(&'`')?;
    // Body: everything up to the closing backtick.
    let mut body = String::new();
    for ch in chars.by_ref() {
        if ch != '`' {
            body.push(ch);
            continue;
        }
        if body.is_empty() && role_name != "manpage" {
            return None;
        }
        return Some(format_role_markup(
            &role_name,
            &body,
            manpage_urls,
            auto_link_options,
            valid_options,
        ));
    }
    None
}
#[must_use]
#[allow(
    clippy::implicit_hasher,
    reason = "Standard HashMap/HashSet sufficient for this use case"
)]
/// Renders a single role occurrence (`` {role}`content` ``) as HTML.
///
/// `manpage` roles become links when `manpage_urls` has a mapping for the
/// content; `option` roles become links to `options.html` when the
/// `ndg-flavored` feature and `auto_link_options` allow it (restricted to
/// `valid_options` when provided); unrecognized roles fall back to a
/// `<span class="{role}-markup">`.
pub fn format_role_markup(
    role_type: &str,
    content: &str,
    manpage_urls: Option<&std::collections::HashMap<String, String>>,
    auto_link_options: bool,
    valid_options: Option<&std::collections::HashSet<String>>,
) -> String {
    let escaped = html_escape::encode_text(content);
    match role_type {
        "manpage" => manpage_urls
            .and_then(|urls| urls.get(content))
            .map_or_else(
                || format!("<span class=\"manpage-reference\">{escaped}</span>"),
                |url| {
                    format!(
                        "<a href=\"{url}\" class=\"manpage-reference\">{escaped}</a>"
                    )
                },
            ),
        "command" => format!("<code class=\"command\">{escaped}</code>"),
        "env" => format!("<code class=\"env-var\">{escaped}</code>"),
        "file" => format!("<code class=\"file-path\">{escaped}</code>"),
        "option" => {
            // `None` for valid_options means "link everything".
            let linkable = cfg!(feature = "ndg-flavored")
                && auto_link_options
                && valid_options.is_none_or(|opts| opts.contains(content));
            if linkable {
                let option_id = format!("option-{}", content.replace('.', "-"));
                format!(
                    "<a class=\"option-reference\" href=\"options.html#{option_id}\"><code class=\"nixos-option\">{escaped}</code></a>"
                )
            } else {
                format!("<code class=\"nixos-option\">{escaped}</code>")
            }
        },
        "var" => format!("<code class=\"nix-var\">{escaped}</code>"),
        _ => format!("<span class=\"{role_type}-markup\">{escaped}</span>"),
    }
}
#[must_use]
/// Applies MyST autolink rewriting (`[](url)` forms) to every line of
/// `content`, leaving lines inside fenced code blocks untouched.
pub fn process_myst_autolinks(content: &str) -> String {
    let mut tracker = crate::utils::codeblock::FenceTracker::new();
    let mut rendered = String::with_capacity(content.len());
    for line in content.lines() {
        tracker = tracker.process_line(line);
        let piece: std::borrow::Cow<'_, str> = if tracker.in_code_block() {
            std::borrow::Cow::Borrowed(line)
        } else {
            std::borrow::Cow::Owned(process_line_myst_autolinks(line))
        };
        rendered.push_str(&piece);
        rendered.push('\n');
    }
    rendered
}
/// Rewrites MyST `[](url)` autolinks on a single line:
/// `[](#frag)` becomes `[{{ANCHOR}}](#frag)`, `[](http…)`/`[](https…)`
/// become `<url>`, anything else passes through as `[](url)`.
///
/// Anchor syntax `[]{…}` and malformed/bare `[]` sequences are reproduced
/// verbatim. (Previously the leading `[` was dropped for bare `[]`,
/// unclosed `[](…`, and empty `[]()` inputs.)
fn process_line_myst_autolinks(line: &str) -> String {
    let mut result = String::with_capacity(line.len());
    let mut chars = line.chars().peekable();
    while let Some(ch) = chars.next() {
        if ch != '[' || chars.peek() != Some(&']') {
            result.push(ch);
            continue;
        }
        chars.next(); // consume ']'
        if chars.peek() == Some(&'{') {
            // Anchor syntax []{...}: leave for the anchor processor.
            result.push_str("[]");
            continue;
        }
        if chars.peek() != Some(&'(') {
            // Bare "[]": emit verbatim (old code dropped the '[').
            result.push_str("[]");
            continue;
        }
        chars.next(); // consume '('
        let mut url = String::new();
        let mut found_closing = false;
        while let Some(&next_ch) = chars.peek() {
            if next_ch == ')' {
                chars.next();
                found_closing = true;
                break;
            }
            url.push(next_ch);
            chars.next();
        }
        if found_closing && !url.is_empty() {
            if url.starts_with('#') {
                // Fragment link: placeholder text filled in later.
                let _ = write!(result, "[{{{{ANCHOR}}}}]({url})");
            } else if url.starts_with("http://") || url.starts_with("https://") {
                let _ = write!(result, "<{url}>");
            } else {
                let _ = write!(result, "[]({url})");
            }
        } else {
            // Malformed or empty link: reproduce the consumed input exactly
            // (old code dropped the '[' and, for "[]()" the ')').
            result.push_str("[](");
            result.push_str(&url);
            if found_closing {
                result.push(')');
            }
        }
    }
    result
}
#[cfg(feature = "nixpkgs")]
#[must_use]
/// Converts inline `[]{#id}` anchors to `<span class="nixos-anchor">`
/// elements line by line, with special handling for anchors that open a
/// list item. Lines inside fenced code blocks pass through verbatim.
pub fn process_inline_anchors(content: &str) -> String {
    let mut out = String::with_capacity(content.len() + 100);
    let mut tracker = crate::utils::codeblock::FenceTracker::new();
    for line in content.lines() {
        tracker = tracker.process_line(line);
        if tracker.in_code_block() {
            out.push_str(line);
            out.push('\n');
            continue;
        }
        // Prefer the list-item form; fall back to generic inline anchors.
        let list_item = find_list_item_anchor(line.trim_start())
            .and_then(|start| process_list_item_anchor(line, start));
        match list_item {
            Some(processed) => out.push_str(&processed),
            None => out.push_str(&process_line_anchors(line)),
        }
        out.push('\n');
    }
    out
}
/// Finds the byte offset of a `[]{#…}` anchor that immediately follows a
/// list marker (`- `, `* `, `+ `, or `N. `) in `trimmed`.
///
/// Returns the offset of the `[` within `trimmed`, or `None` when the line
/// is not a list item with a leading anchor. Rewritten with byte scanning:
/// the old version called `chars().nth(i)` inside a loop (accidentally
/// quadratic) and mixed byte lengths with character indices.
fn find_list_item_anchor(trimmed: &str) -> Option<usize> {
    // Bullet list: marker, one space, then the anchor.
    for marker in ["- ", "* ", "+ "] {
        if let Some(rest) = trimmed.strip_prefix(marker) {
            if rest.starts_with("[]{#") {
                return Some(2);
            }
        }
    }
    // Ordered list: one or more ASCII digits, '.', space, then the anchor.
    let digits = trimmed.bytes().take_while(u8::is_ascii_digit).count();
    if digits > 0 && trimmed.as_bytes().get(digits) == Some(&b'.') {
        let after_marker = &trimmed[digits + 1..];
        if after_marker.starts_with(" []{#") {
            // '[' sits one past the space after the dot.
            return Some(digits + 2);
        }
    }
    None
}
/// Replaces the `[]{#id}` anchor beginning at byte `anchor_start` of `line`
/// with a `<span class="nixos-anchor">` element.
///
/// Returns `None` when the text at that offset is not an anchor, the id is
/// empty, or the id contains characters outside `[A-Za-z0-9_-]`.
fn process_list_item_anchor(line: &str, anchor_start: usize) -> Option<String> {
    let (prefix, rest) = line.split_at(anchor_start);
    let body = rest.strip_prefix("[]{#")?;
    let close = body.find('}')?;
    let id = &body[..close];
    let tail = &body[close + 1..];
    let id_ok = !id.is_empty()
        && id
            .chars()
            .all(|c| c.is_ascii_alphanumeric() || c == '-' || c == '_');
    id_ok.then(|| {
        format!("{prefix}<span id=\"{id}\" class=\"nixos-anchor\"></span>{tail}")
    })
}
/// Converts every valid `[]{#id}` occurrence on a line to a
/// `<span class="nixos-anchor">` element; everything else passes through
/// verbatim.
///
/// Fixes in this revision: bare `[]` / `[]{` no longer lose their `[`; the
/// fallback for an empty id emits `[]{#}` instead of doubled braces; an
/// unterminated `[]{#abc` at end of line is reproduced instead of being
/// silently dropped.
fn process_line_anchors(line: &str) -> String {
    let mut result = String::with_capacity(line.len());
    let mut chars = line.chars().peekable();
    while let Some(ch) = chars.next() {
        if ch != '[' || chars.peek() != Some(&']') {
            result.push(ch);
            continue;
        }
        chars.next(); // ']'
        if chars.peek() != Some(&'{') {
            result.push_str("[]");
            continue;
        }
        chars.next(); // '{'
        if chars.peek() != Some(&'#') {
            result.push_str("[]{");
            continue;
        }
        chars.next(); // '#'
        let mut id = String::new();
        loop {
            match chars.peek() {
                Some(&'}') => {
                    chars.next();
                    if id.is_empty() {
                        // "[]{#}" has no usable id — keep it verbatim.
                        result.push_str("[]{#}");
                    } else {
                        let _ = write!(
                            result,
                            "<span id=\"{id}\" class=\"nixos-anchor\"></span>"
                        );
                    }
                    break;
                },
                Some(&c) if c.is_ascii_alphanumeric() || c == '-' || c == '_' => {
                    id.push(c);
                    chars.next();
                },
                // Invalid id character or end of line: restore the
                // consumed prefix and resume normal copying.
                _ => {
                    let _ = write!(result, "[]{{#{id}");
                    break;
                },
            }
        }
    }
    result
}
#[cfg(feature = "nixpkgs")]
#[must_use]
/// Renders block-level markdown extensions to HTML: GitHub callouts
/// (`> [!NOTE] …`), figures (`::: {.figure}`), and fenced admonitions
/// (`::: {.type}`). Lines inside fenced code blocks pass through verbatim.
pub fn process_block_elements(content: &str) -> String {
    let mut result = Vec::new();
    let mut lines = content.lines().peekable();
    let mut fence_tracker = crate::utils::codeblock::FenceTracker::new();
    while let Some(line) = lines.next() {
        fence_tracker = fence_tracker.process_line(line);
        if !fence_tracker.in_code_block() {
            if let Some((callout_type, initial_content)) = parse_github_callout(line)
            {
                let content =
                    collect_github_callout_content(&mut lines, &initial_content);
                let admonition = render_admonition(&callout_type, None, &content);
                result.push(admonition);
                continue;
            }
            // BUG FIX: figures must be tried before generic admonitions —
            // `parse_fenced_admonition_start` accepts any `::: {.…}` opener,
            // including `{.figure…}`, so figure blocks were previously
            // rendered as admonitions and `parse_figure_block` never ran.
            if let Some((id, title, content)) = parse_figure_block(line, &mut lines) {
                let figure = render_figure(id.as_deref(), &title, &content);
                result.push(figure);
                continue;
            }
            if let Some((adm_type, id)) = parse_fenced_admonition_start(line) {
                let content = collect_fenced_content(&mut lines);
                let admonition = render_admonition(&adm_type, id.as_deref(), &content);
                result.push(admonition);
                continue;
            }
        }
        result.push(line.to_string());
    }
    result.join("\n")
}
/// Recognizes the opening line of a GitHub-style callout, e.g.
/// `> [!NOTE] first line`.
///
/// Returns the lowercased callout type and any content after the marker, or
/// `None` for unsupported types or non-callout lines.
fn parse_github_callout(line: &str) -> Option<(String, String)> {
    let rest = line.trim_start().strip_prefix("> [!")?;
    let close = rest.find(']')?;
    let kind = &rest[..close];
    matches!(
        kind,
        "NOTE" | "TIP" | "IMPORTANT" | "WARNING" | "CAUTION" | "DANGER"
    )
    .then(|| (kind.to_lowercase(), rest[close + 1..].trim().to_string()))
}
/// Returns `true` when `line` is an ATX header: one to six leading `#`
/// characters followed by nothing or by whitespace.
///
/// BUG FIX: the previous version also accepted a single trailing
/// non-whitespace character (e.g. `"#a"` was treated as a header) via a
/// spurious end-of-string check; per ATX heading rules the `#` run must be
/// followed by whitespace or end of line.
fn is_atx_header(line: &str) -> bool {
    let hashes = line.bytes().take_while(|&b| b == b'#').count();
    if !(1..=6).contains(&hashes) {
        return false;
    }
    // `#` is ASCII, so `hashes` is a valid byte offset into `line`.
    line[hashes..].chars().next().is_none_or(char::is_whitespace)
}
/// Accumulates the body of a GitHub-style callout that began on the previous
/// line, consuming continuation lines from `lines` until the callout ends.
///
/// The callout ends at the first blank line, or — for lines without a
/// leading `>` — at anything that clearly opens a new block (ATX header,
/// code fence, or a `---`/`===`/`***` rule). The terminating line is NOT
/// consumed. The collected body is returned trimmed.
fn collect_github_callout_content(
    lines: &mut std::iter::Peekable<std::str::Lines>,
    initial_content: &str,
) -> String {
    let mut content = String::new();
    if !initial_content.is_empty() {
        content.push_str(initial_content);
        content.push('\n');
    }
    while let Some(line) = lines.peek() {
        let trimmed = line.trim_start();
        // Blank line terminates the callout.
        if trimmed.is_empty() {
            break;
        }
        let content_part = if trimmed.starts_with('>') {
            // Quoted continuation: drop the '>' marker and its padding.
            trimmed.strip_prefix('>').unwrap_or("").trim_start()
        } else {
            // Lazy (unquoted) continuation is allowed unless the line opens
            // a new block element.
            let starts_new_block = is_atx_header(trimmed)
                || trimmed.starts_with("```")
                || trimmed.starts_with("~~~")
                || (trimmed.starts_with("---")
                    && trimmed.chars().all(|c| c == '-' || c.is_whitespace()))
                || (trimmed.starts_with("===")
                    && trimmed.chars().all(|c| c == '=' || c.is_whitespace()))
                || (trimmed.starts_with("***")
                    && trimmed.chars().all(|c| c == '*' || c.is_whitespace()));
            if starts_new_block {
                break;
            }
            trimmed
        };
        content.push_str(content_part);
        content.push('\n');
        // Advance only after deciding the peeked line belongs to the callout.
        lines.next();
    }
    content.trim().to_string()
}
/// Recognizes the opening line of a fenced admonition such as
/// `::: {.note}` or `::: {.warning #my-id}`.
///
/// Returns the admonition type plus an optional `#id` attribute, or `None`
/// when the line is not an admonition opener.
fn parse_fenced_admonition_start(
    line: &str,
) -> Option<(String, Option<String>)> {
    let after_colons = line.trim().strip_prefix(":::")?.trim_start();
    let body = after_colons.strip_prefix("{.")?;
    let close = body.find('}')?;
    let attrs = &body[..close];
    // First token is the type; any '#'-prefixed token supplies the id.
    let adm_type = attrs.split_whitespace().next()?;
    let id = attrs
        .split_whitespace()
        .find(|part| part.starts_with('#'))
        .map(|id_part| id_part[1..].to_string());
    Some((adm_type.to_string(), id))
}
/// Collects the body of a fenced `:::` block: lines are consumed up to and
/// including the closing `:::` fence (the fence itself is dropped), and the
/// trimmed body is returned.
fn collect_fenced_content(
    lines: &mut std::iter::Peekable<std::str::Lines>,
) -> String {
    let body: String = lines
        .by_ref()
        .take_while(|line| !line.trim().starts_with(":::"))
        .flat_map(|line| [line, "\n"])
        .collect();
    body.trim().to_string()
}
/// Parses a `::: {.figure #id}` block: the opening line, a mandatory
/// `#`-prefixed caption on the very next line, then free-form content up to
/// the closing `:::` fence.
///
/// Returns `(id, caption, content)`, or `None` when the opening line is not
/// a figure or the caption is missing. Note the caption line is consumed
/// even when it turns out not to start with `#`.
fn parse_figure_block(
    line: &str,
    lines: &mut std::iter::Peekable<std::str::Lines>,
) -> Option<(Option<String>, String, String)> {
    let after_colons = line.trim().strip_prefix(":::")?.trim_start();
    if !after_colons.starts_with("{.figure") {
        return None;
    }
    // Optional "#id" inside the attribute braces.
    let id = match (after_colons.find('#'), after_colons.find('}')) {
        (Some(hash), Some(close)) if hash < close => {
            Some(after_colons[hash + 1..close].trim().to_string())
        },
        _ => None,
    };
    // The caption must be the next line and start with '#'.
    let title = lines.next()?.trim().strip_prefix('#')?.trim().to_string();
    // Everything up to the closing fence is the figure body.
    let body: String = lines
        .by_ref()
        .take_while(|body_line| !body_line.trim().starts_with(":::"))
        .flat_map(|body_line| [body_line, "\n"])
        .collect();
    Some((id, title, body.trim().to_string()))
}
/// Renders an admonition `<div>` with a capitalized title paragraph and the
/// raw body content, optionally carrying an `id` attribute.
fn render_admonition(
    adm_type: &str,
    id: Option<&str>,
    content: &str,
) -> String {
    let title = crate::utils::capitalize_first(adm_type);
    let id_attr = id.map_or_else(String::new, |anchor| format!(" id=\"{anchor}\""));
    format!(
        "<div class=\"admonition {adm_type}\"{id_attr}>\n<p \
         class=\"admonition-title\">{title}</p>\n\n{content}\n\n</div>\n"
    )
}
/// Renders a `<figure>` element with a caption, optionally carrying an `id`
/// attribute.
fn render_figure(id: Option<&str>, title: &str, content: &str) -> String {
    let mut out = String::from("<figure");
    if let Some(anchor) = id {
        let _ = write!(out, " id=\"{anchor}\"");
    }
    let _ = write!(
        out,
        ">\n<figcaption>{title}</figcaption>\n{content}\n</figure>"
    );
    out
}
#[cfg(feature = "nixpkgs")]
#[must_use]
#[allow(
    clippy::implicit_hasher,
    reason = "Standard HashMap sufficient for this use case"
)]
/// Rewrites `<span class="manpage-reference">` elements in rendered HTML
/// into links, using `manpage_urls` to map the span text (e.g. "foo(1)") to
/// a URL. Spans without a mapping are left untouched.
///
/// Wrapped in `process_safe` (see `super::process`); the final `""`
/// argument is forwarded to it.
pub fn process_manpage_references(
    html: &str,
    manpage_urls: Option<&std::collections::HashMap<String, String>>,
) -> String {
    process_safe(
        html,
        |html| {
            use kuchikikiki::NodeRef;
            use tendril::TendrilSink;
            let document = kuchikikiki::parse_html().one(html);
            // Collect replacements first; the DOM is mutated only after the
            // selector iteration has finished.
            let mut to_replace = Vec::new();
            for span_node in safe_select(&document, "span.manpage-reference") {
                let span_el = span_node;
                let span_text = span_el.text_contents();
                if let Some(urls) = manpage_urls {
                    if let Some(url) = urls.get(&span_text) {
                        // Mapping values may be bare URLs or pre-rendered
                        // `<a href="...">` HTML; normalize to the bare URL.
                        let clean_url = extract_url_from_html(url);
                        let link = NodeRef::new_element(
                            markup5ever::QualName::new(
                                None,
                                markup5ever::ns!(html),
                                markup5ever::local_name!("a"),
                            ),
                            vec![
                                (
                                    kuchikikiki::ExpandedName::new("", "href"),
                                    kuchikikiki::Attribute {
                                        prefix: None,
                                        value: clean_url.into(),
                                    },
                                ),
                                (
                                    kuchikikiki::ExpandedName::new("", "class"),
                                    kuchikikiki::Attribute {
                                        prefix: None,
                                        value: "manpage-reference".into(),
                                    },
                                ),
                            ],
                        );
                        // Link text mirrors the original span text.
                        link.append(NodeRef::new_text(span_text.clone()));
                        to_replace.push((span_el.clone(), link));
                    }
                }
            }
            // Swap each span for its replacement link in place.
            for (old, new) in to_replace {
                old.insert_before(new);
                old.detach();
            }
            let mut out = Vec::new();
            let _ = document.serialize(&mut out);
            // Serialization/UTF-8 failure degrades to an empty string.
            String::from_utf8(out).unwrap_or_default()
        },
        "",
    )
}
#[cfg(feature = "ndg-flavored")]
#[must_use]
#[allow(
    clippy::implicit_hasher,
    reason = "Standard HashSet sufficient for this use case"
)]
/// Wraps `<code class="nixos-option">` elements in rendered HTML with links
/// to `options.html#option-…` anchors.
///
/// When `valid_options` is `Some`, only names in the set are linked; `None`
/// links everything. Code elements already inside an
/// `<a class="option-reference">` ancestor are skipped to avoid re-linking.
/// Wrapped in `process_safe` (see `super::process`); the final `""`
/// argument is forwarded to it.
pub fn process_option_references(
    html: &str,
    valid_options: Option<&std::collections::HashSet<String>>,
) -> String {
    use kuchikikiki::{Attribute, ExpandedName, NodeRef};
    use markup5ever::{QualName, local_name, ns};
    use tendril::TendrilSink;
    process_safe(
        html,
        |html| {
            let document = kuchikikiki::parse_html().one(html);
            // Replacements are applied after iteration completes.
            let mut to_replace = vec![];
            for code_node in safe_select(&document, "code.nixos-option") {
                let code_el = code_node;
                let code_text = code_el.text_contents();
                // Walk ancestors looking for an existing option-reference link.
                let mut is_already_option_ref = false;
                let mut current = code_el.parent();
                while let Some(parent) = current {
                    if let Some(element) = parent.as_element()
                        && element.name.local == local_name!("a")
                        && let Some(class_attr) =
                            element.attributes.borrow().get(local_name!("class"))
                        && class_attr.contains("option-reference")
                    {
                        is_already_option_ref = true;
                        break;
                    }
                    current = parent.parent();
                }
                if !is_already_option_ref {
                    // `None` for valid_options means "link everything".
                    let should_link =
                        valid_options.is_none_or(|opts| opts.contains(code_text.as_str()));
                    if should_link {
                        // Anchor ids replace '.' with '-' (matches the inline
                        // `{option}` role in `format_role_markup`).
                        let option_id = format!("option-{}", code_text.replace('.', "-"));
                        let attrs = vec![
                            (ExpandedName::new("", "href"), Attribute {
                                prefix: None,
                                value: format!("options.html#{option_id}"),
                            }),
                            (ExpandedName::new("", "class"), Attribute {
                                prefix: None,
                                value: "option-reference".into(),
                            }),
                        ];
                        // Rebuild as <a><code>text</code></a>.
                        let a = NodeRef::new_element(
                            QualName::new(None, ns!(html), local_name!("a")),
                            attrs,
                        );
                        let code = NodeRef::new_element(
                            QualName::new(None, ns!(html), local_name!("code")),
                            vec![],
                        );
                        code.append(NodeRef::new_text(code_text.clone()));
                        a.append(code);
                        to_replace.push((code_el.clone(), a));
                    }
                }
            }
            for (old, new) in to_replace {
                old.insert_before(new);
                old.detach();
            }
            let mut out = Vec::new();
            let _ = document.serialize(&mut out);
            // Serialization/UTF-8 failure degrades to an empty string.
            String::from_utf8(out).unwrap_or_default()
        },
        "",
    )
}
/// Normalizes a manpage mapping value: when it is pre-rendered
/// `<a href="...">` HTML, the bare href URL is extracted; otherwise the
/// input is returned unchanged.
fn extract_url_from_html(url_or_html: &str) -> &str {
    if let Some(rest) = url_or_html.strip_prefix("<a href=\"") {
        if let Some(end) = rest.find('"') {
            return &rest[..end];
        }
    }
    url_or_html
}
#[cfg(test)]
mod tests {
    use super::*;

    // `is_atx_header` is the block-boundary detector used when collecting
    // GitHub-callout continuation lines; these tests pin which lines it
    // accepts and rejects.

    // Headers of all six levels, tab separators, and bare hash runs count.
    #[test]
    fn test_is_atx_header_valid_headers() {
        assert!(is_atx_header("# Header"));
        assert!(is_atx_header("## Header"));
        assert!(is_atx_header("### Header"));
        assert!(is_atx_header("#### Header"));
        assert!(is_atx_header("##### Header"));
        assert!(is_atx_header("###### Header"));
        assert!(is_atx_header("#\tHeader"));
        assert!(is_atx_header("##\tHeader"));
        assert!(is_atx_header("#"));
        assert!(is_atx_header("##"));
        assert!(is_atx_header("###"));
        assert!(is_atx_header("####"));
        assert!(is_atx_header("#####"));
        assert!(is_atx_header("######"));
        assert!(is_atx_header("# Header with multiple spaces"));
        assert!(is_atx_header("## Header"));
    }

    // More than six hashes, missing whitespace after the hashes, and lines
    // not starting with '#' are rejected.
    #[test]
    fn test_is_atx_header_invalid_headers() {
        assert!(!is_atx_header("####### Too many hashes"));
        assert!(!is_atx_header("######## Even more"));
        assert!(!is_atx_header("#NoSpace"));
        assert!(!is_atx_header("##NoSpace"));
        assert!(!is_atx_header("Not # a header"));
        assert!(!is_atx_header(""));
        assert!(!is_atx_header("Regular text"));
        assert!(!is_atx_header("#hashtag"));
        assert!(!is_atx_header("##hashtag"));
        assert!(!is_atx_header("#123"));
        assert!(!is_atx_header("##abc"));
        assert!(!is_atx_header("#!important"));
        assert!(!is_atx_header("#@mention"));
        assert!(!is_atx_header("#$variable"));
    }

    // Leading whitespace disqualifies; trailing whitespace/newlines do not.
    #[test]
    fn test_is_atx_header_edge_cases() {
        assert!(!is_atx_header(" # Header"));
        assert!(!is_atx_header(" ## Header"));
        assert!(is_atx_header("# "));
        assert!(is_atx_header("## "));
        assert!(is_atx_header("# Header\n"));
        assert!(is_atx_header("## Header\n"));
        assert!(is_atx_header("# \t Header"));
        assert!(is_atx_header("## \tHeader"));
    }

    // Mirrors how callout collection distinguishes new sections from
    // hashtag-like text.
    #[test]
    fn test_is_atx_header_blockquote_context() {
        assert!(is_atx_header("# New Section"));
        assert!(is_atx_header("## Subsection"));
        assert!(!is_atx_header("#tag"));
        assert!(!is_atx_header("##issue-123"));
        assert!(!is_atx_header("###no-space"));
        assert!(is_atx_header("###### Level 6"));
        assert!(!is_atx_header("####### Not valid"));
    }
}