use std::fs;
use std::io::{self, IsTerminal, Read};
use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex};
use clap::Parser;
use similar::{ChangeTag, TextDiff};
use panache::{format, parse};
use serde_json::json;
mod cache;
mod cli;
mod diagnostic_renderer;
use cache::{
CachedLintDocument, CliCache, FormatCacheMode, FormatStoreArgs, global_cache_base_dir,
resolve_cache_dir_for_cli,
};
use cli::{Cli, ColorMode, Commands, DebugChecks, DebugCommands, ParseOutput};
use diagnostic_renderer::print_diagnostics;
// File extensions accepted as Markdown-family documents when expanding CLI
// paths. Matching is by exact (case-sensitive) extension string, hence the
// explicit upper/lowercase R Markdown variants.
const SUPPORTED_EXTENSIONS: &[&str] = &[
    "md",
    "qmd",
    "Rmd",
    "rmd",
    "Rmarkdown",
    "rmarkdown",
    "markdown",
    "mdown",
    "mkd",
];
/// Initialize the global logger.
///
/// Without `debug_log`, uses the standard `RUST_LOG`-driven `env_logger`
/// setup. With a path (LSP debug mode), defaults the filter to
/// `panache=debug`, adds millisecond timestamps, and appends log output to
/// that file. If the file cannot be opened, a warning is printed and
/// logging falls back to the default target instead of silently claiming
/// file logging is active (the original swallowed the open error and
/// logged "enabled" regardless).
fn init_logger(debug_log: Option<&Path>) {
    let Some(path) = debug_log else {
        env_logger::Builder::from_default_env().init();
        return;
    };
    let mut builder = env_logger::Builder::from_env(
        env_logger::Env::default().default_filter_or("panache=debug"),
    );
    let open_result = std::fs::OpenOptions::new()
        .create(true)
        .append(true)
        .open(path);
    let piped_to_file = match open_result {
        Ok(file) => {
            builder.target(env_logger::Target::Pipe(Box::new(file)));
            true
        }
        Err(err) => {
            // Surface the failure instead of silently logging elsewhere.
            eprintln!(
                "Warning: could not open debug log {}: {err}; logging to stderr",
                path.display()
            );
            false
        }
    };
    builder.format_timestamp_millis();
    builder.init();
    // Only claim file logging when the pipe target was actually installed.
    if piped_to_file {
        log::info!("LSP debug logging enabled at {}", path.display());
    }
}
/// Resolve (and create the directory for) the LSP debug log file, i.e.
/// `<state dir>/panache/lsp-debug.log`, falling back to the current
/// directory when no platform state dir is available.
fn init_lsp_debug_log() -> io::Result<PathBuf> {
    let state_root = dirs::state_dir().unwrap_or_else(|| PathBuf::from("."));
    let log_dir = state_root.join("panache");
    fs::create_dir_all(&log_dir)?;
    Ok(log_dir.join("lsp-debug.log"))
}
/// Compiled gitignore-style matchers built from the config's exclude and
/// include pattern lists.
struct PathFilters {
    // Paths matching these patterns are skipped.
    exclude: ignore::gitignore::Gitignore,
    // Files found during directory walks must match these patterns to be
    // processed; explicitly-named files bypass this matcher.
    include: ignore::gitignore::Gitignore,
}
/// Exclude patterns in effect: the configured `exclude` list (or the
/// built-in defaults when unset) plus any `extend_exclude` additions.
fn effective_exclude_patterns(cfg: &panache::Config) -> Vec<String> {
    let defaults = || {
        panache::config::DEFAULT_EXCLUDE_PATTERNS
            .iter()
            .map(|s| s.to_string())
            .collect::<Vec<String>>()
    };
    let base = match cfg.exclude.clone() {
        Some(explicit) => explicit,
        None => defaults(),
    };
    base.into_iter()
        .chain(cfg.extend_exclude.iter().cloned())
        .collect()
}
/// Include patterns in effect: the configured `include` list (or the
/// built-in defaults when unset) plus any `extend_include` additions.
fn effective_include_patterns(cfg: &panache::Config) -> Vec<String> {
    let defaults = || {
        panache::config::DEFAULT_INCLUDE_PATTERNS
            .iter()
            .map(|s| s.to_string())
            .collect::<Vec<String>>()
    };
    let base = match cfg.include.clone() {
        Some(explicit) => explicit,
        None => defaults(),
    };
    base.into_iter()
        .chain(cfg.extend_include.iter().cloned())
        .collect()
}
/// Compile one pattern list into a `Gitignore` matcher rooted at `root`.
fn build_gitignore(
    root: &Path,
    patterns: &[String],
) -> io::Result<ignore::gitignore::Gitignore> {
    let mut builder = ignore::gitignore::GitignoreBuilder::new(root);
    for pattern in patterns {
        builder.add_line(None, pattern).map_err(io::Error::other)?;
    }
    builder.build().map_err(io::Error::other)
}

/// Build the exclude/include matchers used when expanding CLI paths.
///
/// The original duplicated the builder loop for both matchers; the shared
/// logic now lives in `build_gitignore`.
fn build_path_filters(root: &Path, cfg: &panache::Config) -> io::Result<PathFilters> {
    let exclude = build_gitignore(root, &effective_exclude_patterns(cfg))?;
    let include = build_gitignore(root, &effective_include_patterns(cfg))?;
    Ok(PathFilters { exclude, include })
}
/// Express `path` relative to `root`.
///
/// First tries a plain prefix strip; if that fails (e.g. differing `./`
/// prefixes or symlinks), canonicalizes both and retries. Returns `None`
/// when `path` does not lie under `root` (or canonicalization fails).
fn relative_path_from_root(path: &Path, root: &Path) -> Option<PathBuf> {
    match path.strip_prefix(root) {
        Ok(rel) => Some(rel.to_path_buf()),
        Err(_) => {
            let abs_path = path.canonicalize().ok()?;
            let abs_root = root.canonicalize().ok()?;
            abs_path
                .strip_prefix(&abs_root)
                .ok()
                .map(Path::to_path_buf)
        }
    }
}
/// Expand CLI path arguments into the concrete list of files to process.
///
/// Explicitly-named files are kept if they have a supported extension
/// (exclude patterns apply to them only under `force_exclude`);
/// directories are walked recursively with gitignore semantics and each
/// found file must survive the exclude patterns and match the include
/// patterns. Missing paths and unsupported files only produce warnings.
fn expand_paths(
    paths: &[PathBuf],
    cfg: &panache::Config,
    filter_root: &Path,
    force_exclude: bool,
) -> io::Result<Vec<PathBuf>> {
    use ignore::WalkBuilder;
    let mut files = Vec::new();
    for path in paths {
        // Patterns match relative to `filter_root` when the path is under
        // it; otherwise relative to the path itself (directories) or its
        // parent (files).
        let matcher_root = if relative_path_from_root(path, filter_root).is_some() {
            filter_root
        } else if path.is_dir() {
            path.as_path()
        } else {
            path.parent().unwrap_or(filter_root)
        };
        let filters = build_path_filters(matcher_root, cfg)?;
        if path.is_file() {
            let rel_path = relative_path_from_root(path, matcher_root)
                .or_else(|| path.file_name().map(PathBuf::from))
                .unwrap_or_else(|| path.to_path_buf());
            // Explicit files bypass exclude patterns unless --force-exclude.
            if force_exclude
                && filters
                    .exclude
                    .matched_path_or_any_parents(&rel_path, false)
                    .is_ignore()
            {
                continue;
            }
            if let Some(ext) = path.extension().and_then(|e| e.to_str()) {
                if SUPPORTED_EXTENSIONS.contains(&ext) {
                    files.push(path.clone());
                } else {
                    eprintln!(
                        "Warning: Skipping unsupported file type: {}",
                        path.display()
                    );
                }
            } else {
                eprintln!(
                    "Warning: Skipping file without extension: {}",
                    path.display()
                );
            }
        } else if path.is_dir() {
            // Recursive walk honoring local and global .gitignore files,
            // without skipping hidden entries.
            let walker = WalkBuilder::new(path)
                .hidden(false)
                .git_ignore(true)
                .git_global(true)
                .build();
            for entry in walker {
                let entry = entry.map_err(io::Error::other)?;
                let entry_path = entry.path();
                let rel_path = relative_path_from_root(entry_path, matcher_root)
                    .unwrap_or_else(|| entry_path.to_path_buf());
                if entry_path.is_dir() {
                    continue;
                }
                // Must NOT match exclude patterns...
                if filters
                    .exclude
                    .matched_path_or_any_parents(&rel_path, false)
                    .is_ignore()
                {
                    continue;
                }
                // ...and MUST match include patterns.
                if !filters.include.matched(&rel_path, false).is_ignore() {
                    continue;
                }
                if entry_path.is_file() {
                    files.push(entry_path.to_path_buf());
                }
            }
        } else {
            eprintln!("Warning: Path not found: {}", path.display());
        }
    }
    Ok(files)
}
/// Decide how many worker threads to use for `n_files` inputs.
///
/// A single file or an explicit `--jobs 1` always runs sequentially;
/// `cli_jobs == 0` means "auto" (the machine's available parallelism);
/// any explicit count is capped at the number of files.
fn effective_parallelism(cli_jobs: usize, n_files: usize) -> usize {
    match (cli_jobs, n_files) {
        (_, 0 | 1) | (1, _) => 1,
        (0, _) => std::thread::available_parallelism()
            .map(|n| n.get())
            .unwrap_or(1)
            .min(n_files),
        (jobs, _) => jobs.min(n_files),
    }
}
/// Build a rayon thread pool with `n` named worker threads.
///
/// Panics if the pool cannot be constructed (treated as a startup bug).
fn build_pool(n: usize) -> rayon::ThreadPool {
    rayon::ThreadPoolBuilder::new()
        .num_threads(n)
        // Named threads make panics and profiles easier to attribute.
        .thread_name(|i| format!("panache-worker-{i}"))
        .build()
        .expect("failed to build rayon thread pool")
}
/// Parse a 1-indexed, inclusive `START:END` line range.
///
/// Rejects malformed input, zero line numbers, and reversed ranges, with
/// a user-facing message for each case.
fn parse_range(range_str: &str) -> Result<(usize, usize), String> {
    let pieces: Vec<&str> = range_str.split(':').collect();
    let [start_text, end_text] = pieces.as_slice() else {
        return Err(format!(
            "Invalid range format '{}'. Expected START:END (e.g., 5:10)",
            range_str
        ));
    };
    let start: usize = start_text
        .parse()
        .map_err(|_| format!("Invalid start line '{}'", start_text))?;
    let end: usize = end_text
        .parse()
        .map_err(|_| format!("Invalid end line '{}'", end_text))?;
    if start == 0 || end == 0 {
        return Err("Line numbers must be 1-indexed (start from 1)".to_string());
    }
    if start > end {
        return Err(format!(
            "Start line ({}) must be less than or equal to end line ({})",
            start, end
        ));
    }
    Ok((start, end))
}
/// Read an entire document: from `path` when given, otherwise from stdin.
fn read_all(path: Option<&PathBuf>) -> io::Result<String> {
    if let Some(p) = path {
        return fs::read_to_string(p);
    }
    let mut contents = String::new();
    io::stdin().read_to_string(&mut contents)?;
    Ok(contents)
}
/// Format a count with the matching singular/plural noun, e.g.
/// `"1 file reformatted"` vs `"2 files reformatted"`.
fn file_count_label(count: usize, singular: &str, plural: &str) -> String {
    let noun = if count == 1 { singular } else { plural };
    format!("{count} {noun}")
}
/// Recursively delete `path` if it exists.
///
/// Returns `Ok(true)` when something was removed, `Ok(false)` when the
/// directory was already absent, and any other error unchanged.
fn remove_dir_if_exists(path: &Path) -> io::Result<bool> {
    if let Err(err) = fs::remove_dir_all(path) {
        if err.kind() == io::ErrorKind::NotFound {
            return Ok(false);
        }
        return Err(err);
    }
    Ok(true)
}
/// Open the CLI cache, degrading to "no cache" (with a logged warning)
/// when opening fails — caching is an optimization, never a hard error.
fn open_cli_cache_best_effort(
    cfg: &panache::Config,
    explicit_config: Option<&Path>,
    start_dir: &Path,
) -> Option<CliCache> {
    CliCache::open(cfg, explicit_config, start_dir).unwrap_or_else(|err| {
        log::warn!("Disabling CLI cache for this run: {err}");
        None
    })
}
/// Directory where config discovery should start: the input file's parent
/// (or `.` when it has none), else the current working directory.
fn start_dir_for(input_path: Option<&Path>) -> io::Result<PathBuf> {
    match input_path {
        Some(p) => {
            let parent = p.parent().unwrap_or(Path::new("."));
            Ok(parent.to_path_buf())
        }
        None => std::env::current_dir(),
    }
}
/// True when at least one CLI target is not a directory — i.e. the user
/// named a file explicitly (or gave a path that does not exist).
fn has_explicit_file_targets(paths: &[PathBuf]) -> bool {
    !paths.iter().all(|path| path.is_dir())
}
fn path_matching_root(
explicit_config: Option<&Path>,
discovered_config: Option<&Path>,
traversal_start_dir: &Path,
) -> io::Result<PathBuf> {
let cwd = std::env::current_dir()?;
if explicit_config.is_some() {
return Ok(cwd);
}
if let Some(config_path) = discovered_config
&& let Some(parent) = config_path.parent()
&& relative_path_from_root(traversal_start_dir, parent).is_some()
{
return Ok(parent.to_path_buf());
}
Ok(cwd)
}
fn load_config_for_cli(
config_path: Option<&Path>,
isolated: bool,
cli_cache_dir: Option<&Path>,
start_dir: &Path,
input_path: Option<&Path>,
) -> io::Result<(panache::Config, Option<PathBuf>)> {
let mut loaded = if !isolated {
panache::config::load(config_path, start_dir, input_path)?
} else {
let mut cfg = panache::Config::default();
if let Some(input_path) = input_path
&& let Some(ext) = input_path.extension().and_then(|e| e.to_str())
{
let detected_flavor = match ext.to_lowercase().as_str() {
"qmd" => Some(panache::config::Flavor::Quarto),
"rmd" | "rmarkdown" => Some(panache::config::Flavor::RMarkdown),
"md" => Some(cfg.flavor),
_ => None,
};
if let Some(flavor) = detected_flavor {
cfg.flavor = flavor;
cfg.extensions = panache::config::Extensions::for_flavor(flavor);
}
}
(cfg, None)
};
if let Some(cache_dir) = cli_cache_dir {
loaded.0.cache_dir = Some(cache_dir.to_string_lossy().to_string());
}
Ok(loaded)
}
/// Decide whether to emit colored output, feeding the live environment
/// (`NO_COLOR`, `TERM`, stdout-is-a-tty) into the pure `resolve_color`.
fn color_enabled(mode: ColorMode, no_color: bool) -> bool {
    resolve_color(
        mode,
        no_color,
        std::env::var_os("NO_COLOR").is_some(),
        std::env::var_os("TERM").as_deref(),
        io::stdout().is_terminal(),
    )
}
/// Pure color-output decision (separated from environment reads so it is
/// deterministic).
///
/// Precedence: the `--no-color` flag always wins; then an explicit
/// always/never mode; in auto mode color requires `NO_COLOR` to be unset,
/// a `TERM` that is present and not `dumb`, and stdout being a terminal.
fn resolve_color(
    mode: ColorMode,
    no_color_flag: bool,
    no_color_env: bool,
    term_env: Option<&std::ffi::OsStr>,
    stdout_is_terminal: bool,
) -> bool {
    if no_color_flag {
        return false;
    }
    match mode {
        ColorMode::Always => true,
        ColorMode::Never => false,
        ColorMode::Auto => {
            let term_supports_color = match term_env {
                None => false,
                Some(term) => term != "dumb",
            };
            !no_color_env && term_supports_color && stdout_is_terminal
        }
    }
}
/// Print a line diff between `original` and `formatted` with 3 lines of
/// context per hunk, optionally colorized with ANSI escapes.
fn print_diff(file_path: &str, original: &str, formatted: &str, use_color: bool) {
    let diff = TextDiff::from_lines(original, formatted);
    for (idx, group) in diff.grouped_ops(3).iter().enumerate() {
        // Separator between consecutive hunks.
        if idx > 0 {
            println!("---");
        }
        // Header points at the hunk's first old-side line (1-indexed).
        println!("Diff in {}:{}:", file_path, group[0].old_range().start + 1);
        for op in group {
            for change in diff.iter_changes(op) {
                // Red for deletions, green for insertions, reset for context.
                let (sign, style) = match change.tag() {
                    ChangeTag::Delete => ("-", "\x1b[31m"), ChangeTag::Insert => ("+", "\x1b[32m"), ChangeTag::Equal => (" ", "\x1b[0m"), };
                if use_color {
                    print!("{}{}{}", style, sign, change.value());
                } else {
                    print!("{}{}", sign, change.value());
                }
                // Close the color span after changed lines only (Equal lines
                // already start with the reset escape as their "style").
                if use_color && change.tag() != ChangeTag::Equal {
                    print!("\x1b[0m");
                }
            }
        }
    }
}
/// Which debug-format invariant a check/failure refers to.
#[derive(Clone, Copy)]
enum CheckKind {
    // Parsing then re-serializing must reproduce the input byte-for-byte.
    Losslessness,
    // Formatting twice must equal formatting once.
    Idempotency,
}
impl CheckKind {
    /// Human-readable name, used in messages, JSON output, and artifact
    /// file names.
    fn label(self) -> &'static str {
        match self {
            CheckKind::Losslessness => "losslessness",
            CheckKind::Idempotency => "idempotency",
        }
    }
}
/// Map a path string to a filesystem-safe artifact stem: every character
/// outside `[A-Za-z0-9._-]` is replaced with `_`.
fn sanitize_path_for_filename(path: &str) -> String {
    path.chars()
        .map(|c| match c {
            'a'..='z' | 'A'..='Z' | '0'..='9' | '.' | '-' | '_' => c,
            _ => '_',
        })
        .collect()
}
/// One failed debug check: the pair of texts that were expected to match.
#[derive(Clone)]
struct DebugFailure {
    // Which invariant failed.
    kind: CheckKind,
    // Left side of the comparison (the input for losslessness; the first
    // format pass for idempotency).
    left: String,
    // Right side (the parsed round-trip text, or the second format pass).
    right: String,
}
/// Render the `--report` Markdown document: a summary header followed by
/// one numbered section (with a full diff) per failure.
fn build_debug_failure_report(
    checks: DebugChecks,
    files_checked: usize,
    failures: &[(String, DebugFailure)],
) -> String {
    let mut out = String::new();
    out.push_str("# Debug-format regression report\n\n");
    out.push_str(&format!(
        "- Checks: `{}`\n- Files checked: {}\n- Failures: {}\n\n",
        format!("{:?}", checks).to_lowercase(),
        files_checked,
        failures.len()
    ));
    if failures.is_empty() {
        out.push_str("All checks passed.\n");
        return out;
    }
    out.push_str("## Failures\n\n");
    for (idx, (file, failure)) in failures.iter().enumerate() {
        let diff = TextDiff::from_lines(&failure.left, &failure.right);
        // 1-indexed old-side line of the first change, for orientation.
        let location_line = diff
            .grouped_ops(0)
            .first()
            .and_then(|group| group.first().map(|op| op.old_range().start + 1))
            .unwrap_or(1);
        out.push_str(&format!(
            "### {}. `{}` ({})\n\n",
            idx + 1,
            file,
            failure.kind.label()
        ));
        out.push_str(&format!("- Approx. diff start line: {}\n\n", location_line));
        out.push_str("```diff\n");
        // NOTE(review): change.value() keeps the source line's own newline;
        // if the final line lacks one, the closing fence below fuses onto
        // it — confirm whether inputs always end with a newline.
        for change in diff.iter_all_changes() {
            let sign = match change.tag() {
                ChangeTag::Delete => "-",
                ChangeTag::Insert => "+",
                ChangeTag::Equal => " ",
            };
            out.push_str(sign);
            out.push_str(change.value());
        }
        out.push_str("```\n\n");
    }
    out
}
/// Intermediate texts produced while running debug checks on one document.
#[derive(Default)]
struct DebugRunArtifacts {
    // (input, parsed-tree text) when the losslessness check ran.
    losslessness: Option<(String, String)>,
    // (input, formatted once, formatted twice) when the idempotency check
    // ran.
    idempotency: Option<(String, String, String)>,
    // One entry per check whose left/right texts differed.
    failures: Vec<DebugFailure>,
}
/// Dump debug-check artifacts for one document into `dump_dir`.
///
/// Per-check input/output texts are written when that check failed, or
/// unconditionally when `dump_passes` is set; every failure additionally
/// gets a `<stem>.<kind>.left/right.txt` pair. Creates `dump_dir` if it
/// does not exist.
fn write_debug_artifacts(
    dump_dir: &Path,
    stem: &str,
    artifacts: &DebugRunArtifacts,
    dump_passes: bool,
) -> io::Result<()> {
    fs::create_dir_all(dump_dir)?;
    // Losslessness artifacts: the input and the parsed CST's text.
    if let Some((input, tree_text)) = artifacts.losslessness.as_ref()
        && (dump_passes
            || artifacts
                .failures
                .iter()
                .any(|failure| matches!(failure.kind, CheckKind::Losslessness)))
    {
        fs::write(
            dump_dir.join(format!("{stem}.losslessness.input.txt")),
            input,
        )?;
        fs::write(
            dump_dir.join(format!("{stem}.losslessness.parsed.txt")),
            tree_text,
        )?;
    }
    // Idempotency artifacts: the input and both format passes.
    if let Some((input, once, twice)) = artifacts.idempotency.as_ref()
        && (dump_passes
            || artifacts
                .failures
                .iter()
                .any(|failure| matches!(failure.kind, CheckKind::Idempotency)))
    {
        fs::write(
            dump_dir.join(format!("{stem}.idempotency.input.txt")),
            input,
        )?;
        fs::write(dump_dir.join(format!("{stem}.idempotency.once.txt")), once)?;
        fs::write(
            dump_dir.join(format!("{stem}.idempotency.twice.txt")),
            twice,
        )?;
    }
    // Left/right pair for every recorded failure.
    for failure in &artifacts.failures {
        let kind = failure.kind.label();
        fs::write(
            dump_dir.join(format!("{stem}.{kind}.left.txt")),
            &failure.left,
        )?;
        fs::write(
            dump_dir.join(format!("{stem}.{kind}.right.txt")),
            &failure.right,
        )?;
    }
    Ok(())
}
/// Run the selected debug checks on one document and collect artifacts.
///
/// Losslessness: parse the input and compare the CST's text back against
/// the input. Idempotency: format once, format the result again, and
/// compare the two outputs. `target_label` is used only in log messages.
fn run_debug_checks_for_content(
    input: &str,
    cfg: &panache::Config,
    checks: DebugChecks,
    target_label: &str,
) -> DebugRunArtifacts {
    let mut artifacts = DebugRunArtifacts::default();
    log::debug!(
        "debug format: start checks={} target={}",
        format!("{:?}", checks).to_lowercase(),
        target_label
    );
    if matches!(checks, DebugChecks::Losslessness | DebugChecks::All) {
        log::debug!("debug format: losslessness start target={}", target_label);
        // The CST must reproduce the input byte-for-byte.
        let tree_text = parse(input, Some(cfg.clone())).text().to_string();
        artifacts.losslessness = Some((input.to_string(), tree_text.clone()));
        if input != tree_text {
            artifacts.failures.push(DebugFailure {
                kind: CheckKind::Losslessness,
                left: input.to_string(),
                right: tree_text,
            });
        }
        log::debug!("debug format: losslessness end target={}", target_label);
    }
    if matches!(checks, DebugChecks::Idempotency | DebugChecks::All) {
        log::debug!(
            "debug format: idempotency pass1 start target={}",
            target_label
        );
        let once = format(input, Some(cfg.clone()), None);
        log::debug!(
            "debug format: idempotency pass1 end target={}",
            target_label
        );
        log::debug!(
            "debug format: idempotency pass2 start target={}",
            target_label
        );
        // Formatting the formatted output must be a fixed point.
        let twice = format(&once, Some(cfg.clone()), None);
        log::debug!(
            "debug format: idempotency pass2 end target={}",
            target_label
        );
        artifacts.idempotency = Some((input.to_string(), once.clone(), twice.clone()));
        if once != twice {
            artifacts.failures.push(DebugFailure {
                kind: CheckKind::Idempotency,
                left: once,
                right: twice,
            });
        }
    }
    log::debug!(
        "debug format: end target={} failures={}",
        target_label,
        artifacts.failures.len()
    );
    artifacts
}
fn main() -> io::Result<()> {
let cli = Cli::parse();
let use_color = color_enabled(cli.color, cli.no_color);
panache::set_warning_color_override(use_color);
let debug_log = match &cli.command {
Commands::Lsp { debug } if *debug => Some(init_lsp_debug_log()?),
_ => None,
};
init_logger(debug_log.as_deref());
match cli.command {
Commands::Parse {
file,
to,
json,
verify,
} => {
if verify {
eprintln!(
"Warning: `panache parse --verify` is deprecated; use `panache debug format --checks losslessness`."
);
}
let input_path = file.as_deref().or(cli.stdin_filename.as_deref());
let start_dir = start_dir_for(input_path)?;
let (cfg, cfg_path) = load_config_for_cli(
cli.config.as_deref(),
cli.isolated,
cli.cache_dir.as_deref(),
&start_dir,
input_path,
)?;
if let Some(path) = &cfg_path {
log::debug!("Using config from: {}", path.display());
} else {
log::debug!("Using default config");
}
let input = read_all(file.as_ref())?;
let tree = parse(&input, Some(cfg));
if verify {
let tree_text = tree.text().to_string();
if input != tree_text {
let file_label = file.as_ref().and_then(|p| p.to_str()).unwrap_or("<stdin>");
eprintln!(
"Verification failed (losslessness): parser output differs from input"
);
print_diff(file_label, &input, &tree_text, use_color);
std::process::exit(1);
}
}
if let Some(json_path) = json {
let json_value = panache::syntax::cst_to_json(&tree);
let json_output =
serde_json::to_string_pretty(&json_value).map_err(io::Error::other)?;
fs::write(json_path, json_output)?;
}
if !cli.quiet {
match to {
ParseOutput::Cst => println!("{:#?}", tree),
ParseOutput::PandocAst => {
println!("{}", panache::parser::to_pandoc_ast(&tree));
}
}
}
Ok(())
}
Commands::Format {
files,
check,
range,
verify,
force_exclude,
} => {
if verify {
eprintln!(
"Warning: `panache format --verify` is deprecated; use `panache debug format --checks all`."
);
}
let parsed_range = if let Some(range_str) = range {
if files.len() > 1 {
eprintln!("Error: --range cannot be used with multiple files");
std::process::exit(1);
}
match parse_range(&range_str) {
Ok(r) => Some(r),
Err(e) => {
eprintln!("Error: {}", e);
std::process::exit(1);
}
}
} else {
None
};
if files.is_empty() {
let start_dir = start_dir_for(cli.stdin_filename.as_deref())?;
let (cfg, cfg_path) = load_config_for_cli(
cli.config.as_deref(),
cli.isolated,
cli.cache_dir.as_deref(),
&start_dir,
cli.stdin_filename.as_deref(),
)?;
if let Some(path) = &cfg_path {
log::debug!("Using config from: {}", path.display());
} else {
log::debug!("Using default config");
}
let input = read_all(None)?;
if verify {
let tree = parse(&input, Some(cfg.clone()));
let tree_text = tree.text().to_string();
if input != tree_text {
eprintln!(
"Verification failed (losslessness): parser output differs from input"
);
print_diff("<stdin>", &input, &tree_text, use_color);
std::process::exit(1);
}
}
let output = format(&input, Some(cfg.clone()), parsed_range);
if verify {
let output_twice = format(&output, Some(cfg), parsed_range);
if output != output_twice {
eprintln!(
"Verification failed (idempotency): format(format(x)) != format(x)"
);
print_diff("<stdin>", &output, &output_twice, use_color);
std::process::exit(1);
}
}
if check {
if input != output {
print_diff("<stdin>", &input, &output, use_color);
std::process::exit(1);
}
} else {
print!("{output}");
}
return Ok(());
}
let traversal_anchor = files.first().map(PathBuf::as_path);
let traversal_start_dir = if let Some(anchor) = traversal_anchor {
if anchor.is_dir() {
anchor.to_path_buf()
} else {
start_dir_for(Some(anchor))?
}
} else {
start_dir_for(None)?
};
let (traversal_cfg, traversal_cfg_path) = load_config_for_cli(
cli.config.as_deref(),
cli.isolated,
cli.cache_dir.as_deref(),
&traversal_start_dir,
traversal_anchor,
)?;
let matching_root = path_matching_root(
cli.config.as_deref(),
traversal_cfg_path.as_deref(),
&traversal_start_dir,
)?;
let expanded_files =
expand_paths(&files, &traversal_cfg, &matching_root, force_exclude)?;
let mut cache = if cli.no_cache {
None
} else {
open_cli_cache_best_effort(
&traversal_cfg,
cli.config.as_deref(),
&traversal_start_dir,
)
};
if expanded_files.is_empty() {
if force_exclude {
return Ok(());
}
if has_explicit_file_targets(&files) {
eprintln!("Error: No supported files found");
std::process::exit(1);
}
if !cli.quiet {
println!("No supported files found");
}
return Ok(());
}
let workers = effective_parallelism(cli.jobs, expanded_files.len());
let parallel = workers > 1;
struct FormatOutcome {
file_path: PathBuf,
input: String,
output: String,
}
let cache_shared: Option<Arc<Mutex<CliCache>>> =
cache.take().map(|c| Arc::new(Mutex::new(c)));
let process_file = |file_path: &PathBuf| -> io::Result<FormatOutcome> {
let start_dir = file_path.parent().unwrap_or(Path::new(".")).to_path_buf();
let (mut cfg, cfg_path) = load_config_for_cli(
cli.config.as_deref(),
cli.isolated,
cli.cache_dir.as_deref(),
&start_dir,
Some(file_path),
)?;
if parallel {
cfg.external_max_parallel = 1;
}
if let Some(path) = &cfg_path {
log::debug!("Using config from: {}", path.display());
} else {
log::debug!("Using default config");
}
let input = fs::read_to_string(file_path)?;
let mode = if check {
FormatCacheMode::Check
} else {
FormatCacheMode::Write
};
if verify {
let tree = parse(&input, Some(cfg.clone()));
let tree_text = tree.text().to_string();
if input != tree_text {
let file_name = file_path.to_str().unwrap_or("<unknown>");
eprintln!(
"Verification failed (losslessness): parser output differs from input"
);
print_diff(file_name, &input, &tree_text, use_color);
std::process::exit(1);
}
}
let output = if !verify && parsed_range.is_none() {
if let Some(cache_handle) = cache_shared.as_ref() {
let file_fingerprint = CliCache::file_fingerprint(&input);
let config_fingerprint = CliCache::config_fingerprint(&cfg);
let tool_fingerprint = CliCache::tool_fingerprint();
let cached = {
let guard = cache_handle.lock().unwrap();
if guard.supports_format_mode(&cfg, mode) {
guard
.get_format(
file_path,
mode,
&file_fingerprint,
&config_fingerprint,
&tool_fingerprint,
)
.map(|hit| hit.1)
} else {
None
}
};
if let Some(cached) = cached {
cached
} else {
let output = format(&input, Some(cfg.clone()), parsed_range);
let mut guard = cache_handle.lock().unwrap();
if guard.supports_format_mode(&cfg, mode) {
let unchanged = input == output;
guard.put_format(
file_path,
mode,
FormatStoreArgs {
file_fingerprint,
config_fingerprint,
tool_fingerprint,
unchanged,
output: output.clone(),
},
);
}
output
}
} else {
format(&input, Some(cfg.clone()), parsed_range)
}
} else {
format(&input, Some(cfg.clone()), parsed_range)
};
if verify {
let output_twice = format(&output, Some(cfg), parsed_range);
if output != output_twice {
let file_name = file_path.to_str().unwrap_or("<unknown>");
eprintln!(
"Verification failed (idempotency): format(format(x)) != format(x)"
);
print_diff(file_name, &output, &output_twice, use_color);
std::process::exit(1);
}
}
Ok(FormatOutcome {
file_path: file_path.clone(),
input,
output,
})
};
let outcomes: Vec<io::Result<FormatOutcome>> = if parallel {
use rayon::prelude::*;
let pool = build_pool(workers);
pool.install(|| expanded_files.par_iter().map(&process_file).collect())
} else {
expanded_files.iter().map(&process_file).collect()
};
if let Some(handle) = cache_shared {
cache = Some(
Arc::try_unwrap(handle)
.map_err(|_| {
io::Error::other("cache Arc still shared after parallel pass")
})?
.into_inner()
.map_err(|e| io::Error::other(format!("cache mutex poisoned: {e}")))?,
);
}
let mut all_formatted = true;
let mut reformatted_count = 0usize;
let mut unchanged_count = 0usize;
for outcome in outcomes {
let o = outcome?;
if check {
if o.input != o.output {
let file_name = o.file_path.to_str().unwrap_or("<unknown>");
print_diff(file_name, &o.input, &o.output, use_color);
all_formatted = false;
} else if expanded_files.len() == 1 && !cli.quiet {
println!("{} is correctly formatted", o.file_path.display());
}
} else if !verify {
if o.input != o.output {
fs::write(&o.file_path, &o.output)?;
if !cli.quiet {
println!("Formatted {}", o.file_path.display());
}
reformatted_count += 1;
} else {
unchanged_count += 1;
}
}
}
if check {
if all_formatted {
if expanded_files.len() > 1 && !cli.quiet {
println!("All {} files are correctly formatted", expanded_files.len());
}
} else {
std::process::exit(1);
}
} else if !verify && !cli.quiet {
if reformatted_count == 0 {
println!(
"{}",
file_count_label(
unchanged_count,
"file left unchanged",
"files left unchanged"
)
);
} else {
println!(
"{}, {}",
file_count_label(
reformatted_count,
"file reformatted",
"files reformatted"
),
file_count_label(
unchanged_count,
"file left unchanged",
"files left unchanged"
)
);
}
}
if let Some(cache_ref) = cache.as_mut() {
cache_ref.save_if_dirty()?;
}
Ok(())
}
Commands::Clean { all } => {
let start_dir = start_dir_for(None)?;
let (cfg, _) = load_config_for_cli(
cli.config.as_deref(),
cli.isolated,
cli.cache_dir.as_deref(),
&start_dir,
None,
)?;
let report_clean = |message: String| {
if !cli.quiet {
println!("{message}");
}
};
if all {
if cfg.cache_dir.is_some() {
let cache_dir =
resolve_cache_dir_for_cli(&cfg, cli.config.as_deref(), &start_dir)?;
let removed = remove_dir_if_exists(&cache_dir)?;
if removed {
report_clean(format!("Removed cache directory {}", cache_dir.display()));
} else {
report_clean(format!(
"No cache directory found at {}",
cache_dir.display()
));
}
} else if let Some(global_base) = global_cache_base_dir() {
let removed = remove_dir_if_exists(&global_base)?;
if removed {
report_clean(format!(
"Removed all cache buckets at {}",
global_base.display()
));
} else {
report_clean(format!(
"No cache buckets found at {}",
global_base.display()
));
}
} else {
let cache_dir =
resolve_cache_dir_for_cli(&cfg, cli.config.as_deref(), &start_dir)?;
let removed = remove_dir_if_exists(&cache_dir)?;
if removed {
report_clean(format!("Removed cache directory {}", cache_dir.display()));
} else {
report_clean(format!(
"No cache directory found at {}",
cache_dir.display()
));
}
}
} else {
let cache_dir = resolve_cache_dir_for_cli(&cfg, cli.config.as_deref(), &start_dir)?;
let removed = remove_dir_if_exists(&cache_dir)?;
if removed {
report_clean(format!("Removed cache directory {}", cache_dir.display()));
} else {
report_clean(format!(
"No cache directory found at {}",
cache_dir.display()
));
}
}
Ok(())
}
Commands::Debug { command } => match command {
DebugCommands::Format {
files,
checks,
json,
report,
dump_dir,
dump_passes,
force_exclude,
} => {
if json && report {
eprintln!("Error: --json and --report cannot be used together");
std::process::exit(1);
}
if dump_passes && dump_dir.is_none() {
eprintln!("Error: --dump-passes requires --dump-dir <DIR>");
std::process::exit(1);
}
let use_stdin = files.is_empty();
let targets = if use_stdin {
vec![]
} else {
let traversal_anchor = files.first().map(PathBuf::as_path);
let traversal_start_dir = if let Some(anchor) = traversal_anchor {
if anchor.is_dir() {
anchor.to_path_buf()
} else {
start_dir_for(Some(anchor))?
}
} else {
start_dir_for(None)?
};
let (traversal_cfg, traversal_cfg_path) = load_config_for_cli(
cli.config.as_deref(),
cli.isolated,
cli.cache_dir.as_deref(),
&traversal_start_dir,
traversal_anchor,
)?;
let matching_root = path_matching_root(
cli.config.as_deref(),
traversal_cfg_path.as_deref(),
&traversal_start_dir,
)?;
expand_paths(&files, &traversal_cfg, &matching_root, force_exclude)?
};
if !use_stdin && targets.is_empty() {
if has_explicit_file_targets(&files) {
eprintln!("Error: No supported files found");
std::process::exit(1);
}
if json {
let output = json!({
"checks": format!("{:?}", checks).to_lowercase(),
"files_checked": 0,
"failure_count": 0,
"failures": Vec::<serde_json::Value>::new(),
});
println!(
"{}",
serde_json::to_string_pretty(&output).map_err(io::Error::other)?
);
} else if !cli.quiet {
println!("No supported files found");
}
return Ok(());
}
let mut files_checked = 0usize;
let mut failure_count = 0usize;
let mut json_failures = Vec::new();
let mut collected_failures: Vec<(String, DebugFailure)> = Vec::new();
if use_stdin {
let start_dir = start_dir_for(cli.stdin_filename.as_deref())?;
let (cfg, _) = load_config_for_cli(
cli.config.as_deref(),
cli.isolated,
cli.cache_dir.as_deref(),
&start_dir,
cli.stdin_filename.as_deref(),
)?;
let input = read_all(None)?;
files_checked += 1;
let artifacts = run_debug_checks_for_content(&input, &cfg, checks, "<stdin>");
if let Some(dir) = dump_dir.as_ref() {
write_debug_artifacts(dir, "stdin", &artifacts, dump_passes)?;
}
for failure in &artifacts.failures {
failure_count += 1;
if !json && !report {
eprintln!("Debug check failed ({}) in <stdin>", failure.kind.label());
print_diff("<stdin>", &failure.left, &failure.right, use_color);
}
json_failures.push(json!({
"file": "<stdin>",
"kind": failure.kind.label(),
}));
if report {
collected_failures.push(("<stdin>".to_string(), failure.clone()));
}
}
} else {
for file_path in &targets {
let start_dir = file_path.parent().unwrap_or(Path::new(".")).to_path_buf();
let (cfg, _) = load_config_for_cli(
cli.config.as_deref(),
cli.isolated,
cli.cache_dir.as_deref(),
&start_dir,
Some(file_path),
)?;
let input = fs::read_to_string(file_path)?;
files_checked += 1;
let file_label = file_path.to_str().unwrap_or("<unknown>");
let artifacts =
run_debug_checks_for_content(&input, &cfg, checks, file_label);
if let Some(dir) = dump_dir.as_ref() {
let safe = sanitize_path_for_filename(file_label);
write_debug_artifacts(dir, &safe, &artifacts, dump_passes)?;
}
for failure in &artifacts.failures {
failure_count += 1;
if !json && !report {
eprintln!(
"Debug check failed ({}) in {}",
failure.kind.label(),
file_label
);
print_diff(file_label, &failure.left, &failure.right, use_color);
}
json_failures.push(json!({
"file": file_label,
"kind": failure.kind.label(),
}));
if report {
collected_failures.push((file_label.to_string(), failure.clone()));
}
}
}
}
if json {
let output = json!({
"checks": format!("{:?}", checks).to_lowercase(),
"files_checked": files_checked,
"failure_count": failure_count,
"failures": json_failures,
});
println!(
"{}",
serde_json::to_string_pretty(&output).map_err(io::Error::other)?
);
} else if report {
let markdown =
build_debug_failure_report(checks, files_checked, &collected_failures);
println!("{markdown}");
} else if failure_count == 0 && !cli.quiet {
println!(
"All checks passed (checks: {}, files: {})",
format!("{:?}", checks).to_lowercase(),
files_checked
);
}
if dump_passes
&& !json
&& !cli.quiet
&& let Some(dir) = dump_dir.as_ref()
{
eprintln!("Wrote debug artifacts to {}", dir.display());
}
if failure_count > 0 && !json && !report && !cli.quiet && dump_dir.is_none() {
eprintln!(
"Tip: rerun with --dump-dir <DIR> --dump-passes to inspect input, parse, and format passes."
);
}
if failure_count > 0 {
std::process::exit(1);
}
Ok(())
}
},
#[cfg(feature = "lsp")]
Commands::Lsp { .. } => {
let rt = tokio::runtime::Runtime::new()?;
rt.block_on(async { panache::lsp::run().await })?;
Ok(())
}
Commands::Lint {
files,
check,
fix,
message_format,
force_exclude,
} => {
if files.is_empty() {
let start_dir = start_dir_for(cli.stdin_filename.as_deref())?;
let (cfg, cfg_path) = load_config_for_cli(
cli.config.as_deref(),
cli.isolated,
cli.cache_dir.as_deref(),
&start_dir,
cli.stdin_filename.as_deref(),
)?;
if let Some(path) = &cfg_path {
log::debug!("Using config from: {}", path.display());
} else {
log::debug!("Using default config");
}
let input = read_all(None)?;
let tree = parse(&input, Some(cfg.clone()));
let stdin_path = cli
.stdin_filename
.as_deref()
.unwrap_or(Path::new("stdin.md"));
let metadata = panache::metadata::extract_project_metadata(&tree, stdin_path).ok();
let mut diagnostics = panache::linter::lint_with_external_sync_and_metadata(
&tree,
&input,
&cfg,
metadata.as_ref(),
);
let db = panache::salsa::SalsaDb::default();
let yaml_diags = panache::salsa::built_in_lint_plan(
&db,
panache::salsa::FileText::new(&db, input.clone()),
panache::salsa::FileConfig::new(&db, cfg.clone()),
stdin_path.to_path_buf(),
)
.diagnostics
.iter()
.filter(|d| d.code == "yaml-parse-error")
.cloned()
.collect::<Vec<_>>();
merge_missing_diagnostics(&mut diagnostics, yaml_diags);
if diagnostics.is_empty() {
if !check && !cli.quiet {
println!("No issues found");
}
return Ok(());
}
if fix {
let fixed_output = apply_fixes(&input, &diagnostics);
print!("{}", fixed_output);
} else if !cli.quiet {
print_diagnostics(&diagnostics, None, Some(&input), use_color, message_format);
}
if check {
std::process::exit(1);
}
return Ok(());
}
let traversal_anchor = files.first().map(PathBuf::as_path);
let traversal_start_dir = if let Some(anchor) = traversal_anchor {
if anchor.is_dir() {
anchor.to_path_buf()
} else {
start_dir_for(Some(anchor))?
}
} else {
start_dir_for(None)?
};
let (traversal_cfg, traversal_cfg_path) = load_config_for_cli(
cli.config.as_deref(),
cli.isolated,
cli.cache_dir.as_deref(),
&traversal_start_dir,
traversal_anchor,
)?;
let matching_root = path_matching_root(
cli.config.as_deref(),
traversal_cfg_path.as_deref(),
&traversal_start_dir,
)?;
let expanded_files =
expand_paths(&files, &traversal_cfg, &matching_root, force_exclude)?;
let mut cache = if cli.no_cache {
None
} else {
open_cli_cache_best_effort(
&traversal_cfg,
cli.config.as_deref(),
&traversal_start_dir,
)
};
if expanded_files.is_empty() {
if force_exclude {
return Ok(());
}
if has_explicit_file_targets(&files) {
eprintln!("Error: No supported files found");
std::process::exit(1);
}
if !cli.quiet {
println!("No supported files found");
}
return Ok(());
}
let workers = effective_parallelism(cli.jobs, expanded_files.len());
let parallel = workers > 1;
struct LintOutcome {
file_path: PathBuf,
root_doc: Option<LintedDocument>,
included_docs: Vec<LintedDocument>,
}
let cache_shared: Option<Arc<Mutex<CliCache>>> =
cache.take().map(|c| Arc::new(Mutex::new(c)));
// Lints one file end-to-end: resolves the nearest config for the file,
// consults the shared cache when one is open, lints the file together with
// any documents it includes, and splits the results into the root document
// and its includes.
let process_file = |file_path: &PathBuf| -> io::Result<LintOutcome> {
// Anchor config resolution at the file's own directory so per-directory
// configs apply.
let start_dir = file_path.parent().unwrap_or(Path::new(".")).to_path_buf();
let (mut cfg, cfg_path) = load_config_for_cli(
cli.config.as_deref(),
cli.isolated,
cli.cache_dir.as_deref(),
&start_dir,
Some(file_path),
)?;
// When files themselves run on a thread pool, cap per-file external-linter
// parallelism to 1 so worker counts do not multiply.
if parallel {
cfg.external_max_parallel = 1;
}
if let Some(path) = &cfg_path {
log::debug!("Using config from: {}", path.display());
} else {
log::debug!("Using default config");
}
let root_input = fs::read_to_string(file_path)?;
let documents = if let Some(cache_handle) = cache_shared.as_ref() {
// The cache key covers file content, effective config, and tool version.
let file_fingerprint = CliCache::file_fingerprint(&root_input);
let config_fingerprint = CliCache::config_fingerprint(&cfg);
let tool_fingerprint = CliCache::tool_fingerprint();
let cached_lookup = {
// Hold the cache lock only for the duration of the lookup.
let guard = cache_handle.lock().unwrap();
if guard.supports_lint(&cfg) {
guard
.get_lint(
file_path,
&file_fingerprint,
&config_fingerprint,
&tool_fingerprint,
)
// Reject cached entries whose on-disk documents have changed
// since they were stored.
.filter(|docs| cached_lint_documents_are_fresh(docs))
} else {
None
}
};
if let Some(cached_documents) = cached_lookup {
cached_documents
.iter()
.map(linted_document_from_cached)
.collect::<Vec<_>>()
} else {
// Cache miss: lint for real, then store the result for next time.
let documents = lint_documents_with_includes(file_path, &root_input, &cfg)?;
let mut guard = cache_handle.lock().unwrap();
if guard.supports_lint(&cfg) {
let cached_docs = documents
.iter()
.map(cached_lint_document_from_linted)
.collect::<Vec<_>>();
guard.put_lint(
file_path,
file_fingerprint,
config_fingerprint,
tool_fingerprint,
cached_docs,
);
}
documents
}
} else {
lint_documents_with_includes(file_path, &root_input, &cfg)?
};
// Separate the root document from its includes; includes are reported in a
// stable path order.
let root_doc = documents.iter().find(|doc| &doc.path == file_path).cloned();
let mut included_docs: Vec<LintedDocument> = documents
.into_iter()
.filter(|doc| &doc.path != file_path)
.collect();
included_docs.sort_by(|a, b| a.path.cmp(&b.path));
Ok(LintOutcome {
file_path: file_path.clone(),
root_doc,
included_docs,
})
};
let outcomes: Vec<io::Result<LintOutcome>> = if parallel {
use rayon::prelude::*;
let pool = build_pool(workers);
pool.install(|| expanded_files.par_iter().map(&process_file).collect())
} else {
expanded_files.iter().map(&process_file).collect()
};
if let Some(handle) = cache_shared {
cache = Some(
Arc::try_unwrap(handle)
.map_err(|_| {
io::Error::other("cache Arc still shared after parallel pass")
})?
.into_inner()
.map_err(|e| io::Error::other(format!("cache mutex poisoned: {e}")))?,
);
}
let mut any_issues = false;
let mut total_issues = 0;
for outcome in outcomes {
let LintOutcome {
file_path,
root_doc,
included_docs,
} = outcome?;
let Some(root_doc) = root_doc else {
continue;
};
if !root_doc.diagnostics.is_empty() {
any_issues = true;
total_issues += root_doc.diagnostics.len();
if fix {
let fixed_output = apply_fixes(&root_doc.input, &root_doc.diagnostics);
fs::write(&file_path, fixed_output)?;
if !cli.quiet {
println!(
"Fixed {} issue(s) in {}",
root_doc.diagnostics.len(),
file_path.display()
);
}
} else if !cli.quiet {
print_diagnostics(
&root_doc.diagnostics,
Some(file_path.as_path()),
Some(&root_doc.input),
use_color,
message_format,
);
}
}
if !fix {
for doc in &included_docs {
if doc.diagnostics.is_empty() {
continue;
}
any_issues = true;
total_issues += doc.diagnostics.len();
if !cli.quiet {
print_diagnostics(
&doc.diagnostics,
Some(doc.path.as_path()),
Some(&doc.input),
use_color,
message_format,
);
}
}
}
}
if let Some(cache_ref) = cache.as_mut() {
cache_ref.save_if_dirty()?;
}
if !any_issues && !check && !cli.quiet {
println!("No issues found in {} file(s)", expanded_files.len());
}
if check && any_issues {
eprintln!(
"\nFound {} issue(s) across {} file(s)",
total_issues,
expanded_files.len()
);
std::process::exit(1);
}
Ok(())
}
}
}
// A linted document (either a root file or one of its includes): its path,
// the exact source text that was linted, and the diagnostics produced for it.
#[derive(Debug, Clone)]
struct LintedDocument {
// Filesystem path of the document.
path: PathBuf,
// Full source text the diagnostics refer to; diagnostic ranges are byte
// offsets into this string.
input: String,
// Diagnostics for this document, sorted by (line, column) by the producer.
diagnostics: Vec<panache::linter::Diagnostic>,
}
/// Lints `root_path` (whose contents are `root_input`) together with every
/// document it transitively includes.
///
/// Returns one `LintedDocument` per distinct file visited; traversal state
/// (visited set, recursion stack) lives here and is threaded through the
/// recursive worker.
fn lint_documents_with_includes(
    root_path: &PathBuf,
    root_input: &str,
    cfg: &panache::Config,
) -> io::Result<Vec<LintedDocument>> {
    use std::collections::HashSet;

    // Salsa database and inputs shared by the whole traversal.
    let db = panache::salsa::SalsaDb::default();
    let shared_config = panache::salsa::FileConfig::new(&db, cfg.clone());
    let root_text = panache::salsa::FileText::new(&db, root_input.to_string());

    let mut linted = Vec::new();
    let mut seen: HashSet<PathBuf> = HashSet::new();
    let mut in_progress: HashSet<PathBuf> = HashSet::new();
    lint_loaded_document_with_includes(
        root_path,
        root_input,
        Some(root_text),
        cfg,
        shared_config,
        &mut linted,
        &mut seen,
        &mut in_progress,
        &db,
    )?;
    Ok(linted)
}
// Lints a single document whose text is already loaded, then recurses into
// the documents it includes. `visited` prevents re-linting a file reached via
// multiple include paths; `active` tracks the current recursion stack so that
// include cycles are reported as diagnostics instead of recursing forever.
#[allow(clippy::too_many_arguments, clippy::only_used_in_recursion)]
fn lint_loaded_document_with_includes(
doc_path: &PathBuf,
input: &str,
// Reuse an existing salsa text input when the caller already created one.
file_text: Option<panache::salsa::FileText>,
cfg: &panache::Config,
file_config: panache::salsa::FileConfig,
results: &mut Vec<LintedDocument>,
visited: &mut std::collections::HashSet<PathBuf>,
active: &mut std::collections::HashSet<PathBuf>,
db: &panache::salsa::SalsaDb,
) -> io::Result<()> {
// Already linted via another include path: nothing to do.
if !visited.insert(doc_path.clone()) {
return Ok(());
}
active.insert(doc_path.clone());
let file_text =
file_text.unwrap_or_else(|| panache::salsa::FileText::new(db, input.to_string()));
let plan =
panache::salsa::built_in_lint_plan(db, file_text, file_config, doc_path.clone()).clone();
let mut diagnostics = plan.diagnostics;
if !plan.external_jobs.is_empty() {
// External linters run synchronously here; re-sort so their diagnostics
// interleave with the built-in ones by source position.
diagnostics.extend(run_external_lint_jobs_sync(&plan.external_jobs, input));
diagnostics.sort_by_key(|d| (d.location.line, d.location.column));
}
let tree = panache::salsa::parsed_tree_root(db, file_text, file_config);
let base_dir = doc_path.parent().unwrap_or(Path::new("."));
let roots = panache::includes::find_project_roots(doc_path);
let project_root = roots.quarto.clone();
let resolution =
panache::includes::collect_includes(&tree, input, base_dir, project_root.as_deref(), cfg);
diagnostics.extend(resolution.diagnostics);
// Project-graph diagnostics only apply when the file participates in a
// Quarto/bookdown project or actually has includes.
if roots.quarto.is_some() || roots.bookdown.is_some() || !resolution.includes.is_empty() {
let graph_diags = panache::salsa::project_graph::accumulated::<
panache::salsa::GraphDiagnostic,
>(db, file_text, file_config, doc_path.clone());
for entry in graph_diags {
// Keep only the entries attributed to this document.
if entry.0.path == *doc_path {
diagnostics.push(entry.0.diagnostic.clone());
}
}
}
for include in &resolution.includes {
// An include still on the recursion stack means a cycle: report it on
// the including document and do not descend.
if active.contains(&include.path) {
diagnostics.push(panache::includes::include_cycle_diagnostic(
input,
include.range,
&include.path,
));
continue;
}
if visited.contains(&include.path) {
continue;
}
match fs::read_to_string(&include.path) {
Ok(include_input) => {
lint_loaded_document_with_includes(
&include.path,
&include_input,
None,
cfg,
file_config,
results,
visited,
active,
db,
)?;
}
Err(err) => {
// An unreadable include becomes a diagnostic on the including file
// rather than an error that aborts the whole pass.
diagnostics.push(panache::includes::include_read_error_diagnostic(
input,
include.range,
&include.path,
&err.to_string(),
));
}
}
}
// Final position sort covers diagnostics appended after the earlier sort
// (include resolution, graph, cycle, and read-error diagnostics).
diagnostics.sort_by_key(|d| (d.location.line, d.location.column));
results.push(LintedDocument {
path: doc_path.clone(),
input: input.to_string(),
diagnostics,
});
active.remove(doc_path);
Ok(())
}
fn apply_fixes(input: &str, diagnostics: &[panache::linter::Diagnostic]) -> String {
use panache::linter::diagnostics::Edit;
let mut edits: Vec<&Edit> = diagnostics
.iter()
.filter_map(|d| d.fix.as_ref())
.flat_map(|f| &f.edits)
.collect();
edits.sort_by_key(|e| e.range.start());
let mut output = String::new();
let mut last_end = 0;
for edit in edits {
let start: usize = edit.range.start().into();
let end: usize = edit.range.end().into();
output.push_str(&input[last_end..start]);
output.push_str(&edit.replacement);
last_end = end;
}
output.push_str(&input[last_end..]);
output
}
/// Appends each diagnostic from `additional` to `diagnostics` unless an
/// entry with the same code and source range is already present, so callers
/// can merge two diagnostic sources without producing duplicates.
fn merge_missing_diagnostics(
    diagnostics: &mut Vec<panache::linter::Diagnostic>,
    additional: Vec<panache::linter::Diagnostic>,
) {
    for candidate in additional {
        let already_present = diagnostics.iter().any(|existing| {
            existing.code == candidate.code
                && existing.location.range == candidate.location.range
        });
        if !already_present {
            diagnostics.push(candidate);
        }
    }
}
fn run_external_lint_jobs_sync(
jobs: &[panache::salsa::ExternalLintJob],
input: &str,
) -> Vec<panache::linter::Diagnostic> {
use panache::linter::external_linters::ExternalLinterRegistry;
use panache::linter::external_linters_sync::run_linter_sync;
let registry = ExternalLinterRegistry::new();
let mut out = Vec::new();
for job in jobs {
match run_linter_sync(
&job.linter_name,
&job.language,
&job.content,
input,
®istry,
Some(&job.mappings),
) {
Ok(diags) => out.extend(diags),
Err(err) => log::warn!("External linter '{}' failed: {}", job.linter_name, err),
}
}
out
}
fn cached_lint_documents_are_fresh(documents: &[CachedLintDocument]) -> bool {
documents.iter().all(|doc| {
let path = PathBuf::from(&doc.path);
fs::read_to_string(path).is_ok_and(|current| current == doc.input)
})
}
/// Converts an in-memory lint result into its serializable cache form.
/// Paths are stored lossily as UTF-8 strings.
fn cached_lint_document_from_linted(doc: &LintedDocument) -> CachedLintDocument {
    let diagnostics = doc
        .diagnostics
        .iter()
        .map(cached_diagnostic_from_runtime)
        .collect();
    CachedLintDocument {
        path: doc.path.to_string_lossy().into_owned(),
        input: doc.input.clone(),
        diagnostics,
    }
}
/// Rehydrates a cached lint result back into the runtime representation.
/// Inverse of `cached_lint_document_from_linted`.
fn linted_document_from_cached(doc: &CachedLintDocument) -> LintedDocument {
    let diagnostics = doc
        .diagnostics
        .iter()
        .map(runtime_diagnostic_from_cached)
        .collect();
    LintedDocument {
        path: PathBuf::from(&doc.path),
        input: doc.input.clone(),
        diagnostics,
    }
}
/// Translates a runtime diagnostic into the serializable form stored in the
/// CLI cache. Text ranges are flattened to `u32` byte offsets so the cached
/// form does not depend on `rowan` types.
fn cached_diagnostic_from_runtime(diag: &panache::linter::Diagnostic) -> cache::CachedDiagnostic {
    use cache::{
        CachedDiagnostic, CachedDiagnosticNote, CachedDiagnosticNoteKind, CachedDiagnosticOrigin,
        CachedEdit, CachedFix, CachedLocation, CachedSeverity,
    };
    use panache::linter::{DiagnosticNoteKind, DiagnosticOrigin, Severity};

    // Shared conversion for fix edits: flatten the range to raw offsets.
    let convert_edit = |edit: &panache::linter::diagnostics::Edit| CachedEdit {
        start: u32::from(edit.range.start()),
        end: u32::from(edit.range.end()),
        replacement: edit.replacement.clone(),
    };
    CachedDiagnostic {
        severity: match diag.severity {
            Severity::Error => CachedSeverity::Error,
            Severity::Warning => CachedSeverity::Warning,
            Severity::Info => CachedSeverity::Info,
        },
        location: CachedLocation {
            line: diag.location.line,
            column: diag.location.column,
            start: u32::from(diag.location.range.start()),
            end: u32::from(diag.location.range.end()),
        },
        message: diag.message.clone(),
        code: diag.code.clone(),
        origin: match diag.origin {
            DiagnosticOrigin::BuiltIn => CachedDiagnosticOrigin::BuiltIn,
            DiagnosticOrigin::External => CachedDiagnosticOrigin::External,
        },
        notes: diag
            .notes
            .iter()
            .map(|note| CachedDiagnosticNote {
                kind: match note.kind {
                    DiagnosticNoteKind::Note => CachedDiagnosticNoteKind::Note,
                    DiagnosticNoteKind::Help => CachedDiagnosticNoteKind::Help,
                },
                message: note.message.clone(),
            })
            .collect(),
        fix: diag.fix.as_ref().map(|fix| CachedFix {
            message: fix.message.clone(),
            edits: fix.edits.iter().map(convert_edit).collect(),
        }),
    }
}
fn runtime_diagnostic_from_cached(diag: &cache::CachedDiagnostic) -> panache::linter::Diagnostic {
use rowan::{TextRange, TextSize};
let severity = match diag.severity {
cache::CachedSeverity::Error => panache::linter::Severity::Error,
cache::CachedSeverity::Warning => panache::linter::Severity::Warning,
cache::CachedSeverity::Info => panache::linter::Severity::Info,
};
let origin = match diag.origin {
cache::CachedDiagnosticOrigin::BuiltIn => panache::linter::DiagnosticOrigin::BuiltIn,
cache::CachedDiagnosticOrigin::External => panache::linter::DiagnosticOrigin::External,
};
let notes = diag
.notes
.iter()
.map(|note| panache::linter::DiagnosticNote {
kind: match note.kind {
cache::CachedDiagnosticNoteKind::Note => panache::linter::DiagnosticNoteKind::Note,
cache::CachedDiagnosticNoteKind::Help => panache::linter::DiagnosticNoteKind::Help,
},
message: note.message.clone(),
})
.collect();
let fix = diag.fix.as_ref().map(|fix| panache::linter::Fix {
message: fix.message.clone(),
edits: fix
.edits
.iter()
.map(|edit| panache::linter::diagnostics::Edit {
range: TextRange::new(TextSize::from(edit.start), TextSize::from(edit.end)),
replacement: edit.replacement.clone(),
})
.collect(),
});
panache::linter::Diagnostic {
severity,
location: panache::linter::Location {
line: diag.location.line,
column: diag.location.column,
range: TextRange::new(
TextSize::from(diag.location.start),
TextSize::from(diag.location.end),
),
},
message: diag.message.clone(),
code: diag.code.clone(),
origin,
notes,
fix,
}
}
#[cfg(test)]
mod tests {
    use super::{ColorMode, resolve_color};
    use std::ffi::OsStr;

    // Helper for the common case: neither override flag is set, so the
    // outcome depends only on the mode, the TERM value, and TTY-ness.
    fn color_with(mode: ColorMode, term: Option<&str>, is_tty: bool) -> bool {
        resolve_color(mode, false, false, term.map(OsStr::new), is_tty)
    }

    #[test]
    fn auto_disables_color_when_term_is_dumb() {
        assert!(!color_with(ColorMode::Auto, Some("dumb"), true));
    }

    #[test]
    fn auto_disables_color_when_term_is_unset() {
        assert!(!color_with(ColorMode::Auto, None, true));
    }

    #[test]
    fn auto_enables_color_on_tty_with_real_term() {
        assert!(color_with(ColorMode::Auto, Some("xterm-256color"), true));
    }

    #[test]
    fn auto_disables_color_when_not_a_tty() {
        assert!(!color_with(ColorMode::Auto, Some("xterm-256color"), false));
    }

    #[test]
    fn always_overrides_dumb_term() {
        assert!(color_with(ColorMode::Always, Some("dumb"), false));
    }

    #[test]
    fn no_color_flag_overrides_always() {
        // The no-color flag (second argument) wins even over `Always`.
        assert!(!resolve_color(
            ColorMode::Always,
            true,
            false,
            Some(OsStr::new("xterm-256color")),
            true,
        ));
    }
}