use std::collections::HashMap;
use std::io::{self, Read, Write};
use std::path::{Path, PathBuf};
use std::process::ExitCode;
use rayon::prelude::*;
use klasp_core::{
discover_config_for_path, CheckConfig, CheckResult, ConfigV1, GateProtocol, GitEvent,
RepoState, Trigger, UserTrigger, Verdict, VerdictPolicy,
};
use crate::cli::{GateArgs, OutputFormat};
use crate::git;
use crate::output;
use crate::sources::SourceRegistry;
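/// Prefix on every advisory notice the gate writes to stderr.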
const NOTICE_PREFIX: &str = "klasp-gate:";
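/// CLI entry point for `klasp gate`: exit code 0 lets the intercepted
/// command proceed, exit code 2 blocks it.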
pub fn run(args: &GateArgs) -> ExitCode {
let mut stderr = io::stderr().lock();
match gate(&mut stderr, args) {
Outcome::Pass => ExitCode::SUCCESS,
Outcome::Block => ExitCode::from(2),
}
}
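/// Whether the gate allows the intercepted command to proceed.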
enum Outcome {
Pass,
Block,
}
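/// Core gate logic. The gate fails open: every recoverable error (bad
/// schema, unreadable stdin, unparsable input, missing repo root or
/// config) emits a notice to stderr and returns `Outcome::Pass`, so a
/// broken hook never locks the user out of git.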
fn gate<W: Write>(stderr: &mut W, args: &GateArgs) -> Outcome {
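// `--agent` and `--trigger` are currently unused; binding them to `_`
// makes that explicit.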
let _ = (args.agent.as_deref(), args.trigger.as_deref());
match GateProtocol::read_schema_from_env() {
Ok(env_value) => {
if let Err(e) = GateProtocol::check_schema_env(env_value) {
let _ = writeln!(
stderr,
"{NOTICE_PREFIX} schema mismatch ({e}), skipping. \
Re-run `klasp install` to update the hook."
);
return Outcome::Pass;
}
}
Err(e) => {
let _ = writeln!(
stderr,
"{NOTICE_PREFIX} could not read KLASP_GATE_SCHEMA ({e}), \
skipping. Re-run `klasp install` to regenerate the hook."
);
return Outcome::Pass;
}
}
let mut buf = String::new();
if let Err(e) = io::stdin().read_to_string(&mut buf) {
let _ = writeln!(
stderr,
"{NOTICE_PREFIX} could not read stdin ({e}), skipping."
);
return Outcome::Pass;
}
let input = match GateProtocol::parse(&buf) {
Ok(i) => i,
Err(e) => {
let _ = writeln!(
stderr,
"{NOTICE_PREFIX} could not parse input ({e}), skipping."
);
return Outcome::Pass;
}
};
let command = match input.tool_input.command.as_deref() {
Some(c) => c,
None => return Outcome::Pass,
};
let agent_id = std::env::var("KLASP_AGENT_ID").unwrap_or_default();
let builtin_event = Trigger::classify(command);
let repo_root = match git::find_repo_root_from_cwd() {
Some(r) => r,
None => {
let _ = writeln!(
stderr,
"{NOTICE_PREFIX} could not resolve repo root, skipping."
);
return Outcome::Pass;
}
};
let registry = SourceRegistry::default_v1();
let base_ref = git::compute_base_ref(&repo_root);
let staged = git::staged_files(&repo_root);
let mut all_check_results: Vec<CheckResult> = Vec::new();
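// With nothing staged, gate the whole repo against the root config.
// Otherwise, group staged files by their governing `klasp.toml` and
// produce one verdict per group.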
let group_verdicts: Vec<Verdict> = if staged.is_empty() {
let config = match ConfigV1::load(&repo_root) {
Ok(c) => c,
Err(e) => {
let _ = writeln!(stderr, "{NOTICE_PREFIX} config error ({e}), skipping.");
return Outcome::Pass;
}
};
let compiled_triggers = config.compiled_triggers();
let event = match resolve_event(builtin_event, command, compiled_triggers, &agent_id) {
Some(e) => e,
None => return Outcome::Pass,
};
let repo_state = RepoState {
root: repo_root.clone(),
git_event: event,
base_ref,
staged_files: vec![],
};
let check_results = run_config_checks(stderr, &config, &repo_state, &registry, event);
let verdicts: Vec<Verdict> = check_results.iter().map(|r| r.verdict.clone()).collect();
let group_verdict = Verdict::merge(verdicts, config.gate.policy);
all_check_results.extend(check_results);
vec![group_verdict]
} else {
let groups = group_by_config(stderr, &staged, &repo_root);
if groups.is_empty() {
return Outcome::Pass;
}
groups
.into_iter()
.filter_map(|(config_path, files)| {
let config = match ConfigV1::from_file(&config_path) {
Ok(c) => c,
Err(e) => {
let _ = writeln!(
stderr,
"{NOTICE_PREFIX} config error for {path} ({e}), skipping group.",
path = config_path.display(),
);
return None;
}
};
let compiled_triggers = config.compiled_triggers();
let event = resolve_event(builtin_event, command, compiled_triggers, &agent_id)?;
let group_policy = config.gate.policy;
let repo_state = RepoState {
root: repo_root.clone(),
git_event: event,
base_ref: base_ref.clone(),
staged_files: files,
};
let check_results =
run_config_checks(stderr, &config, &repo_state, &registry, event);
let verdicts: Vec<Verdict> =
check_results.iter().map(|r| r.verdict.clone()).collect();
let group_verdict = Verdict::merge(verdicts, group_policy);
all_check_results.extend(check_results);
Some(group_verdict)
})
.collect()
};
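// Across groups the policy is fixed: one blocking group blocks the
// whole gate, regardless of each group's own merge policy.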
let cross_group_policy = VerdictPolicy::AnyFail;
let final_verdict = Verdict::merge(group_verdicts, cross_group_policy);
dispatch_output(
stderr,
args,
&final_verdict,
cross_group_policy,
&all_check_results,
);
if final_verdict.is_blocking() {
Outcome::Block
} else {
Outcome::Pass
}
}
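/// Resolves the git event a command represents. A built-in
/// classification wins; otherwise a command matching any user-defined
/// trigger is treated as a commit.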
fn resolve_event(
builtin: Option<GitEvent>,
command: &str,
user_triggers: &[UserTrigger],
agent_id: &str,
) -> Option<GitEvent> {
if let Some(event) = builtin {
return Some(event);
}
let matched = user_triggers.iter().any(|t| t.matches(command, agent_id));
if matched {
Some(GitEvent::Commit)
} else {
None
}
}
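/// Runs every configured check whose triggers match `event`,
/// sequentially or in parallel per the config's `gate.parallel` flag.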
fn run_config_checks<W: Write>(
stderr: &mut W,
config: &ConfigV1,
repo_state: &RepoState,
registry: &SourceRegistry,
event: GitEvent,
) -> Vec<CheckResult> {
let triggered: Vec<&CheckConfig> = config
.checks
.iter()
.filter(|c| triggers_match(c, event))
.collect();
if config.gate.parallel {
run_parallel(&triggered, repo_state, registry)
} else {
run_sequential(stderr, &triggered, repo_state, registry)
}
}
fn run_sequential<W: Write>(
stderr: &mut W,
triggered: &[&CheckConfig],
repo_state: &RepoState,
registry: &SourceRegistry,
) -> Vec<CheckResult> {
let mut results = Vec::new();
for check in triggered {
let source = match registry.find_for(check) {
Some(s) => s,
None => {
let _ = writeln!(
stderr,
"{NOTICE_PREFIX} no source registered for check `{}`, skipping.",
check.name,
);
continue;
}
};
match source.run(check, repo_state) {
Ok(result) => results.push(result),
Err(e) => {
let _ = writeln!(
stderr,
"{NOTICE_PREFIX} check `{}` runtime error ({e}), skipping.",
check.name,
);
}
}
}
results
}
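// Parallel variant: the caller's generic writer is not available
// across rayon workers, so notices go straight to `io::stderr()`,
// which is internally synchronized and safe from multiple threads.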
fn run_parallel(
triggered: &[&CheckConfig],
repo_state: &RepoState,
registry: &SourceRegistry,
) -> Vec<CheckResult> {
triggered
.par_iter()
.filter_map(|check| {
let source = match registry.find_for(check) {
Some(s) => s,
None => {
let _ = writeln!(
io::stderr(),
"{NOTICE_PREFIX} no source registered for check `{}`, skipping.",
check.name,
);
return None;
}
};
match source.run(check, repo_state) {
Ok(result) => Some(result),
Err(e) => {
let _ = writeln!(
io::stderr(),
"{NOTICE_PREFIX} check `{}` runtime error ({e}), skipping.",
check.name,
);
None
}
}
})
.collect()
}
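/// Buckets staged files under the nearest `klasp.toml` above each
/// file, preserving first-seen config order; files with no governing
/// config are reported and skipped.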
fn group_by_config<W: Write>(
stderr: &mut W,
staged_files: &[PathBuf],
repo_root: &Path,
) -> Vec<(PathBuf, Vec<PathBuf>)> {
let mut order: Vec<PathBuf> = Vec::new();
let mut map: HashMap<PathBuf, Vec<PathBuf>> = HashMap::new();
for file in staged_files {
match discover_config_for_path(file, repo_root) {
Some(config_path) => {
if !map.contains_key(&config_path) {
order.push(config_path.clone());
}
map.entry(config_path).or_default().push(file.clone());
}
None => {
let _ = writeln!(
stderr,
"{NOTICE_PREFIX} no klasp.toml for {path}, skipping.",
path = file.display(),
);
}
}
}
order
.into_iter()
.map(|k| {
let v = map.remove(&k).unwrap_or_default();
(k, v)
})
.collect()
}
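/// A check with an empty `triggers` list matches every event;
/// otherwise at least one trigger must name the event.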
fn triggers_match(check: &CheckConfig, event: GitEvent) -> bool {
if check.triggers.is_empty() {
return true;
}
let needle = match event {
GitEvent::Commit => "commit",
GitEvent::Push => "push",
};
check
.triggers
.iter()
.any(|t| t.on.iter().any(|name| name == needle))
}
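/// Renders the final verdict: terminal output goes to the gate's
/// stderr, machine-readable formats go to `--output` or stdout.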
fn dispatch_output<W: Write>(
stderr: &mut W,
args: &GateArgs,
verdict: &Verdict,
policy: VerdictPolicy,
check_results: &[CheckResult],
) {
match args.format {
OutputFormat::Terminal => {
let text = output::terminal::render(verdict, policy);
let _ = write!(stderr, "{text}");
}
OutputFormat::Junit => {
let xml = output::junit::render(verdict, policy);
write_machine_output(&xml, args);
}
OutputFormat::Sarif => {
let json = output::sarif::render(verdict, policy);
write_machine_output(&json, args);
}
OutputFormat::Json => {
let json = output::json::render(verdict, policy, check_results);
write_machine_output(&json, args);
}
}
}
fn write_machine_output(content: &str, args: &GateArgs) {
match &args.output {
Some(path) => {
if let Err(e) = std::fs::write(path, content) {
let _ = writeln!(
io::stderr(),
"{NOTICE_PREFIX} could not write output file ({e})."
);
}
}
None => {
let _ = write!(io::stdout(), "{content}");
}
}
}
#[cfg(test)]
mod tests {
use klasp_core::{CheckConfig, CheckSourceConfig, TriggerConfig};
use super::*;
fn check_with_triggers(on: Vec<&str>) -> CheckConfig {
CheckConfig {
name: "demo".into(),
triggers: if on.is_empty() {
vec![]
} else {
vec![TriggerConfig {
on: on.into_iter().map(String::from).collect(),
}]
},
source: CheckSourceConfig::Shell {
command: "true".into(),
},
timeout_secs: None,
}
}
#[test]
fn empty_triggers_match_every_event() {
let c = check_with_triggers(vec![]);
assert!(triggers_match(&c, GitEvent::Commit));
assert!(triggers_match(&c, GitEvent::Push));
}
#[test]
fn commit_trigger_matches_only_commit() {
let c = check_with_triggers(vec!["commit"]);
assert!(triggers_match(&c, GitEvent::Commit));
assert!(!triggers_match(&c, GitEvent::Push));
}
#[test]
fn push_trigger_matches_only_push() {
let c = check_with_triggers(vec!["push"]);
assert!(!triggers_match(&c, GitEvent::Commit));
assert!(triggers_match(&c, GitEvent::Push));
}
#[test]
fn either_trigger_matches_both_events() {
let c = check_with_triggers(vec!["commit", "push"]);
assert!(triggers_match(&c, GitEvent::Commit));
assert!(triggers_match(&c, GitEvent::Push));
}
#[test]
fn unknown_trigger_name_matches_nothing() {
let c = check_with_triggers(vec!["pre-merge"]);
assert!(!triggers_match(&c, GitEvent::Commit));
assert!(!triggers_match(&c, GitEvent::Push));
}
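// `resolve_event` should prefer the built-in classification and, with
// no user triggers, resolve unclassified commands to no event.
#[test]
fn builtin_event_wins_and_unmatched_commands_resolve_to_none() {
let no_triggers: &[UserTrigger] = &[];
assert!(matches!(
resolve_event(Some(GitEvent::Push), "git push", no_triggers, ""),
Some(GitEvent::Push)
));
assert!(resolve_event(None, "ls -la", no_triggers, "").is_none());
}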
#[test]
fn group_by_config_scopes_files_to_nearest_config() {
use std::io::sink;
use tempfile::TempDir;
let tmp = TempDir::new().unwrap();
let repo = tmp.path().to_path_buf();
let pkg_a = repo.join("packages").join("alpha");
let pkg_b = repo.join("packages").join("beta");
std::fs::create_dir_all(&pkg_a).unwrap();
std::fs::create_dir_all(&pkg_b).unwrap();
std::fs::write(
pkg_a.join("klasp.toml"),
"version = 1\n[gate]\nagents = []\n",
)
.unwrap();
std::fs::write(
pkg_b.join("klasp.toml"),
"version = 1\n[gate]\nagents = []\n",
)
.unwrap();
let file_a = pkg_a.join("index.ts");
let file_b = pkg_b.join("index.ts");
std::fs::write(&file_a, "").unwrap();
std::fs::write(&file_b, "").unwrap();
let staged = vec![file_a.clone(), file_b.clone()];
let mut stderr = sink();
let groups = group_by_config(&mut stderr, &staged, &repo);
assert_eq!(groups.len(), 2, "expected two groups");
let canon_a = pkg_a.canonicalize().unwrap_or_else(|_| pkg_a.clone());
let canon_b = pkg_b.canonicalize().unwrap_or_else(|_| pkg_b.clone());
for (config_path, files) in &groups {
if config_path.starts_with(&canon_a) {
assert_eq!(files.len(), 1, "alpha group must contain exactly one file");
let got = files[0].canonicalize().unwrap_or_else(|_| files[0].clone());
let exp = file_a.canonicalize().unwrap_or_else(|_| file_a.clone());
assert_eq!(got, exp, "alpha group file mismatch");
} else if config_path.starts_with(&canon_b) {
assert_eq!(files.len(), 1, "beta group must contain exactly one file");
let got = files[0].canonicalize().unwrap_or_else(|_| files[0].clone());
let exp = file_b.canonicalize().unwrap_or_else(|_| file_b.clone());
assert_eq!(got, exp, "beta group file mismatch");
} else {
panic!("unexpected group config path: {}", config_path.display());
}
}
}
}