//! worktrunk 0.34.2
//!
//! A CLI for Git worktree management, designed for parallel AI agent workflows.
//! Documentation
use anyhow::Result;
use std::collections::HashMap;
use std::path::Path;
use worktrunk::HookType;
use worktrunk::config::{
    Command, CommandConfig, HookStep, UserConfig, expand_template, template_references_var,
};
use worktrunk::git::Repository;
use worktrunk::path::to_posix_path;

use super::hook_filter::HookSource;

/// A hook command whose template has been expanded (or deferred) and is
/// ready for execution, together with the JSON context it receives on stdin.
#[derive(Debug)]
pub struct PreparedCommand {
    /// Optional command name from the config, used to label the command.
    pub name: Option<String>,
    /// The expanded command string (a placeholder when `lazy_template` is `Some`).
    pub expanded: String,
    /// JSON-serialized hook context, passed to the command on stdin.
    pub context_json: String,
    /// Raw template for lazy expansion at execution time (when template references `vars.`).
    /// When `Some`, the `expanded` field is a placeholder — use `lazy_template` instead.
    pub lazy_template: Option<String>,
}

/// A step in a prepared pipeline, mirroring `HookStep`.
#[derive(Debug)]
pub enum PreparedStep {
    /// A single command executed serially.
    Single(PreparedCommand),
    /// A group of commands intended to run concurrently.
    Concurrent(Vec<PreparedCommand>),
}

/// Borrowed context used to prepare and expand hook commands for a repository.
#[derive(Clone, Copy, Debug)]
pub struct CommandContext<'a> {
    /// Repository the hook commands operate on.
    pub repo: &'a Repository,
    /// User configuration (provides e.g. merged commit-generation settings).
    pub config: &'a UserConfig,
    /// Current branch name, if on a branch (None in detached HEAD state).
    pub branch: Option<&'a str>,
    /// Worktree path used for path template variables and as the hook's `cwd`.
    pub worktree_path: &'a Path,
    /// Auto-confirm flag (presumably a `--yes` CLI option; not consumed in this module).
    pub yes: bool,
}

impl<'a> CommandContext<'a> {
    pub fn new(
        repo: &'a Repository,
        config: &'a UserConfig,
        branch: Option<&'a str>,
        worktree_path: &'a Path,
        yes: bool,
    ) -> Self {
        Self {
            repo,
            config,
            branch,
            worktree_path,
            yes,
        }
    }

    /// Get branch name, using "HEAD" as fallback for detached HEAD state.
    pub fn branch_or_head(&self) -> &str {
        self.branch.unwrap_or("HEAD")
    }

    /// Get the project identifier for per-project config lookup.
    ///
    /// Uses the remote URL if available, otherwise the canonical repository path.
    /// Returns None only if the path is not valid UTF-8.
    pub fn project_id(&self) -> Option<String> {
        self.repo.project_identifier().ok()
    }

    /// Get the commit generation config, merging project-specific settings.
    pub fn commit_generation(&self) -> worktrunk::config::CommitGenerationConfig {
        self.config.commit_generation(self.project_id().as_deref())
    }
}

/// Build hook context as a HashMap for JSON serialization and template expansion.
///
/// The resulting HashMap is passed to hook commands as JSON on stdin,
/// and used directly for template variable expansion.
///
/// Optional variables (default branch, primary worktree, commit, remote,
/// upstream) are omitted when they cannot be resolved, rather than erroring.
///
/// # Errors
///
/// Fails only if the repository root path cannot be determined.
pub fn build_hook_context(
    ctx: &CommandContext<'_>,
    extra_vars: &[(&str, &str)],
) -> Result<HashMap<String, String>> {
    let repo_root = ctx.repo.repo_path()?;
    let repo_name = repo_root
        .file_name()
        .and_then(|n| n.to_str())
        .unwrap_or("unknown");

    // Convert paths to POSIX format for Git Bash compatibility on Windows.
    // This avoids shell escaping of `:` and `\` characters in Windows paths.
    let worktree = to_posix_path(&ctx.worktree_path.to_string_lossy());
    let worktree_name = ctx
        .worktree_path
        .file_name()
        .and_then(|n| n.to_str())
        .unwrap_or("unknown");

    let repo_path = to_posix_path(&repo_root.to_string_lossy());

    let mut map = HashMap::new();
    map.insert("repo".into(), repo_name.into());
    map.insert("branch".into(), ctx.branch_or_head().into());
    map.insert("worktree_name".into(), worktree_name.into());

    // Canonical path variables
    map.insert("repo_path".into(), repo_path.clone());
    map.insert("worktree_path".into(), worktree.clone());

    // Execution directory — always where the hook command runs, even when
    // worktree_path points to an Active identity that doesn't exist on disk.
    // Same string as `worktree_path`: reuse it instead of re-deriving the path.
    map.insert("cwd".into(), worktree.clone());

    // Deprecated aliases (kept for backward compatibility)
    map.insert("main_worktree".into(), repo_name.into());
    map.insert("repo_root".into(), repo_path);
    map.insert("worktree".into(), worktree);

    // Default branch
    if let Some(default_branch) = ctx.repo.default_branch() {
        map.insert("default_branch".into(), default_branch);
    }

    // Primary worktree path (where established files live)
    if let Ok(Some(path)) = ctx.repo.primary_worktree() {
        let path_str = to_posix_path(&path.to_string_lossy());
        map.insert("primary_worktree_path".into(), path_str.clone());
        // Deprecated alias
        map.insert("main_worktree_path".into(), path_str);
    }

    // Resolve commit from the Active branch, not HEAD at discovery path.
    // This ensures {{ commit }} follows the Active branch even when the
    // CommandContext points to a different worktree than where we're running.
    let commit_ref = ctx.branch.unwrap_or("HEAD");
    if let Ok(commit) = ctx.repo.run_command(&["rev-parse", commit_ref]) {
        let commit = commit.trim();
        map.insert("commit".into(), commit.into());
        // Hex SHAs are ASCII, so byte-slicing the first 7 characters is safe.
        if commit.len() >= 7 {
            map.insert("short_commit".into(), commit[..7].into());
        }
    }

    if let Ok(remote) = ctx.repo.primary_remote() {
        map.insert("remote".into(), remote.to_string());
        // Add remote URL for conditional hook execution (e.g., GitLab vs GitHub)
        if let Some(url) = ctx.repo.remote_url(&remote) {
            map.insert("remote_url".into(), url);
        }
        if let Some(branch) = ctx.branch
            && let Ok(Some(upstream)) = ctx.repo.branch(branch).upstream()
        {
            map.insert("upstream".into(), upstream);
        }
    }

    // Add extra vars (e.g., target branch for merge, base for switch).
    // Inserted last so they may override any of the defaults above.
    for (k, v) in extra_vars {
        map.insert((*k).into(), (*v).into());
    }

    Ok(map)
}

/// Expand commands from a CommandConfig without approval.
///
/// When `lazy_enabled` is true, commands referencing `vars.` are validated but not
/// expanded — they carry a `lazy_template` for deferred expansion at execution time.
/// Only enable for pipeline steps where ordering guarantees vars are set by prior steps.
fn expand_commands(
    commands: &[Command],
    ctx: &CommandContext<'_>,
    extra_vars: &[(&str, &str)],
    hook_type: HookType,
    source: HookSource,
    lazy_enabled: bool,
) -> anyhow::Result<Vec<(Command, String, Option<String>)>> {
    if commands.is_empty() {
        return Ok(Vec::new());
    }

    let mut base_context = build_hook_context(ctx, extra_vars)?;

    // hook_type is always available as a template variable and in JSON context
    base_context.insert("hook_type".into(), hook_type.to_string());

    let mut result = Vec::new();

    for cmd in commands {
        // hook_name is per-command: available as template variable and in JSON context
        let mut cmd_context = base_context.clone();
        if let Some(ref name) = cmd.name {
            cmd_context.insert("hook_name".into(), name.clone());
        }

        let template_name = match &cmd.name {
            Some(name) => format!("{}:{}", source, name),
            None => format!("{} {} hook", source, hook_type),
        };

        let lazy = lazy_enabled && template_references_var(&cmd.template, "vars");

        let (expanded_str, lazy_template) = if lazy {
            // Parse-only validation: catch syntax errors upfront without rendering.
            // Full rendering (validate_template) would fail on {{ vars.X }} because
            // vars aren't set yet — that's the whole point of lazy expansion.
            let env = minijinja::Environment::new();
            env.template_from_named_str(&template_name, &cmd.template)
                .map_err(|e| anyhow::anyhow!("syntax error in {template_name}: {e}"))?;
            let tpl = cmd.template.clone();
            (tpl.clone(), Some(tpl))
        } else {
            let vars: HashMap<&str, &str> = cmd_context
                .iter()
                .map(|(k, v)| (k.as_str(), v.as_str()))
                .collect();
            (
                expand_template(&cmd.template, &vars, true, ctx.repo, &template_name)?,
                None,
            )
        };

        let context_json = serde_json::to_string(&cmd_context)
            .expect("HashMap<String, String> serialization should never fail");

        result.push((
            Command::with_expansion(cmd.name.clone(), cmd.template.clone(), expanded_str),
            context_json,
            lazy_template,
        ));
    }

    Ok(result)
}

/// Prepare commands for execution.
///
/// Expands command templates with context variables and returns prepared
/// commands ready for execution, each with JSON context for stdin.
///
/// Note: Approval logic (for project commands) is handled at the call site,
/// not here. User commands don't require approval since users implicitly
/// approve them by adding them to their config.
pub fn prepare_commands(
    command_config: &CommandConfig,
    ctx: &CommandContext<'_>,
    extra_vars: &[(&str, &str)],
    hook_type: HookType,
    source: HookSource,
) -> anyhow::Result<Vec<PreparedCommand>> {
    let commands: Vec<Command> = command_config.commands().cloned().collect();
    if commands.is_empty() {
        return Ok(Vec::new());
    }

    // Lazy expansion for pipeline configs (sequential ordering guarantees vars are
    // set by prior steps). Flat configs (concurrent table) expand eagerly.
    let lazy = command_config.is_pipeline();

    let mut prepared = Vec::with_capacity(commands.len());
    for (cmd, context_json, lazy_template) in
        expand_commands(&commands, ctx, extra_vars, hook_type, source, lazy)?
    {
        prepared.push(PreparedCommand {
            name: cmd.name,
            expanded: cmd.expanded,
            context_json,
            lazy_template,
        });
    }
    Ok(prepared)
}

/// Prepare pipeline steps for execution, preserving serial/concurrent structure.
///
/// Like `prepare_commands`, but returns `Vec<PreparedStep>` that preserves
/// the pipeline structure from the config. Used by post-* hooks that need
/// to distinguish serial steps from concurrent groups.
pub fn prepare_steps(
    command_config: &CommandConfig,
    ctx: &CommandContext<'_>,
    extra_vars: &[(&str, &str)],
    hook_type: HookType,
    source: HookSource,
) -> anyhow::Result<Vec<PreparedStep>> {
    let steps = command_config.steps();
    if steps.is_empty() {
        return Ok(Vec::new());
    }

    // Collect step sizes so we can re-partition after a single expand_commands call.
    // This avoids calling build_hook_context (which spawns git subprocesses) per step.
    let step_sizes: Vec<usize> = steps
        .iter()
        .map(|s| match s {
            HookStep::Single(_) => 1,
            HookStep::Concurrent(cmds) => cmds.len(),
        })
        .collect();

    let all_commands: Vec<Command> = command_config.commands().cloned().collect();
    let all_expanded = expand_commands(&all_commands, ctx, extra_vars, hook_type, source, true)?;
    let mut expanded_iter = all_expanded.into_iter();

    let mut result = Vec::new();
    for (step, &size) in steps.iter().zip(&step_sizes) {
        let chunk: Vec<_> = expanded_iter.by_ref().take(size).collect();
        match step {
            HookStep::Single(_) => {
                let (cmd, json, lazy) = chunk.into_iter().next().unwrap();
                result.push(PreparedStep::Single(PreparedCommand {
                    name: cmd.name,
                    expanded: cmd.expanded,
                    context_json: json,
                    lazy_template: lazy,
                }));
            }
            HookStep::Concurrent(_) => {
                let prepared = chunk
                    .into_iter()
                    .map(|(cmd, json, lazy)| PreparedCommand {
                        name: cmd.name,
                        expanded: cmd.expanded,
                        context_json: json,
                        lazy_template: lazy,
                    })
                    .collect();
                result.push(PreparedStep::Concurrent(prepared));
            }
        }
    }

    Ok(result)
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_template_references_var_for_vars() {
        // Real vars references
        let references = [
            "{{ vars.container }}",
            "{{vars.container}}",
            "docker run --name {{ vars.name }}",
            "{% if vars.key %}yes{% endif %}",
        ];
        for template in references {
            assert!(
                template_references_var(template, "vars"),
                "expected vars reference in {template:?}"
            );
        }

        // Literal text — not a template reference
        let literals = ["echo hello > template_vars.txt", "no vars references here"];
        for template in literals {
            assert!(
                !template_references_var(template, "vars"),
                "expected no vars reference in {template:?}"
            );
        }
    }
}