// omni_dev/utils/preflight.rs
//! Preflight validation checks for early failure detection.
//!
//! This module provides functions to validate required services and credentials
//! before starting expensive operations. Commands should call these checks early
//! to fail fast with clear error messages.

use anyhow::{bail, Context, Result};

9/// Result of AI credential validation.
10#[derive(Debug)]
11pub struct AiCredentialInfo {
12    /// The AI provider that will be used.
13    pub provider: AiProvider,
14    /// The model that will be used.
15    pub model: String,
16}
17
18/// AI provider types.
19#[derive(Debug, Clone, Copy, PartialEq, Eq)]
20pub enum AiProvider {
21    /// Anthropic Claude API.
22    Claude,
23    /// AWS Bedrock with Claude.
24    Bedrock,
25    /// OpenAI API.
26    OpenAi,
27    /// Local Ollama.
28    Ollama,
29}
30
31impl std::fmt::Display for AiProvider {
32    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
33        match self {
34            AiProvider::Claude => write!(f, "Claude API"),
35            AiProvider::Bedrock => write!(f, "AWS Bedrock"),
36            AiProvider::OpenAi => write!(f, "OpenAI API"),
37            AiProvider::Ollama => write!(f, "Ollama"),
38        }
39    }
40}
41
42/// Validates that AI credentials are available before processing.
43///
44/// This performs a lightweight check of environment variables without
45/// creating a full AI client. Use this at the start of commands that
46/// require AI to fail fast if credentials are missing.
47pub fn check_ai_credentials(model_override: Option<&str>) -> Result<AiCredentialInfo> {
48    use crate::utils::settings::{get_env_var, get_env_vars};
49
50    // Check provider selection flags
51    let use_openai = get_env_var("USE_OPENAI")
52        .map(|val| val == "true")
53        .unwrap_or(false);
54
55    let use_ollama = get_env_var("USE_OLLAMA")
56        .map(|val| val == "true")
57        .unwrap_or(false);
58
59    let use_bedrock = get_env_var("CLAUDE_CODE_USE_BEDROCK")
60        .map(|val| val == "true")
61        .unwrap_or(false);
62
63    // Check Ollama (no credentials required, just model)
64    if use_ollama {
65        let model = model_override
66            .map(String::from)
67            .or_else(|| get_env_var("OLLAMA_MODEL").ok())
68            .unwrap_or_else(|| "llama2".to_string());
69
70        return Ok(AiCredentialInfo {
71            provider: AiProvider::Ollama,
72            model,
73        });
74    }
75
76    // Check OpenAI
77    if use_openai {
78        let model = model_override
79            .map(String::from)
80            .or_else(|| get_env_var("OPENAI_MODEL").ok())
81            .unwrap_or_else(|| "gpt-5".to_string());
82
83        // Verify API key exists
84        get_env_vars(&["OPENAI_API_KEY", "OPENAI_AUTH_TOKEN"]).map_err(|_| {
85            anyhow::anyhow!(
86                "OpenAI API key not found.\n\
87                 Set one of these environment variables:\n\
88                 - OPENAI_API_KEY\n\
89                 - OPENAI_AUTH_TOKEN"
90            )
91        })?;
92
93        return Ok(AiCredentialInfo {
94            provider: AiProvider::OpenAi,
95            model,
96        });
97    }
98
99    // Check Bedrock
100    if use_bedrock {
101        let model = model_override
102            .map(String::from)
103            .or_else(|| get_env_var("ANTHROPIC_MODEL").ok())
104            .unwrap_or_else(|| "claude-opus-4-1-20250805".to_string());
105
106        // Verify Bedrock configuration
107        get_env_var("ANTHROPIC_AUTH_TOKEN").map_err(|_| {
108            anyhow::anyhow!(
109                "AWS Bedrock authentication not configured.\n\
110                 Set ANTHROPIC_AUTH_TOKEN environment variable."
111            )
112        })?;
113
114        get_env_var("ANTHROPIC_BEDROCK_BASE_URL").map_err(|_| {
115            anyhow::anyhow!(
116                "AWS Bedrock base URL not configured.\n\
117                 Set ANTHROPIC_BEDROCK_BASE_URL environment variable."
118            )
119        })?;
120
121        return Ok(AiCredentialInfo {
122            provider: AiProvider::Bedrock,
123            model,
124        });
125    }
126
127    // Default: Claude API
128    let model = model_override
129        .map(String::from)
130        .or_else(|| get_env_var("ANTHROPIC_MODEL").ok())
131        .unwrap_or_else(|| "claude-opus-4-1-20250805".to_string());
132
133    // Verify API key exists
134    get_env_vars(&[
135        "CLAUDE_API_KEY",
136        "ANTHROPIC_API_KEY",
137        "ANTHROPIC_AUTH_TOKEN",
138    ])
139    .map_err(|_| {
140        anyhow::anyhow!(
141            "Claude API key not found.\n\
142                 Set one of these environment variables:\n\
143                 - CLAUDE_API_KEY\n\
144                 - ANTHROPIC_API_KEY\n\
145                 - ANTHROPIC_AUTH_TOKEN"
146        )
147    })?;
148
149    Ok(AiCredentialInfo {
150        provider: AiProvider::Claude,
151        model,
152    })
153}
154
155/// Validates that GitHub CLI is available and authenticated.
156///
157/// This checks:
158/// 1. `gh` CLI is installed and in PATH
159/// 2. User is authenticated (can access the current repo)
160///
161/// Use this at the start of commands that require GitHub API access.
162pub fn check_github_cli() -> Result<()> {
163    // Check if gh CLI is available
164    let gh_check = std::process::Command::new("gh")
165        .args(["--version"])
166        .output();
167
168    match gh_check {
169        Ok(output) if output.status.success() => {
170            // Test if gh can access the current repo
171            let repo_check = std::process::Command::new("gh")
172                .args(["repo", "view", "--json", "name"])
173                .output();
174
175            match repo_check {
176                Ok(repo_output) if repo_output.status.success() => Ok(()),
177                Ok(repo_output) => {
178                    let error_details = String::from_utf8_lossy(&repo_output.stderr);
179                    if error_details.contains("authentication") || error_details.contains("login") {
180                        bail!(
181                            "GitHub CLI authentication failed.\n\
182                             Please run 'gh auth login' or set GITHUB_TOKEN environment variable."
183                        )
184                    } else {
185                        bail!(
186                            "GitHub CLI cannot access this repository.\n\
187                             Error: {}",
188                            error_details.trim()
189                        )
190                    }
191                }
192                Err(e) => bail!("Failed to test GitHub CLI access: {}", e),
193            }
194        }
195        _ => bail!(
196            "GitHub CLI (gh) is not installed or not in PATH.\n\
197             Please install it from https://cli.github.com/"
198        ),
199    }
200}
201
202/// Validates that the current directory is in a valid git repository.
203///
204/// This is a lightweight check that opens the repository without
205/// loading any commit data.
206pub fn check_git_repository() -> Result<()> {
207    crate::git::GitRepository::open().context(
208        "Not in a git repository. Please run this command from within a git repository.",
209    )?;
210    Ok(())
211}
212
213/// Validates that the working directory is clean (no uncommitted changes).
214///
215/// This checks for:
216/// - Staged changes
217/// - Unstaged modifications
218/// - Untracked files (excluding ignored files)
219///
220/// Use this before operations that require a clean working directory,
221/// like amending commits.
222pub fn check_working_directory_clean() -> Result<()> {
223    let repo = crate::git::GitRepository::open().context("Failed to open git repository")?;
224
225    let status = repo
226        .get_working_directory_status()
227        .context("Failed to get working directory status")?;
228
229    if !status.clean {
230        let mut message = String::from("Working directory has uncommitted changes:\n");
231        for change in &status.untracked_changes {
232            message.push_str(&format!("  {} {}\n", change.status, change.file));
233        }
234        message.push_str("\nPlease commit or stash your changes before proceeding.");
235        bail!(message);
236    }
237
238    Ok(())
239}
240
241/// Performs combined preflight check for AI commands.
242///
243/// Validates:
244/// - Git repository access
245/// - AI credentials
246///
247/// Returns information about the AI provider that will be used.
248pub fn check_ai_command_prerequisites(model_override: Option<&str>) -> Result<AiCredentialInfo> {
249    check_git_repository()?;
250    check_ai_credentials(model_override)
251}
252
253/// Performs combined preflight check for PR creation.
254///
255/// Validates:
256/// - Git repository access
257/// - AI credentials
258/// - GitHub CLI availability and authentication
259///
260/// Returns information about the AI provider that will be used.
261pub fn check_pr_command_prerequisites(model_override: Option<&str>) -> Result<AiCredentialInfo> {
262    check_git_repository()?;
263    let ai_info = check_ai_credentials(model_override)?;
264    check_github_cli()?;
265    Ok(ai_info)
266}
267
268#[cfg(test)]
269mod tests {
270    use super::*;
271
272    #[test]
273    fn ai_provider_display() {
274        assert_eq!(format!("{}", AiProvider::Claude), "Claude API");
275        assert_eq!(format!("{}", AiProvider::Bedrock), "AWS Bedrock");
276        assert_eq!(format!("{}", AiProvider::OpenAi), "OpenAI API");
277        assert_eq!(format!("{}", AiProvider::Ollama), "Ollama");
278    }
279}