// omni_dev/utils/preflight.rs
use anyhow::{bail, Context, Result};
/// Result of a successful AI credential preflight check: the provider that
/// the environment selected and the model name that will be used.
#[derive(Debug)]
pub struct AiCredentialInfo {
    // Backend chosen from the USE_OLLAMA / USE_OPENAI / CLAUDE_CODE_USE_BEDROCK
    // flags (defaults to the Claude API when none are set).
    pub provider: AiProvider,
    // Model identifier, resolved from an explicit override, a provider-specific
    // env var, or a hard-coded provider default.
    pub model: String,
}
17
/// AI backends supported by the preflight checks.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum AiProvider {
    /// Direct Anthropic Claude API (CLAUDE_API_KEY / ANTHROPIC_API_KEY / ANTHROPIC_AUTH_TOKEN).
    Claude,
    /// Claude via AWS Bedrock (enabled by CLAUDE_CODE_USE_BEDROCK=true).
    Bedrock,
    /// OpenAI API (enabled by USE_OPENAI=true).
    OpenAi,
    /// Local Ollama server (enabled by USE_OLLAMA=true; no credentials required).
    Ollama,
}
30
31impl std::fmt::Display for AiProvider {
32 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
33 match self {
34 AiProvider::Claude => write!(f, "Claude API"),
35 AiProvider::Bedrock => write!(f, "AWS Bedrock"),
36 AiProvider::OpenAi => write!(f, "OpenAI API"),
37 AiProvider::Ollama => write!(f, "Ollama"),
38 }
39 }
40}
41
42pub fn check_ai_credentials(model_override: Option<&str>) -> Result<AiCredentialInfo> {
48 use crate::utils::settings::{get_env_var, get_env_vars};
49
50 let use_openai = get_env_var("USE_OPENAI")
52 .map(|val| val == "true")
53 .unwrap_or(false);
54
55 let use_ollama = get_env_var("USE_OLLAMA")
56 .map(|val| val == "true")
57 .unwrap_or(false);
58
59 let use_bedrock = get_env_var("CLAUDE_CODE_USE_BEDROCK")
60 .map(|val| val == "true")
61 .unwrap_or(false);
62
63 if use_ollama {
65 let model = model_override
66 .map(String::from)
67 .or_else(|| get_env_var("OLLAMA_MODEL").ok())
68 .unwrap_or_else(|| "llama2".to_string());
69
70 return Ok(AiCredentialInfo {
71 provider: AiProvider::Ollama,
72 model,
73 });
74 }
75
76 if use_openai {
78 let model = model_override
79 .map(String::from)
80 .or_else(|| get_env_var("OPENAI_MODEL").ok())
81 .unwrap_or_else(|| "gpt-5".to_string());
82
83 get_env_vars(&["OPENAI_API_KEY", "OPENAI_AUTH_TOKEN"]).map_err(|_| {
85 anyhow::anyhow!(
86 "OpenAI API key not found.\n\
87 Set one of these environment variables:\n\
88 - OPENAI_API_KEY\n\
89 - OPENAI_AUTH_TOKEN"
90 )
91 })?;
92
93 return Ok(AiCredentialInfo {
94 provider: AiProvider::OpenAi,
95 model,
96 });
97 }
98
99 if use_bedrock {
101 let model = model_override
102 .map(String::from)
103 .or_else(|| get_env_var("ANTHROPIC_MODEL").ok())
104 .unwrap_or_else(|| "claude-opus-4-1-20250805".to_string());
105
106 get_env_var("ANTHROPIC_AUTH_TOKEN").map_err(|_| {
108 anyhow::anyhow!(
109 "AWS Bedrock authentication not configured.\n\
110 Set ANTHROPIC_AUTH_TOKEN environment variable."
111 )
112 })?;
113
114 get_env_var("ANTHROPIC_BEDROCK_BASE_URL").map_err(|_| {
115 anyhow::anyhow!(
116 "AWS Bedrock base URL not configured.\n\
117 Set ANTHROPIC_BEDROCK_BASE_URL environment variable."
118 )
119 })?;
120
121 return Ok(AiCredentialInfo {
122 provider: AiProvider::Bedrock,
123 model,
124 });
125 }
126
127 let model = model_override
129 .map(String::from)
130 .or_else(|| get_env_var("ANTHROPIC_MODEL").ok())
131 .unwrap_or_else(|| "claude-opus-4-1-20250805".to_string());
132
133 get_env_vars(&[
135 "CLAUDE_API_KEY",
136 "ANTHROPIC_API_KEY",
137 "ANTHROPIC_AUTH_TOKEN",
138 ])
139 .map_err(|_| {
140 anyhow::anyhow!(
141 "Claude API key not found.\n\
142 Set one of these environment variables:\n\
143 - CLAUDE_API_KEY\n\
144 - ANTHROPIC_API_KEY\n\
145 - ANTHROPIC_AUTH_TOKEN"
146 )
147 })?;
148
149 Ok(AiCredentialInfo {
150 provider: AiProvider::Claude,
151 model,
152 })
153}
154
155pub fn check_github_cli() -> Result<()> {
163 let gh_check = std::process::Command::new("gh")
165 .args(["--version"])
166 .output();
167
168 match gh_check {
169 Ok(output) if output.status.success() => {
170 let repo_check = std::process::Command::new("gh")
172 .args(["repo", "view", "--json", "name"])
173 .output();
174
175 match repo_check {
176 Ok(repo_output) if repo_output.status.success() => Ok(()),
177 Ok(repo_output) => {
178 let error_details = String::from_utf8_lossy(&repo_output.stderr);
179 if error_details.contains("authentication") || error_details.contains("login") {
180 bail!(
181 "GitHub CLI authentication failed.\n\
182 Please run 'gh auth login' or set GITHUB_TOKEN environment variable."
183 )
184 } else {
185 bail!(
186 "GitHub CLI cannot access this repository.\n\
187 Error: {}",
188 error_details.trim()
189 )
190 }
191 }
192 Err(e) => bail!("Failed to test GitHub CLI access: {}", e),
193 }
194 }
195 _ => bail!(
196 "GitHub CLI (gh) is not installed or not in PATH.\n\
197 Please install it from https://cli.github.com/"
198 ),
199 }
200}
201
202pub fn check_git_repository() -> Result<()> {
207 crate::git::GitRepository::open().context(
208 "Not in a git repository. Please run this command from within a git repository.",
209 )?;
210 Ok(())
211}
212
213pub fn check_working_directory_clean() -> Result<()> {
223 let repo = crate::git::GitRepository::open().context("Failed to open git repository")?;
224
225 let status = repo
226 .get_working_directory_status()
227 .context("Failed to get working directory status")?;
228
229 if !status.clean {
230 let mut message = String::from("Working directory has uncommitted changes:\n");
231 for change in &status.untracked_changes {
232 message.push_str(&format!(" {} {}\n", change.status, change.file));
233 }
234 message.push_str("\nPlease commit or stash your changes before proceeding.");
235 bail!(message);
236 }
237
238 Ok(())
239}
240
241pub fn check_ai_command_prerequisites(model_override: Option<&str>) -> Result<AiCredentialInfo> {
249 check_git_repository()?;
250 check_ai_credentials(model_override)
251}
252
253pub fn check_pr_command_prerequisites(model_override: Option<&str>) -> Result<AiCredentialInfo> {
262 check_git_repository()?;
263 let ai_info = check_ai_credentials(model_override)?;
264 check_github_cli()?;
265 Ok(ai_info)
266}
267
#[cfg(test)]
mod tests {
    use super::*;

    /// Every provider variant renders its expected user-facing name.
    #[test]
    fn ai_provider_display() {
        let expectations = [
            (AiProvider::Claude, "Claude API"),
            (AiProvider::Bedrock, "AWS Bedrock"),
            (AiProvider::OpenAi, "OpenAI API"),
            (AiProvider::Ollama, "Ollama"),
        ];
        for (provider, expected) in expectations {
            assert_eq!(provider.to_string(), expected);
        }
    }
}