1use crate::error::{BevyAIError, Result};
39use serde::{Deserialize, Serialize};
40use std::env;
41use std::fs;
42use std::path::PathBuf;
43
/// Top-level configuration for the tool: provider credentials, the model to
/// use by default, and code-generation / project-tracking settings.
///
/// Persisted as JSON via `save_to_file` / loaded via `from_file`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AIConfig {
    /// OpenAI credentials; `None` when the provider is not configured.
    pub openai: Option<OpenAIConfig>,
    /// Anthropic credentials; `None` when the provider is not configured.
    pub anthropic: Option<AnthropicConfig>,
    /// Google credentials; `None` when the provider is not configured.
    pub google: Option<GoogleConfig>,
    /// Model used when a request does not specify one.
    pub default_model: ModelType,
    /// Knobs controlling how code is generated (temperature, tokens, …).
    pub generation: GenerationConfig,
    /// Per-project behavior (conversation tracking, formatting, …).
    pub project: ProjectSettings,
}
60
/// Credentials and endpoint overrides for the OpenAI provider.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OpenAIConfig {
    /// API key (from `OPENAI_API_KEY` when built via `AIConfig::from_env`).
    pub api_key: String,
    /// Optional organization id (`OPENAI_ORGANIZATION`).
    pub organization: Option<String>,
    /// Optional custom endpoint (`OPENAI_BASE_URL`); `None` = provider default.
    pub base_url: Option<String>,
}
71
/// Credentials and endpoint override for the Anthropic provider.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AnthropicConfig {
    /// API key (from `ANTHROPIC_API_KEY` when built via `AIConfig::from_env`).
    pub api_key: String,
    /// Optional custom endpoint (`ANTHROPIC_BASE_URL`); `None` = provider default.
    pub base_url: Option<String>,
}
80
/// Credentials and endpoint override for the Google provider.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GoogleConfig {
    /// API key (from `GOOGLE_API_KEY` when built via `AIConfig::from_env`).
    pub api_key: String,
    /// Optional custom endpoint (`GOOGLE_BASE_URL`); `None` = provider default.
    pub base_url: Option<String>,
}
89
90#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
92pub enum ModelType {
93 #[serde(rename = "gpt-4")]
95 GPT4,
96 #[serde(rename = "gpt-4-turbo")]
98 GPT4Turbo,
99 #[serde(rename = "gpt-3.5-turbo")]
101 GPT35Turbo,
102 #[serde(rename = "claude-3-opus")]
104 Claude3Opus,
105 #[serde(rename = "claude-3-sonnet")]
107 Claude3Sonnet,
108 #[serde(rename = "claude-3-haiku")]
110 Claude3Haiku,
111 #[serde(rename = "gemini-pro")]
113 GeminiPro,
114 #[serde(rename = "gemini-pro-vision")]
116 GeminiProVision,
117}
118
119impl ModelType {
120 pub fn as_str(&self) -> &'static str {
122 match self {
123 ModelType::GPT4 => "gpt-4",
124 ModelType::GPT4Turbo => "gpt-4-turbo",
125 ModelType::GPT35Turbo => "gpt-3.5-turbo",
126 ModelType::Claude3Opus => "claude-3-opus",
127 ModelType::Claude3Sonnet => "claude-3-sonnet",
128 ModelType::Claude3Haiku => "claude-3-haiku",
129 ModelType::GeminiPro => "gemini-pro",
130 ModelType::GeminiProVision => "gemini-pro-vision",
131 }
132 }
133
134 pub fn provider(&self) -> &'static str {
136 match self {
137 ModelType::GPT4 | ModelType::GPT4Turbo | ModelType::GPT35Turbo => "openai",
138 ModelType::Claude3Opus | ModelType::Claude3Sonnet | ModelType::Claude3Haiku => "anthropic",
139 ModelType::GeminiPro | ModelType::GeminiProVision => "google",
140 }
141 }
142
143 pub fn supports_vision(&self) -> bool {
145 matches!(self, ModelType::GPT4 | ModelType::GeminiProVision)
146 }
147
148 pub fn max_context_length(&self) -> usize {
150 match self {
151 ModelType::GPT4 => 8192,
152 ModelType::GPT4Turbo => 128000,
153 ModelType::GPT35Turbo => 16385,
154 ModelType::Claude3Opus => 200000,
155 ModelType::Claude3Sonnet => 200000,
156 ModelType::Claude3Haiku => 200000,
157 ModelType::GeminiPro => 32768,
158 ModelType::GeminiProVision => 16384,
159 }
160 }
161}
162
163impl std::fmt::Display for ModelType {
164 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
165 write!(f, "{}", self.as_str())
166 }
167}
168
169impl std::str::FromStr for ModelType {
170 type Err = BevyAIError;
171
172 fn from_str(s: &str) -> Result<Self> {
173 match s {
174 "gpt-4" => Ok(ModelType::GPT4),
175 "gpt-4-turbo" => Ok(ModelType::GPT4Turbo),
176 "gpt-3.5-turbo" => Ok(ModelType::GPT35Turbo),
177 "claude-3-opus" => Ok(ModelType::Claude3Opus),
178 "claude-3-sonnet" => Ok(ModelType::Claude3Sonnet),
179 "claude-3-haiku" => Ok(ModelType::Claude3Haiku),
180 "gemini-pro" => Ok(ModelType::GeminiPro),
181 "gemini-pro-vision" => Ok(ModelType::GeminiProVision),
182 _ => Err(BevyAIError::unsupported_model(s)),
183 }
184 }
185}
186
/// Tunables for AI code generation.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GenerationConfig {
    /// Sampling temperature passed to the model (default 0.7).
    pub temperature: f32,
    /// Maximum tokens to request per completion (default 4000).
    pub max_tokens: u32,
    /// Ask the model to include explanatory comments in generated code.
    pub include_comments: bool,
    /// Ask the model to also emit tests for generated code.
    pub generate_tests: bool,
    /// Bevy version the generated code should target (e.g. "0.12").
    pub bevy_version: String,
    /// Rust edition the generated code should target (e.g. "2021").
    pub rust_edition: String,
}
203
/// Per-project behavior toggles.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProjectSettings {
    /// Record request/response pairs as `ConversationEntry` items.
    pub track_conversations: bool,
    /// Run a formatter over generated code automatically.
    pub auto_format: bool,
    /// Add crate dependencies automatically when generated code needs them.
    pub auto_dependencies: bool,
    /// Template name used when none is specified (default "basic").
    pub default_template: String,
}
216
/// Per-project state file: metadata plus the history of AI interactions,
/// generated artifacts, dependencies, and custom templates.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProjectConfig {
    /// Identity and versioning info for the project.
    pub metadata: ProjectMetadata,
    /// Recorded AI request/response exchanges.
    pub conversations: Vec<ConversationEntry>,
    /// Files the tool has written, with provenance.
    pub generated_files: Vec<GeneratedFile>,
    /// Crate dependencies added on the AI's behalf.
    pub dependencies: Vec<Dependency>,
    /// User-defined code templates available to this project.
    pub templates: Vec<CustomTemplate>,
}
231
/// Identity, timestamps, and targeting info for a project.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProjectMetadata {
    /// Project name.
    pub name: String,
    /// Human-readable description.
    pub description: String,
    /// Project version string.
    pub version: String,
    /// When the project was created (UTC).
    pub created_at: chrono::DateTime<chrono::Utc>,
    /// When the project was last modified (UTC).
    pub updated_at: chrono::DateTime<chrono::Utc>,
    /// Bevy version the project targets.
    pub bevy_version: String,
    /// Enabled feature flags.
    pub features: Vec<String>,
    /// Free-form tags for search/organization.
    pub tags: Vec<String>,
}
252
/// One recorded AI exchange: the prompt, the reply, and bookkeeping.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ConversationEntry {
    /// Unique id for this exchange.
    pub id: uuid::Uuid,
    /// The user's request text.
    pub request: String,
    /// The model's response text.
    pub response: String,
    /// Which model produced the response.
    pub model_used: ModelType,
    /// When the exchange happened (UTC).
    pub timestamp: chrono::DateTime<chrono::Utc>,
    /// Token count reported by the provider, if available.
    pub tokens_used: Option<u32>,
    /// Estimated cost in the provider's billing currency, if known.
    pub cost: Option<f64>,
    /// Paths of files changed as a result of this exchange.
    pub files_modified: Vec<String>,
}
273
/// Provenance record for a file written by the tool.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GeneratedFile {
    /// Path of the generated file.
    pub path: String,
    /// Name of the generator/template that produced it.
    pub generator: String,
    /// Model that produced the content.
    pub model: ModelType,
    /// When the file was created (UTC).
    pub created_at: chrono::DateTime<chrono::Utc>,
    /// Content checksum, used to detect manual edits.
    pub checksum: String,
}
288
/// A crate dependency added on the AI's behalf, with a paper trail.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Dependency {
    /// Crate name.
    pub name: String,
    /// Version requirement string.
    pub version: String,
    /// Enabled crate features.
    pub features: Vec<String>,
    /// Why the dependency was added.
    pub reason: String,
    /// Model that requested the dependency.
    pub added_by: ModelType,
    /// When it was added (UTC).
    pub added_at: chrono::DateTime<chrono::Utc>,
}
305
/// A user-defined code template and the variables it expects.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CustomTemplate {
    /// Template name (referenced by `ProjectSettings::default_template`).
    pub name: String,
    /// Human-readable description.
    pub description: String,
    /// Filesystem path to the template source.
    pub template_path: String,
    /// Substitution variables the template accepts.
    pub variables: Vec<TemplateVariable>,
    /// When the template was created (UTC).
    pub created_at: chrono::DateTime<chrono::Utc>,
}
320
/// One substitution variable declared by a `CustomTemplate`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TemplateVariable {
    /// Variable name as referenced inside the template.
    pub name: String,
    /// Human-readable description shown when prompting for a value.
    pub description: String,
    /// Value used when the caller supplies none; `None` means no default.
    pub default_value: Option<String>,
    /// Whether a value must be provided when no default exists.
    pub required: bool,
}
333
334impl Default for AIConfig {
335 fn default() -> Self {
336 Self {
337 openai: None,
338 anthropic: None,
339 google: None,
340 default_model: ModelType::GPT4,
341 generation: GenerationConfig::default(),
342 project: ProjectSettings::default(),
343 }
344 }
345}
346
347impl Default for GenerationConfig {
348 fn default() -> Self {
349 Self {
350 temperature: 0.7,
351 max_tokens: 4000,
352 include_comments: true,
353 generate_tests: false,
354 bevy_version: "0.12".to_string(),
355 rust_edition: "2021".to_string(),
356 }
357 }
358}
359
360impl Default for ProjectSettings {
361 fn default() -> Self {
362 Self {
363 track_conversations: true,
364 auto_format: true,
365 auto_dependencies: true,
366 default_template: "basic".to_string(),
367 }
368 }
369}
370
371impl AIConfig {
372 pub fn from_env() -> Result<Self> {
374 let mut config = Self::default();
375
376 if let Ok(api_key) = env::var("OPENAI_API_KEY") {
377 config.openai = Some(OpenAIConfig {
378 api_key,
379 organization: env::var("OPENAI_ORGANIZATION").ok(),
380 base_url: env::var("OPENAI_BASE_URL").ok(),
381 });
382 }
383
384 if let Ok(api_key) = env::var("ANTHROPIC_API_KEY") {
385 config.anthropic = Some(AnthropicConfig {
386 api_key,
387 base_url: env::var("ANTHROPIC_BASE_URL").ok(),
388 });
389 }
390
391 if let Ok(api_key) = env::var("GOOGLE_API_KEY") {
392 config.google = Some(GoogleConfig {
393 api_key,
394 base_url: env::var("GOOGLE_BASE_URL").ok(),
395 });
396 }
397
398 if let Ok(model) = env::var("bevy_agent_DEFAULT_MODEL") {
399 config.default_model = model.parse()?;
400 }
401
402 Ok(config)
403 }
404
405 pub fn from_file<P: AsRef<std::path::Path>>(path: P) -> Result<Self> {
407 let content = fs::read_to_string(path)?;
408 Ok(serde_json::from_str(&content)?)
409 }
410
411 pub fn save_to_file<P: AsRef<std::path::Path>>(&self, path: P) -> Result<()> {
413 let content = serde_json::to_string_pretty(self)?;
414 fs::write(path, content)?;
415 Ok(())
416 }
417
418 pub fn default_config_path() -> Result<PathBuf> {
420 Ok(dirs::home_dir()
421 .ok_or_else(|| BevyAIError::Config(config::ConfigError::Message("Could not find home directory".to_string())))?
422 .join(".bevy-agent-config.json"))
423 }
424
425 pub fn load_or_create() -> Result<Self> {
427 let config_path = Self::default_config_path()?;
428
429 if config_path.exists() {
430 Self::from_file(&config_path)
431 } else {
432 let config = Self::from_env()?;
433 config.save_to_file(&config_path)?;
434 Ok(config)
435 }
436 }
437
438 pub fn get_api_key(&self, model: &ModelType) -> Result<String> {
440 match model.provider() {
441 "openai" => self.openai.as_ref()
442 .map(|c| c.api_key.clone())
443 .ok_or_else(|| BevyAIError::missing_api_key("OpenAI")),
444 "anthropic" => self.anthropic.as_ref()
445 .map(|c| c.api_key.clone())
446 .ok_or_else(|| BevyAIError::missing_api_key("Anthropic")),
447 "google" => self.google.as_ref()
448 .map(|c| c.api_key.clone())
449 .ok_or_else(|| BevyAIError::missing_api_key("Google")),
450 provider => Err(BevyAIError::unsupported_model(provider)),
451 }
452 }
453
454 pub fn is_model_available(&self, model: &ModelType) -> bool {
456 self.get_api_key(model).is_ok()
457 }
458
459 pub fn available_models(&self) -> Vec<ModelType> {
461 let all_models = vec![
462 ModelType::GPT4,
463 ModelType::GPT4Turbo,
464 ModelType::GPT35Turbo,
465 ModelType::Claude3Opus,
466 ModelType::Claude3Sonnet,
467 ModelType::Claude3Haiku,
468 ModelType::GeminiPro,
469 ModelType::GeminiProVision,
470 ];
471
472 all_models.into_iter()
473 .filter(|model| self.is_model_available(model))
474 .collect()
475 }
476}