// bevy_agent/config.rs

//! Configuration management for Bevy AI
//!
//! This module handles all configuration aspects of the Bevy AI system, including:
//!
//! - AI provider configurations (OpenAI, Anthropic, Google)
//! - Project-specific settings
//! - Environment variable handling
//! - Configuration file parsing (JSON)
//!
//! # Configuration Sources
//!
//! [`AIConfig::load_or_create`] resolves configuration from the following
//! sources, in order:
//!
//! 1. The user configuration file (`~/.bevy-agent-config.json`), if it exists
//! 2. Environment variables (`OPENAI_API_KEY`, `ANTHROPIC_API_KEY`, etc.)
//! 3. Default values
//!
//! Per-project state (conversations, generated files, dependencies) is
//! tracked separately via [`ProjectConfig`] in `.bevy-agent.json`.
//!
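//! # File Format
//!
//! The configuration file is plain JSON mirroring [`AIConfig`]'s fields. An
//! illustrative, hand-written sample (the key value is a placeholder; the
//! remaining values are the defaults from this module):
//!
//! ```json
//! {
//!   "openai": { "api_key": "sk-...", "organization": null, "base_url": null },
//!   "anthropic": null,
//!   "google": null,
//!   "default_model": "gpt-4",
//!   "generation": {
//!     "temperature": 0.7,
//!     "max_tokens": 4000,
//!     "include_comments": true,
//!     "generate_tests": false,
//!     "bevy_version": "0.12",
//!     "rust_edition": "2021"
//!   },
//!   "project": {
//!     "track_conversations": true,
//!     "auto_format": true,
//!     "auto_dependencies": true,
//!     "default_template": "basic"
//!   }
//! }
//! ```
//!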
//! # Example
//!
//! ```rust,no_run
//! use bevy_agent::config::{AIConfig, OpenAIConfig};
//!
//! // Load from environment
//! let config = AIConfig::from_env().unwrap_or_default();
//!
//! // Or create manually
//! let config = AIConfig {
//!     openai: Some(OpenAIConfig {
//!         api_key: "your-key-here".to_string(),
//!         organization: None,
//!         base_url: None,
//!     }),
//!     ..Default::default()
//! };
//! ```

use crate::error::{BevyAIError, Result};
use serde::{Deserialize, Serialize};
use std::env;
use std::fs;
use std::path::PathBuf;

/// Main configuration for the Bevy AI system
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AIConfig {
    /// OpenAI API configuration
    pub openai: Option<OpenAIConfig>,
    /// Anthropic API configuration
    pub anthropic: Option<AnthropicConfig>,
    /// Google API configuration
    pub google: Option<GoogleConfig>,
    /// Default AI model to use
    pub default_model: ModelType,
    /// Default generation settings
    pub generation: GenerationConfig,
    /// Project settings
    pub project: ProjectSettings,
}

/// OpenAI API configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OpenAIConfig {
    /// OpenAI API key
    pub api_key: String,
    /// Optional organization ID
    pub organization: Option<String>,
    /// Optional custom base URL
    pub base_url: Option<String>,
}

/// Anthropic API configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AnthropicConfig {
    /// Anthropic API key
    pub api_key: String,
    /// Optional custom base URL
    pub base_url: Option<String>,
}

/// Google API configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GoogleConfig {
    /// Google API key
    pub api_key: String,
    /// Optional custom base URL
    pub base_url: Option<String>,
}

/// Available AI models
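///
/// Variants serialize to their provider wire identifiers via the
/// `serde(rename)` attributes below. A quick illustration:
///
/// ```
/// use bevy_agent::config::ModelType;
///
/// let json = serde_json::to_string(&ModelType::GPT4).unwrap();
/// assert_eq!(json, "\"gpt-4\"");
/// ```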
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub enum ModelType {
    /// OpenAI GPT-4 model
    #[serde(rename = "gpt-4")]
    GPT4,
    /// OpenAI GPT-4 Turbo model
    #[serde(rename = "gpt-4-turbo")]
    GPT4Turbo,
    /// OpenAI GPT-3.5 Turbo model
    #[serde(rename = "gpt-3.5-turbo")]
    GPT35Turbo,
    /// Anthropic Claude 3 Opus model
    #[serde(rename = "claude-3-opus")]
    Claude3Opus,
    /// Anthropic Claude 3 Sonnet model
    #[serde(rename = "claude-3-sonnet")]
    Claude3Sonnet,
    /// Anthropic Claude 3 Haiku model
    #[serde(rename = "claude-3-haiku")]
    Claude3Haiku,
    /// Google Gemini Pro model
    #[serde(rename = "gemini-pro")]
    GeminiPro,
    /// Google Gemini Pro Vision model
    #[serde(rename = "gemini-pro-vision")]
    GeminiProVision,
}

impl ModelType {
    /// Get the string representation of the model
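    ///
    /// The returned value matches the serde wire name. A one-line check:
    ///
    /// ```
    /// use bevy_agent::config::ModelType;
    /// assert_eq!(ModelType::GPT4Turbo.as_str(), "gpt-4-turbo");
    /// ```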
    pub fn as_str(&self) -> &'static str {
        match self {
            ModelType::GPT4 => "gpt-4",
            ModelType::GPT4Turbo => "gpt-4-turbo",
            ModelType::GPT35Turbo => "gpt-3.5-turbo",
            ModelType::Claude3Opus => "claude-3-opus",
            ModelType::Claude3Sonnet => "claude-3-sonnet",
            ModelType::Claude3Haiku => "claude-3-haiku",
            ModelType::GeminiPro => "gemini-pro",
            ModelType::GeminiProVision => "gemini-pro-vision",
        }
    }

    /// Get the provider for this model
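    ///
    /// Useful for routing a request to the right client. For example:
    ///
    /// ```
    /// use bevy_agent::config::ModelType;
    /// assert_eq!(ModelType::Claude3Opus.provider(), "anthropic");
    /// assert_eq!(ModelType::GeminiPro.provider(), "google");
    /// ```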
    pub fn provider(&self) -> &'static str {
        match self {
            ModelType::GPT4 | ModelType::GPT4Turbo | ModelType::GPT35Turbo => "openai",
            ModelType::Claude3Opus | ModelType::Claude3Sonnet | ModelType::Claude3Haiku => "anthropic",
            ModelType::GeminiPro | ModelType::GeminiProVision => "google",
        }
    }

    /// Check if this model supports vision/image inputs
    pub fn supports_vision(&self) -> bool {
        // Base GPT-4 is text-only; of the OpenAI models listed here, only
        // GPT-4 Turbo accepts image inputs.
        matches!(self, ModelType::GPT4Turbo | ModelType::GeminiProVision)
    }

    /// Get maximum context length for this model
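    ///
    /// These limits are hardcoded snapshots of provider-published values and
    /// may need updating as models evolve. For example:
    ///
    /// ```
    /// use bevy_agent::config::ModelType;
    /// assert_eq!(ModelType::Claude3Opus.max_context_length(), 200_000);
    /// ```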
    pub fn max_context_length(&self) -> usize {
        match self {
            ModelType::GPT4 => 8192,
            ModelType::GPT4Turbo => 128000,
            ModelType::GPT35Turbo => 16385,
            ModelType::Claude3Opus => 200000,
            ModelType::Claude3Sonnet => 200000,
            ModelType::Claude3Haiku => 200000,
            ModelType::GeminiPro => 32768,
            ModelType::GeminiProVision => 16384,
        }
    }
}

impl std::fmt::Display for ModelType {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.as_str())
    }
}

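/// Parses the identifiers produced by [`ModelType::as_str`], so string
/// round-trips are lossless. A minimal sketch:
///
/// ```
/// use bevy_agent::config::ModelType;
/// assert_eq!("gemini-pro".parse::<ModelType>().ok(), Some(ModelType::GeminiPro));
/// ```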
impl std::str::FromStr for ModelType {
    type Err = BevyAIError;

    fn from_str(s: &str) -> Result<Self> {
        match s {
            "gpt-4" => Ok(ModelType::GPT4),
            "gpt-4-turbo" => Ok(ModelType::GPT4Turbo),
            "gpt-3.5-turbo" => Ok(ModelType::GPT35Turbo),
            "claude-3-opus" => Ok(ModelType::Claude3Opus),
            "claude-3-sonnet" => Ok(ModelType::Claude3Sonnet),
            "claude-3-haiku" => Ok(ModelType::Claude3Haiku),
            "gemini-pro" => Ok(ModelType::GeminiPro),
            "gemini-pro-vision" => Ok(ModelType::GeminiProVision),
            _ => Err(BevyAIError::unsupported_model(s)),
        }
    }
}

/// Code generation configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GenerationConfig {
    /// Temperature for AI generation (0.0 to 1.0)
    pub temperature: f32,
    /// Maximum tokens to generate
    pub max_tokens: u32,
    /// Whether to include explanatory comments
    pub include_comments: bool,
    /// Whether to generate tests
    pub generate_tests: bool,
    /// Bevy version to target
    pub bevy_version: String,
    /// Rust edition to use
    pub rust_edition: String,
}

/// Project-level settings
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProjectSettings {
    /// Whether to track AI conversations
    pub track_conversations: bool,
    /// Whether to auto-format generated code
    pub auto_format: bool,
    /// Whether to auto-detect dependencies
    pub auto_dependencies: bool,
    /// Default project template
    pub default_template: String,
}

/// Project configuration stored in .bevy-agent.json
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProjectConfig {
    /// Project metadata
    pub metadata: ProjectMetadata,
    /// AI conversation history
    pub conversations: Vec<ConversationEntry>,
    /// Generated files tracking
    pub generated_files: Vec<GeneratedFile>,
    /// Project dependencies
    pub dependencies: Vec<Dependency>,
    /// Custom templates
    pub templates: Vec<CustomTemplate>,
}

/// Project metadata
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProjectMetadata {
    /// Project name
    pub name: String,
    /// Project description
    pub description: String,
    /// Project version
    pub version: String,
    /// Creation timestamp
    pub created_at: chrono::DateTime<chrono::Utc>,
    /// Last update timestamp
    pub updated_at: chrono::DateTime<chrono::Utc>,
    /// Bevy engine version used
    pub bevy_version: String,
    /// Enabled features
    pub features: Vec<String>,
    /// Project tags
    pub tags: Vec<String>,
}

/// Conversation history entry
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ConversationEntry {
    /// Unique identifier for this conversation
    pub id: uuid::Uuid,
    /// The user's request/prompt
    pub request: String,
    /// The AI's response
    pub response: String,
    /// The AI model that was used
    pub model_used: ModelType,
    /// When this conversation occurred
    pub timestamp: chrono::DateTime<chrono::Utc>,
    /// Number of tokens used (if available)
    pub tokens_used: Option<u32>,
    /// Cost of the API call (if available)
    pub cost: Option<f64>,
    /// List of files that were modified in this conversation
    pub files_modified: Vec<String>,
}

/// Generated file tracking
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GeneratedFile {
    /// Path to the generated file
    pub path: String,
    /// What generated this file (e.g., "AI", "template")
    pub generator: String,
    /// The AI model that generated this file
    pub model: ModelType,
    /// When this file was created
    pub created_at: chrono::DateTime<chrono::Utc>,
    /// File checksum for integrity verification
    pub checksum: String,
}

/// Dependency information
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Dependency {
    /// Name of the dependency
    pub name: String,
    /// Version requirement for the dependency
    pub version: String,
    /// Features to enable for this dependency
    pub features: Vec<String>,
    /// Reason why this dependency was added
    pub reason: String,
    /// Which AI model added this dependency
    pub added_by: ModelType,
    /// When this dependency was added
    pub added_at: chrono::DateTime<chrono::Utc>,
}

/// Custom template definition
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CustomTemplate {
    /// Name of the template
    pub name: String,
    /// Description of what this template does
    pub description: String,
    /// Path to the template file
    pub template_path: String,
    /// Variables that can be customized in this template
    pub variables: Vec<TemplateVariable>,
    /// When this template was created
    pub created_at: chrono::DateTime<chrono::Utc>,
}

/// Template variable definition
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TemplateVariable {
    /// Name of the variable
    pub name: String,
    /// Description of what this variable is for
    pub description: String,
    /// Default value for this variable (if any)
    pub default_value: Option<String>,
    /// Whether this variable is required
    pub required: bool,
}

impl Default for AIConfig {
    fn default() -> Self {
        Self {
            openai: None,
            anthropic: None,
            google: None,
            default_model: ModelType::GPT4,
            generation: GenerationConfig::default(),
            project: ProjectSettings::default(),
        }
    }
}

impl Default for GenerationConfig {
    fn default() -> Self {
        Self {
            temperature: 0.7,
            max_tokens: 4000,
            include_comments: true,
            generate_tests: false,
            bevy_version: "0.12".to_string(),
            rust_edition: "2021".to_string(),
        }
    }
}

impl Default for ProjectSettings {
    fn default() -> Self {
        Self {
            track_conversations: true,
            auto_format: true,
            auto_dependencies: true,
            default_template: "basic".to_string(),
        }
    }
}

impl AIConfig {
    /// Load configuration from environment variables
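    ///
    /// Recognized variables: `OPENAI_API_KEY`, `OPENAI_ORGANIZATION`,
    /// `OPENAI_BASE_URL`, `ANTHROPIC_API_KEY`, `ANTHROPIC_BASE_URL`,
    /// `GOOGLE_API_KEY`, `GOOGLE_BASE_URL`, and `BEVY_AGENT_DEFAULT_MODEL`.
    /// A minimal sketch:
    ///
    /// ```rust,no_run
    /// use bevy_agent::config::AIConfig;
    ///
    /// let config = AIConfig::from_env().unwrap_or_default();
    /// for model in config.available_models() {
    ///     println!("available: {model}");
    /// }
    /// ```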
    pub fn from_env() -> Result<Self> {
        let mut config = Self::default();

        if let Ok(api_key) = env::var("OPENAI_API_KEY") {
            config.openai = Some(OpenAIConfig {
                api_key,
                organization: env::var("OPENAI_ORGANIZATION").ok(),
                base_url: env::var("OPENAI_BASE_URL").ok(),
            });
        }

        if let Ok(api_key) = env::var("ANTHROPIC_API_KEY") {
            config.anthropic = Some(AnthropicConfig {
                api_key,
                base_url: env::var("ANTHROPIC_BASE_URL").ok(),
            });
        }

        if let Ok(api_key) = env::var("GOOGLE_API_KEY") {
            config.google = Some(GoogleConfig {
                api_key,
                base_url: env::var("GOOGLE_BASE_URL").ok(),
            });
        }

        if let Ok(model) = env::var("BEVY_AGENT_DEFAULT_MODEL") {
            config.default_model = model.parse()?;
        }

        Ok(config)
    }

    /// Load configuration from file
    pub fn from_file<P: AsRef<std::path::Path>>(path: P) -> Result<Self> {
        let content = fs::read_to_string(path)?;
        Ok(serde_json::from_str(&content)?)
    }

    /// Save configuration to file
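    ///
    /// Writes pretty-printed JSON. A round-trip sketch (the path here is
    /// illustrative):
    ///
    /// ```rust,no_run
    /// use bevy_agent::config::AIConfig;
    ///
    /// let config = AIConfig::default();
    /// config.save_to_file("bevy-agent-config.json").ok();
    /// assert!(AIConfig::from_file("bevy-agent-config.json").is_ok());
    /// ```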
    pub fn save_to_file<P: AsRef<std::path::Path>>(&self, path: P) -> Result<()> {
        let content = serde_json::to_string_pretty(self)?;
        fs::write(path, content)?;
        Ok(())
    }

    /// Get the default config file path
    pub fn default_config_path() -> Result<PathBuf> {
        Ok(dirs::home_dir()
            .ok_or_else(|| BevyAIError::Config(config::ConfigError::Message("Could not find home directory".to_string())))?
            .join(".bevy-agent-config.json"))
    }

    /// Load configuration from default location or create if not exists
    pub fn load_or_create() -> Result<Self> {
        let config_path = Self::default_config_path()?;

        if config_path.exists() {
            Self::from_file(&config_path)
        } else {
            let config = Self::from_env()?;
            config.save_to_file(&config_path)?;
            Ok(config)
        }
    }

    /// Get API key for a specific model
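    ///
    /// Fails with a missing-key error when the model's provider has no
    /// configuration. A minimal sketch:
    ///
    /// ```rust,no_run
    /// use bevy_agent::config::{AIConfig, ModelType};
    ///
    /// let config = AIConfig::from_env().unwrap_or_default();
    /// match config.get_api_key(&ModelType::Claude3Sonnet) {
    ///     Ok(key) => println!("Anthropic key loaded ({} chars)", key.len()),
    ///     Err(e) => eprintln!("Anthropic not configured: {e}"),
    /// }
    /// ```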
    pub fn get_api_key(&self, model: &ModelType) -> Result<String> {
        match model.provider() {
            "openai" => self.openai.as_ref()
                .map(|c| c.api_key.clone())
                .ok_or_else(|| BevyAIError::missing_api_key("OpenAI")),
            "anthropic" => self.anthropic.as_ref()
                .map(|c| c.api_key.clone())
                .ok_or_else(|| BevyAIError::missing_api_key("Anthropic")),
            "google" => self.google.as_ref()
                .map(|c| c.api_key.clone())
                .ok_or_else(|| BevyAIError::missing_api_key("Google")),
            provider => Err(BevyAIError::unsupported_model(provider)),
        }
    }

    /// Check if a model is available (has API key configured)
    pub fn is_model_available(&self, model: &ModelType) -> bool {
        self.get_api_key(model).is_ok()
    }

    /// Get list of available models
    pub fn available_models(&self) -> Vec<ModelType> {
        let all_models = vec![
            ModelType::GPT4,
            ModelType::GPT4Turbo,
            ModelType::GPT35Turbo,
            ModelType::Claude3Opus,
            ModelType::Claude3Sonnet,
            ModelType::Claude3Haiku,
            ModelType::GeminiPro,
            ModelType::GeminiProVision,
        ];

        all_models.into_iter()
            .filter(|model| self.is_model_available(model))
            .collect()
    }
}
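
#[cfg(test)]
mod tests {
    //! Illustrative sanity checks for the types above. They rely only on
    //! dependencies this module already uses; the test names and the
    //! temp-file name below are arbitrary.
    use super::*;

    #[test]
    fn model_type_round_trips_through_strings() {
        let models = [
            ModelType::GPT4,
            ModelType::GPT4Turbo,
            ModelType::GPT35Turbo,
            ModelType::Claude3Opus,
            ModelType::Claude3Sonnet,
            ModelType::Claude3Haiku,
            ModelType::GeminiPro,
            ModelType::GeminiProVision,
        ];
        for model in models {
            // `as_str` and `FromStr` must stay in sync with the serde renames.
            assert_eq!(model.as_str().parse::<ModelType>().ok(), Some(model.clone()));
        }
    }

    #[test]
    fn default_config_exposes_no_models() {
        let config = AIConfig::default();
        assert!(config.openai.is_none());
        assert!(config.anthropic.is_none());
        assert!(config.google.is_none());
        // Without API keys configured, no model should report as available.
        assert!(config.available_models().is_empty());
    }

    #[test]
    fn config_survives_a_json_round_trip() {
        let path = std::env::temp_dir().join("bevy-agent-config-test.json");
        let config = AIConfig::default();
        assert!(config.save_to_file(&path).is_ok());
        let reloaded = AIConfig::from_file(&path);
        let _ = fs::remove_file(&path);
        assert_eq!(reloaded.ok().map(|c| c.default_model), Some(config.default_model));
    }
}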