bevy_agent/
cli.rs

1//! Command-line interface for Bevy AI
2//! 
3//! This module provides a comprehensive CLI for interacting with the Bevy AI system.
4//! It supports game creation, feature addition, code improvement, debugging, and project management.
5
6use crate::{
7    ai::BevyAIAgent,
8    config::{AIConfig, ModelType},
9    error::{BevyAIError, Result},
10    game_templates::{TemplateManager, TemplateContext},
11    project::{Project, ProjectManager},
12    utils::{build_utils, config_utils},
13};
14use clap::{Parser, Subcommand, ValueEnum};
15use std::path::PathBuf;
16use tracing::{error, info, warn};
17
/// Bevy AI Agent - AI-powered Bevy game development assistant
// NOTE: the `///` doc comments on fields below double as clap-generated
// `--help` text; edit them with that in mind.
#[derive(Parser)]
#[command(name = "bevy-agent")]
#[command(about = "AI-powered Bevy game prototyping assistant with GPT/Claude integration")]
#[command(version = crate::VERSION)]
pub struct Cli {
    /// The command to execute
    #[command(subcommand)]
    pub command: Commands,

    /// Enable verbose logging
    // `global = true` lets the flag appear after any subcommand as well.
    #[arg(short, long, global = true)]
    pub verbose: bool,

    /// Configuration file path
    // When omitted, CliHandler::new falls back to AIConfig::load_or_create().
    #[arg(short, long, global = true)]
    pub config: Option<PathBuf>,
}
36
/// Available CLI commands
// One variant per top-level `bevy-agent <command>`; dispatched in
// CliHandler::handle. Field doc comments are rendered as clap help text.
#[derive(Subcommand)]
pub enum Commands {
    /// Generate a new game prototype from natural language description
    Create {
        /// Describe the game you want to create
        description: String,

        /// AI model to use (gpt-4, claude-3-opus, etc.)
        // NOTE(review): accepted but not yet applied in handle_create
        // (parameter is ignored there) — confirm intended behavior.
        #[arg(long)]
        model: Option<ModelType>,

        /// Project name (defaults to generated name)
        #[arg(long)]
        name: Option<String>,

        /// Output directory
        #[arg(long, short)]
        output: Option<PathBuf>,

        /// Use a specific template
        #[arg(long)]
        template: Option<String>,

        /// Don't create a new directory, use current directory
        #[arg(long)]
        in_place: bool,
    },

    /// Add features to existing prototype
    Add {
        /// Describe what to add to the game
        feature: String,

        /// AI model to use
        #[arg(long)]
        model: Option<ModelType>,

        /// Project directory (defaults to current directory)
        #[arg(long, short)]
        project: Option<PathBuf>,
    },

    /// Refactor or improve existing code
    Improve {
        /// What to improve (performance, readability, features)
        aspect: ImprovementAspect,

        /// AI model to use
        #[arg(long)]
        model: Option<ModelType>,

        /// Specific file to improve (defaults to main.rs)
        #[arg(long)]
        file: Option<PathBuf>,

        /// Project directory (defaults to current directory)
        #[arg(long, short)]
        project: Option<PathBuf>,
    },

    /// Ask AI to explain the current codebase
    Explain {
        /// AI model to use
        #[arg(long)]
        model: Option<ModelType>,

        /// Specific file to explain (defaults to main.rs)
        #[arg(long)]
        file: Option<PathBuf>,

        /// Project directory (defaults to current directory)
        #[arg(long, short)]
        project: Option<PathBuf>,
    },

    /// Debug code issues with AI assistance
    Debug {
        /// Error message or description of the issue
        error: String,

        /// AI model to use
        #[arg(long)]
        model: Option<ModelType>,

        /// Specific file with the issue (defaults to main.rs)
        #[arg(long)]
        file: Option<PathBuf>,

        /// Project directory (defaults to current directory)
        #[arg(long, short)]
        project: Option<PathBuf>,
    },

    /// Initialize a new Bevy project with AI agent support
    Init {
        /// Project name
        name: String,

        /// Project description
        #[arg(long, short)]
        description: Option<String>,

        /// Output directory
        #[arg(long, short)]
        output: Option<PathBuf>,

        /// Use a specific template
        #[arg(long)]
        template: Option<String>,
    },

    /// Configure AI API keys and settings
    Config {
        /// OpenAI API key
        #[arg(long)]
        openai_key: Option<String>,

        /// Anthropic API key
        #[arg(long)]
        anthropic_key: Option<String>,

        /// Google API key
        #[arg(long)]
        google_key: Option<String>,

        /// Default AI model
        #[arg(long)]
        default_model: Option<ModelType>,

        /// Show current configuration
        // Read-only; handle_config returns early when set.
        #[arg(long)]
        show: bool,

        /// Validate configuration
        // Read-only; handle_config returns early when set.
        #[arg(long)]
        validate: bool,
    },

    /// Build operations
    Build {
        /// Build operation to perform
        #[command(subcommand)]
        operation: BuildOperation,
    },

    /// Project management commands
    Project {
        /// Project operation to perform
        #[command(subcommand)]
        operation: ProjectOperation,
    },

    /// Template management commands
    Template {
        /// Template operation to perform
        #[command(subcommand)]
        operation: TemplateOperation,
    },
}
197
/// Build and development operations
// Subcommands of `bevy-agent build`; dispatched in handle_build_operation.
#[derive(Subcommand)]
pub enum BuildOperation {
    /// Build the current prototype
    Build {
        /// Project directory (defaults to current directory)
        #[arg(long, short)]
        project: Option<PathBuf>,

        /// Release build
        // NOTE(review): handle_build_operation currently ignores this flag —
        // confirm whether ProjectManager::build should receive it.
        #[arg(long)]
        release: bool,
    },

    /// Run the current prototype
    Run {
        /// Project directory (defaults to current directory)
        #[arg(long, short)]
        project: Option<PathBuf>,

        /// Release build
        // NOTE(review): ignored by the handler, see above.
        #[arg(long)]
        release: bool,

        /// Additional arguments to pass to the program
        // `last = true`: everything after `--` is collected verbatim.
        #[arg(last = true)]
        args: Vec<String>,
    },

    /// Check code for errors
    Check {
        /// Project directory (defaults to current directory)
        #[arg(long, short)]
        project: Option<PathBuf>,
    },

    /// Run clippy lints
    Clippy {
        /// Project directory (defaults to current directory)
        #[arg(long, short)]
        project: Option<PathBuf>,
    },

    /// Format code
    Format {
        /// Project directory (defaults to current directory)
        #[arg(long, short)]
        project: Option<PathBuf>,
    },

    /// Run tests
    // NOTE(review): the handler logs "not yet implemented" for this variant.
    Test {
        /// Project directory (defaults to current directory)
        #[arg(long, short)]
        project: Option<PathBuf>,
    },
}
255
/// Project management operations
// Subcommands of `bevy-agent project`; dispatched in handle_project_operation.
#[derive(Subcommand)]
pub enum ProjectOperation {
    /// Show project information
    Info {
        /// Project directory (defaults to current directory)
        #[arg(long, short)]
        project: Option<PathBuf>,
    },

    /// Show project statistics
    Stats {
        /// Project directory (defaults to current directory)
        #[arg(long, short)]
        project: Option<PathBuf>,
    },

    /// Show conversation history
    History {
        /// Project directory (defaults to current directory)
        #[arg(long, short)]
        project: Option<PathBuf>,

        /// Number of recent conversations to show
        #[arg(long, default_value = "10")]
        limit: usize,
    },

    /// Export project data
    // NOTE(review): the handler logs "not yet implemented" for this variant.
    Export {
        /// Output file path
        output: PathBuf,

        /// Project directory (defaults to current directory)
        #[arg(long, short)]
        project: Option<PathBuf>,

        /// Export format
        #[arg(long, default_value = "json")]
        format: ExportFormat,
    },

    /// Clean generated files
    Clean {
        /// Project directory (defaults to current directory)
        #[arg(long, short)]
        project: Option<PathBuf>,

        /// Also clean Cargo build artifacts
        // When set, the handler shells out to `cargo clean`.
        #[arg(long)]
        cargo: bool,
    },
}
309
/// Template management operations
// Subcommands of `bevy-agent template`; dispatched in
// handle_template_operation. `Create` and `Apply` are still stubs there.
#[derive(Subcommand)]
pub enum TemplateOperation {
    /// List available templates
    List,

    /// Show template details
    Show {
        /// Template name
        name: String,
    },

    /// Create a new custom template
    Create {
        /// Template name
        name: String,

        /// Template description
        description: String,

        /// Template file path
        file: PathBuf,
    },

    /// Apply a template to current project
    Apply {
        /// Template name
        name: String,

        /// Project directory (defaults to current directory)
        #[arg(long, short)]
        project: Option<PathBuf>,
    },
}
344
/// Aspects of code that can be improved
// Used as the positional argument of `bevy-agent improve`. The Display impl
// below must stay in sync with these variants (it feeds the AI prompt).
#[derive(ValueEnum, Clone, Debug)]
pub enum ImprovementAspect {
    /// Improve code performance and efficiency
    Performance,
    /// Improve code readability and maintainability
    Readability,
    /// Add or improve features
    Features,
    /// Improve code structure and organization
    Structure,
    /// Add or improve tests
    Testing,
    /// Add or improve documentation
    Documentation,
    /// Improve security aspects
    Security,
    /// Improve overall efficiency
    // NOTE(review): overlaps with Performance — confirm both are intended.
    Efficiency,
}
365
/// Export formats for project data
// Accepted by `bevy-agent project export --format`.
// NOTE(review): export itself is not implemented yet (see the
// ProjectOperation::Export arm of handle_project_operation).
#[derive(ValueEnum, Clone, Debug)]
pub enum ExportFormat {
    /// JSON format
    Json,
    /// YAML format
    Yaml,
    /// TOML format
    Toml,
    /// Markdown format
    Markdown,
}
378
379impl std::fmt::Display for ImprovementAspect {
380    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
381        match self {
382            ImprovementAspect::Performance => write!(f, "performance"),
383            ImprovementAspect::Readability => write!(f, "readability"),
384            ImprovementAspect::Features => write!(f, "features"),
385            ImprovementAspect::Structure => write!(f, "structure"),
386            ImprovementAspect::Testing => write!(f, "testing"),
387            ImprovementAspect::Documentation => write!(f, "documentation"),
388            ImprovementAspect::Security => write!(f, "security"),
389            ImprovementAspect::Efficiency => write!(f, "efficiency"),
390        }
391    }
392}
393
/// Main CLI handler
///
/// Owns the loaded configuration and the (optionally constructed) AI agent,
/// and dispatches every parsed subcommand.
pub struct CliHandler {
    // Loaded AI configuration (API keys, default model, etc.).
    config: AIConfig,
    // `None` when agent construction failed (e.g. no API keys configured);
    // handlers that need AI surface a configuration error in that case.
    agent: Option<BevyAIAgent>,
}
399
400impl CliHandler {
401    /// Create a new CLI handler
402    pub async fn new(config_path: Option<PathBuf>) -> Result<Self> {
403        let config = if let Some(path) = config_path {
404            AIConfig::from_file(path)?
405        } else {
406            AIConfig::load_or_create()?
407        };
408        
409        let agent = BevyAIAgent::new(config.clone()).await.ok();
410        
411        Ok(Self { config, agent })
412    }
413    
414    /// Handle CLI commands
415    pub async fn handle(&mut self, cli: Cli) -> Result<()> {
416        // Initialize logging
417        if cli.verbose {
418            tracing_subscriber::fmt()
419                .with_env_filter("bevy_agent=debug")
420                .init();
421        } else {
422            tracing_subscriber::fmt()
423                .with_env_filter("bevy_agent=info")
424                .init();
425        }
426        
427        match cli.command {
428            Commands::Create { description, model, name, output, template, in_place } => {
429                self.handle_create(description, model, name, output, template, in_place).await
430            }
431            Commands::Add { feature, model, project } => {
432                self.handle_add(feature, model, project).await
433            }
434            Commands::Improve { aspect, model, file, project } => {
435                self.handle_improve(aspect, model, file, project).await
436            }
437            Commands::Explain { model, file, project } => {
438                self.handle_explain(model, file, project).await
439            }
440            Commands::Debug { error, model, file, project } => {
441                self.handle_debug(error, model, file, project).await
442            }
443            Commands::Init { name, description, output, template } => {
444                self.handle_init(name, description, output, template).await
445            }
446            Commands::Config { openai_key, anthropic_key, google_key, default_model, show, validate } => {
447                self.handle_config(openai_key, anthropic_key, google_key, default_model, show, validate).await
448            }
449            Commands::Build { operation } => {
450                self.handle_build_operation(operation).await
451            }
452            Commands::Project { operation } => {
453                self.handle_project_operation(operation).await
454            }
455            Commands::Template { operation } => {
456                self.handle_template_operation(operation).await
457            }
458        }
459    }
460    
461    async fn handle_create(
462        &mut self,
463        description: String,
464        _model: Option<ModelType>,
465        name: Option<String>,
466        output: Option<PathBuf>,
467        template: Option<String>,
468        in_place: bool,
469    ) -> Result<()> {
470        let agent = self.agent.as_ref()
471            .ok_or_else(|| BevyAIError::ai_api("No AI agent available. Please configure API keys.".to_string()))?;
472        
473        let project_name = name.unwrap_or_else(|| self.generate_project_name(&description));
474        let project_path = if in_place {
475            std::env::current_dir()?
476        } else {
477            output.unwrap_or_else(|| std::env::current_dir().unwrap().join(&project_name))
478        };
479        
480        info!("Creating game '{}' from description: {}", project_name, description);
481        
482        if let Some(template_name) = template {
483            // Use template-based generation
484            let template_manager = TemplateManager::new()?;
485            let context = TemplateContext::new(project_name.clone(), description.clone());
486            let code = template_manager.generate(&template_name, &context)?;
487            
488            // Create project structure manually
489            let mut project_manager = ProjectManager::new(&project_path);
490            project_manager.init(&project_name, &description).await?;
491            
492            // Write generated code
493            std::fs::write(project_path.join("src/main.rs"), code)?;
494        } else {
495            // Use AI generation
496            let mut project = Project::init(project_path.clone(), &project_name, &description, agent.clone()).await?;
497            let response = project.generate_game(&description).await?;
498            
499            info!("Game '{}' created successfully!", project_name);
500            info!("Project location: {}", project_path.display());
501            info!("To run: cd {} && cargo run", project_path.display());
502            
503            if let Some(tokens) = response.tokens_used {
504                info!("Tokens used: {}", tokens);
505            }
506        }
507        
508        Ok(())
509    }
510    
511    async fn handle_add(&mut self, feature: String, _model: Option<ModelType>, project: Option<PathBuf>) -> Result<()> {
512        let agent = self.agent.as_ref()
513            .ok_or_else(|| BevyAIError::ai_api("No AI agent available. Please configure API keys.".to_string()))?;
514        
515        let project_path = project.unwrap_or_else(|| std::env::current_dir().unwrap());
516        
517        info!("Adding feature: {}", feature);
518        
519        let mut project = Project::new(project_path, agent.clone()).await?;
520        let response = project.add_feature(&feature).await?;
521        
522        info!("Feature added successfully!");
523        
524        if let Some(tokens) = response.tokens_used {
525            info!("Tokens used: {}", tokens);
526        }
527        
528        Ok(())
529    }
530    
531    async fn handle_improve(
532        &mut self,
533        aspect: ImprovementAspect,
534        _model: Option<ModelType>,
535        file: Option<PathBuf>,
536        project: Option<PathBuf>,
537    ) -> Result<()> {
538        let agent = self.agent.as_ref()
539            .ok_or_else(|| BevyAIError::ai_api("No AI agent available. Please configure API keys.".to_string()))?;
540        
541        let project_path = project.unwrap_or_else(|| std::env::current_dir().unwrap());
542        let file_path = file.unwrap_or_else(|| project_path.join("src/main.rs"));
543        
544        if !file_path.exists() {
545            return Err(BevyAIError::file_operation("read", &file_path.display().to_string()));
546        }
547        
548        let code = std::fs::read_to_string(&file_path)?;
549        
550        info!("Improving {} of {}", aspect, file_path.display());
551        
552        let response = agent
553            .improve_code(aspect.to_string(), code)
554            .with_model(_model.unwrap_or(self.config.default_model.clone()))
555            .execute()
556            .await?;
557        
558        let improved_code = agent.extract_code(&response.content);
559        
560        // Create backup
561        crate::utils::fs_utils::backup_file(&file_path)?;
562        
563        // Write improved code
564        std::fs::write(&file_path, improved_code)?;
565        
566        info!("Code improved successfully!");
567        info!("Backup created for original file");
568        
569        if let Some(tokens) = response.tokens_used {
570            info!("Tokens used: {}", tokens);
571        }
572        
573        Ok(())
574    }
575    
576    async fn handle_explain(
577        &mut self,
578        _model: Option<ModelType>,
579        file: Option<PathBuf>,
580        project: Option<PathBuf>,
581    ) -> Result<()> {
582        let agent = self.agent.as_ref()
583            .ok_or_else(|| BevyAIError::ai_api("No AI agent available. Please configure API keys.".to_string()))?;
584        
585        let project_path = project.unwrap_or_else(|| std::env::current_dir().unwrap());
586        let file_path = file.unwrap_or_else(|| project_path.join("src/main.rs"));
587        
588        if !file_path.exists() {
589            return Err(BevyAIError::file_operation("read", &file_path.display().to_string()));
590        }
591        
592        let code = std::fs::read_to_string(&file_path)?;
593        
594        info!("Explaining code in {}", file_path.display());
595        
596        let response = agent
597            .explain_code(code)
598            .with_model(_model.unwrap_or(self.config.default_model.clone()))
599            .execute()
600            .await?;
601        
602        println!("\nAI Code Explanation:\n");
603        println!("{}", response.content);
604        
605        if let Some(tokens) = response.tokens_used {
606            info!("Tokens used: {}", tokens);
607        }
608        
609        Ok(())
610    }
611    
612    async fn handle_debug(
613        &mut self,
614        error: String,
615        _model: Option<ModelType>,
616        file: Option<PathBuf>,
617        project: Option<PathBuf>,
618    ) -> Result<()> {
619        let agent = self.agent.as_ref()
620            .ok_or_else(|| BevyAIError::ai_api("No AI agent available. Please configure API keys.".to_string()))?;
621        
622        let project_path = project.unwrap_or_else(|| std::env::current_dir().unwrap());
623        let file_path = file.unwrap_or_else(|| project_path.join("src/main.rs"));
624        
625        if !file_path.exists() {
626            return Err(BevyAIError::file_operation("read", &file_path.display().to_string()));
627        }
628        
629        let code = std::fs::read_to_string(&file_path)?;
630        
631        info!("Debugging issue: {}", error);
632        
633        let response = agent
634            .debug_code(code, error)
635            .with_model(_model.unwrap_or(self.config.default_model.clone()))
636            .execute()
637            .await?;
638        
639        println!("\nAI Debug Analysis:\n");
640        println!("{}", response.content);
641        
642        // Extract and offer to apply fixed code
643        let fixed_code = agent.extract_code(&response.content);
644        if fixed_code != response.content {
645            println!("\nApply the suggested fix? [y/N]");
646            let mut input = String::new();
647            std::io::stdin().read_line(&mut input)?;
648            
649            if input.trim().to_lowercase() == "y" {
650                // Create backup
651                crate::utils::fs_utils::backup_file(&file_path)?;
652                
653                // Write fixed code
654                std::fs::write(&file_path, fixed_code)?;
655                
656                info!("Fix applied successfully!");
657                info!("Backup created for original file");
658            }
659        }
660        
661        if let Some(tokens) = response.tokens_used {
662            info!("Tokens used: {}", tokens);
663        }
664        
665        Ok(())
666    }
667    
668    async fn handle_init(
669        &mut self,
670        name: String,
671        description: Option<String>,
672        output: Option<PathBuf>,
673        template: Option<String>,
674    ) -> Result<()> {
675        let description = description.unwrap_or_else(|| format!("A new Bevy game: {}", name));
676        let project_path = output.unwrap_or_else(|| std::env::current_dir().unwrap().join(&name));
677        
678        info!("Initializing new Bevy AI project: {}", name);
679        
680        let mut project_manager = ProjectManager::new(&project_path);
681        project_manager.init(&name, &description).await?;
682        
683        if let Some(template_name) = template {
684            let template_manager = TemplateManager::new()?;
685            let context = TemplateContext::new(name.clone(), description.clone());
686            let code = template_manager.generate(&template_name, &context)?;
687            
688            std::fs::write(project_path.join("src/main.rs"), code)?;
689            info!("Applied template: {}", template_name);
690        }
691        
692        info!("Project '{}' initialized successfully!", name);
693        info!("Project location: {}", project_path.display());
694        info!("To get started: cd {} && bevy-agent add \"your first feature\"", project_path.display());
695        
696        Ok(())
697    }
698    
699    async fn handle_config(
700        &mut self,
701        openai_key: Option<String>,
702        anthropic_key: Option<String>,
703        google_key: Option<String>,
704        default_model: Option<ModelType>,
705        show: bool,
706        validate: bool,
707    ) -> Result<()> {
708        if show {
709            println!("Current Configuration:");
710            println!("OpenAI: {}", if self.config.openai.is_some() { "Configured" } else { "Not configured" });
711            println!("Anthropic: {}", if self.config.anthropic.is_some() { "Configured" } else { "Not configured" });
712            println!("Google: {}", if self.config.google.is_some() { "Configured" } else { "Not configured" });
713            println!("Default Model: {}", self.config.default_model);
714            println!("Available Models: {:?}", self.config.available_models());
715            return Ok(());
716        }
717        
718        if validate {
719            let warnings = config_utils::validate_config(&self.config)?;
720            if warnings.is_empty() {
721                info!("Configuration is valid");
722            } else {
723                warn!("Configuration warnings:");
724                for warning in warnings {
725                    warn!("  - {}", warning);
726                }
727            }
728            return Ok(());
729        }
730        
731        let mut updated = false;
732        
733        if let Some(key) = openai_key {
734            self.config.openai = Some(crate::config::OpenAIConfig {
735                api_key: key,
736                organization: None,
737                base_url: None,
738            });
739            info!("OpenAI API key configured");
740            updated = true;
741        }
742        
743        if let Some(key) = anthropic_key {
744            self.config.anthropic = Some(crate::config::AnthropicConfig {
745                api_key: key,
746                base_url: None,
747            });
748            info!("Anthropic API key configured");
749            updated = true;
750        }
751        
752        if let Some(key) = google_key {
753            self.config.google = Some(crate::config::GoogleConfig {
754                api_key: key,
755                base_url: None,
756            });
757            info!("Google API key configured");
758            updated = true;
759        }
760        
761        if let Some(model) = default_model {
762            self.config.default_model = model;
763            info!("Default model set to: {}", self.config.default_model);
764            updated = true;
765        }
766        
767        if updated {
768            let config_path = AIConfig::default_config_path()?;
769            self.config.save_to_file(&config_path)?;
770            
771            // Recreate agent with new config
772            if let Ok(agent) = BevyAIAgent::new(self.config.clone()).await {
773                self.agent = Some(agent);
774                info!("๐Ÿ”„ AI agent updated with new configuration");
775            }
776        }
777        
778        Ok(())
779    }
780    
781    async fn handle_build_operation(&self, operation: BuildOperation) -> Result<()> {
782        match operation {
783            BuildOperation::Build { project, release: _ } => {
784                let project_path = project.unwrap_or_else(|| std::env::current_dir().unwrap());
785                info!("Building project...");
786                
787                let manager = ProjectManager::new(&project_path);
788                let result = manager.build().await?;
789                
790                info!("Build completed successfully!");
791                if !result.is_empty() {
792                    println!("{}", result);
793                }
794            }
795            BuildOperation::Run { project, release: _, args: _ } => {
796                let project_path = project.unwrap_or_else(|| std::env::current_dir().unwrap());
797                info!("Running project...");
798                
799                let manager = ProjectManager::new(&project_path);
800                let result = manager.run().await?;
801                
802                if !result.is_empty() {
803                    println!("{}", result);
804                }
805            }
806            BuildOperation::Check { project } => {
807                let project_path = project.unwrap_or_else(|| std::env::current_dir().unwrap());
808                info!("Checking code...");
809                
810                let result = build_utils::cargo_check(&project_path)?;
811                println!("{}", result);
812            }
813            BuildOperation::Clippy { project } => {
814                let project_path = project.unwrap_or_else(|| std::env::current_dir().unwrap());
815                info!("Running clippy...");
816                
817                let result = build_utils::cargo_clippy(&project_path)?;
818                println!("{}", result);
819            }
820            BuildOperation::Format { project } => {
821                let project_path = project.unwrap_or_else(|| std::env::current_dir().unwrap());
822                info!("Formatting code...");
823                
824                let result = build_utils::cargo_fmt(&project_path)?;
825                info!("{}", result);
826            }
827            BuildOperation::Test { project } => {
828                let _project_path = project.unwrap_or_else(|| std::env::current_dir().unwrap());
829                info!("๐Ÿงช Running tests...");
830                
831                // TODO: Implement test running
832                info!("Test runner not yet implemented");
833            }
834        }
835        
836        Ok(())
837    }
838    
839    async fn handle_project_operation(&self, operation: ProjectOperation) -> Result<()> {
840        match operation {
841            ProjectOperation::Info { project } => {
842                let project_path = project.unwrap_or_else(|| std::env::current_dir().unwrap());
843                let mut manager = ProjectManager::new(&project_path);
844                manager.load().await?;
845                
846                if let Some(config) = manager.config() {
847                    println!("Project Information:");
848                    println!("Name: {}", config.metadata.name);
849                    println!("Description: {}", config.metadata.description);
850                    println!("Version: {}", config.metadata.version);
851                    println!("Created: {}", config.metadata.created_at.format("%Y-%m-%d %H:%M:%S"));
852                    println!("Updated: {}", config.metadata.updated_at.format("%Y-%m-%d %H:%M:%S"));
853                    println!("Bevy Version: {}", config.metadata.bevy_version);
854                    println!("Features: {}", config.metadata.features.join(", "));
855                    println!("Conversations: {}", config.conversations.len());
856                    println!("Generated Files: {}", config.generated_files.len());
857                }
858            }
859            ProjectOperation::Stats { project } => {
860                let project_path = project.unwrap_or_else(|| std::env::current_dir().unwrap());
861                let manager = ProjectManager::new(&project_path);
862                let stats = manager.stats().await?;
863                
864                println!("Project Statistics:");
865                println!("Lines of Code: {}", stats.lines_of_code);
866                println!("Rust Files: {}", stats.rust_files);
867                println!("AI Conversations: {}", stats.conversations);
868                println!("Generated Files: {}", stats.generated_files);
869                println!("Dependencies: {}", stats.dependencies);
870                println!("Features: {}", stats.features);
871            }
872            ProjectOperation::History { project, limit } => {
873                let project_path = project.unwrap_or_else(|| std::env::current_dir().unwrap());
874                let mut manager = ProjectManager::new(&project_path);
875                manager.load().await?;
876                
877                if let Some(config) = manager.config() {
878                    println!("๐Ÿ“œ Conversation History (last {}):", limit);
879                    for conversation in config.conversations.iter().rev().take(limit) {
880                        println!("\n{} ({})", 
881                               conversation.timestamp.format("%Y-%m-%d %H:%M:%S"),
882                               conversation.model_used);
883                        println!("๐Ÿ‘ค Request: {}", conversation.request);
884                        println!("AI Response: {}...", 
885                               conversation.response.chars().take(100).collect::<String>());
886                        if let Some(tokens) = conversation.tokens_used {
887                            println!("Tokens: {}", tokens);
888                        }
889                    }
890                }
891            }
892            ProjectOperation::Export { output: _, project: _, format: _ } => {
893                // TODO: Implement project export
894                info!("Project export not yet implemented");
895            }
896            ProjectOperation::Clean { project, cargo } => {
897                let project_path = project.unwrap_or_else(|| std::env::current_dir().unwrap());
898                info!("Cleaning project...");
899                
900                if cargo {
901                    std::process::Command::new("cargo")
902                        .arg("clean")
903                        .current_dir(&project_path)
904                        .output()?;
905                    info!("Cargo artifacts cleaned");
906                }
907                
908                info!("Project cleaned");
909            }
910        }
911        
912        Ok(())
913    }
914    
915    async fn handle_template_operation(&self, operation: TemplateOperation) -> Result<()> {
916        match operation {
917            TemplateOperation::List => {
918                let _manager = TemplateManager::new()?;
919                let templates = TemplateManager::builtin_templates();
920                
921                println!("Available Templates:");
922                for template in templates {
923                    println!("  {} - {} ({})", 
924                           template.name, 
925                           template.description,
926                           template.category);
927                }
928            }
929            TemplateOperation::Show { name } => {
930                let templates = TemplateManager::builtin_templates();
931                if let Some(template) = templates.iter().find(|t| t.name == name) {
932                    println!("Template: {}", template.name);
933                    println!("Description: {}", template.description);
934                    println!("Category: {}", template.category);
935                    println!("Dependencies: {}", template.dependencies.join(", "));
936                    println!("Features: {}", template.features.join(", "));
937                } else {
938                    error!("Template '{}' not found", name);
939                }
940            }
941            TemplateOperation::Create { name: _, description: _, file: _ } => {
942                // TODO: Implement custom template creation
943                info!("Custom template creation not yet implemented");
944            }
945            TemplateOperation::Apply { name: _, project: _ } => {
946                // TODO: Implement template application
947                info!("Template application not yet implemented");
948            }
949        }
950        
951        Ok(())
952    }
953    
954    fn generate_project_name(&self, description: &str) -> String {
955        description
956            .split_whitespace()
957            .take(3)
958            .collect::<Vec<_>>()
959            .join("_")
960            .to_lowercase()
961            .chars()
962            .filter(|c| c.is_alphanumeric() || *c == '_')
963            .collect()
964    }
965}