1use crate::{
7 ai::BevyAIAgent,
8 config::{AIConfig, ModelType},
9 error::{BevyAIError, Result},
10 game_templates::{TemplateManager, TemplateContext},
11 project::{Project, ProjectManager},
12 utils::{build_utils, config_utils},
13};
14use clap::{Parser, Subcommand, ValueEnum};
15use std::path::PathBuf;
16use tracing::{error, info, warn};
17
// Top-level CLI definition parsed by clap's derive API.
// Plain `//` comments are used (not `///`) so clap's generated help text,
// which is sourced from doc comments, stays exactly as before.
#[derive(Parser)]
#[command(name = "bevy-agent")]
#[command(about = "AI-powered Bevy game prototyping assistant with GPT/Claude integration")]
#[command(version = crate::VERSION)]
pub struct Cli {
    // The subcommand to execute (create, add, improve, ...).
    #[command(subcommand)]
    pub command: Commands,

    // Enable debug-level logging; usable on every subcommand.
    #[arg(short, long, global = true)]
    pub verbose: bool,

    // Optional path to a configuration file overriding the default location.
    #[arg(short, long, global = true)]
    pub config: Option<PathBuf>,
}
36
// Top-level subcommands understood by the `bevy-agent` CLI.
// Plain `//` comments are used (not `///`) so clap's help output is unchanged.
#[derive(Subcommand)]
pub enum Commands {
    // Generate a new game project from a natural-language description.
    Create {
        // Free-form description of the game to generate.
        description: String,

        // AI model override; falls back to the configured default when omitted.
        #[arg(long)]
        model: Option<ModelType>,

        // Explicit project name; derived from the description when omitted.
        #[arg(long)]
        name: Option<String>,

        // Output directory for the new project.
        #[arg(long, short)]
        output: Option<PathBuf>,

        // Built-in template to generate from instead of free-form AI generation.
        #[arg(long)]
        template: Option<String>,

        // Create the project in the current directory instead of a new one.
        #[arg(long)]
        in_place: bool,
    },

    // Add a feature to an existing project via the AI agent.
    Add {
        // Description of the feature to add.
        feature: String,

        #[arg(long)]
        model: Option<ModelType>,

        // Project directory (defaults to the current directory).
        #[arg(long, short)]
        project: Option<PathBuf>,
    },

    // Ask the AI to improve one aspect (performance, readability, ...) of a file.
    Improve {
        aspect: ImprovementAspect,

        #[arg(long)]
        model: Option<ModelType>,

        // File to improve (defaults to src/main.rs in the project).
        #[arg(long)]
        file: Option<PathBuf>,

        #[arg(long, short)]
        project: Option<PathBuf>,
    },

    // Ask the AI to explain the code in a file.
    Explain {
        #[arg(long)]
        model: Option<ModelType>,

        #[arg(long)]
        file: Option<PathBuf>,

        #[arg(long, short)]
        project: Option<PathBuf>,
    },

    // Ask the AI to diagnose (and optionally fix) an error.
    Debug {
        // The error message or a description of the problem.
        error: String,

        #[arg(long)]
        model: Option<ModelType>,

        #[arg(long)]
        file: Option<PathBuf>,

        #[arg(long, short)]
        project: Option<PathBuf>,
    },

    // Initialize a new project skeleton without invoking the AI.
    Init {
        name: String,

        #[arg(long, short)]
        description: Option<String>,

        #[arg(long, short)]
        output: Option<PathBuf>,

        #[arg(long)]
        template: Option<String>,
    },

    // View, validate, or update API keys and default-model configuration.
    Config {
        #[arg(long)]
        openai_key: Option<String>,

        #[arg(long)]
        anthropic_key: Option<String>,

        #[arg(long)]
        google_key: Option<String>,

        #[arg(long)]
        default_model: Option<ModelType>,

        // Print the current configuration and exit.
        #[arg(long)]
        show: bool,

        // Validate the current configuration and exit.
        #[arg(long)]
        validate: bool,
    },

    // Cargo build/run/check/lint/format/test operations.
    Build {
        #[command(subcommand)]
        operation: BuildOperation,
    },

    // Project metadata operations (info, stats, history, export, clean).
    Project {
        #[command(subcommand)]
        operation: ProjectOperation,
    },

    // Template operations (list, show, create, apply).
    Template {
        #[command(subcommand)]
        operation: TemplateOperation,
    },
}
197
// Cargo-backed build operations, dispatched by `CliHandler::handle_build_operation`.
// Plain `//` comments are used (not `///`) so clap's help output is unchanged.
#[derive(Subcommand)]
pub enum BuildOperation {
    // Compile the project.
    Build {
        // Project directory (defaults to the current directory).
        #[arg(long, short)]
        project: Option<PathBuf>,

        // NOTE(review): accepted but currently ignored by the handler — confirm.
        #[arg(long)]
        release: bool,
    },

    // Compile and run the project.
    Run {
        #[arg(long, short)]
        project: Option<PathBuf>,

        // NOTE(review): accepted but currently ignored by the handler — confirm.
        #[arg(long)]
        release: bool,

        // Extra arguments captured after `--`; currently unused by the handler.
        #[arg(last = true)]
        args: Vec<String>,
    },

    // Type-check the project (cargo check).
    Check {
        #[arg(long, short)]
        project: Option<PathBuf>,
    },

    // Run lints (cargo clippy).
    Clippy {
        #[arg(long, short)]
        project: Option<PathBuf>,
    },

    // Format sources (cargo fmt).
    Format {
        #[arg(long, short)]
        project: Option<PathBuf>,
    },

    // Run the test suite (handler currently a stub).
    Test {
        #[arg(long, short)]
        project: Option<PathBuf>,
    },
}
255
// Project metadata operations, dispatched by `CliHandler::handle_project_operation`.
// Plain `//` comments are used (not `///`) so clap's help output is unchanged.
#[derive(Subcommand)]
pub enum ProjectOperation {
    // Print project metadata (name, versions, features, conversation counts).
    Info {
        // Project directory (defaults to the current directory).
        #[arg(long, short)]
        project: Option<PathBuf>,
    },

    // Print code/AI usage statistics for the project.
    Stats {
        #[arg(long, short)]
        project: Option<PathBuf>,
    },

    // Show the most recent AI conversations recorded in the project.
    History {
        #[arg(long, short)]
        project: Option<PathBuf>,

        // Maximum number of conversations to display.
        #[arg(long, default_value = "10")]
        limit: usize,
    },

    // Export project data (handler currently a stub).
    Export {
        output: PathBuf,

        #[arg(long, short)]
        project: Option<PathBuf>,

        #[arg(long, default_value = "json")]
        format: ExportFormat,
    },

    // Clean project artifacts; optionally also run `cargo clean`.
    Clean {
        #[arg(long, short)]
        project: Option<PathBuf>,

        // Also remove cargo build artifacts.
        #[arg(long)]
        cargo: bool,
    },
}
309
// Template operations, dispatched by `CliHandler::handle_template_operation`.
// Plain `//` comments are used (not `///`) so clap's help output is unchanged.
#[derive(Subcommand)]
pub enum TemplateOperation {
    // List all built-in templates.
    List,

    // Show details for a single named template.
    Show {
        name: String,
    },

    // Register a custom template from a file (handler currently a stub).
    Create {
        name: String,

        description: String,

        file: PathBuf,
    },

    // Apply a template to an existing project (handler currently a stub).
    Apply {
        name: String,

        #[arg(long, short)]
        project: Option<PathBuf>,
    },
}
344
// Aspects of a codebase the `improve` command can target; the Display impl
// below renders them as the lowercase keywords used in AI prompts.
// Plain `//` comments are used (not `///`) so clap's ValueEnum help is unchanged.
#[derive(ValueEnum, Clone, Debug)]
pub enum ImprovementAspect {
    Performance,
    Readability,
    Features,
    Structure,
    Testing,
    Documentation,
    Security,
    Efficiency,
}
365
// Serialization formats accepted by `project export` (export itself is
// currently a stub in the handler).
// Plain `//` comments are used (not `///`) so clap's ValueEnum help is unchanged.
#[derive(ValueEnum, Clone, Debug)]
pub enum ExportFormat {
    Json,
    Yaml,
    Toml,
    Markdown,
}
378
379impl std::fmt::Display for ImprovementAspect {
380 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
381 match self {
382 ImprovementAspect::Performance => write!(f, "performance"),
383 ImprovementAspect::Readability => write!(f, "readability"),
384 ImprovementAspect::Features => write!(f, "features"),
385 ImprovementAspect::Structure => write!(f, "structure"),
386 ImprovementAspect::Testing => write!(f, "testing"),
387 ImprovementAspect::Documentation => write!(f, "documentation"),
388 ImprovementAspect::Security => write!(f, "security"),
389 ImprovementAspect::Efficiency => write!(f, "efficiency"),
390 }
391 }
392}
393
// Executes parsed CLI commands against the loaded configuration.
pub struct CliHandler {
    // Loaded AI configuration; may be mutated and re-saved by `handle_config`.
    config: AIConfig,
    // `None` when `BevyAIAgent::new` failed at startup (e.g. no API keys);
    // AI-backed commands then return a configuration error.
    agent: Option<BevyAIAgent>,
}
399
400impl CliHandler {
401 pub async fn new(config_path: Option<PathBuf>) -> Result<Self> {
403 let config = if let Some(path) = config_path {
404 AIConfig::from_file(path)?
405 } else {
406 AIConfig::load_or_create()?
407 };
408
409 let agent = BevyAIAgent::new(config.clone()).await.ok();
410
411 Ok(Self { config, agent })
412 }
413
414 pub async fn handle(&mut self, cli: Cli) -> Result<()> {
416 if cli.verbose {
418 tracing_subscriber::fmt()
419 .with_env_filter("bevy_agent=debug")
420 .init();
421 } else {
422 tracing_subscriber::fmt()
423 .with_env_filter("bevy_agent=info")
424 .init();
425 }
426
427 match cli.command {
428 Commands::Create { description, model, name, output, template, in_place } => {
429 self.handle_create(description, model, name, output, template, in_place).await
430 }
431 Commands::Add { feature, model, project } => {
432 self.handle_add(feature, model, project).await
433 }
434 Commands::Improve { aspect, model, file, project } => {
435 self.handle_improve(aspect, model, file, project).await
436 }
437 Commands::Explain { model, file, project } => {
438 self.handle_explain(model, file, project).await
439 }
440 Commands::Debug { error, model, file, project } => {
441 self.handle_debug(error, model, file, project).await
442 }
443 Commands::Init { name, description, output, template } => {
444 self.handle_init(name, description, output, template).await
445 }
446 Commands::Config { openai_key, anthropic_key, google_key, default_model, show, validate } => {
447 self.handle_config(openai_key, anthropic_key, google_key, default_model, show, validate).await
448 }
449 Commands::Build { operation } => {
450 self.handle_build_operation(operation).await
451 }
452 Commands::Project { operation } => {
453 self.handle_project_operation(operation).await
454 }
455 Commands::Template { operation } => {
456 self.handle_template_operation(operation).await
457 }
458 }
459 }
460
461 async fn handle_create(
462 &mut self,
463 description: String,
464 _model: Option<ModelType>,
465 name: Option<String>,
466 output: Option<PathBuf>,
467 template: Option<String>,
468 in_place: bool,
469 ) -> Result<()> {
470 let agent = self.agent.as_ref()
471 .ok_or_else(|| BevyAIError::ai_api("No AI agent available. Please configure API keys.".to_string()))?;
472
473 let project_name = name.unwrap_or_else(|| self.generate_project_name(&description));
474 let project_path = if in_place {
475 std::env::current_dir()?
476 } else {
477 output.unwrap_or_else(|| std::env::current_dir().unwrap().join(&project_name))
478 };
479
480 info!("Creating game '{}' from description: {}", project_name, description);
481
482 if let Some(template_name) = template {
483 let template_manager = TemplateManager::new()?;
485 let context = TemplateContext::new(project_name.clone(), description.clone());
486 let code = template_manager.generate(&template_name, &context)?;
487
488 let mut project_manager = ProjectManager::new(&project_path);
490 project_manager.init(&project_name, &description).await?;
491
492 std::fs::write(project_path.join("src/main.rs"), code)?;
494 } else {
495 let mut project = Project::init(project_path.clone(), &project_name, &description, agent.clone()).await?;
497 let response = project.generate_game(&description).await?;
498
499 info!("Game '{}' created successfully!", project_name);
500 info!("Project location: {}", project_path.display());
501 info!("To run: cd {} && cargo run", project_path.display());
502
503 if let Some(tokens) = response.tokens_used {
504 info!("Tokens used: {}", tokens);
505 }
506 }
507
508 Ok(())
509 }
510
511 async fn handle_add(&mut self, feature: String, _model: Option<ModelType>, project: Option<PathBuf>) -> Result<()> {
512 let agent = self.agent.as_ref()
513 .ok_or_else(|| BevyAIError::ai_api("No AI agent available. Please configure API keys.".to_string()))?;
514
515 let project_path = project.unwrap_or_else(|| std::env::current_dir().unwrap());
516
517 info!("Adding feature: {}", feature);
518
519 let mut project = Project::new(project_path, agent.clone()).await?;
520 let response = project.add_feature(&feature).await?;
521
522 info!("Feature added successfully!");
523
524 if let Some(tokens) = response.tokens_used {
525 info!("Tokens used: {}", tokens);
526 }
527
528 Ok(())
529 }
530
531 async fn handle_improve(
532 &mut self,
533 aspect: ImprovementAspect,
534 _model: Option<ModelType>,
535 file: Option<PathBuf>,
536 project: Option<PathBuf>,
537 ) -> Result<()> {
538 let agent = self.agent.as_ref()
539 .ok_or_else(|| BevyAIError::ai_api("No AI agent available. Please configure API keys.".to_string()))?;
540
541 let project_path = project.unwrap_or_else(|| std::env::current_dir().unwrap());
542 let file_path = file.unwrap_or_else(|| project_path.join("src/main.rs"));
543
544 if !file_path.exists() {
545 return Err(BevyAIError::file_operation("read", &file_path.display().to_string()));
546 }
547
548 let code = std::fs::read_to_string(&file_path)?;
549
550 info!("Improving {} of {}", aspect, file_path.display());
551
552 let response = agent
553 .improve_code(aspect.to_string(), code)
554 .with_model(_model.unwrap_or(self.config.default_model.clone()))
555 .execute()
556 .await?;
557
558 let improved_code = agent.extract_code(&response.content);
559
560 crate::utils::fs_utils::backup_file(&file_path)?;
562
563 std::fs::write(&file_path, improved_code)?;
565
566 info!("Code improved successfully!");
567 info!("Backup created for original file");
568
569 if let Some(tokens) = response.tokens_used {
570 info!("Tokens used: {}", tokens);
571 }
572
573 Ok(())
574 }
575
576 async fn handle_explain(
577 &mut self,
578 _model: Option<ModelType>,
579 file: Option<PathBuf>,
580 project: Option<PathBuf>,
581 ) -> Result<()> {
582 let agent = self.agent.as_ref()
583 .ok_or_else(|| BevyAIError::ai_api("No AI agent available. Please configure API keys.".to_string()))?;
584
585 let project_path = project.unwrap_or_else(|| std::env::current_dir().unwrap());
586 let file_path = file.unwrap_or_else(|| project_path.join("src/main.rs"));
587
588 if !file_path.exists() {
589 return Err(BevyAIError::file_operation("read", &file_path.display().to_string()));
590 }
591
592 let code = std::fs::read_to_string(&file_path)?;
593
594 info!("Explaining code in {}", file_path.display());
595
596 let response = agent
597 .explain_code(code)
598 .with_model(_model.unwrap_or(self.config.default_model.clone()))
599 .execute()
600 .await?;
601
602 println!("\nAI Code Explanation:\n");
603 println!("{}", response.content);
604
605 if let Some(tokens) = response.tokens_used {
606 info!("Tokens used: {}", tokens);
607 }
608
609 Ok(())
610 }
611
612 async fn handle_debug(
613 &mut self,
614 error: String,
615 _model: Option<ModelType>,
616 file: Option<PathBuf>,
617 project: Option<PathBuf>,
618 ) -> Result<()> {
619 let agent = self.agent.as_ref()
620 .ok_or_else(|| BevyAIError::ai_api("No AI agent available. Please configure API keys.".to_string()))?;
621
622 let project_path = project.unwrap_or_else(|| std::env::current_dir().unwrap());
623 let file_path = file.unwrap_or_else(|| project_path.join("src/main.rs"));
624
625 if !file_path.exists() {
626 return Err(BevyAIError::file_operation("read", &file_path.display().to_string()));
627 }
628
629 let code = std::fs::read_to_string(&file_path)?;
630
631 info!("Debugging issue: {}", error);
632
633 let response = agent
634 .debug_code(code, error)
635 .with_model(_model.unwrap_or(self.config.default_model.clone()))
636 .execute()
637 .await?;
638
639 println!("\nAI Debug Analysis:\n");
640 println!("{}", response.content);
641
642 let fixed_code = agent.extract_code(&response.content);
644 if fixed_code != response.content {
645 println!("\nApply the suggested fix? [y/N]");
646 let mut input = String::new();
647 std::io::stdin().read_line(&mut input)?;
648
649 if input.trim().to_lowercase() == "y" {
650 crate::utils::fs_utils::backup_file(&file_path)?;
652
653 std::fs::write(&file_path, fixed_code)?;
655
656 info!("Fix applied successfully!");
657 info!("Backup created for original file");
658 }
659 }
660
661 if let Some(tokens) = response.tokens_used {
662 info!("Tokens used: {}", tokens);
663 }
664
665 Ok(())
666 }
667
668 async fn handle_init(
669 &mut self,
670 name: String,
671 description: Option<String>,
672 output: Option<PathBuf>,
673 template: Option<String>,
674 ) -> Result<()> {
675 let description = description.unwrap_or_else(|| format!("A new Bevy game: {}", name));
676 let project_path = output.unwrap_or_else(|| std::env::current_dir().unwrap().join(&name));
677
678 info!("Initializing new Bevy AI project: {}", name);
679
680 let mut project_manager = ProjectManager::new(&project_path);
681 project_manager.init(&name, &description).await?;
682
683 if let Some(template_name) = template {
684 let template_manager = TemplateManager::new()?;
685 let context = TemplateContext::new(name.clone(), description.clone());
686 let code = template_manager.generate(&template_name, &context)?;
687
688 std::fs::write(project_path.join("src/main.rs"), code)?;
689 info!("Applied template: {}", template_name);
690 }
691
692 info!("Project '{}' initialized successfully!", name);
693 info!("Project location: {}", project_path.display());
694 info!("To get started: cd {} && bevy-agent add \"your first feature\"", project_path.display());
695
696 Ok(())
697 }
698
699 async fn handle_config(
700 &mut self,
701 openai_key: Option<String>,
702 anthropic_key: Option<String>,
703 google_key: Option<String>,
704 default_model: Option<ModelType>,
705 show: bool,
706 validate: bool,
707 ) -> Result<()> {
708 if show {
709 println!("Current Configuration:");
710 println!("OpenAI: {}", if self.config.openai.is_some() { "Configured" } else { "Not configured" });
711 println!("Anthropic: {}", if self.config.anthropic.is_some() { "Configured" } else { "Not configured" });
712 println!("Google: {}", if self.config.google.is_some() { "Configured" } else { "Not configured" });
713 println!("Default Model: {}", self.config.default_model);
714 println!("Available Models: {:?}", self.config.available_models());
715 return Ok(());
716 }
717
718 if validate {
719 let warnings = config_utils::validate_config(&self.config)?;
720 if warnings.is_empty() {
721 info!("Configuration is valid");
722 } else {
723 warn!("Configuration warnings:");
724 for warning in warnings {
725 warn!(" - {}", warning);
726 }
727 }
728 return Ok(());
729 }
730
731 let mut updated = false;
732
733 if let Some(key) = openai_key {
734 self.config.openai = Some(crate::config::OpenAIConfig {
735 api_key: key,
736 organization: None,
737 base_url: None,
738 });
739 info!("OpenAI API key configured");
740 updated = true;
741 }
742
743 if let Some(key) = anthropic_key {
744 self.config.anthropic = Some(crate::config::AnthropicConfig {
745 api_key: key,
746 base_url: None,
747 });
748 info!("Anthropic API key configured");
749 updated = true;
750 }
751
752 if let Some(key) = google_key {
753 self.config.google = Some(crate::config::GoogleConfig {
754 api_key: key,
755 base_url: None,
756 });
757 info!("Google API key configured");
758 updated = true;
759 }
760
761 if let Some(model) = default_model {
762 self.config.default_model = model;
763 info!("Default model set to: {}", self.config.default_model);
764 updated = true;
765 }
766
767 if updated {
768 let config_path = AIConfig::default_config_path()?;
769 self.config.save_to_file(&config_path)?;
770
771 if let Ok(agent) = BevyAIAgent::new(self.config.clone()).await {
773 self.agent = Some(agent);
774 info!("๐ AI agent updated with new configuration");
775 }
776 }
777
778 Ok(())
779 }
780
781 async fn handle_build_operation(&self, operation: BuildOperation) -> Result<()> {
782 match operation {
783 BuildOperation::Build { project, release: _ } => {
784 let project_path = project.unwrap_or_else(|| std::env::current_dir().unwrap());
785 info!("Building project...");
786
787 let manager = ProjectManager::new(&project_path);
788 let result = manager.build().await?;
789
790 info!("Build completed successfully!");
791 if !result.is_empty() {
792 println!("{}", result);
793 }
794 }
795 BuildOperation::Run { project, release: _, args: _ } => {
796 let project_path = project.unwrap_or_else(|| std::env::current_dir().unwrap());
797 info!("Running project...");
798
799 let manager = ProjectManager::new(&project_path);
800 let result = manager.run().await?;
801
802 if !result.is_empty() {
803 println!("{}", result);
804 }
805 }
806 BuildOperation::Check { project } => {
807 let project_path = project.unwrap_or_else(|| std::env::current_dir().unwrap());
808 info!("Checking code...");
809
810 let result = build_utils::cargo_check(&project_path)?;
811 println!("{}", result);
812 }
813 BuildOperation::Clippy { project } => {
814 let project_path = project.unwrap_or_else(|| std::env::current_dir().unwrap());
815 info!("Running clippy...");
816
817 let result = build_utils::cargo_clippy(&project_path)?;
818 println!("{}", result);
819 }
820 BuildOperation::Format { project } => {
821 let project_path = project.unwrap_or_else(|| std::env::current_dir().unwrap());
822 info!("Formatting code...");
823
824 let result = build_utils::cargo_fmt(&project_path)?;
825 info!("{}", result);
826 }
827 BuildOperation::Test { project } => {
828 let _project_path = project.unwrap_or_else(|| std::env::current_dir().unwrap());
829 info!("๐งช Running tests...");
830
831 info!("Test runner not yet implemented");
833 }
834 }
835
836 Ok(())
837 }
838
839 async fn handle_project_operation(&self, operation: ProjectOperation) -> Result<()> {
840 match operation {
841 ProjectOperation::Info { project } => {
842 let project_path = project.unwrap_or_else(|| std::env::current_dir().unwrap());
843 let mut manager = ProjectManager::new(&project_path);
844 manager.load().await?;
845
846 if let Some(config) = manager.config() {
847 println!("Project Information:");
848 println!("Name: {}", config.metadata.name);
849 println!("Description: {}", config.metadata.description);
850 println!("Version: {}", config.metadata.version);
851 println!("Created: {}", config.metadata.created_at.format("%Y-%m-%d %H:%M:%S"));
852 println!("Updated: {}", config.metadata.updated_at.format("%Y-%m-%d %H:%M:%S"));
853 println!("Bevy Version: {}", config.metadata.bevy_version);
854 println!("Features: {}", config.metadata.features.join(", "));
855 println!("Conversations: {}", config.conversations.len());
856 println!("Generated Files: {}", config.generated_files.len());
857 }
858 }
859 ProjectOperation::Stats { project } => {
860 let project_path = project.unwrap_or_else(|| std::env::current_dir().unwrap());
861 let manager = ProjectManager::new(&project_path);
862 let stats = manager.stats().await?;
863
864 println!("Project Statistics:");
865 println!("Lines of Code: {}", stats.lines_of_code);
866 println!("Rust Files: {}", stats.rust_files);
867 println!("AI Conversations: {}", stats.conversations);
868 println!("Generated Files: {}", stats.generated_files);
869 println!("Dependencies: {}", stats.dependencies);
870 println!("Features: {}", stats.features);
871 }
872 ProjectOperation::History { project, limit } => {
873 let project_path = project.unwrap_or_else(|| std::env::current_dir().unwrap());
874 let mut manager = ProjectManager::new(&project_path);
875 manager.load().await?;
876
877 if let Some(config) = manager.config() {
878 println!("๐ Conversation History (last {}):", limit);
879 for conversation in config.conversations.iter().rev().take(limit) {
880 println!("\n{} ({})",
881 conversation.timestamp.format("%Y-%m-%d %H:%M:%S"),
882 conversation.model_used);
883 println!("๐ค Request: {}", conversation.request);
884 println!("AI Response: {}...",
885 conversation.response.chars().take(100).collect::<String>());
886 if let Some(tokens) = conversation.tokens_used {
887 println!("Tokens: {}", tokens);
888 }
889 }
890 }
891 }
892 ProjectOperation::Export { output: _, project: _, format: _ } => {
893 info!("Project export not yet implemented");
895 }
896 ProjectOperation::Clean { project, cargo } => {
897 let project_path = project.unwrap_or_else(|| std::env::current_dir().unwrap());
898 info!("Cleaning project...");
899
900 if cargo {
901 std::process::Command::new("cargo")
902 .arg("clean")
903 .current_dir(&project_path)
904 .output()?;
905 info!("Cargo artifacts cleaned");
906 }
907
908 info!("Project cleaned");
909 }
910 }
911
912 Ok(())
913 }
914
915 async fn handle_template_operation(&self, operation: TemplateOperation) -> Result<()> {
916 match operation {
917 TemplateOperation::List => {
918 let _manager = TemplateManager::new()?;
919 let templates = TemplateManager::builtin_templates();
920
921 println!("Available Templates:");
922 for template in templates {
923 println!(" {} - {} ({})",
924 template.name,
925 template.description,
926 template.category);
927 }
928 }
929 TemplateOperation::Show { name } => {
930 let templates = TemplateManager::builtin_templates();
931 if let Some(template) = templates.iter().find(|t| t.name == name) {
932 println!("Template: {}", template.name);
933 println!("Description: {}", template.description);
934 println!("Category: {}", template.category);
935 println!("Dependencies: {}", template.dependencies.join(", "));
936 println!("Features: {}", template.features.join(", "));
937 } else {
938 error!("Template '{}' not found", name);
939 }
940 }
941 TemplateOperation::Create { name: _, description: _, file: _ } => {
942 info!("Custom template creation not yet implemented");
944 }
945 TemplateOperation::Apply { name: _, project: _ } => {
946 info!("Template application not yet implemented");
948 }
949 }
950
951 Ok(())
952 }
953
954 fn generate_project_name(&self, description: &str) -> String {
955 description
956 .split_whitespace()
957 .take(3)
958 .collect::<Vec<_>>()
959 .join("_")
960 .to_lowercase()
961 .chars()
962 .filter(|c| c.is_alphanumeric() || *c == '_')
963 .collect()
964 }
965}