use anyhow::{Context, Result};
use clap::{Args, Command, Parser, Subcommand};
use console::{style, Term};
use convert_case::{Case, Casing};
use dialoguer::{theme::ColorfulTheme, Input};
use indicatif::{ProgressBar, ProgressStyle};
use run_script;
use serde::Serialize;
use std::io::IsTerminal;
use std::path::PathBuf;
use std::sync::Arc;
use std::time::Duration;
use tracing::info;
use crate::{
core::OpenCrates,
providers::openai::OpenAIProvider,
utils::templates::CrateType,
utils::{
config::OpenCratesConfig,
openai_agents::{CrateAssistant, OpenAIClient, OpenAIClientConfig},
templates::{CrateSpec, TemplateManager},
AgentManager,
},
};
use crate::tui::app::App;
use crate::tui::event::EventHandler;
use crate::utils::cache::CacheManager;
use crate::utils::config::ConfigManager;
use crate::utils::metrics::OpenCratesMetrics;
use crate::utils::project_info::{EnrichedAnalysis, ProjectInfo};
pub mod auth;
pub mod check;
pub mod codex;
pub mod completion;
pub mod config;
pub mod deploy;
pub mod docker;
pub mod generate;
pub mod init;
pub mod monitor;
pub mod new;
pub mod release;
pub mod test;
/// Top-level command-line interface for the `opencrates` binary.
///
/// Global flags (`--format`, `--verbose`, `--quiet`, `--config`) are parsed
/// alongside whichever [`Commands`] subcommand the user invokes.
#[derive(Parser)]
#[command(
name = "opencrates",
about = "AI-powered Rust crate generator",
long_about = "
OpenCrates is a comprehensive AI-powered tool for Rust developers that provides:
- Intelligent crate generation with best practices
- Advanced crate search and discovery
- Project analysis and health monitoring
- AI-powered code assistance and recommendations
- Interactive TUI dashboard for project management
- Comprehensive testing and optimization tools
Perfect for developers who want to accelerate their Rust development workflow!
",
version
)]
pub struct Cli {
/// Subcommand to execute.
#[command(subcommand)]
pub command: Commands,
/// Output rendering mode; defaults to the styled "enhanced" format.
#[arg(short, long, default_value = "enhanced")]
pub format: OutputFormat,
/// Enable verbose output.
#[arg(short, long)]
pub verbose: bool,
/// Suppress non-essential output.
#[arg(short, long)]
pub quiet: bool,
/// Optional explicit path to a configuration file (see `EnhancedCli::new_with_config`).
#[arg(short, long)]
pub config: Option<PathBuf>,
}
/// How command results are rendered to the user.
#[derive(clap::ValueEnum, Clone, Debug)]
pub enum OutputFormat {
/// Styled, colored console output (the default).
Enhanced,
/// Machine-readable JSON.
Json,
/// Unstyled text.
Plain,
/// Terse output.
Minimal,
}
/// All `opencrates` subcommands.
///
/// Fields whose names start with an underscore (`_template`, `_model`,
/// `_detailed`, `_refresh`, `_realtime`, `_component`) are parsed but unused
/// by their handlers.
/// NOTE(review): clap derives flag/short names from the field name, so the
/// underscore-prefixed fields may surface with odd spellings (e.g. a `_`
/// short flag) — confirm the generated `--help` output is as intended.
#[derive(Subcommand)]
pub enum Commands {
/// Scaffold a new project (name resolved from flag, path, or prompt).
Init {
#[arg(value_name = "PATH")]
path: Option<PathBuf>,
#[arg(short, long)]
name: Option<String>,
#[arg(short, long, default_value = "default")]
template: String,
#[arg(long)]
ai_suggestions: bool,
},
/// Generate a crate from a name and description via the core engine.
Generate {
#[arg(short, long)]
name: String,
#[arg(short, long)]
description: String,
#[arg(long)]
features: Option<String>,
#[arg(long, default_value = ".")]
output_dir: PathBuf,
#[arg(short, long)]
_template: Option<String>,
#[arg(short, long)]
interactive: bool,
#[arg(long, default_value = "gpt-4o")]
_model: String,
},
/// Analyze a project and optionally print recommendations.
Analyze {
#[arg(default_value = ".")]
path: PathBuf,
#[arg(short, long, default_value = "comprehensive")]
depth: AnalysisDepth,
#[arg(short, long)]
recommendations: bool,
#[arg(long)]
_detailed: bool,
},
/// Ask the AI assistant for optimization suggestions.
Optimize {
#[arg(default_value = ".")]
path: PathBuf,
#[arg(short, long, default_value = "balanced")]
strategy: OptimizationStrategy,
#[arg(short, long)]
auto_apply: bool,
},
/// AI-backed crate search.
Search {
query: String,
#[arg(short, long, default_value = "10")]
limit: usize,
#[arg(short, long)]
category: Option<String>,
#[arg(short, long, default_value = "relevance")]
sort: SortBy,
#[arg(long)]
_detailed: bool,
},
/// Run the (status-reporting) server mode until Ctrl+C.
Serve {
#[arg(long, default_value = "127.0.0.1")]
host: String,
#[arg(short, long, default_value = "8080")]
port: u16,
#[arg(long)]
hot_reload: bool,
#[arg(long)]
dev: bool,
},
/// Run `cargo test` in the target directory and report results.
Test {
#[arg(default_value = ".")]
path: PathBuf,
#[arg(short, long)]
types: Vec<TestType>,
#[arg(long, default_value = "80")]
coverage: u8,
#[arg(long)]
reports: bool,
},
/// Chat with the AI assistant.
Chat {
message: Option<String>,
#[arg(short, long, default_value = "assistant")]
mode: ChatMode,
#[arg(long)]
functions: bool,
},
/// Shell out to the external `aider` tool.
Aider {
#[arg(short, long)]
files: Vec<PathBuf>,
command: Option<String>,
},
/// One-shot AI prompt.
Aichat {
prompt: String,
#[arg(short, long)]
model: Option<String>,
},
/// Launch the interactive TUI dashboard.
Tui {
#[arg(short, long, default_value = "5")]
_refresh: u64,
#[arg(long)]
_realtime: bool,
},
/// Report system health.
Health {
#[arg(short, long)]
_component: Option<String>,
#[arg(long)]
detailed: bool,
},
/// Run (currently synthetic) benchmarks.
Benchmark {
#[arg(short, long, default_value = "all")]
benchmark_type: BenchmarkType,
#[arg(short, long, default_value = "100")]
iterations: u32,
},
/// Configuration management subcommands.
Config(ConfigArgs),
/// Agent management subcommands.
Agent(AgentArgs),
/// Run a `cargo make` task with extra arguments.
Task {
task: String,
args: Vec<String>,
},
}
/// Wrapper carrying the `config` subcommand's action.
#[derive(Args, Debug)]
pub struct ConfigArgs {
/// Which configuration action to perform.
#[command(subcommand)]
pub action: ConfigAction,
}
/// Actions available under `opencrates config`.
#[derive(Subcommand, Debug)]
pub enum ConfigAction {
/// Print the current configuration.
Show,
/// Set a single key/value pair.
Set { key: String, value: String },
/// Reset configuration to defaults.
Reset,
/// Validate the current configuration.
Validate,
}
impl ConfigAction {
    /// Builds the imperative clap `Command` tree for `config`.
    ///
    /// Mirrors the derive-based [`ConfigAction`] variants for callers that
    /// assemble the CLI by hand.
    #[must_use]
    pub fn command() -> Command {
        let set = Command::new("set")
            .about("Set a configuration value")
            .arg(clap::Arg::new("key").required(true))
            .arg(clap::Arg::new("value").required(true));
        let show = Command::new("show").about("Show current configuration");
        let reset = Command::new("reset").about("Reset to defaults");
        let validate = Command::new("validate").about("Validate configuration");
        Command::new("config")
            .about("Configuration management")
            .subcommand_required(true)
            .arg_required_else_help(true)
            .subcommand(show)
            .subcommand(set)
            .subcommand(reset)
            .subcommand(validate)
    }
}
/// Wrapper carrying the `agent` subcommand's action.
#[derive(Args, Debug)]
pub struct AgentArgs {
/// Which agent action to perform.
#[command(subcommand)]
pub action: AgentAction,
}
/// Actions available under `opencrates agent`.
#[derive(Subcommand, Debug)]
pub enum AgentAction {
/// List registered agents.
List,
/// Send a message to a specific agent.
Execute {
agent_id: String,
message: String,
#[arg(long)]
context: Option<String>,
},
/// Declare a new agent (currently only echoed, not persisted).
Create {
name: String,
description: String,
instructions: String,
},
}
impl AgentAction {
    /// Builds the imperative clap `Command` tree for `agent`.
    ///
    /// Mirrors the derive-based [`AgentAction`] variants for callers that
    /// assemble the CLI by hand.
    #[must_use]
    pub fn command() -> Command {
        let execute = Command::new("execute")
            .about("Execute an agent")
            .arg(clap::Arg::new("agent_id").required(true))
            .arg(clap::Arg::new("message").required(true));
        let create = Command::new("create")
            .about("Create a new agent")
            .arg(clap::Arg::new("name").required(true))
            .arg(clap::Arg::new("description").required(true))
            .arg(clap::Arg::new("instructions").required(true));
        Command::new("agent")
            .about("Agent management")
            .subcommand_required(true)
            .arg_required_else_help(true)
            .subcommand(Command::new("list").about("List available agents"))
            .subcommand(execute)
            .subcommand(create)
    }
}
/// How thorough an `analyze` run should be.
#[derive(clap::ValueEnum, Clone, Debug)]
pub enum AnalysisDepth {
Quick,
Standard,
Comprehensive,
Deep,
}
/// Goal the `optimize` command should favor.
#[derive(clap::ValueEnum, Clone, Debug)]
pub enum OptimizationStrategy {
Performance,
Size,
Balanced,
Security,
}
/// Ordering criterion for `search` results.
#[derive(clap::ValueEnum, Clone, Debug)]
pub enum SortBy {
Relevance,
Downloads,
Updated,
Name,
Stars,
}
/// Categories of tests selectable via `test --types`.
/// `Serialize` is derived so the selection can be echoed in JSON output.
#[derive(clap::ValueEnum, Clone, Debug, Serialize)]
pub enum TestType {
Unit,
Integration,
Performance,
Security,
Compatibility,
}
/// Persona/mode passed along with `chat` messages.
#[derive(clap::ValueEnum, Clone, Debug)]
pub enum ChatMode {
Assistant,
CodeReview,
Tutor,
Debugging,
}
/// Which benchmark suite `benchmark` should run.
#[derive(clap::ValueEnum, Clone, Debug)]
pub enum BenchmarkType {
All,
Generation,
Analysis,
Search,
Api,
}
/// Internal bundle of `Commands::Generate` fields plus the global output
/// format, handed to `EnhancedCli::handle_generate`.
pub struct GenerateArgs {
// `None` triggers an interactive prompt (TTY only) or an error.
name: Option<String>,
// `None` triggers an interactive prompt (TTY only) or an error.
description: Option<String>,
// Comma-separated feature list, split in the handler.
features: Option<String>,
output_dir: PathBuf,
// Currently unused by the handler (populated from `_template`).
template: Option<String>,
interactive: bool,
format: OutputFormat,
}
/// Runtime state behind the CLI: core engine handle, terminal helpers, and
/// optional AI components (populated only when `OPENAI_API_KEY` is set).
pub struct EnhancedCli {
core: Arc<OpenCrates>,
// Held but currently unread (hence the underscore).
_term: Term,
theme: ColorfulTheme,
// `None` when no (non-empty) OPENAI_API_KEY was found at startup.
agent_manager: Option<AgentManager>,
openai_client: Option<Arc<OpenAIClient>>,
}
impl EnhancedCli {
/// Creates the CLI with the default configuration lookup.
/// Delegates to [`Self::new_with_config`] with no explicit path.
pub async fn new() -> Result<Self> {
Self::new_with_config(None).await
}
/// Creates the CLI, resolving configuration in order of preference:
/// the explicit `config_path` (if given), then the default lookup, then an
/// in-memory default configuration.
///
/// # Errors
/// Returns an error when the core engine, metrics, or AI components fail to
/// initialize.
pub async fn new_with_config(config_path: Option<PathBuf>) -> Result<Self> {
    let config_manager = if let Some(path) = config_path {
        ConfigManager::load_from_path(Some(&path)).unwrap_or_else(|_| {
            ConfigManager::load().unwrap_or_else(|_| {
                // Constructing a manager from the compiled-in defaults should
                // be infallible; if it isn't, that's a bug worth a clear panic
                // message rather than a bare unwrap.
                ConfigManager::new(OpenCratesConfig::default())
                    .expect("default OpenCrates configuration must be constructible")
            })
        })
    } else {
        ConfigManager::load().unwrap_or_else(|_| {
            ConfigManager::new(OpenCratesConfig::default())
                .expect("default OpenCrates configuration must be constructible")
        })
    };
    let config = config_manager.get_ref();
    let core = Arc::new(
        OpenCrates::new_with_config(config.clone())
            .await
            .context("Failed to initialize OpenCrates")?,
    );
    let term = Term::stdout();
    let theme = ColorfulTheme::default();
    // AI features are opt-in: enabled only when a non-empty OPENAI_API_KEY
    // is present in the environment.
    let (agent_manager, openai_client) = match std::env::var("OPENAI_API_KEY") {
        Ok(api_key) if !api_key.is_empty() => {
            let metrics = Arc::new(OpenCratesMetrics::new().await?);
            let cache = Arc::new(CacheManager::new());
            let config = OpenAIClientConfig {
                api_key: api_key.clone(),
                base_url: "https://api.openai.com/v1".to_string(),
                model: "gpt-4o".to_string(),
                max_tokens: Some(4096),
                temperature: Some(0.7),
                timeout: Duration::from_secs(60),
                max_retries: 3,
            };
            let openai_client = Arc::new(OpenAIClient::new(config, metrics, cache));
            let openai_provider = OpenAIProvider::new().await?;
            let agent_manager = AgentManager::new(openai_provider).await?;
            (Some(agent_manager), Some(openai_client))
        }
        _ => (None, None),
    };
    Ok(Self {
        core,
        _term: term,
        theme,
        agent_manager,
        openai_client,
    })
}
/// Entry point: prints the banner, then dispatches the parsed subcommand to
/// its handler, passing the global output format along.
///
/// # Errors
/// Propagates any error returned by the invoked handler.
pub async fn run(&mut self, cli: Cli) -> Result<()> {
self.print_welcome_banner()?;
match cli.command {
Commands::Init {
path,
name,
template,
ai_suggestions,
} => {
self.handle_init(path, name, template, ai_suggestions, &cli.format)
.await?;
}
// Generate bundles its fields (plus the format, by value) into GenerateArgs.
Commands::Generate {
name,
description,
features,
output_dir,
_template,
interactive,
_model,
} => {
self.handle_generate(GenerateArgs {
name: Some(name),
description: Some(description),
features,
output_dir,
template: _template,
interactive,
format: cli.format,
})
.await?;
}
Commands::Analyze {
path,
depth,
recommendations,
_detailed,
} => {
self.handle_analyze(path, depth, recommendations, &cli.format)
.await?;
}
Commands::Optimize {
path,
strategy,
auto_apply,
} => {
self.handle_optimize(path, strategy, auto_apply, &cli.format)
.await?;
}
Commands::Search {
query,
limit,
category,
sort,
_detailed,
} => {
self.handle_search(query, limit, category, sort, &cli.format)
.await?;
}
Commands::Serve {
host,
port,
hot_reload,
dev,
} => {
self.handle_serve(host, port, hot_reload, dev, &cli.format)
.await?;
}
Commands::Test {
path,
types,
coverage,
reports,
} => {
self.handle_test(path, types, coverage, reports, &cli.format)
.await?;
}
Commands::Chat {
message,
mode,
functions,
} => {
self.handle_chat(message, mode, functions, &cli.format)
.await?;
}
Commands::Aider { files, command } => {
self.handle_aider(files, command, &cli.format).await?;
}
Commands::Aichat { prompt, model } => {
self.handle_aichat(prompt, model, &cli.format).await?;
}
// Tui/Health intentionally discard their unused (underscored) fields.
Commands::Tui {
_refresh,
_realtime,
} => self.handle_tui().await?,
Commands::Health {
_component,
detailed,
} => self.handle_health(detailed, &cli.format).await?,
Commands::Benchmark {
benchmark_type,
iterations,
} => {
self.handle_benchmark(benchmark_type, iterations, &cli.format)
.await?;
}
Commands::Config(args) => self.handle_config(args.action, &cli.format).await?,
Commands::Agent(args) => self.handle_agent(args.action, &cli.format).await?,
Commands::Task { task, args } => self.handle_task(task, args, &cli.format).await?,
}
Ok(())
}
/// Prints the three-line branded banner shown before every command.
fn print_welcome_banner(&self) -> Result<()> {
    let title = format!(
        "{}{}",
        style("Welcome to OpenCrates").cyan().bold(),
        style(" - The Ultimate Rust Development Companion").dim()
    );
    let tagline = format!(
        "{}{}{}{}{}",
        style("AI-Powered").green(),
        style(" • ").dim(),
        style("Fast").blue(),
        style(" • ").dim(),
        style("Comprehensive").magenta()
    );
    let version = format!(
        "{}{}",
        style(" v").dim(),
        style(env!("CARGO_PKG_VERSION")).yellow()
    );
    println!("{title}\n{tagline}\n{version}");
    Ok(())
}
/// Initializes a new project on disk via the core engine.
///
/// The project name is resolved in order: explicit `--name`, the target
/// path's file name, an interactive prompt (only when stdin is a terminal),
/// and finally the fixed fallback `"opencrates-project"`.
/// `_template` and `_ai_suggestions` are accepted but currently unused.
async fn handle_init(
    &mut self,
    path: Option<PathBuf>,
    name: Option<String>,
    _template: String,
    _ai_suggestions: bool,
    format: &OutputFormat,
) -> Result<()> {
    info!("Initializing new project...");
    self.print_success("Starting project initialization...", format);
    let project_name = match (name, &path) {
        (Some(explicit), _) => explicit,
        (None, Some(p)) => p.file_name().and_then(|n| n.to_str()).map_or_else(
            || "opencrates-project".to_string(),
            std::string::ToString::to_string,
        ),
        (None, None) if std::io::stdin().is_terminal() => Input::with_theme(&self.theme)
            .with_prompt("Project name")
            .interact_text()?,
        (None, None) => "opencrates-project".to_string(),
    };
    // Default the project directory to the resolved name.
    let project_path = path.unwrap_or_else(|| PathBuf::from(&project_name));
    let spinner = self.create_spinner("Initializing project structure...");
    self.core.init_project(project_path.clone()).await?;
    spinner.finish_with_message("Project structure initialized.");
    self.print_success(
        &format!(
            "Successfully initialized project '{}' at '{}'",
            project_name,
            project_path.display()
        ),
        format,
    );
    Ok(())
}
/// Generates a crate via the core engine.
///
/// Name and description are required; when missing they are prompted for
/// only if `--interactive` was passed and stdin is a terminal, otherwise an
/// error is returned.
///
/// # Errors
/// Returns an error when a required field cannot be resolved or generation
/// fails.
async fn handle_generate(&mut self, args: GenerateArgs) -> Result<()> {
    let name = match args.name {
        Some(name) => name,
        None if args.interactive && std::io::stdin().is_terminal() => {
            Input::with_theme(&self.theme)
                .with_prompt("Crate name")
                .interact()?
        }
        None => {
            return Err(anyhow::anyhow!(
                "Crate name is required when not running interactively"
            ))
        }
    };
    let description = match args.description {
        Some(description) => description,
        None if args.interactive && std::io::stdin().is_terminal() => {
            Input::with_theme(&self.theme)
                .with_prompt("Crate description")
                .interact()?
        }
        None => {
            return Err(anyhow::anyhow!(
                "Crate description is required when not running interactively"
            ))
        }
    };
    // "a, b,c" -> ["a", "b", "c"]; no --features yields an empty list.
    let features_vec = args
        .features
        .map(|s| s.split(',').map(|s| s.trim().to_string()).collect())
        .unwrap_or_default();
    let spinner = self.create_spinner("Generating crate with AI...");
    let result = self
        .core
        .generate_crate(&name, &description, features_vec, args.output_dir.clone())
        .await;
    // Finish the spinner before propagating any error so the terminal is
    // left in a clean state either way.
    spinner.finish_with_message("Crate generation finished.");
    result.context("Crate generation failed")?;
    self.print_success(
        &format!(
            "Successfully generated crate '{}' at '{}'",
            name,
            args.output_dir.display()
        ),
        &args.format,
    );
    Ok(())
}
/// Runs the core analyzer on `path`, enriches the result with local project
/// facts, prints a summary, and (optionally) static recommendations.
/// `_format` is currently unused; output is always styled text.
async fn handle_analyze(
    &mut self,
    path: PathBuf,
    depth: AnalysisDepth,
    recommendations: bool,
    _format: &OutputFormat,
) -> Result<()> {
    let pb = self.create_spinner("Analyzing project...");
    let original_analysis = self.core.analyze_crate(&path).await?;
    let enriched_analysis = EnrichedAnalysis {
        original_analysis,
        project_info: ProjectInfo::new()?,
    };
    pb.finish_with_message("Analysis complete.");
    self.print_enhanced_analysis(&enriched_analysis, &depth, recommendations)?;
    if !recommendations {
        return Ok(());
    }
    // Hard-coded recommendations; the spinner keeps the UX consistent with
    // the AI-backed handlers.
    let pb_recs = self.create_spinner("Generating recommendations...");
    let recs = [
        "Consider adding more documentation",
        "Review dependency management",
        "Consider adding more tests",
    ];
    pb_recs.finish_with_message("Recommendations generated.");
    println!("\n{}", style("AI Recommendations:").bold().green());
    for rec in recs {
        println!("- {rec}");
    }
    Ok(())
}
/// Searches for crates by delegating the whole query to the AI assistant.
///
/// Without an OpenAI client (no API key at startup) this only prints a
/// notice — there is no non-AI search fallback in this handler.
async fn handle_search(
&mut self,
query: String,
limit: usize,
category: Option<String>,
sort: SortBy,
format: &OutputFormat,
) -> Result<()> {
let spinner = self.create_spinner(&format!("Searching for '{query}'..."));
if let Some(client) = &self.openai_client {
let assistant = CrateAssistant::new(client.clone()).await?;
// The search parameters are folded into a single natural-language
// instruction; the assistant's reply is treated as the result text.
let search_results = assistant
.assist(format!(
"Search for Rust crates related to: '{query}'. Category: {category:?}, Sort by: {sort:?}, Limit: {limit}"
))
.await?;
spinner.finish_with_message("Search completed");
if let OutputFormat::Json = format {
println!(
"{}",
serde_json::json!({
"query": query,
"results": search_results,
"limit": limit
})
);
} else {
println!("{}", style("SEARCH RESULTS").bold().cyan());
println!("{search_results}");
}
} else {
spinner.finish_with_message("Search completed (limited functionality without AI)");
println!("Search functionality requires OpenAI API key");
}
Ok(())
}
/// Reports server status and blocks until Ctrl+C.
///
/// NOTE(review): nothing here actually binds `host:port` or serves traffic —
/// the handler only updates spinner messages, prints status, and waits for
/// the interrupt signal. Confirm whether real serving lives elsewhere.
async fn handle_serve(
&mut self,
host: String,
port: u16,
hot_reload: bool,
dev: bool,
format: &OutputFormat,
) -> Result<()> {
let spinner = self.create_spinner("Starting OpenCrates server...");
spinner.set_message("Initializing server components...");
if dev {
spinner.set_message("Development mode enabled...");
}
if hot_reload {
spinner.set_message("Hot reload enabled...");
}
spinner.finish_with_message("Server started successfully");
if let OutputFormat::Json = format {
println!(
"{}",
serde_json::json!({
"status": "running",
"host": host,
"port": port,
"dev_mode": dev,
"hot_reload": hot_reload
})
);
} else {
println!("{}", style("SERVER STARTED").bold().green());
println!("Server running at: http://{host}:{port}");
if dev {
println!("{}", style("Development mode: ON").yellow());
}
if hot_reload {
println!("{}", style("Hot reload: ON").yellow());
}
}
info!("Server is running. Press Ctrl+C to stop.");
// Block the task until the user interrupts.
tokio::signal::ctrl_c().await?;
println!("\nShutting down server...");
Ok(())
}
/// Launches the interactive TUI dashboard, sharing the core engine handle.
async fn handle_tui(&mut self) -> Result<()> {
info!("Starting TUI dashboard");
crate::tui::run_tui(self.core.clone()).await?;
Ok(())
}
/// Queries core health and renders it as raw JSON or a styled summary.
async fn handle_health(&mut self, detailed: bool, format: &OutputFormat) -> Result<()> {
    let health_check = self.core.health_check().await?;
    match format {
        OutputFormat::Json => println!("{}", serde_json::to_string_pretty(&health_check)?),
        _ => {
            println!("{}", style("SYSTEM HEALTH").bold().blue());
            self.print_enhanced_health_json(&serde_json::to_value(&health_check)?, detailed)?;
        }
    }
    Ok(())
}
/// Builds the spinner used by long-running operations; ticks on a steady
/// timer so it animates even while the async work is not yielding.
fn create_spinner(&self, message: &str) -> ProgressBar {
    let pb = ProgressBar::new_spinner();
    pb.enable_steady_tick(Duration::from_millis(120));
    pb.set_style(
        ProgressStyle::default_spinner()
            .tick_strings(&["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"])
            .template("{spinner:.blue} {msg}")
            // The template is a static, known-good literal; failing to parse
            // it is a programming bug, so state the invariant instead of a
            // bare unwrap.
            .expect("static spinner template must parse"),
    );
    pb.set_message(message.to_string());
    pb
}
/// Emits a success line, either as structured JSON or as a styled message.
fn print_success(&self, message: &str, format: &OutputFormat) {
    if matches!(format, OutputFormat::Json) {
        println!(
            "{}",
            serde_json::json!({"status": "success", "message": message})
        );
    } else {
        println!("{} {}", style("SUCCESS:").green().bold(), message);
    }
}
/// Prints the analysis summary: code metrics, then Git and Rust toolchain
/// sections when that project info is available.
///
/// Optional string fields are displayed via `as_deref().unwrap_or("")`
/// instead of `clone().unwrap_or_default()`, avoiding a per-field String
/// allocation for the same output.
fn print_enhanced_analysis(
    &self,
    analysis: &EnrichedAnalysis,
    depth: &AnalysisDepth,
    recommendations: bool,
) -> Result<()> {
    println!("\n--- Project Analysis ---");
    println!("Analysis depth: {depth:?}");
    if recommendations {
        println!("{}", style("Recommendations:").yellow().bold());
        println!(" {} Available", style("Metrics:").dim());
    }
    println!(
        "Lines of code: {}",
        analysis.original_analysis.metrics.total_lines
    );
    println!(
        "Files analyzed: {}",
        analysis.original_analysis.metrics.total_files
    );
    if let Some(git_info) = &analysis.project_info.git_info {
        println!("\n--- Git Information ---");
        println!(
            "Branch: {}",
            git_info.current_branch.as_deref().unwrap_or("")
        );
        println!(
            "Commit: {}",
            git_info.head.last_commit_hash.as_deref().unwrap_or("")
        );
    }
    if let Some(rust_info) = &analysis.project_info.rust_info {
        println!("\n--- Rust Information ---");
        println!("Version: {}", rust_info.version.as_deref().unwrap_or(""));
        println!("Channel: {:?}", rust_info.channel);
    }
    Ok(())
}
/// Flat key/value dump of the health JSON object; non-object values print
/// nothing. The `detailed` flag appends a placeholder metrics section.
fn print_enhanced_health_json(&self, health: &serde_json::Value, detailed: bool) -> Result<()> {
    if let Some(fields) = health.as_object() {
        for (name, value) in fields {
            println!("{}: {}", style(name).blue().bold(), value);
        }
    }
    if detailed {
        println!("{}", style("System Metrics:").blue().bold());
        println!(" {} Available", style("Metrics:").dim());
    }
    Ok(())
}
/// Asks the AI assistant for optimization suggestions for the project.
///
/// Despite the `auto_apply` flag being forwarded in the prompt, nothing is
/// applied here — the handler only prints the assistant's suggestions.
/// Without an OpenAI client it prints a notice and returns.
async fn handle_optimize(
&mut self,
path: PathBuf,
strategy: OptimizationStrategy,
auto_apply: bool,
format: &OutputFormat,
) -> Result<()> {
let spinner = self.create_spinner("Optimizing project...");
if let Some(client) = &self.openai_client {
let assistant = CrateAssistant::new(client.clone()).await?;
let optimization_suggestions = assistant
.assist(format!(
"Optimize this Rust project at path: {path:?} using strategy: {strategy:?}. Auto-apply: {auto_apply}"
))
.await?;
spinner.finish_with_message("Optimization analysis completed");
if let OutputFormat::Json = format {
println!(
"{}",
serde_json::json!({
"path": path,
"strategy": format!("{:?}", strategy),
"suggestions": optimization_suggestions
})
);
} else {
println!("{}", style("OPTIMIZATION SUGGESTIONS").bold().yellow());
println!("{optimization_suggestions}");
}
} else {
spinner.finish_with_message("Optimization requires AI features");
println!("Optimization functionality requires OpenAI API key");
}
Ok(())
}
/// Runs `cargo test` in `path` and reports the captured output.
///
/// `types`, `coverage`, and `reports` are only echoed in the JSON output;
/// they do not influence which tests run.
/// NOTE(review): `std::process::Command::output()` blocks the async runtime
/// thread for the duration of the test run — consider `tokio::process` or
/// `spawn_blocking` if this runs on a multiplexed runtime.
async fn handle_test(
&mut self,
path: PathBuf,
types: Vec<TestType>,
coverage: u8,
reports: bool,
format: &OutputFormat,
) -> Result<()> {
let spinner = self.create_spinner("Running tests...");
let test_command = std::process::Command::new("cargo")
.arg("test")
.current_dir(&path)
.output()?;
spinner.finish_with_message("Tests completed");
let test_output = String::from_utf8_lossy(&test_command.stdout);
let test_errors = String::from_utf8_lossy(&test_command.stderr);
if let OutputFormat::Json = format {
println!(
"{}",
serde_json::json!({
"success": test_command.status.success(),
"stdout": test_output,
"stderr": test_errors,
"types": types,
"coverage_threshold": coverage,
"reports_enabled": reports
})
);
} else {
if test_command.status.success() {
println!("{}", style("TESTS PASSED").green().bold());
} else {
println!("{}", style("TESTS FAILED").red().bold());
}
println!("{test_output}");
if !test_errors.is_empty() {
println!("{}", style("Errors:").red());
println!("{test_errors}");
}
}
Ok(())
}
/// Sends a chat message (with mode and function-calling flags folded into
/// the prompt) to the AI assistant and prints the reply.
///
/// NOTE(review): unlike `handle_init`/`handle_generate`, the interactive
/// prompt here is not guarded by a stdin TTY check — confirm behavior when
/// stdin is piped.
async fn handle_chat(
&mut self,
message: Option<String>,
mode: ChatMode,
functions: bool,
format: &OutputFormat,
) -> Result<()> {
if let Some(client) = &self.openai_client {
let assistant = CrateAssistant::new(client.clone()).await?;
let chat_message = if let Some(msg) = message {
msg
} else {
Input::with_theme(&self.theme)
.with_prompt("Enter your message")
.interact_text()?
};
let response = assistant
.assist(format!(
"Mode: {mode:?}, Functions enabled: {functions}, Message: {chat_message}"
))
.await?;
if let OutputFormat::Json = format {
println!(
"{}",
serde_json::json!({
"mode": format!("{:?}", mode),
"functions": functions,
"message": chat_message,
"response": response
})
);
} else {
println!("{}", style("AI ASSISTANT").bold().blue());
println!("{response}");
}
} else {
println!("Chat functionality requires OpenAI API key");
}
Ok(())
}
/// Shells out to the external `aider` tool with the given files and an
/// optional `--message` command (prompted for when absent).
///
/// NOTE(review): `Command::output()` blocks the async runtime thread until
/// `aider` exits; acceptable for a single-command CLI, but worth confirming.
async fn handle_aider(
&mut self,
files: Vec<PathBuf>,
command: Option<String>,
format: &OutputFormat,
) -> Result<()> {
let aider_command = if let Some(cmd) = command {
cmd
} else {
Input::with_theme(&self.theme)
.with_prompt("Enter Aider command")
.interact_text()?
};
let mut cmd = std::process::Command::new("aider");
for file in &files {
cmd.arg(file);
}
// An empty command means: launch aider on the files with no message.
if !aider_command.is_empty() {
cmd.arg("--message").arg(&aider_command);
}
let output = cmd.output()?;
if let OutputFormat::Json = format {
println!(
"{}",
serde_json::json!({
"files": files,
"command": aider_command,
"success": output.status.success(),
"stdout": String::from_utf8_lossy(&output.stdout),
"stderr": String::from_utf8_lossy(&output.stderr)
})
);
} else {
println!("{}", style("AIDER OUTPUT").bold().magenta());
println!("{}", String::from_utf8_lossy(&output.stdout));
if !output.stderr.is_empty() {
println!("{}", style("Errors:").red());
println!("{}", String::from_utf8_lossy(&output.stderr));
}
}
Ok(())
}
/// One-shot AI prompt: sends `prompt` to the assistant and prints the reply.
/// The `model` argument is only echoed in JSON output; the assistant uses
/// the client's configured model.
async fn handle_aichat(
    &mut self,
    prompt: String,
    model: Option<String>,
    format: &OutputFormat,
) -> Result<()> {
    if let Some(client) = &self.openai_client {
        let assistant = CrateAssistant::new(client.clone()).await?;
        let response = assistant.assist(prompt.clone()).await?;
        match format {
            OutputFormat::Json => println!(
                "{}",
                serde_json::json!({
                    "prompt": prompt,
                    "model": model,
                    "response": response
                })
            ),
            _ => {
                println!("{}", style("AI CHAT").bold().green());
                println!("{response}");
            }
        }
    } else {
        println!("AI Chat functionality requires OpenAI API key");
    }
    Ok(())
}
/// Runs the (currently synthetic) benchmark suite: times the run and reports
/// a one-line summary in the selected output format.
async fn handle_benchmark(
    &mut self,
    benchmark_type: BenchmarkType,
    iterations: u32,
    format: &OutputFormat,
) -> Result<()> {
    let spinner = self.create_spinner("Running benchmarks...");
    let start = std::time::Instant::now();
    // Each variant only differs by its suite label.
    let suite = match benchmark_type {
        BenchmarkType::All => "all benchmarks",
        BenchmarkType::Generation => "generation benchmarks",
        BenchmarkType::Analysis => "analysis benchmarks",
        BenchmarkType::Search => "search benchmarks",
        BenchmarkType::Api => "API benchmarks",
    };
    let benchmark_results = format!("Completed {iterations} iterations of {suite}");
    let duration = start.elapsed();
    spinner.finish_with_message("Benchmarks completed");
    match format {
        OutputFormat::Json => println!(
            "{}",
            serde_json::json!({
                "benchmark_type": format!("{:?}", benchmark_type),
                "iterations": iterations,
                "duration_ms": duration.as_millis(),
                "results": benchmark_results
            })
        ),
        _ => {
            println!("{}", style("BENCHMARK RESULTS").bold().yellow());
            println!("Type: {benchmark_type:?}");
            println!("Iterations: {iterations}");
            println!("Duration: {duration:?}");
            println!("{benchmark_results}");
        }
    }
    Ok(())
}
/// Dispatches `config` subcommands. Only `Show` reads real state; the other
/// actions are currently acknowledgement messages.
async fn handle_config(&mut self, action: ConfigAction, format: &OutputFormat) -> Result<()> {
    match action {
        ConfigAction::Show => {
            let config = self.core.get_config_for_server().await?;
            if matches!(format, OutputFormat::Json) {
                println!("{}", serde_json::to_string_pretty(&config)?);
            } else {
                println!("{}", toml::to_string_pretty(&config)?);
            }
        }
        ConfigAction::Set { key, value } => println!("Setting {key} = {value}"),
        ConfigAction::Reset => println!("Configuration reset to default"),
        ConfigAction::Validate => println!("Configuration validated successfully"),
    }
    Ok(())
}
/// Dispatches `agent` subcommands through the agent manager.
///
/// `Create` only echoes the provided fields — no agent is registered or
/// persisted here. Without an agent manager (no API key) a notice is printed.
async fn handle_agent(&mut self, action: AgentAction, format: &OutputFormat) -> Result<()> {
if let Some(agent_manager) = &self.agent_manager {
match action {
AgentAction::List => {
let agents = agent_manager.list_agents().await;
if let OutputFormat::Json = format {
println!("{}", serde_json::to_string_pretty(&agents)?);
} else {
println!("{}", style("AVAILABLE AGENTS").bold().blue());
if agents.is_empty() {
println!("No agents available.");
} else {
for agent_name in agents {
println!("• {}", style(agent_name).bold());
}
}
}
}
// `context` is accepted by the CLI but not forwarded to the agent.
AgentAction::Execute {
agent_id,
message,
context: _, } => {
let response = agent_manager.execute_agent(&agent_id, &message).await?;
if let OutputFormat::Json = format {
println!("{}", serde_json::json!({ "response": response }));
} else {
println!("{}", style("AGENT RESPONSE").bold().green());
println!("{response}");
}
}
AgentAction::Create {
name,
description,
instructions,
} => {
if let OutputFormat::Json = format {
println!(
"{}",
serde_json::json!({
"status": "created",
"name": name,
"description": description,
"instructions": instructions
})
);
} else {
println!("{}", style("AGENT CREATED").bold().green());
println!("Name: {name}");
println!("Description: {description}");
}
}
}
} else {
println!("Agent functionality requires OpenAI API key");
}
Ok(())
}
/// Runs `cargo make <task>` through `run_script`, inheriting the parent's
/// stdio so the task's output streams directly to the user.
///
/// # Errors
/// Returns an error when the script cannot be launched or exits non-zero.
async fn handle_task(
    &mut self,
    task: String,
    args: Vec<String>,
    _format: &OutputFormat,
) -> Result<()> {
    info!("Running task: {} with args: {:?}", task, args);
    let options = run_script::ScriptOptions {
        runner: None,
        working_directory: None,
        input_redirection: run_script::IoOptions::Inherit,
        output_redirection: run_script::IoOptions::Inherit,
        exit_on_error: true,
        print_commands: false,
        env_vars: None,
        runner_args: None,
    };
    let (code, output, error) =
        run_script::run(&format!("cargo make {task}"), &args, &options)?;
    if code != 0 {
        // Bug fix: the message previously used "\\n", which printed a
        // literal backslash-n instead of breaking the lines.
        anyhow::bail!(
            "Task failed with exit code: {}\n{}\n{}",
            code,
            output,
            error
        );
    }
    Ok(())
}
}
// Intentionally empty Drop: EnhancedCli currently needs no explicit cleanup
// on teardown. Kept as an explicit no-op hook; remove if it stays unused.
impl Drop for EnhancedCli {
fn drop(&mut self) {
}
}
pub mod enhanced_ai_commands;