syncable_cli/lib.rs

pub mod agent;
pub mod analyzer;
pub mod cli;
pub mod common;
pub mod config;
pub mod error;
pub mod generator;
pub mod handlers;
pub mod telemetry;  // Telemetry module

// Re-export commonly used types and functions
pub use analyzer::{ProjectAnalysis, analyze_project};
use cli::Commands;
pub use error::{IaCGeneratorError, Result};
pub use generator::{generate_compose, generate_dockerfile, generate_terraform};
pub use handlers::*;
pub use telemetry::{TelemetryClient, TelemetryConfig, UserId};  // Re-export telemetry types

/// The current version of the CLI tool
pub const VERSION: &str = env!("CARGO_PKG_VERSION");

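/// Dispatch a parsed CLI command to its handler.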
pub async fn run_command(command: Commands) -> Result<()> {
    match command {
        Commands::Analyze {
            path,
            json,
            detailed,
            display,
            only,
            color_scheme,
        } => {
            // The output was already printed by display_analysis_with_return,
            // so discard the returned value.
            handlers::handle_analyze(path, json, detailed, display, only, color_scheme)
                .map(|_| ())
        }
        Commands::Generate {
            path,
            output,
            dockerfile,
            compose,
            terraform,
            all,
            dry_run,
            force,
        } => handlers::handle_generate(
            path, output, dockerfile, compose, terraform, all, dry_run, force,
        ),
        Commands::Validate { path, types, fix } => handlers::handle_validate(path, types, fix),
        Commands::Support {
            languages,
            frameworks,
            detailed,
        } => handlers::handle_support(languages, frameworks, detailed),
        Commands::Dependencies {
            path,
            licenses,
            vulnerabilities,
            prod_only,
            dev_only,
            format,
        } => handlers::handle_dependencies(
            path,
            licenses,
            vulnerabilities,
            prod_only,
            dev_only,
            format,
        )
        .await
        .map(|_| ()),
        Commands::Vulnerabilities {
            path,
            severity,
            format,
            output,
        } => handlers::handle_vulnerabilities(path, severity, format, output).await,
        Commands::Security {
            path,
            mode,
            include_low,
            no_secrets,
            no_code_patterns,
            no_infrastructure,
            no_compliance,
            frameworks,
            format,
            output,
            fail_on_findings,
        } => {
            handlers::handle_security(
                path,
                mode,
                include_low,
                no_secrets,
                no_code_patterns,
                no_infrastructure,
                no_compliance,
                frameworks,
                format,
                output,
                fail_on_findings,
            )
            .map(|_| ()) // Map Result<String> to Result<()>
        }
        Commands::Tools { command } => handlers::handle_tools(command).await,
        Commands::Chat { path, provider, model, query } => {
            use agent::ProviderType;
            use cli::ChatProvider;
            use config::load_agent_config;

            let project_path = path.canonicalize().unwrap_or(path);

            // Load saved config for Auto mode
            let agent_config = load_agent_config();

            // Determine provider - use saved default if Auto
            let (provider_type, effective_model) = match provider {
                ChatProvider::Openai => (ProviderType::OpenAI, model),
                ChatProvider::Anthropic => (ProviderType::Anthropic, model),
                ChatProvider::Bedrock => (ProviderType::Bedrock, model),
                ChatProvider::Ollama => {
                    eprintln!("Ollama support coming soon. Using OpenAI as fallback.");
                    (ProviderType::OpenAI, model)
                }
                ChatProvider::Auto => {
                    // Load from saved config
                    let saved_provider = match agent_config.default_provider.as_str() {
                        "openai" => ProviderType::OpenAI,
                        "anthropic" => ProviderType::Anthropic,
                        "bedrock" => ProviderType::Bedrock,
                        _ => ProviderType::OpenAI, // Fallback
                    };
                    // Use saved model if no explicit model provided
                    let saved_model = if model.is_some() {
                        model
                    } else {
                        agent_config.default_model.clone()
                    };
                    (saved_provider, saved_model)
                }
            };

            // Load API key/credentials from config to environment
            // This is essential for Bedrock bearer token auth!
            agent::session::ChatSession::load_api_key_to_env(provider_type);

            if let Some(q) = query {
                let response = agent::run_query(&project_path, &q, provider_type, effective_model).await?;
                println!("{}", response);
                Ok(())
            } else {
                agent::run_interactive(&project_path, provider_type, effective_model).await?;
                Ok(())
            }
        }
    }
}