use anyhow::Result;
#[cfg(feature = "api")]
use axum::{
extract::{Json, State},
http::StatusCode,
response::IntoResponse,
routing::post,
Router,
};
use serde::{Deserialize, Serialize};
use std::path::PathBuf;
#[cfg(feature = "api")]
use tower_http::cors::CorsLayer;
use crate::unified_pipeline::analyze_repository;
/// JSON request body for `POST /analyze`.
#[derive(Debug, Clone, Deserialize)]
pub struct AnalyzeRequest {
    /// Local filesystem path of the repository to analyze; validated for
    /// existence by the handler before the pipeline runs.
    pub repo_path: String,
    /// Destination S3 bucket for the generated blueprint.
    pub s3_bucket: String,
    /// Object key within `s3_bucket` to publish under.
    pub s3_key: String,
    /// Optional worker-count override passed through to the pipeline;
    /// `None` lets the pipeline pick its own default.
    pub workers: Option<usize>,
}
/// JSON success response for `POST /analyze`.
#[derive(Debug, Clone, Serialize)]
pub struct AnalyzeResponse {
    /// S3 URL where the analysis output was published.
    pub s3_url: String,
    /// Human-readable one-line summary of what was analyzed and where
    /// the result landed.
    pub summary: String,
}
/// Shared application state cloned into each request handler by axum.
#[derive(Clone)]
pub struct ApiState {
    // API key forwarded to the LLM backend on every analysis run.
    llm_api_key: String,
}
#[cfg(feature = "api")]
/// Builds the API router with a single `POST /analyze` route, permissive
/// CORS, and the supplied LLM credential as shared handler state.
///
/// NOTE(review): this function awaits nothing; `async` appears unnecessary
/// but is kept so existing `.await` call sites stay valid.
pub async fn create_api_router(llm_api_key: String) -> axum::Router {
    // State handed to every handler invocation via axum's `State` extractor.
    let shared = ApiState { llm_api_key };
    let routes = Router::new().route("/analyze", post(analyze_handler));
    // CorsLayer::permissive() allows any origin/method/header.
    routes.layer(CorsLayer::permissive()).with_state(shared)
}
#[cfg(feature = "api")]
/// Handles `POST /analyze`: validates the repository path, runs the
/// analysis pipeline, and returns either the published S3 URL (200),
/// a validation error (400), or a pipeline failure (500).
async fn analyze_handler(
    State(state): State<ApiState>,
    Json(request): Json<AnalyzeRequest>,
) -> impl IntoResponse {
    let repo_path = PathBuf::from(&request.repo_path);

    // Guard clause: bail out early if the path is not present on this host.
    if !repo_path.exists() {
        let body = Json(serde_json::json!({
            "error": "Repository path does not exist"
        }));
        return (StatusCode::BAD_REQUEST, body).into_response();
    }

    let outcome = analyze_repository(
        &repo_path,
        &request.s3_bucket,
        &request.s3_key,
        state.llm_api_key.clone(),
        request.workers,
    )
    .await;

    match outcome {
        Ok(s3_url) => {
            // Build the summary first so `s3_url` can then be moved into
            // the response without an extra copy.
            let summary = format!(
                "Successfully analyzed {} and published first-principles instructions to {}",
                request.repo_path, s3_url
            );
            let payload = AnalyzeResponse { s3_url, summary };
            (StatusCode::OK, Json(payload)).into_response()
        }
        Err(err) => {
            let body = Json(serde_json::json!({
                "error": format!("Analysis failed: {}", err)
            }));
            (StatusCode::INTERNAL_SERVER_ERROR, body).into_response()
        }
    }
}
/// Runs the full analysis pipeline from the command line, printing progress
/// banners to stdout and the S3 location of the published blueprint.
///
/// # Errors
/// Propagates any error returned by [`analyze_repository`] (repository
/// scanning, LLM calls, or the S3 upload).
pub async fn run_cli_analysis(
    repo_path: &str,
    s3_bucket: &str,
    s3_key: &str,
    llm_api_key: &str,
) -> Result<()> {
    // Default to info-level logging for this crate, but respect an explicit
    // RUST_LOG already set by the caller instead of clobbering it.
    if std::env::var_os("RUST_LOG").is_none() {
        std::env::set_var("RUST_LOG", "concept_analyzer=info");
    }
    // try_init: `init()` panics if a global logger is already installed
    // (e.g. when this function runs twice in one process); best-effort here.
    let _ = env_logger::try_init();

    println!("\n🎯 CONCEPT ANALYZER - First Principles Extractor");
    println!("━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━");
    println!("\nThis tool will:");
    println!("  1. Scan your repository for source files");
    println!("  2. Extract high-level concepts using AI");
    println!("  3. Analyze relationships between concepts");
    println!("  4. Identify critical gaps in functionality");
    println!("  5. Generate rebuild instructions from first principles");
    println!("  6. Publish results to S3 for agent consumption");
    println!("\n━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━");

    // CLI path always uses the pipeline's default worker count (`None`).
    let s3_url = analyze_repository(
        &PathBuf::from(repo_path),
        s3_bucket,
        s3_key,
        llm_api_key.to_string(),
        None,
    )
    .await?;

    println!("\n\n🎉 SUCCESS! Your first-principles blueprint is ready!");
    println!("━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━");
    println!("\n📋 Output Summary:");
    println!("  ✓ Essential concepts extracted");
    println!("  ✓ Core relationships mapped");
    println!("  ✓ Build order optimized");
    println!("  ✓ Critical gaps identified");
    println!("  ✓ Rebuild instructions generated");
    println!("\n📍 Location: {}", s3_url);
    println!("\n💡 An AI agent can now use this blueprint to:");
    println!("  • Understand the system's core purpose");
    println!("  • Rebuild it from scratch");
    println!("  • Fill in the identified gaps");
    println!("  • Maintain architectural consistency");
    println!("\n━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n");
    Ok(())
}