use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::fs;
use std::path::Path;
/// The kind of deployable project discovered at a directory root, carrying
/// the evidence each detector collected (manifest files, ports, parsed
/// package metadata).
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum ProjectType {
/// A root-level `Dockerfile` was found.
Docker {
has_dockerfile: bool,
},
/// One or more Docker Compose files were found.
DockerCompose {
/// The compose file names present (e.g. `docker-compose.yml`).
compose_files: Vec<String>,
/// Host ports parsed from `services.*.ports`, sorted and deduplicated.
detected_ports: Vec<u16>,
},
/// A Railway manifest (`railway.json` and/or `railway.toml`) was found.
Railway {
has_railway_json: bool,
has_railway_toml: bool,
},
/// A Python manifest (`requirements.txt` and/or `pyproject.toml`) was found.
Python {
has_requirements_txt: bool,
has_pyproject_toml: bool,
},
/// One or more OpenAPI/Swagger spec files were found at the root.
OpenApi {
spec_files: Vec<String>,
},
/// `.tf` files were found directly under the project root.
Terraform {
tf_file_count: usize,
},
/// A `package.json` declaring `next` as a (dev-)dependency was found.
NextJs {
package_json: PackageJsonInfo,
/// True when a `next.config.js`/`next.config.mjs` or `.next` dir exists.
has_next_config: bool,
},
/// A generic Node.js project (has `package.json`, not Next.js).
NodeJs {
package_json: PackageJsonInfo,
},
/// A Rust project (has `Cargo.toml` with a `[package]` section).
Rust {
cargo_toml: CargoTomlInfo,
},
/// No detector matched.
Unknown,
}
/// Subset of `package.json` fields relevant to deployment detection.
/// Missing fields are defaulted during parsing (see `parse_package_json`).
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct PackageJsonInfo {
/// `name` field; `"unknown"` when absent.
pub name: String,
/// `version` field; `"0.0.0"` when absent.
pub version: String,
/// `scripts` map; non-string values are skipped.
pub scripts: std::collections::HashMap<String, String>,
/// `dependencies` map; non-string values are skipped.
pub dependencies: std::collections::HashMap<String, String>,
/// `devDependencies` map; non-string values are skipped.
pub dev_dependencies: std::collections::HashMap<String, String>,
/// Optional `main` entry-point file.
pub main: Option<String>,
}
/// Subset of the `[package]` section of a `Cargo.toml`.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct CargoTomlInfo {
/// `package.name` — required; parsing fails without it.
pub name: String,
/// `package.version`; `"0.0.0"` when absent.
pub version: String,
pub description: Option<String>,
/// `package.authors`; empty when absent.
pub authors: Vec<String>,
pub edition: Option<String>,
}
/// Stateless namespace for project-type detection and deployment advice.
pub struct ProjectDetector;
impl ProjectDetector {
/// Identify the deployment-relevant project type rooted at `project_path`.
///
/// The path is canonicalized first; detectors then run in a fixed priority
/// order (compose before plain Docker, Next.js before generic Node.js) and
/// the first one that succeeds wins. Falls back to `ProjectType::Unknown`.
///
/// # Errors
/// Returns `Err` only when the path itself cannot be resolved.
pub async fn detect_project_type(project_path: &Path) -> Result<ProjectType, String> {
    let root = project_path
        .canonicalize()
        .map_err(|e| format!("Failed to resolve project path: {}", e))?;

    // Await each detector in turn and return on the first success.
    macro_rules! first_match {
        ($detector:expr) => {
            if let Ok(found) = $detector.await {
                return Ok(found);
            }
        };
    }

    first_match!(Self::detect_docker_compose(&root));
    first_match!(Self::detect_docker(&root));
    first_match!(Self::detect_railway(&root));
    first_match!(Self::detect_python(&root));
    first_match!(Self::detect_nextjs(&root));
    first_match!(Self::detect_nodejs(&root));
    first_match!(Self::detect_rust(&root));
    first_match!(Self::detect_openapi(&root));
    first_match!(Self::detect_terraform(&root));

    Ok(ProjectType::Unknown)
}
/// List every recognizable provider/deployment manifest sitting directly
/// under `project_path`, as human-readable labels (no recursion).
/// Terraform is summarized as a count of root-level `.tf` files.
pub fn detect_provider_manifests(project_path: &Path) -> Vec<String> {
    // Checked in priority order; output order matches this list.
    const MANIFEST_FILES: [&str; 14] = [
        "Dockerfile",
        "docker-compose.yml",
        "docker-compose.yaml",
        "compose.yml",
        "compose.yaml",
        "railway.json",
        "railway.toml",
        "requirements.txt",
        "pyproject.toml",
        "openapi.yaml",
        "openapi.yml",
        "swagger.yaml",
        "swagger.yml",
        "swagger.json",
    ];

    let mut detections: Vec<String> = MANIFEST_FILES
        .iter()
        .filter(|name| project_path.join(name).exists())
        .map(|name| name.to_string())
        .collect();

    // Count *.tf files (case-insensitive extension); unreadable dir => 0.
    let tf_count = fs::read_dir(project_path)
        .map(|entries| {
            entries
                .filter_map(Result::ok)
                .filter(|entry| {
                    entry
                        .path()
                        .extension()
                        .and_then(|ext| ext.to_str())
                        .map(|ext| ext.eq_ignore_ascii_case("tf"))
                        .unwrap_or(false)
                })
                .count()
        })
        .unwrap_or(0);
    if tf_count > 0 {
        detections.push(format!("Terraform (.tf) x{}", tf_count));
    }

    detections
}
/// Detect a Docker Compose project and collect the host ports it publishes.
///
/// Looks for the four conventional compose filenames; if any exist, each is
/// parsed as YAML and every `services.*.ports` entry is inspected. Ports are
/// returned sorted and deduplicated.
///
/// # Errors
/// Returns `Err` when no compose file is present. Unreadable or unparsable
/// compose files are skipped silently (detection still succeeds).
async fn detect_docker_compose(project_path: &Path) -> Result<ProjectType, String> {
    let compose_names = [
        "docker-compose.yml",
        "docker-compose.yaml",
        "compose.yml",
        "compose.yaml",
    ];
    let mut compose_files = Vec::new();
    for name in compose_names {
        if project_path.join(name).exists() {
            compose_files.push(name.to_string());
        }
    }
    if compose_files.is_empty() {
        return Err("No compose file found".to_string());
    }
    let mut detected_ports: Vec<u16> = Vec::new();
    for file in &compose_files {
        let path = project_path.join(file);
        if let Ok(contents) = fs::read_to_string(&path) {
            if let Ok(value) = serde_yaml::from_str::<serde_yaml::Value>(&contents) {
                if let Some(services) = value.get("services").and_then(|v| v.as_mapping()) {
                    for (_svc_name, svc_cfg) in services.iter() {
                        let ports = svc_cfg
                            .get("ports")
                            .and_then(|v| v.as_sequence())
                            .cloned()
                            .unwrap_or_default();
                        for p in ports {
                            if let Some(s) = p.as_str() {
                                // Compose short syntax is "CONTAINER",
                                // "HOST:CONTAINER" or "IP:HOST:CONTAINER".
                                // The host port is therefore the
                                // second-to-last colon-separated segment
                                // whenever a mapping is present. (Taking the
                                // *first* segment, as before, returned the IP
                                // for "127.0.0.1:8080:80" and dropped the
                                // port.)
                                let parts: Vec<&str> = s.split(':').collect();
                                let host = if parts.len() >= 2 {
                                    parts[parts.len() - 2]
                                } else {
                                    parts[0]
                                };
                                if let Ok(port) = host.parse::<u16>() {
                                    detected_ports.push(port);
                                }
                            } else if let Some(n) = p.as_i64() {
                                // Bare numeric entry, e.g. `- 8080`.
                                if n >= 1 && n <= u16::MAX as i64 {
                                    detected_ports.push(n as u16);
                                }
                            }
                        }
                    }
                }
            }
        }
    }
    detected_ports.sort_unstable();
    detected_ports.dedup();
    Ok(ProjectType::DockerCompose {
        compose_files,
        detected_ports,
    })
}
/// Detect a plain Dockerfile-based project: succeeds exactly when a
/// root-level `Dockerfile` exists.
async fn detect_docker(project_path: &Path) -> Result<ProjectType, String> {
    match project_path.join("Dockerfile").exists() {
        true => Ok(ProjectType::Docker {
            has_dockerfile: true,
        }),
        false => Err("No Dockerfile found".to_string()),
    }
}
/// Detect a Railway project via its `railway.json` / `railway.toml`
/// manifests; fails when neither is present.
async fn detect_railway(project_path: &Path) -> Result<ProjectType, String> {
    let has_railway_json = project_path.join("railway.json").exists();
    let has_railway_toml = project_path.join("railway.toml").exists();
    if has_railway_json || has_railway_toml {
        Ok(ProjectType::Railway {
            has_railway_json,
            has_railway_toml,
        })
    } else {
        Err("No railway manifest found".to_string())
    }
}
/// Detect a Python project via `requirements.txt` / `pyproject.toml`;
/// fails when neither manifest is present.
async fn detect_python(project_path: &Path) -> Result<ProjectType, String> {
    let has_requirements_txt = project_path.join("requirements.txt").exists();
    let has_pyproject_toml = project_path.join("pyproject.toml").exists();
    if has_requirements_txt || has_pyproject_toml {
        Ok(ProjectType::Python {
            has_requirements_txt,
            has_pyproject_toml,
        })
    } else {
        Err("No python manifest found".to_string())
    }
}
/// Detect an OpenAPI/Swagger project by collecting the conventional spec
/// filenames present at the project root; fails when none exist.
async fn detect_openapi(project_path: &Path) -> Result<ProjectType, String> {
    let spec_files: Vec<String> = [
        "openapi.yaml",
        "openapi.yml",
        "swagger.yaml",
        "swagger.yml",
        "swagger.json",
    ]
    .iter()
    .filter(|name| project_path.join(name).exists())
    .map(|name| name.to_string())
    .collect();
    if spec_files.is_empty() {
        Err("No OpenAPI spec found".to_string())
    } else {
        Ok(ProjectType::OpenApi { spec_files })
    }
}
/// Detect a Terraform project by counting root-level `.tf` files
/// (case-insensitive extension, no recursion).
///
/// # Errors
/// Fails when the directory cannot be read or contains no `.tf` files.
async fn detect_terraform(project_path: &Path) -> Result<ProjectType, String> {
    let entries = fs::read_dir(project_path).map_err(|_| "Failed to read directory".to_string())?;
    let tf_file_count = entries
        .flatten()
        .filter(|entry| {
            matches!(
                entry.path().extension().and_then(|e| e.to_str()),
                Some(ext) if ext.eq_ignore_ascii_case("tf")
            )
        })
        .count();
    if tf_file_count == 0 {
        Err("No terraform files found".to_string())
    } else {
        Ok(ProjectType::Terraform { tf_file_count })
    }
}
/// Detect a Next.js project: requires a `package.json` that lists `next` in
/// its dependencies or devDependencies. Also records whether a Next config
/// file (or a `.next` build directory) is present.
async fn detect_nextjs(project_path: &Path) -> Result<ProjectType, String> {
    let package_json_path = project_path.join("package.json");
    if !package_json_path.exists() {
        return Err("No package.json found".to_string());
    }
    let info = Self::parse_package_json(&package_json_path)?;
    let depends_on_next =
        info.dependencies.contains_key("next") || info.dev_dependencies.contains_key("next");
    if !depends_on_next {
        return Err("Next.js not found in dependencies".to_string());
    }
    let has_next_config = ["next.config.js", "next.config.mjs", ".next"]
        .iter()
        .any(|name| project_path.join(name).exists());
    Ok(ProjectType::NextJs {
        package_json: info,
        has_next_config,
    })
}
/// Detect a generic Node.js project: any directory with a parsable
/// root-level `package.json`.
async fn detect_nodejs(project_path: &Path) -> Result<ProjectType, String> {
    let manifest = project_path.join("package.json");
    if manifest.exists() {
        Self::parse_package_json(&manifest)
            .map(|package_json| ProjectType::NodeJs { package_json })
    } else {
        Err("No package.json found".to_string())
    }
}
/// Detect a Rust project: any directory with a parsable root-level
/// `Cargo.toml` containing a `[package]` section.
async fn detect_rust(project_path: &Path) -> Result<ProjectType, String> {
    let manifest = project_path.join("Cargo.toml");
    if manifest.exists() {
        Self::parse_cargo_toml(&manifest).map(|cargo_toml| ProjectType::Rust { cargo_toml })
    } else {
        Err("No Cargo.toml found".to_string())
    }
}
fn parse_package_json(path: &Path) -> Result<PackageJsonInfo, String> {
let content =
fs::read_to_string(path).map_err(|e| format!("Failed to read package.json: {}", e))?;
let json: Value = serde_json::from_str(&content)
.map_err(|e| format!("Failed to parse package.json: {}", e))?;
let name = json["name"].as_str().unwrap_or("unknown").to_string();
let version = json["version"].as_str().unwrap_or("0.0.0").to_string();
let scripts = json["scripts"]
.as_object()
.map(|obj| {
obj.iter()
.filter_map(|(k, v)| v.as_str().map(|s| (k.clone(), s.to_string())))
.collect()
})
.unwrap_or_default();
let dependencies = json["dependencies"]
.as_object()
.map(|obj| {
obj.iter()
.filter_map(|(k, v)| v.as_str().map(|s| (k.clone(), s.to_string())))
.collect()
})
.unwrap_or_default();
let dev_dependencies = json["devDependencies"]
.as_object()
.map(|obj| {
obj.iter()
.filter_map(|(k, v)| v.as_str().map(|s| (k.clone(), s.to_string())))
.collect()
})
.unwrap_or_default();
let main = json["main"].as_str().map(|s| s.to_string());
Ok(PackageJsonInfo {
name,
version,
scripts,
dependencies,
dev_dependencies,
main,
})
}
/// Parse the `[package]` section of a `Cargo.toml` into `CargoTomlInfo`.
///
/// `name` is required; `version` defaults to `"0.0.0"`; `description`,
/// `authors` and `edition` are optional (empty/`None` when absent).
///
/// # Errors
/// Returns `Err` when the file cannot be read, is not valid TOML, lacks a
/// `[package]` section, or lacks a `name`.
fn parse_cargo_toml(path: &Path) -> Result<CargoTomlInfo, String> {
    let content =
        fs::read_to_string(path).map_err(|e| format!("Failed to read Cargo.toml: {}", e))?;
    let manifest: toml::Value =
        toml::from_str(&content).map_err(|e| format!("Failed to parse Cargo.toml: {}", e))?;
    let package = manifest
        .get("package")
        .ok_or("No [package] section found in Cargo.toml")?;
    // Shared accessor for optional string-valued keys under [package].
    let str_field = |key: &str| package.get(key).and_then(|v| v.as_str()).map(str::to_string);
    Ok(CargoTomlInfo {
        name: str_field("name").ok_or("No name found in [package] section")?,
        version: str_field("version").unwrap_or_else(|| "0.0.0".to_string()),
        description: str_field("description"),
        authors: package
            .get("authors")
            .and_then(|v| v.as_array())
            .map(|arr| {
                arr.iter()
                    .filter_map(|v| v.as_str())
                    .map(str::to_string)
                    .collect()
            })
            .unwrap_or_default(),
        edition: str_field("edition"),
    })
}
/// Translate a detected [`ProjectType`] into concrete deployment advice:
/// optional install/build/start commands, a default port, and an optional
/// process name.
pub fn get_deployment_recommendations(project_type: &ProjectType) -> DeploymentRecommendations {
    match project_type {
        ProjectType::DockerCompose { detected_ports, .. } => DeploymentRecommendations {
            build_command: Some("docker compose build".to_string()),
            start_command: Some("docker compose up -d".to_string()),
            install_command: None,
            // Prefer the first host port published by the compose files.
            default_port: detected_ports.first().copied().unwrap_or(80),
            process_name: None,
            requires_build: true,
        },
        ProjectType::Docker { .. } => DeploymentRecommendations {
            build_command: Some("docker build -t <image-name> .".to_string()),
            start_command: Some(
                "docker run -p <host-port>:<container-port> <image-name>".to_string(),
            ),
            install_command: None,
            default_port: 80,
            process_name: None,
            requires_build: true,
        },
        // Railway builds and runs the project itself; no local commands.
        ProjectType::Railway { .. } => DeploymentRecommendations {
            build_command: None,
            start_command: None,
            install_command: None,
            default_port: 8080,
            process_name: None,
            requires_build: false,
        },
        ProjectType::Python {
            has_requirements_txt,
            has_pyproject_toml,
        } => {
            // requirements.txt wins over pyproject.toml when both exist.
            let install_command = if *has_requirements_txt {
                Some("pip install -r requirements.txt".to_string())
            } else if *has_pyproject_toml {
                Some("pip install -e .".to_string())
            } else {
                None
            };
            DeploymentRecommendations {
                build_command: None,
                start_command: None,
                install_command,
                default_port: 8000,
                process_name: None,
                requires_build: false,
            }
        }
        ProjectType::OpenApi { .. } => DeploymentRecommendations {
            build_command: None,
            start_command: None,
            install_command: None,
            default_port: 8080,
            process_name: None,
            requires_build: false,
        },
        ProjectType::Terraform { .. } => DeploymentRecommendations {
            build_command: None,
            start_command: None,
            install_command: None,
            default_port: 8080,
            process_name: None,
            requires_build: false,
        },
        ProjectType::NextJs { package_json, .. } => DeploymentRecommendations {
            build_command: Some("pnpm run build".to_string()),
            start_command: Some("pnpm run start".to_string()),
            install_command: Some("pnpm install".to_string()),
            default_port: 3000,
            process_name: Some(package_json.name.clone()),
            requires_build: true,
        },
        ProjectType::NodeJs { package_json } => {
            // `pnpm run` takes a *script name*. The previous code passed the
            // first token of the start script's command line instead (e.g.
            // `"start": "node server.js"` produced `pnpm run node`), which is
            // not a valid script. When a "start" script is declared, run it by
            // name; otherwise fall back to the "main" entry file, then to a
            // plain `pnpm run start`.
            let start_cmd = if package_json.scripts.contains_key("start") {
                "pnpm run start".to_string()
            } else if let Some(main) = package_json.main.as_deref() {
                format!("node {}", main)
            } else {
                "pnpm run start".to_string()
            };
            DeploymentRecommendations {
                build_command: package_json
                    .scripts
                    .get("build")
                    .map(|_| "pnpm run build".to_string()),
                start_command: Some(start_cmd),
                install_command: Some("pnpm install".to_string()),
                default_port: 3000,
                process_name: Some(package_json.name.clone()),
                requires_build: package_json.scripts.contains_key("build"),
            }
        }
        ProjectType::Rust { cargo_toml } => DeploymentRecommendations {
            build_command: Some("cargo build --release".to_string()),
            // Binary name follows the package name for a default cargo layout.
            start_command: Some(format!("./target/release/{}", cargo_toml.name)),
            install_command: None,
            default_port: 8080,
            process_name: Some(cargo_toml.name.clone()),
            requires_build: true,
        },
        ProjectType::Unknown => DeploymentRecommendations {
            build_command: None,
            start_command: None,
            install_command: None,
            default_port: 8080,
            process_name: None,
            requires_build: false,
        },
    }
}
}
/// Suggested deployment workflow for a detected project type, as produced by
/// `ProjectDetector::get_deployment_recommendations`.
#[derive(Debug, Clone)]
pub struct DeploymentRecommendations {
/// Command to build the project, when a build step applies.
pub build_command: Option<String>,
/// Command to start the project, when one can be derived.
pub start_command: Option<String>,
/// Command to install dependencies, when one can be derived.
pub install_command: Option<String>,
/// Port to expose when none was detected from the project itself.
pub default_port: u16,
/// Process name (package/crate name) when known.
pub process_name: Option<String>,
/// Whether `build_command` must run before `start_command`.
pub requires_build: bool,
}