use anyhow::{bail, Result};
use console::style;
use std::path::Path;
use std::time::Instant;
use crate::config;
pub fn execute(target: Option<String>, registry: Option<&str>, dry_run: bool, local: bool) -> Result<()> {
let (config_path, cfg) = config::load_or_find_config()?;
let project = config::project_dir(&config_path);
if local {
if cfg.workspaces.is_some() {
return publish_all_local(&config_path, &project);
}
return publish_single_local(&project, &cfg, None);
}
if cfg.workspaces.is_some() {
if let Some(ref module_name) = target {
return publish_workspace_module(&config_path, &project, module_name, registry, dry_run);
}
return publish_all_workspace_modules(&config_path, &project, registry, dry_run);
}
if target.is_some() {
bail!("--target only works in workspace mode. Current project is not a workspace.");
}
if cfg.private.unwrap_or(false) {
bail!("Cannot publish a private package. Remove 'private = true' from package.toml.");
}
let version = cfg
.version
.as_deref()
.unwrap_or("0.0.0");
crate::scripts::run_script(&cfg, "prepublish", &project)?;
super::build::execute(vec![], true)?;
let pom_path = project.join("out").join("pom.xml");
generate_pom(&project, &cfg, &pom_path, None)?;
println!(
" {} Generated POM for {}@{}",
style("✓").green(),
style(&cfg.name).bold(),
version
);
let reg = resolve_registry(&cfg, registry)?;
let registry_url = reg.url;
let jar_path = find_output_jar(&project, &cfg, None)?;
let jar_size = std::fs::metadata(&jar_path).map(|m| m.len()).unwrap_or(0);
let sources_jar = generate_sources_jar(&project, &cfg, None)?;
let javadoc_jar = generate_javadoc_jar(&project, &cfg, None)?;
if dry_run {
println!();
println!(" {} dry run — would publish:", style("➜").green());
println!(" Package: {}@{}", style(&cfg.name).bold(), version);
println!(" Registry: {}", style(®istry_url).dim());
let sources_size = std::fs::metadata(&sources_jar).map(|m| m.len()).unwrap_or(0);
println!(" JAR: {} ({:.1} KB)", jar_path.display(), jar_size as f64 / 1024.0);
println!(" Sources: {} ({:.1} KB)", sources_jar.display(), sources_size as f64 / 1024.0);
if let Some(ref jd) = javadoc_jar {
let jd_size = std::fs::metadata(jd).map(|m| m.len()).unwrap_or(0);
println!(" Javadoc: {} ({:.1} KB)", jd.display(), jd_size as f64 / 1024.0);
}
println!(" POM: {}", pom_path.display());
println!(" Checksums: SHA-256 + MD5 for each artifact");
let gpg = std::process::Command::new("gpg").arg("--version")
.stdout(std::process::Stdio::null()).stderr(std::process::Stdio::null())
.status().map(|s| s.success()).unwrap_or(false);
if gpg {
println!(" GPG: signatures will be generated");
} else {
println!(" GPG: {} (gpg not found)", style("skipped").yellow());
}
println!();
println!(" {} No artifacts were uploaded", style("!").yellow());
return Ok(());
}
let creds_path = credentials_path();
let creds = load_credentials_for_registry(&creds_path, ®istry_url, reg.inline_creds)?;
println!(
" {} publishing {} to {}",
style("➜").green(),
style(&cfg.name).bold(),
style(®istry_url).dim()
);
upload_artifact(&jar_path, &pom_path, &sources_jar, javadoc_jar.as_deref(), &cfg, ®istry_url, &creds, None)?;
if is_sonatype_url(®istry_url) {
println!(
" {} Detected Sonatype OSSRH — attempting staging close + release",
style("➜").green()
);
match sonatype_close_and_release(®istry_url, &cfg, &creds) {
Ok(()) => {
println!(" {} Staging repository released to Maven Central", style("✓").green());
}
Err(e) => {
println!(
" {} Staging close/release failed: {}",
style("!").yellow(),
e
);
println!(" Complete the release manually at https://oss.sonatype.org");
}
}
}
println!(
" {} Published {}@{}",
style("✓").green(),
style(&cfg.name).bold(),
version
);
crate::scripts::run_script(&cfg, "postpublish", &project)?;
Ok(())
}
/// Install one package's JAR and POM into the local Maven cache.
///
/// `root_version` is the workspace root version, used when the module's own
/// config declares no version; the final fallback is "0.0.0".
fn publish_single_local(project: &Path, cfg: &config::schema::YmConfig, root_version: Option<&str>) -> Result<()> {
    let version = cfg.version.as_deref().or(root_version).unwrap_or("0.0.0");
    // Cache layout: <cache>/<groupId>/<artifactId>/<version>/
    let dest = config::maven_cache_dir()
        .join(&cfg.group_id)
        .join(&cfg.name)
        .join(version);
    std::fs::create_dir_all(&dest)?;
    // Regenerate the POM so dependency versions reflect the current config.
    let pom_path = project.join("out").join("pom.xml");
    generate_pom(project, cfg, &pom_path, Some(version))?;
    let jar_path = find_output_jar(project, cfg, Some(version))?;
    let stem = format!("{}-{}", cfg.name, version);
    std::fs::copy(&jar_path, dest.join(format!("{}.jar", stem)))?;
    std::fs::copy(&pom_path, dest.join(format!("{}.pom", stem)))?;
    // Drop any stale "pom-only" marker so the cached JAR is treated as real.
    let _ = std::fs::remove_file(dest.join(format!("{}.jar.pom-only", stem)));
    Ok(())
}
/// Install every non-private workspace module into the local Maven cache,
/// in parallel, after building the whole workspace once.
fn publish_all_local(config_path: &Path, workspace_root: &Path) -> Result<()> {
use rayon::prelude::*;
use std::sync::atomic::{AtomicUsize, Ordering};
let publish_start = Instant::now();
let ws = crate::workspace::graph::WorkspaceGraph::build(workspace_root)?;
// One full build up front; per-module installs then only package output.
super::build::execute(vec![], true)?;
let root_cfg = config::load_config(config_path)?;
// Modules without their own version inherit the workspace root version.
let root_version = root_cfg.version.as_deref().unwrap_or("0.0.0");
let mut modules: Vec<_> = Vec::new();
for pkg_name in &ws.all_packages() {
let pkg = ws.get_package(pkg_name).unwrap();
// Private modules are never installed.
if pkg.config.private.unwrap_or(false) { continue; }
modules.push((pkg.path.clone(), pkg.config.clone()));
}
let total = modules.len();
println!(
" {} installing {} modules to local cache",
style("➜").green(),
total
);
// Progress counters shared across rayon workers.
let installed = AtomicUsize::new(0);
let failed = AtomicUsize::new(0);
let num_threads = std::thread::available_parallelism().map(|n| n.get()).unwrap_or(8);
// Dedicated pool; fall back to rayon defaults if the build fails.
let pool = rayon::ThreadPoolBuilder::new().num_threads(num_threads).build()
.unwrap_or_else(|_| rayon::ThreadPoolBuilder::new().build().unwrap());
pool.install(|| modules.par_iter().for_each(|(module_path, module_cfg)| {
match publish_single_local(module_path, module_cfg, Some(root_version)) {
Ok(()) => {
let n = installed.fetch_add(1, Ordering::Relaxed) + 1;
eprintln!("\r {} Installed [{}/{}] {}", style("✓").green(), n, total, &module_cfg.name);
}
Err(e) => {
// A failed module does not abort the others; it's counted and reported.
failed.fetch_add(1, Ordering::Relaxed);
eprintln!("\r {} Failed {} : {}", style("✗").red(), &module_cfg.name, e);
}
}
}));
let elapsed = publish_start.elapsed();
// Human-friendly duration: minutes past 60s, otherwise seconds.
let elapsed_str = if elapsed.as_secs() >= 60 {
format!("{:.1} minutes", elapsed.as_secs_f64() / 60.0)
} else {
format!("{:.1}s", elapsed.as_secs_f64())
};
let cache_dir = config::maven_cache_dir();
println!();
println!(
" {} {} installed, {} failed in {} → {}",
style("✓").green(),
installed.load(Ordering::Relaxed),
failed.load(Ordering::Relaxed),
style(elapsed_str).bold(),
style(cache_dir.display()).dim()
);
Ok(())
}
fn publish_all_workspace_modules(
config_path: &Path,
workspace_root: &Path,
registry: Option<&str>,
dry_run: bool,
) -> Result<()> {
use rayon::prelude::*;
use std::sync::atomic::{AtomicUsize, Ordering};
let publish_start = Instant::now();
let ws = crate::workspace::graph::WorkspaceGraph::build(workspace_root)?;
super::build::execute(vec![], true)?;
let root_cfg = config::load_config(config_path)?;
let reg = resolve_registry(&root_cfg, registry)?;
let registry_url = reg.url.clone();
let creds_path = credentials_path();
let creds = load_credentials_for_registry(&creds_path, ®istry_url, reg.inline_creds)?;
let mut modules: Vec<_> = Vec::new();
let mut skipped = 0;
for pkg_name in &ws.all_packages() {
let pkg = ws.get_package(pkg_name).unwrap();
if pkg.config.private.unwrap_or(false) {
skipped += 1;
continue;
}
let version = pkg.config.version.as_deref()
.or(root_cfg.version.as_deref())
.unwrap_or("0.0.0")
.to_string();
modules.push((pkg.path.clone(), pkg.config.clone(), version));
}
let total = modules.len();
if dry_run {
for (_, cfg, ver) in &modules {
println!(" {} would publish {}@{}", style("·").dim(), style(&cfg.name).bold(), ver);
}
println!("\n {} {} modules would be published, {} skipped (private)", style("✓").green(), total, skipped);
return Ok(());
}
println!(
" {} publishing {} modules to {}",
style("➜").green(),
total,
style(®istry_url).dim()
);
let num_threads = std::thread::available_parallelism().map(|n| n.get() * 2).unwrap_or(16);
let pool = rayon::ThreadPoolBuilder::new().num_threads(num_threads).build()
.unwrap_or_else(|_| rayon::ThreadPoolBuilder::new().build().unwrap());
let published = AtomicUsize::new(0);
let failed = AtomicUsize::new(0);
let client = reqwest::blocking::Client::builder()
.user_agent(concat!("ym/", env!("CARGO_PKG_VERSION")))
.pool_max_idle_per_host(num_threads)
.build()?;
let gpg_available = check_gpg_available();
let results: Vec<_> = pool.install(|| modules.par_iter().map(|(module_path, module_cfg, version)| {
let result = (|| -> Result<()> {
let pom_path = module_path.join("out").join("pom.xml");
generate_pom(module_path, module_cfg, &pom_path, Some(version))?;
let jar_path = find_output_jar(module_path, module_cfg, Some(version))?;
let sources_jar = generate_sources_jar(module_path, module_cfg, Some(version))?;
upload_artifact_with(&jar_path, &pom_path, &sources_jar, None, module_cfg, ®istry_url, &creds, Some(version.as_str()), &client, gpg_available)?;
Ok(())
})();
match &result {
Ok(()) => {
let n = published.fetch_add(1, Ordering::Relaxed) + 1;
eprintln!("\r {} Published [{}/{}] {}", style("✓").green(), n, total, &module_cfg.name);
}
Err(e) => {
failed.fetch_add(1, Ordering::Relaxed);
eprintln!("\r {} Failed {} : {}", style("✗").red(), &module_cfg.name, e);
}
}
(module_cfg.name.clone(), result)
}).collect());
let pub_count = published.load(Ordering::Relaxed);
let fail_count = failed.load(Ordering::Relaxed);
let elapsed = publish_start.elapsed();
let elapsed_str = if elapsed.as_secs() >= 60 {
format!("{:.1} minutes", elapsed.as_secs_f64() / 60.0)
} else {
format!("{:.1}s", elapsed.as_secs_f64())
};
println!();
println!(
" {} {} published, {} failed, {} skipped (private) in {}",
style("✓").green(),
pub_count,
fail_count,
skipped,
style(elapsed_str).bold()
);
if fail_count > 0 {
bail!("{} module(s) failed to publish", fail_count);
}
Ok(())
}
fn publish_workspace_module(
config_path: &Path,
workspace_root: &Path,
module_name: &str,
registry: Option<&str>,
dry_run: bool,
) -> Result<()> {
let ws = crate::workspace::graph::WorkspaceGraph::build(workspace_root)?;
let pkg = ws.get_package(module_name)
.ok_or_else(|| anyhow::anyhow!("Module '{}' not found in workspace", module_name))?;
let module_cfg = &pkg.config;
if module_cfg.private.unwrap_or(false) {
bail!("Module '{}' is private. Remove 'private = true' to publish.", module_name);
}
let root_cfg = config::load_config(config_path)?;
let version = module_cfg.version.as_deref()
.or(root_cfg.version.as_deref())
.unwrap_or("0.0.0");
let module_path = &pkg.path;
super::build::execute(vec![module_name.to_string()], true)?;
let pom_path = module_path.join("out").join("pom.xml");
generate_pom(module_path, module_cfg, &pom_path, Some(version))?;
println!(
" {} Generated POM for {}@{}",
style("✓").green(),
style(&module_cfg.name).bold(),
version
);
let root_cfg = config::load_config(config_path)?;
let reg = resolve_registry(&root_cfg, registry)?;
let registry_url = reg.url;
let jar_path = find_output_jar(module_path, module_cfg, Some(version))?;
let sources_jar = generate_sources_jar(module_path, module_cfg, Some(version))?;
if dry_run {
let jar_size = std::fs::metadata(&jar_path).map(|m| m.len()).unwrap_or(0);
println!();
println!(" {} dry run — would publish module:", style("➜").green());
println!(" Package: {}@{}", style(&module_cfg.name).bold(), version);
println!(" GroupId: {}", module_cfg.group_id);
println!(" Registry: {}", style(®istry_url).dim());
println!(" JAR: {} ({:.1} KB)", jar_path.display(), jar_size as f64 / 1024.0);
println!();
println!(" {} No artifacts were uploaded", style("!").yellow());
return Ok(());
}
let creds_path = credentials_path();
let creds = load_credentials_for_registry(&creds_path, ®istry_url, reg.inline_creds)?;
println!(
" {} publishing module {} to {}",
style("➜").green(),
style(&module_cfg.name).bold(),
style(®istry_url).dim()
);
upload_artifact(&jar_path, &pom_path, &sources_jar, None, module_cfg, ®istry_url, &creds, Some(version))?;
if is_sonatype_url(®istry_url) {
match sonatype_close_and_release(®istry_url, module_cfg, &creds) {
Ok(()) => println!(" {} Staging repository released", style("✓").green()),
Err(e) => println!(" {} Staging close/release failed: {}", style("!").yellow(), e),
}
}
println!(
" {} Published {}@{}",
style("✓").green(),
style(&module_cfg.name).bold(),
version
);
Ok(())
}
/// Write a Maven POM for `cfg` to `output`.
///
/// Dependency entries are derived from `cfg.dependencies`:
/// * workspace-local deps are emitted with the sibling module's group/name;
/// * maven deps use the resolved coordinate and version (with `${...}`
///   variables resolved against the workspace root config when present);
/// * test-scoped deps are omitted entirely.
///
/// `version_override` stamps the POM's own <version>; otherwise the config
/// version (or "0.0.0") is used.
fn generate_pom(
project: &Path,
cfg: &config::schema::YmConfig,
output: &Path,
version_override: Option<&str>,
) -> Result<()> {
// Workspace context (if any): needed to resolve `workspace = true` deps.
let ws_root = config::find_workspace_root(project);
let root_cfg = ws_root.as_ref()
.and_then(|root| config::load_config(&root.join(config::CONFIG_FILE)).ok());
let ws = ws_root
.and_then(|root| crate::workspace::graph::WorkspaceGraph::build(&root).ok());
// Version stamped on workspace-internal <dependency> entries.
// NOTE(review): the root config's version takes precedence over
// `version_override` here — confirm that ordering is intended.
let root_version = root_cfg.as_ref()
.and_then(|r| r.version.as_deref())
.or(version_override)
.unwrap_or("0.0.0");
let mut dep_xml = String::new();
if let Some(ref deps) = cfg.dependencies {
for (coord, value) in deps {
// Workspace-local dependency: point at the sibling module's coordinates.
if !crate::config::schema::is_maven_dep(coord) && value.is_workspace() {
if let Some(ref ws) = ws {
if let Some(pkg) = ws.get_package(coord) {
dep_xml.push_str(&format!(
r#" <dependency>
<groupId>{}</groupId>
<artifactId>{}</artifactId>
<version>{}</version>
</dependency>
"#,
pkg.config.group_id, pkg.config.name, root_version
));
}
}
continue;
}
// Anything else that isn't a maven coordinate can't appear in a POM.
if !crate::config::schema::is_maven_dep(coord) {
continue;
}
// Resolve the dependency version: `workspace = true` deps look it up in
// the root config; explicit versions may contain `${var}` references.
let version = if value.is_workspace() {
if let Some(ref root) = root_cfg {
match root.find_dep_version(coord) {
Some(v) => config::schema::YmConfig::resolve_var(v, root),
None => continue,
}
} else {
continue;
}
} else {
let raw_version = match value.version() {
Some(v) => v,
None => continue,
};
if let Some(ref root) = root_cfg {
config::schema::YmConfig::resolve_var(raw_version, root)
} else {
raw_version.to_string()
}
};
// Test-only dependencies are not published in the POM.
let scope = value.scope();
if scope == "test" {
continue;
}
// Aliases may map to a full group:artifact coordinate.
let resolved = root_cfg.as_ref()
.map(|root| root.resolve_key(coord))
.unwrap_or_else(|| cfg.resolve_key(coord));
let parts: Vec<&str> = resolved.split(':').collect();
// Only emit well-formed "group:artifact" coordinates.
if parts.len() == 2 {
// "compile" is Maven's default scope, so it is left implicit.
let scope_xml = match scope {
"compile" => String::new(),
s => format!("\n <scope>{}</scope>", s),
};
dep_xml.push_str(&format!(
r#" <dependency>
<groupId>{}</groupId>
<artifactId>{}</artifactId>
<version>{}</version>{scope_xml}
</dependency>
"#,
parts[0], parts[1], version
));
}
}
}
let group_id = &cfg.group_id;
let artifact_id = &cfg.name;
let pom = format!(
r#"<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>{group_id}</groupId>
<artifactId>{artifact_id}</artifactId>
<version>{version}</version>
<packaging>jar</packaging>
<dependencies>
{dep_xml} </dependencies>
</project>
"#,
version = version_override.or(cfg.version.as_deref()).unwrap_or("0.0.0")
);
if let Some(parent) = output.parent() {
std::fs::create_dir_all(parent)?;
}
std::fs::write(output, pom)?;
Ok(())
}
/// Build `out/<name>-<version>-sources.jar` containing the project's source
/// tree and return its path.
///
/// A jar is created even when the source directory is missing (it is then
/// empty), so publishing can always upload a sources artifact.
fn generate_sources_jar(project: &Path, cfg: &config::schema::YmConfig, version_override: Option<&str>) -> Result<std::path::PathBuf> {
    let source_dir = config::source_dir(project);
    let jar_name = format!(
        "{}-{}-sources.jar",
        cfg.name,
        version_override.or(cfg.version.as_deref()).unwrap_or("0.0.0")
    );
    let jar_path = project.join("out").join(&jar_name);
    if let Some(parent) = jar_path.parent() {
        std::fs::create_dir_all(parent)?;
    }
    let file = std::fs::File::create(&jar_path)?;
    let mut zip = zip::ZipWriter::new(file);
    let options = zip::write::SimpleFileOptions::default()
        .compression_method(zip::CompressionMethod::Deflated);
    if source_dir.exists() {
        for entry in walkdir::WalkDir::new(&source_dir).into_iter().filter_map(|e| e.ok()) {
            let path = entry.path();
            if path.is_file() {
                let rel = path.strip_prefix(&source_dir).unwrap_or(path);
                // ZIP entry names must use forward slashes regardless of
                // platform, so normalize Windows path separators.
                zip.start_file(rel.to_string_lossy().replace('\\', "/"), options)?;
                let mut f = std::fs::File::open(path)?;
                std::io::copy(&mut f, &mut zip)?;
            }
        }
    }
    zip.finish()?;
    Ok(jar_path)
}
/// Generate a `-javadoc.jar` for the project, best-effort.
///
/// Returns `Ok(None)` (never an error) when there are no Java sources, when
/// the `javadoc` tool fails, or when jar packaging fails — javadoc is a
/// nice-to-have, not a publish blocker.
fn generate_javadoc_jar(project: &Path, cfg: &config::schema::YmConfig, version_override: Option<&str>) -> Result<Option<std::path::PathBuf>> {
    let source_dir = config::source_dir(project);
    if !source_dir.exists() {
        return Ok(None);
    }
    let java_files: Vec<std::path::PathBuf> = walkdir::WalkDir::new(&source_dir)
        .into_iter()
        .filter_map(|e| e.ok())
        .filter(|e| e.path().extension().and_then(|x| x.to_str()) == Some("java"))
        .map(|e| e.path().to_path_buf())
        .collect();
    if java_files.is_empty() {
        return Ok(None);
    }
    let javadoc_dir = project.join("out").join("javadoc");
    // Start from a clean output directory.
    let _ = std::fs::remove_dir_all(&javadoc_dir);
    std::fs::create_dir_all(&javadoc_dir)?;
    // Classpath: every cached dependency jar. Join with the platform's path
    // separator (';' on Windows, ':' elsewhere) — a hard-coded ':' breaks
    // javadoc invocation on Windows.
    let cp_sep = if cfg!(windows) { ";" } else { ":" };
    let cache = config::maven_cache_dir();
    let cp_str: String = walkdir::WalkDir::new(&cache)
        .into_iter()
        .filter_map(|e| e.ok())
        .filter(|e| e.path().extension().and_then(|x| x.to_str()) == Some("jar"))
        .map(|e| e.path().to_string_lossy().to_string())
        .collect::<Vec<_>>()
        .join(cp_sep);
    // Pass the (potentially long) file list via an @argfile to avoid
    // command-line length limits.
    let argfile = project.join("out").join("javadoc-files.txt");
    let file_list: String = java_files.iter()
        .map(|p| p.to_string_lossy().to_string())
        .collect::<Vec<_>>()
        .join("\n");
    std::fs::write(&argfile, &file_list)?;
    let mut cmd = std::process::Command::new("javadoc");
    cmd.arg("-d").arg(&javadoc_dir)
        .arg("-quiet")
        .arg("-Xdoclint:none")
        .arg(format!("@{}", argfile.display()))
        .stdout(std::process::Stdio::null())
        .stderr(std::process::Stdio::null());
    if !cp_str.is_empty() {
        cmd.arg("-classpath").arg(&cp_str);
    }
    let target = cfg.target.as_deref().unwrap_or("21");
    cmd.arg("-source").arg(target);
    let status = cmd.status();
    let _ = std::fs::remove_file(&argfile);
    match status {
        Ok(s) if s.success() => {}
        _ => {
            println!(
                " {} Javadoc generation failed for {}@{} (non-fatal), skipping",
                style("!").yellow(),
                cfg.name,
                version_override.or(cfg.version.as_deref()).unwrap_or("0.0.0")
            );
            return Ok(None);
        }
    }
    let jar_name = format!(
        "{}-{}-javadoc.jar",
        cfg.name,
        version_override.or(cfg.version.as_deref()).unwrap_or("0.0.0")
    );
    let jar_path = project.join("out").join(&jar_name);
    // Package the generated HTML tree with the `jar` tool.
    let status = std::process::Command::new("jar")
        .arg("cf")
        .arg(&jar_path)
        .arg("-C")
        .arg(&javadoc_dir)
        .arg(".")
        .status()?;
    if !status.success() {
        println!(
            " {} Failed to create javadoc JAR for {}@{} (non-fatal)",
            style("!").yellow(),
            cfg.name,
            version_override.or(cfg.version.as_deref()).unwrap_or("0.0.0")
        );
        return Ok(None);
    }
    println!(" {} Generated javadoc JAR for {}@{}", style("✓").green(),
        cfg.name, version_override.or(cfg.version.as_deref()).unwrap_or("0.0.0"));
    Ok(Some(jar_path))
}
/// Package compiled classes into `out/<name>-<version>.jar` and return its path.
fn find_output_jar(project: &Path, cfg: &config::schema::YmConfig, version_override: Option<&str>) -> Result<std::path::PathBuf> {
    let classes_dir = config::output_classes_dir(project);
    let version = version_override.or(cfg.version.as_deref()).unwrap_or("0.0.0");
    // Ensure out/ exists before writing the jar into it.
    let out_dir = project.join("out");
    std::fs::create_dir_all(&out_dir)?;
    let jar_path = out_dir.join(format!("{}-{}.jar", cfg.name, version));
    super::build::write_classes_jar(&jar_path, &classes_dir, &cfg.name, version)?;
    Ok(jar_path)
}
/// A registry resolved from `[registries]`: its URL (env vars expanded) plus
/// any credentials declared inline in the registry config.
struct ResolvedRegistry {
// Registry base URL, after `${VAR}` expansion.
url: String,
// Username/password from the registry entry itself, if both were set.
inline_creds: Option<Credentials>,
}
/// Expand `${VAR}` environment references in `s`; pass plain strings through.
fn resolve_env(s: &str) -> String {
    match s.contains("${") {
        true => config::schema::YmConfig::resolve_env_vars(s),
        false => s.to_string(),
    }
}
/// Pick a registry entry from `[registries]`.
///
/// With an explicit name, that entry must exist. Otherwise: the `default`
/// entry wins; a single configured entry is used implicitly; anything else
/// is an error asking the user to pass `--registry`.
fn find_registry_value<'a>(
    cfg: &'a config::schema::YmConfig,
    registry: Option<&str>,
) -> Result<&'a config::schema::RegistryValue> {
    // Explicit --registry <name>.
    if let Some(name) = registry {
        return cfg.registries
            .as_ref()
            .and_then(|r| r.get(name))
            .ok_or_else(|| anyhow::anyhow!("Registry '{}' not found in [registries]", name));
    }
    // No name given: need at least one configured registry.
    let regs = match cfg.registries.as_ref() {
        Some(r) if !r.is_empty() => r,
        _ => bail!("No [registries] configured. Add a registry to publish."),
    };
    if let Some(v) = regs.get("default") {
        return Ok(v);
    }
    if regs.len() == 1 {
        return Ok(regs.values().next().unwrap());
    }
    bail!(
        "Multiple registries configured but no 'default' key. Use --registry <name> to specify.\nAvailable: {}",
        regs.keys().cloned().collect::<Vec<_>>().join(", ")
    )
}
/// Resolve the registry entry to use, expanding env vars in its URL and in
/// any inline username/password pair (both must be present to count).
fn resolve_registry(cfg: &config::schema::YmConfig, registry: Option<&str>) -> Result<ResolvedRegistry> {
    let value = find_registry_value(cfg, registry)?;
    // Only a complete username+password pair yields inline credentials.
    let inline_creds = value.username().zip(value.password()).map(|(u, p)| Credentials {
        username: resolve_env(u),
        password: resolve_env(p),
    });
    Ok(ResolvedRegistry {
        url: resolve_env(value.url()),
        inline_creds,
    })
}
pub fn resolve_registry_url_by_name(registry_name: &str) -> Result<String> {
let (_config_path, cfg) = config::load_or_find_config()?;
let value = cfg.registries
.as_ref()
.and_then(|r| r.get(registry_name))
.ok_or_else(|| anyhow::anyhow!("Registry '{}' not found in [registries]", registry_name))?;
Ok(resolve_env(value.url()))
}
/// Basic-auth credentials for a registry. For token-based entries the token
/// is carried in `username` with an empty `password` (see
/// `load_credentials_for_registry`).
struct Credentials {
username: String,
password: String,
}
/// Location of the stored credentials file: `~/.ym/credentials.json`.
fn credentials_path() -> std::path::PathBuf {
    let ym_dir = crate::home_dir().join(".ym");
    ym_dir.join("credentials.json")
}
fn load_credentials_for_registry(path: &Path, registry_url: &str, inline: Option<Credentials>) -> Result<Credentials> {
if let Some(creds) = inline {
if !creds.username.is_empty() {
return Ok(creds);
}
}
let content = match std::fs::read_to_string(path) {
Ok(c) => c,
Err(_) => bail!(
"No credentials found for '{}'. Run 'ym login' or add username/password to registry config.",
registry_url
),
};
let map: std::collections::BTreeMap<String, serde_json::Value> = serde_json::from_str(&content)?;
let normalized = registry_url.trim_end_matches('/');
let entry = map.get(normalized)
.or_else(|| map.get(&format!("{}/", normalized)));
match entry {
Some(v) => {
if let Some(token) = v.get("token").and_then(|t| t.as_str()) {
Ok(Credentials { username: token.to_string(), password: String::new() })
} else {
Ok(Credentials {
username: v["username"].as_str().unwrap_or("").to_string(),
password: v["password"].as_str().unwrap_or("").to_string(),
})
}
}
None => bail!(
"No credentials found for '{}'. Run 'ym login' or add username/password to registry config.",
registry_url
),
}
}
/// Digest of a file's contents via the project's hashing helper.
/// NOTE(review): assumes `hash_bytes` yields the SHA-256 hex string Maven
/// registries expect for `.sha256` sidecars — confirm against that helper.
fn sha256_of_file(path: &Path) -> Result<String> {
    let data = std::fs::read(path)?;
    let digest = crate::compiler::incremental::hash_bytes(&data);
    Ok(digest)
}
/// Lowercase hex MD5 digest of a file's contents (Maven repositories still
/// expect an `.md5` sidecar alongside each artifact).
fn md5_of_file(path: &Path) -> Result<String> {
    // fs::read sizes its buffer from file metadata — simpler and faster than
    // manually opening the file and read_to_end-ing into an empty Vec.
    let buf = std::fs::read(path)?;
    Ok(format!("{:x}", md5::compute(&buf)))
}
/// Create a detached ASCII-armored signature for `path` via `gpg -ab`.
///
/// gpg writes `<path>.asc` next to the input; returns that path only when
/// the command succeeded AND the file actually appeared, otherwise None
/// (signing is best-effort).
fn gpg_sign_file(path: &Path) -> Option<std::path::PathBuf> {
    let asc_path = path.with_extension(
        format!("{}.asc", path.extension().unwrap_or_default().to_string_lossy())
    );
    let outcome = std::process::Command::new("gpg")
        .arg("--batch")
        .arg("--yes")
        .arg("-ab")
        .arg(path)
        .stdout(std::process::Stdio::null())
        .stderr(std::process::Stdio::null())
        .status();
    let signed = matches!(outcome, Ok(s) if s.success());
    if signed && asc_path.exists() {
        Some(asc_path)
    } else {
        None
    }
}
/// PUT a local file's bytes to `url` with HTTP basic auth.
/// Fails on any non-2xx response status.
fn upload_file(
    client: &reqwest::blocking::Client,
    url: &str,
    path: &Path,
    creds: &Credentials,
) -> Result<()> {
    let body = std::fs::read(path)?;
    let response = client
        .put(url)
        .basic_auth(&creds.username, Some(&creds.password))
        .body(body)
        .send()?;
    if response.status().is_success() {
        Ok(())
    } else {
        bail!("Failed to upload {}: HTTP {}", path.display(), response.status())
    }
}
/// PUT a checksum string (plain hex text body) to `url` with basic auth.
/// Fails on any non-2xx response status.
fn upload_checksum(
    client: &reqwest::blocking::Client,
    url: &str,
    checksum: &str,
    creds: &Credentials,
) -> Result<()> {
    let response = client
        .put(url)
        .basic_auth(&creds.username, Some(&creds.password))
        .body(checksum.to_string())
        .send()?;
    if response.status().is_success() {
        Ok(())
    } else {
        bail!("Failed to upload checksum to {}: HTTP {}", url, response.status())
    }
}
/// Convenience wrapper around `upload_artifact_with`: builds a one-off HTTP
/// client and probes for gpg, then delegates the actual uploads.
fn upload_artifact(
    jar_path: &Path,
    pom_path: &Path,
    sources_jar_path: &Path,
    javadoc_jar_path: Option<&Path>,
    cfg: &config::schema::YmConfig,
    registry: &str,
    creds: &Credentials,
    version_override: Option<&str>,
) -> Result<()> {
    let client = reqwest::blocking::Client::builder()
        .user_agent(concat!("ym/", env!("CARGO_PKG_VERSION")))
        .build()?;
    upload_artifact_with(
        jar_path,
        pom_path,
        sources_jar_path,
        javadoc_jar_path,
        cfg,
        registry,
        creds,
        version_override,
        &client,
        check_gpg_available(),
    )
}
/// True when a `gpg` binary is on PATH and `gpg --version` exits successfully.
fn check_gpg_available() -> bool {
    let probe = std::process::Command::new("gpg")
        .arg("--version")
        .stdout(std::process::Stdio::null())
        .stderr(std::process::Stdio::null())
        .status();
    matches!(probe, Ok(status) if status.success())
}
/// Upload POM, JAR, sources jar, and optional javadoc jar to a Maven-layout
/// registry, each with SHA-256/MD5 checksums and (when gpg is available)
/// a detached `.asc` signature.
///
/// URLs follow the standard Maven layout:
/// `<registry>/<group-as-path>/<artifact>/<version>/<artifact>-<version><suffix>`.
fn upload_artifact_with(
    jar_path: &Path,
    pom_path: &Path,
    sources_jar_path: &Path,
    javadoc_jar_path: Option<&Path>,
    cfg: &config::schema::YmConfig,
    registry: &str,
    creds: &Credentials,
    version_override: Option<&str>,
    client: &reqwest::blocking::Client,
    gpg_available: bool,
) -> Result<()> {
    let group_id = &cfg.group_id;
    let artifact_id = &cfg.name;
    let version = version_override.or(cfg.version.as_deref()).unwrap_or("0.0.0");
    let group_path = group_id.replace('.', "/");
    let base_url = format!(
        "{}/{}/{}/{}",
        registry, group_path, artifact_id, version
    );
    // (suffix, path) pairs, uploaded in POM → JAR → sources → javadoc order.
    // The suffix is everything after "<artifact>-<version>".
    let mut uploads: Vec<(&str, &Path)> = vec![
        (".pom", pom_path),
        (".jar", jar_path),
        ("-sources.jar", sources_jar_path),
    ];
    if let Some(javadoc_path) = javadoc_jar_path {
        uploads.push(("-javadoc.jar", javadoc_path));
    }
    for (suffix, path) in uploads {
        let url = format!("{}/{}-{}{}", base_url, artifact_id, version, suffix);
        upload_file(client, &url, path, creds)?;
        upload_checksums(client, &url, path, creds)?;
        if gpg_available {
            upload_gpg_signature(client, &url, path, creds)?;
        }
    }
    Ok(())
}
/// Upload the `.sha256` and `.md5` sidecars for an already-uploaded artifact.
fn upload_checksums(
    client: &reqwest::blocking::Client,
    artifact_url: &str,
    artifact_path: &Path,
    creds: &Credentials,
) -> Result<()> {
    let sha_url = format!("{}.sha256", artifact_url);
    upload_checksum(client, &sha_url, &sha256_of_file(artifact_path)?, creds)?;
    let md5_url = format!("{}.md5", artifact_url);
    upload_checksum(client, &md5_url, &md5_of_file(artifact_path)?, creds)
}
/// True for Sonatype OSSRH registry URLs (legacy and s01 hosts).
fn is_sonatype_url(url: &str) -> bool {
    // "s01.oss.sonatype.org" already contains "oss.sonatype.org", so the
    // original second `contains` arm was dead; one test covers both hosts.
    url.contains("oss.sonatype.org")
}
/// Drive the Sonatype OSSRH staging workflow after artifacts are uploaded:
/// find the staging profile, locate the open staging repository, close it,
/// poll until validation finishes (max 120s), then promote (release) it.
///
/// Errors at any step bubble up; callers treat them as non-fatal and tell
/// the user to finish the release manually.
fn sonatype_close_and_release(
registry_url: &str,
cfg: &config::schema::YmConfig,
creds: &Credentials,
) -> Result<()> {
// Pick the API host matching the deploy URL (s01 vs legacy).
let base_url = if registry_url.contains("s01.oss.sonatype.org") {
"https://s01.oss.sonatype.org"
} else {
"https://oss.sonatype.org"
};
let client = reqwest::blocking::Client::builder()
.user_agent(concat!("ym/", env!("CARGO_PKG_VERSION")))
.timeout(std::time::Duration::from_secs(60))
.build()?;
// Step 1: ask Sonatype which staging profile matches our coordinates.
let profile_url = format!(
"{}/service/local/staging/profile_evaluate?type=deployed&g={}&a={}&v={}",
base_url, cfg.group_id, cfg.name,
cfg.version.as_deref().unwrap_or("0.0.0")
);
let resp = client.get(&profile_url)
.basic_auth(&creds.username, Some(&creds.password))
.header("Accept", "application/json")
.send()?;
if !resp.status().is_success() {
bail!("Failed to query staging profiles: HTTP {}", resp.status());
}
let body: serde_json::Value = resp.json()?;
// The profile id field name varies between API responses; try both.
let profile_id = body["data"][0]["id"].as_str()
.or_else(|| body["data"][0]["profileId"].as_str());
let profile_id = match profile_id {
Some(id) => id.to_string(),
None => bail!("No staging profile found for {}", cfg.group_id),
};
// Step 2: find the open staging repository created by our uploads.
let repos_url = format!(
"{}/service/local/staging/profile_repositories/{}",
base_url, profile_id
);
let resp = client.get(&repos_url)
.basic_auth(&creds.username, Some(&creds.password))
.header("Accept", "application/json")
.send()?;
if !resp.status().is_success() {
bail!("Failed to list staging repositories: HTTP {}", resp.status());
}
let body: serde_json::Value = resp.json()?;
let repos = body["data"].as_array();
// rfind: take the most recent repository still in the "open" state.
let repo_id = repos
.and_then(|arr| {
arr.iter()
.rfind(|r| r["type"].as_str() == Some("open"))
.and_then(|r| r["repositoryId"].as_str())
});
let repo_id = match repo_id {
Some(id) => id.to_string(),
None => bail!("No open staging repository found. Artifacts may not have been uploaded correctly."),
};
println!(" {} Found staging repo: {}", style("✓").green(), &repo_id);
// Step 3: close ("finish") the staging repository to trigger validation.
let close_url = format!(
"{}/service/local/staging/profiles/{}/finish",
base_url, profile_id
);
let close_body = serde_json::json!({
"data": {
"stagedRepositoryId": repo_id,
"description": format!("Close {}@{}", cfg.name, cfg.version.as_deref().unwrap_or("0.0.0"))
}
});
let resp = client.post(&close_url)
.basic_auth(&creds.username, Some(&creds.password))
.header("Content-Type", "application/json")
.json(&close_body)
.send()?;
if !resp.status().is_success() {
let status = resp.status();
let text = resp.text().unwrap_or_default();
bail!("Failed to close staging repo: HTTP {} — {}", status, text);
}
println!(" {} Staging repo closed, waiting for validation...", style("➜").green());
// Step 4: poll the repository's activity feed every 5s (up to 120s total)
// until validation reports either repositoryClosed or repositoryCloseFailed.
let activity_url = format!(
"{}/service/local/staging/repository/{}/activity",
base_url, repo_id
);
let deadline = std::time::Instant::now() + std::time::Duration::from_secs(120);
let mut closed = false;
while std::time::Instant::now() < deadline {
std::thread::sleep(std::time::Duration::from_secs(5));
// Polling errors are ignored; we simply retry until the deadline.
if let Ok(resp) = client.get(&activity_url)
.basic_auth(&creds.username, Some(&creds.password))
.header("Accept", "application/json")
.send()
{
if let Ok(body) = resp.json::<serde_json::Value>() {
if let Some(activities) = body.as_array() {
let has_close = activities.iter().any(|a| {
a["name"].as_str() == Some("close")
&& a["events"].as_array().is_some_and(|events| {
events.iter().any(|e| e["name"].as_str() == Some("repositoryClosed"))
})
});
if has_close {
closed = true;
break;
}
let has_fail = activities.iter().any(|a| {
a["name"].as_str() == Some("close")
&& a["events"].as_array().is_some_and(|events| {
events.iter().any(|e| e["name"].as_str() == Some("repositoryCloseFailed"))
})
});
if has_fail {
bail!("Staging validation failed. Check Sonatype for details.");
}
}
}
}
}
if !closed {
bail!("Staging close timed out after 120s. Check Sonatype manually.");
}
println!(" {} Validation passed", style("✓").green());
// Step 5: promote (release) the closed repository to Maven Central.
let release_url = format!(
"{}/service/local/staging/profiles/{}/promote",
base_url, profile_id
);
let release_body = serde_json::json!({
"data": {
"stagedRepositoryId": repo_id,
"description": format!("Release {}@{}", cfg.name, cfg.version.as_deref().unwrap_or("0.0.0"))
}
});
let resp = client.post(&release_url)
.basic_auth(&creds.username, Some(&creds.password))
.header("Content-Type", "application/json")
.json(&release_body)
.send()?;
if !resp.status().is_success() {
let status = resp.status();
let text = resp.text().unwrap_or_default();
bail!("Failed to release staging repo: HTTP {} — {}", status, text);
}
Ok(())
}
/// Sign `artifact_path` with gpg (best-effort) and upload the detached
/// `.asc` signature next to the artifact. Skips silently when signing fails;
/// the temporary signature file is removed after upload.
fn upload_gpg_signature(
    client: &reqwest::blocking::Client,
    artifact_url: &str,
    artifact_path: &Path,
    creds: &Credentials,
) -> Result<()> {
    let asc_path = match gpg_sign_file(artifact_path) {
        Some(p) => p,
        None => return Ok(()),
    };
    let asc_url = format!("{}.asc", artifact_url);
    upload_file(client, &asc_url, &asc_path, creds)?;
    let _ = std::fs::remove_file(&asc_path);
    Ok(())
}