use std::path::Path;
use std::sync::Arc;
use serde::Serialize;
use void_core::ops::merge::{merge, merge_abort, merge_continue, MergeOptions, MergeResult};
use crate::context::{load_signing_key, open_repo, signing_key_exists, void_err_to_cli};
use crate::observer::ProgressObserver;
use crate::output::{run_command, CliError, CliOptions};
/// Parsed command-line arguments for the `merge` subcommand.
#[derive(Debug)]
pub struct MergeArgs {
    /// Branch or commit to merge into HEAD. Required unless
    /// `continue_merge` or `abort_merge` is set (see `run`).
    pub target: Option<String>,
    /// Forwarded to `MergeOptions::force` — presumably forces the merge past
    /// safety checks in `void_core`; confirm exact semantics there.
    pub force: bool,
    /// `--continue`: resume a previously conflicted merge.
    pub continue_merge: bool,
    /// `--abort`: discard the in-progress merge state.
    pub abort_merge: bool,
}
/// Serializable mirror of `void_core::workspace::checkout::CheckoutStats`
/// (see `convert_stats`), emitted as camelCase JSON in `MergeOutput`.
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct CheckoutStats {
    pub files_restored: usize,
    pub bytes_written: u64,
    pub files_skipped: usize,
    pub shards_read: usize,
}
/// JSON result payload of the `merge` subcommand.
///
/// `status` is one of `"aborted"`, `"already_up_to_date"`, `"fast_forward"`,
/// `"merged"`, or `"conflicts"`; the optional fields are populated per
/// status and omitted from the JSON when `None`.
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct MergeOutput {
    pub status: String,
    /// CID of the new HEAD commit (fast-forward and merged statuses).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub new_head: Option<String>,
    /// Workspace checkout statistics (fast-forward and merged statuses).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub stats: Option<CheckoutStats>,
    /// Conflicting file paths (conflicts status only).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub conflicts: Option<Vec<String>>,
}
/// Entry point for the `merge` subcommand.
///
/// Dispatches on the flag combination in `args`:
/// - `abort_merge`: discards the in-progress merge state,
/// - `continue_merge`: resumes a conflicted merge,
/// - otherwise: merges `args.target` (required; invalid-args error when absent)
///   into HEAD, showing a progress spinner unless JSON output is active.
///
/// # Errors
/// Returns `CliError` for missing arguments or any failure reported by the
/// underlying `void_core` merge operations.
pub fn run(cwd: &Path, args: MergeArgs, opts: &CliOptions) -> Result<(), CliError> {
    run_command("merge", opts, |ctx| {
        let repo = open_repo(cwd)?;

        // Best-effort signing-key lookup, shared by the --continue and normal
        // paths: a missing or unreadable key degrades to an unsigned merge
        // instead of failing the command. (A closure, not a fn, so we never
        // have to name the key type here.)
        let try_load_key = || {
            if signing_key_exists() {
                load_signing_key().ok()
            } else {
                None
            }
        };

        if args.abort_merge {
            ctx.progress("Aborting merge...");
            merge_abort(repo.context()).map_err(void_err_to_cli)?;
            if !ctx.use_json() {
                ctx.info("Merge aborted");
            }
            return Ok(MergeOutput {
                status: "aborted".to_string(),
                new_head: None,
                stats: None,
                conflicts: None,
            });
        }

        if args.continue_merge {
            ctx.progress("Continuing merge...");
            let signing_key = try_load_key();
            let result =
                merge_continue(repo.context(), signing_key.as_ref()).map_err(void_err_to_cli)?;
            // No target name is available when continuing a saved merge.
            return handle_merge_result(ctx, result, None);
        }

        let target = args
            .target
            .ok_or_else(|| CliError::invalid_args("target branch or commit required"))?;
        ctx.progress(format!("Merging '{}'...", target));

        // Hide the interactive spinner in JSON mode so progress output does
        // not corrupt the machine-readable stream.
        let observer: Arc<ProgressObserver> = if ctx.use_json() {
            Arc::new(ProgressObserver::new_hidden())
        } else {
            Arc::new(ProgressObserver::new("Merging..."))
        };

        let merge_opts = MergeOptions {
            message: None,
            no_commit: false,
            force: args.force,
            observer: Some(observer.clone()),
            signing_key: try_load_key(),
        };
        let result = merge(repo.context(), &target, merge_opts).map_err(void_err_to_cli)?;
        observer.finish();
        handle_merge_result(ctx, result, Some(&target))
    })
}
/// Copy the core checkout statistics into the CLI's serializable form.
fn convert_stats(stats: &void_core::workspace::checkout::CheckoutStats) -> CheckoutStats {
    CheckoutStats {
        shards_read: stats.shards_read,
        files_skipped: stats.files_skipped,
        bytes_written: stats.bytes_written,
        files_restored: stats.files_restored,
    }
}
/// Convert a core `MergeResult` into the CLI's `MergeOutput`, printing
/// human-readable summaries via `ctx` unless JSON output is active.
///
/// `target` is the name the user asked to merge, when known (it is `None`
/// on the `--continue` path, where the original target is not available).
///
/// # Errors
/// Currently always returns `Ok`; the `Result` return matches the
/// `run_command` closure contract.
fn handle_merge_result(
    ctx: &mut crate::output::CommandContext,
    result: MergeResult,
    target: Option<&str>,
) -> Result<MergeOutput, CliError> {
    match result {
        MergeResult::AlreadyUpToDate => {
            if !ctx.use_json() {
                ctx.info("Already up to date.");
            }
            Ok(MergeOutput {
                status: "already_up_to_date".to_string(),
                new_head: None,
                stats: None,
                conflicts: None,
            })
        }
        MergeResult::FastForward { new_head, stats } => {
            if !ctx.use_json() {
                // Abbreviate the CID for display. `get` (instead of direct
                // slicing) cannot panic if byte 12 were not a char boundary;
                // it falls back to the full string.
                let short_cid = new_head.get(..12).unwrap_or(&new_head);
                ctx.info(format!("Fast-forward to {}...", short_cid));
                ctx.info(format!("Updated {} files", stats.files_restored));
            }
            Ok(MergeOutput {
                status: "fast_forward".to_string(),
                new_head: Some(new_head),
                stats: Some(convert_stats(&stats)),
                conflicts: None,
            })
        }
        MergeResult::Merged {
            new_head,
            stats,
            auto_resolved,
        } => {
            if !ctx.use_json() {
                if let Some(target_name) = target {
                    ctx.info(format!("Merged '{}' into HEAD", target_name));
                } else {
                    ctx.info("Merge completed");
                }
                if auto_resolved > 0 {
                    ctx.info(format!("Auto-resolved {} files", auto_resolved));
                }
                if stats.files_restored > 0 {
                    ctx.info(format!("Updated {} files", stats.files_restored));
                }
            }
            Ok(MergeOutput {
                status: "merged".to_string(),
                new_head: Some(new_head),
                stats: Some(convert_stats(&stats)),
                conflicts: None,
            })
        }
        MergeResult::Conflicts {
            conflicts,
            merge_state_saved,
        } => {
            let conflict_paths: Vec<String> = conflicts.iter().map(|c| c.path.clone()).collect();
            if !ctx.use_json() {
                for conflict in &conflicts {
                    ctx.info(format!("Merge conflict in {}", conflict.path));
                }
                ctx.info("");
                ctx.info("Automatic merge failed; fix conflicts and then commit");
                ctx.info("Use 'void merge --continue' after resolving, or 'void merge --abort' to cancel");
                if !merge_state_saved {
                    ctx.warn("Warning: merge state could not be saved");
                }
            }
            Ok(MergeOutput {
                status: "conflicts".to_string(),
                new_head: None,
                stats: None,
                conflicts: Some(conflict_paths),
            })
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Render a `MergeOutput` as its JSON string.
    fn json_of(output: &MergeOutput) -> String {
        serde_json::to_string(output).unwrap()
    }

    /// Build an output carrying only a status, with every optional field empty.
    fn bare(status: &str) -> MergeOutput {
        MergeOutput {
            status: status.to_string(),
            new_head: None,
            stats: None,
            conflicts: None,
        }
    }

    #[test]
    fn test_merge_output_already_up_to_date() {
        let json = json_of(&bare("already_up_to_date"));
        assert!(json.contains("\"status\":\"already_up_to_date\""));
        // None fields must be omitted entirely, not serialized as null.
        for absent in ["\"newHead\"", "\"stats\"", "\"conflicts\""] {
            assert!(!json.contains(absent));
        }
    }

    #[test]
    fn test_merge_output_fast_forward() {
        let output = MergeOutput {
            status: "fast_forward".to_string(),
            new_head: Some("bafyabc123456789".to_string()),
            stats: Some(CheckoutStats {
                files_restored: 10,
                bytes_written: 1024,
                files_skipped: 2,
                shards_read: 3,
            }),
            conflicts: None,
        };
        let json = json_of(&output);
        // Every field renders in camelCase with its exact value.
        for needle in [
            "\"status\":\"fast_forward\"",
            "\"newHead\":\"bafyabc123456789\"",
            "\"filesRestored\":10",
            "\"bytesWritten\":1024",
            "\"filesSkipped\":2",
            "\"shardsRead\":3",
        ] {
            assert!(json.contains(needle));
        }
        assert!(!json.contains("\"conflicts\""));
    }

    #[test]
    fn test_merge_output_merged() {
        let output = MergeOutput {
            status: "merged".to_string(),
            new_head: Some("bafydef987654321".to_string()),
            stats: Some(CheckoutStats {
                files_restored: 5,
                bytes_written: 512,
                files_skipped: 1,
                shards_read: 2,
            }),
            conflicts: None,
        };
        let json = json_of(&output);
        for needle in [
            "\"status\":\"merged\"",
            "\"newHead\":\"bafydef987654321\"",
            "\"filesRestored\":5",
        ] {
            assert!(json.contains(needle));
        }
        assert!(!json.contains("\"conflicts\""));
    }

    #[test]
    fn test_merge_output_conflicts() {
        let output = MergeOutput {
            status: "conflicts".to_string(),
            new_head: None,
            stats: None,
            conflicts: Some(vec!["src/main.rs".to_string(), "README.md".to_string()]),
        };
        let json = json_of(&output);
        for needle in [
            "\"status\":\"conflicts\"",
            "\"conflicts\"",
            "src/main.rs",
            "README.md",
        ] {
            assert!(json.contains(needle));
        }
        assert!(!json.contains("\"newHead\""));
        assert!(!json.contains("\"stats\""));
    }

    #[test]
    fn test_merge_output_aborted() {
        let json = json_of(&bare("aborted"));
        assert!(json.contains("\"status\":\"aborted\""));
    }
}