use crate::ops::utils::short_oid;
use anyhow::{bail, Context, Result};
use serde::{Deserialize, Serialize};
use std::fmt;
use std::path::{Path, PathBuf};
use std::process::Command;
/// Per-repository backup configuration, persisted as pretty-printed JSON
/// under `<git dir>/securegit/backup.json` (see `config_path`).
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct BackupConfig {
    /// Configured backup targets, in insertion order.
    pub destinations: Vec<BackupDestination>,
    /// Retention policy; falls back to `RetentionConfig::default()` when
    /// the key is absent from the JSON.
    #[serde(default)]
    pub retention: RetentionConfig,
}
/// One configured backup target.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BackupDestination {
    /// User-chosen label; uniqueness is enforced by `add_destination`.
    pub name: String,
    /// Transport used to deliver bundles to this destination.
    pub backend: BackendType,
    /// Backend-specific destination string: a directory path (local),
    /// `user@host:path` (rsync), or `remote:path` (rclone).
    pub destination: String,
    /// When true, `trigger_auto_backup` pushes here automatically.
    #[serde(default)]
    pub auto_backup: bool,
    /// RFC 3339 timestamp of the most recent backup, if any.
    pub last_backup: Option<String>,
    /// Full HEAD commit SHA recorded at the most recent backup, if any.
    pub last_sha: Option<String>,
}
/// Supported backup transports. Serialized in lowercase ("local", "rsync",
/// "rclone"), matching the type hints accepted by `add_destination`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum BackendType {
    /// Plain filesystem copy into a directory.
    Local,
    /// `rsync` to an `user@host:path` destination.
    Rsync,
    /// `rclone copy` to a configured `remote:path`.
    Rclone,
}
impl fmt::Display for BackendType {
    /// Render the backend name in the same lowercase form used by serde
    /// serialization and the CLI type hints.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let label = match self {
            Self::Local => "local",
            Self::Rsync => "rsync",
            Self::Rclone => "rclone",
        };
        f.write_str(label)
    }
}
/// Limits on how many backups to keep and for how long.
/// NOTE(review): nothing in this module enforces these limits —
/// presumably pruning lives elsewhere; confirm before relying on it.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RetentionConfig {
    /// Maximum number of bundles to retain.
    pub max_backups: usize,
    /// Maximum bundle age, in days.
    pub max_age_days: u32,
}
impl Default for RetentionConfig {
fn default() -> Self {
Self {
max_backups: 10,
max_age_days: 90,
}
}
}
/// Metadata about a bundle produced by `create_bundle`.
pub struct BundleInfo {
    /// Object count; currently always 0 because `create_bundle` does not
    /// parse a count out of the `git bundle create` output.
    pub objects: usize,
    /// Size of the bundle file in bytes.
    pub size_bytes: u64,
    /// Path to the bundle inside a deliberately persisted temp directory;
    /// the caller is responsible for cleanup.
    pub path: PathBuf,
}
/// Location of the backup config: `<git dir>/securegit/backup.json`.
/// Discovery lets this work from any path inside the worktree.
fn config_path(repo_path: &Path) -> Result<PathBuf> {
    let repo = git2::Repository::discover(repo_path).context("Not a git repository")?;
    let base = repo.path().join("securegit");
    Ok(base.join("backup.json"))
}
/// Load the repo's backup config, or a default (empty) config when no
/// config file has been written yet. Errors only for a repo that cannot
/// be discovered, or a file that exists but cannot be read or parsed.
pub fn load_config(repo_path: &Path) -> Result<BackupConfig> {
    let path = config_path(repo_path)?;
    if path.exists() {
        let raw = std::fs::read_to_string(&path).context("Failed to read backup config")?;
        serde_json::from_str(&raw).context("Failed to parse backup config")
    } else {
        Ok(BackupConfig::default())
    }
}
/// Serialize `config` as pretty-printed JSON to the repo's backup config
/// file, creating `<git dir>/securegit/` on first use.
pub fn save_config(repo_path: &Path, config: &BackupConfig) -> Result<()> {
    let path = config_path(repo_path)?;
    // Ensure the securegit directory exists before the first write.
    if let Some(dir) = path.parent() {
        std::fs::create_dir_all(dir)?;
    }
    let json = serde_json::to_string_pretty(config)?;
    std::fs::write(&path, json).context("Failed to write backup config")
}
/// Infer the backend from the shape of a destination string:
/// `user@host:path` → rsync; `remote:path` → rclone (unless the prefix is
/// a single char, i.e. a Windows drive letter, or contains a path
/// separator); anything else → local.
pub fn detect_backend(dest: &str) -> BackendType {
    let looks_like_ssh = dest.contains('@') && dest.contains(':');
    if looks_like_ssh {
        return BackendType::Rsync;
    }
    match dest.split_once(':') {
        // A multi-char, separator-free prefix before ':' is an rclone remote name.
        Some((remote, _)) if remote.len() > 1 && !remote.contains('/') && !remote.contains('\\') => {
            BackendType::Rclone
        }
        _ => BackendType::Local,
    }
}
/// Register a new backup destination in the repo's backup config.
///
/// `type_hint` forces a backend ("local"/"rsync"/"rclone", matched
/// case-insensitively); when `None` the backend is inferred from `dest`
/// via `detect_backend`. Fails when `name` is already taken, the hint is
/// unknown, or the backend's external tool is not installed.
pub fn add_destination(
    repo_path: &Path,
    name: &str,
    dest: &str,
    type_hint: Option<&str>,
    auto: bool,
) -> Result<()> {
    let mut config = load_config(repo_path)?;
    if config.destinations.iter().any(|d| d.name == name) {
        bail!(
            "Backup destination '{}' already exists. Remove it first.",
            name
        );
    }
    // Accept hints case-insensitively ("Local" == "local"); previously
    // only exact lowercase spellings matched.
    let backend = match type_hint {
        None => detect_backend(dest),
        Some(hint) if hint.eq_ignore_ascii_case("local") => BackendType::Local,
        Some(hint) if hint.eq_ignore_ascii_case("rsync") => BackendType::Rsync,
        Some(hint) if hint.eq_ignore_ascii_case("rclone") => BackendType::Rclone,
        Some(other) => bail!(
            "Unknown backend type '{}'. Use: local, rsync, rclone",
            other
        ),
    };
    // Fail at registration time with an install hint rather than at the
    // first backup attempt.
    check_tool_available(backend)?;
    config.destinations.push(BackupDestination {
        name: name.to_string(),
        backend,
        destination: dest.to_string(),
        auto_backup: auto,
        last_backup: None,
        last_sha: None,
    });
    save_config(repo_path, &config)
}
pub fn remove_destination(repo_path: &Path, name: &str) -> Result<()> {
let mut config = load_config(repo_path)?;
let before = config.destinations.len();
config.destinations.retain(|d| d.name != name);
if config.destinations.len() == before {
bail!("No backup destination named '{}'", name);
}
save_config(repo_path, &config)
}
/// Create a git bundle of the repository in a freshly persisted temp dir.
///
/// With `all_branches` the bundle covers `--all`; otherwise it covers
/// HEAD, the current branch ref (when HEAD is attached), and all tags.
/// The returned file's parent temp directory is deliberately kept alive;
/// the caller (e.g. `trigger_auto_backup`) is responsible for cleanup.
pub fn create_bundle(repo_path: &Path, all_branches: bool) -> Result<BundleInfo> {
    let repo = git2::Repository::discover(repo_path)?;
    let workdir = repo.workdir().unwrap_or(repo_path);
    let repo_name = workdir
        .file_name()
        .map(|n| n.to_string_lossy().to_string())
        .unwrap_or_else(|| "repo".to_string());
    let head = repo.head().context("No HEAD — is the repo empty?")?;
    let commit = head.peel_to_commit()?;
    let short_sha = short_oid(&commit.id());
    // On a detached HEAD, shorthand() yields "HEAD"; fine for the
    // filename, but it must not become a refs/heads/ path below.
    let branch = head.shorthand().unwrap_or("HEAD");
    let timestamp = chrono::Utc::now().format("%Y%m%dT%H%M%SZ");
    let filename = format!(
        "{}-{}-{}-{}.bundle",
        repo_name, branch, short_sha, timestamp
    );
    let tmp_dir = tempfile::tempdir().context("Failed to create temp directory")?;
    // keep() persists the directory so the bundle outlives this function.
    let bundle_path = tmp_dir.keep().join(&filename);
    let mut cmd = Command::new("git");
    cmd.arg("bundle").arg("create").arg(&bundle_path);
    if all_branches {
        cmd.arg("--all");
    } else {
        cmd.arg("HEAD");
        // Bug fix: only name refs/heads/<branch> when HEAD is actually a
        // branch — previously a detached HEAD produced "refs/heads/HEAD",
        // which does not exist and made `git bundle create` fail.
        if head.is_branch() {
            cmd.arg(format!("refs/heads/{}", branch));
        }
        cmd.arg("--tags");
    }
    cmd.current_dir(workdir);
    let output = cmd.output().context("Failed to run git bundle create")?;
    if !output.status.success() {
        let stderr = String::from_utf8_lossy(&output.stderr);
        bail!("git bundle create failed: {}", stderr.trim());
    }
    let meta = std::fs::metadata(&bundle_path)?;
    Ok(BundleInfo {
        // `git bundle create` does not report an object count; left at 0.
        objects: 0,
        size_bytes: meta.len(),
        path: bundle_path,
    })
}
/// Deliver `bundle` to `dest` using its configured backend.
///
/// Verifies the backend's external tool exists first, so a missing tool
/// fails with an install hint rather than a spawn error. rsync/rclone
/// inherit stdio, so their `--progress` output is visible to the user.
pub fn push_to_destination(dest: &BackupDestination, bundle: &Path) -> Result<()> {
    check_tool_available(dest.backend)?;
    match dest.backend {
        BackendType::Local => {
            // Plain filesystem copy into <dest dir>/<bundle filename>.
            let dest_dir = Path::new(&dest.destination);
            std::fs::create_dir_all(dest_dir).context("Failed to create local backup directory")?;
            let target = dest_dir.join(
                bundle
                    .file_name()
                    .ok_or_else(|| anyhow::anyhow!("Invalid bundle path"))?,
            );
            std::fs::copy(bundle, &target).context("Failed to copy bundle to local destination")?;
        }
        BackendType::Rsync => {
            // -a archive mode, -z compress in transit.
            let status = Command::new("rsync")
                .args(["-az", "--progress"])
                .arg(bundle)
                .arg(&dest.destination)
                .status()
                .context("Failed to run rsync")?;
            if !status.success() {
                bail!(
                    "rsync failed with exit code {}",
                    status.code().unwrap_or(-1)
                );
            }
        }
        BackendType::Rclone => {
            // rclone copy <source file> <remote dir>; the source is rebuilt
            // as "<parent>/<name>" from the bundle path.
            let bundle_dir = bundle
                .parent()
                .ok_or_else(|| anyhow::anyhow!("Invalid bundle path"))?;
            let bundle_name = bundle
                .file_name()
                .ok_or_else(|| anyhow::anyhow!("Invalid bundle path"))?
                .to_string_lossy();
            let status = Command::new("rclone")
                .args(["copy", "--progress"])
                .arg(format!("{}/{}", bundle_dir.display(), bundle_name))
                .arg(&dest.destination)
                .status()
                .context("Failed to run rclone")?;
            if !status.success() {
                bail!(
                    "rclone failed with exit code {}",
                    status.code().unwrap_or(-1)
                );
            }
        }
    }
    Ok(())
}
/// Run `git bundle verify` on a bundle file and return git's combined
/// stdout+stderr on success; on failure the same text is embedded in the
/// error. Fails early with a clear message when the file does not exist.
pub fn verify_bundle(bundle_path: &Path) -> Result<String> {
    if !bundle_path.exists() {
        bail!("Bundle file not found: {}", bundle_path.display());
    }
    let result = Command::new("git")
        .arg("bundle")
        .arg("verify")
        .arg(bundle_path)
        .output()
        .context("Failed to run git bundle verify")?;
    // git splits its report across both streams; callers get everything.
    let combined = format!(
        "{}{}",
        String::from_utf8_lossy(&result.stdout),
        String::from_utf8_lossy(&result.stderr)
    );
    if result.status.success() {
        Ok(combined)
    } else {
        bail!("Bundle verification failed:\n{}", combined)
    }
}
/// Restore a repository by cloning from a bundle.
///
/// `source` may be a local path, an rsync-style remote (`user@host:path`),
/// or an rclone remote (`remote:path`); remote bundles are downloaded to a
/// temp directory first. `output` overrides the clone directory; otherwise
/// the bundle's file stem is used, relative to the current directory.
/// Returns the path of the cloned working directory.
pub fn restore_from_bundle(source: &str, output: Option<&Path>) -> Result<PathBuf> {
    let local_bundle = if source.contains('@') && source.contains(':') {
        // rsync-style remote — download the bundle first.
        check_tool_available(BackendType::Rsync)?;
        let tmp = tempfile::tempdir()?;
        // keep() persists the dir so the file outlives `tmp`.
        // NOTE(review): this temp dir is never removed after the clone.
        let local = tmp.keep().join("restore.bundle");
        let status = Command::new("rsync")
            .args(["-az"])
            .arg(source)
            .arg(&local)
            .status()
            .context("Failed to download bundle via rsync")?;
        if !status.success() {
            bail!("rsync download failed");
        }
        local
    } else if detect_backend(source) == BackendType::Rclone {
        check_tool_available(BackendType::Rclone)?;
        let tmp = tempfile::tempdir()?;
        let tmp_path = tmp.keep();
        let status = Command::new("rclone")
            .args(["copy"])
            .arg(source)
            .arg(&tmp_path)
            .status()
            .context("Failed to download bundle via rclone")?;
        if !status.success() {
            bail!("rclone download failed");
        }
        // rclone copy writes into the directory; take whatever arrived.
        // Assumes `source` names a single file — TODO confirm with callers.
        let entry = std::fs::read_dir(&tmp_path)?
            .filter_map(|e| e.ok())
            .next()
            .ok_or_else(|| anyhow::anyhow!("rclone downloaded nothing"))?;
        entry.path()
    } else {
        // Anything else is treated as a local filesystem path.
        PathBuf::from(source)
    };
    if !local_bundle.exists() {
        bail!("Bundle file not found: {}", local_bundle.display());
    }
    // Default clone dir: bundle filename without its extension.
    let out_dir = if let Some(p) = output {
        p.to_path_buf()
    } else {
        let stem = local_bundle
            .file_stem()
            .map(|s| s.to_string_lossy().to_string())
            .unwrap_or_else(|| "restored".to_string());
        PathBuf::from(&stem)
    };
    let status = Command::new("git")
        .args(["clone"])
        .arg(&local_bundle)
        .arg(&out_dir)
        .status()
        .context("Failed to clone from bundle")?;
    if !status.success() {
        bail!("git clone from bundle failed");
    }
    Ok(out_dir)
}
/// Best-effort backup hook: bundle HEAD and push to every destination
/// marked `auto_backup`. Never returns an error — failures are logged via
/// `tracing` so the calling git operation is not interrupted.
pub fn trigger_auto_backup(repo_path: &Path) {
    let config = match load_config(repo_path) {
        Ok(c) => c,
        Err(_) => return, // unreadable config — nothing sensible to do
    };
    let auto_dests: Vec<_> = config
        .destinations
        .iter()
        .filter(|d| d.auto_backup)
        .collect();
    if auto_dests.is_empty() {
        return;
    }
    let bundle = match create_bundle(repo_path, false) {
        Ok(b) => b,
        Err(e) => {
            tracing::warn!("Auto-backup: failed to create bundle: {}", e);
            return;
        }
    };
    // Bug fix: record which destinations actually received the bundle so
    // last_backup/last_sha are only stamped for successful pushes —
    // previously a failed push was still marked as freshly backed up.
    let mut succeeded: Vec<&str> = Vec::new();
    for dest in auto_dests {
        match push_to_destination(dest, &bundle.path) {
            Ok(()) => {
                tracing::info!("Auto-backup: pushed to '{}' ({})", dest.name, dest.backend);
                succeeded.push(&dest.name);
            }
            Err(e) => {
                tracing::warn!("Auto-backup: failed to push to '{}': {}", dest.name, e);
            }
        }
    }
    if !succeeded.is_empty() {
        // Reload before writing so we don't clobber config edits made
        // while the pushes were running.
        if let Ok(mut config) = load_config(repo_path) {
            let now = chrono::Utc::now().to_rfc3339();
            let sha = git2::Repository::discover(repo_path).ok().and_then(|r| {
                let commit = r.head().ok()?.peel_to_commit().ok()?;
                Some(commit.id().to_string())
            });
            for dest in &mut config.destinations {
                if succeeded.iter().any(|&n| n == dest.name) {
                    dest.last_backup = Some(now.clone());
                    dest.last_sha = sha.clone();
                }
            }
            let _ = save_config(repo_path, &config);
        }
    }
    // create_bundle persists its temp dir; clean up the file and the dir.
    let _ = std::fs::remove_file(&bundle.path);
    if let Some(parent) = bundle.path.parent() {
        let _ = std::fs::remove_dir(parent);
    }
}
/// Ensure the external tool for `backend` is on PATH; `Local` needs none.
/// On failure, the error carries an OS-appropriate install hint.
fn check_tool_available(backend: BackendType) -> Result<()> {
    let tool = match backend {
        BackendType::Local => return Ok(()),
        BackendType::Rsync => "rsync",
        BackendType::Rclone => "rclone",
    };
    if which::which(tool).is_ok() {
        return Ok(());
    }
    let install_hint = match (tool, std::env::consts::OS) {
        ("rsync", "linux") => "sudo dnf install rsync (or: sudo apt install rsync)",
        ("rsync", "macos") => "brew install rsync",
        ("rclone", "linux") => {
            "sudo dnf install rclone (or: curl https://rclone.org/install.sh | sudo bash)"
        }
        ("rclone", "macos") => "brew install rclone",
        _ => "See installation docs for your platform",
    };
    bail!(
        "{} is not installed. Install it with:\n  {}",
        tool,
        install_hint
    )
}
/// Human-readable size: "N B" below 1 KiB, "X.Y KB" below 1 MiB, and
/// "X.Y MB" from 1 MiB up (no GB tier — large sizes stay in MB).
pub fn format_size(bytes: u64) -> String {
    const KIB: u64 = 1024;
    const MIB: u64 = 1024 * 1024;
    match bytes {
        b if b >= MIB => format!("{:.1} MB", b as f64 / MIB as f64),
        b if b >= KIB => format!("{:.1} KB", b as f64 / KIB as f64),
        b => format!("{} B", b),
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // ---- detect_backend heuristics ----------------------------------

    #[test]
    fn test_detect_backend_local() {
        // Plain paths (absolute, relative, bare) are local.
        assert_eq!(detect_backend("/tmp/backups"), BackendType::Local);
        assert_eq!(detect_backend("/home/user/backup-dir"), BackendType::Local);
        assert_eq!(detect_backend("./relative/path"), BackendType::Local);
        assert_eq!(detect_backend("backups"), BackendType::Local);
    }

    #[test]
    fn test_detect_backend_rsync() {
        // user@host:path is treated as an rsync/ssh destination.
        assert_eq!(
            detect_backend("user@host:/path/to/backup"),
            BackendType::Rsync
        );
        assert_eq!(
            detect_backend("deploy@192.168.1.1:/backups"),
            BackendType::Rsync
        );
        assert_eq!(
            detect_backend("root@server.example.com:/var/backups"),
            BackendType::Rsync
        );
    }

    #[test]
    fn test_detect_backend_rclone() {
        // remote:path with no '@' is treated as an rclone remote.
        assert_eq!(detect_backend("remote:bucket/"), BackendType::Rclone);
        assert_eq!(detect_backend("s3:my-bucket/backups"), BackendType::Rclone);
        assert_eq!(
            detect_backend("gdrive:securegit-backups"),
            BackendType::Rclone
        );
    }

    #[test]
    fn test_detect_backend_windows_drive() {
        // Single-letter prefix before ':' is a drive letter, not a remote.
        assert_eq!(detect_backend("C:\\Users\\backup"), BackendType::Local);
        assert_eq!(detect_backend("D:\\backups"), BackendType::Local);
    }

    // ---- config defaults --------------------------------------------

    #[test]
    fn test_backup_config_default() {
        let config = BackupConfig::default();
        assert!(config.destinations.is_empty());
        assert_eq!(config.retention.max_backups, 10);
        assert_eq!(config.retention.max_age_days, 90);
    }

    #[test]
    fn test_retention_config_default() {
        let retention = RetentionConfig::default();
        assert_eq!(retention.max_backups, 10);
        assert_eq!(retention.max_age_days, 90);
    }

    // Helper: fresh git repo in a temp dir. The TempDir guard must be
    // held by the caller so the directory is not deleted mid-test.
    fn create_test_repo() -> (tempfile::TempDir, PathBuf) {
        let tmpdir = tempfile::tempdir().expect("create tempdir");
        let repo_path = tmpdir.path().to_path_buf();
        git2::Repository::init(&repo_path).expect("git init");
        (tmpdir, repo_path)
    }

    // ---- destination add/remove -------------------------------------

    #[test]
    fn test_add_remove_destination() {
        let (_tmpdir, repo_path) = create_test_repo();
        add_destination(&repo_path, "local-backup", "/tmp/test-backup", None, false)
            .expect("add destination should succeed");
        let config = load_config(&repo_path).expect("load config");
        assert_eq!(config.destinations.len(), 1);
        assert_eq!(config.destinations[0].name, "local-backup");
        assert_eq!(config.destinations[0].backend, BackendType::Local);
        assert_eq!(config.destinations[0].destination, "/tmp/test-backup");
        assert!(!config.destinations[0].auto_backup);
        remove_destination(&repo_path, "local-backup").expect("remove destination should succeed");
        let config = load_config(&repo_path).expect("load config");
        assert!(config.destinations.is_empty());
    }

    #[test]
    fn test_add_duplicate_fails() {
        let (_tmpdir, repo_path) = create_test_repo();
        add_destination(&repo_path, "dup-name", "/tmp/backup1", None, false)
            .expect("first add should succeed");
        let result = add_destination(&repo_path, "dup-name", "/tmp/backup2", None, false);
        assert!(result.is_err(), "Adding duplicate name should fail");
        let err_msg = result.unwrap_err().to_string();
        assert!(
            err_msg.contains("already exists"),
            "Error should mention 'already exists', got: {}",
            err_msg
        );
    }

    #[test]
    fn test_remove_nonexistent_fails() {
        let (_tmpdir, repo_path) = create_test_repo();
        let result = remove_destination(&repo_path, "does-not-exist");
        assert!(
            result.is_err(),
            "Removing nonexistent destination should fail"
        );
    }

    #[test]
    fn test_add_destination_with_auto() {
        let (_tmpdir, repo_path) = create_test_repo();
        add_destination(&repo_path, "auto-backup", "/tmp/auto", None, true)
            .expect("add with auto should succeed");
        let config = load_config(&repo_path).expect("load config");
        assert!(config.destinations[0].auto_backup);
    }

    #[test]
    fn test_add_destination_with_type_hint() {
        // An explicit hint overrides whatever detect_backend would infer.
        let (_tmpdir, repo_path) = create_test_repo();
        add_destination(
            &repo_path,
            "forced-local",
            "remote:bucket",
            Some("local"),
            false,
        )
        .expect("add with type hint should succeed");
        let config = load_config(&repo_path).expect("load config");
        assert_eq!(config.destinations[0].backend, BackendType::Local);
    }

    #[test]
    fn test_add_destination_invalid_type_hint() {
        let (_tmpdir, repo_path) = create_test_repo();
        let result = add_destination(&repo_path, "bad-type", "/tmp/x", Some("ftp"), false);
        assert!(result.is_err(), "Invalid type hint should fail");
        let err_msg = result.unwrap_err().to_string();
        assert!(err_msg.contains("Unknown backend type"));
    }

    // ---- formatting ---------------------------------------------------

    #[test]
    fn test_format_size() {
        assert_eq!(format_size(0), "0 B");
        assert_eq!(format_size(1), "1 B");
        assert_eq!(format_size(512), "512 B");
        assert_eq!(format_size(1023), "1023 B");
        assert_eq!(format_size(1024), "1.0 KB");
        assert_eq!(format_size(1536), "1.5 KB");
        assert_eq!(format_size(10240), "10.0 KB");
        assert_eq!(format_size(1_048_576), "1.0 MB");
        assert_eq!(format_size(1_572_864), "1.5 MB");
        assert_eq!(format_size(10_485_760), "10.0 MB");
        // There is deliberately no GB tier; 1 GiB renders in MB.
        assert_eq!(format_size(1_073_741_824), "1024.0 MB");
    }

    #[test]
    fn test_verify_bundle_nonexistent() {
        let result = verify_bundle(Path::new("/tmp/nonexistent-bundle-12345.bundle"));
        assert!(result.is_err(), "Verifying nonexistent bundle should fail");
        let err_msg = result.unwrap_err().to_string();
        assert!(
            err_msg.contains("not found"),
            "Error should mention file not found, got: {}",
            err_msg
        );
    }

    // ---- persistence round-trip ---------------------------------------

    #[test]
    fn test_config_save_load_roundtrip() {
        let (_tmpdir, repo_path) = create_test_repo();
        let config = BackupConfig {
            destinations: vec![
                BackupDestination {
                    name: "nas".to_string(),
                    backend: BackendType::Local,
                    destination: "/mnt/nas/backups".to_string(),
                    auto_backup: true,
                    last_backup: Some("2026-01-01T00:00:00Z".to_string()),
                    last_sha: Some("abc1234".to_string()),
                },
                BackupDestination {
                    name: "s3".to_string(),
                    backend: BackendType::Rclone,
                    destination: "s3:my-bucket/backups".to_string(),
                    auto_backup: false,
                    last_backup: None,
                    last_sha: None,
                },
            ],
            retention: RetentionConfig {
                max_backups: 5,
                max_age_days: 30,
            },
        };
        save_config(&repo_path, &config).expect("save config");
        let loaded = load_config(&repo_path).expect("load config");
        assert_eq!(loaded.destinations.len(), 2);
        assert_eq!(loaded.destinations[0].name, "nas");
        assert_eq!(loaded.destinations[0].backend, BackendType::Local);
        assert!(loaded.destinations[0].auto_backup);
        assert_eq!(loaded.destinations[1].name, "s3");
        assert_eq!(loaded.destinations[1].backend, BackendType::Rclone);
        assert_eq!(loaded.retention.max_backups, 5);
        assert_eq!(loaded.retention.max_age_days, 30);
    }

    #[test]
    fn test_load_config_no_file_returns_default() {
        let (_tmpdir, repo_path) = create_test_repo();
        let config = load_config(&repo_path).expect("load config from empty repo");
        assert!(config.destinations.is_empty());
    }

    #[test]
    fn test_backend_type_display() {
        assert_eq!(format!("{}", BackendType::Local), "local");
        assert_eq!(format!("{}", BackendType::Rsync), "rsync");
        assert_eq!(format!("{}", BackendType::Rclone), "rclone");
    }
}