// cfgd_core/upgrade.rs
1// Self-update — query GitHub releases, download, verify, atomic install
2
3use std::collections::HashMap;
4use std::fs;
5use std::io::Read;
6use std::path::{Path, PathBuf};
7use std::time::{Duration, SystemTime};
8
9use semver::Version;
10
11use crate::errors::{Result, UpgradeError};
12use crate::output::Printer;
13
14const GITHUB_API_BASE: &str = "https://api.github.com";
15const DEFAULT_REPO: &str = "tj-smith47/cfgd";
16const CACHE_TTL_SECS: u64 = 86400; // 24 hours
17const CACHE_FILENAME: &str = "version-check.json";
18
/// Strip a leading `v` from a git tag, yielding the bare version string.
/// Tags without the prefix pass through unchanged.
fn strip_tag_prefix(tag: &str) -> &str {
    match tag.strip_prefix('v') {
        Some(bare) => bare,
        None => tag,
    }
}
23
/// Information about a GitHub release.
#[derive(Debug, Clone)]
pub struct ReleaseInfo {
    /// Git tag of the release, possibly with a leading 'v' (e.g. "v0.2.0").
    pub tag: String,
    /// Parsed semver version (tag with any 'v' prefix stripped).
    pub version: Version,
    /// Downloadable assets attached to the release.
    pub assets: Vec<ReleaseAsset>,
}
31
/// A downloadable asset attached to a release.
#[derive(Debug, Clone)]
pub struct ReleaseAsset {
    /// Asset filename, e.g. "cfgd-0.2.0-linux-x86_64.tar.gz".
    pub name: String,
    /// Direct download URL (GitHub's `browser_download_url`).
    pub download_url: String,
    /// Size in bytes as reported by the API (0 when the field is absent).
    pub size: u64,
}
39
/// Cached version check result, persisted to disk as camelCase JSON.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "camelCase")]
struct VersionCache {
    // Unix timestamp (seconds) of the last successful API check.
    checked_at_secs: u64,
    // Tag of the latest release at check time (may keep its 'v' prefix).
    latest_tag: String,
    // Latest published version, as a semver string.
    latest_version: String,
    // Version of the binary that performed the check.
    current_version: String,
}
49
/// Result of a version check.
#[derive(Debug, Clone)]
pub struct UpdateCheck {
    /// Version of the currently running binary.
    pub current: Version,
    /// Latest version published upstream.
    pub latest: Version,
    /// True when `latest` is strictly newer than `current`.
    pub update_available: bool,
    /// Full release info; `None` when the result was served from the disk cache.
    pub release: Option<ReleaseInfo>,
}
58
59/// Return the compiled-in version of cfgd.
60pub fn current_version() -> std::result::Result<Version, UpgradeError> {
61    Version::parse(env!("CARGO_PKG_VERSION")).map_err(|e| UpgradeError::VersionParse {
62        message: format!("cannot parse compiled version: {}", e),
63    })
64}
65
66/// Query the GitHub Releases API for the latest release.
67pub fn fetch_latest_release(repo: &str, printer: Option<&Printer>) -> Result<ReleaseInfo> {
68    fetch_latest_release_from(GITHUB_API_BASE, repo, printer)
69}
70
71/// Query a releases API for the latest release (testable with custom base URL).
72fn fetch_latest_release_from(
73    api_base: &str,
74    repo: &str,
75    printer: Option<&Printer>,
76) -> Result<ReleaseInfo> {
77    let url = format!("{}/repos/{}/releases/latest", api_base, repo);
78
79    let spinner = printer.map(|p| p.spinner("Checking for latest release..."));
80
81    let agent = ureq::AgentBuilder::new()
82        .timeout(std::time::Duration::from_secs(300))
83        .build();
84    let response = agent
85        .get(&url)
86        .set("Accept", "application/vnd.github+json")
87        .set("User-Agent", "cfgd-self-update")
88        .call()
89        .map_err(|e| UpgradeError::ApiError {
90            message: format!("{}", e),
91        })?;
92
93    let body: String = response.into_string().map_err(|e| UpgradeError::ApiError {
94        message: format!("failed to read response body: {}", e),
95    })?;
96
97    if let Some(s) = spinner {
98        s.finish_and_clear();
99    }
100
101    parse_release_json(&body)
102}
103
104fn parse_release_json(body: &str) -> Result<ReleaseInfo> {
105    let json: serde_json::Value =
106        serde_json::from_str(body).map_err(|e| UpgradeError::ApiError {
107            message: format!("invalid JSON: {}", e),
108        })?;
109
110    let tag = json["tag_name"]
111        .as_str()
112        .ok_or_else(|| UpgradeError::ApiError {
113            message: "missing tag_name in release".into(),
114        })?
115        .to_string();
116
117    let version_str = strip_tag_prefix(&tag);
118    let version = Version::parse(version_str).map_err(|e| UpgradeError::VersionParse {
119        message: format!("cannot parse release version '{}': {}", tag, e),
120    })?;
121
122    let assets = json["assets"]
123        .as_array()
124        .map(|arr| {
125            arr.iter()
126                .filter_map(|a| {
127                    Some(ReleaseAsset {
128                        name: a["name"].as_str()?.to_string(),
129                        download_url: a["browser_download_url"].as_str()?.to_string(),
130                        size: a["size"].as_u64().unwrap_or(0),
131                    })
132                })
133                .collect()
134        })
135        .unwrap_or_default();
136
137    Ok(ReleaseInfo {
138        tag,
139        version,
140        assets,
141    })
142}
143
144/// Find the correct binary asset for the current OS and architecture.
145pub fn find_asset_for_platform(
146    release: &ReleaseInfo,
147) -> std::result::Result<&ReleaseAsset, UpgradeError> {
148    let os = std::env::consts::OS;
149    let archive_arch = std::env::consts::ARCH;
150
151    let archive_os = match os {
152        "macos" => "darwin",
153        other => other,
154    };
155
156    // Look for: cfgd-<version>-<os>-<arch>.tar.gz (Unix) or .zip (Windows)
157    let version_str = strip_tag_prefix(&release.tag);
158    #[cfg(unix)]
159    let archive_suffix = ".tar.gz";
160    #[cfg(windows)]
161    let archive_suffix = ".zip";
162    let expected_name = format!(
163        "cfgd-{}-{}-{}{}",
164        version_str, archive_os, archive_arch, archive_suffix
165    );
166
167    release
168        .assets
169        .iter()
170        .find(|a| a.name == expected_name)
171        .ok_or_else(|| UpgradeError::NoAsset {
172            os: archive_os.to_string(),
173            arch: archive_arch.to_string(),
174        })
175}
176
177/// Find the checksums asset for a release.
178fn find_checksums_asset(release: &ReleaseInfo) -> Option<&ReleaseAsset> {
179    release
180        .assets
181        .iter()
182        .find(|a| a.name.ends_with("-checksums.txt"))
183}
184
/// Download a file from a URL to a local path.
///
/// Streams the response body to a temp file created in `dest`'s parent
/// directory, then persists it over `dest` (same-filesystem atomic rename),
/// so a failed download never leaves a partial file at `dest`.
///
/// Progress reporting: a progress bar when `printer` is given and the server
/// sent `content-length`, a spinner when the length is unknown, nothing when
/// `printer` is `None`.
///
/// # Errors
/// Returns `UpgradeError::DownloadFailed` on any network, I/O, or rename error.
fn download_to_file(
    url: &str,
    dest: &Path,
    printer: Option<&Printer>,
) -> std::result::Result<(), UpgradeError> {
    let agent = ureq::AgentBuilder::new()
        .timeout(std::time::Duration::from_secs(300))
        .build();
    let response = agent
        .get(url)
        .set("User-Agent", "cfgd-self-update")
        .call()
        .map_err(|e| UpgradeError::DownloadFailed {
            message: format!("{}", e),
        })?;

    // Determine content length for progress tracking
    let content_length: Option<u64> = response
        .header("content-length")
        .and_then(|v| v.parse().ok());

    // Stream directly to a temp file (avoids buffering entire binary in memory)
    let parent = dest.parent().unwrap_or(std::path::Path::new("."));
    let mut tmp =
        tempfile::NamedTempFile::new_in(parent).map_err(|e| UpgradeError::DownloadFailed {
            message: format!("create temp file: {}", e),
        })?;

    // Safety cap: stop reading after 256 MiB. NOTE(review): a body larger
    // than this is silently truncated rather than rejected — presumably the
    // later checksum verification catches it; confirm that is intended.
    const MAX_DOWNLOAD_SIZE: u64 = 256 * 1024 * 1024;
    let mut reader = response.into_reader().take(MAX_DOWNLOAD_SIZE);

    // Use progress bar if we know the size, spinner otherwise
    match (printer, content_length) {
        (Some(p), Some(total)) => {
            let pb = p.progress_bar(total, url);
            // Manual 8 KiB chunked copy so the bar position can be advanced
            // after each read.
            let mut buf = [0u8; 8192];
            let mut downloaded: u64 = 0;
            loop {
                let n = reader
                    .read(&mut buf)
                    .map_err(|e| UpgradeError::DownloadFailed {
                        message: format!("stream to disk: {}", e),
                    })?;
                if n == 0 {
                    break;
                }
                std::io::Write::write_all(&mut tmp, &buf[..n]).map_err(|e| {
                    UpgradeError::DownloadFailed {
                        message: format!("stream to disk: {}", e),
                    }
                })?;
                downloaded += n as u64;
                pb.set_position(downloaded);
            }
            pb.finish_and_clear();
        }
        (Some(p), None) => {
            // Unknown size: spinner plus a straight io::copy.
            let spinner = p.spinner(&format!("Downloading {url}..."));
            std::io::copy(&mut reader, &mut tmp).map_err(|e| UpgradeError::DownloadFailed {
                message: format!("stream to disk: {}", e),
            })?;
            spinner.finish_and_clear();
        }
        _ => {
            // No printer: silent copy.
            std::io::copy(&mut reader, &mut tmp).map_err(|e| UpgradeError::DownloadFailed {
                message: format!("stream to disk: {}", e),
            })?;
        }
    }

    // Atomic rename into place; on failure the temp file is cleaned up by
    // NamedTempFile's Drop.
    tmp.persist(dest)
        .map_err(|e| UpgradeError::DownloadFailed {
            message: format!("rename to {}: {}", dest.display(), e.error),
        })?;

    Ok(())
}
263
/// Parse a checksums.txt file into a map of filename -> hex SHA256.
///
/// Lines are expected to look like `<hash> <filename>`; lines without at
/// least two whitespace-separated fields are skipped. Hashes are lowercased
/// for case-insensitive comparison.
fn parse_checksums(content: &str) -> HashMap<String, String> {
    let mut map = HashMap::new();
    for line in content.lines() {
        let mut fields = line.split_whitespace();
        if let (Some(hash), Some(filename)) = (fields.next(), fields.next()) {
            map.insert(filename.to_string(), hash.to_lowercase());
        }
    }
    map
}
276
277/// Compute the SHA256 hex digest of a file.
278fn sha256_file(path: &Path) -> std::result::Result<String, UpgradeError> {
279    let bytes = fs::read(path).map_err(|e| UpgradeError::DownloadFailed {
280        message: format!("read {}: {}", path.display(), e),
281    })?;
282    Ok(crate::sha256_hex(&bytes))
283}
284
/// Download, verify checksum, extract, and atomically install the new binary.
///
/// Pipeline: download the platform archive into a temp dir → download the
/// release's checksums file and verify the archive's SHA-256 → extract →
/// locate the `cfgd` binary → mark it executable → atomically swap it over
/// the currently running executable.
///
/// Verification policy: an upgrade is refused unless it can be verified — a
/// release with no checksums asset, or whose checksums file has no entry for
/// our archive, is rejected with `ChecksumMismatch` (the variant doubles as
/// "checksum unavailable").
///
/// Returns the path to the newly installed binary.
///
/// # Errors
/// `DownloadFailed` for network/temp-dir errors, `ChecksumMismatch` for
/// failed or impossible verification, `InstallFailed` for extraction,
/// permission, or swap errors.
pub fn download_and_install(
    release: &ReleaseInfo,
    asset: &ReleaseAsset,
    printer: Option<&Printer>,
) -> Result<PathBuf> {
    let current_exe = std::env::current_exe().map_err(|e| UpgradeError::InstallFailed {
        message: format!("cannot determine current binary path: {}", e),
    })?;

    // Create temp directory for download; removed automatically when
    // `tmp_dir` drops (including on every early-error return below).
    let tmp_dir = tempfile::tempdir().map_err(|e| UpgradeError::DownloadFailed {
        message: format!("create temp dir: {}", e),
    })?;

    let archive_path = tmp_dir.path().join(&asset.name);

    // Download archive
    download_to_file(&asset.download_url, &archive_path, printer)?;

    // Download and verify checksum if available
    if let Some(checksums_asset) = find_checksums_asset(release) {
        let checksums_path = tmp_dir.path().join(&checksums_asset.name);
        download_to_file(&checksums_asset.download_url, &checksums_path, printer)?;

        let checksums_content =
            fs::read_to_string(&checksums_path).map_err(|e| UpgradeError::DownloadFailed {
                message: format!("read checksums: {}", e),
            })?;

        let checksums = parse_checksums(&checksums_content);
        if let Some(expected) = checksums.get(&asset.name) {
            let verify_spinner = printer.map(|p| p.spinner("Verifying checksum..."));
            let actual = sha256_file(&archive_path)?;
            if actual != *expected {
                // Clear the spinner before surfacing the mismatch.
                if let Some(s) = verify_spinner {
                    s.finish_and_clear();
                }
                return Err(UpgradeError::ChecksumMismatch {
                    file: asset.name.clone(),
                }
                .into());
            }
            if let Some(s) = verify_spinner {
                s.finish_and_clear();
            }
            tracing::debug!("checksum verified for {}", asset.name);
        } else {
            // Checksums file exists but has no entry for our archive:
            // unverifiable, so refuse to install.
            return Err(UpgradeError::ChecksumMismatch {
                file: asset.name.clone(),
            }
            .into());
        }
    } else {
        // No checksums asset published at all: also refuse to install.
        return Err(UpgradeError::ChecksumMismatch {
            file: asset.name.clone(),
        }
        .into());
    }

    // Extract the archive
    let extract_dir = tmp_dir.path().join("extracted");
    fs::create_dir_all(&extract_dir).map_err(|e| UpgradeError::InstallFailed {
        message: format!("create extract dir: {}", e),
    })?;

    let extract_spinner = printer.map(|p| p.spinner("Extracting archive..."));
    #[cfg(unix)]
    extract_tarball(&archive_path, &extract_dir)?;
    #[cfg(windows)]
    extract_zip(&archive_path, &extract_dir)?;
    if let Some(s) = extract_spinner {
        s.finish_and_clear();
    }

    // Find the cfgd binary in the extracted contents; it is expected at the
    // top level of the archive.
    #[cfg(unix)]
    let binary_name = "cfgd";
    #[cfg(windows)]
    let binary_name = "cfgd.exe";
    let new_binary = extract_dir.join(binary_name);
    if !new_binary.exists() {
        return Err(UpgradeError::InstallFailed {
            message: format!(
                "extracted archive does not contain '{}' binary",
                binary_name
            ),
        }
        .into());
    }

    // Make it executable (no-op on Windows)
    crate::set_file_permissions(&new_binary, 0o755).map_err(|e| UpgradeError::InstallFailed {
        message: format!("set permissions: {}", e),
    })?;

    // Install new binary over old.
    // Unix: atomic rename via tempfile. Windows: rename-dance (can't overwrite running exe).
    let target = &current_exe;
    atomic_replace(&new_binary, target)?;

    Ok(target.clone())
}
390
391/// Atomically replace `target` with `source`.
392/// Copies source to a NamedTempFile in the target directory, then persists it
393/// over the target (atomic rename on the same filesystem).
394#[cfg(unix)]
395fn atomic_replace(source: &Path, target: &Path) -> std::result::Result<(), UpgradeError> {
396    let target_dir = target.parent().ok_or_else(|| UpgradeError::InstallFailed {
397        message: "target has no parent directory".into(),
398    })?;
399
400    // Create a temp file in the target directory so rename is same-FS
401    let tmp =
402        tempfile::NamedTempFile::new_in(target_dir).map_err(|e| UpgradeError::InstallFailed {
403            message: format!("create temp file in {}: {}", target_dir.display(), e),
404        })?;
405
406    // Copy source to the temp file
407    fs::copy(source, tmp.path()).map_err(|e| UpgradeError::InstallFailed {
408        message: format!("copy to staging: {}", e),
409    })?;
410
411    // Persist (atomic rename) temp file to target
412    tmp.persist(target)
413        .map_err(|e| UpgradeError::InstallFailed {
414            message: format!("atomic rename: {}", e),
415        })?;
416
417    Ok(())
418}
419
/// Replace `target` with `source` using the Windows rename-dance.
/// Windows cannot overwrite a running executable, so we rename the current
/// binary to `.exe.old`, copy the new one into place, and clean up `.old`
/// on next startup via `cleanup_old_binary`.
#[cfg(windows)]
fn atomic_replace(source: &Path, target: &Path) -> std::result::Result<(), UpgradeError> {
    // `with_extension` swaps ".exe" for ".exe.old" (replaces, not appends).
    let parked = target.with_extension("exe.old");

    // Best-effort removal of leftovers from a previous upgrade.
    let _ = fs::remove_file(&parked);

    // A running exe cannot be overwritten, but it CAN be renamed — move the
    // live binary out of the way first.
    if target.exists() {
        fs::rename(target, &parked).map_err(|e| UpgradeError::InstallFailed {
            message: format!("rename {} -> {}: {}", target.display(), parked.display(), e),
        })?;
    }

    // Drop the new binary into the now-free target path.
    fs::copy(source, target).map_err(|e| UpgradeError::InstallFailed {
        message: format!("copy {} -> {}: {}", source.display(), target.display(), e),
    })?;
    Ok(())
}
442
443/// Extract a .tar.gz archive to a directory.
444#[cfg(unix)]
445fn extract_tarball(archive: &Path, dest: &Path) -> std::result::Result<(), UpgradeError> {
446    let file = fs::File::open(archive).map_err(|e| UpgradeError::InstallFailed {
447        message: format!("open archive {}: {}", archive.display(), e),
448    })?;
449
450    let gz = flate2::read::GzDecoder::new(file);
451    let mut tar = tar::Archive::new(gz);
452
453    tar.unpack(dest).map_err(|e| UpgradeError::InstallFailed {
454        message: format!("extract archive: {}", e),
455    })?;
456
457    Ok(())
458}
459
/// Extract a .zip archive to a directory.
#[cfg(windows)]
fn extract_zip(archive: &Path, dest: &Path) -> std::result::Result<(), UpgradeError> {
    let file = match fs::File::open(archive) {
        Ok(f) => f,
        Err(e) => {
            return Err(UpgradeError::InstallFailed {
                message: format!("open archive {}: {}", archive.display(), e),
            })
        }
    };

    let mut reader = zip::ZipArchive::new(file).map_err(|e| UpgradeError::InstallFailed {
        message: format!("read zip {}: {}", archive.display(), e),
    })?;
    reader
        .extract(dest)
        .map_err(|e| UpgradeError::InstallFailed {
            message: format!("extract zip: {}", e),
        })
}
474
475/// Check if the daemon is running and restart it.
476/// Returns true if the daemon was restarted, false if it wasn't running.
477pub fn restart_daemon_if_running() -> bool {
478    let status = match crate::daemon::query_daemon_status() {
479        Ok(Some(s)) => s,
480        _ => return false,
481    };
482
483    // Daemon is running — terminate so the service manager restarts it
484    // with the new binary.
485    crate::terminate_process(status.pid);
486    tracing::info!("terminated daemon (pid {})", status.pid);
487    true
488}
489
/// Clean up the old binary left behind by the Windows rename-dance upgrade.
/// Call this on startup. No-op on Unix.
#[cfg(windows)]
pub fn cleanup_old_binary() {
    // Best-effort: failures (exe path unknown, file absent/locked) are ignored.
    match std::env::current_exe() {
        Ok(exe) => {
            let _ = fs::remove_file(exe.with_extension("exe.old"));
        }
        Err(_) => {}
    }
}
499
/// Clean up the old binary left behind by the Windows rename-dance upgrade.
/// Call this on startup. No-op on Unix.
#[cfg(unix)]
pub fn cleanup_old_binary() {
    // Unix atomic_replace doesn't leave old files
    // (its staged temp file is consumed by the atomic rename).
}
506
507/// Check for an update, using a 24h disk cache to avoid excessive API calls.
508pub fn check_with_cache(repo: Option<&str>, printer: Option<&Printer>) -> Result<UpdateCheck> {
509    let repo = repo.unwrap_or(DEFAULT_REPO);
510    let current = current_version()?;
511
512    // Try reading from cache
513    if let Some(cache) = read_version_cache() {
514        let now = SystemTime::now()
515            .duration_since(SystemTime::UNIX_EPOCH)
516            .unwrap_or_default()
517            .as_secs();
518
519        if now.saturating_sub(cache.checked_at_secs) < CACHE_TTL_SECS {
520            let cached_version =
521                Version::parse(&cache.latest_version).map_err(|e| UpgradeError::VersionParse {
522                    message: format!("cached version: {}", e),
523                })?;
524
525            return Ok(UpdateCheck {
526                update_available: cached_version > current,
527                current,
528                latest: cached_version,
529                release: None,
530            });
531        }
532    }
533
534    // Cache miss or expired — fall through to fresh check + update cache
535    let check = check_latest(Some(repo), printer)?;
536
537    let _ = write_version_cache(&VersionCache {
538        checked_at_secs: SystemTime::now()
539            .duration_since(SystemTime::UNIX_EPOCH)
540            .unwrap_or_default()
541            .as_secs(),
542        latest_tag: check
543            .release
544            .as_ref()
545            .map(|r| r.tag.clone())
546            .unwrap_or_default(),
547        latest_version: check.latest.to_string(),
548        current_version: check.current.to_string(),
549    });
550
551    Ok(check)
552}
553
554/// Check for an update without using cache. Always queries the API.
555pub fn check_latest(repo: Option<&str>, printer: Option<&Printer>) -> Result<UpdateCheck> {
556    let repo = repo.unwrap_or(DEFAULT_REPO);
557    let current = current_version()?;
558    let release = fetch_latest_release(repo, printer)?;
559    let update_available = release.version > current;
560
561    Ok(UpdateCheck {
562        current,
563        latest: release.version.clone(),
564        update_available,
565        release: Some(release),
566    })
567}
568
569fn cache_dir() -> Option<PathBuf> {
570    directories::ProjectDirs::from("dev", "cfgd", "cfgd").map(|dirs| dirs.cache_dir().to_path_buf())
571}
572
573fn read_version_cache() -> Option<VersionCache> {
574    let dir = cache_dir()?;
575    let path = dir.join(CACHE_FILENAME);
576    let content = fs::read_to_string(&path).ok()?;
577    serde_json::from_str(&content).ok()
578}
579
580fn write_version_cache(cache: &VersionCache) -> std::result::Result<(), UpgradeError> {
581    let dir = cache_dir().ok_or_else(|| UpgradeError::InstallFailed {
582        message: "cannot determine cache directory".into(),
583    })?;
584
585    fs::create_dir_all(&dir).map_err(|e| UpgradeError::InstallFailed {
586        message: format!("create cache dir: {}", e),
587    })?;
588
589    let path = dir.join(CACHE_FILENAME);
590    let json = serde_json::to_string(cache).map_err(|e| UpgradeError::InstallFailed {
591        message: format!("serialize cache: {}", e),
592    })?;
593
594    crate::atomic_write_str(&path, &json).map_err(|e| UpgradeError::InstallFailed {
595        message: format!("write cache: {}", e),
596    })?;
597
598    Ok(())
599}
600
601/// Invalidate the version check cache so the next check queries the API.
602pub fn invalidate_cache() {
603    if let Some(dir) = cache_dir() {
604        let _ = fs::remove_file(dir.join(CACHE_FILENAME));
605    }
606}
607
608/// Duration for the daemon's version check timer.
609pub fn version_check_interval() -> Duration {
610    Duration::from_secs(CACHE_TTL_SECS)
611}
612
613#[cfg(test)]
614mod tests {
615    use super::*;
616
617    #[test]
618    fn current_version_is_valid_semver() {
619        let v = current_version().expect("CARGO_PKG_VERSION should be valid semver");
620        assert_eq!(
621            v.to_string(),
622            env!("CARGO_PKG_VERSION"),
623            "parsed version should round-trip to the compiled version string"
624        );
625        assert!(
626            v.major > 0 || v.minor > 0 || v.patch > 0,
627            "version should be non-zero: {v}"
628        );
629    }
630
631    #[test]
632    fn parse_checksums_basic() {
633        let content =
634            "abc123  cfgd-0.2.0-linux-x86_64.tar.gz\ndef456  cfgd-0.2.0-darwin-aarch64.tar.gz\n";
635        let map = parse_checksums(content);
636        assert_eq!(map.len(), 2);
637        assert_eq!(
638            map.get("cfgd-0.2.0-linux-x86_64.tar.gz"),
639            Some(&"abc123".to_string())
640        );
641        assert_eq!(
642            map.get("cfgd-0.2.0-darwin-aarch64.tar.gz"),
643            Some(&"def456".to_string())
644        );
645    }
646
647    #[test]
648    fn parse_checksums_empty_lines() {
649        let content = "\nabc123  foo.tar.gz\n\n";
650        let map = parse_checksums(content);
651        assert_eq!(map.len(), 1);
652    }
653
654    #[test]
655    fn parse_release_json_valid() {
656        let json = r#"{
657            "tag_name": "v0.2.0",
658            "assets": [
659                {
660                    "name": "cfgd-0.2.0-linux-x86_64.tar.gz",
661                    "browser_download_url": "https://example.com/cfgd-0.2.0-linux-x86_64.tar.gz",
662                    "size": 1024
663                },
664                {
665                    "name": "cfgd-0.2.0-checksums.txt",
666                    "browser_download_url": "https://example.com/cfgd-0.2.0-checksums.txt",
667                    "size": 256
668                }
669            ]
670        }"#;
671
672        let release = parse_release_json(json).expect("should parse");
673        assert_eq!(release.tag, "v0.2.0");
674        assert_eq!(release.version, Version::new(0, 2, 0));
675        assert_eq!(release.assets.len(), 2);
676        assert_eq!(release.assets[0].name, "cfgd-0.2.0-linux-x86_64.tar.gz");
677    }
678
679    #[test]
680    fn parse_release_json_no_v_prefix() {
681        let json = r#"{
682            "tag_name": "0.3.0",
683            "assets": []
684        }"#;
685
686        let release = parse_release_json(json).expect("should parse");
687        assert_eq!(release.version, Version::new(0, 3, 0));
688    }
689
690    #[test]
691    fn parse_release_json_missing_tag() {
692        let json = r#"{"assets": []}"#;
693        let err = parse_release_json(json).unwrap_err().to_string();
694        assert!(
695            err.contains("missing tag_name"),
696            "error should mention missing tag_name: {err}"
697        );
698    }
699
700    #[test]
701    fn find_asset_matches_current_platform() {
702        let os = std::env::consts::OS;
703        let arch = std::env::consts::ARCH;
704        let archive_os = if os == "macos" { "darwin" } else { os };
705
706        #[cfg(unix)]
707        let suffix = ".tar.gz";
708        #[cfg(windows)]
709        let suffix = ".zip";
710        let expected_name = format!("cfgd-0.2.0-{}-{}{}", archive_os, arch, suffix);
711
712        let release = ReleaseInfo {
713            tag: "v0.2.0".into(),
714            version: Version::new(0, 2, 0),
715            assets: vec![
716                ReleaseAsset {
717                    name: expected_name.clone(),
718                    download_url: "https://example.com/match".into(),
719                    size: 1024,
720                },
721                ReleaseAsset {
722                    name: "cfgd-0.2.0-freebsd-riscv64.tar.gz".into(),
723                    download_url: "https://example.com/other".into(),
724                    size: 1024,
725                },
726            ],
727        };
728
729        let asset = find_asset_for_platform(&release).expect("should find platform asset");
730        assert_eq!(asset.name, expected_name);
731        assert_eq!(asset.download_url, "https://example.com/match");
732    }
733
734    #[test]
735    fn find_asset_returns_error_when_missing() {
736        let release = ReleaseInfo {
737            tag: "v0.2.0".into(),
738            version: Version::new(0, 2, 0),
739            assets: vec![ReleaseAsset {
740                name: "cfgd-0.2.0-freebsd-riscv64.tar.gz".into(),
741                download_url: "https://example.com/other".into(),
742                size: 1024,
743            }],
744        };
745
746        let err = find_asset_for_platform(&release).unwrap_err();
747        let msg = err.to_string();
748        assert!(
749            msg.contains(std::env::consts::OS.replace("macos", "darwin").as_str())
750                || msg.contains(std::env::consts::ARCH),
751            "error should mention the current platform: {msg}"
752        );
753    }
754
755    #[test]
756    fn sha256_file_computes_hash() {
757        let tmp = tempfile::NamedTempFile::new().expect("temp file");
758        fs::write(tmp.path(), b"hello world").expect("write");
759        let hash = sha256_file(tmp.path()).expect("hash");
760        // SHA256 of "hello world"
761        assert_eq!(
762            hash,
763            "b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9"
764        );
765    }
766
767    #[test]
768    fn atomic_replace_overwrites_target() {
769        let dir = tempfile::tempdir().unwrap();
770        let src = dir.path().join("source");
771        let tgt = dir.path().join("target");
772        std::fs::write(&src, "new content").unwrap();
773        std::fs::write(&tgt, "old content").unwrap();
774
775        atomic_replace(&src, &tgt).unwrap();
776        assert_eq!(std::fs::read_to_string(&tgt).unwrap(), "new content");
777    }
778
779    #[test]
780    fn atomic_replace_creates_target() {
781        let dir = tempfile::tempdir().unwrap();
782        let src = dir.path().join("source");
783        let tgt = dir.path().join("target");
784        std::fs::write(&src, "data").unwrap();
785
786        atomic_replace(&src, &tgt).unwrap();
787        assert_eq!(std::fs::read_to_string(&tgt).unwrap(), "data");
788    }
789
790    #[test]
791    fn version_cache_disk_persistence_camel_case() {
792        // Write VersionCache to a temp file, read it back, verify camelCase keys on disk
793        let cache = VersionCache {
794            checked_at_secs: 1711800000,
795            latest_tag: "v0.5.0".into(),
796            latest_version: "0.5.0".into(),
797            current_version: "0.4.0".into(),
798        };
799
800        let dir = tempfile::tempdir().unwrap();
801        let path = dir.path().join("version-check.json");
802
803        // Serialize and write to disk
804        let json = serde_json::to_string(&cache).expect("serialize");
805        fs::write(&path, &json).expect("write");
806
807        // Verify the on-disk JSON uses camelCase keys
808        let raw = fs::read_to_string(&path).expect("read");
809        assert!(
810            raw.contains("checkedAtSecs"),
811            "expected camelCase key 'checkedAtSecs', got: {}",
812            raw
813        );
814        assert!(
815            raw.contains("latestTag"),
816            "expected camelCase key 'latestTag', got: {}",
817            raw
818        );
819        assert!(
820            raw.contains("latestVersion"),
821            "expected camelCase key 'latestVersion', got: {}",
822            raw
823        );
824        assert!(
825            raw.contains("currentVersion"),
826            "expected camelCase key 'currentVersion', got: {}",
827            raw
828        );
829        // Ensure snake_case keys are NOT present
830        assert!(
831            !raw.contains("checked_at_secs"),
832            "should not contain snake_case key 'checked_at_secs'"
833        );
834
835        // Read back and deserialize
836        let restored: VersionCache = serde_json::from_str(&raw).expect("deserialize from disk");
837        assert_eq!(restored.checked_at_secs, 1711800000);
838        assert_eq!(restored.latest_tag, "v0.5.0");
839        assert_eq!(restored.latest_version, "0.5.0");
840        assert_eq!(restored.current_version, "0.4.0");
841    }
842
843    #[test]
844    fn find_asset_wrong_platform_returns_error() {
845        // Assets only for a fake platform should not match the real runtime platform
846        let release = ReleaseInfo {
847            tag: "v1.0.0".into(),
848            version: Version::new(1, 0, 0),
849            assets: vec![
850                ReleaseAsset {
851                    name: "cfgd-1.0.0-fakeos-fakearch.tar.gz".into(),
852                    download_url: "https://example.com/fake".into(),
853                    size: 2048,
854                },
855                ReleaseAsset {
856                    name: "cfgd-1.0.0-anotheros-anotherarch.zip".into(),
857                    download_url: "https://example.com/another".into(),
858                    size: 4096,
859                },
860            ],
861        };
862
863        let result = find_asset_for_platform(&release);
864        assert!(result.is_err(), "should fail for fake platform assets");
865
866        // Verify the error message references the missing platform
867        let err = result.unwrap_err();
868        let err_msg = format!("{}", err);
869        assert!(
870            err_msg.contains("no release found for"),
871            "error should mention missing platform: {}",
872            err_msg
873        );
874    }
875
876    #[test]
877    fn cache_ttl_fresh_cache_is_valid() {
878        // Simulate a cache entry that was just written — should be within TTL
879        let now_secs = SystemTime::now()
880            .duration_since(SystemTime::UNIX_EPOCH)
881            .unwrap()
882            .as_secs();
883
884        let cache = VersionCache {
885            checked_at_secs: now_secs, // just now
886            latest_tag: "v0.3.0".into(),
887            latest_version: "0.3.0".into(),
888            current_version: "0.2.0".into(),
889        };
890
891        let elapsed = now_secs.saturating_sub(cache.checked_at_secs);
892        assert!(
893            elapsed < CACHE_TTL_SECS,
894            "fresh cache should be within TTL: elapsed={}, ttl={}",
895            elapsed,
896            CACHE_TTL_SECS
897        );
898
899        // The cached version should parse and be usable for comparison
900        let cached_version = Version::parse(&cache.latest_version).expect("parse cached version");
901        let current = Version::parse(&cache.current_version).expect("parse current version");
902        assert!(cached_version > current, "0.3.0 > 0.2.0");
903    }
904
905    #[test]
906    fn cache_ttl_expired_cache_is_stale() {
907        // Simulate a cache entry from 25 hours ago — should exceed the 24h TTL
908        let now_secs = SystemTime::now()
909            .duration_since(SystemTime::UNIX_EPOCH)
910            .unwrap()
911            .as_secs();
912
913        let twenty_five_hours_ago = now_secs - (25 * 3600);
914
915        let cache = VersionCache {
916            checked_at_secs: twenty_five_hours_ago,
917            latest_tag: "v0.3.0".into(),
918            latest_version: "0.3.0".into(),
919            current_version: "0.2.0".into(),
920        };
921
922        let elapsed = now_secs.saturating_sub(cache.checked_at_secs);
923        assert!(
924            elapsed >= CACHE_TTL_SECS,
925            "25h-old cache should exceed TTL: elapsed={}, ttl={}",
926            elapsed,
927            CACHE_TTL_SECS
928        );
929    }
930
931    #[test]
932    fn cache_ttl_boundary_just_expired() {
933        // Cache is exactly at TTL boundary + 1 second — should be expired
934        let now_secs = SystemTime::now()
935            .duration_since(SystemTime::UNIX_EPOCH)
936            .unwrap()
937            .as_secs();
938
939        let just_past_ttl = now_secs - CACHE_TTL_SECS - 1;
940
941        let cache = VersionCache {
942            checked_at_secs: just_past_ttl,
943            latest_tag: "v0.3.0".into(),
944            latest_version: "0.3.0".into(),
945            current_version: "0.2.0".into(),
946        };
947
948        let elapsed = now_secs.saturating_sub(cache.checked_at_secs);
949        assert!(
950            elapsed >= CACHE_TTL_SECS,
951            "cache at TTL+1s should be expired"
952        );
953
954        // One second before expiry should still be valid
955        let at_boundary = now_secs - CACHE_TTL_SECS + 1;
956        let boundary_elapsed = now_secs.saturating_sub(at_boundary);
957        assert!(
958            boundary_elapsed < CACHE_TTL_SECS,
959            "cache at TTL-1s should still be valid"
960        );
961    }
962
963    #[test]
964    fn version_cache_deserialization_from_known_json() {
965        // Ensure we can deserialize a known JSON payload (simulates reading from disk)
966        let json = r#"{"checkedAtSecs":1700000000,"latestTag":"v1.2.3","latestVersion":"1.2.3","currentVersion":"1.0.0"}"#;
967        let cache: VersionCache = serde_json::from_str(json).expect("deserialize known JSON");
968        assert_eq!(cache.checked_at_secs, 1700000000);
969        assert_eq!(cache.latest_tag, "v1.2.3");
970        assert_eq!(cache.latest_version, "1.2.3");
971        assert_eq!(cache.current_version, "1.0.0");
972    }
973
    #[test]
    #[cfg(unix)]
    fn extract_tarball_valid() {
        // Round-trip: build a minimal one-file .tar.gz on disk, run the
        // production extractor over it, and confirm the file's bytes survive.
        use flate2::Compression;
        use flate2::write::GzEncoder;

        let dir = tempfile::tempdir().unwrap();
        let archive_path = dir.path().join("test.tar.gz");
        let dest = dir.path().join("out");
        std::fs::create_dir_all(&dest).unwrap();

        // Create a .tar.gz with one file
        {
            let file = std::fs::File::create(&archive_path).unwrap();
            let enc = GzEncoder::new(file, Compression::default());
            let mut tar_builder = tar::Builder::new(enc);
            let content = b"hello from tarball";
            let mut header = tar::Header::new_gnu();
            header.set_size(content.len() as u64);
            header.set_mode(0o644);
            // Checksum is computed last so it covers the final header fields.
            header.set_cksum();
            tar_builder
                .append_data(&mut header, "test.txt", &content[..])
                .unwrap();
            // finish() writes the tar trailer; leaving this scope then drops
            // (and flushes) the gzip encoder so the archive on disk is complete.
            tar_builder.finish().unwrap();
        }

        extract_tarball(&archive_path, &dest).unwrap();
        let extracted = std::fs::read_to_string(dest.join("test.txt")).unwrap();
        assert_eq!(extracted, "hello from tarball");
    }
1005
    #[test]
    fn download_and_install_checksum_mismatch_detection() {
        // Build a real tarball, parse a checksums file carrying a deliberately
        // bogus digest, and confirm the tarball's actual SHA-256 differs —
        // the condition the installer relies on to detect tampering.
        // Create a fake tarball
        let dir = tempfile::tempdir().unwrap();
        let tar_dir = dir.path().join("tar_src");
        std::fs::create_dir_all(&tar_dir).unwrap();
        std::fs::write(tar_dir.join("cfgd"), b"#!/bin/sh\necho fake binary").unwrap();

        let tarball_path = dir.path().join("cfgd-test.tar.gz");
        {
            let tar_file = std::fs::File::create(&tarball_path).unwrap();
            let enc = flate2::write::GzEncoder::new(tar_file, flate2::Compression::default());
            let mut tar_builder = tar::Builder::new(enc);
            tar_builder.append_dir_all(".", &tar_dir).unwrap();
            // finish() writes the tar trailer; scope exit flushes the encoder.
            tar_builder.finish().unwrap();
        }

        // Create a checksums file with WRONG hash
        let checksums =
            "deadbeef00000000000000000000000000000000000000000000000000000000  cfgd-test.tar.gz\n";
        let parsed = parse_checksums(checksums);
        assert_eq!(
            parsed.get("cfgd-test.tar.gz").unwrap(),
            "deadbeef00000000000000000000000000000000000000000000000000000000"
        );

        // The actual hash of the tarball should NOT match the fake hash
        let actual_hash = sha256_file(&tarball_path).unwrap();
        assert_ne!(
            actual_hash, "deadbeef00000000000000000000000000000000000000000000000000000000",
            "real hash should differ from fake"
        );
    }
1039
1040    #[test]
1041    fn version_cache_disk_persistence() {
1042        let dir = tempfile::tempdir().unwrap();
1043        let cache = VersionCache {
1044            checked_at_secs: 1711234567,
1045            latest_tag: "v1.2.3".into(),
1046            latest_version: "1.2.3".into(),
1047            current_version: "1.0.0".into(),
1048        };
1049        let json = serde_json::to_string(&cache).unwrap();
1050        let path = dir.path().join("version-cache.json");
1051        std::fs::write(&path, &json).unwrap();
1052
1053        let content = std::fs::read_to_string(&path).unwrap();
1054        let restored: VersionCache = serde_json::from_str(&content).unwrap();
1055        assert_eq!(restored.checked_at_secs, 1711234567);
1056        assert_eq!(restored.latest_tag, "v1.2.3");
1057        assert_eq!(restored.latest_version, "1.2.3");
1058        assert_eq!(restored.current_version, "1.0.0");
1059
1060        // Verify camelCase serialization
1061        assert!(json.contains("checkedAtSecs"));
1062        assert!(json.contains("latestTag"));
1063    }
1064
    #[test]
    fn find_asset_multiple_platforms_picks_current() {
        // Build one asset that matches the running platform plus a decoy, and
        // check the selector picks the matching one.
        let os = std::env::consts::OS;
        let arch = std::env::consts::ARCH;
        // Release artifacts use Go-style "darwin" for macOS, not "macos".
        let archive_os = if os == "macos" { "darwin" } else { os };
        // Archive extension follows platform convention: tarball on unix, zip
        // on windows. NOTE(review): on any other target `suffix` is undefined
        // and this test would not compile — presumably only unix/windows are
        // supported build targets; confirm.
        #[cfg(unix)]
        let suffix = ".tar.gz";
        #[cfg(windows)]
        let suffix = ".zip";

        let release = ReleaseInfo {
            tag: "v0.5.0".into(),
            version: Version::new(0, 5, 0),
            assets: vec![
                ReleaseAsset {
                    // Exactly matches the host platform triple.
                    name: format!("cfgd-0.5.0-{}-{}{}", archive_os, arch, suffix),
                    download_url: "https://example.com/current".into(),
                    size: 5000,
                },
                ReleaseAsset {
                    // Decoy for an unrelated platform.
                    name: "cfgd-0.5.0-freebsd-riscv64.tar.gz".into(),
                    download_url: "https://example.com/other".into(),
                    size: 4000,
                },
            ],
        };
        let result = find_asset_for_platform(&release);
        assert!(result.is_ok());
        let asset = result.unwrap();
        assert_eq!(asset.download_url, "https://example.com/current");
    }
1096
1097    #[test]
1098    fn find_asset_no_matching_platform() {
1099        let release = ReleaseInfo {
1100            tag: "v0.5.0".into(),
1101            version: Version::new(0, 5, 0),
1102            assets: vec![ReleaseAsset {
1103                name: "cfgd-0.5.0-mips-unknown-linux.tar.gz".into(),
1104                download_url: "https://example.com/mips".into(),
1105                size: 3000,
1106            }],
1107        };
1108        let result = find_asset_for_platform(&release);
1109        // Unless we're running on mips, this should fail
1110        if std::env::consts::ARCH != "mips" {
1111            let err = result.unwrap_err();
1112            let msg = err.to_string();
1113            assert!(
1114                msg.contains(std::env::consts::ARCH),
1115                "error should mention the current arch: {msg}"
1116            );
1117        }
1118    }
1119
1120    #[test]
1121    fn parse_checksums_with_multiple_entries() {
1122        let content = "abc123  file1.tar.gz\ndef456  file2.tar.gz\n";
1123        let parsed = parse_checksums(content);
1124        assert_eq!(parsed.get("file1.tar.gz").unwrap(), "abc123");
1125        assert_eq!(parsed.get("file2.tar.gz").unwrap(), "def456");
1126    }
1127
1128    #[test]
1129    fn parse_checksums_ignores_malformed_lines() {
1130        let content = "abc123  good.tar.gz\nbadline\n  \nabc456  another.tar.gz\n";
1131        let parsed = parse_checksums(content);
1132        assert_eq!(parsed.len(), 2);
1133        assert_eq!(parsed.get("good.tar.gz").unwrap(), "abc123");
1134        assert_eq!(parsed.get("another.tar.gz").unwrap(), "abc456");
1135    }
1136
1137    #[test]
1138    fn parse_checksums_normalizes_to_lowercase() {
1139        let content = "ABCDEF123456  mixed-case.tar.gz\n";
1140        let parsed = parse_checksums(content);
1141        assert_eq!(parsed.get("mixed-case.tar.gz").unwrap(), "abcdef123456");
1142    }
1143
1144    #[test]
1145    fn find_checksums_asset_finds_by_suffix() {
1146        let release = ReleaseInfo {
1147            tag: "v0.5.0".into(),
1148            version: Version::new(0, 5, 0),
1149            assets: vec![
1150                ReleaseAsset {
1151                    name: "cfgd-0.5.0-linux-x86_64.tar.gz".into(),
1152                    download_url: "https://example.com/binary".into(),
1153                    size: 5000,
1154                },
1155                ReleaseAsset {
1156                    name: "cfgd-0.5.0-checksums.txt".into(),
1157                    download_url: "https://example.com/checksums".into(),
1158                    size: 256,
1159                },
1160            ],
1161        };
1162        let asset = find_checksums_asset(&release);
1163        assert!(asset.is_some());
1164        assert_eq!(asset.unwrap().name, "cfgd-0.5.0-checksums.txt");
1165    }
1166
1167    #[test]
1168    fn find_checksums_asset_none_when_missing() {
1169        let release = ReleaseInfo {
1170            tag: "v0.5.0".into(),
1171            version: Version::new(0, 5, 0),
1172            assets: vec![ReleaseAsset {
1173                name: "cfgd-0.5.0-linux-x86_64.tar.gz".into(),
1174                download_url: "https://example.com/binary".into(),
1175                size: 5000,
1176            }],
1177        };
1178        let asset = find_checksums_asset(&release);
1179        assert!(asset.is_none());
1180    }
1181
1182    #[test]
1183    fn version_check_interval_matches_cache_ttl() {
1184        let interval = version_check_interval();
1185        assert_eq!(interval, Duration::from_secs(CACHE_TTL_SECS));
1186    }
1187
    #[test]
    #[cfg(unix)]
    fn extract_tarball_multiple_files_and_dirs() {
        // Exercise extraction of a richer archive: a top-level file, a nested
        // file (so intermediate directories must be created), and a zero-byte
        // file. Contents are verified after extraction; the 0o644/0o755 modes
        // set on the headers are not asserted here.
        use flate2::Compression;
        use flate2::write::GzEncoder;

        let dir = tempfile::tempdir().unwrap();
        let archive_path = dir.path().join("multi.tar.gz");
        let dest = dir.path().join("extracted");
        std::fs::create_dir_all(&dest).unwrap();

        {
            let file = std::fs::File::create(&archive_path).unwrap();
            let enc = GzEncoder::new(file, Compression::default());
            let mut tar_builder = tar::Builder::new(enc);

            // Add a top-level file
            let content_a = b"file A content";
            let mut header_a = tar::Header::new_gnu();
            header_a.set_size(content_a.len() as u64);
            header_a.set_mode(0o644);
            // set_cksum() comes after the other fields so the checksum covers
            // their final values.
            header_a.set_cksum();
            tar_builder
                .append_data(&mut header_a, "file_a.txt", &content_a[..])
                .unwrap();

            // Add a file in a subdirectory
            let content_b = b"nested file B";
            let mut header_b = tar::Header::new_gnu();
            header_b.set_size(content_b.len() as u64);
            header_b.set_mode(0o755);
            header_b.set_cksum();
            tar_builder
                .append_data(&mut header_b, "subdir/file_b.txt", &content_b[..])
                .unwrap();

            // Add an empty file
            let mut header_c = tar::Header::new_gnu();
            header_c.set_size(0);
            header_c.set_mode(0o644);
            header_c.set_cksum();
            tar_builder
                .append_data(&mut header_c, "empty.txt", &[][..])
                .unwrap();

            // finish() writes the tar trailer; leaving the scope flushes the
            // gzip encoder so the archive on disk is complete.
            tar_builder.finish().unwrap();
        }

        extract_tarball(&archive_path, &dest).unwrap();

        // Verify all files extracted correctly
        let a_content = std::fs::read_to_string(dest.join("file_a.txt")).unwrap();
        assert_eq!(a_content, "file A content");

        let b_content = std::fs::read_to_string(dest.join("subdir/file_b.txt")).unwrap();
        assert_eq!(b_content, "nested file B");

        let c_content = std::fs::read_to_string(dest.join("empty.txt")).unwrap();
        assert!(c_content.is_empty(), "empty file should have no content");
    }
1248
1249    #[test]
1250    #[cfg(unix)]
1251    fn extract_tarball_nonexistent_archive_fails() {
1252        let dir = tempfile::tempdir().unwrap();
1253        let dest = dir.path().join("out");
1254        std::fs::create_dir_all(&dest).unwrap();
1255
1256        let result = extract_tarball(&dir.path().join("does-not-exist.tar.gz"), &dest);
1257        assert!(result.is_err(), "should fail for nonexistent archive");
1258    }
1259
1260    #[test]
1261    #[cfg(unix)]
1262    fn extract_tarball_invalid_gz_fails() {
1263        let dir = tempfile::tempdir().unwrap();
1264        let archive_path = dir.path().join("bad.tar.gz");
1265        let dest = dir.path().join("out");
1266        std::fs::create_dir_all(&dest).unwrap();
1267
1268        // Write garbage data that isn't valid gzip
1269        std::fs::write(&archive_path, b"this is not a gzip file").unwrap();
1270
1271        let result = extract_tarball(&archive_path, &dest);
1272        assert!(result.is_err(), "should fail for invalid gzip data");
1273    }
1274
1275    #[test]
1276    fn find_checksums_asset_picks_checksums_txt_over_other_assets() {
1277        let release = ReleaseInfo {
1278            tag: "v1.0.0".into(),
1279            version: Version::new(1, 0, 0),
1280            assets: vec![
1281                ReleaseAsset {
1282                    name: "cfgd-1.0.0-linux-x86_64.tar.gz".into(),
1283                    download_url: "https://example.com/binary".into(),
1284                    size: 10000,
1285                },
1286                ReleaseAsset {
1287                    name: "SHA256SUMS".into(),
1288                    download_url: "https://example.com/sha256sums".into(),
1289                    size: 512,
1290                },
1291                ReleaseAsset {
1292                    name: "cfgd-1.0.0-checksums.txt".into(),
1293                    download_url: "https://example.com/checksums".into(),
1294                    size: 256,
1295                },
1296            ],
1297        };
1298
1299        let asset = find_checksums_asset(&release);
1300        assert!(asset.is_some());
1301        // find_checksums_asset looks for names ending in "-checksums.txt"
1302        assert_eq!(asset.unwrap().name, "cfgd-1.0.0-checksums.txt");
1303        assert_eq!(asset.unwrap().download_url, "https://example.com/checksums");
1304    }
1305
1306    #[test]
1307    fn find_checksums_asset_returns_none_for_non_matching_names() {
1308        // SHA256SUMS does not match the -checksums.txt suffix pattern
1309        let release = ReleaseInfo {
1310            tag: "v2.0.0".into(),
1311            version: Version::new(2, 0, 0),
1312            assets: vec![
1313                ReleaseAsset {
1314                    name: "cfgd-2.0.0-linux-x86_64.tar.gz".into(),
1315                    download_url: "https://example.com/binary".into(),
1316                    size: 10000,
1317                },
1318                ReleaseAsset {
1319                    name: "SHA256SUMS".into(),
1320                    download_url: "https://example.com/sha256sums".into(),
1321                    size: 512,
1322                },
1323            ],
1324        };
1325
1326        let asset = find_checksums_asset(&release);
1327        assert!(
1328            asset.is_none(),
1329            "SHA256SUMS does not end with -checksums.txt, so should not match"
1330        );
1331    }
1332
1333    #[test]
1334    fn find_checksums_asset_empty_assets() {
1335        let release = ReleaseInfo {
1336            tag: "v1.0.0".into(),
1337            version: Version::new(1, 0, 0),
1338            assets: vec![],
1339        };
1340        assert!(find_checksums_asset(&release).is_none());
1341    }
1342
    #[test]
    fn invalidate_cache_removes_file_if_present() {
        // Write a fake cache file into the real cache dir, then invalidate.
        // Skip if the cache dir is unavailable or not writable (CI environments).
        // NOTE(review): this test touches the real user cache directory and is
        // inherently racy against sibling tests that also write/invalidate the
        // cache; the early returns below deliberately turn unavailable-dir and
        // lost-race conditions into silent skips rather than failures.
        let dir = match directories::ProjectDirs::from("dev", "cfgd", "cfgd") {
            Some(d) => d,
            None => return,
        };
        if fs::create_dir_all(dir.cache_dir()).is_err() {
            return; // skip if dir can't be created
        }
        let cache_path = dir.cache_dir().join(CACHE_FILENAME);
        // Minimal payload using the camelCase keys VersionCache serializes to.
        let data = r#"{"checkedAtSecs":0,"latestTag":"v0","latestVersion":"0.0.0","currentVersion":"0.0.0"}"#;
        if fs::write(&cache_path, data).is_err() {
            return; // skip if not writable
        }
        // Another parallel test may race and invalidate the cache between write
        // and this check; skip if the file disappeared (test is still valid).
        if !cache_path.exists() {
            return;
        }

        invalidate_cache();

        assert!(
            !cache_path.exists(),
            "cache file should be removed after invalidation"
        );
    }
1372
1373    #[test]
1374    fn invalidate_cache_no_panic_when_no_file() {
1375        // Ensure calling invalidate when no cache file exists does not panic
1376        invalidate_cache();
1377        invalidate_cache(); // double-call should be safe
1378    }
1379
1380    #[test]
1381    fn restart_daemon_if_running_returns_false_when_no_daemon() {
1382        // In test environments, no daemon is running, so this should return false
1383        let result = restart_daemon_if_running();
1384        assert!(
1385            !result,
1386            "restart_daemon_if_running should return false when no daemon is running"
1387        );
1388    }
1389
1390    #[test]
1391    fn update_check_fields_are_coherent() {
1392        // Construct an UpdateCheck manually and verify field semantics
1393        let check = UpdateCheck {
1394            current: Version::new(0, 1, 0),
1395            latest: Version::new(0, 2, 0),
1396            update_available: true,
1397            release: None,
1398        };
1399        assert!(check.update_available);
1400        assert!(check.latest > check.current);
1401        assert!(check.release.is_none());
1402
1403        let no_update = UpdateCheck {
1404            current: Version::new(0, 2, 0),
1405            latest: Version::new(0, 2, 0),
1406            update_available: false,
1407            release: None,
1408        };
1409        assert!(!no_update.update_available);
1410        assert_eq!(no_update.current, no_update.latest);
1411    }
1412
    #[test]
    fn version_cache_write_and_read_roundtrip() {
        // Test write_version_cache + read_version_cache via the real cache dir
        // (not a tempdir). If the write fails — e.g. unwritable cache dir in a
        // sandboxed CI — the whole body is skipped, so this silently passes
        // in restricted environments.
        let cache = VersionCache {
            checked_at_secs: SystemTime::now()
                .duration_since(SystemTime::UNIX_EPOCH)
                .unwrap()
                .as_secs(),
            // Implausibly high version so a leftover file is recognizable.
            latest_tag: "v99.99.99".into(),
            latest_version: "99.99.99".into(),
            current_version: env!("CARGO_PKG_VERSION").into(),
        };

        // Write the cache
        let write_result = write_version_cache(&cache);
        if write_result.is_ok() {
            // Read it back
            let read = read_version_cache();
            assert!(read.is_some(), "should be able to read back written cache");
            let read = read.unwrap();
            assert_eq!(read.latest_tag, "v99.99.99");
            assert_eq!(read.latest_version, "99.99.99");
            assert_eq!(read.current_version, env!("CARGO_PKG_VERSION"));

            // Clean up by invalidating
            invalidate_cache();
        }
    }
1441
    #[test]
    fn read_version_cache_returns_none_after_invalidation() {
        // Remove any existing cache file first so the read below observes an
        // absent cache.
        invalidate_cache();
        // After invalidation, the cache should be gone (or nonexistent)
        // We can't guarantee it was there before, but we can verify the function
        // doesn't panic and returns None when no file
        // NOTE(review): with parallel test execution a sibling test (e.g. the
        // write/read roundtrip) could re-create the cache between the two
        // calls here, making this assertion flaky — confirm cache-touching
        // tests are serialized.
        let result = read_version_cache();
        assert!(
            result.is_none(),
            "read_version_cache should return None after invalidation"
        );
    }
1454
1455    #[test]
1456    fn cleanup_old_binary_does_not_panic() {
1457        // Just verify it doesn't panic on any platform
1458        cleanup_old_binary();
1459    }
1460
1461    // --- fetch_latest_release_from with mockito ---
1462
    #[test]
    fn fetch_latest_release_from_parses_github_response() {
        // Happy path against a mock GitHub API: a two-asset release payload
        // served at /repos/<repo>/releases/latest is fetched and fully parsed
        // (tag, semver, and both assets with names/sizes intact).
        let mut server = mockito::Server::new();
        let mock = server
            .mock("GET", "/repos/test/repo/releases/latest")
            .with_status(200)
            .with_header("content-type", "application/json")
            .with_body(
                r#"{
                    "tag_name": "v1.2.3",
                    "assets": [
                        {
                            "name": "cfgd-1.2.3-linux-x86_64.tar.gz",
                            "browser_download_url": "https://example.com/download/cfgd-1.2.3-linux-x86_64.tar.gz",
                            "size": 5000000
                        },
                        {
                            "name": "checksums.txt",
                            "browser_download_url": "https://example.com/download/checksums.txt",
                            "size": 512
                        }
                    ]
                }"#,
            )
            .create();

        let result = fetch_latest_release_from(&server.url(), "test/repo", None);
        // Verifies the mocked endpoint was hit with the expected method/path.
        mock.assert();

        let release = result.unwrap();
        assert_eq!(release.tag, "v1.2.3");
        assert_eq!(release.version, Version::new(1, 2, 3));
        assert_eq!(release.assets.len(), 2);
        assert_eq!(release.assets[0].name, "cfgd-1.2.3-linux-x86_64.tar.gz");
        assert_eq!(release.assets[0].size, 5000000);
        assert_eq!(release.assets[1].name, "checksums.txt");
    }
1500
1501    #[test]
1502    fn fetch_latest_release_from_handles_api_error() {
1503        let mut server = mockito::Server::new();
1504        let mock = server
1505            .mock("GET", "/repos/test/repo/releases/latest")
1506            .with_status(404)
1507            .with_body(r#"{"message": "Not Found"}"#)
1508            .create();
1509
1510        let result = fetch_latest_release_from(&server.url(), "test/repo", None);
1511        mock.assert();
1512
1513        assert!(result.is_err());
1514        let err = result.unwrap_err();
1515        let err_str = err.to_string();
1516        assert!(
1517            err_str.contains("404")
1518                || err_str.contains("Not Found")
1519                || err_str.contains("status code"),
1520            "error should indicate API failure: {}",
1521            err_str
1522        );
1523    }
1524
1525    #[test]
1526    fn fetch_latest_release_from_handles_invalid_json() {
1527        let mut server = mockito::Server::new();
1528        let mock = server
1529            .mock("GET", "/repos/test/repo/releases/latest")
1530            .with_status(200)
1531            .with_body("this is not json")
1532            .create();
1533
1534        let result = fetch_latest_release_from(&server.url(), "test/repo", None);
1535        mock.assert();
1536
1537        assert!(result.is_err());
1538    }
1539
1540    #[test]
1541    fn fetch_latest_release_from_handles_missing_tag_name() {
1542        let mut server = mockito::Server::new();
1543        let mock = server
1544            .mock("GET", "/repos/test/repo/releases/latest")
1545            .with_status(200)
1546            .with_body(r#"{"name": "Release", "assets": []}"#)
1547            .create();
1548
1549        let result = fetch_latest_release_from(&server.url(), "test/repo", None);
1550        mock.assert();
1551
1552        assert!(result.is_err());
1553    }
1554
1555    #[test]
1556    fn fetch_latest_release_from_handles_no_assets() {
1557        let mut server = mockito::Server::new();
1558        let mock = server
1559            .mock("GET", "/repos/test/repo/releases/latest")
1560            .with_status(200)
1561            .with_body(r#"{"tag_name": "v2.0.0"}"#)
1562            .create();
1563
1564        let result = fetch_latest_release_from(&server.url(), "test/repo", None);
1565        mock.assert();
1566
1567        let release = result.unwrap();
1568        assert_eq!(release.version, Version::new(2, 0, 0));
1569        assert!(release.assets.is_empty());
1570    }
1571
1572    #[test]
1573    fn fetch_latest_release_from_handles_tag_without_v_prefix() {
1574        let mut server = mockito::Server::new();
1575        let mock = server
1576            .mock("GET", "/repos/test/repo/releases/latest")
1577            .with_status(200)
1578            .with_body(r#"{"tag_name": "3.0.1", "assets": []}"#)
1579            .create();
1580
1581        let result = fetch_latest_release_from(&server.url(), "test/repo", None);
1582        mock.assert();
1583
1584        let release = result.unwrap();
1585        assert_eq!(release.tag, "3.0.1");
1586        assert_eq!(release.version, Version::new(3, 0, 1));
1587    }
1588
1589    #[test]
1590    fn fetch_latest_release_from_handles_prerelease_version() {
1591        let mut server = mockito::Server::new();
1592        let mock = server
1593            .mock("GET", "/repos/test/repo/releases/latest")
1594            .with_status(200)
1595            .with_body(r#"{"tag_name": "v4.0.0-beta.1", "assets": []}"#)
1596            .create();
1597
1598        let result = fetch_latest_release_from(&server.url(), "test/repo", None);
1599        mock.assert();
1600
1601        let release = result.unwrap();
1602        assert_eq!(release.version, Version::parse("4.0.0-beta.1").unwrap());
1603    }
1604
1605    // --- download_to_file with mockito ---
1606
1607    #[test]
1608    fn download_to_file_writes_content_to_path() {
1609        let mut server = mockito::Server::new();
1610        let mock = server
1611            .mock("GET", "/download/test-file")
1612            .with_status(200)
1613            .with_body(b"file content here")
1614            .create();
1615
1616        let dir = tempfile::tempdir().unwrap();
1617        let dest = dir.path().join("downloaded.bin");
1618        let url = format!("{}/download/test-file", server.url());
1619
1620        let result = download_to_file(&url, &dest, None);
1621        mock.assert();
1622
1623        assert!(result.is_ok());
1624        assert_eq!(std::fs::read_to_string(&dest).unwrap(), "file content here");
1625    }
1626
1627    #[test]
1628    fn download_to_file_returns_error_on_http_failure() {
1629        let mut server = mockito::Server::new();
1630        let mock = server
1631            .mock("GET", "/download/missing")
1632            .with_status(404)
1633            .create();
1634
1635        let dir = tempfile::tempdir().unwrap();
1636        let dest = dir.path().join("should-not-exist.bin");
1637        let url = format!("{}/download/missing", server.url());
1638
1639        let result = download_to_file(&url, &dest, None);
1640        mock.assert();
1641
1642        assert!(result.is_err());
1643        assert!(!dest.exists(), "file should not be created on failure");
1644    }
1645
1646    // --- parse_release_json: comprehensive edge cases ---
1647
1648    #[test]
1649    fn parse_release_json_assets_missing_fields_skipped() {
1650        // Assets with missing name or download_url are filtered out by filter_map
1651        let json = r#"{
1652            "tag_name": "v1.0.0",
1653            "assets": [
1654                {
1655                    "name": "valid.tar.gz",
1656                    "browser_download_url": "https://example.com/valid.tar.gz",
1657                    "size": 1024
1658                },
1659                {
1660                    "browser_download_url": "https://example.com/noname.tar.gz",
1661                    "size": 512
1662                },
1663                {
1664                    "name": "nourl.tar.gz",
1665                    "size": 256
1666                }
1667            ]
1668        }"#;
1669        let release = parse_release_json(json).unwrap();
1670        assert_eq!(
1671            release.assets.len(),
1672            1,
1673            "only the valid asset should be included"
1674        );
1675        assert_eq!(release.assets[0].name, "valid.tar.gz");
1676    }
1677
1678    #[test]
1679    fn parse_release_json_asset_size_defaults_to_zero() {
1680        let json = r#"{
1681            "tag_name": "v1.0.0",
1682            "assets": [
1683                {
1684                    "name": "nosize.tar.gz",
1685                    "browser_download_url": "https://example.com/nosize.tar.gz"
1686                }
1687            ]
1688        }"#;
1689        let release = parse_release_json(json).unwrap();
1690        assert_eq!(release.assets.len(), 1);
1691        assert_eq!(
1692            release.assets[0].size, 0,
1693            "missing size should default to 0"
1694        );
1695    }
1696
1697    #[test]
1698    fn parse_release_json_prerelease_tag() {
1699        let json = r#"{
1700            "tag_name": "v2.0.0-rc.1",
1701            "assets": []
1702        }"#;
1703        let release = parse_release_json(json).unwrap();
1704        assert_eq!(release.tag, "v2.0.0-rc.1");
1705        assert_eq!(release.version, Version::parse("2.0.0-rc.1").unwrap());
1706    }
1707
1708    #[test]
1709    fn parse_release_json_build_metadata() {
1710        let json = r#"{
1711            "tag_name": "v1.0.0+build.123",
1712            "assets": []
1713        }"#;
1714        let release = parse_release_json(json).unwrap();
1715        assert_eq!(release.version.major, 1);
1716        assert_eq!(release.version.minor, 0);
1717        assert_eq!(release.version.patch, 0);
1718    }
1719
1720    #[test]
1721    fn parse_release_json_invalid_version_tag() {
1722        let json = r#"{
1723            "tag_name": "not-semver",
1724            "assets": []
1725        }"#;
1726        let result = parse_release_json(json);
1727        assert!(result.is_err());
1728        let msg = result.unwrap_err().to_string();
1729        assert!(
1730            msg.contains("cannot parse release version"),
1731            "should mention version parse error: {msg}"
1732        );
1733    }
1734
1735    #[test]
1736    fn parse_release_json_null_assets_treated_as_empty() {
1737        let json = r#"{
1738            "tag_name": "v1.0.0",
1739            "assets": null
1740        }"#;
1741        let release = parse_release_json(json).unwrap();
1742        assert!(release.assets.is_empty());
1743    }
1744
1745    #[test]
1746    fn parse_release_json_no_assets_field() {
1747        let json = r#"{"tag_name": "v1.0.0"}"#;
1748        let release = parse_release_json(json).unwrap();
1749        assert!(release.assets.is_empty());
1750    }
1751
1752    // --- find_asset_for_platform: empty assets ---
1753
1754    #[test]
1755    fn find_asset_empty_assets_returns_error() {
1756        let release = ReleaseInfo {
1757            tag: "v1.0.0".into(),
1758            version: Version::new(1, 0, 0),
1759            assets: vec![],
1760        };
1761        assert!(find_asset_for_platform(&release).is_err());
1762    }
1763
1764    // --- find_checksums_asset: various patterns ---
1765
1766    #[test]
1767    fn find_checksums_asset_matches_version_prefixed() {
1768        let release = ReleaseInfo {
1769            tag: "v3.0.0".into(),
1770            version: Version::new(3, 0, 0),
1771            assets: vec![
1772                ReleaseAsset {
1773                    name: "cfgd-3.0.0-linux-x86_64.tar.gz".into(),
1774                    download_url: "https://example.com/bin".into(),
1775                    size: 5000,
1776                },
1777                ReleaseAsset {
1778                    name: "cfgd-3.0.0-checksums.txt".into(),
1779                    download_url: "https://example.com/sums".into(),
1780                    size: 128,
1781                },
1782            ],
1783        };
1784        let asset = find_checksums_asset(&release).unwrap();
1785        assert_eq!(asset.name, "cfgd-3.0.0-checksums.txt");
1786        assert_eq!(asset.download_url, "https://example.com/sums");
1787    }
1788
1789    // --- extract_tarball: additional scenarios ---
1790
1791    #[test]
1792    #[cfg(unix)]
1793    fn extract_tarball_empty_archive() {
1794        use flate2::Compression;
1795        use flate2::write::GzEncoder;
1796
1797        let dir = tempfile::tempdir().unwrap();
1798        let archive_path = dir.path().join("empty.tar.gz");
1799        let dest = dir.path().join("out");
1800        std::fs::create_dir_all(&dest).unwrap();
1801
1802        // Create an empty tarball
1803        {
1804            let file = std::fs::File::create(&archive_path).unwrap();
1805            let enc = GzEncoder::new(file, Compression::default());
1806            let mut tar_builder = tar::Builder::new(enc);
1807            tar_builder.finish().unwrap();
1808        }
1809
1810        extract_tarball(&archive_path, &dest).unwrap();
1811        // dest should still exist but be empty (besides . and ..)
1812        let entries: Vec<_> = std::fs::read_dir(&dest).unwrap().collect();
1813        assert!(
1814            entries.is_empty(),
1815            "empty tarball should extract to empty dir"
1816        );
1817    }
1818
1819    #[test]
1820    #[cfg(unix)]
1821    fn extract_tarball_preserves_binary_content() {
1822        use flate2::Compression;
1823        use flate2::write::GzEncoder;
1824
1825        let dir = tempfile::tempdir().unwrap();
1826        let archive_path = dir.path().join("binary.tar.gz");
1827        let dest = dir.path().join("out");
1828        std::fs::create_dir_all(&dest).unwrap();
1829
1830        // Binary data (not valid UTF-8)
1831        let binary_data: Vec<u8> = (0..=255).collect();
1832
1833        {
1834            let file = std::fs::File::create(&archive_path).unwrap();
1835            let enc = GzEncoder::new(file, Compression::default());
1836            let mut tar_builder = tar::Builder::new(enc);
1837            let mut header = tar::Header::new_gnu();
1838            header.set_size(binary_data.len() as u64);
1839            header.set_mode(0o755);
1840            header.set_cksum();
1841            tar_builder
1842                .append_data(&mut header, "binary.bin", &binary_data[..])
1843                .unwrap();
1844            tar_builder.finish().unwrap();
1845        }
1846
1847        extract_tarball(&archive_path, &dest).unwrap();
1848        let extracted = std::fs::read(dest.join("binary.bin")).unwrap();
1849        assert_eq!(
1850            extracted, binary_data,
1851            "binary data should be preserved exactly"
1852        );
1853    }
1854
1855    // --- atomic_replace: edge cases ---
1856
1857    #[test]
1858    fn atomic_replace_with_large_content() {
1859        let dir = tempfile::tempdir().unwrap();
1860        let src = dir.path().join("source");
1861        let tgt = dir.path().join("target");
1862
1863        // Create a ~1MB file
1864        let large_content: Vec<u8> = vec![0xAB; 1024 * 1024];
1865        std::fs::write(&src, &large_content).unwrap();
1866        std::fs::write(&tgt, b"old small content").unwrap();
1867
1868        atomic_replace(&src, &tgt).unwrap();
1869        let result = std::fs::read(&tgt).unwrap();
1870        assert_eq!(result.len(), large_content.len());
1871        assert_eq!(result, large_content);
1872    }
1873
1874    #[test]
1875    fn atomic_replace_target_parent_must_exist() {
1876        let dir = tempfile::tempdir().unwrap();
1877        let src = dir.path().join("source");
1878        std::fs::write(&src, "content").unwrap();
1879
1880        // Target in a non-existent directory
1881        let tgt = dir.path().join("nonexistent").join("subdir").join("target");
1882        let result = atomic_replace(&src, &tgt);
1883        assert!(
1884            result.is_err(),
1885            "should fail when target parent doesn't exist"
1886        );
1887    }
1888
1889    // --- version_cache serialization/deserialization ---
1890
1891    #[test]
1892    fn version_cache_with_prerelease() {
1893        let cache = VersionCache {
1894            checked_at_secs: 1700000000,
1895            latest_tag: "v2.0.0-beta.3".into(),
1896            latest_version: "2.0.0-beta.3".into(),
1897            current_version: "1.9.0".into(),
1898        };
1899
1900        let json = serde_json::to_string(&cache).unwrap();
1901        let restored: VersionCache = serde_json::from_str(&json).unwrap();
1902        assert_eq!(restored.latest_tag, "v2.0.0-beta.3");
1903        assert_eq!(restored.latest_version, "2.0.0-beta.3");
1904
1905        // Verify the prerelease version parses and compares correctly
1906        let latest = Version::parse(&restored.latest_version).unwrap();
1907        let current = Version::parse(&restored.current_version).unwrap();
1908        assert!(latest > current, "2.0.0-beta.3 > 1.9.0");
1909    }
1910
1911    #[test]
1912    fn version_cache_tolerates_extra_json_fields() {
1913        // Forward compatibility: ignore unknown fields
1914        let json = r#"{"checkedAtSecs":100,"latestTag":"v1","latestVersion":"1.0.0","currentVersion":"0.9.0","extraField":"ignored"}"#;
1915        let cache: VersionCache = serde_json::from_str(json).unwrap();
1916        assert_eq!(cache.checked_at_secs, 100);
1917        assert_eq!(cache.latest_version, "1.0.0");
1918    }
1919
1920    // --- cache TTL: zero elapsed ---
1921
1922    #[test]
1923    fn cache_ttl_zero_seconds_ago_is_fresh() {
1924        let now_secs = SystemTime::now()
1925            .duration_since(SystemTime::UNIX_EPOCH)
1926            .unwrap()
1927            .as_secs();
1928
1929        let elapsed = now_secs.saturating_sub(now_secs);
1930        assert!(
1931            elapsed < CACHE_TTL_SECS,
1932            "zero-elapsed cache should be fresh"
1933        );
1934    }
1935
1936    #[test]
1937    fn cache_ttl_exactly_at_boundary_is_fresh() {
1938        let now_secs = SystemTime::now()
1939            .duration_since(SystemTime::UNIX_EPOCH)
1940            .unwrap()
1941            .as_secs();
1942
1943        // Exactly at TTL boundary (== CACHE_TTL_SECS) should NOT be fresh (uses <, not <=)
1944        let at_boundary = now_secs - CACHE_TTL_SECS;
1945        let elapsed = now_secs.saturating_sub(at_boundary);
1946        assert!(
1947            elapsed >= CACHE_TTL_SECS,
1948            "cache exactly at TTL boundary should be expired (uses strict <)"
1949        );
1950    }
1951
1952    // --- strip_tag_prefix ---
1953
1954    #[test]
1955    fn strip_tag_prefix_with_v() {
1956        assert_eq!(strip_tag_prefix("v1.2.3"), "1.2.3");
1957    }
1958
1959    #[test]
1960    fn strip_tag_prefix_without_v() {
1961        assert_eq!(strip_tag_prefix("1.2.3"), "1.2.3");
1962    }
1963
1964    #[test]
1965    fn strip_tag_prefix_empty() {
1966        assert_eq!(strip_tag_prefix(""), "");
1967    }
1968
1969    #[test]
1970    fn strip_tag_prefix_only_v() {
1971        assert_eq!(strip_tag_prefix("v"), "");
1972    }
1973
1974    #[test]
1975    fn strip_tag_prefix_double_v() {
1976        // Only strips one leading 'v'
1977        assert_eq!(strip_tag_prefix("vv1.0.0"), "v1.0.0");
1978    }
1979
1980    // --- parse_checksums edge cases ---
1981
1982    #[test]
1983    fn parse_checksums_extra_whitespace_between_fields() {
1984        let content = "abc123    file.tar.gz\n";
1985        let map = parse_checksums(content);
1986        assert_eq!(map.len(), 1);
1987        // split_whitespace handles multiple spaces
1988        assert_eq!(map.get("file.tar.gz").unwrap(), "abc123");
1989    }
1990
1991    #[test]
1992    fn parse_checksums_tab_separated() {
1993        let content = "abc123\tfile.tar.gz\n";
1994        let map = parse_checksums(content);
1995        assert_eq!(map.len(), 1);
1996        assert_eq!(map.get("file.tar.gz").unwrap(), "abc123");
1997    }
1998
1999    #[test]
2000    fn parse_checksums_duplicate_filename_last_wins() {
2001        let content = "first_hash  file.tar.gz\nsecond_hash  file.tar.gz\n";
2002        let map = parse_checksums(content);
2003        assert_eq!(map.len(), 1);
2004        assert_eq!(
2005            map.get("file.tar.gz").unwrap(),
2006            "second_hash",
2007            "last occurrence should win in HashMap"
2008        );
2009    }
2010
2011    // --- download_to_file with content-length header ---
2012
2013    #[test]
2014    fn download_to_file_with_content_length() {
2015        let mut server = mockito::Server::new();
2016        let body = "known length content";
2017        let mock = server
2018            .mock("GET", "/sized-file")
2019            .with_status(200)
2020            .with_header("content-length", &body.len().to_string())
2021            .with_body(body)
2022            .create();
2023
2024        let dir = tempfile::tempdir().unwrap();
2025        let dest = dir.path().join("sized.bin");
2026        let url = format!("{}/sized-file", server.url());
2027
2028        download_to_file(&url, &dest, None).unwrap();
2029        mock.assert();
2030
2031        let content = std::fs::read_to_string(&dest).unwrap();
2032        assert_eq!(content, "known length content");
2033    }
2034
2035    #[test]
2036    fn download_to_file_binary_content() {
2037        let mut server = mockito::Server::new();
2038        let binary_data: Vec<u8> = (0..=127).collect();
2039        let mock = server
2040            .mock("GET", "/binary")
2041            .with_status(200)
2042            .with_body(&binary_data)
2043            .create();
2044
2045        let dir = tempfile::tempdir().unwrap();
2046        let dest = dir.path().join("binary.bin");
2047        let url = format!("{}/binary", server.url());
2048
2049        download_to_file(&url, &dest, None).unwrap();
2050        mock.assert();
2051
2052        let content = std::fs::read(&dest).unwrap();
2053        assert_eq!(content, binary_data);
2054    }
2055
2056    // --- sha256_file edge cases ---
2057
2058    #[test]
2059    fn sha256_file_empty_file() {
2060        let tmp = tempfile::NamedTempFile::new().unwrap();
2061        // Write nothing (empty file)
2062        let hash = sha256_file(tmp.path()).unwrap();
2063        // SHA256 of empty string
2064        assert_eq!(
2065            hash,
2066            "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
2067        );
2068    }
2069
2070    #[test]
2071    fn sha256_file_nonexistent_returns_error() {
2072        let dir = tempfile::tempdir().unwrap();
2073        let result = sha256_file(&dir.path().join("does-not-exist"));
2074        assert!(result.is_err(), "nonexistent file should error");
2075    }
2076
2077    // --- fetch_latest_release_from: additional error scenarios ---
2078
2079    #[test]
2080    fn fetch_latest_release_from_handles_server_error() {
2081        let mut server = mockito::Server::new();
2082        let mock = server
2083            .mock("GET", "/repos/test/repo/releases/latest")
2084            .with_status(500)
2085            .with_body("Internal Server Error")
2086            .create();
2087
2088        let result = fetch_latest_release_from(&server.url(), "test/repo", None);
2089        mock.assert();
2090
2091        assert!(result.is_err());
2092    }
2093
2094    #[test]
2095    fn fetch_latest_release_from_with_many_assets() {
2096        let mut server = mockito::Server::new();
2097        let mock = server
2098            .mock("GET", "/repos/test/repo/releases/latest")
2099            .with_status(200)
2100            .with_body(
2101                r#"{
2102                    "tag_name": "v5.0.0",
2103                    "assets": [
2104                        {"name": "cfgd-5.0.0-linux-x86_64.tar.gz", "browser_download_url": "https://dl/linux-x64", "size": 10000},
2105                        {"name": "cfgd-5.0.0-linux-aarch64.tar.gz", "browser_download_url": "https://dl/linux-arm64", "size": 9000},
2106                        {"name": "cfgd-5.0.0-darwin-x86_64.tar.gz", "browser_download_url": "https://dl/darwin-x64", "size": 11000},
2107                        {"name": "cfgd-5.0.0-darwin-aarch64.tar.gz", "browser_download_url": "https://dl/darwin-arm64", "size": 10500},
2108                        {"name": "cfgd-5.0.0-windows-x86_64.zip", "browser_download_url": "https://dl/windows-x64", "size": 12000},
2109                        {"name": "cfgd-5.0.0-checksums.txt", "browser_download_url": "https://dl/checksums", "size": 512}
2110                    ]
2111                }"#,
2112            )
2113            .create();
2114
2115        let result = fetch_latest_release_from(&server.url(), "test/repo", None);
2116        mock.assert();
2117
2118        let release = result.unwrap();
2119        assert_eq!(release.version, Version::new(5, 0, 0));
2120        assert_eq!(release.assets.len(), 6, "should parse all 6 assets");
2121
2122        // Verify specific assets
2123        let checksums = release.assets.iter().find(|a| a.name.contains("checksums"));
2124        assert!(checksums.is_some());
2125        assert_eq!(checksums.unwrap().size, 512);
2126    }
2127}