//! Fluid Attacks Core Library (`fluidattacks-core` 0.1.5).
//!
//! Helpers for downloading gzip archives and extracting them into a
//! local git checkout.
use anyhow::{bail, Context, Result};
use std::path::Path;
use tracing::warn;

// Maximum number of download attempts before giving up.
const MAX_RETRIES: u32 = 3;
// Pause between consecutive download attempts, in seconds.
const RETRY_SLEEP_SECS: u64 = 60;

/// Downloads `url`, retrying up to `MAX_RETRIES` times with a fixed
/// pause of `RETRY_SLEEP_SECS` seconds between attempts.
///
/// # Errors
///
/// Returns the error from the last attempt when every attempt fails.
pub async fn download(url: &str) -> Result<Vec<u8>> {
    let mut last_err = None;

    for attempt in 0..MAX_RETRIES {
        match do_download(url).await {
            Ok(data) => return Ok(data),
            Err(e) => {
                // Fix: the previous log said "retrying" unconditionally,
                // which was misleading on the final attempt. Record
                // whether a retry will actually happen instead.
                let will_retry = attempt + 1 < MAX_RETRIES;
                warn!(
                    attempt = attempt + 1,
                    max = MAX_RETRIES,
                    error = %e,
                    will_retry,
                    "download failed"
                );
                last_err = Some(e);
                if will_retry {
                    tokio::time::sleep(std::time::Duration::from_secs(RETRY_SLEEP_SECS)).await;
                }
            }
        }
    }

    // MAX_RETRIES >= 1, so `last_err` is always `Some` here; the
    // fallback message is a defensive guard, not a reachable path.
    Err(last_err.unwrap_or_else(|| anyhow::anyhow!("download failed")))
}

/// Performs a single HTTP GET of `url` and returns the response body.
///
/// # Errors
///
/// Fails when the request cannot be sent, the status is not 2xx, the
/// body cannot be read, or the payload does not look like gzip data.
async fn do_download(url: &str) -> Result<Vec<u8>> {
    let client = reqwest::Client::new();
    let response = client
        .get(url)
        .send()
        .await
        .context("HTTP request failed")?;

    let status = response.status();
    if !status.is_success() {
        bail!("HTTP {status}");
    }

    let body = response.bytes().await.context("reading response body")?;

    // An empty or non-gzip payload usually means we received an error
    // page or a truncated response instead of the expected archive.
    if body.is_empty() || !is_gzip(&body) {
        bail!(
            "response is not a valid gzip file (got {} bytes)",
            body.len()
        );
    }

    Ok(body.to_vec())
}

/// Returns `true` when `data` begins with the two-byte gzip magic
/// number (`0x1f 0x8b`).
fn is_gzip(data: &[u8]) -> bool {
    data.starts_with(&[0x1f, 0x8b])
}

/// Downloads a gzip archive from `url`, extracts it, moves the result to
/// `dest`, resets the git repo, and deletes gitignored files.
///
/// Returns `true` on success. Every failure is logged via `warn!` and
/// reported as `false`, so callers can treat the checkout as unavailable.
pub async fn download_and_extract_repo(url: &str, dest: &Path, git_ignore: &[String]) -> bool {
    // Ensure the destination's parent exists before doing expensive work.
    if let Some(parent) = dest.parent() {
        if let Err(e) = std::fs::create_dir_all(parent) {
            warn!(error = %e, "failed to create parent dir");
            return false;
        }
    }

    let data = match download(url).await {
        Ok(d) => d,
        Err(e) => {
            warn!(error = %e, "failed to download");
            return false;
        }
    };

    // Extract into a temp dir first so a failed extraction never leaves
    // a half-written tree at `dest`.
    let tmp_dir = match tempfile::tempdir() {
        Ok(d) => d,
        Err(e) => {
            warn!(error = %e, "failed to create temp dir");
            return false;
        }
    };

    if let Err(e) = super::tar::extract(&data, tmp_dir.path()) {
        warn!(error = %e, "failed to extract archive");
        return false;
    }

    // The archive is expected to contain a single top-level directory.
    // Fix: `read_dir` iteration order is platform-dependent, so picking
    // `dirs[0]` was nondeterministic when several directories exist;
    // sort so the choice is stable, and warn about the ambiguity.
    let extracted_dir = match std::fs::read_dir(tmp_dir.path()) {
        Ok(entries) => {
            let mut dirs: Vec<_> = entries
                .filter_map(|e| e.ok())
                .filter(|e| e.file_type().map(|t| t.is_dir()).unwrap_or(false))
                .map(|e| e.path())
                .collect();
            if dirs.is_empty() {
                warn!("no directory found in extracted archive");
                return false;
            }
            if dirs.len() > 1 {
                warn!(
                    count = dirs.len(),
                    "multiple top-level directories in archive; using the first alphabetically"
                );
            }
            dirs.sort();
            dirs.swap_remove(0)
        }
        Err(e) => {
            warn!(error = %e, "failed to read temp dir");
            return false;
        }
    };

    // Replace any previous checkout; a removal error here is ignored
    // because the rename/copy below will surface a real problem.
    if dest.exists() {
        let _ = std::fs::remove_dir_all(dest);
    }
    // `rename` fails across filesystems (the temp dir may live on
    // tmpfs), so fall back to a recursive copy.
    if let Err(e) = std::fs::rename(&extracted_dir, dest) {
        if let Err(e2) = super::fs::copy_dir_all(&extracted_dir, dest) {
            warn!(rename_error = %e, copy_error = %e2, "failed to move extracted dir");
            return false;
        }
    }

    let dest_str = dest.to_string_lossy().to_string();
    // Best-effort: mark the path as a git safe.directory in case file
    // ownership differs from the current user.
    let _ = super::cli::set_safe_directory().await;
    // A failed reset leaves the checkout in an unknown state; remove it
    // so callers never operate on a corrupt tree.
    if super::cli::reset_hard(&dest_str).await.is_err() {
        let _ = std::fs::remove_dir_all(dest);
        return false;
    }
    // Best-effort cleanup; failures here do not invalidate the checkout.
    let _ = super::fs::remove_symlinks(dest);
    let _ = super::fs::delete_gitignored_files(dest, git_ignore);

    true
}