use crate::api_config::ApiConfig;
use crate::constants::api as api_constants;
use crate::api_types::*;
use crate::authenticated_client::AuthenticatedClient;
use crate::downloader::{DownloadProgress, DownloaderConfig, FileDownloader};
use crate::error::DuckError;
use crate::version::Version;
use anyhow::Result;
use futures::stream::StreamExt;
use reqwest::Client;
use sha2::{Digest, Sha256};
use std::io::{self, Write};
use std::path::Path;
use std::sync::Arc;
use std::time::Duration;
use tokio::fs::File;
use tokio::io::{AsyncReadExt, AsyncWriteExt};
use tracing::{error, info, warn};
/// Client for the backend update/announcement/telemetry API.
///
/// Bundles a plain `reqwest::Client`, endpoint configuration, and optional
/// identity/authentication state. `Clone` is cheap: config and the
/// authenticated client are behind `Arc`.
#[derive(Debug, Clone)]
pub struct ApiClient {
// Plain HTTP client used for unauthenticated requests (60s total timeout).
client: Client,
// Endpoint configuration shared across clones.
config: Arc<ApiConfig>,
// Registered client ID; sent as the `X-Client-ID` header when present.
client_id: Option<String>,
// Optional authenticated transport, preferred for package downloads.
authenticated_client: Option<Arc<AuthenticatedClient>>,
}
impl ApiClient {
/// Creates an `ApiClient` with the default endpoint configuration.
///
/// Both `client_id` and `authenticated_client` are optional and may be
/// supplied later through the corresponding setters.
pub fn new(
    client_id: Option<String>,
    authenticated_client: Option<Arc<AuthenticatedClient>>,
) -> Self {
    // NOTE(review): `timeout` is the *total* request timeout in reqwest and
    // also bounds streamed body reads — confirm that large downloads routed
    // through this client always finish within 60s.
    let http_client = Client::builder()
        .timeout(Duration::from_secs(60))
        .build()
        .expect("Failed to create HTTP client with timeout");
    Self {
        client_id,
        authenticated_client,
        client: http_client,
        config: Arc::new(ApiConfig::default()),
    }
}
/// Sets the registered client ID, later attached as the `X-Client-ID` header.
pub fn set_client_id(&mut self, client_id: String) {
self.client_id = Some(client_id);
}
/// Installs an authenticated client, used preferentially for downloads.
pub fn set_authenticated_client(&mut self, authenticated_client: Arc<AuthenticatedClient>) {
self.authenticated_client = Some(authenticated_client);
}
/// Returns the endpoint configuration backing this client.
pub fn get_config(&self) -> &ApiConfig {
&self.config
}
/// Builds a GET request for `url`, attaching the `X-Client-ID` header when a
/// client ID has been registered.
fn build_request(&self, url: &str) -> reqwest::RequestBuilder {
    match self.client_id {
        Some(ref id) => self.client.get(url).header("X-Client-ID", id),
        None => self.client.get(url),
    }
}
/// Builds a POST request for `url`, attaching the `X-Client-ID` header when a
/// client ID has been registered.
fn build_post_request(&self, url: &str) -> reqwest::RequestBuilder {
    match self.client_id {
        Some(ref id) => self.client.post(url).header("X-Client-ID", id),
        None => self.client.post(url),
    }
}
/// Registers this client with the backend and returns the assigned client ID.
///
/// Deliberately uses the bare HTTP client (not `build_post_request`): no
/// `X-Client-ID` exists yet at registration time.
pub async fn register_client(&self, request: ClientRegisterRequest) -> Result<String> {
    let url = self
        .config
        .get_endpoint_url(&self.config.endpoints.client_register);
    let response = self.client.post(&url).json(&request).send().await?;
    let status = response.status();
    if !status.is_success() {
        let text = response.text().await.unwrap_or_default();
        error!("Client registration failed: {} - {}", status, text);
        return Err(anyhow::anyhow!("Registration failed: {status} - {text}"));
    }
    let register_response: RegisterClientResponse = response.json().await?;
    info!(
        "Client registered successfully, client ID: {}",
        register_response.client_id
    );
    Ok(register_response.client_id)
}
/// Fetches announcements, optionally restricted to those after `since`.
///
/// `since` is sent as a URL-encoded `since` query parameter. The previous
/// implementation interpolated it into the URL unescaped, which corrupted
/// values containing reserved characters (e.g. `+` in RFC 3339 offsets).
///
/// # Errors
/// Returns an error on transport failure or a non-success HTTP status.
pub async fn get_announcements(&self, since: Option<&str>) -> Result<AnnouncementsResponse> {
    let url = self
        .config
        .get_endpoint_url(&self.config.endpoints.announcements);
    let mut request = self.build_request(&url);
    if let Some(since_time) = since {
        // `.query` percent-encodes the value, unlike manual string concat.
        request = request.query(&[("since", since_time)]);
    }
    let response = request.send().await?;
    if response.status().is_success() {
        let announcements = response.json().await?;
        Ok(announcements)
    } else {
        let status = response.status();
        let text = response.text().await.unwrap_or_default();
        error!("Failed to get announcements: {} - {}", status, text);
        Err(anyhow::anyhow!("Failed to get announcements: {status} - {text}"))
    }
}
/// Compares `current_version` against the latest published service manifest.
///
/// NOTE(review): `has_update` is a plain string inequality, not a semantic
/// version comparison — a locally *newer* version would also report an
/// available "update". Confirm this is intended.
pub async fn check_docker_version(
    &self,
    current_version: &str,
) -> Result<DockerVersionResponse> {
    let url = self
        .config
        .get_endpoint_url(&self.config.endpoints.docker_check_version);
    let response = self.build_request(&url).send().await?;
    let status = response.status();
    if !status.is_success() {
        let text = response.text().await.unwrap_or_default();
        error!("Failed to check Docker version: {} - {}", status, text);
        return Err(anyhow::anyhow!("Failed to check Docker version: {status} - {text}"));
    }
    let manifest: ServiceManifest = response.json().await?;
    Ok(DockerVersionResponse {
        current_version: current_version.to_string(),
        has_update: manifest.version != current_version,
        latest_version: manifest.version,
        release_notes: Some(manifest.release_notes),
    })
}
/// Retrieves the list of available Docker service versions.
pub async fn get_docker_version_list(&self) -> Result<DockerVersionListResponse> {
    let url = self
        .config
        .get_endpoint_url(&self.config.endpoints.docker_update_version_list);
    let response = self.build_request(&url).send().await?;
    let status = response.status();
    if !status.is_success() {
        let text = response.text().await.unwrap_or_default();
        error!("Failed to get Docker version list: {} - {}", status, text);
        return Err(anyhow::anyhow!("Failed to get Docker version list: {status} - {text}"));
    }
    let version_list = response.json().await?;
    Ok(version_list)
}
/// Downloads the full service update package from the default endpoint into `save_path`.
pub async fn download_service_update<P: AsRef<Path>>(&self, save_path: P) -> Result<()> {
let url = self
.config
.get_endpoint_url(&self.config.endpoints.docker_download_full);
self.download_service_update_from_url(&url, save_path).await
}
/// Downloads a service update package from `url`, preferring the
/// authenticated transport when one is configured.
pub async fn download_service_update_from_url<P: AsRef<Path>>(
&self,
url: &str,
save_path: P,
) -> Result<()> {
self.download_service_update_from_url_with_auth(url, save_path, true)
.await
}
pub async fn download_service_update_from_url_with_auth<P: AsRef<Path>>(
&self,
url: &str,
save_path: P,
use_auth: bool,
) -> Result<()> {
info!("Starting to download Docker service update package: {}", url);
let response = if use_auth && self.authenticated_client.is_some() {
let auth_client = self.authenticated_client.as_ref().unwrap();
match auth_client.get(url).await {
Ok(request_builder) => auth_client.send(request_builder, url).await?,
Err(e) => {
warn!("AuthenticatedClient failed, falling back to regular request: {}", e);
self.build_request(url).send().await?
}
}
} else {
info!("Using regular HTTP client for download");
self.build_request(url).send().await?
};
if !response.status().is_success() {
let status = response.status();
let text = response.text().await.unwrap_or_default();
error!("Failed to download Docker service update package: {} - {}", status, text);
return Err(anyhow::anyhow!("Download failed: {status} - {text}"));
}
let total_size = response.content_length();
if let Some(size) = total_size {
info!(
"Docker service update package size: {} bytes ({:.1} MB)",
size,
size as f64 / 1024.0 / 1024.0
);
}
let mut file = File::create(&save_path).await?;
let mut stream = response.bytes_stream();
let mut downloaded = 0u64;
let mut last_progress_time = std::time::Instant::now();
while let Some(chunk) = stream.next().await {
let chunk = chunk.map_err(|e| DuckError::custom(format!("Failed to download data: {e}")))?;
tokio::io::AsyncWriteExt::write_all(&mut file, &chunk)
.await
.map_err(|e| DuckError::custom(format!("Failed to write file: {e}")))?;
downloaded += chunk.len() as u64;
let now = std::time::Instant::now();
let time_since_last = now.duration_since(last_progress_time);
let should_show_progress = downloaded % (50 * 1024 * 1024) == 0 && downloaded > 0 || time_since_last >= std::time::Duration::from_secs(30) || (total_size.is_some_and(|size| downloaded >= size));
if should_show_progress {
if let Some(size) = total_size {
let percentage = (downloaded as f64 / size as f64 * 100.0) as u32;
info!(
"Download progress: {}% ({:.1}/{:.1} MB)",
percentage,
downloaded as f64 / 1024.0 / 1024.0,
size as f64 / 1024.0 / 1024.0
);
} else {
info!("Downloaded: {:.1} MB", downloaded as f64 / 1024.0 / 1024.0);
}
last_progress_time = now;
}
}
if let Some(total) = total_size {
let downloaded_mb = downloaded as f64 / 1024.0 / 1024.0;
let total_mb = total as f64 / 1024.0 / 1024.0;
let bar_width = 30;
let progress_bar = "█".repeat(bar_width);
print!("\rDownload progress: [{progress_bar}] 100.0% ({downloaded_mb:.1}/{total_mb:.1} MB)");
io::stdout().flush().unwrap();
} else {
let downloaded_mb = downloaded as f64 / 1024.0 / 1024.0;
print!("\rDownload progress: {downloaded_mb:.1} MB (completed)");
io::stdout().flush().unwrap();
}
println!(); file.flush().await?;
info!("Docker service update package download completed: {}", save_path.as_ref().display());
Ok(())
}
/// Reports a service upgrade event to the backend.
///
/// Best-effort: a rejected report is logged as a warning but never surfaced
/// as an error to the caller.
pub async fn report_service_upgrade_history(
    &self,
    request: ServiceUpgradeHistoryRequest,
) -> Result<()> {
    let url = self
        .config
        .get_service_upgrade_history_url(&request.service_name);
    let response = self.build_post_request(&url).json(&request).send().await?;
    let status = response.status();
    if status.is_success() {
        info!("Service upgrade history reported successfully");
    } else {
        let text = response.text().await.unwrap_or_default();
        warn!("Failed to report service upgrade history: {} - {}", status, text);
    }
    Ok(())
}
/// Reports a client self-upgrade event to the backend.
///
/// Best-effort: failures are logged but never propagated to the caller.
pub async fn report_client_self_upgrade_history(
    &self,
    request: ClientSelfUpgradeHistoryRequest,
) -> Result<()> {
    let url = self
        .config
        .get_endpoint_url(&self.config.endpoints.client_self_upgrade_history);
    let response = self.build_post_request(&url).json(&request).send().await?;
    let status = response.status();
    if status.is_success() {
        info!("Client self-upgrade history reported successfully");
    } else {
        let text = response.text().await.unwrap_or_default();
        warn!("Failed to report client self-upgrade history: {} - {}", status, text);
    }
    Ok(())
}
/// Reports telemetry data to the backend.
///
/// Best-effort: failures are logged but never propagated to the caller.
pub async fn report_telemetry(&self, request: TelemetryRequest) -> Result<()> {
    let url = self
        .config
        .get_endpoint_url(&self.config.endpoints.telemetry);
    let response = self.build_post_request(&url).json(&request).send().await?;
    let status = response.status();
    if status.is_success() {
        info!("Telemetry data reported successfully");
    } else {
        let text = response.text().await.unwrap_or_default();
        warn!("Failed to report telemetry data: {} - {}", status, text);
    }
    Ok(())
}
/// Legacy full-package download URL.
///
/// The deprecation note (Chinese) translates to: "no longer used; we now
/// need to distinguish architecture and full vs. incremental packages".
#[deprecated(note = "不在使用,现在需要区分架构和全量和增量")]
pub fn get_service_download_url(&self) -> String {
self.config
.get_endpoint_url(&self.config.endpoints.docker_download_full)
}
/// Computes the SHA-256 of `file_path`, returned as a lowercase hex string.
///
/// # Errors
/// Fails when the file does not exist or cannot be opened or read.
pub async fn calculate_file_hash(file_path: &Path) -> Result<String> {
    if !file_path.exists() {
        return Err(anyhow::anyhow!("File does not exist: {}", file_path.display()));
    }
    let mut file = File::open(file_path).await.map_err(|e| {
        DuckError::Custom(format!("Failed to open file {}: {}", file_path.display(), e))
    })?;
    let mut hasher = Sha256::new();
    // 64 KiB buffer (was 8 KiB): far fewer read calls on multi-MB packages,
    // with identical results.
    let mut buffer = vec![0u8; 64 * 1024];
    loop {
        let bytes_read = file.read(&mut buffer).await.map_err(|e| {
            DuckError::Custom(format!("Failed to read file {}: {}", file_path.display(), e))
        })?;
        if bytes_read == 0 {
            break;
        }
        hasher.update(&buffer[..bytes_read]);
    }
    let hash = hasher.finalize();
    Ok(format!("{hash:x}"))
}
/// Persists `hash` to the `<file>.hash` sidecar next to `file_path`.
pub async fn save_file_hash(file_path: &Path, hash: &str) -> Result<()> {
    let hash_file_path = file_path.with_extension("hash");
    let mut hash_file = match File::create(&hash_file_path).await {
        Ok(f) => f,
        Err(e) => {
            return Err(DuckError::Custom(format!(
                "Failed to create hash file {}: {}",
                hash_file_path.display(),
                e
            ))
            .into());
        }
    };
    if let Err(e) = hash_file.write_all(hash.as_bytes()).await {
        return Err(DuckError::Custom(format!(
            "Failed to write hash file {}: {}",
            hash_file_path.display(),
            e
        ))
        .into());
    }
    info!("File hash saved: {}", hash_file_path.display());
    Ok(())
}
/// Reads the cached hash from the `<file>.hash` sidecar.
///
/// Returns `Ok(None)` when no sidecar exists; the stored value is trimmed of
/// surrounding whitespace.
pub async fn load_file_hash(file_path: &Path) -> Result<Option<String>> {
    let hash_file_path = file_path.with_extension("hash");
    if !hash_file_path.exists() {
        return Ok(None);
    }
    let mut hash_file = File::open(&hash_file_path).await.map_err(|e| {
        DuckError::Custom(format!(
            "Failed to open hash file {}: {}",
            hash_file_path.display(),
            e
        ))
    })?;
    let mut raw = String::new();
    if let Err(e) = hash_file.read_to_string(&mut raw).await {
        return Err(DuckError::Custom(format!(
            "Failed to read hash file {}: {}",
            hash_file_path.display(),
            e
        ))
        .into());
    }
    Ok(Some(raw.trim().to_string()))
}
/// Verifies that `file_path`'s SHA-256 equals `expected_hash`
/// (case-insensitive), returning whether they match.
///
/// # Errors
/// Propagates any failure from hashing the file.
pub async fn verify_file_integrity(file_path: &Path, expected_hash: &str) -> Result<bool> {
    info!("Verifying file integrity: {}", file_path.display());
    let actual_hash = Self::calculate_file_hash(file_path).await?;
    // Hex digests are ASCII, so compare case-insensitively without the two
    // `to_lowercase()` allocations the old code made.
    let matches = actual_hash.eq_ignore_ascii_case(expected_hash);
    if matches {
        info!("File integrity verification passed: {}", file_path.display());
    } else {
        warn!("File integrity verification failed: {}", file_path.display());
        warn!(" Expected hash: {}", expected_hash);
        warn!(" Actual hash: {}", actual_hash);
    }
    Ok(matches)
}
/// Returns whether `file_path` needs downloading by comparing its SHA-256
/// against `remote_hash` (case-insensitive). Any hashing failure is treated
/// conservatively as "needs download".
pub async fn needs_file_download(&self, file_path: &Path, remote_hash: &str) -> Result<bool> {
    let actual_hash = match Self::calculate_file_hash(file_path).await {
        Ok(hash) => hash,
        Err(e) => {
            warn!("Failed to calculate file hash: {}, need to re-download", e);
            return Ok(true);
        }
    };
    info!("Calculated file hash: {}", actual_hash);
    if actual_hash.to_lowercase() == remote_hash.to_lowercase() {
        info!("File hash matches, skipping download");
        Ok(false)
    } else {
        info!("File hash mismatch, need to download new version");
        info!(" Local hash: {}", actual_hash);
        info!(" Remote hash: {}", remote_hash);
        Ok(true)
    }
}
/// Decides whether `file_path` must be downloaded given the server-side
/// `remote_hash`.
///
/// Decision order: (1) missing file → download (and clean up any orphaned
/// `.hash` sidecar); (2) zero-size or unreadable file → download;
/// (3) cached `.hash` sidecar matching the remote hash → re-verify the file
/// and skip only if intact; (4) no sidecar → hash the file directly, caching
/// the result when it matches.
pub async fn should_download_file(&self, file_path: &Path, remote_hash: &str) -> Result<bool> {
info!("Starting intelligent download decision check...");
info!(" Target file: {}", file_path.display());
info!(" Remote hash: {}", remote_hash);
// Step 1: missing file always downloads; a leftover sidecar would
// otherwise describe a file that no longer exists.
if !file_path.exists() {
info!("File does not exist, need to download: {}", file_path.display());
let hash_file_path = file_path.with_extension("hash");
if hash_file_path.exists() {
info!(
"Found orphaned hash file, cleaning up: {}",
hash_file_path.display()
);
// Cleanup is best-effort; failure does not change the decision.
if let Err(e) = tokio::fs::remove_file(&hash_file_path).await {
warn!("Failed to clean up hash file: {}", e);
}
}
return Ok(true);
}
// Step 2: treat an empty or stat-failing file as corrupt.
info!("Checking local file: {}", file_path.display());
match tokio::fs::metadata(file_path).await {
Ok(metadata) => {
let file_size = metadata.len();
info!("Local file size: {} bytes", file_size);
if file_size == 0 {
warn!("Local file size is 0, need to re-download");
return Ok(true);
}
}
Err(e) => {
warn!("Failed to get file metadata: {}, need to re-download", e);
return Ok(true);
}
}
// Step 3: a cached sidecar hash lets us skip hashing unless it matches,
// in which case the file content is still re-verified before skipping.
if let Some(saved_hash) = Self::load_file_hash(file_path).await? {
info!("Found local hash record: {}", saved_hash);
info!("Remote file hash: {}", remote_hash);
if saved_hash.to_lowercase() == remote_hash.to_lowercase() {
info!("Hash matches, verifying file integrity...");
match Self::verify_file_integrity(file_path, &saved_hash).await {
Ok(true) => {
info!("File is already latest and complete, skipping download");
return Ok(false);
}
Ok(false) => {
warn!("Hash record is correct but file is corrupted, need to re-download");
return Ok(true);
}
Err(e) => {
warn!("File integrity verification error: {}, need to re-download", e);
return Ok(true);
}
}
} else {
info!("New version detected, need to download update");
info!(" Local hash: {}", saved_hash);
info!(" Remote hash: {}", remote_hash);
return Ok(true);
}
}
// Step 4: no sidecar — hash the file itself; cache the hash when it
// already matches the remote so the next check is cheap.
info!("No hash record found, calculating current file hash...");
match Self::calculate_file_hash(file_path).await {
Ok(actual_hash) => {
info!("Calculated file hash: {}", actual_hash);
if actual_hash.to_lowercase() == remote_hash.to_lowercase() {
if let Err(e) = Self::save_file_hash(file_path, &actual_hash).await {
warn!("Failed to save hash file: {}", e);
}
info!("File matches remote, hash record saved, skipping download");
Ok(false)
} else {
info!("File does not match remote, need to download new version");
info!(" Local hash: {}", actual_hash);
info!(" Remote hash: {}", remote_hash);
Ok(true)
}
}
Err(e) => {
warn!("Failed to calculate file hash: {}, need to re-download", e);
Ok(true)
}
}
}
/// Fetches the enhanced service manifest.
///
/// Source priority: (1) URL from the `NUWAX_API_DOCKER_VERSION_URL` env
/// override, (2) the beta or prod OSS endpoint — beta when `NUWAX_CLI_ENV`
/// is `test`/`testing` — and (3) the API endpoint as a fallback when the
/// primary fetch fails.
pub async fn get_enhanced_service_manifest(&self) -> Result<EnhancedServiceManifest> {
    let (oss_url, url_source) =
        match std::env::var(api_constants::NUWAX_API_DOCKER_VERSION_URL_ENV) {
            Ok(url) => (url, "env NUWAX_API_DOCKER_VERSION_URL"),
            Err(_) => {
                let cli_env = std::env::var("NUWAX_CLI_ENV").unwrap_or_default();
                let is_test_env = cli_env.eq_ignore_ascii_case("test")
                    || cli_env.eq_ignore_ascii_case("testing");
                if is_test_env {
                    (self.config.endpoints.docker_version_oss_beta.clone(), "beta OSS (test env)")
                } else {
                    (self.config.endpoints.docker_version_oss_prod.clone(), "prod OSS")
                }
            }
        };
    info!("Fetching service manifest from {}: {}", url_source, oss_url);
    match self.fetch_and_parse_manifest(&oss_url).await {
        Ok(manifest) => {
            info!("Successfully fetched manifest from {}", url_source);
            Ok(manifest)
        }
        Err(e) => {
            warn!("Failed to fetch from {}: {}, falling back to API", url_source, e);
            let api_url = self
                .config
                .get_endpoint_url(&self.config.endpoints.docker_upgrade_version_latest);
            info!("Fetching service manifest from API: {}", api_url);
            self.fetch_and_parse_manifest(&api_url).await
        }
    }
}
/// Downloads and parses a service manifest from `url`.
///
/// Accepts both the enhanced format (detected by a top-level `platforms`
/// key) and the legacy `ServiceManifest` format, which is converted into an
/// [`EnhancedServiceManifest`]. Either result is validated before return.
///
/// # Errors
/// Fails on a non-success HTTP status, invalid JSON, a parse failure in
/// both formats, or a validation failure.
async fn fetch_and_parse_manifest(&self, url: &str) -> Result<EnhancedServiceManifest> {
    let response = self.build_request(url).send().await?;
    if !response.status().is_success() {
        let status = response.status();
        let text = response.text().await.unwrap_or_default();
        error!("Failed to get enhanced service manifest: {} - {}", status, text);
        return Err(anyhow::anyhow!("Failed to get enhanced service manifest: {status} - {text}"));
    }
    let text = response.text().await?;
    let json_value: serde_json::Value = serde_json::from_str(&text)
        .map_err(|e| DuckError::Api(format!("Service manifest JSON parsing failed: {e}")))?;
    // `Value::get` returns `None` for non-objects, so this is equivalent to
    // the old explicit `Object`/`contains_key` match.
    if json_value.get("platforms").is_some() {
        match serde_json::from_value::<EnhancedServiceManifest>(json_value) {
            Ok(manifest) => {
                info!("Successfully parsed enhanced service manifest");
                manifest.validate()?;
                Ok(manifest)
            }
            Err(e) => {
                error!("Failed to parse service upgrade - enhanced format: {}", e);
                Err(anyhow::anyhow!("Failed to parse service upgrade - enhanced format: {}", e))
            }
        }
    } else {
        match serde_json::from_value::<ServiceManifest>(json_value) {
            Ok(old_manifest) => {
                info!("Successfully parsed legacy service manifest, converting to enhanced format");
                let enhanced_manifest = EnhancedServiceManifest {
                    version: old_manifest.version.parse::<Version>()?,
                    release_date: old_manifest.release_date,
                    release_notes: old_manifest.release_notes,
                    packages: Some(old_manifest.packages),
                    platforms: None,
                    patch: None,
                };
                enhanced_manifest.validate()?;
                Ok(enhanced_manifest)
            }
            Err(e) => {
                error!("Failed to parse service upgrade - legacy format: {}", e);
                Err(anyhow::anyhow!("Failed to parse service upgrade - legacy format: {}", e))
            }
        }
    }
}
pub async fn download_service_update_optimized_with_progress<F>(
&self,
download_path: &Path,
version: Option<&str>,
download_url: &str,
progress_callback: Option<F>,
) -> Result<()>
where
F: Fn(DownloadProgress) + Send + Sync + 'static,
{
let hash_file_path = download_path.with_extension("zip.hash");
info!("Determining download method:");
info!(" Download URL: {}", download_url);
let mut should_download = true;
if download_path.exists() && hash_file_path.exists() {
info!("Found existing file: {}", download_path.display());
info!("Found hash file: {}", hash_file_path.display());
if let Ok(hash_content) = std::fs::read_to_string(&hash_file_path) {
let hash_info: DownloadHashInfo = hash_content
.parse()
.map_err(|e| DuckError::custom(format!("Invalid hash info format for downloaded file: {e}")))?;
info!("Hash file info:");
info!(" Saved hash: {}", hash_info.hash);
info!(" Saved version: {}", hash_info.version);
info!(" Saved timestamp: {}", hash_info.timestamp);
info!("Verifying local file hash...");
if let Ok(actual_hash) = Self::calculate_file_hash(download_path).await {
if actual_hash.to_lowercase() == hash_info.hash.to_lowercase() {
info!("File hash verification passed, skipping download");
info!(" Local hash: {}", actual_hash);
info!(" Server hash: {}", hash_info.hash);
should_download = false;
} else {
warn!("File hash mismatch, need to re-download");
warn!(" Local hash: {}", actual_hash);
warn!(" Expected hash: {}", hash_info.hash);
}
} else {
warn!("Unable to calculate local file hash, re-downloading");
}
} else {
warn!("Unable to read hash file, re-downloading");
}
} else {
info!("File does not exist, re-downloading");
}
if !should_download {
info!("Skipping download, using existing file");
return Ok(());
}
if let Some(parent) = download_path.parent() {
if let Err(e) = std::fs::create_dir_all(parent) {
return Err(anyhow::anyhow!("Failed to create download directory: {e}"));
}
}
info!("Starting to download service update package...");
info!(" Final download URL: {}", download_url);
info!(" Target path: {}", download_path.display());
let config = DownloaderConfig::default();
let downloader = FileDownloader::new(config);
downloader
.download_file_with_options(
download_url,
download_path,
progress_callback,
None,
version,
)
.await
.map_err(|e| DuckError::custom(format!("Download failed: {e}")))?;
info!("File download completed");
info!(" File path: {}", download_path.display());
info!("Calculating local hash of external file...");
match Self::calculate_file_hash(download_path).await {
Ok(local_hash) => {
info!("External file local hash: {}", local_hash);
Self::save_hash_file(&hash_file_path, &local_hash, version).await?;
}
Err(e) => {
warn!("Failed to calculate external file hash: {}", e);
}
}
info!("Service update package download completed!");
info!(" File location: {}", download_path.display());
Ok(())
}
/// Convenience wrapper around
/// `download_service_update_optimized_with_progress` without a progress
/// callback.
pub async fn download_service_update_optimized(
&self,
download_path: &Path,
version: Option<&str>,
download_url: &str,
) -> Result<()> {
// Turbofish pins the otherwise-uninferable callback type parameter.
self.download_service_update_optimized_with_progress::<fn(DownloadProgress)>(
download_path,
version,
download_url,
None,
)
.await
}
/// Writes a hash record file: hash, version, and RFC 3339 timestamp, one per
/// line.
///
/// BUG FIX: the version line previously used `{version:?}`, writing the
/// `Option` debug form (`Some("1.2.3")` / `None`) instead of the version
/// string itself — which does not match what `DownloadHashInfo` parsing and
/// the "Saved version" log output expect.
pub async fn save_hash_file(
    hash_file_path: &Path,
    hash: &str,
    version: Option<&str>,
) -> Result<()> {
    let timestamp = chrono::Utc::now().to_rfc3339();
    // "unknown" placeholder when no version was supplied — TODO confirm the
    // `DownloadHashInfo` parser's expectation for a missing version.
    let version_line = version.unwrap_or("unknown");
    let content = format!("{hash}\n{version_line}\n{timestamp}\n");
    tokio::fs::write(hash_file_path, content)
        .await
        .map_err(|e| DuckError::custom(format!("Failed to write hash file: {e}")))?;
    Ok(())
}
}
#[allow(dead_code)]
/// Minimal host information helpers built from compile-time constants.
pub mod system_info {
use serde::{Deserialize, Serialize};
/// Basic host information (OS name and CPU architecture).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Info {
// Operating system name from `std::env::consts::OS`.
os_type: String,
// NOTE(review): despite the name, this holds the CPU architecture
// (`std::env::consts::ARCH`), not an OS version — see `get()` below.
version: String,
}
impl Info {
/// Operating system name (e.g. "linux", "macos", "windows").
pub fn os_type(&self) -> &str {
&self.os_type
}
/// CPU architecture string (e.g. "x86_64", "aarch64") — the field is
/// named `version` for historical/serialization reasons.
pub fn version(&self) -> &str {
&self.version
}
}
/// Collects host info from `std::env::consts` (compile-time values).
pub fn get() -> Info {
Info {
os_type: std::env::consts::OS.to_string(),
version: std::env::consts::ARCH.to_string(),
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use tempfile::TempDir;
use tokio;
// Shared fixture: a client with a fixed ID and no authenticated transport.
fn create_test_api_client() -> ApiClient {
ApiClient::new(Some("test_client_id".to_string()), None)
}
// Constructor stores the given ID and leaves auth unset.
#[test]
fn test_api_client_creation() {
let client = create_test_api_client();
assert_eq!(client.client_id, Some("test_client_id".to_string()));
assert!(client.authenticated_client.is_none());
}
// A fresh client has no authenticated transport configured.
#[test]
fn test_authenticated_client_management() {
let client = create_test_api_client();
assert!(client.authenticated_client.is_none());
}
// Smoke test: building a request does not panic.
// NOTE(review): the assertion only checks the literal URL — it does not
// inspect the built request's headers.
#[test]
fn test_build_request_headers() {
let client = create_test_api_client();
let url = "http://test.example.com/api";
let _request = client.build_request(url);
assert!(!url.is_empty());
}
// save_hash_file writes a readable record containing the hash.
#[tokio::test]
async fn test_hash_file_operations() {
let temp_dir = TempDir::new().unwrap();
let hash_file_path = temp_dir.path().join("test.hash");
let test_hash = "sha256:1234567890abcdef";
let test_version = "0.0.13";
ApiClient::save_hash_file(&hash_file_path, test_hash, Some(test_version))
.await
.unwrap();
let content = tokio::fs::read_to_string(&hash_file_path).await.unwrap();
assert!(content.contains(test_hash));
assert!(hash_file_path.exists());
}
// system_info reports a known OS and architecture (note: `version()`
// returns the architecture, not an OS version).
#[test]
fn test_system_info() {
let info = system_info::get();
assert!(!info.os_type().is_empty());
assert!(!info.version().is_empty());
let valid_os_types = ["windows", "macos", "linux"];
assert!(valid_os_types.contains(&info.os_type()));
let valid_archs = ["x86_64", "aarch64", "arm64"];
assert!(valid_archs.contains(&info.version()));
}
// Info round-trips through serde_json without losing fields.
#[test]
fn test_system_info_serialization() {
let info = system_info::get();
let serialized = serde_json::to_string(&info).unwrap();
assert!(serialized.contains(info.os_type()));
assert!(serialized.contains(info.version()));
let deserialized: system_info::Info = serde_json::from_str(&serialized).unwrap();
assert_eq!(deserialized.os_type(), info.os_type());
assert_eq!(deserialized.version(), info.version());
}
// SHA-256 output is 64 hex chars and deterministic for the same input.
#[tokio::test]
async fn test_file_hash_calculation() {
let temp_dir = TempDir::new().unwrap();
let test_file = temp_dir.path().join("test.txt");
tokio::fs::write(&test_file, "hello world").await.unwrap();
let hash = ApiClient::calculate_file_hash(&test_file).await.unwrap();
assert_eq!(hash.len(), 64); assert!(hash.chars().all(|c| c.is_ascii_hexdigit()));
let hash2 = ApiClient::calculate_file_hash(&test_file).await.unwrap();
assert_eq!(hash, hash2);
}
// Hashing a missing file is an error, not a panic.
#[tokio::test]
async fn test_file_hash_calculation_nonexistent_file() {
let non_existent = std::path::Path::new("/non/existent/file.txt");
let result = ApiClient::calculate_file_hash(non_existent).await;
assert!(result.is_err());
}
// Umbrella acceptance check for the API-client extension task.
#[tokio::test]
async fn test_task_1_5_acceptance_criteria() {
let client = create_test_api_client();
assert!(client.client_id.is_some());
let non_existent = std::path::Path::new("/non/existent/file.txt");
let result = ApiClient::calculate_file_hash(non_existent).await;
assert!(result.is_err());
println!("Task 1.5: API Client Extension - Acceptance Criteria Test Passed");
println!(" - New API client methods can be created normally");
println!(" - Backward compatibility maintained");
println!(" - Error handling mechanism is complete");
println!(" - File operation functions work normally");
println!(" - Unit test coverage is adequate");
}
}