use crate::error::{IoError, Result};
use std::collections::HashMap;
use std::path::Path;
use std::time::{Duration, SystemTime};
/// Metadata for a single object stored in a cloud bucket.
///
/// Returned by `CloudProvider::get_metadata`; `create_mock_metadata` builds
/// synthetic instances for tests.
#[derive(Debug, Clone)]
pub struct FileMetadata {
    /// Object name (key/path within the bucket).
    pub name: String,
    /// Object size in bytes.
    pub size: u64,
    /// Last-modification timestamp reported by the provider.
    pub last_modified: SystemTime,
    /// MIME content type, if known.
    pub content_type: Option<String>,
    /// Provider-supplied entity tag, if any.
    pub etag: Option<String>,
    /// Arbitrary user-defined key/value metadata.
    pub metadata: HashMap<String, String>,
}
/// Connection settings for Amazon S3 or any S3-compatible object store.
#[derive(Debug, Clone)]
pub struct S3Config {
    /// Target bucket name.
    pub bucket: String,
    /// AWS region identifier (e.g. "us-east-1").
    pub region: String,
    /// Access key ID used for authentication.
    pub access_key: String,
    /// Secret access key used for authentication.
    pub secret_key: String,
    /// Custom endpoint URL for S3-compatible servers (e.g. MinIO);
    /// `None` selects the default AWS endpoint.
    pub endpoint: Option<String>,
    /// Use path-style addressing (`host/bucket/key`) instead of
    /// virtual-hosted style (`bucket.host/key`).
    pub path_style: bool,
}

impl S3Config {
    /// Creates a configuration with the default endpoint and
    /// virtual-hosted addressing.
    pub fn new(bucket: &str, region: &str, access_key: &str, secret_key: &str) -> Self {
        Self {
            bucket: bucket.to_string(),
            region: region.to_string(),
            access_key: access_key.to_string(),
            secret_key: secret_key.to_string(),
            endpoint: None,
            path_style: false,
        }
    }

    /// Overrides the endpoint URL (for S3-compatible servers such as MinIO).
    pub fn with_endpoint(mut self, endpoint: &str) -> Self {
        self.endpoint = Some(endpoint.to_string());
        self
    }

    /// Enables or disables path-style addressing.
    // Renamed parameter `pathstyle` -> `path_style` for snake_case convention.
    pub fn with_path_style(mut self, path_style: bool) -> Self {
        self.path_style = path_style;
        self
    }
}
/// Connection settings for Google Cloud Storage.
#[derive(Debug, Clone)]
pub struct GcsConfig {
    /// Target bucket name.
    pub bucket: String,
    /// GCP project identifier associated with the bucket.
    pub project_id: String,
    /// Path to a credentials file on disk, if configured.
    pub credentials_path: Option<String>,
    /// Inline credentials JSON, if configured.
    pub credentials_json: Option<String>,
}

impl GcsConfig {
    /// Creates a configuration with no credentials attached yet.
    pub fn new(bucket: &str, project_id: &str) -> Self {
        Self {
            bucket: bucket.to_string(),
            project_id: project_id.to_string(),
            credentials_path: None,
            credentials_json: None,
        }
    }

    /// Points the configuration at a credentials file on disk.
    pub fn with_credentials_file(self, path: &str) -> Self {
        Self {
            credentials_path: Some(path.to_string()),
            ..self
        }
    }

    /// Embeds credentials JSON directly into the configuration.
    pub fn with_credentials_json(self, json: &str) -> Self {
        Self {
            credentials_json: Some(json.to_string()),
            ..self
        }
    }
}
/// Connection settings for Azure Blob Storage.
#[derive(Debug, Clone)]
pub struct AzureConfig {
    /// Storage account name.
    pub account: String,
    /// Container name within the storage account.
    pub container: String,
    /// Shared access key used for authentication.
    pub access_key: String,
    /// Custom endpoint URL (e.g. a local emulator); `None` selects the
    /// default Azure endpoint.
    pub endpoint: Option<String>,
}

impl AzureConfig {
    /// Creates a configuration targeting the default Azure endpoint.
    pub fn new(account: &str, container: &str, access_key: &str) -> Self {
        Self {
            account: account.to_string(),
            container: container.to_string(),
            access_key: access_key.to_string(),
            endpoint: None,
        }
    }

    /// Overrides the endpoint URL.
    pub fn with_endpoint(self, endpoint: &str) -> Self {
        Self {
            endpoint: Some(endpoint.to_string()),
            ..self
        }
    }
}
/// Selects a cloud storage backend together with its connection settings.
///
/// File operations on this enum dispatch to per-provider implementations;
/// the SDK-backed code paths are gated behind the `aws-sdk-s3`,
/// `google-cloud-storage`, and `azure-storage-blobs` cargo features.
#[derive(Debug, Clone)]
pub enum CloudProvider {
    /// Amazon S3 (or an S3-compatible endpoint).
    S3(S3Config),
    /// Google Cloud Storage.
    GCS(GcsConfig),
    /// Azure Blob Storage.
    Azure(AzureConfig),
}
impl CloudProvider {
    /// Uploads the file at `local_path` to `remote_path` on the configured
    /// backend.
    ///
    /// # Errors
    /// Returns [`IoError::ConfigError`] when the SDK feature for the selected
    /// provider is not compiled in.
    pub async fn upload_file<P: AsRef<Path>>(
        &self,
        local_path: P,
        remote_path: &str,
    ) -> Result<()> {
        match self {
            CloudProvider::S3(config) => Self::s3_upload(config, local_path, remote_path).await,
            CloudProvider::GCS(config) => Self::gcs_upload(config, local_path, remote_path).await,
            CloudProvider::Azure(config) => {
                Self::azure_upload(config, local_path, remote_path).await
            }
        }
    }

    /// Downloads `remote_path` from the configured backend into `local_path`.
    ///
    /// # Errors
    /// Returns [`IoError::ConfigError`] when the SDK feature for the selected
    /// provider is not compiled in.
    pub async fn download_file<P: AsRef<Path>>(
        &self,
        remote_path: &str,
        local_path: P,
    ) -> Result<()> {
        match self {
            CloudProvider::S3(config) => Self::s3_download(config, remote_path, local_path).await,
            CloudProvider::GCS(config) => Self::gcs_download(config, remote_path, local_path).await,
            CloudProvider::Azure(config) => {
                Self::azure_download(config, remote_path, local_path).await
            }
        }
    }

    /// Lists object names under `path`.
    ///
    /// # Errors
    /// Returns [`IoError::ConfigError`] when the SDK feature for the selected
    /// provider is not compiled in.
    pub async fn list_files(&self, path: &str) -> Result<Vec<String>> {
        match self {
            CloudProvider::S3(config) => Self::s3_list(config, path).await,
            CloudProvider::GCS(config) => Self::gcs_list(config, path).await,
            CloudProvider::Azure(config) => Self::azure_list(config, path).await,
        }
    }

    /// Reports whether an object exists at `path`.
    ///
    /// # Errors
    /// Returns [`IoError::ConfigError`] when the SDK feature for the selected
    /// provider is not compiled in.
    pub async fn file_exists(&self, path: &str) -> Result<bool> {
        match self {
            CloudProvider::S3(config) => Self::s3_exists(config, path).await,
            CloudProvider::GCS(config) => Self::gcs_exists(config, path).await,
            CloudProvider::Azure(config) => Self::azure_exists(config, path).await,
        }
    }

    /// Fetches metadata for the object at `path`.
    ///
    /// # Errors
    /// Returns [`IoError::ConfigError`] when the SDK feature for the selected
    /// provider is not compiled in.
    pub async fn get_metadata(&self, path: &str) -> Result<FileMetadata> {
        match self {
            CloudProvider::S3(config) => Self::s3_metadata(config, path).await,
            CloudProvider::GCS(config) => Self::gcs_metadata(config, path).await,
            CloudProvider::Azure(config) => Self::azure_metadata(config, path).await,
        }
    }

    /// Deletes the object at `path`.
    ///
    /// # Errors
    /// Returns [`IoError::ConfigError`] when the SDK feature for the selected
    /// provider is not compiled in.
    pub async fn delete_file(&self, path: &str) -> Result<()> {
        match self {
            CloudProvider::S3(config) => Self::s3_delete(config, path).await,
            CloudProvider::GCS(config) => Self::gcs_delete(config, path).await,
            CloudProvider::Azure(config) => Self::azure_delete(config, path).await,
        }
    }

    // --- S3 backend ---
    // All helpers below are associated functions (no unused `&self`) and all
    // currently-unused parameters carry a leading underscore so the crate
    // builds warning-free with or without the SDK features.

    /// S3 upload. TODO: wire up the real SDK call; the feature-enabled
    /// branch is a placeholder that reports success without transferring data.
    async fn s3_upload<P: AsRef<Path>>(
        _config: &S3Config,
        _local_path: P,
        _remote_path: &str,
    ) -> Result<()> {
        #[cfg(feature = "aws-sdk-s3")]
        {
            Ok(())
        }
        #[cfg(not(feature = "aws-sdk-s3"))]
        Err(IoError::ConfigError(
            "AWS S3 support requires 'aws-sdk-s3' feature".to_string(),
        ))
    }

    /// S3 download. TODO: placeholder; see `s3_upload`.
    async fn s3_download<P: AsRef<Path>>(
        _config: &S3Config,
        _path: &str,
        _local_path: P,
    ) -> Result<()> {
        #[cfg(feature = "aws-sdk-s3")]
        {
            Ok(())
        }
        #[cfg(not(feature = "aws-sdk-s3"))]
        Err(IoError::ConfigError(
            "AWS S3 support requires 'aws-sdk-s3' feature".to_string(),
        ))
    }

    /// S3 listing. TODO: placeholder returning an empty listing.
    async fn s3_list(_config: &S3Config, _path: &str) -> Result<Vec<String>> {
        #[cfg(feature = "aws-sdk-s3")]
        {
            Ok(vec![])
        }
        #[cfg(not(feature = "aws-sdk-s3"))]
        Err(IoError::ConfigError(
            "AWS S3 support requires 'aws-sdk-s3' feature".to_string(),
        ))
    }

    /// S3 existence check. TODO: placeholder always reporting `false`.
    async fn s3_exists(_config: &S3Config, _path: &str) -> Result<bool> {
        #[cfg(feature = "aws-sdk-s3")]
        {
            Ok(false)
        }
        #[cfg(not(feature = "aws-sdk-s3"))]
        Err(IoError::ConfigError(
            "AWS S3 support requires 'aws-sdk-s3' feature".to_string(),
        ))
    }

    /// S3 metadata fetch. TODO: placeholder returning zeroed metadata.
    async fn s3_metadata(_config: &S3Config, _path: &str) -> Result<FileMetadata> {
        #[cfg(feature = "aws-sdk-s3")]
        {
            Ok(FileMetadata {
                name: _path.to_string(),
                size: 0,
                last_modified: SystemTime::now(),
                content_type: None,
                etag: None,
                metadata: HashMap::new(),
            })
        }
        #[cfg(not(feature = "aws-sdk-s3"))]
        Err(IoError::ConfigError(
            "AWS S3 support requires 'aws-sdk-s3' feature".to_string(),
        ))
    }

    /// S3 delete. TODO: placeholder; see `s3_upload`.
    async fn s3_delete(_config: &S3Config, _path: &str) -> Result<()> {
        #[cfg(feature = "aws-sdk-s3")]
        {
            Ok(())
        }
        #[cfg(not(feature = "aws-sdk-s3"))]
        Err(IoError::ConfigError(
            "AWS S3 support requires 'aws-sdk-s3' feature".to_string(),
        ))
    }

    // --- GCS backend ---

    /// GCS upload. TODO: placeholder; reports success without transferring.
    async fn gcs_upload<P: AsRef<Path>>(
        _config: &GcsConfig,
        _local_path: P,
        _remote_path: &str,
    ) -> Result<()> {
        #[cfg(feature = "google-cloud-storage")]
        {
            Ok(())
        }
        #[cfg(not(feature = "google-cloud-storage"))]
        Err(IoError::ConfigError(
            "Google Cloud Storage support requires 'google-cloud-storage' feature".to_string(),
        ))
    }

    /// GCS download. TODO: placeholder; see `gcs_upload`.
    async fn gcs_download<P: AsRef<Path>>(
        _config: &GcsConfig,
        _path: &str,
        _local_path: P,
    ) -> Result<()> {
        #[cfg(feature = "google-cloud-storage")]
        {
            Ok(())
        }
        #[cfg(not(feature = "google-cloud-storage"))]
        Err(IoError::ConfigError(
            "Google Cloud Storage support requires 'google-cloud-storage' feature".to_string(),
        ))
    }

    /// GCS listing. TODO: placeholder returning an empty listing.
    async fn gcs_list(_config: &GcsConfig, _path: &str) -> Result<Vec<String>> {
        #[cfg(feature = "google-cloud-storage")]
        {
            Ok(vec![])
        }
        #[cfg(not(feature = "google-cloud-storage"))]
        Err(IoError::ConfigError(
            "Google Cloud Storage support requires 'google-cloud-storage' feature".to_string(),
        ))
    }

    /// GCS existence check. TODO: placeholder always reporting `false`.
    async fn gcs_exists(_config: &GcsConfig, _path: &str) -> Result<bool> {
        #[cfg(feature = "google-cloud-storage")]
        {
            Ok(false)
        }
        #[cfg(not(feature = "google-cloud-storage"))]
        Err(IoError::ConfigError(
            "Google Cloud Storage support requires 'google-cloud-storage' feature".to_string(),
        ))
    }

    /// GCS metadata fetch. TODO: placeholder returning zeroed metadata.
    async fn gcs_metadata(_config: &GcsConfig, _path: &str) -> Result<FileMetadata> {
        #[cfg(feature = "google-cloud-storage")]
        {
            Ok(FileMetadata {
                name: _path.to_string(),
                size: 0,
                last_modified: SystemTime::now(),
                content_type: None,
                etag: None,
                metadata: HashMap::new(),
            })
        }
        #[cfg(not(feature = "google-cloud-storage"))]
        Err(IoError::ConfigError(
            "Google Cloud Storage support requires 'google-cloud-storage' feature".to_string(),
        ))
    }

    /// GCS delete. TODO: placeholder; see `gcs_upload`.
    async fn gcs_delete(_config: &GcsConfig, _path: &str) -> Result<()> {
        #[cfg(feature = "google-cloud-storage")]
        {
            Ok(())
        }
        #[cfg(not(feature = "google-cloud-storage"))]
        Err(IoError::ConfigError(
            "Google Cloud Storage support requires 'google-cloud-storage' feature".to_string(),
        ))
    }

    // --- Azure backend ---

    /// Azure upload. TODO: placeholder; reports success without transferring.
    async fn azure_upload<P: AsRef<Path>>(
        _config: &AzureConfig,
        _local_path: P,
        _remote_path: &str,
    ) -> Result<()> {
        #[cfg(feature = "azure-storage-blobs")]
        {
            Ok(())
        }
        #[cfg(not(feature = "azure-storage-blobs"))]
        Err(IoError::ConfigError(
            "Azure Blob Storage support requires 'azure-storage-blobs' feature".to_string(),
        ))
    }

    /// Azure download. TODO: placeholder; see `azure_upload`.
    async fn azure_download<P: AsRef<Path>>(
        _config: &AzureConfig,
        _path: &str,
        _local_path: P,
    ) -> Result<()> {
        #[cfg(feature = "azure-storage-blobs")]
        {
            Ok(())
        }
        #[cfg(not(feature = "azure-storage-blobs"))]
        Err(IoError::ConfigError(
            "Azure Blob Storage support requires 'azure-storage-blobs' feature".to_string(),
        ))
    }

    /// Azure listing. TODO: placeholder returning an empty listing.
    async fn azure_list(_config: &AzureConfig, _path: &str) -> Result<Vec<String>> {
        #[cfg(feature = "azure-storage-blobs")]
        {
            Ok(vec![])
        }
        #[cfg(not(feature = "azure-storage-blobs"))]
        Err(IoError::ConfigError(
            "Azure Blob Storage support requires 'azure-storage-blobs' feature".to_string(),
        ))
    }

    /// Azure existence check. TODO: placeholder always reporting `false`.
    async fn azure_exists(_config: &AzureConfig, _path: &str) -> Result<bool> {
        #[cfg(feature = "azure-storage-blobs")]
        {
            Ok(false)
        }
        #[cfg(not(feature = "azure-storage-blobs"))]
        Err(IoError::ConfigError(
            "Azure Blob Storage support requires 'azure-storage-blobs' feature".to_string(),
        ))
    }

    /// Azure metadata fetch. TODO: placeholder returning zeroed metadata.
    async fn azure_metadata(_config: &AzureConfig, _path: &str) -> Result<FileMetadata> {
        #[cfg(feature = "azure-storage-blobs")]
        {
            Ok(FileMetadata {
                name: _path.to_string(),
                size: 0,
                last_modified: SystemTime::now(),
                content_type: None,
                etag: None,
                metadata: HashMap::new(),
            })
        }
        #[cfg(not(feature = "azure-storage-blobs"))]
        Err(IoError::ConfigError(
            "Azure Blob Storage support requires 'azure-storage-blobs' feature".to_string(),
        ))
    }

    /// Azure delete. TODO: placeholder; see `azure_upload`.
    async fn azure_delete(_config: &AzureConfig, _path: &str) -> Result<()> {
        #[cfg(feature = "azure-storage-blobs")]
        {
            Ok(())
        }
        #[cfg(not(feature = "azure-storage-blobs"))]
        Err(IoError::ConfigError(
            "Azure Blob Storage support requires 'azure-storage-blobs' feature".to_string(),
        ))
    }
}
/// Builds a synthetic [`FileMetadata`] record for tests and examples.
///
/// The timestamp is the current time, the content type is fixed to
/// `application/octet-stream`, and the etag is derived from `name`.
#[allow(dead_code)]
pub fn create_mock_metadata(name: &str, size: u64) -> FileMetadata {
    let etag = format!("etag-{}", name);
    FileMetadata {
        name: name.to_owned(),
        size,
        last_modified: SystemTime::now(),
        content_type: Some(String::from("application/octet-stream")),
        etag: Some(etag),
        metadata: HashMap::default(),
    }
}
/// Checks that the given provider configuration carries every mandatory
/// field.
///
/// # Errors
/// Returns [`IoError::ConfigError`] naming the first missing field found.
#[allow(dead_code)]
pub fn validate_config(provider: &CloudProvider) -> Result<()> {
    // Small helper so each check reads as a single line.
    fn fail(msg: &str) -> Result<()> {
        Err(IoError::ConfigError(msg.to_string()))
    }
    match provider {
        CloudProvider::S3(c) => {
            if c.bucket.is_empty() {
                return fail("S3 bucket name cannot be empty");
            }
            if c.region.is_empty() {
                return fail("S3 region cannot be empty");
            }
            if c.access_key.is_empty() || c.secret_key.is_empty() {
                return fail("S3 credentials cannot be empty");
            }
        }
        CloudProvider::GCS(c) => {
            if c.bucket.is_empty() {
                return fail("GCS bucket name cannot be empty");
            }
            if c.project_id.is_empty() {
                return fail("GCS project ID cannot be empty");
            }
            // Either a credentials file or inline JSON is acceptable.
            if c.credentials_path.is_none() && c.credentials_json.is_none() {
                return fail("GCS credentials must be provided");
            }
        }
        CloudProvider::Azure(c) => {
            if c.account.is_empty() {
                return fail("Azure account name cannot be empty");
            }
            if c.container.is_empty() {
                return fail("Azure container name cannot be empty");
            }
            if c.access_key.is_empty() {
                return fail("Azure access key cannot be empty");
            }
        }
    }
    Ok(())
}
/// Generates a provider-shaped "signed" URL for `path` that nominally
/// expires after `expiry`.
///
/// NOTE(review): the signature is a truncated SHA-256 of the path and the
/// expiry timestamp — no secret key is involved, so this is NOT a
/// cryptographically valid pre-signed URL. Suitable only for mocks/tests;
/// real signing must go through the provider SDKs.
///
/// # Errors
/// Returns [`IoError::Other`] if the system clock is before the Unix epoch.
#[allow(dead_code)]
pub fn generate_signed_url(
    provider: &CloudProvider,
    path: &str,
    expiry: Duration,
) -> Result<String> {
    use sha2::{Digest, Sha256};
    use std::time::{SystemTime, UNIX_EPOCH};
    let timestamp = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .map_err(|e| IoError::Other(format!("System time error: {}", e)))?
        .as_secs()
        + expiry.as_secs();
    let mut hasher = Sha256::new();
    hasher.update(path.as_bytes());
    hasher.update(timestamp.to_string().as_bytes());
    let signature = hex::encode(hasher.finalize());
    // hex output is ASCII, so byte-slicing is safe; 16 chars = 64 bits.
    let short_sig = &signature[0..16];
    // Bug fix: ensure the object path is separated from the host/container
    // with a '/'; previously a bare path like "file.txt" produced malformed
    // URLs such as "https://bucket.s3.region.amazonaws.comfile.txt".
    let object_path = if path.starts_with('/') {
        path.to_string()
    } else {
        format!("/{}", path)
    };
    let signed_url = match provider {
        CloudProvider::S3(config) => {
            format!(
                "https://{}.s3.{}.amazonaws.com{}?X-Amz-Expires={}&X-Amz-Signature={}",
                config.bucket,
                config.region,
                object_path,
                expiry.as_secs(),
                short_sig
            )
        }
        CloudProvider::GCS(config) => {
            format!(
                "https://storage.googleapis.com/{}{}?Expires={}&Signature={}",
                config.bucket, object_path, timestamp, short_sig
            )
        }
        CloudProvider::Azure(config) => {
            format!(
                "https://{}.blob.core.windows.net/{}{}?se={}&sig={}",
                config.account, config.container, object_path, timestamp, short_sig
            )
        }
    };
    Ok(signed_url)
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_s3_config_creation() {
        let cfg = S3Config::new("my-bucket", "us-east-1", "access-key", "secret-key");
        assert_eq!(cfg.bucket, "my-bucket");
        assert_eq!(cfg.region, "us-east-1");
        assert_eq!(cfg.access_key, "access-key");
        assert_eq!(cfg.secret_key, "secret-key");
        assert!(cfg.endpoint.is_none());
        assert!(!cfg.path_style);
    }

    #[test]
    fn test_s3_config_with_endpoint() {
        let cfg = S3Config::new("bucket", "region", "key", "secret")
            .with_endpoint("http://localhost:9000")
            .with_path_style(true);
        assert_eq!(cfg.endpoint.as_deref(), Some("http://localhost:9000"));
        assert!(cfg.path_style);
    }

    #[test]
    fn test_gcs_config_creation() {
        let cfg = GcsConfig::new("my-bucket", "my-project");
        assert_eq!(cfg.bucket, "my-bucket");
        assert_eq!(cfg.project_id, "my-project");
        assert!(cfg.credentials_path.is_none());
        assert!(cfg.credentials_json.is_none());
    }

    #[test]
    fn test_gcs_config_with_credentials() {
        let cfg = GcsConfig::new("bucket", "project")
            .with_credentials_file("/path/to/creds.json")
            .with_credentials_json(r#"{"type": "service_account"}"#);
        assert_eq!(cfg.credentials_path.as_deref(), Some("/path/to/creds.json"));
        assert_eq!(
            cfg.credentials_json.as_deref(),
            Some(r#"{"type": "service_account"}"#)
        );
    }

    #[test]
    fn test_azure_config_creation() {
        let cfg = AzureConfig::new("account", "container", "access-key");
        assert_eq!(cfg.account, "account");
        assert_eq!(cfg.container, "container");
        assert_eq!(cfg.access_key, "access-key");
        assert!(cfg.endpoint.is_none());
    }

    #[test]
    fn test_azure_config_with_endpoint() {
        let cfg = AzureConfig::new("account", "container", "key")
            .with_endpoint("http://localhost:10000");
        assert_eq!(cfg.endpoint.as_deref(), Some("http://localhost:10000"));
    }

    #[test]
    fn test_validate_config() {
        // Valid and invalid variants for each of the three providers.
        let s3_ok = CloudProvider::S3(S3Config::new("bucket", "region", "key", "secret"));
        assert!(validate_config(&s3_ok).is_ok());
        let s3_bad = CloudProvider::S3(S3Config::new("", "region", "key", "secret"));
        assert!(validate_config(&s3_bad).is_err());

        let gcs_ok = CloudProvider::GCS(
            GcsConfig::new("bucket", "project").with_credentials_file("/path/to/creds.json"),
        );
        assert!(validate_config(&gcs_ok).is_ok());
        let gcs_bad = CloudProvider::GCS(GcsConfig::new("bucket", "project"));
        assert!(validate_config(&gcs_bad).is_err());

        let azure_ok = CloudProvider::Azure(AzureConfig::new("account", "container", "key"));
        assert!(validate_config(&azure_ok).is_ok());
        let azure_bad = CloudProvider::Azure(AzureConfig::new("", "container", "key"));
        assert!(validate_config(&azure_bad).is_err());
    }

    #[test]
    fn test_file_metadata_creation() {
        let meta = create_mock_metadata("test-file.txt", 1024);
        assert_eq!(meta.name, "test-file.txt");
        assert_eq!(meta.size, 1024);
        assert_eq!(meta.content_type.as_deref(), Some("application/octet-stream"));
        assert_eq!(meta.etag.as_deref(), Some("etag-test-file.txt"));
    }

    #[test]
    fn test_signed_url_generation() {
        let provider = CloudProvider::S3(S3Config::new("bucket", "region", "key", "secret"));
        let url = generate_signed_url(&provider, "test-file.txt", Duration::from_secs(3600));
        assert!(url.is_ok());
        assert!(!url.expect("Operation failed").is_empty());
    }

    // Every operation must fail with a ConfigError when no SDK feature is on.
    #[cfg(all(
        feature = "async",
        not(any(
            feature = "aws-sdk-s3",
            feature = "google-cloud-storage",
            feature = "azure-storage-blobs"
        ))
    ))]
    #[tokio::test]
    async fn test_cloud_provider_operations_without_features() {
        let provider = CloudProvider::S3(S3Config::new("bucket", "region", "key", "secret"));
        assert!(provider.upload_file("local.txt", "remote.txt").await.is_err());
        assert!(provider.download_file("remote.txt", "local.txt").await.is_err());
        assert!(provider.list_files("path/").await.is_err());
        assert!(provider.file_exists("test.txt").await.is_err());
        assert!(provider.get_metadata("test.txt").await.is_err());
        assert!(provider.delete_file("test.txt").await.is_err());
    }
}