pub mod error;
pub mod file_operations;
pub mod patch_processor;
pub use error::PatchExecutorError;
pub use file_operations::FileOperationExecutor;
pub use patch_processor::PatchProcessor;
use crate::api_types::{PatchOperations, PatchPackageInfo};
use std::path::{Path, PathBuf};
use tracing::{debug, error, info, warn};
/// Applies incremental patch packages to a working directory.
///
/// Orchestrates two collaborators: `PatchProcessor` (download, integrity
/// verification, extraction) and `FileOperationExecutor` (file/directory
/// replacement and deletion). When backup mode is enabled, failed patch
/// applications can be rolled back automatically.
pub struct PatchExecutor {
// Root directory that patch operations are applied under.
work_dir: PathBuf,
// Performs the concrete replace/delete file operations (and their backups).
file_executor: FileOperationExecutor,
// Downloads, verifies, and extracts patch packages.
patch_processor: PatchProcessor,
// True once `enable_backup()` has succeeded; gates automatic rollback.
backup_enabled: bool,
}
impl PatchExecutor {
    /// Creates a new executor rooted at `work_dir`.
    ///
    /// Backup mode starts disabled; call [`Self::enable_backup`] to turn it on.
    ///
    /// # Errors
    /// Returns an error if the file executor or patch processor fails to
    /// initialize.
    pub fn new(work_dir: PathBuf) -> Result<Self, PatchExecutorError> {
        let file_executor = FileOperationExecutor::new(work_dir.clone())?;
        let patch_processor = PatchProcessor::new()?;
        Ok(Self {
            work_dir,
            file_executor,
            patch_processor,
            backup_enabled: false,
        })
    }

    /// Enables backup mode so that a failed patch can be rolled back.
    ///
    /// # Errors
    /// Propagates any error from the underlying file executor while enabling
    /// its backup support.
    pub fn enable_backup(&mut self) -> Result<(), PatchExecutorError> {
        self.file_executor.enable_backup()?;
        self.backup_enabled = true;
        info!("Patch execution backup mode enabled");
        Ok(())
    }

    /// Applies an incremental patch end-to-end, reporting progress in [0, 1].
    ///
    /// On failure, if the error requires rollback and backup mode is enabled,
    /// an automatic rollback is attempted; a rollback failure is reported as a
    /// combined `rollback_failed` error, otherwise the original error is
    /// returned.
    ///
    /// # Errors
    /// Any error from validation, download, verification, extraction, or the
    /// file operations themselves.
    pub async fn apply_patch<F>(
        &mut self,
        patch_info: &PatchPackageInfo,
        operations: &PatchOperations,
        progress_callback: F,
    ) -> Result<(), PatchExecutorError>
    where
        F: Fn(f64) + Send + Sync,
    {
        info!("Starting to apply incremental patch...");
        progress_callback(0.0);
        self.validate_preconditions(operations)?;
        progress_callback(0.05);
        match self
            .execute_patch_pipeline(patch_info, operations, &progress_callback)
            .await
        {
            Ok(_) => {
                progress_callback(1.0);
                info!("Incremental patch applied successfully");
                Ok(())
            }
            Err(e) => {
                error!("Patch application failed: {}", e);
                // Only attempt rollback when the error demands it AND we have
                // backups to restore from; otherwise surface the error as-is.
                if e.requires_rollback() && self.backup_enabled {
                    warn!("Starting automatic rollback...");
                    if let Err(rollback_err) = self.rollback().await {
                        error!("Rollback failed: {}", rollback_err);
                        return Err(PatchExecutorError::rollback_failed(format!(
                            "Original error: {e}, rollback error: {rollback_err}"
                        )));
                    }
                    info!("Automatic rollback completed");
                }
                Err(e)
            }
        }
    }

    /// Checks that the working directory exists and the patch is non-empty
    /// before any destructive work begins.
    fn validate_preconditions(
        &self,
        operations: &PatchOperations,
    ) -> Result<(), PatchExecutorError> {
        debug!("Validating patch application preconditions");
        if !self.work_dir.exists() {
            return Err(PatchExecutorError::path_error(format!(
                "Working directory does not exist: {:?}",
                self.work_dir
            )));
        }
        let total_operations = operations.total_operations();
        if total_operations == 0 {
            return Err(PatchExecutorError::custom("Patch operations are empty"));
        }
        debug!("Preconditions validated, total {} operations", total_operations);
        Ok(())
    }

    /// Runs the pipeline: download -> verify -> extract -> validate structure
    /// -> apply operations, emitting fixed progress milestones along the way.
    async fn execute_patch_pipeline<F>(
        &mut self,
        patch_info: &PatchPackageInfo,
        operations: &PatchOperations,
        progress_callback: &F,
    ) -> Result<(), PatchExecutorError>
    where
        F: Fn(f64) + Send + Sync,
    {
        info!("Downloading patch package...");
        let patch_path = self.patch_processor.download_patch(patch_info).await?;
        progress_callback(0.25);
        info!("Verifying patch integrity...");
        self.patch_processor
            .verify_patch_integrity(&patch_path, patch_info)
            .await?;
        progress_callback(0.35);
        info!("Extracting patch package...");
        let extracted_path = self.patch_processor.extract_patch(&patch_path).await?;
        progress_callback(0.45);
        info!("Verifying patch file structure...");
        self.validate_patch_structure(&extracted_path, operations)
            .await?;
        progress_callback(0.5);
        info!("Applying patch operations...");
        self.apply_patch_operations(&extracted_path, operations, progress_callback)
            .await?;
        Ok(())
    }

    /// Verifies the extracted patch contains every file and directory the
    /// replace operations reference.
    async fn validate_patch_structure(
        &self,
        extracted_path: &Path,
        operations: &PatchOperations,
    ) -> Result<(), PatchExecutorError> {
        let mut required_files = Vec::new();
        if let Some(replace) = &operations.replace {
            // Collect the replace-file list; the processor checks their
            // presence in one pass below.
            required_files.extend(replace.files.iter().cloned());
            // Directories are checked directly against the extracted tree.
            for dir in &replace.directories {
                let dir_path = extracted_path.join(dir);
                if !dir_path.exists() || !dir_path.is_dir() {
                    return Err(PatchExecutorError::verification_failed(format!(
                        "Required directory missing in patch: {dir}"
                    )));
                }
            }
        }
        self.patch_processor
            .validate_extracted_structure(&required_files)
            .await?;
        debug!("Patch file structure verified");
        Ok(())
    }

    /// Applies replace and delete operations, mapping completion onto the
    /// progress range [0.5, 1.0].
    async fn apply_patch_operations<F>(
        &mut self,
        extracted_path: &Path,
        operations: &PatchOperations,
        progress_callback: &F,
    ) -> Result<(), PatchExecutorError>
    where
        F: Fn(f64) + Send + Sync,
    {
        self.file_executor.set_patch_source(extracted_path)?;
        // Defensive max(1): preconditions reject empty patches, but avoid a
        // NaN progress value if this is ever called directly with zero ops.
        let total_operations = operations.total_operations().max(1);
        let mut completed_operations = 0usize;
        // Operations occupy the second half of the overall progress bar.
        let base_progress = 0.5;
        let operations_progress_range = 0.5;
        let report_progress = |completed: usize| {
            let progress = base_progress
                + (completed as f64 / total_operations as f64) * operations_progress_range;
            progress_callback(progress);
        };
        if let Some(replace) = &operations.replace {
            if !replace.files.is_empty() {
                info!("Replacing {} files", replace.files.len());
                self.file_executor.replace_files(&replace.files).await?;
                completed_operations += replace.files.len();
                report_progress(completed_operations);
            }
            if !replace.directories.is_empty() {
                info!("Replacing {} directories", replace.directories.len());
                self.file_executor
                    .replace_directories(&replace.directories)
                    .await?;
                completed_operations += replace.directories.len();
                report_progress(completed_operations);
            }
        }
        if let Some(delete) = &operations.delete {
            if !delete.files.is_empty() {
                info!("Deleting {} items", delete.files.len());
                self.file_executor.delete_items(&delete.files).await?;
                completed_operations += delete.files.len();
                report_progress(completed_operations);
            }
            if !delete.directories.is_empty() {
                info!("Deleting {} directories", delete.directories.len());
                self.file_executor.delete_items(&delete.directories).await?;
                completed_operations += delete.directories.len();
                report_progress(completed_operations);
            }
        }
        info!("Patch operations applied");
        Ok(())
    }

    /// Restores the pre-patch state from backups.
    ///
    /// # Errors
    /// Returns [`PatchExecutorError::BackupNotEnabled`] if backup mode was
    /// never enabled, or any error from the file executor's restore.
    pub async fn rollback(&mut self) -> Result<(), PatchExecutorError> {
        if !self.backup_enabled {
            return Err(PatchExecutorError::BackupNotEnabled);
        }
        warn!("Starting rollback of patch operations...");
        self.file_executor.rollback().await?;
        info!("Patch rollback completed");
        Ok(())
    }

    /// Returns the working directory this executor applies patches to.
    pub fn work_dir(&self) -> &Path {
        &self.work_dir
    }

    /// Returns whether backup (and therefore rollback) is available.
    pub fn is_backup_enabled(&self) -> bool {
        self.backup_enabled
    }

    /// Renders a human-readable summary of the pending patch operations.
    pub fn get_operation_summary(&self, operations: &PatchOperations) -> String {
        let (replace_file_count, replace_dir_count) = operations
            .replace
            .as_ref()
            .map_or((0, 0), |r| (r.files.len(), r.directories.len()));
        let (delete_file_count, delete_dir_count) = operations
            .delete
            .as_ref()
            .map_or((0, 0), |d| (d.files.len(), d.directories.len()));
        let total = operations.total_operations();
        format!(
            "Patch operation summary: {} total operations (file replacements: {}, directory replacements: {}, file deletions: {}, directory deletions: {})",
            total, replace_file_count, replace_dir_count, delete_file_count, delete_dir_count
        )
    }

    /// Returns the temporary directory used by the patch processor.
    pub fn temp_dir(&self) -> &Path {
        self.patch_processor.temp_dir()
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::api_types::ReplaceOperations;
    use tempfile::TempDir;

    /// Builds an executor rooted in a fresh temp directory. The `TempDir`
    /// guard is returned so the directory outlives the executor in each test.
    fn executor_in_temp_dir() -> (TempDir, PatchExecutor) {
        let dir = TempDir::new().unwrap();
        let executor = PatchExecutor::new(dir.path().to_owned()).unwrap();
        (dir, executor)
    }

    /// Convenience constructor for a `ReplaceOperations` from string slices.
    fn ops(files: &[&str], directories: &[&str]) -> ReplaceOperations {
        ReplaceOperations {
            files: files.iter().map(|s| s.to_string()).collect(),
            directories: directories.iter().map(|s| s.to_string()).collect(),
        }
    }

    #[tokio::test]
    async fn test_patch_executor_creation() {
        let dir = TempDir::new().unwrap();
        assert!(PatchExecutor::new(dir.path().to_owned()).is_ok());
    }

    #[tokio::test]
    async fn test_enable_backup() {
        let (_dir, mut executor) = executor_in_temp_dir();
        // Backup is off by default and on after a successful enable.
        assert!(!executor.is_backup_enabled());
        assert!(executor.enable_backup().is_ok());
        assert!(executor.is_backup_enabled());
    }

    #[tokio::test]
    async fn test_validate_preconditions() {
        let (_dir, executor) = executor_in_temp_dir();

        // Non-empty operations against an existing work dir pass validation.
        let populated = PatchOperations {
            replace: Some(ops(&["test.txt"], &["test_dir"])),
            delete: Some(ops(&["test.txt"], &["test_dir"])),
        };
        assert!(executor.validate_preconditions(&populated).is_ok());

        // Zero total operations must be rejected.
        let empty = PatchOperations {
            replace: Some(ops(&[], &[])),
            delete: Some(ops(&[], &[])),
        };
        assert!(executor.validate_preconditions(&empty).is_err());
    }

    #[tokio::test]
    async fn test_operation_summary() {
        let (_dir, executor) = executor_in_temp_dir();
        let operations = PatchOperations {
            replace: Some(ops(&["file1.txt", "file2.txt"], &["dir1"])),
            delete: Some(ops(&["old_file.txt"], &[])),
        };
        let summary = executor.get_operation_summary(&operations);
        assert!(summary.contains("4 total operations"));
        assert!(summary.contains("file replacements: 2"));
        assert!(summary.contains("directory replacements: 1"));
        assert!(summary.contains("deletions: 1"));
    }

    #[tokio::test]
    async fn test_rollback_without_backup() {
        let (_dir, mut executor) = executor_in_temp_dir();
        // Rolling back without backup mode must fail with the dedicated error.
        let result = executor.rollback().await;
        assert!(result.is_err());
        assert!(matches!(
            result.unwrap_err(),
            PatchExecutorError::BackupNotEnabled
        ));
    }
}