use more_asserts::*;
use ssstar::{CreateProgressCallback, ExtractProgressCallback};
use std::collections::HashMap;
use std::sync::{Arc, Mutex};
use std::time::Duration;
/// One recorded invocation of a `ssstar::CreateProgressCallback` method.
///
/// Each variant mirrors a trait method and stores owned copies of its
/// arguments (borrowed `&str`s become `String`s; `Duration` arguments are not
/// captured — see the trait impl below).
///
/// `strum::EnumDiscriminants` derives a parallel fieldless enum,
/// `CreateProgressEventDiscriminants`, used to filter recorded events by kind
/// without matching on payloads.
#[derive(Clone, Debug, strum::EnumDiscriminants)]
#[allow(dead_code)] pub(crate) enum CreateProgressEvent {
/// The download phase is starting for all input objects.
InputObjectsDownloadStarting {
total_objects: usize,
total_bytes: u64,
},
/// Download of a single input object has begun.
InputObjectDownloadStarted {
bucket: String,
key: String,
version_id: Option<String>,
size: u64,
},
/// A part of an input object was downloaded (completion order of parts is
/// not guaranteed — hence "unordered").
InputPartUnorderedDownloaded {
bucket: String,
key: String,
version_id: Option<String>,
part_number: usize,
part_size: usize,
},
/// A part of an input object was downloaded, reported in order.
InputPartDownloaded {
bucket: String,
key: String,
version_id: Option<String>,
part_number: usize,
part_size: usize,
},
/// Download of a single input object finished.
InputObjectDownloadCompleted {
bucket: String,
key: String,
version_id: Option<String>,
size: u64,
},
/// All input object downloads finished.
InputObjectsDownloadCompleted {
total_bytes: u64,
},
/// The tar archive writer was initialized with the expected totals.
ArchiveInitialized {
total_objects: usize,
total_bytes: u64,
estimated_archive_size: u64,
},
/// A part of an object was written into the archive.
ArchivePartWritten {
bucket: String,
key: String,
version_id: Option<String>,
part_number: usize,
part_size: usize,
},
/// An entire object was written into the archive starting at `byte_offset`.
ArchiveObjectWritten {
bucket: String,
key: String,
version_id: Option<String>,
timestamp: chrono::DateTime<chrono::Utc>,
byte_offset: u64,
size: u64,
},
/// Raw bytes were written to the archive (object data plus tar framing).
ArchiveBytesWritten {
bytes_written: usize,
},
/// All archive writes finished; `total_bytes_written` is the archive size.
ArchiveWritesCompleted {
total_bytes_written: u64,
},
/// Bytes of the finished archive were uploaded to the target.
ArchiveBytesUploaded {
bytes_uploaded: usize,
},
/// Upload of the archive finished; `size` is the total uploaded size.
ArchiveUploadCompleted {
size: u64,
},
}
/// Test implementation of `ssstar::CreateProgressCallback` that records every
/// callback invocation as a [`CreateProgressEvent`] for later inspection.
///
/// Clones share the same `Arc`'d event log, so a clone can be handed to the
/// code under test while the test keeps this handle for its assertions.
#[derive(Clone)]
pub(crate) struct TestCreateProgressCallback {
// Mutex-guarded log of events in the order they were reported.
events: Arc<Mutex<Vec<CreateProgressEvent>>>,
}
/// Binds `$var` against the pattern `$matches` and evaluates `$block` with
/// the pattern's bindings in scope; panics with a descriptive message if the
/// value does not match.
///
/// Used below to destructure an event that has already been filtered to the
/// expected variant, so a mismatch indicates a bug in the filtering logic.
macro_rules! with_match {
($var:ident, $matches:pat, $block:block) => {
if let $matches = $var {
$block
} else {
unreachable!(
"{}",
concat!(
stringify!($var),
" does not match expression ",
stringify!($matches)
)
)
}
};
}
impl TestCreateProgressCallback {
/// Creates a callback with an empty, shared event log.
pub fn new() -> Self {
Self {
events: Arc::new(Mutex::new(Vec::new())),
}
}
/// Cross-checks all recorded events for internal consistency: counts and
/// byte totals reported by the start/part/complete/archive events must
/// all agree. Panics (via the assert macros) on any mismatch.
pub fn sanity_check_updates(&self) {
// Every object that started downloading must also have completed,
// with matching count and byte total.
assert_eq!(
self.input_object_download_started(),
self.input_object_download_completed()
);
// Unordered and in-order part events must account for the same
// number of parts and bytes.
assert_eq!(
self.input_part_unordered_downloaded(),
self.input_part_downloaded()
);
let (input_objects_downloading, input_object_bytes_downloading) =
self.input_objects_download_starting();
// The announced totals must match what was actually started.
assert_eq!(
(input_objects_downloading, input_object_bytes_downloading),
self.input_object_download_started()
);
let (input_objects_downloaded, input_object_bytes_downloaded) =
self.input_object_download_completed();
let (input_parts_downloaded, input_part_bytes_downloaded) = self.input_part_downloaded();
let total_input_objects_bytes_downloaded = self.input_objects_download_completed();
assert_eq!(input_objects_downloaded, input_objects_downloading);
assert_eq!(
input_object_bytes_downloaded,
input_object_bytes_downloading
);
assert_eq!(
total_input_objects_bytes_downloaded,
input_object_bytes_downloaded
);
// With zero input objects the remaining checks would be vacuous, so
// require at least one.
assert_gt!(input_objects_downloaded, 0);
let (archive_total_objects, archive_total_bytes, estimated_archive_size) =
self.tar_archive_initialized();
assert_eq!(archive_total_objects, input_objects_downloaded);
assert_eq!(archive_total_bytes, total_input_objects_bytes_downloaded);
// The archive carries framing overhead on top of the raw object
// bytes, so its estimated size must be strictly larger.
assert_gt!(estimated_archive_size, archive_total_bytes);
let (archive_objects_written, archive_object_bytes_written) =
self.tar_archive_object_written();
assert_eq!(archive_objects_written, input_objects_downloaded);
assert_eq!(archive_object_bytes_written, input_object_bytes_downloaded);
let (archive_parts_written, archive_part_bytes_written) = self.tar_archive_part_written();
assert_eq!(archive_parts_written, input_parts_downloaded);
assert_eq!(archive_part_bytes_written, input_part_bytes_downloaded);
let (archive_writes, archive_bytes_written) = self.tar_archive_bytes_written();
assert_gt!(archive_writes, 0);
// Same overhead argument: total bytes written to the archive exceed
// the object payload bytes.
assert_gt!(archive_bytes_written, input_object_bytes_downloaded);
assert_eq!(archive_bytes_written, self.tar_archive_writes_completed());
// Upload events may be absent entirely (presumably when the archive
// target is not object storage — TODO confirm against callers); only
// check upload totals when some upload activity was recorded.
let (archive_uploads, sum_archive_bytes_uploaded) = self.tar_archive_bytes_uploaded();
if archive_uploads > 0 {
let archive_upload_completed_bytes = self.tar_archive_upload_completed();
assert_eq!(sum_archive_bytes_uploaded, archive_upload_completed_bytes);
}
}
/// Returns `(total_objects, total_bytes)` from the single
/// `InputObjectsDownloadStarting` event; panics unless exactly one was
/// recorded.
pub fn input_objects_download_starting(&self) -> (usize, u64) {
let event = self
.filter_single_event(CreateProgressEventDiscriminants::InputObjectsDownloadStarting)
.unwrap();
with_match!(
event,
CreateProgressEvent::InputObjectsDownloadStarting {
total_objects,
total_bytes
},
{ (total_objects, total_bytes) }
)
}
/// Returns `(event count, summed object sizes)` over all
/// `InputObjectDownloadStarted` events.
pub fn input_object_download_started(&self) -> (usize, u64) {
let events =
self.filter_events(CreateProgressEventDiscriminants::InputObjectDownloadStarted);
let count = events.len();
let sum = events
.into_iter()
.map(|event| {
with_match!(
event,
CreateProgressEvent::InputObjectDownloadStarted { size, .. },
{ size }
)
})
.sum();
(count, sum)
}
/// Returns `(event count, summed part sizes)` over all
/// `InputPartUnorderedDownloaded` events.
pub fn input_part_unordered_downloaded(&self) -> (usize, u64) {
let events =
self.filter_events(CreateProgressEventDiscriminants::InputPartUnorderedDownloaded);
let count = events.len();
let sum = events
.into_iter()
.map(|event| {
with_match!(
event,
CreateProgressEvent::InputPartUnorderedDownloaded { part_size, .. },
{ part_size as u64 }
)
})
.sum();
(count, sum)
}
/// Returns `(event count, summed part sizes)` over all
/// `InputPartDownloaded` events.
pub fn input_part_downloaded(&self) -> (usize, u64) {
let events = self.filter_events(CreateProgressEventDiscriminants::InputPartDownloaded);
let count = events.len();
let sum = events
.into_iter()
.map(|event| {
with_match!(
event,
CreateProgressEvent::InputPartDownloaded { part_size, .. },
{ part_size as u64 }
)
})
.sum();
(count, sum)
}
/// Returns `(event count, summed object sizes)` over all
/// `InputObjectDownloadCompleted` events.
pub fn input_object_download_completed(&self) -> (usize, u64) {
let events =
self.filter_events(CreateProgressEventDiscriminants::InputObjectDownloadCompleted);
let count = events.len();
let sum = events
.into_iter()
.map(|event| {
with_match!(
event,
CreateProgressEvent::InputObjectDownloadCompleted { size, .. },
{ size }
)
})
.sum();
(count, sum)
}
/// Returns `total_bytes` from the single `InputObjectsDownloadCompleted`
/// event; panics unless exactly one was recorded.
pub fn input_objects_download_completed(&self) -> u64 {
let event = self
.filter_single_event(CreateProgressEventDiscriminants::InputObjectsDownloadCompleted)
.unwrap();
with_match!(
event,
CreateProgressEvent::InputObjectsDownloadCompleted { total_bytes, .. },
{ total_bytes }
)
}
/// Returns `(total_objects, total_bytes, estimated_archive_size)` from
/// the single `ArchiveInitialized` event; panics unless exactly one was
/// recorded.
pub fn tar_archive_initialized(&self) -> (usize, u64, u64) {
let event = self
.filter_single_event(CreateProgressEventDiscriminants::ArchiveInitialized)
.unwrap();
with_match!(
event,
CreateProgressEvent::ArchiveInitialized {
total_objects,
total_bytes,
estimated_archive_size
},
{ (total_objects, total_bytes, estimated_archive_size) }
)
}
/// Returns `(event count, summed part sizes)` over all
/// `ArchivePartWritten` events.
pub fn tar_archive_part_written(&self) -> (usize, u64) {
let events = self.filter_events(CreateProgressEventDiscriminants::ArchivePartWritten);
let count = events.len();
let sum = events
.into_iter()
.map(|event| {
with_match!(
event,
CreateProgressEvent::ArchivePartWritten { part_size, .. },
{ part_size as u64 }
)
})
.sum();
(count, sum)
}
/// Returns `(event count, summed object sizes)` over all
/// `ArchiveObjectWritten` events.
pub fn tar_archive_object_written(&self) -> (usize, u64) {
let events = self.filter_events(CreateProgressEventDiscriminants::ArchiveObjectWritten);
let count = events.len();
let sum = events
.into_iter()
.map(|event| {
with_match!(
event,
CreateProgressEvent::ArchiveObjectWritten { size, .. },
{ size }
)
})
.sum();
(count, sum)
}
/// Returns `(event count, summed bytes written)` over all
/// `ArchiveBytesWritten` events.
pub fn tar_archive_bytes_written(&self) -> (usize, u64) {
let events = self.filter_events(CreateProgressEventDiscriminants::ArchiveBytesWritten);
let count = events.len();
let sum = events
.into_iter()
.map(|event| {
with_match!(
event,
CreateProgressEvent::ArchiveBytesWritten { bytes_written, .. },
{ bytes_written as u64 }
)
})
.sum();
(count, sum)
}
/// Returns `total_bytes_written` from the single `ArchiveWritesCompleted`
/// event; panics unless exactly one was recorded.
pub fn tar_archive_writes_completed(&self) -> u64 {
let event = self
.filter_single_event(CreateProgressEventDiscriminants::ArchiveWritesCompleted)
.unwrap();
with_match!(
event,
CreateProgressEvent::ArchiveWritesCompleted {
total_bytes_written
},
{ total_bytes_written }
)
}
/// Returns `(event count, summed bytes uploaded)` over all
/// `ArchiveBytesUploaded` events. A zero count means no upload happened.
pub fn tar_archive_bytes_uploaded(&self) -> (usize, u64) {
let events = self.filter_events(CreateProgressEventDiscriminants::ArchiveBytesUploaded);
let count = events.len();
let sum = events
.into_iter()
.map(|event| {
with_match!(
event,
CreateProgressEvent::ArchiveBytesUploaded { bytes_uploaded, .. },
{ bytes_uploaded as u64 }
)
})
.sum();
(count, sum)
}
/// Returns `size` from the single `ArchiveUploadCompleted` event; panics
/// unless exactly one was recorded.
pub fn tar_archive_upload_completed(&self) -> u64 {
let event = self
.filter_single_event(CreateProgressEventDiscriminants::ArchiveUploadCompleted)
.unwrap();
with_match!(
event,
CreateProgressEvent::ArchiveUploadCompleted { size },
{ size }
)
}
/// Returns clones of all recorded events whose discriminant equals `typ`,
/// in the order they were reported.
pub fn filter_events(&self, typ: CreateProgressEventDiscriminants) -> Vec<CreateProgressEvent> {
let events = self.events.lock().unwrap();
events
.iter()
.filter(|event| {
// `From<&CreateProgressEvent>` for the discriminant enum is
// generated by `strum::EnumDiscriminants`.
let event_typ: CreateProgressEventDiscriminants = (*event).into();
event_typ == typ
})
.cloned()
.collect::<Vec<_>>()
}
/// Returns the single recorded event of kind `typ`, `None` if there were
/// none, and panics if there was more than one.
pub fn filter_single_event(
&self,
typ: CreateProgressEventDiscriminants,
) -> Option<CreateProgressEvent> {
let mut events = self.filter_events(typ);
assert!(
events.len() <= 1,
"Expected 0 or 1 instances of {:?}, but found {}",
typ,
events.len()
);
events.pop()
}
/// Appends `event` to the shared log (called by the trait impl below).
fn report_event(&self, event: CreateProgressEvent) {
let mut events = self.events.lock().unwrap();
events.push(event)
}
}
impl std::fmt::Debug for TestCreateProgressCallback {
    /// Debug output is simply the list of recorded events.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        self.events.lock().unwrap().fmt(f)
    }
}
impl CreateProgressCallback for TestCreateProgressCallback {
    // Each trait method records an equivalent `CreateProgressEvent`, turning
    // borrowed string arguments into owned values. `Duration` arguments are
    // intentionally discarded: the tests assert on counts and byte totals,
    // never on timing.
    fn input_objects_download_starting(&self, total_objects: usize, total_bytes: u64) {
        self.report_event(CreateProgressEvent::InputObjectsDownloadStarting {
            total_objects,
            total_bytes,
        });
    }
    fn input_object_download_started(
        &self,
        bucket: &str,
        key: &str,
        version_id: Option<&str>,
        size: u64,
    ) {
        self.report_event(CreateProgressEvent::InputObjectDownloadStarted {
            bucket: bucket.to_owned(),
            key: key.to_owned(),
            version_id: version_id.map(String::from),
            size,
        });
    }
    fn input_part_unordered_downloaded(
        &self,
        bucket: &str,
        key: &str,
        version_id: Option<&str>,
        part_number: usize,
        part_size: usize,
    ) {
        self.report_event(CreateProgressEvent::InputPartUnorderedDownloaded {
            bucket: bucket.to_owned(),
            key: key.to_owned(),
            version_id: version_id.map(String::from),
            part_number,
            part_size,
        });
    }
    fn input_part_downloaded(
        &self,
        bucket: &str,
        key: &str,
        version_id: Option<&str>,
        part_number: usize,
        part_size: usize,
    ) {
        self.report_event(CreateProgressEvent::InputPartDownloaded {
            bucket: bucket.to_owned(),
            key: key.to_owned(),
            version_id: version_id.map(String::from),
            part_number,
            part_size,
        });
    }
    fn input_object_download_completed(
        &self,
        bucket: &str,
        key: &str,
        version_id: Option<&str>,
        size: u64,
    ) {
        self.report_event(CreateProgressEvent::InputObjectDownloadCompleted {
            bucket: bucket.to_owned(),
            key: key.to_owned(),
            version_id: version_id.map(String::from),
            size,
        });
    }
    fn input_objects_download_completed(&self, total_bytes: u64, _duration: Duration) {
        self.report_event(CreateProgressEvent::InputObjectsDownloadCompleted { total_bytes });
    }
    fn archive_initialized(
        &self,
        total_objects: usize,
        total_bytes: u64,
        estimated_archive_size: u64,
    ) {
        self.report_event(CreateProgressEvent::ArchiveInitialized {
            total_objects,
            total_bytes,
            estimated_archive_size,
        });
    }
    fn archive_part_written(
        &self,
        bucket: &str,
        key: &str,
        version_id: Option<&str>,
        part_number: usize,
        part_size: usize,
    ) {
        self.report_event(CreateProgressEvent::ArchivePartWritten {
            bucket: bucket.to_owned(),
            key: key.to_owned(),
            version_id: version_id.map(String::from),
            part_number,
            part_size,
        });
    }
    fn archive_object_written(
        &self,
        bucket: &str,
        key: &str,
        version_id: Option<&str>,
        timestamp: chrono::DateTime<chrono::Utc>,
        byte_offset: u64,
        size: u64,
    ) {
        self.report_event(CreateProgressEvent::ArchiveObjectWritten {
            bucket: bucket.to_owned(),
            key: key.to_owned(),
            version_id: version_id.map(String::from),
            timestamp,
            byte_offset,
            size,
        });
    }
    fn archive_bytes_written(&self, bytes_written: usize) {
        self.report_event(CreateProgressEvent::ArchiveBytesWritten { bytes_written });
    }
    fn archive_writes_completed(&self, total_bytes_written: u64) {
        self.report_event(CreateProgressEvent::ArchiveWritesCompleted {
            total_bytes_written,
        });
    }
    fn archive_bytes_uploaded(&self, bytes_uploaded: usize) {
        self.report_event(CreateProgressEvent::ArchiveBytesUploaded { bytes_uploaded });
    }
    fn archive_upload_completed(&self, size: u64, _duration: Duration) {
        self.report_event(CreateProgressEvent::ArchiveUploadCompleted { size });
    }
}
/// One recorded invocation of a `ssstar::ExtractProgressCallback` method.
///
/// Each variant mirrors a trait method and stores owned copies of its
/// arguments (borrowed `&str`s become `String`s; `Duration` arguments are not
/// captured — see the trait impl below).
///
/// `strum::EnumDiscriminants` derives a parallel fieldless enum,
/// `ExtractProgressEventDiscriminants`, used to filter recorded events by
/// kind without matching on payloads.
#[derive(Clone, Debug, strum::EnumDiscriminants)]
#[allow(dead_code)] pub(crate) enum ExtractProgressEvent {
/// Extraction is starting; the archive size may be unknown (streaming).
ExtractStarting {
archive_size: Option<u64>,
},
/// A chunk of the archive itself was read.
ExtractArchivePartRead {
bytes: usize,
},
/// An object in the archive was skipped (not selected for extraction).
ExtractObjectSkipped {
key: String,
size: u64,
},
/// Extraction of a single object has begun.
ExtractObjectStarting {
key: String,
size: u64,
},
/// A chunk of an object's data was read from the archive.
ExtractObjectPartRead {
key: String,
bytes: usize,
},
/// Extraction of a single object finished.
ExtractObjectFinished {
key: String,
size: u64,
},
/// The whole extraction finished, with summary totals.
ExtractFinished {
extracted_objects: usize,
extracted_object_bytes: u64,
skipped_objects: usize,
skipped_object_bytes: u64,
total_bytes: u64,
},
/// Upload of an extracted object to the target has begun.
ObjectUploadStarting {
key: String,
size: u64,
},
/// A chunk of an extracted object was uploaded.
ObjectPartUploaded {
key: String,
bytes: usize,
},
/// Upload of a single extracted object finished.
ObjectUploaded {
key: String,
size: u64,
},
/// All uploads finished, with summary totals.
ObjectsUploaded {
total_objects: usize,
total_object_bytes: u64,
},
}
/// Test implementation of `ssstar::ExtractProgressCallback` that records
/// every callback invocation as an [`ExtractProgressEvent`] for later
/// inspection.
///
/// Clones share the same `Arc`'d event log, so a clone can be handed to the
/// code under test while the test keeps this handle for its assertions.
#[derive(Clone)]
pub(crate) struct TestExtractProgressCallback {
// Mutex-guarded log of events in the order they were reported.
events: Arc<Mutex<Vec<ExtractProgressEvent>>>,
}
impl TestExtractProgressCallback {
/// Creates a callback with an empty, shared event log.
pub fn new() -> Self {
Self {
events: Arc::new(Mutex::new(Vec::new())),
}
}
/// Cross-checks all recorded events for internal consistency: per-phase
/// counts and byte totals (read, skip, extract, upload) must agree with
/// the summary events. Panics (via the assert macros) on any mismatch.
pub fn sanity_check_updates(&self) {
let archive_size = self.extract_starting();
if let Some(archive_size) = archive_size {
let (_, archive_bytes_read) = self.extract_archive_part_read();
// Reads stop 512 bytes short of the full archive size —
// presumably the trailing tar end-of-archive block is not
// reported as read (NOTE(review): confirm against the reader).
assert_eq!(archive_size, archive_bytes_read + 512);
}
// The summary's total byte count must equal the sum of all part reads.
let (_, archive_part_bytes_read) = self.extract_archive_part_read();
let (_, _, _, _, total_archive_bytes_read) = self.extract_finished();
assert_eq!(archive_part_bytes_read, total_archive_bytes_read);
// Skipped and extracted per-object events must match the summary.
let (objects_skipped, object_bytes_skipped) = self.extract_object_skipped();
let (objects_extracted, object_bytes_extracted) = self.extract_object_finished();
let (
total_objects_extracted,
total_object_bytes_extracted,
total_objects_skipped,
total_object_bytes_skipped,
_,
) = self.extract_finished();
assert_eq!(objects_skipped, total_objects_skipped);
assert_eq!(object_bytes_skipped, total_object_bytes_skipped);
assert_eq!(objects_extracted, total_objects_extracted);
assert_eq!(object_bytes_extracted, total_object_bytes_extracted);
let (_, object_part_bytes_read) = self.extract_object_part_read();
assert_eq!(total_object_bytes_extracted, object_part_bytes_read);
// Every object that started extracting finished, and every extracted
// object was uploaded (start and finish totals all agree).
assert_eq!(
self.extract_object_starting(),
self.extract_object_finished()
);
assert_eq!(
self.extract_object_starting(),
self.object_upload_starting()
);
assert_eq!(self.extract_object_finished(), self.object_uploaded());
// Per-object upload accounting: group part-upload events by key...
let object_upload_starting_events = self
.filter_events(ExtractProgressEventDiscriminants::ObjectUploadStarting)
.into_iter()
.map(|event| {
with_match!(
event,
ExtractProgressEvent::ObjectUploadStarting { key, size, .. },
{ (key, size) }
)
});
let mut object_part_uploaded_events: HashMap<String, Vec<usize>> = HashMap::new();
for (key, bytes) in self
.filter_events(ExtractProgressEventDiscriminants::ObjectPartUploaded)
.into_iter()
.map(|event| {
with_match!(
event,
ExtractProgressEvent::ObjectPartUploaded { key, bytes, .. },
{ (key, bytes) }
)
})
{
object_part_uploaded_events
.entry(key)
.or_default()
.push(bytes)
}
// ...then verify each object's parts never exceed, and eventually
// exactly reach, its announced size.
for (object_key, object_size) in object_upload_starting_events {
let mut object_part_total_bytes = 0u64;
for part_bytes in object_part_uploaded_events
.get(&object_key)
.unwrap_or_else(|| panic!("Object {object_key} has no object_part_uploaded events"))
{
assert_le!(
object_part_total_bytes + (*part_bytes as u64),
object_size,
"Object '{object_key}'"
);
object_part_total_bytes += *part_bytes as u64;
}
assert_eq!(
object_part_total_bytes, object_size,
"Object '{object_key}'"
);
}
// The upload summary must agree with the per-object and per-part events.
assert_eq!(self.objects_uploaded(), self.object_uploaded());
let (_, object_part_bytes_uploaded) = self.object_part_uploaded();
let (_, total_object_bytes_uploaded) = self.objects_uploaded();
assert_eq!(object_part_bytes_uploaded, total_object_bytes_uploaded);
}
/// Returns `archive_size` from the single `ExtractStarting` event; panics
/// unless exactly one was recorded.
pub fn extract_starting(&self) -> Option<u64> {
let event = self
.filter_single_event(ExtractProgressEventDiscriminants::ExtractStarting)
.unwrap();
with_match!(
event,
ExtractProgressEvent::ExtractStarting { archive_size },
{ archive_size }
)
}
/// Returns `(event count, summed bytes)` over all
/// `ExtractArchivePartRead` events.
pub fn extract_archive_part_read(&self) -> (usize, u64) {
let events = self.filter_events(ExtractProgressEventDiscriminants::ExtractArchivePartRead);
let count = events.len();
let sum = events
.into_iter()
.map(|event| {
with_match!(
event,
ExtractProgressEvent::ExtractArchivePartRead { bytes, .. },
{ bytes as u64 }
)
})
.sum();
(count, sum)
}
/// Returns `(event count, summed object sizes)` over all
/// `ExtractObjectSkipped` events.
pub fn extract_object_skipped(&self) -> (usize, u64) {
let events = self.filter_events(ExtractProgressEventDiscriminants::ExtractObjectSkipped);
let count = events.len();
let sum = events
.into_iter()
.map(|event| {
with_match!(
event,
ExtractProgressEvent::ExtractObjectSkipped { size, .. },
{ size }
)
})
.sum();
(count, sum)
}
/// Returns `(event count, summed object sizes)` over all
/// `ExtractObjectStarting` events.
pub fn extract_object_starting(&self) -> (usize, u64) {
let events = self.filter_events(ExtractProgressEventDiscriminants::ExtractObjectStarting);
let count = events.len();
let sum = events
.into_iter()
.map(|event| {
with_match!(
event,
ExtractProgressEvent::ExtractObjectStarting { size, .. },
{ size }
)
})
.sum();
(count, sum)
}
/// Returns `(event count, summed bytes)` over all
/// `ExtractObjectPartRead` events.
pub fn extract_object_part_read(&self) -> (usize, u64) {
let events = self.filter_events(ExtractProgressEventDiscriminants::ExtractObjectPartRead);
let count = events.len();
let sum = events
.into_iter()
.map(|event| {
with_match!(
event,
ExtractProgressEvent::ExtractObjectPartRead { bytes, .. },
{ bytes as u64 }
)
})
.sum();
(count, sum)
}
/// Returns `(event count, summed object sizes)` over all
/// `ExtractObjectFinished` events.
pub fn extract_object_finished(&self) -> (usize, u64) {
let events = self.filter_events(ExtractProgressEventDiscriminants::ExtractObjectFinished);
let count = events.len();
let sum = events
.into_iter()
.map(|event| {
with_match!(
event,
ExtractProgressEvent::ExtractObjectFinished { size, .. },
{ size }
)
})
.sum();
(count, sum)
}
/// Returns the summary tuple `(extracted_objects, extracted_object_bytes,
/// skipped_objects, skipped_object_bytes, total_bytes)` from the single
/// `ExtractFinished` event; panics unless exactly one was recorded.
pub fn extract_finished(&self) -> (usize, u64, usize, u64, u64) {
let event = self
.filter_single_event(ExtractProgressEventDiscriminants::ExtractFinished)
.unwrap();
with_match!(
event,
ExtractProgressEvent::ExtractFinished {
extracted_objects,
extracted_object_bytes,
skipped_objects,
skipped_object_bytes,
total_bytes
},
{
(
extracted_objects,
extracted_object_bytes,
skipped_objects,
skipped_object_bytes,
total_bytes,
)
}
)
}
/// Returns `(event count, summed object sizes)` over all
/// `ObjectUploadStarting` events.
pub fn object_upload_starting(&self) -> (usize, u64) {
let events = self.filter_events(ExtractProgressEventDiscriminants::ObjectUploadStarting);
let count = events.len();
let sum = events
.into_iter()
.map(|event| {
with_match!(
event,
ExtractProgressEvent::ObjectUploadStarting { size, .. },
{ size }
)
})
.sum();
(count, sum)
}
/// Returns `(event count, summed bytes)` over all `ObjectPartUploaded`
/// events.
pub fn object_part_uploaded(&self) -> (usize, u64) {
let events = self.filter_events(ExtractProgressEventDiscriminants::ObjectPartUploaded);
let count = events.len();
let sum = events
.into_iter()
.map(|event| {
with_match!(
event,
ExtractProgressEvent::ObjectPartUploaded { bytes, .. },
{ bytes as u64 }
)
})
.sum();
(count, sum)
}
/// Returns `(event count, summed object sizes)` over all `ObjectUploaded`
/// events.
pub fn object_uploaded(&self) -> (usize, u64) {
let events = self.filter_events(ExtractProgressEventDiscriminants::ObjectUploaded);
let count = events.len();
let sum = events
.into_iter()
.map(|event| {
with_match!(event, ExtractProgressEvent::ObjectUploaded { size, .. }, {
size
})
})
.sum();
(count, sum)
}
/// Returns `(total_objects, total_object_bytes)` from the single
/// `ObjectsUploaded` event; panics unless exactly one was recorded.
pub fn objects_uploaded(&self) -> (usize, u64) {
let event = self
.filter_single_event(ExtractProgressEventDiscriminants::ObjectsUploaded)
.unwrap();
with_match!(
event,
ExtractProgressEvent::ObjectsUploaded {
total_objects,
total_object_bytes
},
{ (total_objects, total_object_bytes) }
)
}
/// Returns clones of all recorded events whose discriminant equals `typ`,
/// in the order they were reported.
pub fn filter_events(
&self,
typ: ExtractProgressEventDiscriminants,
) -> Vec<ExtractProgressEvent> {
let events = self.events.lock().unwrap();
events
.iter()
.filter(|event| {
// `From<&ExtractProgressEvent>` for the discriminant enum is
// generated by `strum::EnumDiscriminants`.
let event_typ: ExtractProgressEventDiscriminants = (*event).into();
event_typ == typ
})
.cloned()
.collect::<Vec<_>>()
}
/// Returns the single recorded event of kind `typ`, `None` if there were
/// none, and panics if there was more than one.
pub fn filter_single_event(
&self,
typ: ExtractProgressEventDiscriminants,
) -> Option<ExtractProgressEvent> {
let mut events = self.filter_events(typ);
assert!(
events.len() <= 1,
"Expected 0 or 1 instances of {:?}, but found {}",
typ,
events.len()
);
events.pop()
}
/// Appends `event` to the shared log (called by the trait impl below).
fn report_event(&self, event: ExtractProgressEvent) {
let mut events = self.events.lock().unwrap();
events.push(event)
}
}
impl std::fmt::Debug for TestExtractProgressCallback {
    /// Debug output is simply the list of recorded events.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        self.events.lock().unwrap().fmt(f)
    }
}
impl ExtractProgressCallback for TestExtractProgressCallback {
    // Each trait method records an equivalent `ExtractProgressEvent`, turning
    // borrowed string arguments into owned values. `Duration` arguments are
    // intentionally discarded: the tests assert on counts and byte totals,
    // never on timing.
    fn extract_starting(&self, archive_size: Option<u64>) {
        self.report_event(ExtractProgressEvent::ExtractStarting { archive_size });
    }
    fn extract_archive_part_read(&self, bytes: usize) {
        self.report_event(ExtractProgressEvent::ExtractArchivePartRead { bytes });
    }
    fn extract_object_skipped(&self, key: &str, size: u64) {
        self.report_event(ExtractProgressEvent::ExtractObjectSkipped {
            key: key.to_owned(),
            size,
        });
    }
    fn extract_object_starting(&self, key: &str, size: u64) {
        self.report_event(ExtractProgressEvent::ExtractObjectStarting {
            key: key.to_owned(),
            size,
        });
    }
    fn extract_object_part_read(&self, key: &str, bytes: usize) {
        self.report_event(ExtractProgressEvent::ExtractObjectPartRead {
            key: key.to_owned(),
            bytes,
        });
    }
    fn extract_object_finished(&self, key: &str, size: u64) {
        self.report_event(ExtractProgressEvent::ExtractObjectFinished {
            key: key.to_owned(),
            size,
        });
    }
    fn extract_finished(
        &self,
        extracted_objects: usize,
        extracted_object_bytes: u64,
        skipped_objects: usize,
        skipped_object_bytes: u64,
        total_bytes: u64,
        _duration: Duration,
    ) {
        self.report_event(ExtractProgressEvent::ExtractFinished {
            extracted_objects,
            extracted_object_bytes,
            skipped_objects,
            skipped_object_bytes,
            total_bytes,
        });
    }
    fn object_upload_starting(&self, key: &str, size: u64) {
        self.report_event(ExtractProgressEvent::ObjectUploadStarting {
            key: key.to_owned(),
            size,
        });
    }
    fn object_part_uploaded(&self, key: &str, bytes: usize) {
        self.report_event(ExtractProgressEvent::ObjectPartUploaded {
            key: key.to_owned(),
            bytes,
        });
    }
    fn object_uploaded(&self, key: &str, size: u64) {
        self.report_event(ExtractProgressEvent::ObjectUploaded {
            key: key.to_owned(),
            size,
        });
    }
    fn objects_uploaded(&self, total_objects: usize, total_object_bytes: u64, _duration: Duration) {
        self.report_event(ExtractProgressEvent::ObjectsUploaded {
            total_objects,
            total_object_bytes,
        });
    }
}