use crate::error::{Error, Result};
use crate::fileinfo::{ErasureAlgo, ErasureInfo, FileInfo, FileInfoVersions, ObjectPartInfo, RawFileInfo};
use crate::filemeta_inline::InlineData;
use crate::headers::{
    self, AMZ_META_UNENCRYPTED_CONTENT_LENGTH, AMZ_META_UNENCRYPTED_CONTENT_MD5, AMZ_STORAGE_CLASS, RESERVED_METADATA_PREFIX,
    RESERVED_METADATA_PREFIX_LOWER, VERSION_PURGE_STATUS_KEY,
};
use byteorder::ByteOrder;
use bytes::Bytes;
use s3s::header::X_AMZ_RESTORE;
use serde::{Deserialize, Serialize};
use std::cmp::Ordering;
use std::convert::TryFrom;
use std::hash::Hasher;
use std::io::{Read, Write};
use std::{collections::HashMap, io::Cursor};
use time::OffsetDateTime;
use tokio::io::AsyncRead;
use uuid::Uuid;
use xxhash_rust::xxh64;

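// On-disk layout, as written by `FileMeta::marshal_msg` below:
//
//   "XL2 " | major: u16 LE | minor: u16 LE
//   | msgpack bin32 containing the version headers and version payloads
//   | msgpack uint32 xxhash64 CRC of that bin
//   | optional inline data (see `InlineData`)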
pub static XL_FILE_HEADER: [u8; 4] = [b'X', b'L', b'2', b' '];
static XL_FILE_VERSION_MAJOR: u16 = 1;
static XL_FILE_VERSION_MINOR: u16 = 3;
static XL_HEADER_VERSION: u8 = 3;
pub static XL_META_VERSION: u8 = 2;
static XXHASH_SEED: u64 = 0;

const XL_FLAG_FREE_VERSION: u8 = 1 << 0;
const _XL_FLAG_INLINE_DATA: u8 = 1 << 2;

const META_DATA_READ_DEFAULT: usize = 4 << 10;
const MSGP_UINT32_SIZE: usize = 5;

pub const TRANSITION_COMPLETE: &str = "complete";
pub const TRANSITION_PENDING: &str = "pending";

pub const FREE_VERSION: &str = "free-version";

pub const TRANSITION_STATUS: &str = "transition-status";
pub const TRANSITIONED_OBJECTNAME: &str = "transitioned-object";
pub const TRANSITIONED_VERSION_ID: &str = "transitioned-versionID";
pub const TRANSITION_TIER: &str = "transition-tier";

const X_AMZ_RESTORE_EXPIRY_DAYS: &str = "X-Amz-Restore-Expiry-Days";
const X_AMZ_RESTORE_REQUEST_DATE: &str = "X-Amz-Restore-Request-Date";

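/// In-memory form of an `xl.meta` file: the shallow version list (headers plus
/// raw msgpack payloads) and any inline data appended after the CRC.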
#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)]
pub struct FileMeta {
    pub versions: Vec<FileMetaShallowVersion>,
    pub data: InlineData,
    pub meta_ver: u8,
}

impl FileMeta {
    pub fn new() -> Self {
        Self {
            meta_ver: XL_META_VERSION,
            data: InlineData::new(),
            ..Default::default()
        }
    }

    pub fn is_xl2_v1_format(buf: &[u8]) -> bool {
        Self::check_xl2_v1(buf).is_ok()
    }

    pub fn load(buf: &[u8]) -> Result<FileMeta> {
        let mut xl = FileMeta::default();
        xl.unmarshal_msg(buf)?;

        Ok(xl)
    }

    pub fn check_xl2_v1(buf: &[u8]) -> Result<(&[u8], u16, u16)> {
        if buf.len() < 8 {
            return Err(Error::other("xl file header not exists"));
        }

        if buf[0..4] != XL_FILE_HEADER {
            return Err(Error::other("xl file header err"));
        }

        let major = byteorder::LittleEndian::read_u16(&buf[4..6]);
        let minor = byteorder::LittleEndian::read_u16(&buf[6..8]);
        if major > XL_FILE_VERSION_MAJOR {
            return Err(Error::other("xl file version err"));
        }

        Ok((&buf[8..], major, minor))
    }

    pub fn read_bytes_header(buf: &[u8]) -> Result<(u32, &[u8])> {
        let (mut size_buf, _) = buf.split_at(5);

        let bin_len = rmp::decode::read_bin_len(&mut size_buf)?;

        Ok((bin_len, &buf[5..]))
    }

    pub fn unmarshal_msg(&mut self, buf: &[u8]) -> Result<u64> {
        let i = buf.len() as u64;

        let (buf, _, _) = Self::check_xl2_v1(buf)?;

        let (mut size_buf, buf) = buf.split_at(5);

        let bin_len = rmp::decode::read_bin_len(&mut size_buf)?;

        if buf.len() < bin_len as usize {
            return Err(Error::other("insufficient data for metadata"));
        }
        let (meta, buf) = buf.split_at(bin_len as usize);

        if buf.len() < 5 {
            return Err(Error::other("insufficient data for CRC"));
        }
        let (mut crc_buf, buf) = buf.split_at(5);

        let crc = rmp::decode::read_u32(&mut crc_buf)?;
        let meta_crc = xxh64::xxh64(meta, XXHASH_SEED) as u32;

        if crc != meta_crc {
            return Err(Error::other("xl file crc check failed"));
        }

        if !buf.is_empty() {
            self.data.update(buf);
            self.data.validate()?;
        }

        if !meta.is_empty() {
            let (versions_len, _, meta_ver, meta) = Self::decode_xl_headers(meta)?;

            self.meta_ver = meta_ver;

            self.versions = Vec::with_capacity(versions_len);

            let mut cur: Cursor<&[u8]> = Cursor::new(meta);
            for _ in 0..versions_len {
                let bin_len = rmp::decode::read_bin_len(&mut cur)? as usize;
                let start = cur.position() as usize;
                let end = start + bin_len;
                let header_buf = &meta[start..end];

                let mut ver = FileMetaShallowVersion::default();
                ver.header.unmarshal_msg(header_buf)?;

                cur.set_position(end as u64);

                let bin_len = rmp::decode::read_bin_len(&mut cur)? as usize;
                let start = cur.position() as usize;
                let end = start + bin_len;
                let ver_meta_buf = &meta[start..end];

                ver.meta.extend_from_slice(ver_meta_buf);

                cur.set_position(end as u64);

                self.versions.push(ver);
            }
        }

        Ok(i)
    }

    fn decode_xl_headers(buf: &[u8]) -> Result<(usize, u8, u8, &[u8])> {
        let mut cur = Cursor::new(buf);

        let header_ver: u8 = rmp::decode::read_int(&mut cur)?;

        if header_ver > XL_HEADER_VERSION {
            return Err(Error::other("xl header version invalid"));
        }

        let meta_ver: u8 = rmp::decode::read_int(&mut cur)?;
        if meta_ver > XL_META_VERSION {
            return Err(Error::other("xl meta version invalid"));
        }

        let versions_len: usize = rmp::decode::read_int(&mut cur)?;

        Ok((versions_len, header_ver, meta_ver, &buf[cur.position() as usize..]))
    }

    fn decode_versions<F: FnMut(usize, &[u8], &[u8]) -> Result<()>>(buf: &[u8], versions: usize, mut fnc: F) -> Result<()> {
        let mut cur: Cursor<&[u8]> = Cursor::new(buf);

        for i in 0..versions {
            let bin_len = rmp::decode::read_bin_len(&mut cur)? as usize;
            let start = cur.position() as usize;
            let end = start + bin_len;
            let header_buf = &buf[start..end];

            cur.set_position(end as u64);

            let bin_len = rmp::decode::read_bin_len(&mut cur)? as usize;
            let start = cur.position() as usize;
            let end = start + bin_len;
            let ver_meta_buf = &buf[start..end];

            cur.set_position(end as u64);

            if let Err(err) = fnc(i, header_buf, ver_meta_buf) {
                if err == Error::DoneForNow {
                    return Ok(());
                }

                return Err(err);
            }
        }

        Ok(())
    }

    pub fn is_latest_delete_marker(buf: &[u8]) -> bool {
        let header = Self::decode_xl_headers(buf).ok();
        if let Some((versions, _hdr_v, _meta_v, meta)) = header {
            if versions == 0 {
                return false;
            }

            let mut is_delete_marker = false;

            let _ = Self::decode_versions(meta, versions, |_: usize, hdr: &[u8], _: &[u8]| {
                let mut header = FileMetaVersionHeader::default();
                if header.unmarshal_msg(hdr).is_err() {
                    return Err(Error::DoneForNow);
                }

                is_delete_marker = header.version_type == VersionType::Delete;

                Err(Error::DoneForNow)
            });

            is_delete_marker
        } else {
            false
        }
    }

    pub fn marshal_msg(&self) -> Result<Vec<u8>> {
        let mut wr = Vec::new();

        wr.write_all(XL_FILE_HEADER.as_slice())?;

        let mut major = [0u8; 2];
        byteorder::LittleEndian::write_u16(&mut major, XL_FILE_VERSION_MAJOR);
        wr.write_all(major.as_slice())?;

        let mut minor = [0u8; 2];
        byteorder::LittleEndian::write_u16(&mut minor, XL_FILE_VERSION_MINOR);
        wr.write_all(minor.as_slice())?;

        // Placeholder msgpack bin32 header; the real length is patched in below.
        wr.write_all(&[0xc6, 0, 0, 0, 0])?;

        let offset = wr.len();

        rmp::encode::write_uint8(&mut wr, XL_HEADER_VERSION)?;
        rmp::encode::write_uint8(&mut wr, XL_META_VERSION)?;

        rmp::encode::write_sint(&mut wr, self.versions.len() as i64)?;

        for ver in self.versions.iter() {
            let hmsg = ver.header.marshal_msg()?;
            rmp::encode::write_bin(&mut wr, &hmsg)?;

            rmp::encode::write_bin(&mut wr, &ver.meta)?;
        }

        let data_len = wr.len() - offset;
        byteorder::BigEndian::write_u32(&mut wr[offset - 4..offset], data_len as u32);

        let crc = xxh64::xxh64(&wr[offset..], XXHASH_SEED) as u32;
        let mut crc_buf = [0u8; 5];
        crc_buf[0] = 0xce;
        byteorder::BigEndian::write_u32(&mut crc_buf[1..], crc);

        wr.write_all(&crc_buf)?;

        wr.write_all(self.data.as_slice())?;

        Ok(wr)
    }

    fn get_idx(&self, idx: usize) -> Result<FileMetaVersion> {
        if idx >= self.versions.len() {
            return Err(Error::FileNotFound);
        }

        FileMetaVersion::try_from(self.versions[idx].meta.as_slice())
    }

    fn set_idx(&mut self, idx: usize, ver: FileMetaVersion) -> Result<()> {
        if idx >= self.versions.len() {
            return Err(Error::FileNotFound);
        }

        let meta_buf = ver.marshal_msg()?;

        let pre_mod_time = self.versions[idx].header.mod_time;

        self.versions[idx].header = ver.header();
        self.versions[idx].meta = meta_buf;

        if pre_mod_time != self.versions[idx].header.mod_time {
            self.sort_by_mod_time();
        }

        Ok(())
    }

    fn sort_by_mod_time(&mut self) {
        if self.versions.len() <= 1 {
            return;
        }

        // Keep the newest version first, matching the ordering checked by
        // is_sorted_by_mod_time().
        self.versions.sort_by(|a, b| b.header.mod_time.cmp(&a.header.mod_time));
    }

    pub fn find_version(&self, vid: Option<Uuid>) -> Result<(usize, FileMetaVersion)> {
        for (i, fver) in self.versions.iter().enumerate() {
            if fver.header.version_id == vid {
                let version = self.get_idx(i)?;
                return Ok((i, version));
            }
        }

        Err(Error::FileVersionNotFound)
    }

    pub fn shard_data_dir_count(&self, vid: &Option<Uuid>, data_dir: &Option<Uuid>) -> usize {
        self.versions
            .iter()
            .filter(|v| v.header.version_type == VersionType::Object && v.header.version_id != *vid && v.header.user_data_dir())
            .map(|v| FileMetaVersion::decode_data_dir_from_meta(&v.meta).unwrap_or_default())
            .filter(|v| v == data_dir)
            .count()
    }

    pub fn update_object_version(&mut self, fi: FileInfo) -> Result<()> {
        for version in self.versions.iter_mut() {
            match version.header.version_type {
                VersionType::Invalid | VersionType::Legacy => (),
                VersionType::Object => {
                    if version.header.version_id == fi.version_id {
                        let mut ver = FileMetaVersion::try_from(version.meta.as_slice())?;

                        if let Some(ref mut obj) = ver.object {
                            for (k, v) in fi.metadata.iter() {
                                obj.meta_user.insert(k.clone(), v.clone());
                            }

                            if let Some(mod_time) = fi.mod_time {
                                obj.mod_time = Some(mod_time);
                            }
                        }

                        version.header = ver.header();
                        version.meta = ver.marshal_msg()?;
                    }
                }
                VersionType::Delete => {
                    if version.header.version_id == fi.version_id {
                        return Err(Error::MethodNotAllowed);
                    }
                }
            }
        }

        self.versions.sort_by(|a, b| {
            if a.header.mod_time != b.header.mod_time {
                a.header.mod_time.cmp(&b.header.mod_time)
            } else if a.header.version_type != b.header.version_type {
                a.header.version_type.cmp(&b.header.version_type)
            } else if a.header.version_id != b.header.version_id {
                a.header.version_id.cmp(&b.header.version_id)
            } else if a.header.flags != b.header.flags {
                a.header.flags.cmp(&b.header.flags)
            } else {
                a.cmp(b)
            }
        });
        Ok(())
    }

    pub fn add_version(&mut self, fi: FileInfo) -> Result<()> {
        let vid = fi.version_id;

        if let Some(ref data) = fi.data {
            let key = vid.unwrap_or_default().to_string();
            self.data.replace(&key, data.to_vec())?;
        }

        let version = FileMetaVersion::from(fi);

        if !version.valid() {
            return Err(Error::other("file meta version invalid"));
        }

        for (idx, ver) in self.versions.iter().enumerate() {
            if ver.header.version_id != vid {
                continue;
            }

            return self.set_idx(idx, version);
        }

        let mod_time = version.get_mod_time();

        self.versions.push(FileMetaShallowVersion {
            header: FileMetaVersionHeader {
                mod_time: Some(OffsetDateTime::from_unix_timestamp(-1)?),
                ..Default::default()
            },
            ..Default::default()
        });

        for (idx, exist) in self.versions.iter().enumerate() {
            if let Some(ref ex_mt) = exist.header.mod_time {
                if let Some(ref in_md) = mod_time {
                    if ex_mt <= in_md {
                        self.versions.insert(idx, FileMetaShallowVersion::try_from(version)?);
                        self.versions.pop();
                        return Ok(());
                    }
                }
            }
        }

        Err(Error::other("add_version failed"))
    }

    pub fn add_version_filemata(&mut self, ver: FileMetaVersion) -> Result<()> {
        // Compare full timestamps, not just the sub-second component.
        let mod_time = ver.get_mod_time().unwrap().unix_timestamp_nanos();
        if !ver.valid() {
            return Err(Error::other("attempted to add invalid version"));
        }
        let encoded = ver.marshal_msg()?;

        if self.versions.len() + 1 > 100 {
            return Err(Error::other(
                "You've exceeded the limit on the number of versions you can create on this object",
            ));
        }

        self.versions.push(FileMetaShallowVersion {
            header: FileMetaVersionHeader {
                mod_time: Some(OffsetDateTime::from_unix_timestamp(-1)?),
                ..Default::default()
            },
            ..Default::default()
        });

        let len = self.versions.len();
        for (i, existing) in self.versions.iter().enumerate() {
            if existing.header.mod_time.unwrap().unix_timestamp_nanos() <= mod_time {
                let vers = self.versions[i..len - 1].to_vec();
                self.versions[i + 1..].clone_from_slice(vers.as_slice());
                self.versions[i] = FileMetaShallowVersion {
                    header: ver.header(),
                    meta: encoded,
                };
                return Ok(());
            }
        }
        Err(Error::other("addVersion: Internal error, unable to add version"))
    }

    pub fn delete_version(&mut self, fi: &FileInfo) -> Result<Option<Uuid>> {
        let mut ventry = FileMetaVersion::default();
        if fi.deleted {
            ventry.version_type = VersionType::Delete;
            ventry.delete_marker = Some(MetaDeleteMarker {
                version_id: fi.version_id,
                mod_time: fi.mod_time,
                ..Default::default()
            });

            if !fi.is_valid() {
                return Err(Error::other("invalid file meta version"));
            }
        }

        for (i, ver) in self.versions.iter().enumerate() {
            if ver.header.version_id != fi.version_id {
                continue;
            }

            match ver.header.version_type {
                VersionType::Invalid | VersionType::Legacy => return Err(Error::other("invalid file meta version")),
                VersionType::Delete => return Ok(None),
                VersionType::Object => {
                    let v = self.get_idx(i)?;

                    self.versions.remove(i);

                    let a = v.object.map(|v| v.data_dir).unwrap_or_default();
                    return Ok(a);
                }
            }
        }

        for (i, version) in self.versions.iter().enumerate() {
            if version.header.version_type != VersionType::Object || version.header.version_id != fi.version_id {
                continue;
            }

            let mut ver = self.get_idx(i)?;

            if fi.expire_restored {
                ver.object.as_mut().unwrap().remove_restore_hdrs();
                let _ = self.set_idx(i, ver.clone());
            } else if fi.transition_status == TRANSITION_COMPLETE {
                ver.object.as_mut().unwrap().set_transition(fi);
                ver.object.as_mut().unwrap().reset_inline_data();
                self.set_idx(i, ver.clone())?;
            } else {
                // Actually drop the deleted version; a free version may be added
                // below to keep tracking any tiered content it still owns.
                self.versions.remove(i);
                let (free_version, to_free) = ver.object.as_ref().unwrap().init_free_version(fi);
                if to_free {
                    self.add_version_filemata(free_version)?;
                }
            }

            if fi.deleted {
                self.add_version_filemata(ventry)?;
            }
            if self.shared_data_dir_count(ver.object.as_ref().unwrap().version_id, ver.object.as_ref().unwrap().data_dir) > 0 {
                return Ok(None);
            }
            return Ok(ver.object.as_ref().unwrap().data_dir);
        }

        if fi.deleted {
            self.add_version_filemata(ventry)?;
        }

        Err(Error::FileVersionNotFound)
    }

    pub fn into_fileinfo(
        &self,
        volume: &str,
        path: &str,
        version_id: &str,
        read_data: bool,
        all_parts: bool,
    ) -> Result<FileInfo> {
        let has_vid = {
            if !version_id.is_empty() {
                let id = Uuid::parse_str(version_id)?;
                if !id.is_nil() { Some(id) } else { None }
            } else {
                None
            }
        };

        let mut is_latest = true;
        let mut succ_mod_time = None;

        for ver in self.versions.iter() {
            let header = &ver.header;

            if let Some(vid) = has_vid {
                if header.version_id != Some(vid) {
                    is_latest = false;
                    succ_mod_time = header.mod_time;
                    continue;
                }
            }

            let mut fi = ver.into_fileinfo(volume, path, all_parts)?;
            fi.is_latest = is_latest;

            if let Some(_d) = succ_mod_time {
                fi.successor_mod_time = succ_mod_time;
            }

            if read_data {
                fi.data = self
                    .data
                    .find(fi.version_id.unwrap_or_default().to_string().as_str())?
                    .map(bytes::Bytes::from);
            }

            fi.num_versions = self.versions.len();

            return Ok(fi);
        }

        if has_vid.is_none() {
            Err(Error::FileNotFound)
        } else {
            Err(Error::FileVersionNotFound)
        }
    }

    pub fn into_file_info_versions(&self, volume: &str, path: &str, all_parts: bool) -> Result<FileInfoVersions> {
        let mut versions = Vec::new();
        for version in self.versions.iter() {
            let mut file_version = FileMetaVersion::default();
            file_version.unmarshal_msg(&version.meta)?;
            let fi = file_version.into_fileinfo(volume, path, all_parts);
            versions.push(fi);
        }

        let num = versions.len();
        let mut prev_mod_time = None;
        for (i, fi) in versions.iter_mut().enumerate() {
            if i == 0 {
                fi.is_latest = true;
            } else {
                fi.successor_mod_time = prev_mod_time;
            }
            fi.num_versions = num;
            prev_mod_time = fi.mod_time;
        }

        if versions.is_empty() {
            versions.push(FileInfo {
                name: path.to_string(),
                volume: volume.to_string(),
                deleted: true,
                is_latest: true,
                ..Default::default()
            });
        }

        Ok(FileInfoVersions {
            volume: volume.to_string(),
            name: path.to_string(),
            latest_mod_time: versions[0].mod_time,
            versions,
            ..Default::default()
        })
    }

    pub fn lastest_mod_time(&self) -> Option<OffsetDateTime> {
        if self.versions.is_empty() {
            return None;
        }

        self.versions.first().unwrap().header.mod_time
    }

    pub fn is_compatible_with_meta(&self) -> bool {
        self.meta_ver == XL_META_VERSION
    }

    pub fn validate_integrity(&self) -> Result<()> {
        if !self.is_sorted_by_mod_time() {
            return Err(Error::other("versions not sorted by modification time"));
        }

        self.data.validate()?;

        Ok(())
    }

    fn is_sorted_by_mod_time(&self) -> bool {
        if self.versions.len() <= 1 {
            return true;
        }

        for i in 1..self.versions.len() {
            let prev_time = self.versions[i - 1].header.mod_time;
            let curr_time = self.versions[i].header.mod_time;

            match (prev_time, curr_time) {
                (Some(prev), Some(curr)) => {
                    if prev < curr {
                        return false;
                    }
                }
                (None, Some(_)) => return false,
                _ => continue,
            }
        }

        true
    }

    pub fn get_version_stats(&self) -> VersionStats {
        let mut stats = VersionStats {
            total_versions: self.versions.len(),
            ..Default::default()
        };

        for version in &self.versions {
            match version.header.version_type {
                VersionType::Object => stats.object_versions += 1,
                VersionType::Delete => stats.delete_markers += 1,
                VersionType::Invalid | VersionType::Legacy => stats.invalid_versions += 1,
            }

            if version.header.free_version() {
                stats.free_versions += 1;
            }
        }

        stats
    }

    pub fn load_or_convert(buf: &[u8]) -> Result<Self> {
        match Self::load(buf) {
            Ok(meta) => Ok(meta),
            Err(_) => Self::load_legacy(buf),
        }
    }

    pub fn load_legacy(_buf: &[u8]) -> Result<Self> {
        Err(Error::other("Legacy format not yet implemented"))
    }

    pub fn get_data_dirs(&self) -> Result<Vec<Option<Uuid>>> {
        let mut data_dirs = Vec::new();
        for version in &self.versions {
            if version.header.version_type == VersionType::Object {
                let ver = FileMetaVersion::try_from(version.meta.as_slice())?;
                data_dirs.push(ver.get_data_dir());
            }
        }
        Ok(data_dirs)
    }

    pub fn shared_data_dir_count(&self, version_id: Option<Uuid>, data_dir: Option<Uuid>) -> usize {
        self.versions
            .iter()
            .filter(|v| {
                v.header.version_type == VersionType::Object && v.header.version_id != version_id && v.header.user_data_dir()
            })
            .filter_map(|v| FileMetaVersion::decode_data_dir_from_meta(&v.meta).ok().flatten())
            .filter(|&dir| Some(dir) == data_dir)
            .count()
    }

    pub fn add_legacy(&mut self, _legacy_obj: &str) -> Result<()> {
        Err(Error::other("Legacy version addition not yet implemented"))
    }

    pub fn list_versions(&self, volume: &str, path: &str, all_parts: bool) -> Result<Vec<FileInfo>> {
        let mut file_infos = Vec::new();
        for (i, version) in self.versions.iter().enumerate() {
            let mut fi = version.into_fileinfo(volume, path, all_parts)?;
            fi.is_latest = i == 0;
            file_infos.push(fi);
        }
        Ok(file_infos)
    }

    pub fn all_hidden(&self, top_delete_marker: bool) -> bool {
        if self.versions.is_empty() {
            return true;
        }

        if top_delete_marker && self.versions[0].header.version_type != VersionType::Delete {
            return false;
        }

        self.versions
            .iter()
            .all(|v| v.header.version_type == VersionType::Delete || v.header.free_version())
    }

    pub fn append_to(&self, dst: &mut Vec<u8>) -> Result<()> {
        let data = self.marshal_msg()?;
        dst.extend_from_slice(&data);
        Ok(())
    }

    pub fn find_version_str(&self, version_id: &str) -> Result<(usize, FileMetaVersion)> {
        if version_id.is_empty() {
            return Err(Error::other("empty version ID"));
        }

        let uuid = Uuid::parse_str(version_id)?;
        self.find_version(Some(uuid))
    }
}

#[derive(Serialize, Deserialize, Debug, Default, PartialEq, Clone, Eq, PartialOrd, Ord)]
pub struct FileMetaShallowVersion {
    pub header: FileMetaVersionHeader,
    pub meta: Vec<u8>,
}

impl FileMetaShallowVersion {
    pub fn into_fileinfo(&self, volume: &str, path: &str, all_parts: bool) -> Result<FileInfo> {
        let file_version = FileMetaVersion::try_from(self.meta.as_slice())?;

        Ok(file_version.into_fileinfo(volume, path, all_parts))
    }
}

impl TryFrom<FileMetaVersion> for FileMetaShallowVersion {
    type Error = Error;

    fn try_from(value: FileMetaVersion) -> std::result::Result<Self, Self::Error> {
        let header = value.header();
        let meta = value.marshal_msg()?;
        Ok(Self { meta, header })
    }
}

#[derive(Serialize, Deserialize, Debug, Default, Clone, PartialEq)]
pub struct FileMetaVersion {
    #[serde(rename = "Type")]
    pub version_type: VersionType,
    #[serde(rename = "V2Obj")]
    pub object: Option<MetaObject>,
    #[serde(rename = "DelObj")]
    pub delete_marker: Option<MetaDeleteMarker>,
    #[serde(rename = "v")]
    pub write_version: u64,
}

impl FileMetaVersion {
    pub fn valid(&self) -> bool {
        if !self.version_type.valid() {
            return false;
        }

        match self.version_type {
            VersionType::Object => self
                .object
                .as_ref()
                .map(|v| v.erasure_algorithm.valid() && v.bitrot_checksum_algo.valid() && v.mod_time.is_some())
                .unwrap_or_default(),
            VersionType::Delete => self
                .delete_marker
                .as_ref()
                .map(|v| v.mod_time.unwrap_or(OffsetDateTime::UNIX_EPOCH) > OffsetDateTime::UNIX_EPOCH)
                .unwrap_or_default(),
            _ => false,
        }
    }

    pub fn get_data_dir(&self) -> Option<Uuid> {
        if self.valid() && self.version_type == VersionType::Object {
            self.object.as_ref().and_then(|v| v.data_dir)
        } else {
            None
        }
    }

    pub fn get_version_id(&self) -> Option<Uuid> {
        match self.version_type {
            VersionType::Object => self.object.as_ref().and_then(|v| v.version_id),
            // Delete markers carry their version id on the delete marker, not on `object`.
            VersionType::Delete => self.delete_marker.as_ref().and_then(|v| v.version_id),
            _ => None,
        }
    }

    pub fn get_mod_time(&self) -> Option<OffsetDateTime> {
        match self.version_type {
            VersionType::Object => self.object.as_ref().map(|v| v.mod_time).unwrap_or_default(),
            VersionType::Delete => self.delete_marker.as_ref().map(|v| v.mod_time).unwrap_or_default(),
            _ => None,
        }
    }

    pub fn decode_data_dir_from_meta(buf: &[u8]) -> Result<Option<Uuid>> {
        let mut ver = Self::default();
        ver.unmarshal_msg(buf)?;

        let data_dir = ver.object.map(|v| v.data_dir).unwrap_or_default();
        Ok(data_dir)
    }

    pub fn unmarshal_msg(&mut self, buf: &[u8]) -> Result<u64> {
        let ret: Self = rmp_serde::from_slice(buf)?;

        *self = ret;

        Ok(buf.len() as u64)
    }

    pub fn marshal_msg(&self) -> Result<Vec<u8>> {
        let buf = rmp_serde::to_vec(self)?;
        Ok(buf)
    }

    pub fn free_version(&self) -> bool {
        self.version_type == VersionType::Delete && self.delete_marker.as_ref().map(|m| m.free_version()).unwrap_or_default()
    }

    pub fn header(&self) -> FileMetaVersionHeader {
        FileMetaVersionHeader::from(self.clone())
    }

    pub fn into_fileinfo(&self, volume: &str, path: &str, all_parts: bool) -> FileInfo {
        match self.version_type {
            VersionType::Invalid | VersionType::Legacy => FileInfo {
                name: path.to_string(),
                volume: volume.to_string(),
                ..Default::default()
            },
            VersionType::Object => self
                .object
                .as_ref()
                .unwrap_or(&MetaObject::default())
                .into_fileinfo(volume, path, all_parts),
            VersionType::Delete => self
                .delete_marker
                .as_ref()
                .unwrap_or(&MetaDeleteMarker::default())
                .into_fileinfo(volume, path, all_parts),
        }
    }

    pub fn is_legacy(&self) -> bool {
        self.version_type == VersionType::Legacy
    }

    pub fn get_signature(&self) -> [u8; 4] {
        match self.version_type {
            VersionType::Object => {
                if let Some(ref obj) = self.object {
                    let mut hasher = xxhash_rust::xxh64::Xxh64::new(XXHASH_SEED);
                    hasher.update(obj.version_id.unwrap_or_default().as_bytes());
                    if let Some(mod_time) = obj.mod_time {
                        hasher.update(&mod_time.unix_timestamp_nanos().to_le_bytes());
                    }
                    let hash = hasher.finish();
                    let bytes = hash.to_le_bytes();
                    [bytes[0], bytes[1], bytes[2], bytes[3]]
                } else {
                    [0; 4]
                }
            }
            VersionType::Delete => {
                if let Some(ref dm) = self.delete_marker {
                    let mut hasher = xxhash_rust::xxh64::Xxh64::new(XXHASH_SEED);
                    hasher.update(dm.version_id.unwrap_or_default().as_bytes());
                    if let Some(mod_time) = dm.mod_time {
                        hasher.update(&mod_time.unix_timestamp_nanos().to_le_bytes());
                    }
                    let hash = hasher.finish();
                    let bytes = hash.to_le_bytes();
                    [bytes[0], bytes[1], bytes[2], bytes[3]]
                } else {
                    [0; 4]
                }
            }
            _ => [0; 4],
        }
    }

    pub fn uses_data_dir(&self) -> bool {
        match self.version_type {
            VersionType::Object => self.object.as_ref().map(|obj| obj.uses_data_dir()).unwrap_or(false),
            _ => false,
        }
    }

    pub fn uses_inline_data(&self) -> bool {
        match self.version_type {
            VersionType::Object => self.object.as_ref().map(|obj| obj.inlinedata()).unwrap_or(false),
            _ => false,
        }
    }
}

impl TryFrom<&[u8]> for FileMetaVersion {
    type Error = Error;

    fn try_from(value: &[u8]) -> std::result::Result<Self, Self::Error> {
        let mut ver = FileMetaVersion::default();
        ver.unmarshal_msg(value)?;
        Ok(ver)
    }
}

impl From<FileInfo> for FileMetaVersion {
    fn from(value: FileInfo) -> Self {
        if value.deleted {
            FileMetaVersion {
                version_type: VersionType::Delete,
                delete_marker: Some(MetaDeleteMarker::from(value)),
                object: None,
                write_version: 0,
            }
        } else {
            FileMetaVersion {
                version_type: VersionType::Object,
                delete_marker: None,
                object: Some(value.into()),
                write_version: 0,
            }
        }
    }
}

impl TryFrom<FileMetaShallowVersion> for FileMetaVersion {
    type Error = Error;

    fn try_from(value: FileMetaShallowVersion) -> std::result::Result<Self, Self::Error> {
        FileMetaVersion::try_from(value.meta.as_slice())
    }
}

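/// Compact per-version header. `marshal_msg` serializes it as a 7-element
/// msgpack array: version id (16-byte bin), mod time as unix nanos (i64),
/// signature (4-byte bin), type, flags, ec_n, ec_m.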
#[derive(Serialize, Deserialize, Debug, PartialEq, Default, Clone, Eq, Hash)]
pub struct FileMetaVersionHeader {
    pub version_id: Option<Uuid>,
    pub mod_time: Option<OffsetDateTime>,
    pub signature: [u8; 4],
    pub version_type: VersionType,
    pub flags: u8,
    pub ec_n: u8,
    pub ec_m: u8,
}

impl FileMetaVersionHeader {
    pub fn has_ec(&self) -> bool {
        self.ec_m > 0 && self.ec_n > 0
    }

    pub fn matches_not_strict(&self, o: &FileMetaVersionHeader) -> bool {
        let mut ok = self.version_id == o.version_id && self.version_type == o.version_type && self.matches_ec(o);
        if self.version_id.is_none() {
            ok = ok && self.mod_time == o.mod_time;
        }

        ok
    }

    pub fn matches_ec(&self, o: &FileMetaVersionHeader) -> bool {
        if self.has_ec() && o.has_ec() {
            return self.ec_n == o.ec_n && self.ec_m == o.ec_m;
        }

        true
    }

    pub fn free_version(&self) -> bool {
        self.flags & XL_FLAG_FREE_VERSION != 0
    }

    pub fn sorts_before(&self, o: &FileMetaVersionHeader) -> bool {
        if self == o {
            return false;
        }

        // Newer versions sort first.
        if self.mod_time != o.mod_time {
            return self.mod_time > o.mod_time;
        }

        if self.version_type != o.version_type {
            return self.version_type < o.version_type;
        }

        match self.version_id.cmp(&o.version_id) {
            Ordering::Greater => return true,
            Ordering::Less => return false,
            _ => {}
        }

        if self.flags != o.flags {
            return self.flags > o.flags;
        }

        false
    }

    pub fn user_data_dir(&self) -> bool {
        self.flags & Flags::UsesDataDir as u8 != 0
    }

    pub fn marshal_msg(&self) -> Result<Vec<u8>> {
        let mut wr = Vec::new();

        rmp::encode::write_array_len(&mut wr, 7)?;

        rmp::encode::write_bin(&mut wr, self.version_id.unwrap_or_default().as_bytes())?;
        rmp::encode::write_i64(&mut wr, self.mod_time.unwrap_or(OffsetDateTime::UNIX_EPOCH).unix_timestamp_nanos() as i64)?;
        rmp::encode::write_bin(&mut wr, self.signature.as_slice())?;
        rmp::encode::write_uint8(&mut wr, self.version_type.to_u8())?;
        rmp::encode::write_uint8(&mut wr, self.flags)?;
        rmp::encode::write_uint8(&mut wr, self.ec_n)?;
        rmp::encode::write_uint8(&mut wr, self.ec_m)?;

        Ok(wr)
    }

    pub fn unmarshal_msg(&mut self, buf: &[u8]) -> Result<u64> {
        let mut cur = Cursor::new(buf);
        let alen = rmp::decode::read_array_len(&mut cur)?;
        if alen != 7 {
            return Err(Error::other(format!("version header array len err need 7 got {alen}")));
        }

        rmp::decode::read_bin_len(&mut cur)?;
        let mut buf = [0u8; 16];
        cur.read_exact(&mut buf)?;
        self.version_id = {
            let id = Uuid::from_bytes(buf);
            if id.is_nil() { None } else { Some(id) }
        };

        let unix: i128 = rmp::decode::read_int(&mut cur)?;

        let time = OffsetDateTime::from_unix_timestamp_nanos(unix)?;
        if time == OffsetDateTime::UNIX_EPOCH {
            self.mod_time = None;
        } else {
            self.mod_time = Some(time);
        }

        rmp::decode::read_bin_len(&mut cur)?;
        cur.read_exact(&mut self.signature)?;

        let typ: u8 = rmp::decode::read_int(&mut cur)?;
        self.version_type = VersionType::from_u8(typ);

        self.flags = rmp::decode::read_int(&mut cur)?;
        self.ec_n = rmp::decode::read_int(&mut cur)?;
        self.ec_m = rmp::decode::read_int(&mut cur)?;

        Ok(cur.position())
    }

    pub fn get_signature(&self) -> [u8; 4] {
        self.signature
    }

    pub fn inline_data(&self) -> bool {
        self.flags & Flags::InlineData as u8 != 0
    }

    pub fn update_signature(&mut self, version: &FileMetaVersion) {
        self.signature = version.get_signature();
    }
}

impl PartialOrd for FileMetaVersionHeader {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl Ord for FileMetaVersionHeader {
    fn cmp(&self, other: &Self) -> Ordering {
        match self.mod_time.cmp(&other.mod_time) {
            core::cmp::Ordering::Equal => {}
            ord => return ord,
        }

        match self.version_type.cmp(&other.version_type) {
            core::cmp::Ordering::Equal => {}
            ord => return ord,
        }
        match self.signature.cmp(&other.signature) {
            core::cmp::Ordering::Equal => {}
            ord => return ord,
        }
        match self.version_id.cmp(&other.version_id) {
            core::cmp::Ordering::Equal => {}
            ord => return ord,
        }
        self.flags.cmp(&other.flags)
    }
}

impl From<FileMetaVersion> for FileMetaVersionHeader {
    fn from(value: FileMetaVersion) -> Self {
        let flags = {
            let mut f: u8 = 0;
            if value.free_version() {
                f |= Flags::FreeVersion as u8;
            }

            if value.version_type == VersionType::Object && value.object.as_ref().map(|v| v.uses_data_dir()).unwrap_or_default() {
                f |= Flags::UsesDataDir as u8;
            }

            if value.version_type == VersionType::Object && value.object.as_ref().map(|v| v.inlinedata()).unwrap_or_default() {
                f |= Flags::InlineData as u8;
            }

            f
        };

        let (ec_n, ec_m) = {
            if value.version_type == VersionType::Object && value.object.is_some() {
                (
                    value.object.as_ref().unwrap().erasure_n as u8,
                    value.object.as_ref().unwrap().erasure_m as u8,
                )
            } else {
                (0, 0)
            }
        };

        Self {
            version_id: value.get_version_id(),
            mod_time: value.get_mod_time(),
            signature: [0, 0, 0, 0],
            version_type: value.version_type,
            flags,
            ec_n,
            ec_m,
        }
    }
}

#[derive(Serialize, Deserialize, Debug, Clone, Default, PartialEq)]
pub struct MetaObject {
    #[serde(rename = "ID")]
    pub version_id: Option<Uuid>,
    #[serde(rename = "DDir")]
    pub data_dir: Option<Uuid>,
    #[serde(rename = "EcAlgo")]
    pub erasure_algorithm: ErasureAlgo,
    #[serde(rename = "EcM")]
    pub erasure_m: usize,
    #[serde(rename = "EcN")]
    pub erasure_n: usize,
    #[serde(rename = "EcBSize")]
    pub erasure_block_size: usize,
    #[serde(rename = "EcIndex")]
    pub erasure_index: usize,
    #[serde(rename = "EcDist")]
    pub erasure_dist: Vec<u8>,
    #[serde(rename = "CSumAlgo")]
    pub bitrot_checksum_algo: ChecksumAlgo,
    #[serde(rename = "PartNums")]
    pub part_numbers: Vec<usize>,
    #[serde(rename = "PartETags")]
    pub part_etags: Vec<String>,
    #[serde(rename = "PartSizes")]
    pub part_sizes: Vec<usize>,
    #[serde(rename = "PartASizes")]
    pub part_actual_sizes: Vec<i64>,
    #[serde(rename = "PartIdx")]
    pub part_indices: Vec<Bytes>,
    #[serde(rename = "Size")]
    pub size: i64,
    #[serde(rename = "MTime")]
    pub mod_time: Option<OffsetDateTime>,
    #[serde(rename = "MetaSys")]
    pub meta_sys: HashMap<String, Vec<u8>>,
    #[serde(rename = "MetaUsr")]
    pub meta_user: HashMap<String, String>,
}

impl MetaObject {
    pub fn unmarshal_msg(&mut self, buf: &[u8]) -> Result<u64> {
        let ret: Self = rmp_serde::from_slice(buf)?;

        *self = ret;

        Ok(buf.len() as u64)
    }

    pub fn marshal_msg(&self) -> Result<Vec<u8>> {
        let buf = rmp_serde::to_vec(self)?;
        Ok(buf)
    }

    pub fn into_fileinfo(&self, volume: &str, path: &str, all_parts: bool) -> FileInfo {
        let version_id = self.version_id.filter(|&vid| !vid.is_nil());

        let parts = if all_parts {
            let mut parts = vec![ObjectPartInfo::default(); self.part_numbers.len()];

            for (i, part) in parts.iter_mut().enumerate() {
                part.number = self.part_numbers[i];
                part.size = self.part_sizes[i];
                part.actual_size = self.part_actual_sizes[i];

                if self.part_etags.len() == self.part_numbers.len() {
                    part.etag = self.part_etags[i].clone();
                }

                if self.part_indices.len() == self.part_numbers.len() {
                    part.index = if self.part_indices[i].is_empty() {
                        None
                    } else {
                        Some(self.part_indices[i].clone())
                    };
                }
            }
            parts
        } else {
            Vec::new()
        };

        let mut metadata = HashMap::with_capacity(self.meta_user.len() + self.meta_sys.len());
        for (k, v) in &self.meta_user {
            if k == AMZ_META_UNENCRYPTED_CONTENT_LENGTH || k == AMZ_META_UNENCRYPTED_CONTENT_MD5 {
                continue;
            }

            if k == AMZ_STORAGE_CLASS && v == "STANDARD" {
                continue;
            }

            metadata.insert(k.to_owned(), v.to_owned());
        }

        for (k, v) in &self.meta_sys {
            if k == AMZ_STORAGE_CLASS && v == b"STANDARD" {
                continue;
            }

            if k.starts_with(RESERVED_METADATA_PREFIX)
                || k.starts_with(RESERVED_METADATA_PREFIX_LOWER)
                || k == VERSION_PURGE_STATUS_KEY
            {
                metadata.insert(k.to_owned(), String::from_utf8(v.to_owned()).unwrap_or_default());
            }
        }

        let erasure = ErasureInfo {
            algorithm: self.erasure_algorithm.to_string(),
            data_blocks: self.erasure_m,
            parity_blocks: self.erasure_n,
            block_size: self.erasure_block_size,
            index: self.erasure_index,
            distribution: self.erasure_dist.iter().map(|&v| v as usize).collect(),
            ..Default::default()
        };

        FileInfo {
            version_id,
            erasure,
            data_dir: self.data_dir,
            mod_time: self.mod_time,
            size: self.size,
            name: path.to_string(),
            volume: volume.to_string(),
            parts,
            metadata,
            ..Default::default()
        }
    }

    pub fn set_transition(&mut self, fi: &FileInfo) {
        self.meta_sys.insert(
            format!("{RESERVED_METADATA_PREFIX_LOWER}{TRANSITION_STATUS}"),
            fi.transition_status.as_bytes().to_vec(),
        );
        self.meta_sys.insert(
            format!("{RESERVED_METADATA_PREFIX_LOWER}{TRANSITIONED_OBJECTNAME}"),
            fi.transitioned_objname.as_bytes().to_vec(),
        );
        self.meta_sys.insert(
            format!("{RESERVED_METADATA_PREFIX_LOWER}{TRANSITIONED_VERSION_ID}"),
            fi.transition_version_id.unwrap().as_bytes().to_vec(),
        );
        self.meta_sys.insert(
            format!("{RESERVED_METADATA_PREFIX_LOWER}{TRANSITION_TIER}"),
            fi.transition_tier.as_bytes().to_vec(),
        );
    }

    pub fn remove_restore_hdrs(&mut self) {
        self.meta_user.remove(X_AMZ_RESTORE.as_str());
        self.meta_user.remove(X_AMZ_RESTORE_EXPIRY_DAYS);
        self.meta_user.remove(X_AMZ_RESTORE_REQUEST_DATE);
    }

    pub fn uses_data_dir(&self) -> bool {
        true
    }

    pub fn inlinedata(&self) -> bool {
        self.meta_sys
            .contains_key(format!("{RESERVED_METADATA_PREFIX_LOWER}inline-data").as_str())
    }

    pub fn reset_inline_data(&mut self) {
        self.meta_sys
            .remove(format!("{RESERVED_METADATA_PREFIX_LOWER}inline-data").as_str());
    }

    pub fn remove_restore_headers(&mut self) {
        self.meta_sys.retain(|k, _| !k.starts_with("X-Amz-Restore"));
    }

    pub fn get_signature(&self) -> [u8; 4] {
        let mut hasher = xxhash_rust::xxh64::Xxh64::new(XXHASH_SEED);
        hasher.update(self.version_id.unwrap_or_default().as_bytes());
        if let Some(mod_time) = self.mod_time {
            hasher.update(&mod_time.unix_timestamp_nanos().to_le_bytes());
        }
        hasher.update(&self.size.to_le_bytes());
        let hash = hasher.finish();
        let bytes = hash.to_le_bytes();
        [bytes[0], bytes[1], bytes[2], bytes[3]]
    }

    pub fn init_free_version(&self, fi: &FileInfo) -> (FileMetaVersion, bool) {
        if fi.skip_tier_free_version() {
            return (FileMetaVersion::default(), false);
        }
        if let Some(status) = self
            .meta_sys
            .get(&format!("{RESERVED_METADATA_PREFIX_LOWER}{TRANSITION_STATUS}"))
        {
            if *status == TRANSITION_COMPLETE.as_bytes().to_vec() {
                let vid = Uuid::parse_str(&fi.tier_free_version_id());
                if let Err(err) = vid {
                    panic!("Invalid Tier Object delete marker versionId {} {}", fi.tier_free_version_id(), err);
                }
                let vid = vid.unwrap();
                let mut free_entry = FileMetaVersion {
                    version_type: VersionType::Delete,
                    write_version: 0,
                    ..Default::default()
                };
                free_entry.delete_marker = Some(MetaDeleteMarker {
                    version_id: Some(vid),
                    mod_time: self.mod_time,
                    meta_sys: Some(HashMap::<String, Vec<u8>>::new()),
                });

                free_entry
                    .delete_marker
                    .as_mut()
                    .unwrap()
                    .meta_sys
                    .as_mut()
                    .unwrap()
                    .insert(format!("{RESERVED_METADATA_PREFIX_LOWER}{FREE_VERSION}"), vec![]);
                let tier_key = format!("{RESERVED_METADATA_PREFIX_LOWER}{TRANSITION_TIER}");
                let tier_obj_key = format!("{RESERVED_METADATA_PREFIX_LOWER}{TRANSITIONED_OBJECTNAME}");
                let tier_obj_vid_key = format!("{RESERVED_METADATA_PREFIX_LOWER}{TRANSITIONED_VERSION_ID}");

                let aa = [tier_key, tier_obj_key, tier_obj_vid_key];
                for (k, v) in &self.meta_sys {
                    if aa.contains(k) {
                        free_entry
                            .delete_marker
                            .as_mut()
                            .unwrap()
                            .meta_sys
                            .as_mut()
                            .unwrap()
                            .insert(k.clone(), v.clone());
                    }
                }
                return (free_entry, true);
            }
        }
        (FileMetaVersion::default(), false)
    }
}

impl From<FileInfo> for MetaObject {
    fn from(value: FileInfo) -> Self {
        let part_etags = if !value.parts.is_empty() {
            value.parts.iter().map(|v| v.etag.clone()).collect()
        } else {
            vec![]
        };

        let part_indices = if !value.parts.is_empty() {
            value.parts.iter().map(|v| v.index.clone().unwrap_or_default()).collect()
        } else {
            vec![]
        };

        let mut meta_sys = HashMap::new();
        let mut meta_user = HashMap::new();
        for (k, v) in value.metadata.iter() {
            if k.len() > RESERVED_METADATA_PREFIX.len()
                && (k.starts_with(RESERVED_METADATA_PREFIX) || k.starts_with(RESERVED_METADATA_PREFIX_LOWER))
            {
                if k == headers::X_RUSTFS_HEALING || k == headers::X_RUSTFS_DATA_MOV {
                    continue;
                }

                meta_sys.insert(k.to_owned(), v.as_bytes().to_vec());
            } else {
                meta_user.insert(k.to_owned(), v.to_owned());
            }
        }

        Self {
            version_id: value.version_id,
            data_dir: value.data_dir,
            size: value.size,
            mod_time: value.mod_time,
            erasure_algorithm: ErasureAlgo::ReedSolomon,
            erasure_m: value.erasure.data_blocks,
            erasure_n: value.erasure.parity_blocks,
            erasure_block_size: value.erasure.block_size,
            erasure_index: value.erasure.index,
            erasure_dist: value.erasure.distribution.iter().map(|x| *x as u8).collect(),
            bitrot_checksum_algo: ChecksumAlgo::HighwayHash,
            part_numbers: value.parts.iter().map(|v| v.number).collect(),
            part_etags,
            part_sizes: value.parts.iter().map(|v| v.size).collect(),
            part_actual_sizes: value.parts.iter().map(|v| v.actual_size).collect(),
            part_indices,
            meta_sys,
            meta_user,
        }
    }
}

#[derive(Serialize, Deserialize, Debug, Clone, Default, PartialEq)]
pub struct MetaDeleteMarker {
    #[serde(rename = "ID")]
    pub version_id: Option<Uuid>,
    #[serde(rename = "MTime")]
    pub mod_time: Option<OffsetDateTime>,
    #[serde(rename = "MetaSys")]
    pub meta_sys: Option<HashMap<String, Vec<u8>>>,
}

impl MetaDeleteMarker {
    pub fn free_version(&self) -> bool {
        // init_free_version stores the free-version marker under the reserved
        // metadata prefix, so look it up with the same prefixed key.
        self.meta_sys
            .as_ref()
            .map(|v| v.contains_key(&format!("{RESERVED_METADATA_PREFIX_LOWER}{FREE_VERSION_META_HEADER}")))
            .unwrap_or_default()
    }

    pub fn into_fileinfo(&self, volume: &str, path: &str, _all_parts: bool) -> FileInfo {
        let metadata = self.meta_sys.clone().unwrap_or_default();

        FileInfo {
            version_id: self.version_id.filter(|&vid| !vid.is_nil()),
            name: path.to_string(),
            volume: volume.to_string(),
            deleted: true,
            mod_time: self.mod_time,
            metadata: metadata
                .into_iter()
                .map(|(k, v)| (k, String::from_utf8_lossy(&v).to_string()))
                .collect(),
            ..Default::default()
        }
    }

    pub fn unmarshal_msg(&mut self, buf: &[u8]) -> Result<u64> {
        let ret: Self = rmp_serde::from_slice(buf)?;

        *self = ret;

        Ok(buf.len() as u64)
    }

    pub fn marshal_msg(&self) -> Result<Vec<u8>> {
        let buf = rmp_serde::to_vec(self)?;
        Ok(buf)
    }

    pub fn get_signature(&self) -> [u8; 4] {
        let mut hasher = xxhash_rust::xxh64::Xxh64::new(XXHASH_SEED);
        hasher.update(self.version_id.unwrap_or_default().as_bytes());
        if let Some(mod_time) = self.mod_time {
            hasher.update(&mod_time.unix_timestamp_nanos().to_le_bytes());
        }
        let hash = hasher.finish();
        let bytes = hash.to_le_bytes();
        [bytes[0], bytes[1], bytes[2], bytes[3]]
    }
}

impl From<FileInfo> for MetaDeleteMarker {
    fn from(value: FileInfo) -> Self {
        Self {
            version_id: value.version_id,
            mod_time: value.mod_time,
            meta_sys: None,
        }
    }
}

#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, Default, Clone, PartialOrd, Ord, Hash)]
pub enum VersionType {
    #[default]
    Invalid = 0,
    Object = 1,
    Delete = 2,
    Legacy = 3,
}

impl VersionType {
    pub fn valid(&self) -> bool {
        matches!(*self, VersionType::Object | VersionType::Delete | VersionType::Legacy)
    }

    pub fn to_u8(&self) -> u8 {
        match self {
            VersionType::Invalid => 0,
            VersionType::Object => 1,
            VersionType::Delete => 2,
            VersionType::Legacy => 3,
        }
    }

    pub fn from_u8(n: u8) -> Self {
        match n {
            1 => VersionType::Object,
            2 => VersionType::Delete,
            3 => VersionType::Legacy,
            _ => VersionType::Invalid,
        }
    }
}

#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Default, Clone)]
pub enum ChecksumAlgo {
    #[default]
    Invalid = 0,
    HighwayHash = 1,
}

impl ChecksumAlgo {
    pub fn valid(&self) -> bool {
        *self > ChecksumAlgo::Invalid
    }

    pub fn to_u8(&self) -> u8 {
        match self {
            ChecksumAlgo::Invalid => 0,
            ChecksumAlgo::HighwayHash => 1,
        }
    }

    pub fn from_u8(u: u8) -> Self {
        match u {
            1 => ChecksumAlgo::HighwayHash,
            _ => ChecksumAlgo::Invalid,
        }
    }
}

#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Default, Clone)]
pub enum Flags {
    #[default]
    FreeVersion = 1 << 0,
    UsesDataDir = 1 << 1,
    InlineData = 1 << 2,
}

const FREE_VERSION_META_HEADER: &str = "free-version";

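// Merges the version lists reported by multiple sources (typically one per
// disk) into a single list. `quorum` is the minimum number of sources that
// must agree on a version for it to be kept; with `strict` the headers must
// match exactly, otherwise signature/mod-time differences are tolerated (see
// `matches_not_strict`). `requested_versions` optionally short-circuits the
// merge once that many versions have been collected.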
pub fn merge_file_meta_versions(
    mut quorum: usize,
    mut strict: bool,
    requested_versions: usize,
    versions: &[Vec<FileMetaShallowVersion>],
) -> Vec<FileMetaShallowVersion> {
    if quorum == 0 {
        quorum = 1;
    }

    if versions.len() < quorum || versions.is_empty() {
        return Vec::new();
    }

    if versions.len() == 1 {
        return versions[0].clone();
    }

    if quorum == 1 {
        strict = true;
    }

    let mut versions = versions.to_owned();

    let mut n_versions = 0;

    let mut merged = Vec::new();
    loop {
        let mut tops = Vec::new();
        let mut top_sig = FileMetaVersionHeader::default();
        let mut consistent = true;
        for vers in versions.iter() {
            if vers.is_empty() {
                consistent = false;
                continue;
            }
            if tops.is_empty() {
                consistent = true;
                top_sig = vers[0].header.clone();
            } else {
                consistent = consistent && vers[0].header == top_sig;
            }
            tops.push(vers[0].clone());
        }

        if tops.len() < quorum {
            break;
        }

        let mut latest = FileMetaShallowVersion::default();
        if consistent {
            merged.push(tops[0].clone());
            // Only count real versions; free versions are bookkeeping entries.
            if !tops[0].header.free_version() {
                n_versions += 1;
            }
        } else {
            let mut latest_count = 0;
            for (i, ver) in tops.iter().enumerate() {
                if ver.header == latest.header {
                    latest_count += 1;
                    continue;
                }

                if i == 0 || ver.header.sorts_before(&latest.header) {
                    if i == 0 || latest_count == 0 {
                        latest_count = 1;
                    } else if !strict && ver.header.matches_not_strict(&latest.header) {
                        latest_count += 1;
                    } else {
                        latest_count = 1;
                    }
                    latest = ver.clone();
                    continue;
                }

                if latest_count > 0 && !strict && ver.header.matches_not_strict(&latest.header) {
                    latest_count += 1;
                    continue;
                }

                if latest_count > 0 && ver.header.version_id == latest.header.version_id {
                    let mut x: HashMap<FileMetaVersionHeader, usize> = HashMap::new();
                    for a in tops.iter() {
                        if a.header.version_id != ver.header.version_id {
                            continue;
                        }
                        let mut a_clone = a.clone();
                        if !strict {
                            a_clone.header.signature = [0; 4];
                        }
                        *x.entry(a_clone.header).or_insert(0) += 1;
                    }
                    latest_count = 0;
                    for (k, v) in x.iter() {
                        if *v < latest_count {
                            continue;
                        }
                        if *v == latest_count && latest.header.sorts_before(k) {
                            continue;
                        }
                        tops.iter().for_each(|a| {
                            let mut hdr = a.header.clone();
                            if !strict {
                                hdr.signature = [0; 4];
                            }
                            if hdr == *k {
                                latest = a.clone();
                            }
                        });

                        latest_count = *v;
                    }
                    break;
                }
            }
            if latest_count >= quorum {
                if !latest.header.free_version() {
                    n_versions += 1;
                }
                merged.push(latest.clone());
            }
        }

        versions.iter_mut().for_each(|vers| {
            let mut bre = false;
            vers.retain(|ver| {
                if bre {
                    return true;
                }
                if let Ordering::Greater = ver.header.mod_time.cmp(&latest.header.mod_time) {
                    bre = true;
                    return false;
                }
                if ver.header == latest.header {
                    bre = true;
                    return false;
                }
                if let Ordering::Equal = latest.header.version_id.cmp(&ver.header.version_id) {
                    bre = true;
                    return false;
                }
                for merged_v in merged.iter() {
                    if let Ordering::Equal = ver.header.version_id.cmp(&merged_v.header.version_id) {
                        bre = true;
                        return false;
                    }
                }
                true
            });
        });
        if requested_versions > 0 && requested_versions == n_versions {
            merged.append(&mut versions[0]);
            break;
        }
    }

    merged
}

pub async fn file_info_from_raw(ri: RawFileInfo, bucket: &str, object: &str, read_data: bool) -> Result<FileInfo> {
    get_file_info(&ri.buf, bucket, object, "", FileInfoOpts { data: read_data }).await
}

pub struct FileInfoOpts {
    pub data: bool,
}

pub async fn get_file_info(buf: &[u8], volume: &str, path: &str, version_id: &str, opts: FileInfoOpts) -> Result<FileInfo> {
    let vid = {
        if version_id.is_empty() {
            None
        } else {
            Some(Uuid::parse_str(version_id)?)
        }
    };

    let meta = FileMeta::load(buf)?;
    if meta.versions.is_empty() {
        return Ok(FileInfo {
            volume: volume.to_owned(),
            name: path.to_owned(),
            version_id: vid,
            is_latest: true,
            deleted: true,
            mod_time: Some(OffsetDateTime::from_unix_timestamp(1)?),
            ..Default::default()
        });
    }

    let fi = meta.into_fileinfo(volume, path, version_id, opts.data, true)?;
    Ok(fi)
}

async fn read_more<R: AsyncRead + Unpin>(
    reader: &mut R,
    buf: &mut Vec<u8>,
    total_size: usize,
    read_size: usize,
    has_full: bool,
) -> Result<()> {
    use tokio::io::AsyncReadExt;
    let has = buf.len();

    if has >= read_size {
        return Ok(());
    }

    if has_full || read_size > total_size {
        return Err(Error::other(std::io::Error::new(std::io::ErrorKind::UnexpectedEof, "Unexpected EOF")));
    }

    let extra = read_size - has;
    if buf.capacity() >= read_size {
        buf.resize(read_size, 0);
    } else {
        buf.extend(vec![0u8; extra]);
    }

    reader.read_exact(&mut buf[has..]).await?;
    Ok(())
}

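// Reads just enough of an xl.meta stream to cover the header, the version
// records and the trailing CRC, without pulling in any inline data that may
// follow. `size` is the total file size; at most META_DATA_READ_DEFAULT bytes
// are read up front, and more is requested via `read_more` only when needed.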
pub async fn read_xl_meta_no_data<R: AsyncRead + Unpin>(reader: &mut R, size: usize) -> Result<Vec<u8>> {
    use tokio::io::AsyncReadExt;

    let mut initial = size;
    let mut has_full = true;

    if initial > META_DATA_READ_DEFAULT {
        initial = META_DATA_READ_DEFAULT;
        has_full = false;
    }

    let mut buf = vec![0u8; initial];
    reader.read_exact(&mut buf).await?;

    let (tmp_buf, major, minor) = FileMeta::check_xl2_v1(&buf)?;

    match major {
        1 => match minor {
            0 => {
                read_more(reader, &mut buf, size, size, has_full).await?;
                Ok(buf)
            }
            1..=3 => {
                let (sz, tmp_buf) = FileMeta::read_bytes_header(tmp_buf)?;
                let mut want = sz as usize + (buf.len() - tmp_buf.len());

                if minor < 2 {
                    read_more(reader, &mut buf, size, want, has_full).await?;
                    return Ok(buf[..want].to_vec());
                }

                let want_max = usize::min(want + MSGP_UINT32_SIZE, size);
                read_more(reader, &mut buf, size, want_max, has_full).await?;

                if buf.len() < want {
                    return Err(Error::FileCorrupt);
                }

                // Include the trailing CRC (msgpack uint32: 1-byte marker + 4 bytes).
                want += MSGP_UINT32_SIZE;

                Ok(buf[..want].to_vec())
            }
            _ => Err(Error::other(std::io::Error::new(
                std::io::ErrorKind::InvalidData,
                "Unknown minor metadata version",
            ))),
        },
        _ => Err(Error::other(std::io::Error::new(
            std::io::ErrorKind::InvalidData,
            "Unknown major metadata version",
        ))),
    }
}
#[cfg(test)]
mod test {
    use super::*;
    use crate::test_data::*;

    #[test]
    fn test_new_file_meta() {
        let mut fm = FileMeta::new();

        let (m, n) = (3, 2);

        for i in 0..5 {
            let mut fi = FileInfo::new(i.to_string().as_str(), m, n);
            fi.mod_time = Some(OffsetDateTime::now_utc());

            fm.add_version(fi).unwrap();
        }

        let buff = fm.marshal_msg().unwrap();

        let mut newfm = FileMeta::default();
        newfm.unmarshal_msg(&buff).unwrap();

        assert_eq!(fm, newfm)
    }

    #[test]
    fn test_marshal_metaobject() {
        let obj = MetaObject {
            data_dir: Some(Uuid::new_v4()),
            ..Default::default()
        };

        let encoded = obj.marshal_msg().unwrap();

        let mut obj2 = MetaObject::default();
        obj2.unmarshal_msg(&encoded).unwrap();

        assert_eq!(obj, obj2);
        assert_eq!(obj.data_dir, obj2.data_dir);
    }

    #[test]
    fn test_marshal_metadeletemarker() {
        let obj = MetaDeleteMarker {
            version_id: Some(Uuid::new_v4()),
            ..Default::default()
        };

        let encoded = obj.marshal_msg().unwrap();

        let mut obj2 = MetaDeleteMarker::default();
        obj2.unmarshal_msg(&encoded).unwrap();

        assert_eq!(obj, obj2);
        assert_eq!(obj.version_id, obj2.version_id);
    }

    #[test]
    fn test_marshal_metaversion() {
        let mut fi = FileInfo::new("tset", 3, 2);
        fi.version_id = Some(Uuid::new_v4());
        fi.mod_time = Some(OffsetDateTime::from_unix_timestamp(OffsetDateTime::now_utc().unix_timestamp()).unwrap());
        let mut obj = FileMetaVersion::from(fi);
        obj.write_version = 110;

        let encoded = obj.marshal_msg().unwrap();

        let mut obj2 = FileMetaVersion::default();
        obj2.unmarshal_msg(&encoded).unwrap();

        assert_eq!(obj, obj2);
        assert_eq!(obj.get_version_id(), obj2.get_version_id());
        assert_eq!(obj.write_version, obj2.write_version);
        assert_eq!(obj.write_version, 110);
    }

    #[test]
    fn test_marshal_metaversionheader() {
        let mut obj = FileMetaVersionHeader::default();
        let vid = Some(Uuid::new_v4());
        obj.version_id = vid;

        let encoded = obj.marshal_msg().unwrap();

        let mut obj2 = FileMetaVersionHeader::default();
        obj2.unmarshal_msg(&encoded).unwrap();

        assert_eq!(obj, obj2);
        assert_eq!(obj.version_id, obj2.version_id);
        assert_eq!(obj.version_id, vid);
    }

    #[test]
    fn test_real_xlmeta_compatibility() {
        let data = create_real_xlmeta().expect("failed to create real test data");

        assert_eq!(&data[0..4], b"XL2 ", "the file header should be 'XL2 '");
        assert_eq!(&data[4..8], &[1, 0, 3, 0], "the version should be 1.3");

        let fm = FileMeta::load(&data).expect("failed to parse real data");

        assert_eq!(fm.meta_ver, XL_META_VERSION);
        assert_eq!(fm.versions.len(), 3, "there should be 3 versions (1 object, 1 delete marker, 1 legacy)");

        let mut object_count = 0;
        let mut delete_count = 0;
        let mut legacy_count = 0;

        for version in &fm.versions {
            match version.header.version_type {
                VersionType::Object => object_count += 1,
                VersionType::Delete => delete_count += 1,
                VersionType::Legacy => legacy_count += 1,
                VersionType::Invalid => panic!("there should be no invalid versions"),
            }
        }

        assert_eq!(object_count, 1, "there should be 1 object version");
        assert_eq!(delete_count, 1, "there should be 1 delete marker");
        assert_eq!(legacy_count, 1, "there should be 1 legacy version");

        assert!(fm.is_compatible_with_meta(), "should be compatible with the xl format");

        fm.validate_integrity().expect("integrity validation failed");

        let stats = fm.get_version_stats();
        assert_eq!(stats.total_versions, 3);
        assert_eq!(stats.object_versions, 1);
        assert_eq!(stats.delete_markers, 1);
        // Note: get_version_stats reports the single legacy version under invalid_versions.
        assert_eq!(stats.invalid_versions, 1);
    }

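    // Exercises a larger, multi-version payload: ordering by mod_time, aggregate
    // statistics, and merging of version lists.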
    #[test]
    fn test_complex_xlmeta_handling() {
        let data = create_complex_xlmeta().expect("failed to create complex test data");
        let fm = FileMeta::load(&data).expect("failed to parse complex data");

        assert!(fm.versions.len() >= 10, "there should be at least 10 versions");

        assert!(fm.is_sorted_by_mod_time(), "versions should be sorted by modification time");

        let stats = fm.get_version_stats();
        assert!(stats.object_versions > 0, "there should be object versions");
        assert!(stats.delete_markers > 0, "there should be delete markers");

        let merged = merge_file_meta_versions(1, false, 0, &[fm.versions.clone()]);
        assert!(!merged.is_empty(), "the merged result should not be empty");
    }

    #[test]
    fn test_inline_data_handling() {
        let data = create_xlmeta_with_inline_data().expect("failed to create inline-data test data");
        let fm = FileMeta::load(&data).expect("failed to parse inline data");

        assert_eq!(fm.versions.len(), 1, "there should be 1 version");
        assert!(!fm.data.as_slice().is_empty(), "inline data should be present");

        let inline_data = fm.data.as_slice();
        assert!(!inline_data.is_empty(), "inline data should not be empty");
    }

    #[test]
    fn test_error_handling_and_recovery() {
        let corrupted_data = create_corrupted_xlmeta();
        let result = FileMeta::load(&corrupted_data);
        assert!(result.is_err(), "corrupted data should fail to parse");

        let empty_data = create_empty_xlmeta().expect("failed to create empty test data");
        let fm = FileMeta::load(&empty_data).expect("failed to parse empty data");
        assert_eq!(fm.versions.len(), 0, "an empty file should have no versions");
    }

    #[test]
    fn test_version_type_legacy_support() {
        assert_eq!(VersionType::Legacy.to_u8(), 3);
        assert_eq!(VersionType::from_u8(3), VersionType::Legacy);
        assert!(VersionType::Legacy.valid(), "the Legacy type should be valid");

        let legacy_version = FileMetaVersion {
            version_type: VersionType::Legacy,
            object: None,
            delete_marker: None,
            write_version: 1,
        };

        assert!(legacy_version.is_legacy(), "should be recognized as a legacy version");
    }

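    // Header signatures are fixed-size (4 bytes) and deterministic for identical
    // header contents; the assertions below rely only on those two properties.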
    #[test]
    fn test_signature_calculation() {
        let data = create_real_xlmeta().expect("failed to create test data");
        let fm = FileMeta::load(&data).expect("failed to parse test data");

        for version in &fm.versions {
            let signature = version.header.get_signature();
            assert_eq!(signature.len(), 4, "the signature should be 4 bytes");

            let signature2 = version.header.get_signature();
            assert_eq!(signature, signature2, "signatures of the same version should match");
        }
    }

    #[test]
    fn test_metadata_validation() {
        let data = create_real_xlmeta().expect("failed to create test data");
        let fm = FileMeta::load(&data).expect("failed to parse test data");

        fm.validate_integrity().expect("integrity validation should pass");

        assert!(fm.is_compatible_with_meta(), "should be compatible with the xl format");

        assert!(fm.is_sorted_by_mod_time(), "versions should be sorted by modification time");
    }

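    // Round-trips a parsed FileMeta through marshal_msg and checks that the version
    // count, metadata version, and per-version headers survive unchanged.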
    #[test]
    fn test_round_trip_serialization() {
        let original_data = create_real_xlmeta().expect("failed to create original data");
        let fm = FileMeta::load(&original_data).expect("failed to parse original data");

        let serialized_data = fm.marshal_msg().expect("re-serialization failed");

        let fm2 = FileMeta::load(&serialized_data).expect("failed to parse re-serialized data");

        assert_eq!(fm.versions.len(), fm2.versions.len(), "version counts should match");
        assert_eq!(fm.meta_ver, fm2.meta_ver, "metadata versions should match");

        for (v1, v2) in fm.versions.iter().zip(fm2.versions.iter()) {
            assert_eq!(v1.header.version_type, v2.header.version_type, "version types should match");
            assert_eq!(v1.header.version_id, v2.header.version_id, "version IDs should match");
        }
    }

    #[test]
    fn test_performance_with_large_metadata() {
        use std::time::Instant;

        let start = Instant::now();
        let data = create_complex_xlmeta().expect("failed to create large test data");
        let creation_time = start.elapsed();

        let start = Instant::now();
        let fm = FileMeta::load(&data).expect("failed to parse large data");
        let parsing_time = start.elapsed();

        let start = Instant::now();
        let _serialized = fm.marshal_msg().expect("serialization failed");
        let serialization_time = start.elapsed();

        println!("performance test results:");
        println!("  creation time: {creation_time:?}");
        println!("  parsing time: {parsing_time:?}");
        println!("  serialization time: {serialization_time:?}");

        assert!(parsing_time.as_millis() < 100, "parsing should take less than 100ms");
        assert!(serialization_time.as_millis() < 100, "serialization should take less than 100ms");
    }

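    // Covers boundary conditions: an object version with no version_id/data_dir and
    // zero-sized parts, plus a MetaObject populated with maximum size values.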
    #[test]
    fn test_edge_cases() {
        let mut fm = FileMeta::new();

        let version = FileMetaVersion {
            version_type: VersionType::Object,
            object: Some(MetaObject {
                version_id: None,
                data_dir: None,
                erasure_algorithm: crate::fileinfo::ErasureAlgo::ReedSolomon,
                erasure_m: 1,
                erasure_n: 1,
                erasure_block_size: 64 * 1024,
                erasure_index: 0,
                erasure_dist: vec![0],
                bitrot_checksum_algo: ChecksumAlgo::HighwayHash,
                part_numbers: vec![1],
                part_etags: Vec::new(),
                part_sizes: vec![0],
                part_actual_sizes: Vec::new(),
                part_indices: Vec::new(),
                size: 0,
                mod_time: None,
                meta_sys: HashMap::new(),
                meta_user: HashMap::new(),
            }),
            delete_marker: None,
            write_version: 1,
        };

        let shallow_version = FileMetaShallowVersion::try_from(version).expect("conversion failed");
        fm.versions.push(shallow_version);

        let data = fm.marshal_msg().expect("serialization failed");
        let fm2 = FileMeta::load(&data).expect("parsing failed");
        assert_eq!(fm2.versions.len(), 1);

        let large_object = MetaObject {
            size: i64::MAX,
            part_sizes: vec![usize::MAX],
            ..Default::default()
        };

        assert_eq!(large_object.size, i64::MAX);
    }

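    // Spawns ten tasks that each add a version to a shared FileMeta behind a
    // std::sync::Mutex; the lock is never held across an .await, so this is safe
    // to use from async tasks here.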
    #[tokio::test]
    async fn test_concurrent_operations() {
        use std::sync::Arc;
        use std::sync::Mutex;

        let fm = Arc::new(Mutex::new(FileMeta::new()));
        let mut handles = vec![];

        for i in 0..10 {
            let fm_clone: Arc<Mutex<FileMeta>> = Arc::clone(&fm);
            let handle = tokio::spawn(async move {
                let mut fi = crate::fileinfo::FileInfo::new(&format!("test-{i}"), 2, 1);
                fi.version_id = Some(Uuid::new_v4());
                fi.mod_time = Some(OffsetDateTime::now_utc());

                let mut fm_guard = fm_clone.lock().unwrap();
                fm_guard.add_version(fi).unwrap();
            });
            handles.push(handle);
        }

        for handle in handles {
            handle.await.unwrap();
        }

        let fm_guard = fm.lock().unwrap();
        assert_eq!(fm_guard.versions.len(), 10);
    }

    #[test]
    fn test_memory_efficiency() {
        use std::mem;

        let empty_fm = FileMeta::new();
        let empty_size = mem::size_of_val(&empty_fm);
        println!("Empty FileMeta size: {empty_size} bytes");

        let mut large_fm = FileMeta::new();
        for i in 0..100 {
            let mut fi = crate::fileinfo::FileInfo::new(&format!("test-{i}"), 2, 1);
            fi.version_id = Some(Uuid::new_v4());
            fi.mod_time = Some(OffsetDateTime::now_utc());
            large_fm.add_version(fi).unwrap();
        }

        let large_size = mem::size_of_val(&large_fm);
        println!("Large FileMeta size: {large_size} bytes");

        println!("number of versions: {}", large_fm.versions.len());
        assert!(!large_fm.versions.is_empty(), "there should be version data");
    }

    #[test]
    fn test_version_ordering_edge_cases() {
        let mut fm = FileMeta::new();

        let same_time = OffsetDateTime::now_utc();
        for i in 0..5 {
            let mut fi = crate::fileinfo::FileInfo::new(&format!("test-{i}"), 2, 1);
            fi.version_id = Some(Uuid::new_v4());
            fi.mod_time = Some(same_time);
            fm.add_version(fi).unwrap();
        }

        let original_order: Vec<_> = fm.versions.iter().map(|v| v.header.version_id).collect();
        fm.sort_by_mod_time();
        let sorted_order: Vec<_> = fm.versions.iter().map(|v| v.header.version_id).collect();

        assert_eq!(original_order.len(), sorted_order.len());
    }

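    // Both checksum algorithm variants should survive MetaObject serialization; only
    // HighwayHash is considered a valid bitrot algorithm.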
    #[test]
    fn test_checksum_algorithms() {
        let algorithms = vec![ChecksumAlgo::Invalid, ChecksumAlgo::HighwayHash];

        for algo in algorithms {
            let obj = MetaObject {
                bitrot_checksum_algo: algo.clone(),
                ..Default::default()
            };

            match algo {
                ChecksumAlgo::Invalid => assert!(!algo.valid()),
                ChecksumAlgo::HighwayHash => assert!(algo.valid()),
            }

            let data = obj.marshal_msg().unwrap();
            let mut obj2 = MetaObject::default();
            obj2.unmarshal_msg(&data).unwrap();
            assert_eq!(obj.bitrot_checksum_algo.to_u8(), obj2.bitrot_checksum_algo.to_u8());
        }
    }

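    // Round-trips MetaObject erasure parameters (data/parity block counts and the
    // distribution vector) for several common layouts.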
    #[test]
    fn test_erasure_coding_parameters() {
        let test_cases = vec![(1, 1), (2, 1), (4, 2), (8, 4)];

        for (data_blocks, parity_blocks) in test_cases {
            let obj = MetaObject {
                erasure_m: data_blocks,
                erasure_n: parity_blocks,
                erasure_dist: (0..(data_blocks + parity_blocks)).map(|i| i as u8).collect(),
                ..Default::default()
            };

            assert!(obj.erasure_m > 0, "the number of data blocks must be greater than 0");
            assert!(obj.erasure_n > 0, "the number of parity blocks must be greater than 0");
            assert_eq!(obj.erasure_dist.len(), data_blocks + parity_blocks);

            let data = obj.marshal_msg().unwrap();
            let mut obj2 = MetaObject::default();
            obj2.unmarshal_msg(&data).unwrap();
            assert_eq!(obj.erasure_m, obj2.erasure_m);
            assert_eq!(obj.erasure_n, obj2.erasure_n);
            assert_eq!(obj.erasure_dist, obj2.erasure_dist);
        }
    }

    #[test]
    fn test_metadata_size_limits() {
        let mut obj = MetaObject::default();

        for i in 0..10 {
            obj.meta_user
                .insert(format!("key-{i:04}"), format!("value-{:04}-{}", i, "x".repeat(10)));
        }

        let data = obj.marshal_msg().unwrap();
        assert!(data.len() > 100, "the serialized data should have a reasonable size");

        let mut obj2 = MetaObject::default();
        obj2.unmarshal_msg(&data).unwrap();
        assert_eq!(obj.meta_user.len(), obj2.meta_user.len());
    }

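    // Builds a FileMeta with a known mix of object versions and delete markers and
    // checks that both the simple and the detailed statistics report the same counts.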
    #[test]
    fn test_version_statistics_accuracy() {
        let mut fm = FileMeta::new();

        let object_count = 3;
        let delete_count = 2;

        for i in 0..object_count {
            let mut fi = crate::fileinfo::FileInfo::new(&format!("obj-{i}"), 2, 1);
            fi.version_id = Some(Uuid::new_v4());
            fi.mod_time = Some(OffsetDateTime::now_utc());
            fm.add_version(fi).unwrap();
        }

        for i in 0..delete_count {
            let delete_marker = MetaDeleteMarker {
                version_id: Some(Uuid::new_v4()),
                mod_time: Some(OffsetDateTime::now_utc()),
                meta_sys: None,
            };

            let delete_version = FileMetaVersion {
                version_type: VersionType::Delete,
                object: None,
                delete_marker: Some(delete_marker),
                write_version: (i + 100) as u64,
            };

            let shallow_version = FileMetaShallowVersion::try_from(delete_version).unwrap();
            fm.versions.push(shallow_version);
        }

        let stats = fm.get_version_stats();
        assert_eq!(stats.total_versions, object_count + delete_count);
        assert_eq!(stats.object_versions, object_count);
        assert_eq!(stats.delete_markers, delete_count);

        let detailed_stats = fm.get_detailed_version_stats();
        assert_eq!(detailed_stats.total_versions, object_count + delete_count);
        assert_eq!(detailed_stats.object_versions, object_count);
        assert_eq!(detailed_stats.delete_markers, delete_count);
    }

    #[test]
    fn test_cross_platform_compatibility() {
        let mut fm = FileMeta::new();

        let paths = vec![
            "unix/style/path",
            "windows\\style\\path",
            "mixed/style\\path",
            "unicode/路径/测试",
        ];

        for path in paths {
            let mut fi = crate::fileinfo::FileInfo::new(path, 2, 1);
            fi.version_id = Some(Uuid::new_v4());
            fi.mod_time = Some(OffsetDateTime::now_utc());
            fm.add_version(fi).unwrap();
        }

        let data = fm.marshal_msg().unwrap();
        let mut fm2 = FileMeta::default();
        fm2.unmarshal_msg(&data).unwrap();

        assert_eq!(fm.versions.len(), fm2.versions.len());

        for (v1, v2) in fm.versions.iter().zip(fm2.versions.iter()) {
            assert_eq!(v1.header.version_id, v2.header.version_id);
        }
    }

    #[test]
    fn test_data_integrity_validation() {
        let mut fm = FileMeta::new();

        let mut fi = crate::fileinfo::FileInfo::new("test", 2, 1);
        fi.version_id = Some(Uuid::new_v4());
        fi.mod_time = Some(OffsetDateTime::now_utc());
        fm.add_version(fi).unwrap();

        assert!(fm.validate_integrity().is_ok());
    }

    #[test]
    fn test_version_merge_scenarios() {
        let mut versions1 = vec![];
        let mut versions2 = vec![];

        for i in 0..3 {
            let mut fi1 = crate::fileinfo::FileInfo::new(&format!("test1-{i}"), 2, 1);
            fi1.version_id = Some(Uuid::new_v4());
            fi1.mod_time = Some(OffsetDateTime::from_unix_timestamp(1000 + i * 10).unwrap());

            let mut fi2 = crate::fileinfo::FileInfo::new(&format!("test2-{i}"), 2, 1);
            fi2.version_id = Some(Uuid::new_v4());
            fi2.mod_time = Some(OffsetDateTime::from_unix_timestamp(1005 + i * 10).unwrap());

            let version1 = FileMetaVersion::from(fi1);
            let version2 = FileMetaVersion::from(fi2);

            versions1.push(FileMetaShallowVersion::try_from(version1).unwrap());
            versions2.push(FileMetaShallowVersion::try_from(version2).unwrap());
        }

        let merged = merge_file_meta_versions(1, false, 0, &[versions1.clone()]);
        assert!(!merged.is_empty(), "merging a single version list should not yield an empty result");

        let merged = merge_file_meta_versions(1, false, 0, &[versions1.clone(), versions2.clone()]);
        println!("number of merged versions: {}", merged.len());
    }

    #[test]
    fn test_flags_operations() {
        let flags = vec![Flags::FreeVersion, Flags::UsesDataDir, Flags::InlineData];

        for flag in flags {
            let flag_value = flag as u8;
            assert!(flag_value > 0, "flag values should be greater than 0");

            let combined = Flags::FreeVersion as u8 | Flags::UsesDataDir as u8;
            assert!(combined > 0, "combined flags should be greater than 0");
        }
    }

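    // Checks that random UUIDs survive MetaObject serialization; the nil-UUID case is
    // only printed because it may be normalized during encoding.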
    #[test]
    fn test_uuid_handling_edge_cases() {
        let test_uuids = vec![Uuid::new_v4()];

        for uuid in test_uuids {
            let obj = MetaObject {
                version_id: Some(uuid),
                data_dir: Some(uuid),
                ..Default::default()
            };

            let data = obj.marshal_msg().unwrap();
            let mut obj2 = MetaObject::default();
            obj2.unmarshal_msg(&data).unwrap();

            assert_eq!(obj.version_id, obj2.version_id);
            assert_eq!(obj.data_dir, obj2.data_dir);
        }

        let obj = MetaObject {
            version_id: Some(Uuid::nil()),
            data_dir: Some(Uuid::nil()),
            ..Default::default()
        };

        let data = obj.marshal_msg().unwrap();
        let mut obj2 = MetaObject::default();
        obj2.unmarshal_msg(&data).unwrap();

        println!("original version_id: {:?}", obj.version_id);
        println!("deserialized version_id: {:?}", obj2.version_id);
    }

    #[test]
    fn test_part_handling_edge_cases() {
        let mut obj = MetaObject::default();

        assert!(obj.part_numbers.is_empty());
        assert!(obj.part_etags.is_empty());
        assert!(obj.part_sizes.is_empty());

        obj.part_numbers = vec![1];
        obj.part_etags = vec!["etag1".to_string()];
        obj.part_sizes = vec![1024];
        obj.part_actual_sizes = vec![1024];

        let data = obj.marshal_msg().unwrap();
        let mut obj2 = MetaObject::default();
        obj2.unmarshal_msg(&data).unwrap();

        assert_eq!(obj.part_numbers, obj2.part_numbers);
        assert_eq!(obj.part_etags, obj2.part_etags);
        assert_eq!(obj.part_sizes, obj2.part_sizes);
        assert_eq!(obj.part_actual_sizes, obj2.part_actual_sizes);

        obj.part_numbers = vec![1, 2, 3];
        obj.part_etags = vec!["etag1".to_string(), "etag2".to_string(), "etag3".to_string()];
        obj.part_sizes = vec![1024, 2048, 512];
        obj.part_actual_sizes = vec![1024, 2048, 512];

        let data = obj.marshal_msg().unwrap();
        let mut obj2 = MetaObject::default();
        obj2.unmarshal_msg(&data).unwrap();

        assert_eq!(obj.part_numbers, obj2.part_numbers);
        assert_eq!(obj.part_etags, obj2.part_etags);
        assert_eq!(obj.part_sizes, obj2.part_sizes);
        assert_eq!(obj.part_actual_sizes, obj2.part_actual_sizes);
    }

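    // FileMetaVersionHeader::is_valid requires a known version type and consistent
    // erasure parameters; a header with a zero EC parameter is treated as carrying
    // no erasure info and therefore still validates.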
    #[test]
    fn test_version_header_validation() {
        let mut header = FileMetaVersionHeader {
            version_type: VersionType::Object,
            mod_time: Some(OffsetDateTime::now_utc()),
            ec_m: 2,
            ec_n: 1,
            ..Default::default()
        };
        assert!(header.is_valid());

        header.version_type = VersionType::Invalid;
        assert!(!header.is_valid());

        header.version_type = VersionType::Object;
        assert!(header.is_valid());

        header.ec_m = 0;
        header.ec_n = 1;
        assert!(header.is_valid());

        header.ec_m = 2;
        header.ec_n = 0;
        assert!(header.is_valid());

        header.ec_n = 1;
        assert!(header.is_valid());
    }

    #[test]
    fn test_special_characters_in_metadata() {
        let mut obj = MetaObject::default();

        let special_cases = vec![
            ("empty", ""),
            ("unicode", "测试🚀🎉"),
            ("newlines", "line1\nline2\nline3"),
            ("tabs", "col1\tcol2\tcol3"),
            ("quotes", "\"quoted\" and 'single'"),
            ("backslashes", "path\\to\\file"),
            ("mixed", "Mixed: 中文,English, 123, !@#$%"),
        ];

        for (key, value) in special_cases {
            obj.meta_user.insert(key.to_string(), value.to_string());
        }

        let data = obj.marshal_msg().unwrap();
        let mut obj2 = MetaObject::default();
        obj2.unmarshal_msg(&data).unwrap();

        assert_eq!(obj.meta_user, obj2.meta_user);

        for (key, expected_value) in [
            ("empty", ""),
            ("unicode", "测试🚀🎉"),
            ("newlines", "line1\nline2\nline3"),
            ("tabs", "col1\tcol2\tcol3"),
            ("quotes", "\"quoted\" and 'single'"),
            ("backslashes", "path\\to\\file"),
            ("mixed", "Mixed: 中文,English, 123, !@#$%"),
        ] {
            assert_eq!(obj2.meta_user.get(key), Some(&expected_value.to_string()));
        }
    }
}

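// Integration-style test kept outside `mod test`: it writes a padded xl.meta buffer to
// disk and checks that read_xl_meta_no_data returns enough bytes to reconstruct the
// same FileMeta without reading the trailing data.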
#[tokio::test]
async fn test_read_xl_meta_no_data() {
    use tokio::fs;
    use tokio::fs::File;
    use tokio::io::AsyncWriteExt;

    let mut fm = FileMeta::new();

    let (m, n) = (3, 2);

    for i in 0..5 {
        let mut fi = FileInfo::new(i.to_string().as_str(), m, n);
        fi.mod_time = Some(OffsetDateTime::now_utc());

        fm.add_version(fi).unwrap();
    }

    let mut buff = fm.marshal_msg().unwrap();

    // Append 100 zero bytes to emulate trailing data after the metadata block.
    buff.resize(buff.len() + 100, 0);

    let filepath = "./test_xl.meta";

    let mut file = File::create(filepath).await.unwrap();
    file.write_all(&buff).await.unwrap();

    let mut f = File::open(filepath).await.unwrap();

    let stat = f.metadata().await.unwrap();

    let data = read_xl_meta_no_data(&mut f, stat.len() as usize).await.unwrap();

    let mut newfm = FileMeta::default();
    newfm.unmarshal_msg(&data).unwrap();

    fs::remove_file(filepath).await.unwrap();

    assert_eq!(fm, newfm)
}

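/// Summary counters for the versions stored in a `FileMeta`.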
#[derive(Debug, Default, Clone)]
pub struct VersionStats {
    pub total_versions: usize,
    pub object_versions: usize,
    pub delete_markers: usize,
    pub invalid_versions: usize,
    pub free_versions: usize,
}

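// `is_valid` performs basic sanity checks on a version header: the type must be known,
// the modification time must not be unreasonably far in the future, and any erasure
// parameters must be consistent.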
impl FileMetaVersionHeader {
    pub fn is_valid(&self) -> bool {
        if !self.version_type.valid() {
            return false;
        }

        if let Some(mod_time) = self.mod_time {
            // Allow some clock skew, but reject modification times more than
            // 24 hours in the future.
            let now = OffsetDateTime::now_utc();
            let future_limit = now + time::Duration::hours(24);
            if mod_time > future_limit {
                return false;
            }
        }

        if self.has_ec() && (self.ec_n == 0 || self.ec_m == 0 || self.ec_m < self.ec_n) {
            return false;
        }

        true
    }
}

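/// A more detailed breakdown of the versions in a `FileMeta` than `VersionStats`,
/// including legacy/free version counts, data-dir and inline-data usage, the total
/// object size, and the latest modification time.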
#[derive(Debug, Default, Clone)]
pub struct DetailedVersionStats {
    pub total_versions: usize,
    pub object_versions: usize,
    pub delete_markers: usize,
    pub invalid_versions: usize,
    pub legacy_versions: usize,
    pub free_versions: usize,
    pub versions_with_data_dir: usize,
    pub versions_with_inline_data: usize,
    pub total_size: i64,
    pub latest_mod_time: Option<OffsetDateTime>,
}

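// `get_detailed_version_stats` walks every version once: the counters come from the
// shallow headers, while size, data-dir, and inline-data usage require decoding the
// per-version metadata.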
impl FileMeta {
    pub fn get_detailed_version_stats(&self) -> DetailedVersionStats {
        let mut stats = DetailedVersionStats {
            total_versions: self.versions.len(),
            ..Default::default()
        };

        for version in &self.versions {
            match version.header.version_type {
                VersionType::Object => {
                    stats.object_versions += 1;
                    // Object details (size, data dir, inline data) live in the full
                    // version metadata, so decode it on demand.
                    if let Ok(ver) = FileMetaVersion::try_from(version.meta.as_slice()) {
                        if let Some(obj) = &ver.object {
                            stats.total_size += obj.size;
                            if obj.uses_data_dir() {
                                stats.versions_with_data_dir += 1;
                            }
                            if obj.inlinedata() {
                                stats.versions_with_inline_data += 1;
                            }
                        }
                    }
                }
                VersionType::Delete => stats.delete_markers += 1,
                VersionType::Legacy => stats.legacy_versions += 1,
                VersionType::Invalid => stats.invalid_versions += 1,
            }

            if version.header.free_version() {
                stats.free_versions += 1;
            }

            if stats.latest_mod_time.is_none()
                || (version.header.mod_time.is_some() && version.header.mod_time > stats.latest_mod_time)
            {
                stats.latest_mod_time = version.header.mod_time;
            }
        }

        stats
    }
}