use rand::{Rng, SeedableRng};
use rand_chacha::ChaCha8Rng;
use std::io;
21
/// Size of one simulated disk sector in bytes.
pub const SECTOR_SIZE: usize = 512;

/// Maximum number of simultaneously active read overlays; `apply_misdirected_write`
/// installs exactly one pair (intended range + mistaken range).
const MAX_OVERLAYS: usize = 2;
34
/// Fixed-size bit set tracking per-sector boolean state (e.g. written / faulted).
///
/// Bits are packed 64 per `u64` word. Invariant: bits at positions >= `len`
/// in the last word are always zero (`indices` asserts the bound, so `set`
/// can never touch them); `resize_copy` relies on this to copy whole words.
#[derive(Debug, Clone)]
pub struct SectorBitSet {
    // Packed bits, 64 sectors per word.
    bits: Vec<u64>,
    // Number of valid sector bits.
    len: usize,
}

impl SectorBitSet {
    /// Creates a bit set for `num_sectors` sectors, all bits cleared.
    pub fn new(num_sectors: usize) -> Self {
        let num_words = num_sectors.div_ceil(64);
        Self {
            bits: vec![0; num_words],
            len: num_sectors,
        }
    }

    /// Maps a sector index to `(word index, bit-within-word)`.
    ///
    /// # Panics
    /// Panics if `sector >= self.len`.
    fn indices(&self, sector: usize) -> (usize, usize) {
        assert!(sector < self.len, "sector index out of bounds");
        (sector / 64, sector % 64)
    }

    /// Sets the bit for `sector`.
    pub fn set(&mut self, sector: usize) {
        let (word, bit) = self.indices(sector);
        self.bits[word] |= 1 << bit;
    }

    /// Clears the bit for `sector`.
    pub fn clear(&mut self, sector: usize) {
        let (word, bit) = self.indices(sector);
        self.bits[word] &= !(1 << bit);
    }

    /// Returns whether the bit for `sector` is set.
    pub fn is_set(&self, sector: usize) -> bool {
        let (word, bit) = self.indices(sector);
        (self.bits[word] & (1 << bit)) != 0
    }

    /// Number of sectors tracked (not the number of set bits).
    pub fn len(&self) -> usize {
        self.len
    }

    /// True when the set tracks zero sectors.
    pub fn is_empty(&self) -> bool {
        self.len == 0
    }

    /// Builds a new bit set of `new_len` sectors, preserving the overlapping
    /// prefix of `other`.
    ///
    /// Copies whole `u64` words and masks the partial tail word, so this runs
    /// in O(words) rather than the O(bits) of a per-sector loop.
    pub fn resize_copy(other: &Self, new_len: usize) -> Self {
        let mut new_bitset = Self::new(new_len);
        let copy_len = other.len.min(new_len);

        let full_words = copy_len / 64;
        new_bitset.bits[..full_words].copy_from_slice(&other.bits[..full_words]);

        let tail_bits = copy_len % 64;
        if tail_bits != 0 {
            // Keep only the bits that fall inside the copied range; bits past
            // `other.len` in `other`'s last word are already zero by invariant.
            new_bitset.bits[full_words] = other.bits[full_words] & ((1u64 << tail_bits) - 1);
        }

        new_bitset
    }
}
121
/// A read-time byte-range patch layered over `InMemoryStorage::data`,
/// installed by `apply_misdirected_write` and consulted by `apply_overlays`.
#[derive(Debug, Clone)]
struct WriteOverlay {
    // Absolute byte offset the overlay covers.
    offset: u64,
    // Length of the covered range in bytes (matches `data.len()`).
    size: u32,
    // Bytes returned instead of the underlying data for this range.
    data: Vec<u8>,
    // Inactive overlays are skipped by `apply_overlays`.
    active: bool,
}
137
/// A write that has not been synced yet; eligible for fault injection in
/// `apply_crash` and discarded by `sync`.
#[derive(Debug, Clone)]
struct PendingWrite {
    // Absolute byte offset of the write.
    offset: u64,
    // The written bytes (kept so crash handling knows the sector span).
    data: Vec<u8>,
    // Phantom writes were never applied to the backing data
    // (see `record_phantom_write`); `apply_crash` skips them.
    is_phantom: bool,
}
151
/// Deterministic in-memory block-device model with fault injection:
/// seeded pseudo-random contents for unwritten sectors, per-sector bit-flip
/// faults, misdirected read/write simulation via overlays, and crash
/// handling of unsynced writes.
#[derive(Debug)]
pub struct InMemoryStorage {
    // Raw backing bytes; always `size` bytes long.
    data: Vec<u8>,
    // Sectors that have been written at least once.
    written: SectorBitSet,
    // Sectors that return one flipped bit on read.
    faults: SectorBitSet,
    // Read-time overlays installed by `apply_misdirected_write`.
    overlays: [Option<WriteOverlay>; MAX_OVERLAYS],
    // Unsynced (or phantom) writes awaiting `sync` or `apply_crash`.
    pending_writes: Vec<PendingWrite>,
    // Capacity in bytes.
    size: u64,
    // Seed for all deterministic pseudo-randomness.
    seed: u64,
}
194
195impl InMemoryStorage {
196 pub fn new(size: u64, seed: u64) -> Self {
203 let num_sectors = (size as usize).div_ceil(SECTOR_SIZE);
204 Self {
205 data: vec![0; size as usize],
206 written: SectorBitSet::new(num_sectors),
207 faults: SectorBitSet::new(num_sectors),
208 overlays: [const { None }; MAX_OVERLAYS],
209 pending_writes: Vec::new(),
210 size,
211 seed,
212 }
213 }
214
    /// Current storage capacity in bytes.
    pub fn size(&self) -> u64 {
        self.size
    }
219
    /// Number of sectors the storage currently spans (the last one may be
    /// partial; see the `div_ceil` in `new`/`resize`).
    pub fn num_sectors(&self) -> usize {
        self.written.len()
    }
224
225 pub fn resize(&mut self, new_size: u64) {
230 let old_size = self.size;
231 self.size = new_size;
232
233 self.data.resize(new_size as usize, 0);
235
236 let new_num_sectors = (new_size as usize).div_ceil(SECTOR_SIZE);
238 let old_num_sectors = self.written.len();
239
240 if new_num_sectors != old_num_sectors {
241 self.written = SectorBitSet::resize_copy(&self.written, new_num_sectors);
242 self.faults = SectorBitSet::resize_copy(&self.faults, new_num_sectors);
243 }
244
245 tracing::trace!(
247 "InMemoryStorage resized from {} to {} bytes ({} to {} sectors)",
248 old_size,
249 new_size,
250 old_num_sectors,
251 new_num_sectors
252 );
253 }
254
    /// Reads `buf.len()` bytes at `offset` into `buf`.
    ///
    /// The result is layered, in order: raw backing bytes, then deterministic
    /// pseudo-random filler for never-written sectors, then a single-bit
    /// flip for faulted sectors, and finally any active overlays.
    ///
    /// # Errors
    /// Returns `InvalidInput` if the range overflows `u64` or extends past
    /// the end of storage (reads do NOT auto-extend; writes do).
    pub fn read(&self, offset: u64, buf: &mut [u8]) -> io::Result<()> {
        let end = offset
            .checked_add(buf.len() as u64)
            .ok_or_else(|| io::Error::new(io::ErrorKind::InvalidInput, "offset overflow"))?;

        if end > self.size {
            return Err(io::Error::new(
                io::ErrorKind::InvalidInput,
                format!(
                    "read past end of storage: offset={}, len={}, size={}",
                    offset,
                    buf.len(),
                    self.size
                ),
            ));
        }

        let offset_usize = offset as usize;
        // Start from the raw backing bytes; sector effects are layered on top.
        buf.copy_from_slice(&self.data[offset_usize..offset_usize + buf.len()]);

        let start_sector = offset_usize / SECTOR_SIZE;
        let end_sector = (offset_usize + buf.len()).div_ceil(SECTOR_SIZE);

        for sector in start_sector..end_sector {
            if sector >= self.written.len() {
                break;
            }

            let sector_start = sector * SECTOR_SIZE;
            let sector_end = sector_start + SECTOR_SIZE;

            // Clamp this sector's byte range to the slice of `buf` it overlaps.
            let buf_start = sector_start.saturating_sub(offset_usize);
            let buf_end = (sector_end.saturating_sub(offset_usize)).min(buf.len());

            if buf_start >= buf_end {
                continue;
            }

            let sector_buf = &mut buf[buf_start..buf_end];

            if !self.written.is_set(sector) {
                // Unwritten sectors read as seeded pseudo-random noise.
                self.fill_unwritten_sector(sector, sector_buf, sector_start, offset_usize);
            }

            if self.faults.is_set(sector) {
                // Faulted sectors get exactly one bit flipped, deterministically.
                self.apply_corruption(sector, sector_buf);
            }
        }

        // Overlays (misdirected-write simulation) win over everything else.
        self.apply_overlays(offset, buf);

        Ok(())
    }
331
332 fn fill_unwritten_sector(
334 &self,
335 sector: usize,
336 buf: &mut [u8],
337 sector_start: usize,
338 read_offset: usize,
339 ) {
340 let mut rng = ChaCha8Rng::seed_from_u64(self.seed.wrapping_add(sector as u64));
342
343 let mut sector_data = [0u8; SECTOR_SIZE];
345 rng.fill(&mut sector_data);
346
347 let offset_in_sector = read_offset.saturating_sub(sector_start);
349 let copy_start = offset_in_sector.min(SECTOR_SIZE);
350 let copy_len = buf.len().min(SECTOR_SIZE - copy_start);
351
352 buf[..copy_len].copy_from_slice(§or_data[copy_start..copy_start + copy_len]);
353 }
354
355 fn apply_corruption(&self, sector: usize, buf: &mut [u8]) {
362 if buf.is_empty() {
363 return;
364 }
365
366 let sector_start = sector * SECTOR_SIZE;
368 let mut seed_bytes = [0u8; 8];
369 if sector_start + 8 <= self.data.len() {
370 seed_bytes.copy_from_slice(&self.data[sector_start..sector_start + 8]);
371 }
372 let seed = u64::from_le_bytes(seed_bytes);
373
374 let mut rng = ChaCha8Rng::seed_from_u64(seed);
375 let byte_idx = rng.random_range(0..buf.len());
376 let bit_idx = rng.random_range(0..8u8);
377 buf[byte_idx] ^= 1 << bit_idx;
378 }
379
380 fn apply_overlays(&self, offset: u64, buf: &mut [u8]) {
382 for overlay in self.overlays.iter().flatten() {
383 if !overlay.active {
384 continue;
385 }
386
387 let overlay_end = overlay.offset + overlay.size as u64;
389 let read_end = offset + buf.len() as u64;
390
391 if overlay.offset >= read_end || overlay_end <= offset {
392 continue;
393 }
394
395 let intersect_start = overlay.offset.max(offset);
397 let intersect_end = overlay_end.min(read_end);
398
399 let buf_offset = (intersect_start - offset) as usize;
400 let overlay_offset = (intersect_start - overlay.offset) as usize;
401 let copy_len = (intersect_end - intersect_start) as usize;
402
403 buf[buf_offset..buf_offset + copy_len]
404 .copy_from_slice(&overlay.data[overlay_offset..overlay_offset + copy_len]);
405 }
406 }
407
408 pub fn write(&mut self, offset: u64, data: &[u8], is_synced: bool) -> io::Result<()> {
423 let end = offset
425 .checked_add(data.len() as u64)
426 .ok_or_else(|| io::Error::new(io::ErrorKind::InvalidInput, "offset overflow"))?;
427
428 if end > self.size {
430 self.resize(end);
431 }
432
433 let offset_usize = offset as usize;
434
435 let start_sector = offset_usize / SECTOR_SIZE;
437 let end_sector = (offset_usize + data.len()).div_ceil(SECTOR_SIZE);
438
439 for sector in start_sector..end_sector {
440 if sector < self.written.len() {
441 self.written.set(sector);
442 self.faults.clear(sector);
443 }
444 }
445
446 self.data[offset_usize..offset_usize + data.len()].copy_from_slice(data);
448
449 if !is_synced {
451 self.pending_writes.push(PendingWrite {
452 offset,
453 data: data.to_vec(),
454 is_phantom: false,
455 });
456 }
457
458 Ok(())
459 }
460
    /// Durably commits all pending writes: after a sync, `apply_crash` can no
    /// longer fault them.
    pub fn sync(&mut self) {
        self.pending_writes.clear();
    }
467
    /// Simulates a misdirected write: data intended for `intended_offset`
    /// actually landed at `mistaken_offset`.
    ///
    /// The backing data at the intended range IS updated (so `clear_overlays`
    /// later reveals the write), but two read overlays are installed so that,
    /// while active:
    /// * reads of the intended range still see the OLD data, and
    /// * reads of the mistaken range see the NEW data.
    ///
    /// Storage is auto-extended to fit both ranges if needed.
    ///
    /// # Errors
    /// Returns `InvalidInput` if either range overflows `u64`.
    pub fn apply_misdirected_write(
        &mut self,
        intended_offset: u64,
        mistaken_offset: u64,
        data: &[u8],
    ) -> io::Result<()> {
        let intended_end = intended_offset
            .checked_add(data.len() as u64)
            .ok_or_else(|| {
                io::Error::new(io::ErrorKind::InvalidInput, "intended offset overflow")
            })?;

        let mistaken_end = mistaken_offset
            .checked_add(data.len() as u64)
            .ok_or_else(|| {
                io::Error::new(io::ErrorKind::InvalidInput, "mistaken offset overflow")
            })?;

        // Both ranges must fit before touching `data` below.
        let required_size = intended_end.max(mistaken_end);
        if required_size > self.size {
            self.resize(required_size);
        }

        // Snapshot the intended range BEFORE overwriting it, so overlay 0 can
        // keep serving the pre-write contents.
        let intended_usize = intended_offset as usize;
        let old_data = self.data[intended_usize..intended_usize + data.len()].to_vec();

        // Overlay 0: intended range still reads as the old data.
        self.overlays[0] = Some(WriteOverlay {
            offset: intended_offset,
            size: data.len() as u32,
            data: old_data,
            active: true,
        });

        // Overlay 1: mistaken range reads as the new data.
        self.overlays[1] = Some(WriteOverlay {
            offset: mistaken_offset,
            size: data.len() as u32,
            data: data.to_vec(),
            active: true,
        });

        self.data[intended_usize..intended_usize + data.len()].copy_from_slice(data);

        // Mark intended sectors written (unlike `write`, faults are NOT cleared).
        let start_sector = intended_usize / SECTOR_SIZE;
        let end_sector = (intended_usize + data.len()).div_ceil(SECTOR_SIZE);
        for sector in start_sector..end_sector {
            if sector < self.written.len() {
                self.written.set(sector);
            }
        }

        Ok(())
    }
544
545 pub fn clear_overlays(&mut self) {
549 for overlay in &mut self.overlays {
550 *overlay = None;
551 }
552 }
553
    /// Performs a read that deterministically lands at the wrong offset,
    /// simulating a misdirected read.
    ///
    /// The wrong offset is derived from the storage seed plus the requested
    /// offset, so the same call always misdirects to the same place.
    ///
    /// NOTE(review): when storage is barely larger than `buf`, the
    /// sector-sized nudge below wraps modulo `max_offset` and can land back
    /// on `offset` (e.g. `max_offset == 1`, `offset == 0`), so in degenerate
    /// cases the read may not actually be misdirected — confirm intended.
    pub fn read_misdirected(&self, offset: u64, buf: &mut [u8]) -> io::Result<()> {
        if buf.is_empty() {
            return Ok(());
        }

        let mut rng = ChaCha8Rng::seed_from_u64(self.seed.wrapping_add(offset));

        let max_offset = self.size.saturating_sub(buf.len() as u64);
        if max_offset == 0 {
            // Buffer covers the whole storage; offset 0 is the only option.
            return self.read(0, buf);
        }

        let mut misdirected_offset = rng.random_range(0..max_offset);

        if misdirected_offset == offset {
            // Nudge away from the requested offset by one sector.
            misdirected_offset = (misdirected_offset + SECTOR_SIZE as u64) % max_offset;
        }

        self.read(misdirected_offset, buf)
    }
592
593 pub fn record_phantom_write(&mut self, offset: u64, data: &[u8]) {
604 self.pending_writes.push(PendingWrite {
605 offset,
606 data: data.to_vec(),
607 is_phantom: true,
608 });
609 }
611
    /// Simulates a crash: each pending (unsynced, non-phantom) write has a
    /// `crash_fault_probability` chance of leaving one of its sectors
    /// faulted. All pending writes are then discarded.
    ///
    /// Fault selection is deterministic for a given seed; note the RNG draw
    /// order matters (one `f64` per non-phantom pending write, plus one
    /// range draw per faulted write).
    pub fn apply_crash(&mut self, crash_fault_probability: f64) {
        let mut rng = ChaCha8Rng::seed_from_u64(self.seed);

        for pending in &self.pending_writes {
            // Phantom writes never reached the media; nothing to fault.
            if pending.is_phantom {
                continue;
            }

            if rng.random::<f64>() >= crash_fault_probability {
                continue;
            }

            let offset_usize = pending.offset as usize;
            let start_sector = offset_usize / SECTOR_SIZE;
            let end_sector = (offset_usize + pending.data.len()).div_ceil(SECTOR_SIZE);

            if start_sector < end_sector && end_sector <= self.faults.len() {
                // Fault exactly one sector within the write's span.
                let faulted_sector = rng.random_range(start_sector..end_sector);
                self.faults.set(faulted_sector);
            }
        }

        self.pending_writes.clear();
    }
650
651 pub fn set_fault(&mut self, sector: usize) {
655 if sector < self.faults.len() {
656 self.faults.set(sector);
657 }
658 }
659
660 pub fn clear_fault(&mut self, sector: usize) {
662 if sector < self.faults.len() {
663 self.faults.clear(sector);
664 }
665 }
666
667 pub fn has_fault(&self, sector: usize) -> bool {
669 sector < self.faults.len() && self.faults.is_set(sector)
670 }
671
672 pub fn is_written(&self, sector: usize) -> bool {
674 sector < self.written.len() && self.written.is_set(sector)
675 }
676}
677
678#[cfg(test)]
679mod tests {
680 use super::*;
681
682 #[test]
683 fn test_basic_write_read() {
684 let mut storage = InMemoryStorage::new(4096, 42);
685
686 let data = b"Hello, World!";
688 storage.write(0, data, true).expect("write failed");
689
690 let mut buf = vec![0u8; data.len()];
692 storage.read(0, &mut buf).expect("read failed");
693
694 assert_eq!(&buf, data);
695 }
696
697 #[test]
698 fn test_unwritten_sector_deterministic() {
699 let storage1 = InMemoryStorage::new(4096, 42);
700 let storage2 = InMemoryStorage::new(4096, 42);
701
702 let mut buf1 = vec![0u8; SECTOR_SIZE];
704 let mut buf2 = vec![0u8; SECTOR_SIZE];
705
706 storage1.read(0, &mut buf1).expect("read1 failed");
707 storage2.read(0, &mut buf2).expect("read2 failed");
708
709 assert_eq!(buf1, buf2);
711
712 let storage3 = InMemoryStorage::new(4096, 99);
714 let mut buf3 = vec![0u8; SECTOR_SIZE];
715 storage3.read(0, &mut buf3).expect("read3 failed");
716
717 assert_ne!(buf1, buf3);
718 }
719
720 #[test]
721 fn test_fault_corruption() {
722 let mut storage = InMemoryStorage::new(4096, 42);
723
724 let data = vec![0xAA; SECTOR_SIZE];
726 storage.write(0, &data, true).expect("write failed");
727
728 let mut buf_clean = vec![0u8; SECTOR_SIZE];
730 storage.read(0, &mut buf_clean).expect("read failed");
731 assert_eq!(buf_clean, data);
732
733 storage.set_fault(0);
735 let mut buf_faulted = vec![0u8; SECTOR_SIZE];
736 storage.read(0, &mut buf_faulted).expect("read failed");
737
738 assert_ne!(buf_faulted, data);
740
741 let bit_diffs: u32 = buf_clean
743 .iter()
744 .zip(buf_faulted.iter())
745 .map(|(a, b)| (*a ^ *b).count_ones())
746 .sum();
747 assert_eq!(bit_diffs, 1, "Expected exactly one bit flip");
748 }
749
750 #[test]
751 fn test_corruption_determinism() {
752 let mut storage = InMemoryStorage::new(4096, 42);
753
754 let data = vec![0xAA; SECTOR_SIZE];
756 storage.write(0, &data, true).expect("write failed");
757 storage.set_fault(0);
758
759 let mut buf1 = vec![0u8; SECTOR_SIZE];
761 let mut buf2 = vec![0u8; SECTOR_SIZE];
762 storage.read(0, &mut buf1).expect("read1 failed");
763 storage.read(0, &mut buf2).expect("read2 failed");
764
765 assert_eq!(buf1, buf2);
767 }
768
769 #[test]
770 fn test_misdirected_write() {
771 let mut storage = InMemoryStorage::new(4096, 42);
772
773 let original_intended = vec![0x11; SECTOR_SIZE];
775 let original_mistaken = vec![0x22; SECTOR_SIZE];
776 storage
777 .write(0, &original_intended, true)
778 .expect("write1 failed");
779 storage
780 .write(SECTOR_SIZE as u64, &original_mistaken, true)
781 .expect("write2 failed");
782
783 let new_data = vec![0xFF; SECTOR_SIZE];
785 storage
786 .apply_misdirected_write(0, SECTOR_SIZE as u64, &new_data)
787 .expect("misdirect failed");
788
789 let mut buf_intended = vec![0u8; SECTOR_SIZE];
791 storage.read(0, &mut buf_intended).expect("read failed");
792 assert_eq!(buf_intended, original_intended);
793
794 let mut buf_mistaken = vec![0u8; SECTOR_SIZE];
796 storage
797 .read(SECTOR_SIZE as u64, &mut buf_mistaken)
798 .expect("read failed");
799 assert_eq!(buf_mistaken, new_data);
800
801 storage.clear_overlays();
803 storage.read(0, &mut buf_intended).expect("read failed");
804 assert_eq!(buf_intended, new_data); }
806
807 #[test]
808 fn test_phantom_write_lost_on_crash() {
809 let mut storage = InMemoryStorage::new(4096, 42);
810
811 let real_data = vec![0x11; SECTOR_SIZE];
813 storage.write(0, &real_data, true).expect("write failed");
814
815 let phantom_data = vec![0xFF; SECTOR_SIZE];
817 storage.record_phantom_write(0, &phantom_data);
818
819 let mut buf = vec![0u8; SECTOR_SIZE];
821 storage.read(0, &mut buf).expect("read failed");
822 assert_eq!(buf, real_data);
823
824 storage.apply_crash(1.0);
826
827 storage.read(0, &mut buf).expect("read failed");
829 assert_eq!(buf, real_data);
830 }
831
832 #[test]
833 fn test_crash_faults_pending_writes() {
834 let mut storage = InMemoryStorage::new(4096, 42);
835
836 let data = vec![0xAA; SECTOR_SIZE];
838 storage.write(0, &data, false).expect("write failed");
839
840 storage.apply_crash(1.0);
842
843 assert!(storage.has_fault(0));
845
846 let mut buf = vec![0u8; SECTOR_SIZE];
848 storage.read(0, &mut buf).expect("read failed");
849 assert_ne!(buf, data);
850 }
851
852 #[test]
853 fn test_sync_clears_pending() {
854 let mut storage = InMemoryStorage::new(4096, 42);
855
856 let data = vec![0xAA; SECTOR_SIZE];
858 storage.write(0, &data, false).expect("write failed");
859
860 storage.sync();
862
863 storage.apply_crash(1.0);
865
866 assert!(!storage.has_fault(0));
868
869 let mut buf = vec![0u8; SECTOR_SIZE];
871 storage.read(0, &mut buf).expect("read failed");
872 assert_eq!(buf, data);
873 }
874
875 #[test]
876 fn test_sector_bitset() {
877 let mut bitset = SectorBitSet::new(100);
878
879 assert!(!bitset.is_set(0));
880 assert!(!bitset.is_set(50));
881 assert!(!bitset.is_set(99));
882
883 bitset.set(0);
884 bitset.set(50);
885 bitset.set(99);
886
887 assert!(bitset.is_set(0));
888 assert!(bitset.is_set(50));
889 assert!(bitset.is_set(99));
890 assert!(!bitset.is_set(1));
891
892 bitset.clear(50);
893 assert!(!bitset.is_set(50));
894
895 assert_eq!(bitset.len(), 100);
896 }
897
898 #[test]
899 fn test_read_past_end() {
900 let storage = InMemoryStorage::new(1024, 42);
901
902 let mut buf = vec![0u8; 100];
903 let result = storage.read(1000, &mut buf);
904
905 assert!(result.is_err());
906 }
907
908 #[test]
909 fn test_write_past_end_auto_extends() {
910 let mut storage = InMemoryStorage::new(1024, 42);
911
912 let data = vec![0xAB; 100];
914 let result = storage.write(1000, &data, true);
915
916 assert!(result.is_ok());
917 assert_eq!(storage.size(), 1100); let mut read_buf = vec![0u8; 100];
921 storage.read(1000, &mut read_buf).expect("read failed");
922 assert_eq!(read_buf, data);
923 }
924
925 #[test]
926 fn test_read_misdirected() {
927 let mut storage = InMemoryStorage::new(4096, 42);
928
929 let data0 = vec![0x11; SECTOR_SIZE];
931 let data1 = vec![0x22; SECTOR_SIZE];
932 let data2 = vec![0x33; SECTOR_SIZE];
933
934 storage.write(0, &data0, true).expect("write failed");
935 storage
936 .write(SECTOR_SIZE as u64, &data1, true)
937 .expect("write failed");
938 storage
939 .write(2 * SECTOR_SIZE as u64, &data2, true)
940 .expect("write failed");
941
942 let mut buf = vec![0u8; SECTOR_SIZE];
944 storage.read_misdirected(0, &mut buf).expect("read failed");
945
946 assert_ne!(buf, data0);
948 }
949
950 #[test]
951 fn test_partial_sector_read() {
952 let mut storage = InMemoryStorage::new(4096, 42);
953
954 let data: Vec<u8> = (0..SECTOR_SIZE).map(|i| (i % 256) as u8).collect();
956 storage.write(0, &data, true).expect("write failed");
957
958 let mut buf = vec![0u8; 100];
960 storage.read(50, &mut buf).expect("read failed");
961
962 assert_eq!(buf, &data[50..150]);
963 }
964
965 #[test]
966 fn test_multi_sector_read() {
967 let mut storage = InMemoryStorage::new(4096, 42);
968
969 let data = vec![0xAB; SECTOR_SIZE * 3];
971 storage.write(0, &data, true).expect("write failed");
972
973 let mut buf = vec![0u8; SECTOR_SIZE * 3];
975 storage.read(0, &mut buf).expect("read failed");
976
977 assert_eq!(buf, data);
978 }
979}